From 79f0ff30307d14f4b9a7b85792e737a49492f2e1 Mon Sep 17 00:00:00 2001 From: sunwei Date: Wed, 1 Jan 2025 11:14:48 +0800 Subject: [PATCH] add vendor --- go.mod | 2 +- go.sum | 1 + internal/domain/content/entity/query.go | 51 +- internal/domain/content/valueobject/site.go | 45 + internal/interfaces/cli/vercurr.go | 8 +- manifest.json | 2 +- .../Azure/go-autorest/autorest/LICENSE | 191 + .../Azure/go-autorest/autorest/adal/LICENSE | 191 + .../Azure/go-autorest/autorest/adal/README.md | 292 + .../Azure/go-autorest/autorest/adal/config.go | 151 + .../go-autorest/autorest/adal/devicetoken.go | 269 + .../autorest/adal/go_mod_tidy_hack.go | 24 + .../go-autorest/autorest/adal/persist.go | 73 + .../Azure/go-autorest/autorest/adal/sender.go | 95 + .../Azure/go-autorest/autorest/adal/token.go | 1135 + .../go-autorest/autorest/adal/version.go | 45 + .../go-autorest/autorest/authorization.go | 337 + .../go-autorest/autorest/authorization_sas.go | 67 + .../autorest/authorization_storage.go | 304 + .../Azure/go-autorest/autorest/autorest.go | 150 + .../Azure/go-autorest/autorest/client.go | 323 + .../Azure/go-autorest/autorest/date/LICENSE | 191 + .../Azure/go-autorest/autorest/date/date.go | 96 + .../autorest/date/go_mod_tidy_hack.go | 24 + .../Azure/go-autorest/autorest/date/time.go | 103 + .../go-autorest/autorest/date/timerfc1123.go | 100 + .../go-autorest/autorest/date/unixtime.go | 123 + .../go-autorest/autorest/date/utility.go | 25 + .../Azure/go-autorest/autorest/error.go | 98 + .../Azure/go-autorest/autorest/preparer.go | 550 + .../Azure/go-autorest/autorest/responder.go | 269 + .../go-autorest/autorest/retriablerequest.go | 52 + .../autorest/retriablerequest_1.7.go | 54 + .../autorest/retriablerequest_1.8.go | 66 + .../Azure/go-autorest/autorest/sender.go | 424 + .../Azure/go-autorest/autorest/utility.go | 239 + .../Azure/go-autorest/autorest/version.go | 41 + .../Azure/go-autorest/logger/LICENSE | 191 + .../Azure/go-autorest/logger/logger.go | 328 + 
.../Azure/go-autorest/tracing/LICENSE | 191 + .../Azure/go-autorest/tracing/tracing.go | 67 + vendor/github.com/BurntSushi/locker/Makefile | 7 + vendor/github.com/BurntSushi/locker/README.md | 21 + vendor/github.com/BurntSushi/locker/UNLICENSE | 24 + vendor/github.com/BurntSushi/locker/locker.go | 108 + .../github.com/BurntSushi/locker/session.vim | 1 + .../RoaringBitmap/roaring/.drone.yml | 20 + .../RoaringBitmap/roaring/.gitignore | 6 + .../RoaringBitmap/roaring/.gitmodules | 0 .../RoaringBitmap/roaring/.travis.yml | 37 + .../github.com/RoaringBitmap/roaring/AUTHORS | 11 + .../RoaringBitmap/roaring/CONTRIBUTORS | 16 + .../github.com/RoaringBitmap/roaring/LICENSE | 235 + .../RoaringBitmap/roaring/LICENSE-2.0.txt | 202 + .../github.com/RoaringBitmap/roaring/Makefile | 111 + .../RoaringBitmap/roaring/README.md | 253 + .../RoaringBitmap/roaring/arraycontainer.go | 980 + .../roaring/arraycontainer_gen.go | 134 + .../RoaringBitmap/roaring/bitmapcontainer.go | 1098 + .../roaring/bitmapcontainer_gen.go | 415 + .../RoaringBitmap/roaring/byte_input.go | 161 + .../github.com/RoaringBitmap/roaring/clz.go | 11 + .../RoaringBitmap/roaring/clz_compat.go | 36 + .../github.com/RoaringBitmap/roaring/ctz.go | 11 + .../RoaringBitmap/roaring/ctz_compat.go | 71 + .../RoaringBitmap/roaring/fastaggregation.go | 215 + .../RoaringBitmap/roaring/manyiterator.go | 18 + .../RoaringBitmap/roaring/parallel.go | 613 + .../RoaringBitmap/roaring/popcnt.go | 11 + .../RoaringBitmap/roaring/popcnt_amd64.s | 103 + .../RoaringBitmap/roaring/popcnt_asm.go | 67 + .../RoaringBitmap/roaring/popcnt_compat.go | 17 + .../RoaringBitmap/roaring/popcnt_generic.go | 23 + .../RoaringBitmap/roaring/popcnt_slices.go | 41 + .../RoaringBitmap/roaring/priorityqueue.go | 101 + .../RoaringBitmap/roaring/roaring.go | 1557 + .../RoaringBitmap/roaring/roaringarray.go | 834 + .../RoaringBitmap/roaring/roaringarray_gen.go | 529 + .../RoaringBitmap/roaring/runcontainer.go | 2526 + .../RoaringBitmap/roaring/runcontainer_gen.go 
| 1104 + .../RoaringBitmap/roaring/serialization.go | 34 + .../roaring/serialization_generic.go | 133 + .../roaring/serialization_littleendian.go | 134 + .../roaring/serializationfuzz.go | 21 + .../RoaringBitmap/roaring/setutil.go | 610 + .../RoaringBitmap/roaring/shortiterator.go | 52 + .../github.com/RoaringBitmap/roaring/smat.go | 383 + .../github.com/RoaringBitmap/roaring/util.go | 304 + .../alecthomas/chroma/v2/.editorconfig | 17 + .../alecthomas/chroma/v2/.gitignore | 25 + .../alecthomas/chroma/v2/.golangci.yml | 92 + .../alecthomas/chroma/v2/.goreleaser.yml | 37 + .../github.com/alecthomas/chroma/v2/Bitfile | 24 + .../github.com/alecthomas/chroma/v2/COPYING | 19 + .../github.com/alecthomas/chroma/v2/Makefile | 23 + .../github.com/alecthomas/chroma/v2/README.md | 297 + .../alecthomas/chroma/v2/coalesce.go | 35 + .../github.com/alecthomas/chroma/v2/colour.go | 192 + .../alecthomas/chroma/v2/delegate.go | 152 + vendor/github.com/alecthomas/chroma/v2/doc.go | 7 + .../alecthomas/chroma/v2/emitters.go | 218 + .../alecthomas/chroma/v2/formatter.go | 43 + .../chroma/v2/formatters/html/html.go | 623 + .../alecthomas/chroma/v2/iterator.go | 76 + .../github.com/alecthomas/chroma/v2/lexer.go | 162 + .../alecthomas/chroma/v2/lexers/README.md | 46 + .../alecthomas/chroma/v2/lexers/caddyfile.go | 275 + .../alecthomas/chroma/v2/lexers/cl.go | 243 + .../alecthomas/chroma/v2/lexers/dns.go | 17 + .../alecthomas/chroma/v2/lexers/emacs.go | 533 + .../chroma/v2/lexers/embedded/abap.xml | 154 + .../chroma/v2/lexers/embedded/abnf.xml | 66 + .../v2/lexers/embedded/actionscript.xml | 68 + .../v2/lexers/embedded/actionscript_3.xml | 163 + .../chroma/v2/lexers/embedded/ada.xml | 321 + .../chroma/v2/lexers/embedded/agda.xml | 66 + .../chroma/v2/lexers/embedded/al.xml | 75 + .../chroma/v2/lexers/embedded/alloy.xml | 58 + .../chroma/v2/lexers/embedded/angular2.xml | 108 + .../chroma/v2/lexers/embedded/antlr.xml | 317 + .../chroma/v2/lexers/embedded/apacheconf.xml | 74 + 
.../chroma/v2/lexers/embedded/apl.xml | 59 + .../chroma/v2/lexers/embedded/applescript.xml | 130 + .../v2/lexers/embedded/arangodb_aql.xml | 174 + .../chroma/v2/lexers/embedded/arduino.xml | 309 + .../chroma/v2/lexers/embedded/armasm.xml | 126 + .../chroma/v2/lexers/embedded/autohotkey.xml | 78 + .../chroma/v2/lexers/embedded/autoit.xml | 70 + .../chroma/v2/lexers/embedded/awk.xml | 95 + .../chroma/v2/lexers/embedded/ballerina.xml | 97 + .../chroma/v2/lexers/embedded/bash.xml | 220 + .../v2/lexers/embedded/bash_session.xml | 25 + .../chroma/v2/lexers/embedded/batchfile.xml | 660 + .../chroma/v2/lexers/embedded/bibtex.xml | 152 + .../chroma/v2/lexers/embedded/bicep.xml | 84 + .../chroma/v2/lexers/embedded/blitzbasic.xml | 141 + .../chroma/v2/lexers/embedded/bnf.xml | 28 + .../chroma/v2/lexers/embedded/bqn.xml | 83 + .../chroma/v2/lexers/embedded/brainfuck.xml | 51 + .../chroma/v2/lexers/embedded/c#.xml | 121 + .../chroma/v2/lexers/embedded/c++.xml | 331 + .../chroma/v2/lexers/embedded/c.xml | 260 + .../chroma/v2/lexers/embedded/cap_n_proto.xml | 122 + .../v2/lexers/embedded/cassandra_cql.xml | 137 + .../chroma/v2/lexers/embedded/ceylon.xml | 151 + .../chroma/v2/lexers/embedded/cfengine3.xml | 197 + .../chroma/v2/lexers/embedded/cfstatement.xml | 92 + .../chroma/v2/lexers/embedded/chaiscript.xml | 134 + .../chroma/v2/lexers/embedded/chapel.xml | 143 + .../chroma/v2/lexers/embedded/cheetah.xml | 55 + .../chroma/v2/lexers/embedded/clojure.xml | 71 + .../chroma/v2/lexers/embedded/cmake.xml | 90 + .../chroma/v2/lexers/embedded/cobol.xml | 90 + .../v2/lexers/embedded/coffeescript.xml | 210 + .../chroma/v2/lexers/embedded/common_lisp.xml | 184 + .../chroma/v2/lexers/embedded/coq.xml | 136 + .../chroma/v2/lexers/embedded/crystal.xml | 762 + .../chroma/v2/lexers/embedded/css.xml | 323 + .../chroma/v2/lexers/embedded/cue.xml | 85 + .../chroma/v2/lexers/embedded/cython.xml | 372 + .../chroma/v2/lexers/embedded/d.xml | 133 + .../chroma/v2/lexers/embedded/dart.xml | 213 + 
.../chroma/v2/lexers/embedded/dax.xml | 39 + .../v2/lexers/embedded/desktop_entry.xml | 17 + .../chroma/v2/lexers/embedded/diff.xml | 52 + .../v2/lexers/embedded/django_jinja.xml | 153 + .../chroma/v2/lexers/embedded/dns.xml | 44 + .../chroma/v2/lexers/embedded/docker.xml | 57 + .../chroma/v2/lexers/embedded/dtd.xml | 168 + .../chroma/v2/lexers/embedded/dylan.xml | 176 + .../chroma/v2/lexers/embedded/ebnf.xml | 90 + .../chroma/v2/lexers/embedded/elixir.xml | 744 + .../chroma/v2/lexers/embedded/elm.xml | 119 + .../chroma/v2/lexers/embedded/emacslisp.xml | 132 + .../chroma/v2/lexers/embedded/erlang.xml | 166 + .../chroma/v2/lexers/embedded/factor.xml | 412 + .../chroma/v2/lexers/embedded/fennel.xml | 68 + .../chroma/v2/lexers/embedded/fish.xml | 159 + .../chroma/v2/lexers/embedded/forth.xml | 78 + .../chroma/v2/lexers/embedded/fortran.xml | 102 + .../v2/lexers/embedded/fortranfixed.xml | 71 + .../chroma/v2/lexers/embedded/fsharp.xml | 245 + .../chroma/v2/lexers/embedded/gas.xml | 150 + .../chroma/v2/lexers/embedded/gdscript.xml | 259 + .../chroma/v2/lexers/embedded/gdscript3.xml | 270 + .../chroma/v2/lexers/embedded/gherkin.xml | 263 + .../chroma/v2/lexers/embedded/glsl.xml | 65 + .../chroma/v2/lexers/embedded/gnuplot.xml | 289 + .../chroma/v2/lexers/embedded/go_template.xml | 114 + .../chroma/v2/lexers/embedded/graphql.xml | 88 + .../chroma/v2/lexers/embedded/groff.xml | 90 + .../chroma/v2/lexers/embedded/groovy.xml | 135 + .../chroma/v2/lexers/embedded/handlebars.xml | 147 + .../chroma/v2/lexers/embedded/hare.xml | 98 + .../chroma/v2/lexers/embedded/haskell.xml | 272 + .../chroma/v2/lexers/embedded/hcl.xml | 143 + .../chroma/v2/lexers/embedded/hexdump.xml | 189 + .../chroma/v2/lexers/embedded/hlb.xml | 149 + .../chroma/v2/lexers/embedded/hlsl.xml | 110 + .../chroma/v2/lexers/embedded/holyc.xml | 252 + .../chroma/v2/lexers/embedded/html.xml | 159 + .../chroma/v2/lexers/embedded/hy.xml | 104 + .../chroma/v2/lexers/embedded/idris.xml | 216 + 
.../chroma/v2/lexers/embedded/igor.xml | 47 + .../chroma/v2/lexers/embedded/ini.xml | 45 + .../chroma/v2/lexers/embedded/io.xml | 71 + .../chroma/v2/lexers/embedded/iscdhcpd.xml | 96 + .../chroma/v2/lexers/embedded/j.xml | 157 + .../chroma/v2/lexers/embedded/java.xml | 193 + .../chroma/v2/lexers/embedded/javascript.xml | 160 + .../chroma/v2/lexers/embedded/json.xml | 111 + .../chroma/v2/lexers/embedded/julia.xml | 400 + .../chroma/v2/lexers/embedded/jungle.xml | 98 + .../chroma/v2/lexers/embedded/kotlin.xml | 223 + .../embedded/lighttpd_configuration_file.xml | 42 + .../chroma/v2/lexers/embedded/llvm.xml | 73 + .../chroma/v2/lexers/embedded/lua.xml | 158 + .../chroma/v2/lexers/embedded/makefile.xml | 131 + .../chroma/v2/lexers/embedded/mako.xml | 120 + .../chroma/v2/lexers/embedded/mason.xml | 89 + .../embedded/materialize_sql_dialect.xml | 155 + .../chroma/v2/lexers/embedded/mathematica.xml | 60 + .../chroma/v2/lexers/embedded/matlab.xml | 114 + .../chroma/v2/lexers/embedded/mcfunction.xml | 182 + .../chroma/v2/lexers/embedded/meson.xml | 85 + .../chroma/v2/lexers/embedded/metal.xml | 270 + .../chroma/v2/lexers/embedded/minizinc.xml | 82 + .../chroma/v2/lexers/embedded/mlir.xml | 73 + .../chroma/v2/lexers/embedded/modula-2.xml | 245 + .../chroma/v2/lexers/embedded/monkeyc.xml | 153 + .../v2/lexers/embedded/morrowindscript.xml | 90 + .../chroma/v2/lexers/embedded/myghty.xml | 77 + .../chroma/v2/lexers/embedded/mysql.xml | 121 + .../chroma/v2/lexers/embedded/nasm.xml | 126 + .../chroma/v2/lexers/embedded/natural.xml | 143 + .../chroma/v2/lexers/embedded/ndisasm.xml | 123 + .../chroma/v2/lexers/embedded/newspeak.xml | 121 + .../embedded/nginx_configuration_file.xml | 98 + .../chroma/v2/lexers/embedded/nim.xml | 211 + .../chroma/v2/lexers/embedded/nix.xml | 258 + .../chroma/v2/lexers/embedded/objective-c.xml | 510 + .../v2/lexers/embedded/objectpascal.xml | 145 + .../chroma/v2/lexers/embedded/ocaml.xml | 145 + .../chroma/v2/lexers/embedded/octave.xml | 101 + 
.../chroma/v2/lexers/embedded/odin.xml | 113 + .../v2/lexers/embedded/onesenterprise.xml | 92 + .../v2/lexers/embedded/openedge_abl.xml | 101 + .../chroma/v2/lexers/embedded/openscad.xml | 96 + .../chroma/v2/lexers/embedded/org_mode.xml | 329 + .../chroma/v2/lexers/embedded/pacmanconf.xml | 37 + .../chroma/v2/lexers/embedded/perl.xml | 400 + .../chroma/v2/lexers/embedded/php.xml | 212 + .../chroma/v2/lexers/embedded/pig.xml | 105 + .../chroma/v2/lexers/embedded/pkgconfig.xml | 73 + .../chroma/v2/lexers/embedded/pl_pgsql.xml | 119 + .../chroma/v2/lexers/embedded/plaintext.xml | 21 + .../chroma/v2/lexers/embedded/plutus_core.xml | 105 + .../chroma/v2/lexers/embedded/pony.xml | 135 + .../embedded/postgresql_sql_dialect.xml | 155 + .../chroma/v2/lexers/embedded/postscript.xml | 89 + .../chroma/v2/lexers/embedded/povray.xml | 58 + .../chroma/v2/lexers/embedded/powerquery.xml | 51 + .../chroma/v2/lexers/embedded/powershell.xml | 230 + .../chroma/v2/lexers/embedded/prolog.xml | 115 + .../chroma/v2/lexers/embedded/promela.xml | 119 + .../chroma/v2/lexers/embedded/promql.xml | 123 + .../chroma/v2/lexers/embedded/properties.xml | 45 + .../v2/lexers/embedded/protocol_buffer.xml | 118 + .../chroma/v2/lexers/embedded/prql.xml | 161 + .../chroma/v2/lexers/embedded/psl.xml | 213 + .../chroma/v2/lexers/embedded/puppet.xml | 100 + .../chroma/v2/lexers/embedded/python.xml | 589 + .../chroma/v2/lexers/embedded/python_2.xml | 356 + .../chroma/v2/lexers/embedded/qbasic.xml | 173 + .../chroma/v2/lexers/embedded/qml.xml | 113 + .../chroma/v2/lexers/embedded/r.xml | 128 + .../chroma/v2/lexers/embedded/racket.xml | 260 + .../chroma/v2/lexers/embedded/ragel.xml | 149 + .../chroma/v2/lexers/embedded/react.xml | 236 + .../chroma/v2/lexers/embedded/reasonml.xml | 147 + .../chroma/v2/lexers/embedded/reg.xml | 68 + .../chroma/v2/lexers/embedded/rego.xml | 94 + .../chroma/v2/lexers/embedded/rexx.xml | 127 + .../chroma/v2/lexers/embedded/rpm_spec.xml | 58 + .../chroma/v2/lexers/embedded/ruby.xml | 
724 + .../chroma/v2/lexers/embedded/rust.xml | 375 + .../chroma/v2/lexers/embedded/sas.xml | 191 + .../chroma/v2/lexers/embedded/sass.xml | 362 + .../chroma/v2/lexers/embedded/scala.xml | 274 + .../chroma/v2/lexers/embedded/scheme.xml | 106 + .../chroma/v2/lexers/embedded/scilab.xml | 98 + .../chroma/v2/lexers/embedded/scss.xml | 373 + .../chroma/v2/lexers/embedded/sed.xml | 28 + .../chroma/v2/lexers/embedded/sieve.xml | 61 + .../chroma/v2/lexers/embedded/smali.xml | 73 + .../chroma/v2/lexers/embedded/smalltalk.xml | 294 + .../chroma/v2/lexers/embedded/smarty.xml | 79 + .../chroma/v2/lexers/embedded/snobol.xml | 95 + .../chroma/v2/lexers/embedded/solidity.xml | 279 + .../chroma/v2/lexers/embedded/sourcepawn.xml | 59 + .../chroma/v2/lexers/embedded/sparql.xml | 160 + .../chroma/v2/lexers/embedded/sql.xml | 90 + .../chroma/v2/lexers/embedded/squidconf.xml | 63 + .../chroma/v2/lexers/embedded/standard_ml.xml | 548 + .../chroma/v2/lexers/embedded/stas.xml | 85 + .../chroma/v2/lexers/embedded/stylus.xml | 132 + .../chroma/v2/lexers/embedded/swift.xml | 207 + .../chroma/v2/lexers/embedded/systemd.xml | 63 + .../v2/lexers/embedded/systemverilog.xml | 181 + .../chroma/v2/lexers/embedded/tablegen.xml | 69 + .../chroma/v2/lexers/embedded/tal.xml | 43 + .../chroma/v2/lexers/embedded/tasm.xml | 135 + .../chroma/v2/lexers/embedded/tcl.xml | 272 + .../chroma/v2/lexers/embedded/tcsh.xml | 121 + .../chroma/v2/lexers/embedded/termcap.xml | 75 + .../chroma/v2/lexers/embedded/terminfo.xml | 84 + .../chroma/v2/lexers/embedded/terraform.xml | 140 + .../chroma/v2/lexers/embedded/tex.xml | 113 + .../chroma/v2/lexers/embedded/thrift.xml | 154 + .../chroma/v2/lexers/embedded/toml.xml | 44 + .../chroma/v2/lexers/embedded/tradingview.xml | 81 + .../v2/lexers/embedded/transact-sql.xml | 137 + .../chroma/v2/lexers/embedded/turing.xml | 82 + .../chroma/v2/lexers/embedded/turtle.xml | 170 + .../chroma/v2/lexers/embedded/twig.xml | 155 + .../chroma/v2/lexers/embedded/typescript.xml | 263 + 
.../chroma/v2/lexers/embedded/typoscript.xml | 178 + .../v2/lexers/embedded/typoscriptcssdata.xml | 52 + .../v2/lexers/embedded/typoscripthtmldata.xml | 52 + .../chroma/v2/lexers/embedded/ucode.xml | 147 + .../chroma/v2/lexers/embedded/v.xml | 355 + .../chroma/v2/lexers/embedded/v_shell.xml | 365 + .../chroma/v2/lexers/embedded/vala.xml | 72 + .../chroma/v2/lexers/embedded/vb_net.xml | 162 + .../chroma/v2/lexers/embedded/verilog.xml | 158 + .../chroma/v2/lexers/embedded/vhdl.xml | 171 + .../chroma/v2/lexers/embedded/vhs.xml | 48 + .../chroma/v2/lexers/embedded/viml.xml | 85 + .../chroma/v2/lexers/embedded/vue.xml | 305 + .../chroma/v2/lexers/embedded/wdte.xml | 43 + .../embedded/webgpu_shading_language.xml | 142 + .../chroma/v2/lexers/embedded/whiley.xml | 57 + .../chroma/v2/lexers/embedded/xml.xml | 95 + .../chroma/v2/lexers/embedded/xorg.xml | 35 + .../chroma/v2/lexers/embedded/yaml.xml | 122 + .../chroma/v2/lexers/embedded/yang.xml | 99 + .../v2/lexers/embedded/z80_assembly.xml | 74 + .../chroma/v2/lexers/embedded/zed.xml | 51 + .../chroma/v2/lexers/embedded/zig.xml | 112 + .../alecthomas/chroma/v2/lexers/genshi.go | 118 + .../alecthomas/chroma/v2/lexers/go.go | 82 + .../alecthomas/chroma/v2/lexers/haxe.go | 647 + .../alecthomas/chroma/v2/lexers/html.go | 8 + .../alecthomas/chroma/v2/lexers/http.go | 131 + .../alecthomas/chroma/v2/lexers/lexers.go | 79 + .../alecthomas/chroma/v2/lexers/markdown.go | 47 + .../alecthomas/chroma/v2/lexers/mysql.go | 33 + .../alecthomas/chroma/v2/lexers/php.go | 37 + .../alecthomas/chroma/v2/lexers/raku.go | 1721 + .../alecthomas/chroma/v2/lexers/rst.go | 89 + .../alecthomas/chroma/v2/lexers/svelte.go | 70 + .../alecthomas/chroma/v2/lexers/typoscript.go | 85 + .../alecthomas/chroma/v2/lexers/zed.go | 24 + .../alecthomas/chroma/v2/mutators.go | 201 + .../alecthomas/chroma/v2/pygments-lexers.txt | 322 + .../github.com/alecthomas/chroma/v2/regexp.go | 483 + .../alecthomas/chroma/v2/registry.go | 210 + 
.../github.com/alecthomas/chroma/v2/remap.go | 94 + .../alecthomas/chroma/v2/renovate.json5 | 18 + .../alecthomas/chroma/v2/serialise.go | 479 + .../github.com/alecthomas/chroma/v2/style.go | 481 + .../alecthomas/chroma/v2/styles/abap.xml | 11 + .../alecthomas/chroma/v2/styles/algol.xml | 18 + .../alecthomas/chroma/v2/styles/algol_nu.xml | 18 + .../alecthomas/chroma/v2/styles/api.go | 65 + .../alecthomas/chroma/v2/styles/arduino.xml | 18 + .../alecthomas/chroma/v2/styles/autumn.xml | 36 + .../alecthomas/chroma/v2/styles/average.xml | 74 + .../chroma/v2/styles/base16-snazzy.xml | 74 + .../alecthomas/chroma/v2/styles/borland.xml | 26 + .../alecthomas/chroma/v2/styles/bw.xml | 23 + .../chroma/v2/styles/catppuccin-frappe.xml | 83 + .../chroma/v2/styles/catppuccin-latte.xml | 83 + .../chroma/v2/styles/catppuccin-macchiato.xml | 83 + .../chroma/v2/styles/catppuccin-mocha.xml | 83 + .../alecthomas/chroma/v2/styles/colorful.xml | 52 + .../alecthomas/chroma/v2/styles/compat.go | 66 + .../alecthomas/chroma/v2/styles/doom-one.xml | 51 + .../alecthomas/chroma/v2/styles/doom-one2.xml | 64 + .../alecthomas/chroma/v2/styles/dracula.xml | 74 + .../alecthomas/chroma/v2/styles/emacs.xml | 44 + .../alecthomas/chroma/v2/styles/friendly.xml | 44 + .../alecthomas/chroma/v2/styles/fruity.xml | 19 + .../chroma/v2/styles/github-dark.xml | 45 + .../alecthomas/chroma/v2/styles/github.xml | 44 + .../chroma/v2/styles/gruvbox-light.xml | 33 + .../alecthomas/chroma/v2/styles/gruvbox.xml | 33 + .../chroma/v2/styles/hr_high_contrast.xml | 12 + .../alecthomas/chroma/v2/styles/hrdark.xml | 10 + .../alecthomas/chroma/v2/styles/igor.xml | 9 + .../alecthomas/chroma/v2/styles/lovelace.xml | 53 + .../alecthomas/chroma/v2/styles/manni.xml | 44 + .../chroma/v2/styles/modus-operandi.xml | 13 + .../chroma/v2/styles/modus-vivendi.xml | 13 + .../alecthomas/chroma/v2/styles/monokai.xml | 29 + .../chroma/v2/styles/monokailight.xml | 26 + .../alecthomas/chroma/v2/styles/murphy.xml | 52 + 
.../alecthomas/chroma/v2/styles/native.xml | 35 + .../alecthomas/chroma/v2/styles/nord.xml | 46 + .../alecthomas/chroma/v2/styles/onedark.xml | 25 + .../chroma/v2/styles/onesenterprise.xml | 10 + .../chroma/v2/styles/paraiso-dark.xml | 37 + .../chroma/v2/styles/paraiso-light.xml | 37 + .../alecthomas/chroma/v2/styles/pastie.xml | 45 + .../alecthomas/chroma/v2/styles/perldoc.xml | 37 + .../alecthomas/chroma/v2/styles/pygments.xml | 42 + .../chroma/v2/styles/rainbow_dash.xml | 40 + .../chroma/v2/styles/rose-pine-dawn.xml | 29 + .../chroma/v2/styles/rose-pine-moon.xml | 29 + .../alecthomas/chroma/v2/styles/rose-pine.xml | 29 + .../alecthomas/chroma/v2/styles/rrt.xml | 13 + .../chroma/v2/styles/solarized-dark.xml | 39 + .../chroma/v2/styles/solarized-dark256.xml | 41 + .../chroma/v2/styles/solarized-light.xml | 17 + .../alecthomas/chroma/v2/styles/swapoff.xml | 18 + .../alecthomas/chroma/v2/styles/tango.xml | 72 + .../alecthomas/chroma/v2/styles/trac.xml | 35 + .../alecthomas/chroma/v2/styles/vim.xml | 29 + .../alecthomas/chroma/v2/styles/vs.xml | 16 + .../alecthomas/chroma/v2/styles/vulcan.xml | 74 + .../chroma/v2/styles/witchhazel.xml | 31 + .../chroma/v2/styles/xcode-dark.xml | 31 + .../alecthomas/chroma/v2/styles/xcode.xml | 22 + .../github.com/alecthomas/chroma/v2/table.py | 31 + .../alecthomas/chroma/v2/tokentype_enumer.go | 573 + .../github.com/alecthomas/chroma/v2/types.go | 340 + vendor/github.com/armon/go-radix/.gitignore | 22 + vendor/github.com/armon/go-radix/.travis.yml | 3 + vendor/github.com/armon/go-radix/LICENSE | 20 + vendor/github.com/armon/go-radix/README.md | 38 + vendor/github.com/armon/go-radix/radix.go | 540 + .../asaskevich/govalidator/.gitignore | 15 + .../asaskevich/govalidator/.travis.yml | 12 + .../asaskevich/govalidator/CODE_OF_CONDUCT.md | 43 + .../asaskevich/govalidator/CONTRIBUTING.md | 63 + .../github.com/asaskevich/govalidator/LICENSE | 21 + .../asaskevich/govalidator/README.md | 622 + .../asaskevich/govalidator/arrays.go | 87 + 
.../asaskevich/govalidator/converter.go | 81 + .../github.com/asaskevich/govalidator/doc.go | 3 + .../asaskevich/govalidator/error.go | 47 + .../asaskevich/govalidator/numerics.go | 100 + .../asaskevich/govalidator/patterns.go | 113 + .../asaskevich/govalidator/types.go | 656 + .../asaskevich/govalidator/utils.go | 270 + .../asaskevich/govalidator/validator.go | 1768 + .../asaskevich/govalidator/wercker.yml | 15 + vendor/github.com/bep/clocks/.gitignore | 15 + vendor/github.com/bep/clocks/LICENSE | 21 + vendor/github.com/bep/clocks/README.md | 19 + vendor/github.com/bep/clocks/clock.go | 103 + vendor/github.com/bep/debounce/.gitignore | 27 + vendor/github.com/bep/debounce/LICENSE | 21 + vendor/github.com/bep/debounce/README.md | 35 + vendor/github.com/bep/debounce/debounce.go | 43 + vendor/github.com/bep/gitmap/.gitignore | 27 + vendor/github.com/bep/gitmap/LICENSE | 21 + vendor/github.com/bep/gitmap/README.md | 11 + vendor/github.com/bep/gitmap/gitmap.go | 179 + vendor/github.com/bep/goat/.gitignore | 8 + vendor/github.com/bep/goat/LICENSE | 21 + vendor/github.com/bep/goat/README.md | 173 + vendor/github.com/bep/goat/canvas.go | 1160 + vendor/github.com/bep/goat/index.go | 51 + vendor/github.com/bep/goat/iter.go | 76 + vendor/github.com/bep/goat/svg.go | 380 + vendor/github.com/bep/godartsass/.gitignore | 17 + vendor/github.com/bep/godartsass/LICENSE | 21 + vendor/github.com/bep/godartsass/README.md | 26 + vendor/github.com/bep/godartsass/codecov.yml | 10 + vendor/github.com/bep/godartsass/conn.go | 104 + .../internal/embeddedsassv1/README.md | 4 + .../embeddedsassv1/embedded_sass_v1.pb.go | 5350 + .../embeddedsassv1/embedded_sass_v1.proto | 1005 + vendor/github.com/bep/godartsass/options.go | 231 + .../github.com/bep/godartsass/transpiler.go | 533 + .../github.com/bep/godartsass/v2/.gitignore | 17 + vendor/github.com/bep/godartsass/v2/LICENSE | 21 + vendor/github.com/bep/godartsass/v2/README.md | 15 + .../github.com/bep/godartsass/v2/codecov.yml | 10 + 
vendor/github.com/bep/godartsass/v2/conn.go | 125 + .../v2/internal/embeddedsass/README.md | 5 + .../internal/embeddedsass/embedded_sass.pb.go | 5278 + .../internal/embeddedsass/embedded_sass.proto | 1003 + .../github.com/bep/godartsass/v2/options.go | 231 + .../bep/godartsass/v2/transpiler.go | 564 + vendor/github.com/bep/golibsass/LICENSE | 21 + .../libsass/libsasserrors/libsasserrors.go | 31 + vendor/github.com/bep/gowebp/LICENSE | 20 + .../bep/gowebp/internal/libwebp/a__cgo.go | 7 + .../bep/gowebp/internal/libwebp/a__cgo_dev.go | 8 + .../bep/gowebp/internal/libwebp/a__cgo_src.go | 7 + .../bep/gowebp/internal/libwebp/a__encoder.go | 214 + .../bep/gowebp/internal/libwebp/alpha_dec.c | 3 + .../bep/gowebp/internal/libwebp/alpha_enc.c | 3 + .../internal/libwebp/alpha_processing.c | 3 + .../libwebp/alpha_processing_mips_dsp_r2.c | 3 + .../internal/libwebp/alpha_processing_neon.c | 3 + .../internal/libwebp/alpha_processing_sse2.c | 3 + .../internal/libwebp/alpha_processing_sse41.c | 3 + .../bep/gowebp/internal/libwebp/alphai_dec.h | 3 + .../gowebp/internal/libwebp/analysis_enc.c | 3 + .../bep/gowebp/internal/libwebp/anim_decode.c | 3 + .../bep/gowebp/internal/libwebp/anim_encode.c | 3 + .../bep/gowebp/internal/libwebp/animi.h | 3 + .../libwebp/backward_references_cost_enc.c | 3 + .../libwebp/backward_references_enc.c | 3 + .../libwebp/backward_references_enc.h | 3 + .../internal/libwebp/bit_reader_inl_utils.h | 3 + .../internal/libwebp/bit_reader_utils.c | 3 + .../internal/libwebp/bit_reader_utils.h | 3 + .../internal/libwebp/bit_writer_utils.c | 3 + .../internal/libwebp/bit_writer_utils.h | 3 + .../bep/gowebp/internal/libwebp/buffer_dec.c | 3 + .../internal/libwebp/color_cache_utils.c | 3 + .../internal/libwebp/color_cache_utils.h | 3 + .../bep/gowebp/internal/libwebp/common_dec.h | 3 + .../bep/gowebp/internal/libwebp/common_sse2.h | 3 + .../gowebp/internal/libwebp/common_sse41.h | 3 + .../bep/gowebp/internal/libwebp/config_enc.c | 3 + 
.../bep/gowebp/internal/libwebp/cost.c | 3 + .../bep/gowebp/internal/libwebp/cost_enc.c | 3 + .../bep/gowebp/internal/libwebp/cost_enc.h | 3 + .../bep/gowebp/internal/libwebp/cost_mips32.c | 3 + .../internal/libwebp/cost_mips_dsp_r2.c | 3 + .../bep/gowebp/internal/libwebp/cost_neon.c | 3 + .../bep/gowebp/internal/libwebp/cost_sse2.c | 3 + .../bep/gowebp/internal/libwebp/cpu.c | 3 + .../bep/gowebp/internal/libwebp/cpu.h | 3 + .../bep/gowebp/internal/libwebp/dec.c | 3 + .../gowebp/internal/libwebp/dec_clip_tables.c | 3 + .../bep/gowebp/internal/libwebp/dec_mips32.c | 3 + .../gowebp/internal/libwebp/dec_mips_dsp_r2.c | 3 + .../bep/gowebp/internal/libwebp/dec_msa.c | 3 + .../bep/gowebp/internal/libwebp/dec_neon.c | 3 + .../bep/gowebp/internal/libwebp/dec_sse2.c | 3 + .../bep/gowebp/internal/libwebp/dec_sse41.c | 3 + .../bep/gowebp/internal/libwebp/decode.h | 3 + .../bep/gowebp/internal/libwebp/demux.c | 3 + .../bep/gowebp/internal/libwebp/demux.h | 3 + .../bep/gowebp/internal/libwebp/dsp.h | 3 + .../bep/gowebp/internal/libwebp/enc.c | 3 + .../bep/gowebp/internal/libwebp/enc_mips32.c | 3 + .../gowebp/internal/libwebp/enc_mips_dsp_r2.c | 3 + .../bep/gowebp/internal/libwebp/enc_msa.c | 3 + .../bep/gowebp/internal/libwebp/enc_neon.c | 3 + .../bep/gowebp/internal/libwebp/enc_sse2.c | 3 + .../bep/gowebp/internal/libwebp/enc_sse41.c | 3 + .../bep/gowebp/internal/libwebp/encode.h | 3 + .../internal/libwebp/endian_inl_utils.h | 3 + .../bep/gowebp/internal/libwebp/filter_enc.c | 3 + .../bep/gowebp/internal/libwebp/filters.c | 3 + .../internal/libwebp/filters_mips_dsp_r2.c | 3 + .../bep/gowebp/internal/libwebp/filters_msa.c | 3 + .../gowebp/internal/libwebp/filters_neon.c | 3 + .../gowebp/internal/libwebp/filters_sse2.c | 3 + .../gowebp/internal/libwebp/filters_utils.c | 3 + .../gowebp/internal/libwebp/filters_utils.h | 3 + .../internal/libwebp/format_constants.h | 3 + .../bep/gowebp/internal/libwebp/frame_dec.c | 3 + .../bep/gowebp/internal/libwebp/frame_enc.c | 3 + 
.../gowebp/internal/libwebp/histogram_enc.c | 3 + .../gowebp/internal/libwebp/histogram_enc.h | 3 + .../internal/libwebp/huffman_encode_utils.c | 3 + .../internal/libwebp/huffman_encode_utils.h | 3 + .../gowebp/internal/libwebp/huffman_utils.c | 3 + .../gowebp/internal/libwebp/huffman_utils.h | 3 + .../bep/gowebp/internal/libwebp/idec_dec.c | 3 + .../bep/gowebp/internal/libwebp/io_dec.c | 3 + .../gowebp/internal/libwebp/iterator_enc.c | 3 + .../bep/gowebp/internal/libwebp/lossless.c | 3 + .../bep/gowebp/internal/libwebp/lossless.h | 3 + .../gowebp/internal/libwebp/lossless_common.h | 3 + .../gowebp/internal/libwebp/lossless_enc.c | 3 + .../internal/libwebp/lossless_enc_mips32.c | 3 + .../libwebp/lossless_enc_mips_dsp_r2.c | 3 + .../internal/libwebp/lossless_enc_msa.c | 3 + .../internal/libwebp/lossless_enc_neon.c | 3 + .../internal/libwebp/lossless_enc_sse2.c | 3 + .../internal/libwebp/lossless_enc_sse41.c | 3 + .../internal/libwebp/lossless_mips_dsp_r2.c | 3 + .../gowebp/internal/libwebp/lossless_msa.c | 3 + .../gowebp/internal/libwebp/lossless_neon.c | 3 + .../gowebp/internal/libwebp/lossless_sse2.c | 3 + .../gowebp/internal/libwebp/lossless_sse41.c | 3 + .../bep/gowebp/internal/libwebp/mips_macro.h | 3 + .../bep/gowebp/internal/libwebp/msa_macro.h | 3 + .../bep/gowebp/internal/libwebp/mux.h | 3 + .../bep/gowebp/internal/libwebp/mux_types.h | 3 + .../bep/gowebp/internal/libwebp/muxedit.c | 3 + .../bep/gowebp/internal/libwebp/muxi.h | 3 + .../bep/gowebp/internal/libwebp/muxinternal.c | 3 + .../bep/gowebp/internal/libwebp/muxread.c | 3 + .../internal/libwebp/near_lossless_enc.c | 3 + .../bep/gowebp/internal/libwebp/neon.h | 3 + .../gowebp/internal/libwebp/picture_csp_enc.c | 3 + .../bep/gowebp/internal/libwebp/picture_enc.c | 3 + .../internal/libwebp/picture_psnr_enc.c | 3 + .../internal/libwebp/picture_rescale_enc.c | 3 + .../internal/libwebp/picture_tools_enc.c | 3 + .../gowebp/internal/libwebp/predictor_enc.c | 3 + .../bep/gowebp/internal/libwebp/quant.h | 3 + 
.../bep/gowebp/internal/libwebp/quant_dec.c | 3 + .../bep/gowebp/internal/libwebp/quant_enc.c | 3 + .../internal/libwebp/quant_levels_dec_utils.c | 3 + .../internal/libwebp/quant_levels_dec_utils.h | 3 + .../internal/libwebp/quant_levels_utils.c | 3 + .../internal/libwebp/quant_levels_utils.h | 3 + .../gowebp/internal/libwebp/random_utils.c | 3 + .../gowebp/internal/libwebp/random_utils.h | 3 + .../bep/gowebp/internal/libwebp/rescaler.c | 3 + .../gowebp/internal/libwebp/rescaler_mips32.c | 3 + .../internal/libwebp/rescaler_mips_dsp_r2.c | 3 + .../gowebp/internal/libwebp/rescaler_msa.c | 3 + .../gowebp/internal/libwebp/rescaler_neon.c | 3 + .../gowebp/internal/libwebp/rescaler_sse2.c | 3 + .../gowebp/internal/libwebp/rescaler_utils.c | 3 + .../gowebp/internal/libwebp/rescaler_utils.h | 3 + .../bep/gowebp/internal/libwebp/sharpyuv.c | 3 + .../bep/gowebp/internal/libwebp/sharpyuv.h | 3 + .../gowebp/internal/libwebp/sharpyuv_cpu.c | 3 + .../gowebp/internal/libwebp/sharpyuv_cpu.h | 3 + .../gowebp/internal/libwebp/sharpyuv_csp.c | 3 + .../gowebp/internal/libwebp/sharpyuv_csp.h | 3 + .../gowebp/internal/libwebp/sharpyuv_dsp.c | 3 + .../gowebp/internal/libwebp/sharpyuv_dsp.h | 3 + .../gowebp/internal/libwebp/sharpyuv_gamma.c | 3 + .../gowebp/internal/libwebp/sharpyuv_gamma.h | 3 + .../gowebp/internal/libwebp/sharpyuv_neon.c | 3 + .../gowebp/internal/libwebp/sharpyuv_sse2.c | 3 + .../bep/gowebp/internal/libwebp/ssim.c | 3 + .../bep/gowebp/internal/libwebp/ssim_sse2.c | 3 + .../bep/gowebp/internal/libwebp/syntax_enc.c | 3 + .../gowebp/internal/libwebp/thread_utils.c | 3 + .../gowebp/internal/libwebp/thread_utils.h | 3 + .../bep/gowebp/internal/libwebp/token_enc.c | 3 + .../bep/gowebp/internal/libwebp/tree_dec.c | 3 + .../bep/gowebp/internal/libwebp/tree_enc.c | 3 + .../bep/gowebp/internal/libwebp/types.h | 3 + .../bep/gowebp/internal/libwebp/upsampling.c | 3 + .../internal/libwebp/upsampling_mips_dsp_r2.c | 3 + .../gowebp/internal/libwebp/upsampling_msa.c | 3 + 
.../gowebp/internal/libwebp/upsampling_neon.c | 3 + .../gowebp/internal/libwebp/upsampling_sse2.c | 3 + .../internal/libwebp/upsampling_sse41.c | 3 + .../bep/gowebp/internal/libwebp/utils.c | 3 + .../bep/gowebp/internal/libwebp/utils.h | 3 + .../bep/gowebp/internal/libwebp/vp8_dec.c | 3 + .../bep/gowebp/internal/libwebp/vp8_dec.h | 3 + .../bep/gowebp/internal/libwebp/vp8i_dec.h | 3 + .../bep/gowebp/internal/libwebp/vp8i_enc.h | 3 + .../bep/gowebp/internal/libwebp/vp8l_dec.c | 3 + .../bep/gowebp/internal/libwebp/vp8l_enc.c | 3 + .../bep/gowebp/internal/libwebp/vp8li_dec.h | 3 + .../bep/gowebp/internal/libwebp/vp8li_enc.h | 3 + .../bep/gowebp/internal/libwebp/webp_dec.c | 3 + .../bep/gowebp/internal/libwebp/webp_enc.c | 3 + .../bep/gowebp/internal/libwebp/webpi_dec.h | 3 + .../bep/gowebp/internal/libwebp/yuv.c | 3 + .../bep/gowebp/internal/libwebp/yuv.h | 3 + .../bep/gowebp/internal/libwebp/yuv_mips32.c | 3 + .../gowebp/internal/libwebp/yuv_mips_dsp_r2.c | 3 + .../bep/gowebp/internal/libwebp/yuv_neon.c | 3 + .../bep/gowebp/internal/libwebp/yuv_sse2.c | 3 + .../bep/gowebp/internal/libwebp/yuv_sse41.c | 3 + .../github.com/bep/gowebp/libwebp/encode.go | 19 + .../bep/gowebp/libwebp/webpoptions/options.go | 24 + vendor/github.com/bep/lazycache/.gitignore | 15 + vendor/github.com/bep/lazycache/LICENSE | 21 + vendor/github.com/bep/lazycache/README.md | 17 + vendor/github.com/bep/lazycache/codecov.yml | 10 + vendor/github.com/bep/lazycache/lazycache.go | 201 + vendor/github.com/bep/logg/.gitignore | 15 + vendor/github.com/bep/logg/LICENSE | 22 + vendor/github.com/bep/logg/README.md | 238 + vendor/github.com/bep/logg/doc.go | 4 + vendor/github.com/bep/logg/entry.go | 166 + vendor/github.com/bep/logg/handler.go | 27 + .../bep/logg/handlers/multi/multi.go | 32 + vendor/github.com/bep/logg/interfaces.go | 43 + vendor/github.com/bep/logg/levels.go | 81 + vendor/github.com/bep/logg/logger.go | 159 + vendor/github.com/bep/logg/objectpools.go | 25 + 
vendor/github.com/bep/logg/stack.go | 8 + vendor/github.com/bep/tmc/.gitignore | 13 + vendor/github.com/bep/tmc/LICENSE | 21 + vendor/github.com/bep/tmc/README.md | 200 + vendor/github.com/bep/tmc/adapters.go | 146 + vendor/github.com/bep/tmc/codec.go | 309 + vendor/github.com/bep/tmc/codecov.yml | 10 + .../github.com/blevesearch/bleve/.gitignore | 19 + .../github.com/blevesearch/bleve/.travis.yml | 25 + .../blevesearch/bleve/CONTRIBUTING.md | 16 + vendor/github.com/blevesearch/bleve/LICENSE | 202 + vendor/github.com/blevesearch/bleve/README.md | 70 + .../analysis/analyzer/standard/standard.go | 52 + .../analysis/datetime/flexible/flexible.go | 64 + .../analysis/datetime/optional/optional.go | 45 + .../blevesearch/bleve/analysis/freq.go | 152 + .../bleve/analysis/lang/en/analyzer_en.go | 70 + .../analysis/lang/en/possessive_filter_en.go | 67 + .../analysis/lang/en/stemmer_en_snowball.go | 49 + .../bleve/analysis/lang/en/stop_filter_en.go | 33 + .../bleve/analysis/lang/en/stop_words_en.go | 344 + .../blevesearch/bleve/analysis/test_words.txt | 7 + .../analysis/token/lowercase/lowercase.go | 105 + .../bleve/analysis/token/porter/porter.go | 53 + .../bleve/analysis/token/stop/stop.go | 70 + .../analysis/tokenizer/unicode/unicode.go | 131 + .../blevesearch/bleve/analysis/tokenmap.go | 76 + .../blevesearch/bleve/analysis/type.go | 103 + .../blevesearch/bleve/analysis/util.go | 92 + .../github.com/blevesearch/bleve/builder.go | 94 + vendor/github.com/blevesearch/bleve/config.go | 98 + .../blevesearch/bleve/config_app.go | 23 + .../blevesearch/bleve/config_disk.go | 25 + vendor/github.com/blevesearch/bleve/doc.go | 38 + .../blevesearch/bleve/document/document.go | 101 + .../blevesearch/bleve/document/field.go | 41 + .../bleve/document/field_boolean.go | 123 + .../bleve/document/field_composite.go | 124 + .../bleve/document/field_datetime.go | 159 + .../bleve/document/field_geopoint.go | 152 + .../bleve/document/field_numeric.go | 145 + 
.../blevesearch/bleve/document/field_text.go | 139 + .../bleve/document/indexing_options.go | 66 + vendor/github.com/blevesearch/bleve/error.go | 52 + .../blevesearch/bleve/geo/README.md | 9 + .../github.com/blevesearch/bleve/geo/geo.go | 210 + .../blevesearch/bleve/geo/geo_dist.go | 98 + .../blevesearch/bleve/geo/geohash.go | 111 + .../github.com/blevesearch/bleve/geo/parse.go | 181 + .../blevesearch/bleve/geo/sloppy.go | 212 + vendor/github.com/blevesearch/bleve/index.go | 309 + .../blevesearch/bleve/index/analysis.go | 110 + .../blevesearch/bleve/index/field_cache.go | 88 + .../blevesearch/bleve/index/index.go | 376 + .../blevesearch/bleve/index/scorch/README.md | 367 + .../blevesearch/bleve/index/scorch/builder.go | 334 + .../blevesearch/bleve/index/scorch/event.go | 64 + .../bleve/index/scorch/introducer.go | 449 + .../blevesearch/bleve/index/scorch/merge.go | 504 + .../index/scorch/mergeplan/merge_plan.go | 397 + .../bleve/index/scorch/mergeplan/sort.go | 28 + .../bleve/index/scorch/optimize.go | 396 + .../bleve/index/scorch/persister.go | 990 + .../bleve/index/scorch/rollback.go | 213 + .../blevesearch/bleve/index/scorch/scorch.go | 676 + .../bleve/index/scorch/segment/empty.go | 137 + .../bleve/index/scorch/segment/int.go | 176 + .../bleve/index/scorch/segment/plugin.go | 58 + .../bleve/index/scorch/segment/regexp.go | 75 + .../bleve/index/scorch/segment/segment.go | 153 + .../bleve/index/scorch/segment/unadorned.go | 160 + .../bleve/index/scorch/segment_plugin.go | 93 + .../bleve/index/scorch/snapshot_index.go | 755 + .../bleve/index/scorch/snapshot_index_dict.go | 108 + .../bleve/index/scorch/snapshot_index_doc.go | 80 + .../bleve/index/scorch/snapshot_index_tfr.go | 188 + .../bleve/index/scorch/snapshot_segment.go | 279 + .../blevesearch/bleve/index/scorch/stats.go | 152 + .../blevesearch/bleve/index/store/batch.go | 62 + .../bleve/index/store/boltdb/iterator.go | 85 + .../bleve/index/store/boltdb/reader.go | 73 + .../bleve/index/store/boltdb/stats.go | 
26 + .../bleve/index/store/boltdb/store.go | 181 + .../bleve/index/store/boltdb/writer.go | 95 + .../bleve/index/store/gtreap/iterator.go | 152 + .../bleve/index/store/gtreap/reader.go | 66 + .../bleve/index/store/gtreap/store.go | 82 + .../bleve/index/store/gtreap/writer.go | 76 + .../blevesearch/bleve/index/store/kvstore.go | 174 + .../blevesearch/bleve/index/store/merge.go | 64 + .../blevesearch/bleve/index/store/multiget.go | 33 + .../bleve/index/upsidedown/analysis.go | 110 + .../bleve/index/upsidedown/benchmark_all.sh | 8 + .../bleve/index/upsidedown/dump.go | 174 + .../bleve/index/upsidedown/field_dict.go | 78 + .../bleve/index/upsidedown/index_reader.go | 226 + .../bleve/index/upsidedown/reader.go | 376 + .../blevesearch/bleve/index/upsidedown/row.go | 1141 + .../bleve/index/upsidedown/row_merge.go | 76 + .../bleve/index/upsidedown/stats.go | 55 + .../bleve/index/upsidedown/upsidedown.go | 1083 + .../bleve/index/upsidedown/upsidedown.pb.go | 688 + .../bleve/index/upsidedown/upsidedown.proto | 14 + .../blevesearch/bleve/index_alias.go | 37 + .../blevesearch/bleve/index_alias_impl.go | 614 + .../blevesearch/bleve/index_impl.go | 924 + .../blevesearch/bleve/index_meta.go | 97 + .../blevesearch/bleve/index_stats.go | 75 + .../github.com/blevesearch/bleve/mapping.go | 65 + .../blevesearch/bleve/mapping/analysis.go | 99 + .../blevesearch/bleve/mapping/document.go | 558 + .../blevesearch/bleve/mapping/field.go | 343 + .../blevesearch/bleve/mapping/index.go | 443 + .../blevesearch/bleve/mapping/mapping.go | 58 + .../blevesearch/bleve/mapping/reflect.go | 92 + .../blevesearch/bleve/numeric/bin.go | 43 + .../blevesearch/bleve/numeric/float.go | 34 + .../blevesearch/bleve/numeric/prefix_coded.go | 111 + vendor/github.com/blevesearch/bleve/query.go | 218 + .../blevesearch/bleve/registry/analyzer.go | 89 + .../blevesearch/bleve/registry/cache.go | 87 + .../blevesearch/bleve/registry/char_filter.go | 89 + .../bleve/registry/datetime_parser.go | 89 + 
.../bleve/registry/fragment_formatter.go | 89 + .../blevesearch/bleve/registry/fragmenter.go | 89 + .../blevesearch/bleve/registry/highlighter.go | 89 + .../blevesearch/bleve/registry/index_type.go | 45 + .../blevesearch/bleve/registry/registry.go | 184 + .../blevesearch/bleve/registry/store.go | 51 + .../bleve/registry/token_filter.go | 89 + .../blevesearch/bleve/registry/token_maps.go | 89 + .../blevesearch/bleve/registry/tokenizer.go | 89 + vendor/github.com/blevesearch/bleve/search.go | 631 + .../blevesearch/bleve/search/collector.go | 52 + .../bleve/search/collector/heap.go | 95 + .../bleve/search/collector/list.go | 86 + .../bleve/search/collector/slice.go | 77 + .../bleve/search/collector/topn.go | 412 + .../blevesearch/bleve/search/explanation.go | 55 + .../bleve/search/facet/benchmark_data.txt | 2909 + .../search/facet/facet_builder_datetime.go | 163 + .../search/facet/facet_builder_numeric.go | 157 + .../bleve/search/facet/facet_builder_terms.go | 117 + .../bleve/search/facets_builder.go | 341 + .../search/highlight/format/html/html.go | 91 + .../highlight/fragmenter/simple/simple.go | 147 + .../bleve/search/highlight/highlighter.go | 64 + .../search/highlight/highlighter/html/html.go | 50 + .../simple/fragment_scorer_simple.go | 49 + .../highlighter/simple/highlighter_simple.go | 221 + .../bleve/search/highlight/term_locations.go | 105 + .../blevesearch/bleve/search/levenshtein.go | 114 + .../blevesearch/bleve/search/pool.go | 91 + .../bleve/search/query/bool_field.go | 64 + .../blevesearch/bleve/search/query/boolean.go | 248 + .../blevesearch/bleve/search/query/boost.go | 33 + .../bleve/search/query/conjunction.go | 112 + .../bleve/search/query/date_range.go | 191 + .../bleve/search/query/disjunction.go | 124 + .../blevesearch/bleve/search/query/docid.go | 49 + .../blevesearch/bleve/search/query/fuzzy.go | 77 + .../bleve/search/query/geo_boundingbox.go | 113 + .../bleve/search/query/geo_boundingpolygon.go | 94 + .../bleve/search/query/geo_distance.go | 
100 + .../blevesearch/bleve/search/query/match.go | 176 + .../bleve/search/query/match_all.go | 55 + .../bleve/search/query/match_none.go | 55 + .../bleve/search/query/match_phrase.go | 113 + .../bleve/search/query/multi_phrase.go | 80 + .../bleve/search/query/numeric_range.go | 87 + .../blevesearch/bleve/search/query/phrase.go | 77 + .../blevesearch/bleve/search/query/prefix.go | 62 + .../blevesearch/bleve/search/query/query.go | 361 + .../bleve/search/query/query_string.go | 67 + .../bleve/search/query/query_string.y | 328 + .../bleve/search/query/query_string.y.go | 815 + .../bleve/search/query/query_string_lex.go | 323 + .../bleve/search/query/query_string_parser.go | 85 + .../blevesearch/bleve/search/query/regexp.go | 81 + .../blevesearch/bleve/search/query/term.go | 61 + .../bleve/search/query/term_range.go | 95 + .../bleve/search/query/wildcard.go | 93 + .../bleve/search/scorer/scorer_conjunction.go | 72 + .../bleve/search/scorer/scorer_constant.go | 127 + .../bleve/search/scorer/scorer_disjunction.go | 83 + .../bleve/search/scorer/scorer_term.go | 203 + .../bleve/search/scorer/sqrt_cache.go | 30 + .../blevesearch/bleve/search/search.go | 378 + .../search/searcher/ordered_searchers_list.go | 35 + .../bleve/search/searcher/search_boolean.go | 450 + .../search/searcher/search_conjunction.go | 284 + .../search/searcher/search_disjunction.go | 113 + .../searcher/search_disjunction_heap.go | 343 + .../searcher/search_disjunction_slice.go | 298 + .../bleve/search/searcher/search_docid.go | 109 + .../bleve/search/searcher/search_filter.go | 103 + .../bleve/search/searcher/search_fuzzy.go | 117 + .../search/searcher/search_geoboundingbox.go | 272 + .../searcher/search_geopointdistance.go | 126 + .../search/searcher/search_geopolygon.go | 126 + .../bleve/search/searcher/search_match_all.go | 121 + .../search/searcher/search_match_none.go | 76 + .../search/searcher/search_multi_term.go | 215 + .../search/searcher/search_numeric_range.go | 260 + 
.../bleve/search/searcher/search_phrase.go | 437 + .../bleve/search/searcher/search_regexp.go | 120 + .../bleve/search/searcher/search_term.go | 141 + .../search/searcher/search_term_prefix.go | 50 + .../search/searcher/search_term_range.go | 85 + .../blevesearch/bleve/search/sort.go | 746 + .../blevesearch/bleve/search/util.go | 69 + .../blevesearch/bleve/size/sizes.go | 59 + .../blevesearch/go-porterstemmer/.gitignore | 8 + .../blevesearch/go-porterstemmer/.travis.yml | 16 + .../blevesearch/go-porterstemmer/LICENSE | 19 + .../blevesearch/go-porterstemmer/README.md | 118 + .../go-porterstemmer/porterstemmer.go | 839 + .../github.com/blevesearch/mmap-go/.gitignore | 10 + .../blevesearch/mmap-go/.travis.yml | 16 + vendor/github.com/blevesearch/mmap-go/LICENSE | 25 + .../github.com/blevesearch/mmap-go/README.md | 12 + vendor/github.com/blevesearch/mmap-go/mmap.go | 117 + .../blevesearch/mmap-go/mmap_unix.go | 51 + .../blevesearch/mmap-go/mmap_windows.go | 153 + .../github.com/blevesearch/segment/.gitignore | 10 + .../blevesearch/segment/.travis.yml | 15 + vendor/github.com/blevesearch/segment/LICENSE | 202 + .../github.com/blevesearch/segment/README.md | 92 + vendor/github.com/blevesearch/segment/doc.go | 45 + .../github.com/blevesearch/segment/segment.go | 284 + .../blevesearch/segment/segment_fuzz.go | 22 + .../blevesearch/segment/segment_words.go | 19542 ++ .../blevesearch/segment/segment_words.rl | 285 + .../blevesearch/segment/segment_words_prod.go | 173643 +++++++++++++++ .../blevesearch/snowballstem/COPYING | 29 + .../blevesearch/snowballstem/README.md | 66 + .../blevesearch/snowballstem/among.go | 16 + .../snowballstem/english/english_stemmer.go | 1341 + .../blevesearch/snowballstem/env.go | 389 + .../blevesearch/snowballstem/gen.go | 61 + .../blevesearch/snowballstem/util.go | 34 + .../github.com/blevesearch/zap/v11/.gitignore | 12 + vendor/github.com/blevesearch/zap/v11/LICENSE | 202 + .../github.com/blevesearch/zap/v11/README.md | 158 + 
.../github.com/blevesearch/zap/v11/build.go | 156 + .../blevesearch/zap/v11/contentcoder.go | 230 + .../github.com/blevesearch/zap/v11/count.go | 61 + vendor/github.com/blevesearch/zap/v11/dict.go | 263 + .../blevesearch/zap/v11/docvalues.go | 307 + .../blevesearch/zap/v11/enumerator.go | 126 + .../blevesearch/zap/v11/intcoder.go | 172 + .../github.com/blevesearch/zap/v11/merge.go | 860 + vendor/github.com/blevesearch/zap/v11/new.go | 847 + .../github.com/blevesearch/zap/v11/plugin.go | 37 + .../github.com/blevesearch/zap/v11/posting.go | 910 + vendor/github.com/blevesearch/zap/v11/read.go | 43 + .../github.com/blevesearch/zap/v11/segment.go | 572 + .../github.com/blevesearch/zap/v11/write.go | 145 + vendor/github.com/blevesearch/zap/v11/zap.md | 177 + .../github.com/blevesearch/zap/v12/.gitignore | 12 + vendor/github.com/blevesearch/zap/v12/LICENSE | 202 + .../github.com/blevesearch/zap/v12/README.md | 158 + .../github.com/blevesearch/zap/v12/build.go | 156 + .../github.com/blevesearch/zap/v12/chunk.go | 54 + .../blevesearch/zap/v12/contentcoder.go | 243 + .../github.com/blevesearch/zap/v12/count.go | 61 + vendor/github.com/blevesearch/zap/v12/dict.go | 263 + .../blevesearch/zap/v12/docvalues.go | 312 + .../blevesearch/zap/v12/enumerator.go | 138 + .../blevesearch/zap/v12/intDecoder.go | 111 + .../blevesearch/zap/v12/intcoder.go | 203 + .../github.com/blevesearch/zap/v12/merge.go | 847 + vendor/github.com/blevesearch/zap/v12/new.go | 860 + .../github.com/blevesearch/zap/v12/plugin.go | 37 + .../github.com/blevesearch/zap/v12/posting.go | 798 + vendor/github.com/blevesearch/zap/v12/read.go | 43 + .../github.com/blevesearch/zap/v12/segment.go | 572 + .../github.com/blevesearch/zap/v12/write.go | 145 + vendor/github.com/blevesearch/zap/v12/zap.md | 177 + .../github.com/blevesearch/zap/v13/.gitignore | 12 + vendor/github.com/blevesearch/zap/v13/LICENSE | 202 + .../github.com/blevesearch/zap/v13/README.md | 158 + .../github.com/blevesearch/zap/v13/build.go | 156 + 
.../github.com/blevesearch/zap/v13/chunk.go | 54 + .../blevesearch/zap/v13/contentcoder.go | 243 + .../github.com/blevesearch/zap/v13/count.go | 61 + vendor/github.com/blevesearch/zap/v13/dict.go | 263 + .../blevesearch/zap/v13/docvalues.go | 312 + .../blevesearch/zap/v13/enumerator.go | 138 + .../blevesearch/zap/v13/intDecoder.go | 111 + .../blevesearch/zap/v13/intcoder.go | 206 + .../github.com/blevesearch/zap/v13/merge.go | 847 + vendor/github.com/blevesearch/zap/v13/new.go | 860 + .../github.com/blevesearch/zap/v13/plugin.go | 37 + .../github.com/blevesearch/zap/v13/posting.go | 798 + vendor/github.com/blevesearch/zap/v13/read.go | 43 + .../github.com/blevesearch/zap/v13/segment.go | 572 + .../github.com/blevesearch/zap/v13/write.go | 145 + vendor/github.com/blevesearch/zap/v13/zap.md | 177 + .../github.com/blevesearch/zap/v14/.gitignore | 12 + vendor/github.com/blevesearch/zap/v14/LICENSE | 202 + .../github.com/blevesearch/zap/v14/README.md | 158 + .../github.com/blevesearch/zap/v14/build.go | 156 + .../github.com/blevesearch/zap/v14/chunk.go | 67 + .../blevesearch/zap/v14/contentcoder.go | 243 + .../github.com/blevesearch/zap/v14/count.go | 61 + vendor/github.com/blevesearch/zap/v14/dict.go | 263 + .../blevesearch/zap/v14/docvalues.go | 312 + .../blevesearch/zap/v14/enumerator.go | 138 + .../blevesearch/zap/v14/intDecoder.go | 118 + .../blevesearch/zap/v14/intcoder.go | 206 + .../github.com/blevesearch/zap/v14/merge.go | 847 + vendor/github.com/blevesearch/zap/v14/new.go | 860 + .../github.com/blevesearch/zap/v14/plugin.go | 37 + .../github.com/blevesearch/zap/v14/posting.go | 796 + vendor/github.com/blevesearch/zap/v14/read.go | 43 + .../github.com/blevesearch/zap/v14/segment.go | 572 + .../github.com/blevesearch/zap/v14/write.go | 145 + vendor/github.com/blevesearch/zap/v14/zap.md | 177 + .../github.com/blevesearch/zap/v15/.gitignore | 12 + vendor/github.com/blevesearch/zap/v15/LICENSE | 202 + .../github.com/blevesearch/zap/v15/README.md | 158 + 
.../github.com/blevesearch/zap/v15/build.go | 156 + .../github.com/blevesearch/zap/v15/chunk.go | 67 + .../blevesearch/zap/v15/contentcoder.go | 243 + .../github.com/blevesearch/zap/v15/count.go | 61 + vendor/github.com/blevesearch/zap/v15/dict.go | 263 + .../blevesearch/zap/v15/docvalues.go | 312 + .../blevesearch/zap/v15/enumerator.go | 138 + .../blevesearch/zap/v15/intDecoder.go | 126 + .../blevesearch/zap/v15/intcoder.go | 206 + .../github.com/blevesearch/zap/v15/merge.go | 893 + vendor/github.com/blevesearch/zap/v15/new.go | 860 + .../github.com/blevesearch/zap/v15/plugin.go | 37 + .../github.com/blevesearch/zap/v15/posting.go | 853 + vendor/github.com/blevesearch/zap/v15/read.go | 43 + .../github.com/blevesearch/zap/v15/segment.go | 572 + .../github.com/blevesearch/zap/v15/write.go | 145 + vendor/github.com/blevesearch/zap/v15/zap.md | 177 + .../github.com/cenkalti/backoff/v4/.gitignore | 22 + .../cenkalti/backoff/v4/.travis.yml | 10 + vendor/github.com/cenkalti/backoff/v4/LICENSE | 20 + .../github.com/cenkalti/backoff/v4/README.md | 33 + .../github.com/cenkalti/backoff/v4/backoff.go | 66 + .../github.com/cenkalti/backoff/v4/context.go | 66 + .../cenkalti/backoff/v4/exponential.go | 158 + .../github.com/cenkalti/backoff/v4/retry.go | 96 + .../github.com/cenkalti/backoff/v4/ticker.go | 97 + .../github.com/cenkalti/backoff/v4/timer.go | 35 + .../github.com/cenkalti/backoff/v4/tries.go | 38 + .../github.com/clbanning/mxj/v2/.travis.yml | 4 + vendor/github.com/clbanning/mxj/v2/LICENSE | 22 + vendor/github.com/clbanning/mxj/v2/anyxml.go | 201 + .../clbanning/mxj/v2/atomFeedString.xml | 54 + vendor/github.com/clbanning/mxj/v2/doc.go | 143 + .../clbanning/mxj/v2/escapechars.go | 93 + vendor/github.com/clbanning/mxj/v2/exists.go | 9 + vendor/github.com/clbanning/mxj/v2/files.go | 287 + .../clbanning/mxj/v2/files_test.badjson | 2 + .../clbanning/mxj/v2/files_test.badxml | 9 + .../clbanning/mxj/v2/files_test.json | 2 + .../clbanning/mxj/v2/files_test.xml | 9 + 
.../clbanning/mxj/v2/files_test_dup.json | 1 + .../clbanning/mxj/v2/files_test_dup.xml | 1 + .../clbanning/mxj/v2/files_test_indent.json | 12 + .../clbanning/mxj/v2/files_test_indent.xml | 8 + vendor/github.com/clbanning/mxj/v2/gob.go | 35 + vendor/github.com/clbanning/mxj/v2/json.go | 323 + .../github.com/clbanning/mxj/v2/keyvalues.go | 668 + .../github.com/clbanning/mxj/v2/leafnode.go | 112 + vendor/github.com/clbanning/mxj/v2/misc.go | 86 + vendor/github.com/clbanning/mxj/v2/mxj.go | 128 + vendor/github.com/clbanning/mxj/v2/newmap.go | 184 + vendor/github.com/clbanning/mxj/v2/readme.md | 209 + vendor/github.com/clbanning/mxj/v2/remove.go | 37 + vendor/github.com/clbanning/mxj/v2/rename.go | 61 + vendor/github.com/clbanning/mxj/v2/set.go | 26 + .../clbanning/mxj/v2/setfieldsep.go | 20 + .../github.com/clbanning/mxj/v2/songtext.xml | 29 + vendor/github.com/clbanning/mxj/v2/strict.go | 30 + vendor/github.com/clbanning/mxj/v2/struct.go | 54 + .../clbanning/mxj/v2/updatevalues.go | 258 + vendor/github.com/clbanning/mxj/v2/xml.go | 1440 + vendor/github.com/clbanning/mxj/v2/xmlseq.go | 902 + vendor/github.com/clbanning/mxj/v2/xmlseq2.go | 18 + vendor/github.com/cli/safeexec/LICENSE | 25 + vendor/github.com/cli/safeexec/README.md | 48 + vendor/github.com/cli/safeexec/lookpath.go | 17 + .../github.com/cli/safeexec/lookpath_1.18.go | 10 + .../cli/safeexec/lookpath_windows.go | 120 + .../github.com/couchbase/vellum/.travis.yml | 22 + .../couchbase/vellum/CONTRIBUTING.md | 16 + vendor/github.com/couchbase/vellum/LICENSE | 202 + vendor/github.com/couchbase/vellum/README.md | 183 + .../github.com/couchbase/vellum/automaton.go | 85 + vendor/github.com/couchbase/vellum/builder.go | 452 + vendor/github.com/couchbase/vellum/common.go | 547 + .../github.com/couchbase/vellum/decoder_v1.go | 314 + .../github.com/couchbase/vellum/encoder_v1.go | 227 + .../github.com/couchbase/vellum/encoding.go | 87 + vendor/github.com/couchbase/vellum/fst.go | 300 + 
.../couchbase/vellum/fst_iterator.go | 303 + .../couchbase/vellum/levenshtein/LICENSE | 203 + .../couchbase/vellum/levenshtein/README.md | 33 + .../couchbase/vellum/levenshtein/alphabet.go | 125 + .../couchbase/vellum/levenshtein/dfa.go | 250 + .../vellum/levenshtein/levenshtein.go | 64 + .../vellum/levenshtein/levenshtein_nfa.go | 292 + .../vellum/levenshtein/parametric_dfa.go | 349 + .../couchbase/vellum/merge_iterator.go | 188 + vendor/github.com/couchbase/vellum/pack.go | 55 + .../couchbase/vellum/regexp/compile.go | 343 + .../github.com/couchbase/vellum/regexp/dfa.go | 196 + .../couchbase/vellum/regexp/inst.go | 62 + .../couchbase/vellum/regexp/regexp.go | 119 + .../couchbase/vellum/regexp/sparse.go | 54 + .../github.com/couchbase/vellum/registry.go | 114 + .../github.com/couchbase/vellum/transducer.go | 55 + .../github.com/couchbase/vellum/utf8/utf8.go | 268 + vendor/github.com/couchbase/vellum/vellum.go | 111 + .../couchbase/vellum/vellum_mmap.go | 60 + .../couchbase/vellum/vellum_nommap.go | 27 + vendor/github.com/couchbase/vellum/writer.go | 92 + vendor/github.com/dgrijalva/jwt-go/.gitignore | 4 + .../github.com/dgrijalva/jwt-go/.travis.yml | 13 + vendor/github.com/dgrijalva/jwt-go/LICENSE | 8 + .../dgrijalva/jwt-go/MIGRATION_GUIDE.md | 97 + vendor/github.com/dgrijalva/jwt-go/README.md | 100 + .../dgrijalva/jwt-go/VERSION_HISTORY.md | 118 + vendor/github.com/dgrijalva/jwt-go/claims.go | 134 + vendor/github.com/dgrijalva/jwt-go/doc.go | 4 + vendor/github.com/dgrijalva/jwt-go/ecdsa.go | 148 + .../dgrijalva/jwt-go/ecdsa_utils.go | 67 + vendor/github.com/dgrijalva/jwt-go/errors.go | 59 + vendor/github.com/dgrijalva/jwt-go/hmac.go | 95 + .../github.com/dgrijalva/jwt-go/map_claims.go | 94 + vendor/github.com/dgrijalva/jwt-go/none.go | 52 + vendor/github.com/dgrijalva/jwt-go/parser.go | 148 + vendor/github.com/dgrijalva/jwt-go/rsa.go | 101 + vendor/github.com/dgrijalva/jwt-go/rsa_pss.go | 126 + .../github.com/dgrijalva/jwt-go/rsa_utils.go | 101 + 
.../dgrijalva/jwt-go/signing_method.go | 35 + vendor/github.com/dgrijalva/jwt-go/token.go | 108 + .../disintegration/gift/.travis.yml | 13 + vendor/github.com/disintegration/gift/LICENSE | 21 + .../github.com/disintegration/gift/README.md | 255 + .../github.com/disintegration/gift/colors.go | 511 + .../disintegration/gift/convolution.go | 579 + .../github.com/disintegration/gift/effects.go | 87 + vendor/github.com/disintegration/gift/gift.go | 215 + .../github.com/disintegration/gift/pixels.go | 493 + vendor/github.com/disintegration/gift/rank.go | 223 + .../github.com/disintegration/gift/resize.go | 462 + .../disintegration/gift/transform.go | 498 + .../github.com/disintegration/gift/utils.go | 226 + vendor/github.com/dlclark/regexp2/.gitignore | 27 + vendor/github.com/dlclark/regexp2/.travis.yml | 7 + vendor/github.com/dlclark/regexp2/ATTRIB | 133 + vendor/github.com/dlclark/regexp2/LICENSE | 21 + vendor/github.com/dlclark/regexp2/README.md | 167 + .../github.com/dlclark/regexp2/fastclock.go | 129 + vendor/github.com/dlclark/regexp2/match.go | 347 + vendor/github.com/dlclark/regexp2/regexp.go | 395 + vendor/github.com/dlclark/regexp2/replace.go | 177 + vendor/github.com/dlclark/regexp2/runner.go | 1609 + .../dlclark/regexp2/syntax/charclass.go | 865 + .../github.com/dlclark/regexp2/syntax/code.go | 274 + .../dlclark/regexp2/syntax/escape.go | 94 + .../github.com/dlclark/regexp2/syntax/fuzz.go | 20 + .../dlclark/regexp2/syntax/parser.go | 2262 + .../dlclark/regexp2/syntax/prefix.go | 896 + .../dlclark/regexp2/syntax/replacerdata.go | 87 + .../github.com/dlclark/regexp2/syntax/tree.go | 654 + .../dlclark/regexp2/syntax/writer.go | 500 + vendor/github.com/dlclark/regexp2/testoutput1 | 7061 + vendor/github.com/fatih/color/LICENSE.md | 20 + vendor/github.com/fatih/color/README.md | 176 + vendor/github.com/fatih/color/color.go | 650 + .../github.com/fatih/color/color_windows.go | 19 + vendor/github.com/fatih/color/doc.go | 134 + 
.../github.com/frankban/quicktest/.gitignore | 1 + .../frankban/quicktest/.godocdown.template | 13 + vendor/github.com/frankban/quicktest/LICENSE | 21 + .../github.com/frankban/quicktest/README.md | 347 + .../github.com/frankban/quicktest/checker.go | 799 + .../frankban/quicktest/checker_err.go | 92 + .../github.com/frankban/quicktest/comment.go | 31 + vendor/github.com/frankban/quicktest/doc.go | 340 + vendor/github.com/frankban/quicktest/error.go | 35 + .../github.com/frankban/quicktest/format.go | 91 + vendor/github.com/frankban/quicktest/iter.go | 55 + .../github.com/frankban/quicktest/mapiter.go | 29 + vendor/github.com/frankban/quicktest/patch.go | 72 + .../frankban/quicktest/patch_go1.14.go | 42 + .../frankban/quicktest/patch_go1.17.go | 27 + .../frankban/quicktest/quicktest.go | 370 + .../github.com/frankban/quicktest/report.go | 248 + .../glycerine/go-unsnap-stream/.gitignore | 22 + .../glycerine/go-unsnap-stream/LICENSE | 21 + .../glycerine/go-unsnap-stream/README.md | 20 + .../glycerine/go-unsnap-stream/binary.dat | Bin 0 -> 5592 bytes .../go-unsnap-stream/binary.dat.snappy | Bin 0 -> 5610 bytes .../glycerine/go-unsnap-stream/rbuf.go | 375 + .../glycerine/go-unsnap-stream/snap.go | 100 + .../glycerine/go-unsnap-stream/unenc.txt | 1 + .../go-unsnap-stream/unenc.txt.snappy | Bin 0 -> 31 bytes .../glycerine/go-unsnap-stream/unsnap.go | 513 + vendor/github.com/go-logr/logr/.golangci.yaml | 26 + vendor/github.com/go-logr/logr/CHANGELOG.md | 6 + .../github.com/go-logr/logr/CONTRIBUTING.md | 17 + vendor/github.com/go-logr/logr/LICENSE | 201 + vendor/github.com/go-logr/logr/README.md | 406 + vendor/github.com/go-logr/logr/SECURITY.md | 18 + vendor/github.com/go-logr/logr/context.go | 33 + .../github.com/go-logr/logr/context_noslog.go | 49 + .../github.com/go-logr/logr/context_slog.go | 83 + vendor/github.com/go-logr/logr/discard.go | 24 + vendor/github.com/go-logr/logr/funcr/funcr.go | 911 + .../github.com/go-logr/logr/funcr/slogsink.go | 105 + 
vendor/github.com/go-logr/logr/logr.go | 520 + vendor/github.com/go-logr/logr/sloghandler.go | 192 + vendor/github.com/go-logr/logr/slogr.go | 100 + vendor/github.com/go-logr/logr/slogsink.go | 120 + vendor/github.com/go-logr/stdr/LICENSE | 201 + vendor/github.com/go-logr/stdr/README.md | 6 + vendor/github.com/go-logr/stdr/stdr.go | 170 + .../go-openapi/analysis/.codecov.yml | 5 + .../go-openapi/analysis/.gitattributes | 2 + .../github.com/go-openapi/analysis/.gitignore | 5 + .../go-openapi/analysis/.golangci.yml | 61 + .../go-openapi/analysis/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/analysis/LICENSE | 202 + .../github.com/go-openapi/analysis/README.md | 27 + .../go-openapi/analysis/analyzer.go | 1064 + .../github.com/go-openapi/analysis/debug.go | 23 + vendor/github.com/go-openapi/analysis/doc.go | 43 + .../github.com/go-openapi/analysis/fixer.go | 79 + .../github.com/go-openapi/analysis/flatten.go | 814 + .../go-openapi/analysis/flatten_name.go | 308 + .../go-openapi/analysis/flatten_options.go | 79 + .../analysis/internal/debug/debug.go | 41 + .../internal/flatten/normalize/normalize.go | 87 + .../internal/flatten/operations/operations.go | 90 + .../internal/flatten/replace/replace.go | 458 + .../flatten/schutils/flatten_schema.go | 29 + .../analysis/internal/flatten/sortref/keys.go | 201 + .../internal/flatten/sortref/sort_ref.go | 141 + .../github.com/go-openapi/analysis/mixin.go | 515 + .../github.com/go-openapi/analysis/schema.go | 256 + .../go-openapi/errors/.gitattributes | 1 + .../github.com/go-openapi/errors/.gitignore | 2 + .../go-openapi/errors/.golangci.yml | 62 + .../go-openapi/errors/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/errors/LICENSE | 202 + vendor/github.com/go-openapi/errors/README.md | 8 + vendor/github.com/go-openapi/errors/api.go | 192 + vendor/github.com/go-openapi/errors/auth.go | 22 + vendor/github.com/go-openapi/errors/doc.go | 26 + .../github.com/go-openapi/errors/headers.go | 103 + 
.../go-openapi/errors/middleware.go | 50 + .../github.com/go-openapi/errors/parsing.go | 78 + vendor/github.com/go-openapi/errors/schema.go | 615 + .../go-openapi/jsonpointer/.editorconfig | 26 + .../go-openapi/jsonpointer/.gitignore | 1 + .../go-openapi/jsonpointer/.golangci.yml | 61 + .../go-openapi/jsonpointer/CODE_OF_CONDUCT.md | 74 + .../github.com/go-openapi/jsonpointer/LICENSE | 202 + .../go-openapi/jsonpointer/README.md | 19 + .../go-openapi/jsonpointer/pointer.go | 531 + .../go-openapi/jsonreference/.gitignore | 1 + .../go-openapi/jsonreference/.golangci.yml | 61 + .../jsonreference/CODE_OF_CONDUCT.md | 74 + .../go-openapi/jsonreference/LICENSE | 202 + .../go-openapi/jsonreference/README.md | 19 + .../jsonreference/internal/normalize_url.go | 69 + .../go-openapi/jsonreference/reference.go | 158 + .../github.com/go-openapi/loads/.editorconfig | 26 + vendor/github.com/go-openapi/loads/.gitignore | 4 + .../github.com/go-openapi/loads/.golangci.yml | 61 + .../github.com/go-openapi/loads/.travis.yml | 25 + .../go-openapi/loads/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/loads/LICENSE | 202 + vendor/github.com/go-openapi/loads/README.md | 6 + vendor/github.com/go-openapi/loads/doc.go | 18 + vendor/github.com/go-openapi/loads/loaders.go | 133 + vendor/github.com/go-openapi/loads/options.go | 61 + vendor/github.com/go-openapi/loads/spec.go | 275 + .../go-openapi/runtime/.editorconfig | 26 + .../go-openapi/runtime/.gitattributes | 1 + .../github.com/go-openapi/runtime/.gitignore | 5 + .../go-openapi/runtime/.golangci.yml | 62 + .../go-openapi/runtime/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/runtime/LICENSE | 202 + .../github.com/go-openapi/runtime/README.md | 10 + .../go-openapi/runtime/bytestream.go | 222 + .../go-openapi/runtime/client/auth_info.go | 77 + .../go-openapi/runtime/client/keepalive.go | 54 + .../runtime/client/opentelemetry.go | 211 + .../go-openapi/runtime/client/opentracing.go | 99 + .../go-openapi/runtime/client/request.go 
| 482 + .../go-openapi/runtime/client/response.go | 50 + .../go-openapi/runtime/client/runtime.go | 552 + .../go-openapi/runtime/client_auth_info.go | 30 + .../go-openapi/runtime/client_operation.go | 41 + .../go-openapi/runtime/client_request.go | 152 + .../go-openapi/runtime/client_response.go | 110 + .../go-openapi/runtime/constants.go | 49 + vendor/github.com/go-openapi/runtime/csv.go | 350 + .../go-openapi/runtime/csv_options.go | 121 + .../github.com/go-openapi/runtime/discard.go | 9 + vendor/github.com/go-openapi/runtime/file.go | 19 + .../github.com/go-openapi/runtime/headers.go | 45 + .../go-openapi/runtime/interfaces.go | 112 + vendor/github.com/go-openapi/runtime/json.go | 38 + .../go-openapi/runtime/logger/logger.go | 20 + .../go-openapi/runtime/logger/standard.go | 24 + .../go-openapi/runtime/middleware/context.go | 722 + .../runtime/middleware/denco/LICENSE | 19 + .../runtime/middleware/denco/README.md | 180 + .../runtime/middleware/denco/router.go | 467 + .../runtime/middleware/denco/server.go | 106 + .../runtime/middleware/denco/util.go | 12 + .../go-openapi/runtime/middleware/doc.go | 63 + .../runtime/middleware/header/header.go | 332 + .../runtime/middleware/negotiate.go | 98 + .../runtime/middleware/not_implemented.go | 67 + .../runtime/middleware/operation.go | 30 + .../runtime/middleware/parameter.go | 491 + .../go-openapi/runtime/middleware/rapidoc.go | 80 + .../go-openapi/runtime/middleware/redoc.go | 94 + .../go-openapi/runtime/middleware/request.go | 117 + .../go-openapi/runtime/middleware/router.go | 531 + .../go-openapi/runtime/middleware/security.go | 39 + .../go-openapi/runtime/middleware/spec.go | 102 + .../runtime/middleware/swaggerui.go | 175 + .../runtime/middleware/swaggerui_oauth2.go | 105 + .../runtime/middleware/ui_options.go | 173 + .../runtime/middleware/untyped/api.go | 287 + .../runtime/middleware/validation.go | 130 + .../github.com/go-openapi/runtime/request.go | 149 + .../runtime/security/authenticator.go | 277 + 
.../go-openapi/runtime/security/authorizer.go | 27 + .../github.com/go-openapi/runtime/statuses.go | 90 + vendor/github.com/go-openapi/runtime/text.go | 116 + .../github.com/go-openapi/runtime/values.go | 19 + vendor/github.com/go-openapi/runtime/xml.go | 36 + .../go-openapi/runtime/yamlpc/yaml.go | 39 + .../github.com/go-openapi/spec/.editorconfig | 26 + vendor/github.com/go-openapi/spec/.gitignore | 1 + .../github.com/go-openapi/spec/.golangci.yml | 61 + .../go-openapi/spec/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/spec/LICENSE | 202 + vendor/github.com/go-openapi/spec/README.md | 54 + vendor/github.com/go-openapi/spec/cache.go | 98 + .../go-openapi/spec/contact_info.go | 57 + vendor/github.com/go-openapi/spec/debug.go | 49 + vendor/github.com/go-openapi/spec/embed.go | 17 + vendor/github.com/go-openapi/spec/errors.go | 19 + vendor/github.com/go-openapi/spec/expander.go | 607 + .../go-openapi/spec/external_docs.go | 24 + vendor/github.com/go-openapi/spec/header.go | 203 + vendor/github.com/go-openapi/spec/info.go | 184 + vendor/github.com/go-openapi/spec/items.go | 234 + vendor/github.com/go-openapi/spec/license.go | 56 + .../github.com/go-openapi/spec/normalizer.go | 202 + .../go-openapi/spec/normalizer_nonwindows.go | 44 + .../go-openapi/spec/normalizer_windows.go | 154 + .../github.com/go-openapi/spec/operation.go | 400 + .../github.com/go-openapi/spec/parameter.go | 326 + .../github.com/go-openapi/spec/path_item.go | 87 + vendor/github.com/go-openapi/spec/paths.go | 97 + .../github.com/go-openapi/spec/properties.go | 91 + vendor/github.com/go-openapi/spec/ref.go | 193 + vendor/github.com/go-openapi/spec/resolver.go | 127 + vendor/github.com/go-openapi/spec/response.go | 152 + .../github.com/go-openapi/spec/responses.go | 140 + vendor/github.com/go-openapi/spec/schema.go | 645 + .../go-openapi/spec/schema_loader.go | 331 + .../spec/schemas/jsonschema-draft-04.json | 149 + .../go-openapi/spec/schemas/v2/schema.json | 1607 + 
.../go-openapi/spec/security_scheme.go | 170 + vendor/github.com/go-openapi/spec/spec.go | 78 + vendor/github.com/go-openapi/spec/swagger.go | 448 + vendor/github.com/go-openapi/spec/tag.go | 75 + vendor/github.com/go-openapi/spec/url_go19.go | 11 + .../github.com/go-openapi/spec/validations.go | 215 + .../github.com/go-openapi/spec/xml_object.go | 68 + .../go-openapi/strfmt/.editorconfig | 26 + .../go-openapi/strfmt/.gitattributes | 2 + .../github.com/go-openapi/strfmt/.gitignore | 2 + .../go-openapi/strfmt/.golangci.yml | 61 + .../go-openapi/strfmt/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/strfmt/LICENSE | 202 + vendor/github.com/go-openapi/strfmt/README.md | 87 + vendor/github.com/go-openapi/strfmt/bson.go | 165 + vendor/github.com/go-openapi/strfmt/date.go | 187 + .../github.com/go-openapi/strfmt/default.go | 2051 + vendor/github.com/go-openapi/strfmt/doc.go | 18 + .../github.com/go-openapi/strfmt/duration.go | 211 + vendor/github.com/go-openapi/strfmt/format.go | 327 + vendor/github.com/go-openapi/strfmt/time.go | 321 + vendor/github.com/go-openapi/strfmt/ulid.go | 230 + .../github.com/go-openapi/swag/.editorconfig | 26 + .../github.com/go-openapi/swag/.gitattributes | 2 + vendor/github.com/go-openapi/swag/.gitignore | 5 + .../github.com/go-openapi/swag/.golangci.yml | 60 + .../github.com/go-openapi/swag/BENCHMARK.md | 52 + .../go-openapi/swag/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/swag/LICENSE | 202 + vendor/github.com/go-openapi/swag/README.md | 23 + vendor/github.com/go-openapi/swag/convert.go | 208 + .../go-openapi/swag/convert_types.go | 730 + vendor/github.com/go-openapi/swag/doc.go | 31 + vendor/github.com/go-openapi/swag/file.go | 33 + .../go-openapi/swag/initialism_index.go | 202 + vendor/github.com/go-openapi/swag/json.go | 312 + vendor/github.com/go-openapi/swag/loading.go | 176 + .../github.com/go-openapi/swag/name_lexem.go | 93 + vendor/github.com/go-openapi/swag/net.go | 38 + vendor/github.com/go-openapi/swag/path.go 
| 59 + vendor/github.com/go-openapi/swag/split.go | 508 + .../go-openapi/swag/string_bytes.go | 8 + vendor/github.com/go-openapi/swag/util.go | 364 + vendor/github.com/go-openapi/swag/yaml.go | 481 + .../go-openapi/validate/.editorconfig | 26 + .../go-openapi/validate/.gitattributes | 2 + .../github.com/go-openapi/validate/.gitignore | 5 + .../go-openapi/validate/.golangci.yml | 61 + .../go-openapi/validate/BENCHMARK.md | 31 + .../go-openapi/validate/CODE_OF_CONDUCT.md | 74 + vendor/github.com/go-openapi/validate/LICENSE | 202 + .../github.com/go-openapi/validate/README.md | 36 + .../github.com/go-openapi/validate/context.go | 56 + .../github.com/go-openapi/validate/debug.go | 47 + .../go-openapi/validate/default_validator.go | 304 + vendor/github.com/go-openapi/validate/doc.go | 87 + .../go-openapi/validate/example_validator.go | 299 + .../github.com/go-openapi/validate/formats.go | 99 + .../github.com/go-openapi/validate/helpers.go | 333 + .../go-openapi/validate/object_validator.go | 431 + .../github.com/go-openapi/validate/options.go | 62 + .../github.com/go-openapi/validate/pools.go | 366 + .../go-openapi/validate/pools_debug.go | 1012 + .../github.com/go-openapi/validate/result.go | 563 + vendor/github.com/go-openapi/validate/rexp.go | 71 + .../github.com/go-openapi/validate/schema.go | 354 + .../go-openapi/validate/schema_messages.go | 78 + .../go-openapi/validate/schema_option.go | 83 + .../go-openapi/validate/schema_props.go | 356 + .../go-openapi/validate/slice_validator.go | 150 + vendor/github.com/go-openapi/validate/spec.go | 852 + .../go-openapi/validate/spec_messages.go | 366 + vendor/github.com/go-openapi/validate/type.go | 213 + .../go-openapi/validate/update-fixtures.sh | 15 + .../go-openapi/validate/validator.go | 1051 + .../github.com/go-openapi/validate/values.go | 450 + vendor/github.com/gobuffalo/flect/.gitignore | 29 + .../gobuffalo/flect/.gometalinter.json | 3 + vendor/github.com/gobuffalo/flect/LICENSE | 21 + 
vendor/github.com/gobuffalo/flect/Makefile | 61 + vendor/github.com/gobuffalo/flect/README.md | 63 + .../github.com/gobuffalo/flect/SHOULDERS.md | 12 + vendor/github.com/gobuffalo/flect/acronyms.go | 152 + vendor/github.com/gobuffalo/flect/camelize.go | 44 + .../github.com/gobuffalo/flect/capitalize.go | 24 + .../github.com/gobuffalo/flect/custom_data.go | 88 + .../github.com/gobuffalo/flect/dasherize.go | 34 + vendor/github.com/gobuffalo/flect/flect.go | 43 + vendor/github.com/gobuffalo/flect/humanize.go | 31 + vendor/github.com/gobuffalo/flect/ident.go | 122 + .../github.com/gobuffalo/flect/lower_upper.go | 13 + .../github.com/gobuffalo/flect/ordinalize.go | 43 + .../github.com/gobuffalo/flect/pascalize.go | 32 + .../gobuffalo/flect/plural_rules.go | 417 + .../github.com/gobuffalo/flect/pluralize.go | 72 + vendor/github.com/gobuffalo/flect/rule.go | 17 + .../gobuffalo/flect/singular_rules.go | 26 + .../github.com/gobuffalo/flect/singularize.go | 69 + vendor/github.com/gobuffalo/flect/titleize.go | 38 + .../github.com/gobuffalo/flect/underscore.go | 35 + vendor/github.com/gobuffalo/flect/version.go | 4 + vendor/github.com/gobwas/glob/.gitignore | 8 + vendor/github.com/gobwas/glob/.travis.yml | 9 + vendor/github.com/gobwas/glob/LICENSE | 21 + vendor/github.com/gobwas/glob/bench.sh | 26 + .../gobwas/glob/compiler/compiler.go | 525 + vendor/github.com/gobwas/glob/glob.go | 80 + vendor/github.com/gobwas/glob/match/any.go | 45 + vendor/github.com/gobwas/glob/match/any_of.go | 82 + vendor/github.com/gobwas/glob/match/btree.go | 146 + .../github.com/gobwas/glob/match/contains.go | 58 + .../github.com/gobwas/glob/match/every_of.go | 99 + vendor/github.com/gobwas/glob/match/list.go | 49 + vendor/github.com/gobwas/glob/match/match.go | 81 + vendor/github.com/gobwas/glob/match/max.go | 49 + vendor/github.com/gobwas/glob/match/min.go | 57 + .../github.com/gobwas/glob/match/nothing.go | 27 + vendor/github.com/gobwas/glob/match/prefix.go | 50 + 
.../gobwas/glob/match/prefix_any.go | 55 + .../gobwas/glob/match/prefix_suffix.go | 62 + vendor/github.com/gobwas/glob/match/range.go | 48 + vendor/github.com/gobwas/glob/match/row.go | 77 + .../github.com/gobwas/glob/match/segments.go | 91 + vendor/github.com/gobwas/glob/match/single.go | 43 + vendor/github.com/gobwas/glob/match/suffix.go | 35 + .../gobwas/glob/match/suffix_any.go | 43 + vendor/github.com/gobwas/glob/match/super.go | 33 + vendor/github.com/gobwas/glob/match/text.go | 45 + vendor/github.com/gobwas/glob/readme.md | 148 + .../github.com/gobwas/glob/syntax/ast/ast.go | 122 + .../gobwas/glob/syntax/ast/parser.go | 157 + .../gobwas/glob/syntax/lexer/lexer.go | 273 + .../gobwas/glob/syntax/lexer/token.go | 88 + .../github.com/gobwas/glob/syntax/syntax.go | 14 + .../gobwas/glob/util/runes/runes.go | 154 + .../gobwas/glob/util/strings/strings.go | 39 + vendor/github.com/gofrs/uuid/.gitignore | 15 + vendor/github.com/gofrs/uuid/LICENSE | 20 + vendor/github.com/gofrs/uuid/README.md | 117 + vendor/github.com/gofrs/uuid/codec.go | 234 + vendor/github.com/gofrs/uuid/fuzz.go | 48 + vendor/github.com/gofrs/uuid/generator.go | 456 + vendor/github.com/gofrs/uuid/sql.go | 116 + vendor/github.com/gofrs/uuid/uuid.go | 285 + vendor/github.com/gohugoio/go-i18n/v2/LICENSE | 19 + .../gohugoio/go-i18n/v2/i18n/bundle.go | 193 + .../gohugoio/go-i18n/v2/i18n/bundlefs.go | 18 + .../gohugoio/go-i18n/v2/i18n/doc.go | 24 + .../gohugoio/go-i18n/v2/i18n/localizer.go | 223 + .../gohugoio/go-i18n/v2/i18n/message.go | 221 + .../go-i18n/v2/i18n/message_template.go | 65 + .../gohugoio/go-i18n/v2/i18n/parse.go | 166 + .../go-i18n/v2/internal/plural/doc.go | 3 + .../go-i18n/v2/internal/plural/form.go | 16 + .../go-i18n/v2/internal/plural/operands.go | 120 + .../go-i18n/v2/internal/plural/rule.go | 44 + .../go-i18n/v2/internal/plural/rule_gen.go | 589 + .../go-i18n/v2/internal/plural/rules.go | 24 + .../gohugoio/go-i18n/v2/internal/template.go | 51 + 
vendor/github.com/gohugoio/locales/.gitignore | 24 + .../github.com/gohugoio/locales/.travis.yml | 26 + vendor/github.com/gohugoio/locales/LICENSE | 21 + vendor/github.com/gohugoio/locales/README.md | 172 + .../gohugoio/locales/currency/currency.go | 311 + vendor/github.com/gohugoio/locales/logo.png | Bin 0 -> 37360 bytes vendor/github.com/gohugoio/locales/rules.go | 293 + .../gohugoio/localescompressed/.gitignore | 16 + .../gohugoio/localescompressed/LICENSE | 21 + .../gohugoio/localescompressed/README.md | 5 + .../localescompressed/currencies.autogen.go | 310 + .../gohugoio/localescompressed/localen.go | 181 + .../localescompressed/locales.autogen.go | 69006 ++++++ .../gohugoio/localescompressed/locales.go | 54 + vendor/github.com/golang/protobuf/AUTHORS | 3 + .../github.com/golang/protobuf/CONTRIBUTORS | 3 + vendor/github.com/golang/protobuf/LICENSE | 28 + .../golang/protobuf/proto/buffer.go | 324 + .../golang/protobuf/proto/defaults.go | 63 + .../golang/protobuf/proto/deprecated.go | 113 + .../golang/protobuf/proto/discard.go | 58 + .../golang/protobuf/proto/extensions.go | 356 + .../golang/protobuf/proto/properties.go | 306 + .../github.com/golang/protobuf/proto/proto.go | 167 + .../golang/protobuf/proto/registry.go | 317 + .../golang/protobuf/proto/text_decode.go | 801 + .../golang/protobuf/proto/text_encode.go | 560 + .../github.com/golang/protobuf/proto/wire.go | 78 + .../golang/protobuf/proto/wrappers.go | 34 + vendor/github.com/golang/snappy/.gitignore | 16 + vendor/github.com/golang/snappy/AUTHORS | 18 + vendor/github.com/golang/snappy/CONTRIBUTORS | 41 + vendor/github.com/golang/snappy/LICENSE | 27 + vendor/github.com/golang/snappy/README | 107 + vendor/github.com/golang/snappy/decode.go | 264 + .../github.com/golang/snappy/decode_amd64.s | 490 + .../github.com/golang/snappy/decode_arm64.s | 494 + vendor/github.com/golang/snappy/decode_asm.go | 15 + .../github.com/golang/snappy/decode_other.go | 115 + vendor/github.com/golang/snappy/encode.go | 289 + 
.../github.com/golang/snappy/encode_amd64.s | 730 + .../github.com/golang/snappy/encode_arm64.s | 722 + vendor/github.com/golang/snappy/encode_asm.go | 30 + .../github.com/golang/snappy/encode_other.go | 238 + vendor/github.com/golang/snappy/snappy.go | 98 + vendor/github.com/google/go-cmp/LICENSE | 27 + .../google/go-cmp/cmp/cmpopts/equate.go | 185 + .../google/go-cmp/cmp/cmpopts/ignore.go | 206 + .../google/go-cmp/cmp/cmpopts/sort.go | 147 + .../go-cmp/cmp/cmpopts/struct_filter.go | 189 + .../google/go-cmp/cmp/cmpopts/xform.go | 36 + .../github.com/google/go-cmp/cmp/compare.go | 671 + vendor/github.com/google/go-cmp/cmp/export.go | 31 + .../go-cmp/cmp/internal/diff/debug_disable.go | 18 + .../go-cmp/cmp/internal/diff/debug_enable.go | 123 + .../google/go-cmp/cmp/internal/diff/diff.go | 402 + .../google/go-cmp/cmp/internal/flags/flags.go | 9 + .../go-cmp/cmp/internal/function/func.go | 99 + .../google/go-cmp/cmp/internal/value/name.go | 164 + .../go-cmp/cmp/internal/value/pointer.go | 34 + .../google/go-cmp/cmp/internal/value/sort.go | 106 + .../github.com/google/go-cmp/cmp/options.go | 554 + vendor/github.com/google/go-cmp/cmp/path.go | 390 + vendor/github.com/google/go-cmp/cmp/report.go | 54 + .../google/go-cmp/cmp/report_compare.go | 433 + .../google/go-cmp/cmp/report_references.go | 264 + .../google/go-cmp/cmp/report_reflect.go | 414 + .../google/go-cmp/cmp/report_slices.go | 614 + .../google/go-cmp/cmp/report_text.go | 432 + .../google/go-cmp/cmp/report_value.go | 121 + vendor/github.com/google/uuid/CHANGELOG.md | 41 + vendor/github.com/google/uuid/CONTRIBUTING.md | 26 + vendor/github.com/google/uuid/CONTRIBUTORS | 9 + vendor/github.com/google/uuid/LICENSE | 27 + vendor/github.com/google/uuid/README.md | 21 + vendor/github.com/google/uuid/dce.go | 80 + vendor/github.com/google/uuid/doc.go | 12 + vendor/github.com/google/uuid/hash.go | 59 + vendor/github.com/google/uuid/marshal.go | 38 + vendor/github.com/google/uuid/node.go | 90 + 
vendor/github.com/google/uuid/node_js.go | 12 + vendor/github.com/google/uuid/node_net.go | 33 + vendor/github.com/google/uuid/null.go | 118 + vendor/github.com/google/uuid/sql.go | 59 + vendor/github.com/google/uuid/time.go | 134 + vendor/github.com/google/uuid/util.go | 43 + vendor/github.com/google/uuid/uuid.go | 365 + vendor/github.com/google/uuid/version1.go | 44 + vendor/github.com/google/uuid/version4.go | 76 + vendor/github.com/google/uuid/version6.go | 56 + vendor/github.com/google/uuid/version7.go | 104 + .../github.com/gorilla/schema/.editorconfig | 20 + vendor/github.com/gorilla/schema/.gitignore | 1 + vendor/github.com/gorilla/schema/LICENSE | 27 + vendor/github.com/gorilla/schema/Makefile | 34 + vendor/github.com/gorilla/schema/README.md | 94 + vendor/github.com/gorilla/schema/cache.go | 305 + vendor/github.com/gorilla/schema/converter.go | 145 + vendor/github.com/gorilla/schema/decoder.go | 521 + vendor/github.com/gorilla/schema/doc.go | 148 + vendor/github.com/gorilla/schema/encoder.go | 214 + .../hashicorp/golang-lru/v2/LICENSE | 364 + .../hashicorp/golang-lru/v2/internal/list.go | 142 + .../golang-lru/v2/simplelru/LICENSE_list | 29 + .../hashicorp/golang-lru/v2/simplelru/lru.go | 177 + .../golang-lru/v2/simplelru/lru_interface.go | 46 + vendor/github.com/jdkato/prose/AUTHORS.md | 2 + vendor/github.com/jdkato/prose/LICENSE | 21 + .../jdkato/prose/internal/util/util.go | 98 + .../jdkato/prose/transform/title.go | 108 + .../jdkato/prose/transform/transform.go | 86 + .../jdkato/prose/transform/transform_fuzz.go | 22 + vendor/github.com/josharian/intern/README.md | 5 + vendor/github.com/josharian/intern/intern.go | 44 + vendor/github.com/josharian/intern/license.md | 21 + .../go-windows-terminal-sequences/LICENSE | 9 + .../go-windows-terminal-sequences/README.md | 42 + .../sequences.go | 35 + .../sequences_dummy.go | 11 + vendor/github.com/kr/pretty/.gitignore | 5 + vendor/github.com/kr/pretty/License | 19 + vendor/github.com/kr/pretty/Readme | 9 + 
vendor/github.com/kr/pretty/diff.go | 295 + vendor/github.com/kr/pretty/formatter.go | 355 + vendor/github.com/kr/pretty/pretty.go | 108 + vendor/github.com/kr/pretty/zero.go | 41 + vendor/github.com/kr/text/License | 19 + vendor/github.com/kr/text/Readme | 3 + vendor/github.com/kr/text/doc.go | 3 + vendor/github.com/kr/text/indent.go | 74 + vendor/github.com/kr/text/wrap.go | 86 + vendor/github.com/kyokomi/emoji/v2/.gitignore | 2 + vendor/github.com/kyokomi/emoji/v2/LICENSE | 21 + vendor/github.com/kyokomi/emoji/v2/README.md | 53 + vendor/github.com/kyokomi/emoji/v2/emoji.go | 158 + .../kyokomi/emoji/v2/emoji_codemap.go | 7839 + vendor/github.com/mailru/easyjson/LICENSE | 7 + .../github.com/mailru/easyjson/buffer/pool.go | 278 + .../mailru/easyjson/jlexer/bytestostr.go | 24 + .../easyjson/jlexer/bytestostr_nounsafe.go | 13 + .../mailru/easyjson/jlexer/error.go | 15 + .../mailru/easyjson/jlexer/lexer.go | 1244 + .../mailru/easyjson/jwriter/writer.go | 405 + .../marekm4/color-extractor/LICENSE | 21 + .../marekm4/color-extractor/README.md | 46 + .../color-extractor/color_extractor.go | 103 + vendor/github.com/mattn/go-colorable/LICENSE | 21 + .../github.com/mattn/go-colorable/README.md | 48 + .../mattn/go-colorable/colorable_appengine.go | 38 + .../mattn/go-colorable/colorable_others.go | 38 + .../mattn/go-colorable/colorable_windows.go | 1047 + .../github.com/mattn/go-colorable/go.test.sh | 12 + .../mattn/go-colorable/noncolorable.go | 57 + vendor/github.com/mattn/go-isatty/LICENSE | 9 + vendor/github.com/mattn/go-isatty/README.md | 50 + vendor/github.com/mattn/go-isatty/doc.go | 2 + vendor/github.com/mattn/go-isatty/go.test.sh | 12 + .../github.com/mattn/go-isatty/isatty_bsd.go | 20 + .../mattn/go-isatty/isatty_others.go | 17 + .../mattn/go-isatty/isatty_plan9.go | 23 + .../mattn/go-isatty/isatty_solaris.go | 21 + .../mattn/go-isatty/isatty_tcgets.go | 20 + .../mattn/go-isatty/isatty_windows.go | 125 + .../mitchellh/hashstructure/LICENSE | 21 + 
.../mitchellh/hashstructure/README.md | 67 + .../mitchellh/hashstructure/hashstructure.go | 422 + .../mitchellh/hashstructure/include.go | 22 + .../mitchellh/mapstructure/CHANGELOG.md | 96 + .../github.com/mitchellh/mapstructure/LICENSE | 21 + .../mitchellh/mapstructure/README.md | 46 + .../mitchellh/mapstructure/decode_hooks.go | 279 + .../mitchellh/mapstructure/error.go | 50 + .../mitchellh/mapstructure/mapstructure.go | 1540 + vendor/github.com/mschoch/smat/.gitignore | 14 + vendor/github.com/mschoch/smat/.travis.yml | 16 + vendor/github.com/mschoch/smat/LICENSE | 202 + vendor/github.com/mschoch/smat/README.md | 166 + vendor/github.com/mschoch/smat/actionseq.go | 61 + vendor/github.com/mschoch/smat/smat.go | 161 + vendor/github.com/muesli/smartcrop/.gitignore | 25 + .../github.com/muesli/smartcrop/.travis.yml | 40 + vendor/github.com/muesli/smartcrop/LICENSE | 21 + vendor/github.com/muesli/smartcrop/README.md | 70 + vendor/github.com/muesli/smartcrop/debug.go | 108 + .../muesli/smartcrop/options/resizer.go | 38 + .../github.com/muesli/smartcrop/smartcrop.go | 474 + vendor/github.com/netlify/open-api/v2/LICENSE | 20 + .../open-api/v2/go/models/access_token.go | 55 + .../v2/go/models/account_add_member_setup.go | 109 + .../v2/go/models/account_membership.go | 116 + .../models/account_membership_capabilities.go | 96 + .../open-api/v2/go/models/account_setup.go | 140 + .../open-api/v2/go/models/account_type.go | 64 + .../go/models/account_update_member_setup.go | 163 + .../v2/go/models/account_update_setup.go | 61 + .../v2/go/models/account_usage_capability.go | 46 + .../netlify/open-api/v2/go/models/asset.go | 76 + .../open-api/v2/go/models/asset_form.go | 46 + .../v2/go/models/asset_public_signature.go | 43 + .../open-api/v2/go/models/asset_signature.go | 96 + .../open-api/v2/go/models/audit_log.go | 77 + .../v2/go/models/audit_log_payload.go | 182 + .../netlify/open-api/v2/go/models/build.go | 58 + .../open-api/v2/go/models/build_hook.go | 58 + 
.../open-api/v2/go/models/build_hook_setup.go | 46 + .../open-api/v2/go/models/build_log_msg.go | 115 + .../open-api/v2/go/models/build_setup.go | 46 + .../open-api/v2/go/models/build_status.go | 83 + .../v2/go/models/build_status_minutes.go | 64 + .../create_env_vars_params_body_items.go | 132 + .../netlify/open-api/v2/go/models/deploy.go | 167 + .../open-api/v2/go/models/deploy_files.go | 131 + .../open-api/v2/go/models/deploy_key.go | 49 + .../v2/go/models/deploy_site_capabilities.go | 43 + .../open-api/v2/go/models/deployed_branch.go | 58 + .../open-api/v2/go/models/dev_server.go | 76 + .../open-api/v2/go/models/dev_server_hook.go | 118 + .../v2/go/models/dev_server_hook_setup.go | 106 + .../open-api/v2/go/models/dns_record.go | 73 + .../v2/go/models/dns_record_create.go | 67 + .../open-api/v2/go/models/dns_records.go | 45 + .../netlify/open-api/v2/go/models/dns_zone.go | 125 + .../open-api/v2/go/models/dns_zone_setup.go | 49 + .../open-api/v2/go/models/dns_zones.go | 45 + .../netlify/open-api/v2/go/models/env_var.go | 178 + .../open-api/v2/go/models/env_var_user.go | 52 + .../open-api/v2/go/models/env_var_value.go | 121 + .../netlify/open-api/v2/go/models/error.go | 67 + .../v2/go/models/excluded_function_route.go | 49 + .../netlify/open-api/v2/go/models/file.go | 55 + .../netlify/open-api/v2/go/models/form.go | 61 + .../netlify/open-api/v2/go/models/function.go | 49 + .../open-api/v2/go/models/function_config.go | 149 + .../open-api/v2/go/models/function_route.go | 106 + .../v2/go/models/function_schedule.go | 46 + .../netlify/open-api/v2/go/models/hook.go | 64 + .../open-api/v2/go/models/hook_type.go | 49 + .../netlify/open-api/v2/go/models/member.go | 55 + .../netlify/open-api/v2/go/models/metadata.go | 11 + .../open-api/v2/go/models/minify_options.go | 46 + .../open-api/v2/go/models/payment_method.go | 89 + .../v2/go/models/payment_method_data.go | 49 + .../netlify/open-api/v2/go/models/plugin.go | 46 + .../open-api/v2/go/models/plugin_params.go | 43 + 
.../open-api/v2/go/models/plugin_run.go | 95 + .../v2/go/models/plugin_run_all_of1.go | 43 + .../open-api/v2/go/models/plugin_run_data.go | 61 + .../netlify/open-api/v2/go/models/purge.go | 49 + .../open-api/v2/go/models/repo_info.go | 85 + .../netlify/open-api/v2/go/models/service.go | 79 + .../open-api/v2/go/models/service_instance.go | 76 + .../models/set_env_var_value_params_body.go | 118 + .../netlify/open-api/v2/go/models/site.go | 239 + .../v2/go/models/site_default_hooks_data.go | 43 + .../open-api/v2/go/models/site_function.go | 58 + .../v2/go/models/site_processing_settings.go | 71 + .../models/site_processing_settings_html.go | 43 + .../models/site_processing_settings_images.go | 43 + .../open-api/v2/go/models/site_setup.go | 95 + .../v2/go/models/site_setup_all_of1.go | 71 + .../open-api/v2/go/models/sni_certificate.go | 55 + .../netlify/open-api/v2/go/models/snippet.go | 61 + .../open-api/v2/go/models/split_test_setup.go | 43 + .../v2/go/models/split_test_swagger.go | 67 + .../open-api/v2/go/models/split_tests.go | 45 + .../open-api/v2/go/models/submission.go | 76 + .../netlify/open-api/v2/go/models/ticket.go | 52 + .../models/traffic_rules_aggregate_config.go | 80 + ...affic_rules_aggregate_config_keys_items.go | 100 + .../v2/go/models/traffic_rules_config.go | 71 + .../go/models/traffic_rules_config_action.go | 74 + .../traffic_rules_config_action_config.go | 99 + .../models/traffic_rules_rate_limit_config.go | 103 + .../go/models/update_env_var_params_body.go | 132 + .../netlify/open-api/v2/go/models/user.go | 101 + .../v2/go/models/user_onboarding_progress.go | 43 + .../netlify/open-api/v2/go/plumbing/doc.go | 6 + .../open-api/v2/go/plumbing/netlify_client.go | 111 + .../add_member_to_account_parameters.go | 154 + .../add_member_to_account_responses.go | 117 + .../operations/cancel_account_parameters.go | 133 + .../operations/cancel_account_responses.go | 107 + .../cancel_site_deploy_parameters.go | 133 + .../cancel_site_deploy_responses.go | 119 + 
.../configure_dns_for_site_parameters.go | 133 + .../configure_dns_for_site_responses.go | 117 + .../operations/create_account_parameters.go | 136 + .../operations/create_account_responses.go | 119 + .../create_deploy_key_parameters.go | 113 + .../operations/create_deploy_key_responses.go | 119 + .../create_dns_record_parameters.go | 154 + .../operations/create_dns_record_responses.go | 119 + .../operations/create_dns_zone_parameters.go | 136 + .../operations/create_dns_zone_responses.go | 119 + .../operations/create_env_vars_parameters.go | 189 + .../operations/create_env_vars_responses.go | 117 + .../create_hook_by_site_id_parameters.go | 158 + .../create_hook_by_site_id_responses.go | 119 + .../create_plugin_run_parameters.go | 154 + .../operations/create_plugin_run_responses.go | 119 + .../create_service_instance_parameters.go | 170 + .../create_service_instance_responses.go | 119 + .../create_site_asset_parameters.go | 229 + .../operations/create_site_asset_responses.go | 119 + .../create_site_build_hook_parameters.go | 154 + .../create_site_build_hook_responses.go | 119 + .../create_site_build_parameters.go | 154 + .../operations/create_site_build_responses.go | 119 + .../create_site_deploy_parameters.go | 329 + .../create_site_deploy_responses.go | 119 + .../create_site_dev_server_hook_parameters.go | 154 + .../create_site_dev_server_hook_responses.go | 119 + .../create_site_dev_server_parameters.go | 162 + .../create_site_dev_server_responses.go | 117 + .../create_site_in_team_parameters.go | 184 + .../create_site_in_team_responses.go | 119 + .../operations/create_site_parameters.go | 166 + .../operations/create_site_responses.go | 119 + .../create_site_snippet_parameters.go | 154 + .../create_site_snippet_responses.go | 119 + .../create_split_test_swagger_parameters.go | 154 + .../create_split_test_swagger_responses.go | 119 + .../operations/create_ticket_parameters.go | 137 + .../operations/create_ticket_responses.go | 119 + 
.../delete_deploy_key_parameters.go | 133 + .../operations/delete_deploy_key_responses.go | 107 + .../operations/delete_deploy_parameters.go | 133 + .../operations/delete_deploy_responses.go | 107 + .../delete_dns_record_parameters.go | 151 + .../operations/delete_dns_record_responses.go | 107 + .../operations/delete_dns_zone_parameters.go | 133 + .../operations/delete_dns_zone_responses.go | 107 + .../operations/delete_env_var_parameters.go | 189 + .../operations/delete_env_var_responses.go | 107 + .../delete_env_var_value_parameters.go | 210 + .../delete_env_var_value_responses.go | 107 + .../operations/delete_hook_parameters.go | 133 + .../operations/delete_hook_responses.go | 55 + .../delete_service_instance_parameters.go | 169 + .../delete_service_instance_responses.go | 107 + .../delete_site_asset_parameters.go | 151 + .../operations/delete_site_asset_responses.go | 107 + .../delete_site_build_hook_parameters.go | 151 + .../delete_site_build_hook_responses.go | 107 + .../delete_site_deploy_parameters.go | 151 + .../delete_site_deploy_responses.go | 107 + .../delete_site_dev_server_hook_parameters.go | 151 + .../delete_site_dev_server_hook_responses.go | 107 + .../delete_site_dev_servers_parameters.go | 162 + .../delete_site_dev_servers_responses.go | 107 + .../operations/delete_site_form_parameters.go | 151 + .../operations/delete_site_form_responses.go | 107 + .../operations/delete_site_parameters.go | 133 + .../operations/delete_site_responses.go | 107 + .../delete_site_snippet_parameters.go | 151 + .../delete_site_snippet_responses.go | 107 + .../delete_submission_parameters.go | 133 + .../operations/delete_submission_responses.go | 107 + .../disable_split_test_swagger_parameters.go | 151 + .../disable_split_test_swagger_responses.go | 107 + .../operations/enable_hook_parameters.go | 133 + .../operations/enable_hook_responses.go | 119 + .../enable_split_test_swagger_parameters.go | 151 + .../enable_split_test_swagger_responses.go | 107 + 
.../operations/exchange_ticket_parameters.go | 133 + .../operations/exchange_ticket_responses.go | 119 + .../get_account_build_status_parameters.go | 133 + .../get_account_build_status_responses.go | 117 + .../get_account_member_parameters.go | 151 + .../get_account_member_responses.go | 119 + .../operations/get_account_parameters.go | 133 + .../operations/get_account_responses.go | 117 + .../operations/get_current_user_parameters.go | 113 + .../operations/get_current_user_responses.go | 117 + .../operations/get_deploy_key_parameters.go | 133 + .../operations/get_deploy_key_responses.go | 119 + .../operations/get_deploy_parameters.go | 133 + .../operations/get_deploy_responses.go | 119 + .../operations/get_dns_for_site_parameters.go | 133 + .../operations/get_dns_for_site_responses.go | 117 + .../operations/get_dns_records_parameters.go | 133 + .../operations/get_dns_records_responses.go | 117 + .../operations/get_dns_zone_parameters.go | 133 + .../operations/get_dns_zone_responses.go | 119 + .../operations/get_dns_zones_parameters.go | 144 + .../operations/get_dns_zones_responses.go | 117 + .../operations/get_env_var_parameters.go | 189 + .../operations/get_env_var_responses.go | 119 + .../operations/get_env_vars_parameters.go | 232 + .../operations/get_env_vars_responses.go | 117 + .../operations/get_hook_parameters.go | 133 + .../plumbing/operations/get_hook_responses.go | 119 + .../get_individual_dns_record_parameters.go | 151 + .../get_individual_dns_record_responses.go | 119 + .../get_latest_plugin_runs_parameters.go | 184 + .../get_latest_plugin_runs_responses.go | 117 + .../operations/get_services_parameters.go | 144 + .../operations/get_services_responses.go | 117 + .../get_site_asset_info_parameters.go | 151 + .../get_site_asset_info_responses.go | 119 + ..._site_asset_public_signature_parameters.go | 151 + ...t_site_asset_public_signature_responses.go | 119 + .../get_site_build_hook_parameters.go | 151 + .../get_site_build_hook_responses.go | 119 + 
.../operations/get_site_build_parameters.go | 133 + .../operations/get_site_build_responses.go | 119 + .../operations/get_site_deploy_parameters.go | 151 + .../operations/get_site_deploy_responses.go | 119 + .../get_site_dev_server_hook_parameters.go | 151 + .../get_site_dev_server_hook_responses.go | 119 + .../get_site_dev_server_parameters.go | 151 + .../get_site_dev_server_responses.go | 70 + .../get_site_env_vars_parameters.go | 200 + .../operations/get_site_env_vars_responses.go | 117 + .../get_site_file_by_path_name_parameters.go | 151 + .../get_site_file_by_path_name_responses.go | 119 + .../get_site_metadata_parameters.go | 133 + .../operations/get_site_metadata_responses.go | 117 + .../operations/get_site_parameters.go | 162 + .../plumbing/operations/get_site_responses.go | 119 + .../operations/get_site_snippet_parameters.go | 151 + .../operations/get_site_snippet_responses.go | 119 + .../get_split_test_swagger_parameters.go | 151 + .../get_split_test_swagger_responses.go | 119 + .../operations/get_split_tests_parameters.go | 133 + .../operations/get_split_tests_responses.go | 117 + .../list_account_audit_events_parameters.go | 250 + .../list_account_audit_events_responses.go | 117 + .../list_account_types_for_user_parameters.go | 113 + .../list_account_types_for_user_responses.go | 117 + .../list_accounts_for_user_parameters.go | 113 + .../list_accounts_for_user_responses.go | 117 + .../operations/list_deploy_keys_parameters.go | 113 + .../operations/list_deploy_keys_responses.go | 117 + .../list_form_submission_parameters.go | 221 + .../list_form_submission_responses.go | 117 + .../list_form_submissions_parameters.go | 192 + .../list_form_submissions_responses.go | 117 + .../operations/list_forms_parameters.go | 143 + .../operations/list_forms_responses.go | 115 + .../operations/list_hook_types_parameters.go | 113 + .../operations/list_hook_types_responses.go | 117 + .../list_hooks_by_site_id_parameters.go | 137 + .../list_hooks_by_site_id_responses.go | 
117 + .../list_members_for_account_parameters.go | 133 + .../list_members_for_account_responses.go | 117 + ...ist_payment_methods_for_user_parameters.go | 113 + ...list_payment_methods_for_user_responses.go | 117 + ...t_service_instances_for_site_parameters.go | 133 + ...st_service_instances_for_site_responses.go | 117 + .../operations/list_site_assets_parameters.go | 133 + .../operations/list_site_assets_responses.go | 117 + .../list_site_build_hooks_parameters.go | 133 + .../list_site_build_hooks_responses.go | 117 + .../operations/list_site_builds_parameters.go | 192 + .../operations/list_site_builds_responses.go | 117 + .../list_site_deployed_branches_parameters.go | 133 + .../list_site_deployed_branches_responses.go | 117 + .../list_site_deploys_parameters.go | 337 + .../operations/list_site_deploys_responses.go | 117 + .../list_site_dev_server_hooks_parameters.go | 133 + .../list_site_dev_server_hooks_responses.go | 117 + .../list_site_dev_servers_parameters.go | 192 + .../list_site_dev_servers_responses.go | 117 + .../operations/list_site_files_parameters.go | 133 + .../operations/list_site_files_responses.go | 117 + .../operations/list_site_forms_parameters.go | 133 + .../operations/list_site_forms_responses.go | 117 + .../list_site_snippets_parameters.go | 133 + .../list_site_snippets_responses.go | 117 + .../list_site_submissions_parameters.go | 192 + .../list_site_submissions_responses.go | 117 + .../list_sites_for_account_parameters.go | 221 + .../list_sites_for_account_responses.go | 117 + .../operations/list_sites_parameters.go | 232 + .../operations/list_sites_responses.go | 117 + .../operations/lock_deploy_parameters.go | 133 + .../operations/lock_deploy_responses.go | 119 + .../mark_dev_server_activity_parameters.go | 151 + .../mark_dev_server_activity_responses.go | 55 + .../notify_build_start_parameters.go | 191 + .../notify_build_start_responses.go | 107 + .../plumbing/operations/operations_client.go | 4834 + 
...ovision_site_tls_certificate_parameters.go | 220 + ...rovision_site_tls_certificate_responses.go | 119 + .../operations/purge_cache_parameters.go | 136 + .../operations/purge_cache_responses.go | 111 + .../remove_account_member_parameters.go | 151 + .../remove_account_member_responses.go | 107 + .../restore_site_deploy_parameters.go | 151 + .../restore_site_deploy_responses.go | 119 + .../rollback_site_deploy_parameters.go | 133 + .../rollback_site_deploy_responses.go | 107 + .../search_site_functions_parameters.go | 162 + .../search_site_functions_responses.go | 117 + .../set_env_var_value_parameters.go | 210 + .../operations/set_env_var_value_responses.go | 119 + .../show_service_instance_parameters.go | 169 + .../show_service_instance_responses.go | 119 + .../show_service_manifest_parameters.go | 133 + .../show_service_manifest_responses.go | 117 + .../operations/show_service_parameters.go | 133 + .../operations/show_service_responses.go | 119 + .../show_site_tls_certificate_parameters.go | 133 + .../show_site_tls_certificate_responses.go | 119 + .../operations/show_ticket_parameters.go | 133 + .../operations/show_ticket_responses.go | 119 + .../transfer_dns_zone_parameters.go | 208 + .../operations/transfer_dns_zone_responses.go | 119 + .../operations/unlink_site_repo_parameters.go | 133 + .../operations/unlink_site_repo_responses.go | 98 + .../operations/unlock_deploy_parameters.go | 133 + .../operations/unlock_deploy_responses.go | 119 + .../update_account_member_parameters.go | 172 + .../update_account_member_responses.go | 119 + .../operations/update_account_parameters.go | 154 + .../operations/update_account_responses.go | 119 + .../operations/update_env_var_parameters.go | 210 + .../operations/update_env_var_responses.go | 119 + .../operations/update_hook_parameters.go | 154 + .../operations/update_hook_responses.go | 119 + .../operations/update_plugin_parameters.go | 172 + .../operations/update_plugin_responses.go | 119 + 
.../update_service_instance_parameters.go | 188 + .../update_service_instance_responses.go | 107 + .../update_site_asset_parameters.go | 173 + .../operations/update_site_asset_responses.go | 119 + .../update_site_build_hook_parameters.go | 172 + .../update_site_build_hook_responses.go | 107 + .../update_site_build_log_parameters.go | 154 + .../update_site_build_log_responses.go | 107 + .../update_site_deploy_parameters.go | 172 + .../update_site_deploy_responses.go | 119 + .../update_site_dev_server_hook_parameters.go | 172 + .../update_site_dev_server_hook_responses.go | 107 + .../update_site_metadata_parameters.go | 154 + .../update_site_metadata_responses.go | 107 + .../operations/update_site_parameters.go | 154 + .../operations/update_site_responses.go | 119 + .../update_site_snippet_parameters.go | 172 + .../update_site_snippet_responses.go | 107 + .../update_split_test_swagger_parameters.go | 172 + .../update_split_test_swagger_responses.go | 119 + .../upload_deploy_file_parameters.go | 201 + .../upload_deploy_file_responses.go | 119 + .../upload_deploy_function_parameters.go | 310 + .../upload_deploy_function_responses.go | 119 + .../open-api/v2/go/porcelain/assets.go | 181 + .../netlify/open-api/v2/go/porcelain/auth.go | 80 + .../v2/go/porcelain/context/context.go | 34 + .../open-api/v2/go/porcelain/deploy.go | 1067 + .../open-api/v2/go/porcelain/deploy_keys.go | 17 + .../open-api/v2/go/porcelain/deploy_unix.go | 7 + .../v2/go/porcelain/deploy_windows.go | 11 + .../netlify/open-api/v2/go/porcelain/forms.go | 29 + .../v2/go/porcelain/functions_manifest.go | 56 + .../open-api/v2/go/porcelain/http/http.go | 117 + .../v2/go/porcelain/netlify_client.go | 68 + .../netlify/open-api/v2/go/porcelain/site.go | 132 + vendor/github.com/nilslice/jwt/LICENSE | 19 + vendor/github.com/nilslice/jwt/README.md | 43 + vendor/github.com/nilslice/jwt/doc.go | 40 + vendor/github.com/nilslice/jwt/jwt.go | 208 + vendor/github.com/oklog/ulid/.gitignore | 29 + 
vendor/github.com/oklog/ulid/.travis.yml | 16 + vendor/github.com/oklog/ulid/AUTHORS.md | 2 + vendor/github.com/oklog/ulid/CHANGELOG.md | 33 + vendor/github.com/oklog/ulid/CONTRIBUTING.md | 17 + vendor/github.com/oklog/ulid/Gopkg.lock | 15 + vendor/github.com/oklog/ulid/Gopkg.toml | 26 + vendor/github.com/oklog/ulid/LICENSE | 201 + vendor/github.com/oklog/ulid/README.md | 150 + vendor/github.com/oklog/ulid/ulid.go | 614 + .../opentracing/opentracing-go/.gitignore | 1 + .../opentracing/opentracing-go/.travis.yml | 20 + .../opentracing/opentracing-go/CHANGELOG.md | 63 + .../opentracing/opentracing-go/LICENSE | 201 + .../opentracing/opentracing-go/Makefile | 20 + .../opentracing/opentracing-go/README.md | 171 + .../opentracing/opentracing-go/ext.go | 24 + .../opentracing/opentracing-go/ext/field.go | 17 + .../opentracing/opentracing-go/ext/tags.go | 215 + .../opentracing-go/globaltracer.go | 42 + .../opentracing/opentracing-go/gocontext.go | 65 + .../opentracing/opentracing-go/log/field.go | 282 + .../opentracing/opentracing-go/log/util.go | 61 + .../opentracing/opentracing-go/noop.go | 64 + .../opentracing/opentracing-go/propagation.go | 176 + .../opentracing/opentracing-go/span.go | 189 + .../opentracing/opentracing-go/tracer.go | 304 + vendor/github.com/pbnjay/memory/LICENSE | 29 + vendor/github.com/pbnjay/memory/README.md | 41 + vendor/github.com/pbnjay/memory/doc.go | 24 + vendor/github.com/pbnjay/memory/memory_bsd.go | 19 + .../github.com/pbnjay/memory/memory_darwin.go | 49 + .../github.com/pbnjay/memory/memory_linux.go | 29 + .../pbnjay/memory/memory_windows.go | 60 + vendor/github.com/pbnjay/memory/memsysctl.go | 21 + vendor/github.com/pbnjay/memory/stub.go | 10 + .../pelletier/go-toml/v2/.dockerignore | 2 + .../pelletier/go-toml/v2/.gitattributes | 4 + .../pelletier/go-toml/v2/.gitignore | 7 + .../pelletier/go-toml/v2/.golangci.toml | 84 + .../pelletier/go-toml/v2/.goreleaser.yaml | 127 + .../pelletier/go-toml/v2/CONTRIBUTING.md | 193 + 
.../pelletier/go-toml/v2/Dockerfile | 5 + .../github.com/pelletier/go-toml/v2/LICENSE | 22 + .../github.com/pelletier/go-toml/v2/README.md | 576 + .../pelletier/go-toml/v2/SECURITY.md | 16 + vendor/github.com/pelletier/go-toml/v2/ci.sh | 284 + .../github.com/pelletier/go-toml/v2/decode.go | 550 + vendor/github.com/pelletier/go-toml/v2/doc.go | 2 + .../github.com/pelletier/go-toml/v2/errors.go | 252 + .../go-toml/v2/internal/characters/ascii.go | 42 + .../go-toml/v2/internal/characters/utf8.go | 199 + .../go-toml/v2/internal/danger/danger.go | 65 + .../go-toml/v2/internal/danger/typeid.go | 23 + .../go-toml/v2/internal/tracker/key.go | 48 + .../go-toml/v2/internal/tracker/seen.go | 358 + .../go-toml/v2/internal/tracker/tracker.go | 1 + .../pelletier/go-toml/v2/localtime.go | 122 + .../pelletier/go-toml/v2/marshaler.go | 1133 + .../github.com/pelletier/go-toml/v2/strict.go | 107 + .../github.com/pelletier/go-toml/v2/toml.abnf | 243 + .../github.com/pelletier/go-toml/v2/types.go | 14 + .../pelletier/go-toml/v2/unmarshaler.go | 1334 + .../pelletier/go-toml/v2/unstable/ast.go | 136 + .../pelletier/go-toml/v2/unstable/builder.go | 71 + .../pelletier/go-toml/v2/unstable/doc.go | 3 + .../pelletier/go-toml/v2/unstable/kind.go | 71 + .../pelletier/go-toml/v2/unstable/parser.go | 1245 + .../pelletier/go-toml/v2/unstable/scanner.go | 270 + .../go-toml/v2/unstable/unmarshaler.go | 7 + vendor/github.com/philhofer/fwd/LICENSE.md | 7 + vendor/github.com/philhofer/fwd/README.md | 315 + vendor/github.com/philhofer/fwd/reader.go | 383 + vendor/github.com/philhofer/fwd/writer.go | 224 + .../philhofer/fwd/writer_appengine.go | 5 + .../github.com/philhofer/fwd/writer_unsafe.go | 18 + vendor/github.com/pkg/errors/.gitignore | 24 + vendor/github.com/pkg/errors/.travis.yml | 10 + vendor/github.com/pkg/errors/LICENSE | 23 + vendor/github.com/pkg/errors/Makefile | 44 + vendor/github.com/pkg/errors/README.md | 59 + vendor/github.com/pkg/errors/appveyor.yml | 32 + 
vendor/github.com/pkg/errors/errors.go | 288 + vendor/github.com/pkg/errors/go113.go | 38 + vendor/github.com/pkg/errors/stack.go | 177 + .../github.com/rogpeppe/go-internal/LICENSE | 27 + .../rogpeppe/go-internal/fmtsort/mapelem.go | 20 + .../rogpeppe/go-internal/fmtsort/sort.go | 209 + vendor/github.com/rsc/goversion/LICENSE | 27 + .../github.com/rsc/goversion/version/asm.go | 349 + .../github.com/rsc/goversion/version/exe.go | 317 + .../github.com/rsc/goversion/version/read.go | 243 + vendor/github.com/rwcarlsen/goexif/LICENSE | 24 + .../rwcarlsen/goexif/exif/README.md | 4 + .../github.com/rwcarlsen/goexif/exif/exif.go | 655 + .../rwcarlsen/goexif/exif/fields.go | 309 + .../rwcarlsen/goexif/exif/sample1.jpg | Bin 0 -> 80603 bytes .../rwcarlsen/goexif/tiff/sample1.tif | Bin 0 -> 18382 bytes .../github.com/rwcarlsen/goexif/tiff/tag.go | 445 + .../github.com/rwcarlsen/goexif/tiff/tiff.go | 153 + vendor/github.com/sirupsen/logrus/.gitignore | 2 + .../github.com/sirupsen/logrus/.golangci.yml | 40 + vendor/github.com/sirupsen/logrus/.travis.yml | 17 + .../github.com/sirupsen/logrus/CHANGELOG.md | 223 + vendor/github.com/sirupsen/logrus/LICENSE | 21 + vendor/github.com/sirupsen/logrus/README.md | 513 + vendor/github.com/sirupsen/logrus/alt_exit.go | 76 + .../github.com/sirupsen/logrus/appveyor.yml | 14 + vendor/github.com/sirupsen/logrus/doc.go | 26 + vendor/github.com/sirupsen/logrus/entry.go | 426 + vendor/github.com/sirupsen/logrus/exported.go | 225 + .../github.com/sirupsen/logrus/formatter.go | 78 + vendor/github.com/sirupsen/logrus/hooks.go | 34 + .../sirupsen/logrus/json_formatter.go | 125 + vendor/github.com/sirupsen/logrus/logger.go | 352 + vendor/github.com/sirupsen/logrus/logrus.go | 186 + .../logrus/terminal_check_appengine.go | 11 + .../sirupsen/logrus/terminal_check_bsd.go | 13 + .../sirupsen/logrus/terminal_check_js.go | 7 + .../logrus/terminal_check_no_terminal.go | 11 + .../logrus/terminal_check_notappengine.go | 17 + 
.../sirupsen/logrus/terminal_check_solaris.go | 11 + .../sirupsen/logrus/terminal_check_unix.go | 13 + .../sirupsen/logrus/terminal_check_windows.go | 34 + .../sirupsen/logrus/text_formatter.go | 334 + vendor/github.com/sirupsen/logrus/writer.go | 70 + vendor/github.com/spf13/afero/.gitignore | 2 + vendor/github.com/spf13/afero/LICENSE.txt | 174 + vendor/github.com/spf13/afero/README.md | 442 + vendor/github.com/spf13/afero/afero.go | 111 + vendor/github.com/spf13/afero/appveyor.yml | 10 + vendor/github.com/spf13/afero/basepath.go | 222 + .../github.com/spf13/afero/cacheOnReadFs.go | 315 + vendor/github.com/spf13/afero/const_bsds.go | 23 + .../github.com/spf13/afero/const_win_unix.go | 22 + .../github.com/spf13/afero/copyOnWriteFs.go | 327 + vendor/github.com/spf13/afero/httpFs.go | 114 + .../spf13/afero/internal/common/adapters.go | 27 + vendor/github.com/spf13/afero/iofs.go | 298 + vendor/github.com/spf13/afero/ioutil.go | 243 + vendor/github.com/spf13/afero/lstater.go | 27 + vendor/github.com/spf13/afero/match.go | 110 + vendor/github.com/spf13/afero/mem/dir.go | 37 + vendor/github.com/spf13/afero/mem/dirmap.go | 43 + vendor/github.com/spf13/afero/mem/file.go | 359 + vendor/github.com/spf13/afero/memmap.go | 465 + vendor/github.com/spf13/afero/os.go | 113 + vendor/github.com/spf13/afero/path.go | 106 + vendor/github.com/spf13/afero/readonlyfs.go | 96 + vendor/github.com/spf13/afero/regexpfs.go | 223 + vendor/github.com/spf13/afero/symlink.go | 55 + vendor/github.com/spf13/afero/unionFile.go | 330 + vendor/github.com/spf13/afero/util.go | 329 + vendor/github.com/spf13/cast/.gitignore | 25 + vendor/github.com/spf13/cast/LICENSE | 21 + vendor/github.com/spf13/cast/Makefile | 40 + vendor/github.com/spf13/cast/README.md | 75 + vendor/github.com/spf13/cast/cast.go | 176 + vendor/github.com/spf13/cast/caste.go | 1510 + .../spf13/cast/timeformattype_string.go | 27 + vendor/github.com/spf13/fsync/.gitignore | 25 + vendor/github.com/spf13/fsync/LICENSE | 19 + 
vendor/github.com/spf13/fsync/README.md | 2 + vendor/github.com/spf13/fsync/fsync.go | 370 + vendor/github.com/steveyen/gtreap/.gitignore | 5 + vendor/github.com/steveyen/gtreap/LICENSE | 20 + vendor/github.com/steveyen/gtreap/README.md | 90 + vendor/github.com/steveyen/gtreap/treap.go | 188 + .../tdewolff/minify/v2/.gitattributes | 2 + .../github.com/tdewolff/minify/v2/.gitignore | 32 + .../tdewolff/minify/v2/.golangci.yml | 16 + .../github.com/tdewolff/minify/v2/Dockerfile | 17 + vendor/github.com/tdewolff/minify/v2/LICENSE | 22 + vendor/github.com/tdewolff/minify/v2/Makefile | 58 + .../github.com/tdewolff/minify/v2/README.md | 735 + .../github.com/tdewolff/minify/v2/common.go | 524 + .../github.com/tdewolff/minify/v2/css/css.go | 1559 + .../github.com/tdewolff/minify/v2/css/hash.go | 1392 + .../tdewolff/minify/v2/css/table.go | 198 + .../github.com/tdewolff/minify/v2/css/util.go | 55 + .../tdewolff/minify/v2/html/buffer.go | 139 + .../tdewolff/minify/v2/html/hash.go | 610 + .../tdewolff/minify/v2/html/html.go | 531 + .../tdewolff/minify/v2/html/table.go | 1389 + vendor/github.com/tdewolff/minify/v2/js/js.go | 1267 + .../tdewolff/minify/v2/js/stmtlist.go | 349 + .../github.com/tdewolff/minify/v2/js/util.go | 1456 + .../github.com/tdewolff/minify/v2/js/vars.go | 453 + .../tdewolff/minify/v2/json/json.go | 73 + .../github.com/tdewolff/minify/v2/minify.go | 376 + .../tdewolff/minify/v2/svg/buffer.go | 136 + .../github.com/tdewolff/minify/v2/svg/hash.go | 414 + .../tdewolff/minify/v2/svg/pathdata.go | 447 + .../github.com/tdewolff/minify/v2/svg/svg.go | 311 + .../tdewolff/minify/v2/svg/table.go | 82 + .../tdewolff/minify/v2/xml/buffer.go | 86 + .../tdewolff/minify/v2/xml/table.go | 14 + .../github.com/tdewolff/minify/v2/xml/xml.go | 167 + .../tdewolff/parse/v2/.gitattributes | 1 + .../github.com/tdewolff/parse/v2/.gitignore | 5 + .../tdewolff/parse/v2/.golangci.yml | 16 + .../github.com/tdewolff/parse/v2/LICENSE.md | 22 + vendor/github.com/tdewolff/parse/v2/README.md 
| 64 + .../tdewolff/parse/v2/buffer/buffer.go | 12 + .../tdewolff/parse/v2/buffer/lexer.go | 164 + .../tdewolff/parse/v2/buffer/reader.go | 44 + .../tdewolff/parse/v2/buffer/streamlexer.go | 223 + .../tdewolff/parse/v2/buffer/writer.go | 65 + vendor/github.com/tdewolff/parse/v2/common.go | 542 + .../tdewolff/parse/v2/css/README.md | 170 + .../github.com/tdewolff/parse/v2/css/hash.go | 75 + .../github.com/tdewolff/parse/v2/css/lex.go | 698 + .../github.com/tdewolff/parse/v2/css/parse.go | 493 + .../github.com/tdewolff/parse/v2/css/util.go | 47 + vendor/github.com/tdewolff/parse/v2/error.go | 47 + .../tdewolff/parse/v2/html/README.md | 98 + .../github.com/tdewolff/parse/v2/html/hash.go | 81 + .../github.com/tdewolff/parse/v2/html/lex.go | 589 + .../tdewolff/parse/v2/html/parse.go | 403 + .../github.com/tdewolff/parse/v2/html/util.go | 109 + vendor/github.com/tdewolff/parse/v2/input.go | 173 + .../github.com/tdewolff/parse/v2/js/README.md | 80 + vendor/github.com/tdewolff/parse/v2/js/ast.go | 2373 + vendor/github.com/tdewolff/parse/v2/js/lex.go | 807 + .../github.com/tdewolff/parse/v2/js/parse.go | 2363 + .../github.com/tdewolff/parse/v2/js/table.go | 142 + .../tdewolff/parse/v2/js/tokentype.go | 404 + .../github.com/tdewolff/parse/v2/js/util.go | 46 + .../github.com/tdewolff/parse/v2/js/walk.go | 288 + .../tdewolff/parse/v2/json/README.md | 81 + .../tdewolff/parse/v2/json/parse.go | 308 + .../github.com/tdewolff/parse/v2/position.go | 95 + .../tdewolff/parse/v2/strconv/decimal.go | 72 + .../tdewolff/parse/v2/strconv/float.go | 257 + .../tdewolff/parse/v2/strconv/int.go | 116 + .../tdewolff/parse/v2/strconv/number.go | 125 + vendor/github.com/tdewolff/parse/v2/util.go | 212 + .../tdewolff/parse/v2/xml/README.md | 101 + .../github.com/tdewolff/parse/v2/xml/lex.go | 340 + .../github.com/tdewolff/parse/v2/xml/util.go | 87 + vendor/github.com/tidwall/gjson/LICENSE | 20 + vendor/github.com/tidwall/gjson/README.md | 488 + vendor/github.com/tidwall/gjson/SYNTAX.md | 360 + 
vendor/github.com/tidwall/gjson/gjson.go | 3494 + vendor/github.com/tidwall/gjson/logo.png | Bin 0 -> 15936 bytes vendor/github.com/tidwall/match/LICENSE | 20 + vendor/github.com/tidwall/match/README.md | 29 + vendor/github.com/tidwall/match/match.go | 237 + vendor/github.com/tidwall/pretty/LICENSE | 20 + vendor/github.com/tidwall/pretty/README.md | 122 + vendor/github.com/tidwall/pretty/pretty.go | 674 + vendor/github.com/tidwall/sjson/LICENSE | 21 + vendor/github.com/tidwall/sjson/README.md | 278 + vendor/github.com/tidwall/sjson/logo.png | Bin 0 -> 16874 bytes vendor/github.com/tidwall/sjson/sjson.go | 737 + vendor/github.com/tinylib/msgp/LICENSE | 8 + .../tinylib/msgp/msgp/advise_linux.go | 24 + .../tinylib/msgp/msgp/advise_other.go | 17 + .../github.com/tinylib/msgp/msgp/circular.go | 39 + vendor/github.com/tinylib/msgp/msgp/defs.go | 142 + vendor/github.com/tinylib/msgp/msgp/edit.go | 242 + vendor/github.com/tinylib/msgp/msgp/elsize.go | 99 + vendor/github.com/tinylib/msgp/msgp/errors.go | 314 + .../github.com/tinylib/msgp/msgp/extension.go | 549 + vendor/github.com/tinylib/msgp/msgp/file.go | 92 + .../github.com/tinylib/msgp/msgp/file_port.go | 47 + .../github.com/tinylib/msgp/msgp/integers.go | 174 + vendor/github.com/tinylib/msgp/msgp/json.go | 542 + .../tinylib/msgp/msgp/json_bytes.go | 363 + vendor/github.com/tinylib/msgp/msgp/number.go | 267 + vendor/github.com/tinylib/msgp/msgp/purego.go | 15 + vendor/github.com/tinylib/msgp/msgp/read.go | 1358 + .../tinylib/msgp/msgp/read_bytes.go | 1197 + vendor/github.com/tinylib/msgp/msgp/size.go | 38 + vendor/github.com/tinylib/msgp/msgp/unsafe.go | 41 + vendor/github.com/tinylib/msgp/msgp/write.go | 845 + .../tinylib/msgp/msgp/write_bytes.go | 411 + vendor/github.com/willf/bitset/.gitignore | 26 + vendor/github.com/willf/bitset/.travis.yml | 37 + vendor/github.com/willf/bitset/LICENSE | 27 + vendor/github.com/willf/bitset/Makefile | 197 + vendor/github.com/willf/bitset/README.md | 96 + 
vendor/github.com/willf/bitset/bitset.go | 877 + vendor/github.com/willf/bitset/popcnt.go | 53 + vendor/github.com/willf/bitset/popcnt_19.go | 45 + .../github.com/willf/bitset/popcnt_amd64.go | 68 + vendor/github.com/willf/bitset/popcnt_amd64.s | 104 + .../github.com/willf/bitset/popcnt_generic.go | 24 + .../willf/bitset/trailing_zeros_18.go | 14 + .../willf/bitset/trailing_zeros_19.go | 9 + .../github.com/yuin/goldmark-emoji/.gitignore | 15 + vendor/github.com/yuin/goldmark-emoji/LICENSE | 21 + .../github.com/yuin/goldmark-emoji/README.md | 71 + .../yuin/goldmark-emoji/ast/emoji.go | 42 + .../goldmark-emoji/definition/definition.go | 106 + .../yuin/goldmark-emoji/definition/github.go | 1831 + .../github.com/yuin/goldmark-emoji/emoji.go | 360 + vendor/github.com/yuin/goldmark/.gitignore | 19 + vendor/github.com/yuin/goldmark/.golangci.yml | 105 + vendor/github.com/yuin/goldmark/LICENSE | 21 + vendor/github.com/yuin/goldmark/Makefile | 13 + vendor/github.com/yuin/goldmark/README.md | 565 + vendor/github.com/yuin/goldmark/ast/ast.go | 508 + vendor/github.com/yuin/goldmark/ast/block.go | 508 + vendor/github.com/yuin/goldmark/ast/inline.go | 549 + .../goldmark/extension/ast/definition_list.go | 83 + .../yuin/goldmark/extension/ast/footnote.go | 138 + .../goldmark/extension/ast/strikethrough.go | 29 + .../yuin/goldmark/extension/ast/table.go | 158 + .../yuin/goldmark/extension/ast/tasklist.go | 35 + .../github.com/yuin/goldmark/extension/cjk.go | 72 + .../goldmark/extension/definition_list.go | 274 + .../yuin/goldmark/extension/footnote.go | 691 + .../github.com/yuin/goldmark/extension/gfm.go | 18 + .../yuin/goldmark/extension/linkify.go | 322 + .../yuin/goldmark/extension/package.go | 2 + .../yuin/goldmark/extension/strikethrough.go | 117 + .../yuin/goldmark/extension/table.go | 564 + .../yuin/goldmark/extension/tasklist.go | 120 + .../yuin/goldmark/extension/typographer.go | 348 + vendor/github.com/yuin/goldmark/markdown.go | 140 + 
.../yuin/goldmark/parser/attribute.go | 329 + .../yuin/goldmark/parser/atx_heading.go | 248 + .../yuin/goldmark/parser/auto_link.go | 42 + .../yuin/goldmark/parser/blockquote.go | 69 + .../yuin/goldmark/parser/code_block.go | 100 + .../yuin/goldmark/parser/code_span.go | 84 + .../yuin/goldmark/parser/delimiter.go | 238 + .../yuin/goldmark/parser/emphasis.go | 50 + .../yuin/goldmark/parser/fcode_block.go | 121 + .../yuin/goldmark/parser/html_block.go | 229 + .../github.com/yuin/goldmark/parser/link.go | 410 + .../yuin/goldmark/parser/link_ref.go | 152 + .../github.com/yuin/goldmark/parser/list.go | 287 + .../yuin/goldmark/parser/list_item.go | 90 + .../yuin/goldmark/parser/paragraph.go | 72 + .../github.com/yuin/goldmark/parser/parser.go | 1259 + .../yuin/goldmark/parser/raw_html.go | 153 + .../yuin/goldmark/parser/setext_headings.go | 126 + .../yuin/goldmark/parser/thematic_break.go | 75 + .../yuin/goldmark/renderer/html/html.go | 1019 + .../yuin/goldmark/renderer/renderer.go | 174 + .../github.com/yuin/goldmark/text/package.go | 2 + .../github.com/yuin/goldmark/text/reader.go | 660 + .../github.com/yuin/goldmark/text/segment.go | 209 + .../yuin/goldmark/util/html5entities.go | 2143 + .../goldmark/util/unicode_case_folding.go | 1535 + vendor/github.com/yuin/goldmark/util/util.go | 982 + .../github.com/yuin/goldmark/util/util_cjk.go | 469 + .../yuin/goldmark/util/util_safe.go | 14 + .../yuin/goldmark/util/util_unsafe.go | 24 + vendor/go.etcd.io/bbolt/.gitignore | 10 + vendor/go.etcd.io/bbolt/LICENSE | 20 + vendor/go.etcd.io/bbolt/Makefile | 63 + vendor/go.etcd.io/bbolt/README.md | 967 + vendor/go.etcd.io/bbolt/bolt_386.go | 7 + vendor/go.etcd.io/bbolt/bolt_amd64.go | 7 + vendor/go.etcd.io/bbolt/bolt_arm.go | 7 + vendor/go.etcd.io/bbolt/bolt_arm64.go | 10 + vendor/go.etcd.io/bbolt/bolt_linux.go | 10 + vendor/go.etcd.io/bbolt/bolt_loong64.go | 10 + vendor/go.etcd.io/bbolt/bolt_mips64x.go | 10 + vendor/go.etcd.io/bbolt/bolt_mipsx.go | 10 + 
vendor/go.etcd.io/bbolt/bolt_openbsd.go | 16 + vendor/go.etcd.io/bbolt/bolt_ppc.go | 10 + vendor/go.etcd.io/bbolt/bolt_ppc64.go | 10 + vendor/go.etcd.io/bbolt/bolt_ppc64le.go | 10 + vendor/go.etcd.io/bbolt/bolt_riscv64.go | 10 + vendor/go.etcd.io/bbolt/bolt_s390x.go | 10 + vendor/go.etcd.io/bbolt/bolt_unix.go | 87 + vendor/go.etcd.io/bbolt/bolt_unix_aix.go | 91 + vendor/go.etcd.io/bbolt/bolt_unix_solaris.go | 88 + vendor/go.etcd.io/bbolt/bolt_windows.go | 117 + vendor/go.etcd.io/bbolt/boltsync_unix.go | 9 + vendor/go.etcd.io/bbolt/bucket.go | 799 + vendor/go.etcd.io/bbolt/compact.go | 119 + vendor/go.etcd.io/bbolt/cursor.go | 420 + vendor/go.etcd.io/bbolt/db.go | 1389 + vendor/go.etcd.io/bbolt/doc.go | 40 + vendor/go.etcd.io/bbolt/errors.go | 78 + vendor/go.etcd.io/bbolt/freelist.go | 405 + vendor/go.etcd.io/bbolt/freelist_hmap.go | 178 + vendor/go.etcd.io/bbolt/mlock_unix.go | 37 + vendor/go.etcd.io/bbolt/mlock_windows.go | 11 + vendor/go.etcd.io/bbolt/node.go | 610 + vendor/go.etcd.io/bbolt/page.go | 214 + vendor/go.etcd.io/bbolt/tx.go | 797 + vendor/go.etcd.io/bbolt/tx_check.go | 226 + vendor/go.etcd.io/bbolt/unsafe.go | 39 + vendor/go.mongodb.org/mongo-driver/LICENSE | 201 + .../go.mongodb.org/mongo-driver/bson/bson.go | 50 + .../bson/bsoncodec/array_codec.go | 55 + .../mongo-driver/bson/bsoncodec/bsoncodec.go | 382 + .../bson/bsoncodec/byte_slice_codec.go | 138 + .../bson/bsoncodec/codec_cache.go | 166 + .../bson/bsoncodec/cond_addr_codec.go | 63 + .../bson/bsoncodec/default_value_decoders.go | 1807 + .../bson/bsoncodec/default_value_encoders.go | 856 + .../mongo-driver/bson/bsoncodec/doc.go | 95 + .../bson/bsoncodec/empty_interface_codec.go | 173 + .../mongo-driver/bson/bsoncodec/map_codec.go | 343 + .../mongo-driver/bson/bsoncodec/mode.go | 65 + .../bson/bsoncodec/pointer_codec.go | 108 + .../mongo-driver/bson/bsoncodec/proxy.go | 14 + .../mongo-driver/bson/bsoncodec/registry.go | 524 + .../bson/bsoncodec/slice_codec.go | 214 + 
.../bson/bsoncodec/string_codec.go | 140 + .../bson/bsoncodec/struct_codec.go | 736 + .../bson/bsoncodec/struct_tag_parser.go | 148 + .../mongo-driver/bson/bsoncodec/time_codec.go | 151 + .../mongo-driver/bson/bsoncodec/types.go | 58 + .../mongo-driver/bson/bsoncodec/uint_codec.go | 202 + .../bsonoptions/byte_slice_codec_options.go | 49 + .../mongo-driver/bson/bsonoptions/doc.go | 8 + .../empty_interface_codec_options.go | 49 + .../bson/bsonoptions/map_codec_options.go | 82 + .../bson/bsonoptions/slice_codec_options.go | 49 + .../bson/bsonoptions/string_codec_options.go | 52 + .../bson/bsonoptions/struct_codec_options.go | 107 + .../bson/bsonoptions/time_codec_options.go | 49 + .../bson/bsonoptions/uint_codec_options.go | 49 + .../mongo-driver/bson/bsonrw/copier.go | 489 + .../mongo-driver/bson/bsonrw/doc.go | 9 + .../bson/bsonrw/extjson_parser.go | 806 + .../bson/bsonrw/extjson_reader.go | 653 + .../bson/bsonrw/extjson_tables.go | 223 + .../bson/bsonrw/extjson_wrappers.go | 492 + .../bson/bsonrw/extjson_writer.go | 750 + .../mongo-driver/bson/bsonrw/json_scanner.go | 533 + .../mongo-driver/bson/bsonrw/mode.go | 108 + .../mongo-driver/bson/bsonrw/reader.go | 65 + .../mongo-driver/bson/bsonrw/value_reader.go | 888 + .../mongo-driver/bson/bsonrw/value_writer.go | 640 + .../mongo-driver/bson/bsonrw/writer.go | 87 + .../mongo-driver/bson/bsontype/bsontype.go | 116 + .../mongo-driver/bson/decoder.go | 208 + .../go.mongodb.org/mongo-driver/bson/doc.go | 142 + .../mongo-driver/bson/encoder.go | 199 + .../mongo-driver/bson/marshal.go | 453 + .../mongo-driver/bson/primitive/decimal.go | 432 + .../mongo-driver/bson/primitive/objectid.go | 206 + .../mongo-driver/bson/primitive/primitive.go | 231 + .../mongo-driver/bson/primitive_codecs.go | 122 + .../go.mongodb.org/mongo-driver/bson/raw.go | 101 + .../mongo-driver/bson/raw_element.go | 48 + .../mongo-driver/bson/raw_value.go | 324 + .../mongo-driver/bson/registry.go | 47 + .../go.mongodb.org/mongo-driver/bson/types.go | 50 + 
.../mongo-driver/bson/unmarshal.go | 177 + .../mongo-driver/x/bsonx/bsoncore/array.go | 164 + .../x/bsonx/bsoncore/bson_arraybuilder.go | 201 + .../x/bsonx/bsoncore/bson_documentbuilder.go | 189 + .../mongo-driver/x/bsonx/bsoncore/bsoncore.go | 842 + .../mongo-driver/x/bsonx/bsoncore/doc.go | 34 + .../mongo-driver/x/bsonx/bsoncore/document.go | 386 + .../x/bsonx/bsoncore/document_sequence.go | 189 + .../mongo-driver/x/bsonx/bsoncore/element.go | 152 + .../mongo-driver/x/bsonx/bsoncore/tables.go | 223 + .../mongo-driver/x/bsonx/bsoncore/value.go | 964 + .../go.opentelemetry.io/otel/.codespellignore | 7 + vendor/go.opentelemetry.io/otel/.codespellrc | 10 + .../go.opentelemetry.io/otel/.gitattributes | 3 + vendor/go.opentelemetry.io/otel/.gitignore | 22 + vendor/go.opentelemetry.io/otel/.gitmodules | 3 + vendor/go.opentelemetry.io/otel/.golangci.yml | 296 + vendor/go.opentelemetry.io/otel/.lycheeignore | 6 + .../otel/.markdownlint.yaml | 29 + vendor/go.opentelemetry.io/otel/CHANGELOG.md | 2939 + vendor/go.opentelemetry.io/otel/CODEOWNERS | 17 + .../go.opentelemetry.io/otel/CONTRIBUTING.md | 645 + vendor/go.opentelemetry.io/otel/LICENSE | 201 + vendor/go.opentelemetry.io/otel/Makefile | 318 + vendor/go.opentelemetry.io/otel/README.md | 112 + vendor/go.opentelemetry.io/otel/RELEASING.md | 139 + vendor/go.opentelemetry.io/otel/VERSIONING.md | 224 + .../go.opentelemetry.io/otel/attribute/doc.go | 16 + .../otel/attribute/encoder.go | 146 + .../otel/attribute/filter.go | 60 + .../otel/attribute/iterator.go | 161 + .../go.opentelemetry.io/otel/attribute/key.go | 134 + .../go.opentelemetry.io/otel/attribute/kv.go | 86 + .../go.opentelemetry.io/otel/attribute/set.go | 452 + .../otel/attribute/type_string.go | 31 + .../otel/attribute/value.go | 270 + .../otel/baggage/baggage.go | 744 + .../otel/baggage/context.go | 39 + .../go.opentelemetry.io/otel/baggage/doc.go | 20 + .../go.opentelemetry.io/otel/codes/codes.go | 116 + vendor/go.opentelemetry.io/otel/codes/doc.go | 21 + 
vendor/go.opentelemetry.io/otel/doc.go | 34 + .../go.opentelemetry.io/otel/error_handler.go | 38 + .../go.opentelemetry.io/otel/get_main_pkgs.sh | 41 + vendor/go.opentelemetry.io/otel/handler.go | 48 + .../otel/internal/attribute/attribute.go | 111 + .../otel/internal/baggage/baggage.go | 43 + .../otel/internal/baggage/context.go | 92 + .../go.opentelemetry.io/otel/internal/gen.go | 29 + .../otel/internal/global/handler.go | 102 + .../otel/internal/global/instruments.go | 371 + .../otel/internal/global/internal_logging.go | 69 + .../otel/internal/global/meter.go | 356 + .../otel/internal/global/propagator.go | 82 + .../otel/internal/global/state.go | 156 + .../otel/internal/global/trace.go | 199 + .../otel/internal/rawhelpers.go | 55 + .../otel/internal_logging.go | 26 + vendor/go.opentelemetry.io/otel/metric.go | 53 + .../go.opentelemetry.io/otel/metric/LICENSE | 201 + .../otel/metric/asyncfloat64.go | 271 + .../otel/metric/asyncint64.go | 269 + .../go.opentelemetry.io/otel/metric/config.go | 92 + vendor/go.opentelemetry.io/otel/metric/doc.go | 170 + .../otel/metric/embedded/embedded.go | 234 + .../otel/metric/instrument.go | 357 + .../go.opentelemetry.io/otel/metric/meter.go | 212 + .../otel/metric/syncfloat64.go | 185 + .../otel/metric/syncint64.go | 185 + .../go.opentelemetry.io/otel/propagation.go | 31 + .../otel/propagation/baggage.go | 58 + .../otel/propagation/doc.go | 24 + .../otel/propagation/propagation.go | 153 + .../otel/propagation/trace_context.go | 167 + .../go.opentelemetry.io/otel/requirements.txt | 1 + .../otel/semconv/internal/v2/http.go | 404 + .../otel/semconv/internal/v2/net.go | 324 + .../otel/semconv/v1.17.0/doc.go | 20 + .../otel/semconv/v1.17.0/event.go | 199 + .../otel/semconv/v1.17.0/exception.go | 20 + .../otel/semconv/v1.17.0/http.go | 21 + .../otel/semconv/v1.17.0/httpconv/http.go | 152 + .../otel/semconv/v1.17.0/resource.go | 2010 + .../otel/semconv/v1.17.0/schema.go | 20 + .../otel/semconv/v1.17.0/trace.go | 3375 + 
vendor/go.opentelemetry.io/otel/trace.go | 47 + vendor/go.opentelemetry.io/otel/trace/LICENSE | 201 + .../go.opentelemetry.io/otel/trace/config.go | 334 + .../go.opentelemetry.io/otel/trace/context.go | 61 + vendor/go.opentelemetry.io/otel/trace/doc.go | 130 + .../otel/trace/embedded/embedded.go | 56 + .../otel/trace/nonrecording.go | 27 + vendor/go.opentelemetry.io/otel/trace/noop.go | 93 + .../go.opentelemetry.io/otel/trace/trace.go | 577 + .../otel/trace/tracestate.go | 331 + .../otel/verify_examples.sh | 85 + vendor/go.opentelemetry.io/otel/version.go | 20 + vendor/go.opentelemetry.io/otel/versions.yaml | 56 + vendor/golang.org/x/crypto/LICENSE | 27 + vendor/golang.org/x/crypto/PATENTS | 22 + vendor/golang.org/x/crypto/bcrypt/base64.go | 35 + vendor/golang.org/x/crypto/bcrypt/bcrypt.go | 304 + vendor/golang.org/x/crypto/blowfish/block.go | 159 + vendor/golang.org/x/crypto/blowfish/cipher.go | 99 + vendor/golang.org/x/crypto/blowfish/const.go | 199 + vendor/golang.org/x/exp/LICENSE | 27 + vendor/golang.org/x/exp/PATENTS | 22 + vendor/golang.org/x/exp/maps/maps.go | 94 + vendor/golang.org/x/exp/rand/exp.go | 221 + vendor/golang.org/x/exp/rand/normal.go | 156 + vendor/golang.org/x/exp/rand/rand.go | 372 + vendor/golang.org/x/exp/rand/rng.go | 91 + vendor/golang.org/x/exp/rand/zipf.go | 77 + vendor/golang.org/x/image/LICENSE | 27 + vendor/golang.org/x/image/PATENTS | 22 + vendor/golang.org/x/image/bmp/reader.go | 253 + vendor/golang.org/x/image/bmp/writer.go | 262 + vendor/golang.org/x/image/ccitt/reader.go | 795 + vendor/golang.org/x/image/ccitt/table.go | 972 + vendor/golang.org/x/image/ccitt/writer.go | 102 + vendor/golang.org/x/image/draw/draw.go | 67 + vendor/golang.org/x/image/draw/impl.go | 8426 + vendor/golang.org/x/image/draw/scale.go | 525 + vendor/golang.org/x/image/math/f64/f64.go | 37 + vendor/golang.org/x/image/tiff/buffer.go | 69 + vendor/golang.org/x/image/tiff/compress.go | 58 + vendor/golang.org/x/image/tiff/consts.go | 149 + 
vendor/golang.org/x/image/tiff/fuzz.go | 29 + vendor/golang.org/x/image/tiff/lzw/reader.go | 272 + vendor/golang.org/x/image/tiff/reader.go | 777 + vendor/golang.org/x/image/tiff/writer.go | 441 + vendor/golang.org/x/mod/LICENSE | 27 + vendor/golang.org/x/mod/PATENTS | 22 + .../x/mod/internal/lazyregexp/lazyre.go | 78 + vendor/golang.org/x/mod/module/module.go | 841 + vendor/golang.org/x/mod/module/pseudo.go | 250 + vendor/golang.org/x/mod/semver/semver.go | 401 + vendor/golang.org/x/sync/LICENSE | 27 + vendor/golang.org/x/sync/PATENTS | 22 + vendor/golang.org/x/sync/errgroup/errgroup.go | 135 + vendor/golang.org/x/sync/errgroup/go120.go | 13 + .../golang.org/x/sync/errgroup/pre_go120.go | 14 + vendor/golang.org/x/sys/LICENSE | 27 + vendor/golang.org/x/sys/PATENTS | 22 + vendor/golang.org/x/sys/unix/.gitignore | 2 + vendor/golang.org/x/sys/unix/README.md | 184 + .../golang.org/x/sys/unix/affinity_linux.go | 86 + vendor/golang.org/x/sys/unix/aliases.go | 13 + vendor/golang.org/x/sys/unix/asm_aix_ppc64.s | 17 + vendor/golang.org/x/sys/unix/asm_bsd_386.s | 27 + vendor/golang.org/x/sys/unix/asm_bsd_amd64.s | 27 + vendor/golang.org/x/sys/unix/asm_bsd_arm.s | 27 + vendor/golang.org/x/sys/unix/asm_bsd_arm64.s | 27 + vendor/golang.org/x/sys/unix/asm_bsd_ppc64.s | 29 + .../golang.org/x/sys/unix/asm_bsd_riscv64.s | 27 + vendor/golang.org/x/sys/unix/asm_linux_386.s | 65 + .../golang.org/x/sys/unix/asm_linux_amd64.s | 57 + vendor/golang.org/x/sys/unix/asm_linux_arm.s | 56 + .../golang.org/x/sys/unix/asm_linux_arm64.s | 50 + .../golang.org/x/sys/unix/asm_linux_loong64.s | 51 + .../golang.org/x/sys/unix/asm_linux_mips64x.s | 54 + .../golang.org/x/sys/unix/asm_linux_mipsx.s | 52 + .../golang.org/x/sys/unix/asm_linux_ppc64x.s | 42 + .../golang.org/x/sys/unix/asm_linux_riscv64.s | 47 + .../golang.org/x/sys/unix/asm_linux_s390x.s | 54 + .../x/sys/unix/asm_openbsd_mips64.s | 29 + .../golang.org/x/sys/unix/asm_solaris_amd64.s | 17 + vendor/golang.org/x/sys/unix/asm_zos_s390x.s | 382 + 
.../golang.org/x/sys/unix/bluetooth_linux.go | 36 + vendor/golang.org/x/sys/unix/bpxsvc_zos.go | 657 + vendor/golang.org/x/sys/unix/bpxsvc_zos.s | 192 + vendor/golang.org/x/sys/unix/cap_freebsd.go | 195 + vendor/golang.org/x/sys/unix/constants.go | 13 + vendor/golang.org/x/sys/unix/dev_aix_ppc.go | 26 + vendor/golang.org/x/sys/unix/dev_aix_ppc64.go | 28 + vendor/golang.org/x/sys/unix/dev_darwin.go | 24 + vendor/golang.org/x/sys/unix/dev_dragonfly.go | 30 + vendor/golang.org/x/sys/unix/dev_freebsd.go | 30 + vendor/golang.org/x/sys/unix/dev_linux.go | 42 + vendor/golang.org/x/sys/unix/dev_netbsd.go | 29 + vendor/golang.org/x/sys/unix/dev_openbsd.go | 29 + vendor/golang.org/x/sys/unix/dev_zos.go | 28 + vendor/golang.org/x/sys/unix/dirent.go | 102 + vendor/golang.org/x/sys/unix/endian_big.go | 9 + vendor/golang.org/x/sys/unix/endian_little.go | 9 + vendor/golang.org/x/sys/unix/env_unix.go | 31 + vendor/golang.org/x/sys/unix/fcntl.go | 36 + vendor/golang.org/x/sys/unix/fcntl_darwin.go | 24 + .../x/sys/unix/fcntl_linux_32bit.go | 13 + vendor/golang.org/x/sys/unix/fdset.go | 29 + vendor/golang.org/x/sys/unix/gccgo.go | 59 + vendor/golang.org/x/sys/unix/gccgo_c.c | 44 + .../x/sys/unix/gccgo_linux_amd64.go | 20 + vendor/golang.org/x/sys/unix/ifreq_linux.go | 141 + vendor/golang.org/x/sys/unix/ioctl_linux.go | 238 + vendor/golang.org/x/sys/unix/ioctl_signed.go | 69 + .../golang.org/x/sys/unix/ioctl_unsigned.go | 69 + vendor/golang.org/x/sys/unix/ioctl_zos.go | 71 + vendor/golang.org/x/sys/unix/mkall.sh | 249 + vendor/golang.org/x/sys/unix/mkerrors.sh | 793 + vendor/golang.org/x/sys/unix/mmap_nomremap.go | 13 + vendor/golang.org/x/sys/unix/mremap.go | 57 + vendor/golang.org/x/sys/unix/pagesize_unix.go | 15 + .../golang.org/x/sys/unix/pledge_openbsd.go | 111 + vendor/golang.org/x/sys/unix/ptrace_darwin.go | 11 + vendor/golang.org/x/sys/unix/ptrace_ios.go | 11 + vendor/golang.org/x/sys/unix/race.go | 30 + vendor/golang.org/x/sys/unix/race0.go | 25 + 
.../x/sys/unix/readdirent_getdents.go | 12 + .../x/sys/unix/readdirent_getdirentries.go | 19 + .../x/sys/unix/sockcmsg_dragonfly.go | 16 + .../golang.org/x/sys/unix/sockcmsg_linux.go | 85 + vendor/golang.org/x/sys/unix/sockcmsg_unix.go | 106 + .../x/sys/unix/sockcmsg_unix_other.go | 46 + vendor/golang.org/x/sys/unix/sockcmsg_zos.go | 58 + .../golang.org/x/sys/unix/symaddr_zos_s390x.s | 75 + vendor/golang.org/x/sys/unix/syscall.go | 86 + vendor/golang.org/x/sys/unix/syscall_aix.go | 582 + .../golang.org/x/sys/unix/syscall_aix_ppc.go | 52 + .../x/sys/unix/syscall_aix_ppc64.go | 83 + vendor/golang.org/x/sys/unix/syscall_bsd.go | 609 + .../golang.org/x/sys/unix/syscall_darwin.go | 707 + .../x/sys/unix/syscall_darwin_amd64.go | 50 + .../x/sys/unix/syscall_darwin_arm64.go | 50 + .../x/sys/unix/syscall_darwin_libSystem.go | 26 + .../x/sys/unix/syscall_dragonfly.go | 347 + .../x/sys/unix/syscall_dragonfly_amd64.go | 56 + .../golang.org/x/sys/unix/syscall_freebsd.go | 455 + .../x/sys/unix/syscall_freebsd_386.go | 64 + .../x/sys/unix/syscall_freebsd_amd64.go | 64 + .../x/sys/unix/syscall_freebsd_arm.go | 60 + .../x/sys/unix/syscall_freebsd_arm64.go | 60 + .../x/sys/unix/syscall_freebsd_riscv64.go | 60 + vendor/golang.org/x/sys/unix/syscall_hurd.go | 30 + .../golang.org/x/sys/unix/syscall_hurd_386.go | 28 + .../golang.org/x/sys/unix/syscall_illumos.go | 78 + vendor/golang.org/x/sys/unix/syscall_linux.go | 2656 + .../x/sys/unix/syscall_linux_386.go | 314 + .../x/sys/unix/syscall_linux_alarm.go | 12 + .../x/sys/unix/syscall_linux_amd64.go | 145 + .../x/sys/unix/syscall_linux_amd64_gc.go | 12 + .../x/sys/unix/syscall_linux_arm.go | 216 + .../x/sys/unix/syscall_linux_arm64.go | 186 + .../golang.org/x/sys/unix/syscall_linux_gc.go | 14 + .../x/sys/unix/syscall_linux_gc_386.go | 16 + .../x/sys/unix/syscall_linux_gc_arm.go | 13 + .../x/sys/unix/syscall_linux_gccgo_386.go | 30 + .../x/sys/unix/syscall_linux_gccgo_arm.go | 20 + .../x/sys/unix/syscall_linux_loong64.go | 218 + 
.../x/sys/unix/syscall_linux_mips64x.go | 188 + .../x/sys/unix/syscall_linux_mipsx.go | 174 + .../x/sys/unix/syscall_linux_ppc.go | 204 + .../x/sys/unix/syscall_linux_ppc64x.go | 115 + .../x/sys/unix/syscall_linux_riscv64.go | 191 + .../x/sys/unix/syscall_linux_s390x.go | 296 + .../x/sys/unix/syscall_linux_sparc64.go | 112 + .../golang.org/x/sys/unix/syscall_netbsd.go | 371 + .../x/sys/unix/syscall_netbsd_386.go | 37 + .../x/sys/unix/syscall_netbsd_amd64.go | 37 + .../x/sys/unix/syscall_netbsd_arm.go | 37 + .../x/sys/unix/syscall_netbsd_arm64.go | 37 + .../golang.org/x/sys/unix/syscall_openbsd.go | 342 + .../x/sys/unix/syscall_openbsd_386.go | 41 + .../x/sys/unix/syscall_openbsd_amd64.go | 41 + .../x/sys/unix/syscall_openbsd_arm.go | 41 + .../x/sys/unix/syscall_openbsd_arm64.go | 41 + .../x/sys/unix/syscall_openbsd_libc.go | 26 + .../x/sys/unix/syscall_openbsd_mips64.go | 39 + .../x/sys/unix/syscall_openbsd_ppc64.go | 41 + .../x/sys/unix/syscall_openbsd_riscv64.go | 41 + .../golang.org/x/sys/unix/syscall_solaris.go | 1104 + .../x/sys/unix/syscall_solaris_amd64.go | 27 + vendor/golang.org/x/sys/unix/syscall_unix.go | 615 + .../golang.org/x/sys/unix/syscall_unix_gc.go | 14 + .../x/sys/unix/syscall_unix_gc_ppc64x.go | 22 + .../x/sys/unix/syscall_zos_s390x.go | 3117 + vendor/golang.org/x/sys/unix/sysvshm_linux.go | 20 + vendor/golang.org/x/sys/unix/sysvshm_unix.go | 51 + .../x/sys/unix/sysvshm_unix_other.go | 13 + vendor/golang.org/x/sys/unix/timestruct.go | 76 + .../golang.org/x/sys/unix/unveil_openbsd.go | 51 + .../golang.org/x/sys/unix/vgetrandom_linux.go | 13 + .../x/sys/unix/vgetrandom_unsupported.go | 11 + vendor/golang.org/x/sys/unix/xattr_bsd.go | 280 + .../golang.org/x/sys/unix/zerrors_aix_ppc.go | 1384 + .../x/sys/unix/zerrors_aix_ppc64.go | 1385 + .../x/sys/unix/zerrors_darwin_amd64.go | 1922 + .../x/sys/unix/zerrors_darwin_arm64.go | 1922 + .../x/sys/unix/zerrors_dragonfly_amd64.go | 1737 + .../x/sys/unix/zerrors_freebsd_386.go | 2042 + 
.../x/sys/unix/zerrors_freebsd_amd64.go | 2039 + .../x/sys/unix/zerrors_freebsd_arm.go | 2033 + .../x/sys/unix/zerrors_freebsd_arm64.go | 2033 + .../x/sys/unix/zerrors_freebsd_riscv64.go | 2147 + vendor/golang.org/x/sys/unix/zerrors_linux.go | 3703 + .../x/sys/unix/zerrors_linux_386.go | 850 + .../x/sys/unix/zerrors_linux_amd64.go | 850 + .../x/sys/unix/zerrors_linux_arm.go | 855 + .../x/sys/unix/zerrors_linux_arm64.go | 850 + .../x/sys/unix/zerrors_linux_loong64.go | 842 + .../x/sys/unix/zerrors_linux_mips.go | 856 + .../x/sys/unix/zerrors_linux_mips64.go | 856 + .../x/sys/unix/zerrors_linux_mips64le.go | 856 + .../x/sys/unix/zerrors_linux_mipsle.go | 856 + .../x/sys/unix/zerrors_linux_ppc.go | 908 + .../x/sys/unix/zerrors_linux_ppc64.go | 912 + .../x/sys/unix/zerrors_linux_ppc64le.go | 912 + .../x/sys/unix/zerrors_linux_riscv64.go | 839 + .../x/sys/unix/zerrors_linux_s390x.go | 911 + .../x/sys/unix/zerrors_linux_sparc64.go | 954 + .../x/sys/unix/zerrors_netbsd_386.go | 1779 + .../x/sys/unix/zerrors_netbsd_amd64.go | 1769 + .../x/sys/unix/zerrors_netbsd_arm.go | 1758 + .../x/sys/unix/zerrors_netbsd_arm64.go | 1769 + .../x/sys/unix/zerrors_openbsd_386.go | 1905 + .../x/sys/unix/zerrors_openbsd_amd64.go | 1905 + .../x/sys/unix/zerrors_openbsd_arm.go | 1905 + .../x/sys/unix/zerrors_openbsd_arm64.go | 1905 + .../x/sys/unix/zerrors_openbsd_mips64.go | 1905 + .../x/sys/unix/zerrors_openbsd_ppc64.go | 1904 + .../x/sys/unix/zerrors_openbsd_riscv64.go | 1903 + .../x/sys/unix/zerrors_solaris_amd64.go | 1556 + .../x/sys/unix/zerrors_zos_s390x.go | 990 + .../x/sys/unix/zptrace_armnn_linux.go | 40 + .../x/sys/unix/zptrace_linux_arm64.go | 17 + .../x/sys/unix/zptrace_mipsnn_linux.go | 49 + .../x/sys/unix/zptrace_mipsnnle_linux.go | 49 + .../x/sys/unix/zptrace_x86_linux.go | 79 + .../x/sys/unix/zsymaddr_zos_s390x.s | 364 + .../golang.org/x/sys/unix/zsyscall_aix_ppc.go | 1461 + .../x/sys/unix/zsyscall_aix_ppc64.go | 1420 + .../x/sys/unix/zsyscall_aix_ppc64_gc.go | 1188 + 
.../x/sys/unix/zsyscall_aix_ppc64_gccgo.go | 1069 + .../x/sys/unix/zsyscall_darwin_amd64.go | 2644 + .../x/sys/unix/zsyscall_darwin_amd64.s | 779 + .../x/sys/unix/zsyscall_darwin_arm64.go | 2644 + .../x/sys/unix/zsyscall_darwin_arm64.s | 779 + .../x/sys/unix/zsyscall_dragonfly_amd64.go | 1666 + .../x/sys/unix/zsyscall_freebsd_386.go | 1886 + .../x/sys/unix/zsyscall_freebsd_amd64.go | 1886 + .../x/sys/unix/zsyscall_freebsd_arm.go | 1886 + .../x/sys/unix/zsyscall_freebsd_arm64.go | 1886 + .../x/sys/unix/zsyscall_freebsd_riscv64.go | 1886 + .../x/sys/unix/zsyscall_illumos_amd64.go | 101 + .../golang.org/x/sys/unix/zsyscall_linux.go | 2230 + .../x/sys/unix/zsyscall_linux_386.go | 486 + .../x/sys/unix/zsyscall_linux_amd64.go | 653 + .../x/sys/unix/zsyscall_linux_arm.go | 601 + .../x/sys/unix/zsyscall_linux_arm64.go | 552 + .../x/sys/unix/zsyscall_linux_loong64.go | 486 + .../x/sys/unix/zsyscall_linux_mips.go | 653 + .../x/sys/unix/zsyscall_linux_mips64.go | 647 + .../x/sys/unix/zsyscall_linux_mips64le.go | 636 + .../x/sys/unix/zsyscall_linux_mipsle.go | 653 + .../x/sys/unix/zsyscall_linux_ppc.go | 658 + .../x/sys/unix/zsyscall_linux_ppc64.go | 704 + .../x/sys/unix/zsyscall_linux_ppc64le.go | 704 + .../x/sys/unix/zsyscall_linux_riscv64.go | 548 + .../x/sys/unix/zsyscall_linux_s390x.go | 495 + .../x/sys/unix/zsyscall_linux_sparc64.go | 648 + .../x/sys/unix/zsyscall_netbsd_386.go | 1848 + .../x/sys/unix/zsyscall_netbsd_amd64.go | 1848 + .../x/sys/unix/zsyscall_netbsd_arm.go | 1848 + .../x/sys/unix/zsyscall_netbsd_arm64.go | 1848 + .../x/sys/unix/zsyscall_openbsd_386.go | 2323 + .../x/sys/unix/zsyscall_openbsd_386.s | 699 + .../x/sys/unix/zsyscall_openbsd_amd64.go | 2323 + .../x/sys/unix/zsyscall_openbsd_amd64.s | 699 + .../x/sys/unix/zsyscall_openbsd_arm.go | 2323 + .../x/sys/unix/zsyscall_openbsd_arm.s | 699 + .../x/sys/unix/zsyscall_openbsd_arm64.go | 2323 + .../x/sys/unix/zsyscall_openbsd_arm64.s | 699 + .../x/sys/unix/zsyscall_openbsd_mips64.go | 2323 + 
.../x/sys/unix/zsyscall_openbsd_mips64.s | 699 + .../x/sys/unix/zsyscall_openbsd_ppc64.go | 2323 + .../x/sys/unix/zsyscall_openbsd_ppc64.s | 838 + .../x/sys/unix/zsyscall_openbsd_riscv64.go | 2323 + .../x/sys/unix/zsyscall_openbsd_riscv64.s | 699 + .../x/sys/unix/zsyscall_solaris_amd64.go | 2103 + .../x/sys/unix/zsyscall_zos_s390x.go | 3458 + .../x/sys/unix/zsysctl_openbsd_386.go | 280 + .../x/sys/unix/zsysctl_openbsd_amd64.go | 280 + .../x/sys/unix/zsysctl_openbsd_arm.go | 280 + .../x/sys/unix/zsysctl_openbsd_arm64.go | 280 + .../x/sys/unix/zsysctl_openbsd_mips64.go | 280 + .../x/sys/unix/zsysctl_openbsd_ppc64.go | 280 + .../x/sys/unix/zsysctl_openbsd_riscv64.go | 281 + .../x/sys/unix/zsysnum_darwin_amd64.go | 439 + .../x/sys/unix/zsysnum_darwin_arm64.go | 437 + .../x/sys/unix/zsysnum_dragonfly_amd64.go | 316 + .../x/sys/unix/zsysnum_freebsd_386.go | 393 + .../x/sys/unix/zsysnum_freebsd_amd64.go | 393 + .../x/sys/unix/zsysnum_freebsd_arm.go | 393 + .../x/sys/unix/zsysnum_freebsd_arm64.go | 393 + .../x/sys/unix/zsysnum_freebsd_riscv64.go | 393 + .../x/sys/unix/zsysnum_linux_386.go | 461 + .../x/sys/unix/zsysnum_linux_amd64.go | 384 + .../x/sys/unix/zsysnum_linux_arm.go | 425 + .../x/sys/unix/zsysnum_linux_arm64.go | 328 + .../x/sys/unix/zsysnum_linux_loong64.go | 324 + .../x/sys/unix/zsysnum_linux_mips.go | 445 + .../x/sys/unix/zsysnum_linux_mips64.go | 375 + .../x/sys/unix/zsysnum_linux_mips64le.go | 375 + .../x/sys/unix/zsysnum_linux_mipsle.go | 445 + .../x/sys/unix/zsysnum_linux_ppc.go | 452 + .../x/sys/unix/zsysnum_linux_ppc64.go | 424 + .../x/sys/unix/zsysnum_linux_ppc64le.go | 424 + .../x/sys/unix/zsysnum_linux_riscv64.go | 329 + .../x/sys/unix/zsysnum_linux_s390x.go | 390 + .../x/sys/unix/zsysnum_linux_sparc64.go | 403 + .../x/sys/unix/zsysnum_netbsd_386.go | 274 + .../x/sys/unix/zsysnum_netbsd_amd64.go | 274 + .../x/sys/unix/zsysnum_netbsd_arm.go | 274 + .../x/sys/unix/zsysnum_netbsd_arm64.go | 274 + .../x/sys/unix/zsysnum_openbsd_386.go | 219 + 
.../x/sys/unix/zsysnum_openbsd_amd64.go | 219 + .../x/sys/unix/zsysnum_openbsd_arm.go | 219 + .../x/sys/unix/zsysnum_openbsd_arm64.go | 218 + .../x/sys/unix/zsysnum_openbsd_mips64.go | 221 + .../x/sys/unix/zsysnum_openbsd_ppc64.go | 217 + .../x/sys/unix/zsysnum_openbsd_riscv64.go | 218 + .../x/sys/unix/zsysnum_zos_s390x.go | 2852 + .../golang.org/x/sys/unix/ztypes_aix_ppc.go | 353 + .../golang.org/x/sys/unix/ztypes_aix_ppc64.go | 357 + .../x/sys/unix/ztypes_darwin_amd64.go | 818 + .../x/sys/unix/ztypes_darwin_arm64.go | 818 + .../x/sys/unix/ztypes_dragonfly_amd64.go | 473 + .../x/sys/unix/ztypes_freebsd_386.go | 651 + .../x/sys/unix/ztypes_freebsd_amd64.go | 656 + .../x/sys/unix/ztypes_freebsd_arm.go | 642 + .../x/sys/unix/ztypes_freebsd_arm64.go | 636 + .../x/sys/unix/ztypes_freebsd_riscv64.go | 638 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 6066 + .../golang.org/x/sys/unix/ztypes_linux_386.go | 689 + .../x/sys/unix/ztypes_linux_amd64.go | 703 + .../golang.org/x/sys/unix/ztypes_linux_arm.go | 683 + .../x/sys/unix/ztypes_linux_arm64.go | 682 + .../x/sys/unix/ztypes_linux_loong64.go | 683 + .../x/sys/unix/ztypes_linux_mips.go | 688 + .../x/sys/unix/ztypes_linux_mips64.go | 685 + .../x/sys/unix/ztypes_linux_mips64le.go | 685 + .../x/sys/unix/ztypes_linux_mipsle.go | 688 + .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 696 + .../x/sys/unix/ztypes_linux_ppc64.go | 691 + .../x/sys/unix/ztypes_linux_ppc64le.go | 691 + .../x/sys/unix/ztypes_linux_riscv64.go | 770 + .../x/sys/unix/ztypes_linux_s390x.go | 705 + .../x/sys/unix/ztypes_linux_sparc64.go | 686 + .../x/sys/unix/ztypes_netbsd_386.go | 585 + .../x/sys/unix/ztypes_netbsd_amd64.go | 593 + .../x/sys/unix/ztypes_netbsd_arm.go | 590 + .../x/sys/unix/ztypes_netbsd_arm64.go | 593 + .../x/sys/unix/ztypes_openbsd_386.go | 568 + .../x/sys/unix/ztypes_openbsd_amd64.go | 568 + .../x/sys/unix/ztypes_openbsd_arm.go | 575 + .../x/sys/unix/ztypes_openbsd_arm64.go | 568 + .../x/sys/unix/ztypes_openbsd_mips64.go | 568 + 
.../x/sys/unix/ztypes_openbsd_ppc64.go | 570 + .../x/sys/unix/ztypes_openbsd_riscv64.go | 570 + .../x/sys/unix/ztypes_solaris_amd64.go | 516 + .../golang.org/x/sys/unix/ztypes_zos_s390x.go | 546 + vendor/golang.org/x/sys/windows/aliases.go | 12 + .../golang.org/x/sys/windows/dll_windows.go | 416 + .../golang.org/x/sys/windows/env_windows.go | 57 + vendor/golang.org/x/sys/windows/eventlog.go | 20 + .../golang.org/x/sys/windows/exec_windows.go | 248 + .../x/sys/windows/memory_windows.go | 48 + vendor/golang.org/x/sys/windows/mkerrors.bash | 70 + .../x/sys/windows/mkknownfolderids.bash | 27 + vendor/golang.org/x/sys/windows/mksyscall.go | 9 + vendor/golang.org/x/sys/windows/race.go | 30 + vendor/golang.org/x/sys/windows/race0.go | 25 + .../x/sys/windows/security_windows.go | 1458 + vendor/golang.org/x/sys/windows/service.go | 257 + .../x/sys/windows/setupapi_windows.go | 1425 + vendor/golang.org/x/sys/windows/str.go | 22 + vendor/golang.org/x/sys/windows/syscall.go | 104 + .../x/sys/windows/syscall_windows.go | 1930 + .../golang.org/x/sys/windows/types_windows.go | 3476 + .../x/sys/windows/types_windows_386.go | 35 + .../x/sys/windows/types_windows_amd64.go | 34 + .../x/sys/windows/types_windows_arm.go | 35 + .../x/sys/windows/types_windows_arm64.go | 34 + .../x/sys/windows/zerrors_windows.go | 9468 + .../x/sys/windows/zknownfolderids_windows.go | 149 + .../x/sys/windows/zsyscall_windows.go | 4615 + vendor/golang.org/x/text/LICENSE | 27 + vendor/golang.org/x/text/PATENTS | 22 + vendor/golang.org/x/text/cases/cases.go | 162 + vendor/golang.org/x/text/cases/context.go | 376 + vendor/golang.org/x/text/cases/fold.go | 34 + vendor/golang.org/x/text/cases/icu.go | 61 + vendor/golang.org/x/text/cases/info.go | 82 + vendor/golang.org/x/text/cases/map.go | 816 + .../golang.org/x/text/cases/tables10.0.0.go | 2255 + .../golang.org/x/text/cases/tables11.0.0.go | 2316 + .../golang.org/x/text/cases/tables12.0.0.go | 2359 + .../golang.org/x/text/cases/tables13.0.0.go | 2399 + 
.../golang.org/x/text/cases/tables15.0.0.go | 2527 + vendor/golang.org/x/text/cases/tables9.0.0.go | 2215 + vendor/golang.org/x/text/cases/trieval.go | 217 + vendor/golang.org/x/text/collate/collate.go | 403 + vendor/golang.org/x/text/collate/index.go | 32 + vendor/golang.org/x/text/collate/option.go | 239 + vendor/golang.org/x/text/collate/sort.go | 81 + vendor/golang.org/x/text/collate/tables.go | 73789 ++++++ .../x/text/internal/colltab/collelem.go | 376 + .../x/text/internal/colltab/colltab.go | 105 + .../x/text/internal/colltab/contract.go | 145 + .../x/text/internal/colltab/iter.go | 178 + .../x/text/internal/colltab/numeric.go | 236 + .../x/text/internal/colltab/table.go | 275 + .../x/text/internal/colltab/trie.go | 159 + .../x/text/internal/colltab/weighter.go | 31 + vendor/golang.org/x/text/internal/internal.go | 49 + .../x/text/internal/language/common.go | 16 + .../x/text/internal/language/compact.go | 29 + .../text/internal/language/compact/compact.go | 61 + .../internal/language/compact/language.go | 260 + .../text/internal/language/compact/parents.go | 120 + .../text/internal/language/compact/tables.go | 1015 + .../x/text/internal/language/compact/tags.go | 91 + .../x/text/internal/language/compose.go | 167 + .../x/text/internal/language/coverage.go | 28 + .../x/text/internal/language/language.go | 627 + .../x/text/internal/language/lookup.go | 412 + .../x/text/internal/language/match.go | 226 + .../x/text/internal/language/parse.go | 608 + .../x/text/internal/language/tables.go | 3494 + .../x/text/internal/language/tags.go | 48 + vendor/golang.org/x/text/internal/match.go | 67 + vendor/golang.org/x/text/internal/tag/tag.go | 100 + vendor/golang.org/x/text/language/coverage.go | 187 + vendor/golang.org/x/text/language/doc.go | 98 + vendor/golang.org/x/text/language/language.go | 605 + vendor/golang.org/x/text/language/match.go | 735 + vendor/golang.org/x/text/language/parse.go | 256 + vendor/golang.org/x/text/language/tables.go | 298 + 
vendor/golang.org/x/text/language/tags.go | 145 + vendor/golang.org/x/text/runes/cond.go | 187 + vendor/golang.org/x/text/runes/runes.go | 355 + .../golang.org/x/text/transform/transform.go | 709 + .../x/text/unicode/norm/composition.go | 512 + .../x/text/unicode/norm/forminfo.go | 279 + .../golang.org/x/text/unicode/norm/input.go | 109 + vendor/golang.org/x/text/unicode/norm/iter.go | 458 + .../x/text/unicode/norm/normalize.go | 610 + .../x/text/unicode/norm/readwriter.go | 125 + .../x/text/unicode/norm/tables10.0.0.go | 7657 + .../x/text/unicode/norm/tables11.0.0.go | 7693 + .../x/text/unicode/norm/tables12.0.0.go | 7710 + .../x/text/unicode/norm/tables13.0.0.go | 7760 + .../x/text/unicode/norm/tables15.0.0.go | 7907 + .../x/text/unicode/norm/tables9.0.0.go | 7637 + .../x/text/unicode/norm/transform.go | 88 + vendor/golang.org/x/text/unicode/norm/trie.go | 54 + vendor/golang.org/x/tools/LICENSE | 27 + vendor/golang.org/x/tools/PATENTS | 22 + vendor/golang.org/x/tools/txtar/archive.go | 140 + vendor/golang.org/x/tools/txtar/fs.go | 257 + vendor/google.golang.org/protobuf/LICENSE | 27 + vendor/google.golang.org/protobuf/PATENTS | 22 + .../protobuf/encoding/prototext/decode.go | 772 + .../protobuf/encoding/prototext/doc.go | 7 + .../protobuf/encoding/prototext/encode.go | 376 + .../protobuf/encoding/protowire/wire.go | 547 + .../protobuf/internal/descfmt/stringer.go | 413 + .../protobuf/internal/descopts/options.go | 29 + .../protobuf/internal/detrand/rand.go | 69 + .../internal/editiondefaults/defaults.go | 12 + .../editiondefaults/editions_defaults.binpb | 4 + .../internal/encoding/defval/default.go | 213 + .../encoding/messageset/messageset.go | 242 + .../protobuf/internal/encoding/tag/tag.go | 207 + .../protobuf/internal/encoding/text/decode.go | 686 + .../internal/encoding/text/decode_number.go | 211 + .../internal/encoding/text/decode_string.go | 161 + .../internal/encoding/text/decode_token.go | 373 + .../protobuf/internal/encoding/text/doc.go | 29 + 
.../protobuf/internal/encoding/text/encode.go | 272 + .../protobuf/internal/errors/errors.go | 89 + .../protobuf/internal/errors/is_go112.go | 40 + .../protobuf/internal/errors/is_go113.go | 13 + .../protobuf/internal/filedesc/build.go | 157 + .../protobuf/internal/filedesc/desc.go | 703 + .../protobuf/internal/filedesc/desc_init.go | 523 + .../protobuf/internal/filedesc/desc_lazy.go | 732 + .../protobuf/internal/filedesc/desc_list.go | 457 + .../internal/filedesc/desc_list_gen.go | 356 + .../protobuf/internal/filedesc/editions.go | 142 + .../protobuf/internal/filedesc/placeholder.go | 109 + .../protobuf/internal/filetype/build.go | 296 + .../protobuf/internal/flags/flags.go | 24 + .../internal/flags/proto_legacy_disable.go | 10 + .../internal/flags/proto_legacy_enable.go | 10 + .../protobuf/internal/genid/any_gen.go | 34 + .../protobuf/internal/genid/api_gen.go | 106 + .../protobuf/internal/genid/descriptor_gen.go | 1233 + .../protobuf/internal/genid/doc.go | 11 + .../protobuf/internal/genid/duration_gen.go | 34 + .../protobuf/internal/genid/empty_gen.go | 19 + .../protobuf/internal/genid/field_mask_gen.go | 31 + .../internal/genid/go_features_gen.go | 31 + .../protobuf/internal/genid/goname.go | 25 + .../protobuf/internal/genid/map_entry.go | 16 + .../internal/genid/source_context_gen.go | 31 + .../protobuf/internal/genid/struct_gen.go | 121 + .../protobuf/internal/genid/timestamp_gen.go | 34 + .../protobuf/internal/genid/type_gen.go | 228 + .../protobuf/internal/genid/wrappers.go | 13 + .../protobuf/internal/genid/wrappers_gen.go | 175 + .../protobuf/internal/impl/api_export.go | 177 + .../protobuf/internal/impl/checkinit.go | 141 + .../protobuf/internal/impl/codec_extension.go | 215 + .../protobuf/internal/impl/codec_field.go | 830 + .../protobuf/internal/impl/codec_gen.go | 5724 + .../protobuf/internal/impl/codec_map.go | 388 + .../protobuf/internal/impl/codec_map_go111.go | 38 + .../protobuf/internal/impl/codec_map_go112.go | 12 + 
.../protobuf/internal/impl/codec_message.go | 217 + .../internal/impl/codec_messageset.go | 123 + .../protobuf/internal/impl/codec_reflect.go | 210 + .../protobuf/internal/impl/codec_tables.go | 557 + .../protobuf/internal/impl/codec_unsafe.go | 18 + .../protobuf/internal/impl/convert.go | 495 + .../protobuf/internal/impl/convert_list.go | 141 + .../protobuf/internal/impl/convert_map.go | 121 + .../protobuf/internal/impl/decode.go | 285 + .../protobuf/internal/impl/encode.go | 201 + .../protobuf/internal/impl/enum.go | 21 + .../protobuf/internal/impl/extension.go | 156 + .../protobuf/internal/impl/legacy_enum.go | 218 + .../protobuf/internal/impl/legacy_export.go | 92 + .../internal/impl/legacy_extension.go | 176 + .../protobuf/internal/impl/legacy_file.go | 81 + .../protobuf/internal/impl/legacy_message.go | 568 + .../protobuf/internal/impl/merge.go | 176 + .../protobuf/internal/impl/merge_gen.go | 209 + .../protobuf/internal/impl/message.go | 284 + .../protobuf/internal/impl/message_reflect.go | 463 + .../internal/impl/message_reflect_field.go | 543 + .../internal/impl/message_reflect_gen.go | 249 + .../protobuf/internal/impl/pointer_reflect.go | 215 + .../protobuf/internal/impl/pointer_unsafe.go | 215 + .../protobuf/internal/impl/validate.go | 576 + .../protobuf/internal/impl/weak.go | 74 + .../protobuf/internal/order/order.go | 89 + .../protobuf/internal/order/range.go | 115 + .../protobuf/internal/pragma/pragma.go | 29 + .../protobuf/internal/set/ints.go | 58 + .../protobuf/internal/strs/strings.go | 196 + .../protobuf/internal/strs/strings_pure.go | 28 + .../internal/strs/strings_unsafe_go120.go | 95 + .../internal/strs/strings_unsafe_go121.go | 74 + .../protobuf/internal/version/version.go | 79 + .../protobuf/proto/checkinit.go | 71 + .../protobuf/proto/decode.go | 294 + .../protobuf/proto/decode_gen.go | 603 + .../google.golang.org/protobuf/proto/doc.go | 86 + .../protobuf/proto/encode.go | 322 + .../protobuf/proto/encode_gen.go | 97 + 
.../google.golang.org/protobuf/proto/equal.go | 57 + .../protobuf/proto/extension.go | 92 + .../google.golang.org/protobuf/proto/merge.go | 139 + .../protobuf/proto/messageset.go | 93 + .../google.golang.org/protobuf/proto/proto.go | 45 + .../protobuf/proto/proto_methods.go | 20 + .../protobuf/proto/proto_reflect.go | 20 + .../google.golang.org/protobuf/proto/reset.go | 43 + .../google.golang.org/protobuf/proto/size.go | 101 + .../protobuf/proto/size_gen.go | 55 + .../protobuf/proto/wrappers.go | 29 + .../protobuf/reflect/protodesc/desc.go | 285 + .../protobuf/reflect/protodesc/desc_init.go | 304 + .../reflect/protodesc/desc_resolve.go | 286 + .../reflect/protodesc/desc_validate.go | 374 + .../protobuf/reflect/protodesc/editions.go | 148 + .../protobuf/reflect/protodesc/proto.go | 252 + .../protobuf/reflect/protoreflect/methods.go | 78 + .../protobuf/reflect/protoreflect/proto.go | 513 + .../protobuf/reflect/protoreflect/source.go | 129 + .../reflect/protoreflect/source_gen.go | 552 + .../protobuf/reflect/protoreflect/type.go | 666 + .../protobuf/reflect/protoreflect/value.go | 285 + .../reflect/protoreflect/value_equal.go | 168 + .../reflect/protoreflect/value_pure.go | 60 + .../reflect/protoreflect/value_union.go | 438 + .../protoreflect/value_unsafe_go120.go | 99 + .../protoreflect/value_unsafe_go121.go | 87 + .../reflect/protoregistry/registry.go | 882 + .../protobuf/runtime/protoiface/legacy.go | 15 + .../protobuf/runtime/protoiface/methods.go | 168 + .../protobuf/runtime/protoimpl/impl.go | 44 + .../protobuf/runtime/protoimpl/version.go | 60 + .../types/descriptorpb/descriptor.pb.go | 5648 + .../types/gofeaturespb/go_features.pb.go | 177 + .../types/gofeaturespb/go_features.proto | 28 + vendor/gopkg.in/yaml.v2/.travis.yml | 17 + vendor/gopkg.in/yaml.v2/LICENSE | 201 + vendor/gopkg.in/yaml.v2/LICENSE.libyaml | 31 + vendor/gopkg.in/yaml.v2/NOTICE | 13 + vendor/gopkg.in/yaml.v2/README.md | 133 + vendor/gopkg.in/yaml.v2/apic.go | 744 + 
vendor/gopkg.in/yaml.v2/decode.go | 815 + vendor/gopkg.in/yaml.v2/emitterc.go | 1685 + vendor/gopkg.in/yaml.v2/encode.go | 390 + vendor/gopkg.in/yaml.v2/parserc.go | 1095 + vendor/gopkg.in/yaml.v2/readerc.go | 412 + vendor/gopkg.in/yaml.v2/resolve.go | 258 + vendor/gopkg.in/yaml.v2/scannerc.go | 2711 + vendor/gopkg.in/yaml.v2/sorter.go | 113 + vendor/gopkg.in/yaml.v2/writerc.go | 26 + vendor/gopkg.in/yaml.v2/yaml.go | 478 + vendor/gopkg.in/yaml.v2/yamlh.go | 739 + vendor/gopkg.in/yaml.v2/yamlprivateh.go | 173 + vendor/gopkg.in/yaml.v3/LICENSE | 50 + vendor/gopkg.in/yaml.v3/NOTICE | 13 + vendor/gopkg.in/yaml.v3/README.md | 150 + vendor/gopkg.in/yaml.v3/apic.go | 747 + vendor/gopkg.in/yaml.v3/decode.go | 1000 + vendor/gopkg.in/yaml.v3/emitterc.go | 2020 + vendor/gopkg.in/yaml.v3/encode.go | 577 + vendor/gopkg.in/yaml.v3/parserc.go | 1258 + vendor/gopkg.in/yaml.v3/readerc.go | 434 + vendor/gopkg.in/yaml.v3/resolve.go | 326 + vendor/gopkg.in/yaml.v3/scannerc.go | 3038 + vendor/gopkg.in/yaml.v3/sorter.go | 134 + vendor/gopkg.in/yaml.v3/writerc.go | 48 + vendor/gopkg.in/yaml.v3/yaml.go | 698 + vendor/gopkg.in/yaml.v3/yamlh.go | 807 + vendor/gopkg.in/yaml.v3/yamlprivateh.go | 198 + vendor/modules.txt | 556 + 3275 files changed, 1180334 insertions(+), 22 deletions(-) create mode 100644 vendor/github.com/Azure/go-autorest/autorest/LICENSE create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/LICENSE create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/README.md create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/config.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/devicetoken.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/persist.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/sender.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/token.go 
create mode 100644 vendor/github.com/Azure/go-autorest/autorest/adal/version.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/authorization.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/authorization_sas.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/authorization_storage.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/autorest.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/client.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/LICENSE create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/date.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/time.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/timerfc1123.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/unixtime.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/date/utility.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/error.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/preparer.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/responder.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/retriablerequest.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.7.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.8.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/sender.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/utility.go create mode 100644 vendor/github.com/Azure/go-autorest/autorest/version.go create mode 100644 vendor/github.com/Azure/go-autorest/logger/LICENSE create mode 100644 vendor/github.com/Azure/go-autorest/logger/logger.go create mode 100644 vendor/github.com/Azure/go-autorest/tracing/LICENSE create mode 100644 
vendor/github.com/Azure/go-autorest/tracing/tracing.go create mode 100644 vendor/github.com/BurntSushi/locker/Makefile create mode 100644 vendor/github.com/BurntSushi/locker/README.md create mode 100644 vendor/github.com/BurntSushi/locker/UNLICENSE create mode 100644 vendor/github.com/BurntSushi/locker/locker.go create mode 100644 vendor/github.com/BurntSushi/locker/session.vim create mode 100644 vendor/github.com/RoaringBitmap/roaring/.drone.yml create mode 100644 vendor/github.com/RoaringBitmap/roaring/.gitignore create mode 100644 vendor/github.com/RoaringBitmap/roaring/.gitmodules create mode 100644 vendor/github.com/RoaringBitmap/roaring/.travis.yml create mode 100644 vendor/github.com/RoaringBitmap/roaring/AUTHORS create mode 100644 vendor/github.com/RoaringBitmap/roaring/CONTRIBUTORS create mode 100644 vendor/github.com/RoaringBitmap/roaring/LICENSE create mode 100644 vendor/github.com/RoaringBitmap/roaring/LICENSE-2.0.txt create mode 100644 vendor/github.com/RoaringBitmap/roaring/Makefile create mode 100644 vendor/github.com/RoaringBitmap/roaring/README.md create mode 100644 vendor/github.com/RoaringBitmap/roaring/arraycontainer.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/arraycontainer_gen.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/bitmapcontainer.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/bitmapcontainer_gen.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/byte_input.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/clz.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/clz_compat.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/ctz.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/ctz_compat.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/fastaggregation.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/manyiterator.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/parallel.go create mode 100644 
vendor/github.com/RoaringBitmap/roaring/popcnt.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/popcnt_amd64.s create mode 100644 vendor/github.com/RoaringBitmap/roaring/popcnt_asm.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/popcnt_compat.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/popcnt_generic.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/popcnt_slices.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/priorityqueue.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/roaring.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/roaringarray.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/roaringarray_gen.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/runcontainer.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/runcontainer_gen.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/serialization.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/serialization_generic.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/serialization_littleendian.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/serializationfuzz.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/setutil.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/shortiterator.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/smat.go create mode 100644 vendor/github.com/RoaringBitmap/roaring/util.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/.editorconfig create mode 100644 vendor/github.com/alecthomas/chroma/v2/.gitignore create mode 100644 vendor/github.com/alecthomas/chroma/v2/.golangci.yml create mode 100644 vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml create mode 100644 vendor/github.com/alecthomas/chroma/v2/Bitfile create mode 100644 vendor/github.com/alecthomas/chroma/v2/COPYING create mode 100644 vendor/github.com/alecthomas/chroma/v2/Makefile create mode 100644 
vendor/github.com/alecthomas/chroma/v2/README.md create mode 100644 vendor/github.com/alecthomas/chroma/v2/coalesce.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/colour.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/delegate.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/doc.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/emitters.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/formatter.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/iterator.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexer.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/README.md create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/cl.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/dns.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml create 
mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml 
create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml 
create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/go.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go create mode 
100644 vendor/github.com/alecthomas/chroma/v2/lexers/html.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/http.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/php.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/raku.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/rst.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/svelte.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/typoscript.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/lexers/zed.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/mutators.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/pygments-lexers.txt create mode 100644 vendor/github.com/alecthomas/chroma/v2/regexp.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/registry.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/remap.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/renovate.json5 create mode 100644 vendor/github.com/alecthomas/chroma/v2/serialise.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/style.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/abap.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/algol.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/api.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/average.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/styles/borland.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/bw.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/compat.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/github.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/igor.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/manni.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/native.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/nord.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/tango.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/trac.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/vim.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/vs.xml create mode 100644 
vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml create mode 100644 vendor/github.com/alecthomas/chroma/v2/table.py create mode 100644 vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go create mode 100644 vendor/github.com/alecthomas/chroma/v2/types.go create mode 100644 vendor/github.com/armon/go-radix/.gitignore create mode 100644 vendor/github.com/armon/go-radix/.travis.yml create mode 100644 vendor/github.com/armon/go-radix/LICENSE create mode 100644 vendor/github.com/armon/go-radix/README.md create mode 100644 vendor/github.com/armon/go-radix/radix.go create mode 100644 vendor/github.com/asaskevich/govalidator/.gitignore create mode 100644 vendor/github.com/asaskevich/govalidator/.travis.yml create mode 100644 vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md create mode 100644 vendor/github.com/asaskevich/govalidator/LICENSE create mode 100644 vendor/github.com/asaskevich/govalidator/README.md create mode 100644 vendor/github.com/asaskevich/govalidator/arrays.go create mode 100644 vendor/github.com/asaskevich/govalidator/converter.go create mode 100644 vendor/github.com/asaskevich/govalidator/doc.go create mode 100644 vendor/github.com/asaskevich/govalidator/error.go create mode 100644 vendor/github.com/asaskevich/govalidator/numerics.go create mode 100644 vendor/github.com/asaskevich/govalidator/patterns.go create mode 100644 vendor/github.com/asaskevich/govalidator/types.go create mode 100644 vendor/github.com/asaskevich/govalidator/utils.go create mode 100644 vendor/github.com/asaskevich/govalidator/validator.go create mode 100644 vendor/github.com/asaskevich/govalidator/wercker.yml create mode 100644 
vendor/github.com/bep/clocks/.gitignore create mode 100644 vendor/github.com/bep/clocks/LICENSE create mode 100644 vendor/github.com/bep/clocks/README.md create mode 100644 vendor/github.com/bep/clocks/clock.go create mode 100644 vendor/github.com/bep/debounce/.gitignore create mode 100644 vendor/github.com/bep/debounce/LICENSE create mode 100644 vendor/github.com/bep/debounce/README.md create mode 100644 vendor/github.com/bep/debounce/debounce.go create mode 100644 vendor/github.com/bep/gitmap/.gitignore create mode 100644 vendor/github.com/bep/gitmap/LICENSE create mode 100644 vendor/github.com/bep/gitmap/README.md create mode 100644 vendor/github.com/bep/gitmap/gitmap.go create mode 100644 vendor/github.com/bep/goat/.gitignore create mode 100644 vendor/github.com/bep/goat/LICENSE create mode 100644 vendor/github.com/bep/goat/README.md create mode 100644 vendor/github.com/bep/goat/canvas.go create mode 100644 vendor/github.com/bep/goat/index.go create mode 100644 vendor/github.com/bep/goat/iter.go create mode 100644 vendor/github.com/bep/goat/svg.go create mode 100644 vendor/github.com/bep/godartsass/.gitignore create mode 100644 vendor/github.com/bep/godartsass/LICENSE create mode 100644 vendor/github.com/bep/godartsass/README.md create mode 100644 vendor/github.com/bep/godartsass/codecov.yml create mode 100644 vendor/github.com/bep/godartsass/conn.go create mode 100644 vendor/github.com/bep/godartsass/internal/embeddedsassv1/README.md create mode 100644 vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.pb.go create mode 100644 vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.proto create mode 100644 vendor/github.com/bep/godartsass/options.go create mode 100644 vendor/github.com/bep/godartsass/transpiler.go create mode 100644 vendor/github.com/bep/godartsass/v2/.gitignore create mode 100644 vendor/github.com/bep/godartsass/v2/LICENSE create mode 100644 vendor/github.com/bep/godartsass/v2/README.md create mode 
100644 vendor/github.com/bep/godartsass/v2/codecov.yml create mode 100644 vendor/github.com/bep/godartsass/v2/conn.go create mode 100644 vendor/github.com/bep/godartsass/v2/internal/embeddedsass/README.md create mode 100644 vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.pb.go create mode 100644 vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.proto create mode 100644 vendor/github.com/bep/godartsass/v2/options.go create mode 100644 vendor/github.com/bep/godartsass/v2/transpiler.go create mode 100644 vendor/github.com/bep/golibsass/LICENSE create mode 100644 vendor/github.com/bep/golibsass/libsass/libsasserrors/libsasserrors.go create mode 100644 vendor/github.com/bep/gowebp/LICENSE create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/a__cgo.go create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_dev.go create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_src.go create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/a__encoder.go create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/alphai_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/analysis_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/anim_decode.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/anim_encode.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/animi.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/backward_references_cost_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_inl_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/buffer_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/common_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/common_sse2.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/common_sse41.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/config_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cost_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/cpu.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/cpu.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_clip_tables.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dec_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/decode.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/demux.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/demux.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/dsp.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/enc_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/encode.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/endian_inl_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filter_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters_msa.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/filters_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/format_constants.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/frame_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/frame_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/idec_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/io_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/iterator_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_common.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_neon.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/mips_macro.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/msa_macro.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/mux.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/mux_types.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/muxedit.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/muxi.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/muxinternal.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/muxread.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/near_lossless_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/neon.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/picture_csp_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/picture_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/picture_psnr_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/picture_rescale_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/picture_tools_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/predictor_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant_dec.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/quant_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/random_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/random_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/ssim.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/ssim_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/syntax_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/token_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/tree_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/tree_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/types.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling_msa.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse41.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/utils.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/utils.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8i_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8i_enc.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8l_dec.c create mode 100644 
vendor/github.com/bep/gowebp/internal/libwebp/vp8l_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8li_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/vp8li_enc.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/webp_dec.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/webp_enc.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/webpi_dec.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv.h create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips32.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips_dsp_r2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv_neon.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse2.c create mode 100644 vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse41.c create mode 100644 vendor/github.com/bep/gowebp/libwebp/encode.go create mode 100644 vendor/github.com/bep/gowebp/libwebp/webpoptions/options.go create mode 100644 vendor/github.com/bep/lazycache/.gitignore create mode 100644 vendor/github.com/bep/lazycache/LICENSE create mode 100644 vendor/github.com/bep/lazycache/README.md create mode 100644 vendor/github.com/bep/lazycache/codecov.yml create mode 100644 vendor/github.com/bep/lazycache/lazycache.go create mode 100644 vendor/github.com/bep/logg/.gitignore create mode 100644 vendor/github.com/bep/logg/LICENSE create mode 100644 vendor/github.com/bep/logg/README.md create mode 100644 vendor/github.com/bep/logg/doc.go create mode 100644 vendor/github.com/bep/logg/entry.go create mode 100644 vendor/github.com/bep/logg/handler.go create mode 100644 vendor/github.com/bep/logg/handlers/multi/multi.go create mode 100644 vendor/github.com/bep/logg/interfaces.go create mode 100644 vendor/github.com/bep/logg/levels.go create mode 100644 vendor/github.com/bep/logg/logger.go 
create mode 100644 vendor/github.com/bep/logg/objectpools.go create mode 100644 vendor/github.com/bep/logg/stack.go create mode 100644 vendor/github.com/bep/tmc/.gitignore create mode 100644 vendor/github.com/bep/tmc/LICENSE create mode 100644 vendor/github.com/bep/tmc/README.md create mode 100644 vendor/github.com/bep/tmc/adapters.go create mode 100644 vendor/github.com/bep/tmc/codec.go create mode 100644 vendor/github.com/bep/tmc/codecov.yml create mode 100644 vendor/github.com/blevesearch/bleve/.gitignore create mode 100644 vendor/github.com/blevesearch/bleve/.travis.yml create mode 100644 vendor/github.com/blevesearch/bleve/CONTRIBUTING.md create mode 100644 vendor/github.com/blevesearch/bleve/LICENSE create mode 100644 vendor/github.com/blevesearch/bleve/README.md create mode 100644 vendor/github.com/blevesearch/bleve/analysis/analyzer/standard/standard.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/datetime/flexible/flexible.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/datetime/optional/optional.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/freq.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/lang/en/analyzer_en.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/lang/en/possessive_filter_en.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/lang/en/stemmer_en_snowball.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_filter_en.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_words_en.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/test_words.txt create mode 100644 vendor/github.com/blevesearch/bleve/analysis/token/lowercase/lowercase.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/token/porter/porter.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/token/stop/stop.go create mode 100644 
vendor/github.com/blevesearch/bleve/analysis/tokenizer/unicode/unicode.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/tokenmap.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/type.go create mode 100644 vendor/github.com/blevesearch/bleve/analysis/util.go create mode 100644 vendor/github.com/blevesearch/bleve/builder.go create mode 100644 vendor/github.com/blevesearch/bleve/config.go create mode 100644 vendor/github.com/blevesearch/bleve/config_app.go create mode 100644 vendor/github.com/blevesearch/bleve/config_disk.go create mode 100644 vendor/github.com/blevesearch/bleve/doc.go create mode 100644 vendor/github.com/blevesearch/bleve/document/document.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_boolean.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_composite.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_datetime.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_geopoint.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_numeric.go create mode 100644 vendor/github.com/blevesearch/bleve/document/field_text.go create mode 100644 vendor/github.com/blevesearch/bleve/document/indexing_options.go create mode 100644 vendor/github.com/blevesearch/bleve/error.go create mode 100644 vendor/github.com/blevesearch/bleve/geo/README.md create mode 100644 vendor/github.com/blevesearch/bleve/geo/geo.go create mode 100644 vendor/github.com/blevesearch/bleve/geo/geo_dist.go create mode 100644 vendor/github.com/blevesearch/bleve/geo/geohash.go create mode 100644 vendor/github.com/blevesearch/bleve/geo/parse.go create mode 100644 vendor/github.com/blevesearch/bleve/geo/sloppy.go create mode 100644 vendor/github.com/blevesearch/bleve/index.go create mode 100644 vendor/github.com/blevesearch/bleve/index/analysis.go create mode 100644 
vendor/github.com/blevesearch/bleve/index/field_cache.go create mode 100644 vendor/github.com/blevesearch/bleve/index/index.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/README.md create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/builder.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/event.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/introducer.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/merge.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/merge_plan.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/sort.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/optimize.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/persister.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/rollback.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/scorch.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/empty.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/int.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/plugin.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/regexp.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/segment.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment/unadorned.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/segment_plugin.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_dict.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_doc.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_tfr.go create mode 100644 
vendor/github.com/blevesearch/bleve/index/scorch/snapshot_segment.go create mode 100644 vendor/github.com/blevesearch/bleve/index/scorch/stats.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/batch.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/boltdb/iterator.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/boltdb/reader.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/boltdb/stats.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/boltdb/store.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/boltdb/writer.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/gtreap/iterator.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/gtreap/reader.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/gtreap/store.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/gtreap/writer.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/kvstore.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/merge.go create mode 100644 vendor/github.com/blevesearch/bleve/index/store/multiget.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/analysis.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/benchmark_all.sh create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/dump.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/field_dict.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/index_reader.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/reader.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/row.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/row_merge.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/stats.go create mode 100644 
vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.pb.go create mode 100644 vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.proto create mode 100644 vendor/github.com/blevesearch/bleve/index_alias.go create mode 100644 vendor/github.com/blevesearch/bleve/index_alias_impl.go create mode 100644 vendor/github.com/blevesearch/bleve/index_impl.go create mode 100644 vendor/github.com/blevesearch/bleve/index_meta.go create mode 100644 vendor/github.com/blevesearch/bleve/index_stats.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/analysis.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/document.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/field.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/index.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/mapping.go create mode 100644 vendor/github.com/blevesearch/bleve/mapping/reflect.go create mode 100644 vendor/github.com/blevesearch/bleve/numeric/bin.go create mode 100644 vendor/github.com/blevesearch/bleve/numeric/float.go create mode 100644 vendor/github.com/blevesearch/bleve/numeric/prefix_coded.go create mode 100644 vendor/github.com/blevesearch/bleve/query.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/analyzer.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/cache.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/char_filter.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/datetime_parser.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/fragment_formatter.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/fragmenter.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/highlighter.go create mode 100644 
vendor/github.com/blevesearch/bleve/registry/index_type.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/registry.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/store.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/token_filter.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/token_maps.go create mode 100644 vendor/github.com/blevesearch/bleve/registry/tokenizer.go create mode 100644 vendor/github.com/blevesearch/bleve/search.go create mode 100644 vendor/github.com/blevesearch/bleve/search/collector.go create mode 100644 vendor/github.com/blevesearch/bleve/search/collector/heap.go create mode 100644 vendor/github.com/blevesearch/bleve/search/collector/list.go create mode 100644 vendor/github.com/blevesearch/bleve/search/collector/slice.go create mode 100644 vendor/github.com/blevesearch/bleve/search/collector/topn.go create mode 100644 vendor/github.com/blevesearch/bleve/search/explanation.go create mode 100644 vendor/github.com/blevesearch/bleve/search/facet/benchmark_data.txt create mode 100644 vendor/github.com/blevesearch/bleve/search/facet/facet_builder_datetime.go create mode 100644 vendor/github.com/blevesearch/bleve/search/facet/facet_builder_numeric.go create mode 100644 vendor/github.com/blevesearch/bleve/search/facet/facet_builder_terms.go create mode 100644 vendor/github.com/blevesearch/bleve/search/facets_builder.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/format/html/html.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/fragmenter/simple/simple.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/highlighter.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/highlighter/html/html.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/fragment_scorer_simple.go create mode 100644 
vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/highlighter_simple.go create mode 100644 vendor/github.com/blevesearch/bleve/search/highlight/term_locations.go create mode 100644 vendor/github.com/blevesearch/bleve/search/levenshtein.go create mode 100644 vendor/github.com/blevesearch/bleve/search/pool.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/bool_field.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/boolean.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/boost.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/conjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/date_range.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/disjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/docid.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/fuzzy.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/geo_boundingbox.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/geo_boundingpolygon.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/geo_distance.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/match.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/match_all.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/match_none.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/match_phrase.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/multi_phrase.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/numeric_range.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/phrase.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/prefix.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/query.go create mode 100644 
vendor/github.com/blevesearch/bleve/search/query/query_string.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/query_string.y create mode 100644 vendor/github.com/blevesearch/bleve/search/query/query_string.y.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/query_string_lex.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/query_string_parser.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/regexp.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/term.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/term_range.go create mode 100644 vendor/github.com/blevesearch/bleve/search/query/wildcard.go create mode 100644 vendor/github.com/blevesearch/bleve/search/scorer/scorer_conjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/scorer/scorer_constant.go create mode 100644 vendor/github.com/blevesearch/bleve/search/scorer/scorer_disjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/scorer/scorer_term.go create mode 100644 vendor/github.com/blevesearch/bleve/search/scorer/sqrt_cache.go create mode 100644 vendor/github.com/blevesearch/bleve/search/search.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/ordered_searchers_list.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_boolean.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_conjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_heap.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_slice.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_docid.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_filter.go create mode 
100644 vendor/github.com/blevesearch/bleve/search/searcher/search_fuzzy.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_geoboundingbox.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_geopointdistance.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_geopolygon.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_match_all.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_match_none.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_multi_term.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_numeric_range.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_phrase.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_regexp.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_term.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_term_prefix.go create mode 100644 vendor/github.com/blevesearch/bleve/search/searcher/search_term_range.go create mode 100644 vendor/github.com/blevesearch/bleve/search/sort.go create mode 100644 vendor/github.com/blevesearch/bleve/search/util.go create mode 100644 vendor/github.com/blevesearch/bleve/size/sizes.go create mode 100644 vendor/github.com/blevesearch/go-porterstemmer/.gitignore create mode 100644 vendor/github.com/blevesearch/go-porterstemmer/.travis.yml create mode 100644 vendor/github.com/blevesearch/go-porterstemmer/LICENSE create mode 100644 vendor/github.com/blevesearch/go-porterstemmer/README.md create mode 100644 vendor/github.com/blevesearch/go-porterstemmer/porterstemmer.go create mode 100644 vendor/github.com/blevesearch/mmap-go/.gitignore create mode 100644 vendor/github.com/blevesearch/mmap-go/.travis.yml create mode 100644 vendor/github.com/blevesearch/mmap-go/LICENSE create mode 100644 
vendor/github.com/blevesearch/mmap-go/README.md create mode 100644 vendor/github.com/blevesearch/mmap-go/mmap.go create mode 100644 vendor/github.com/blevesearch/mmap-go/mmap_unix.go create mode 100644 vendor/github.com/blevesearch/mmap-go/mmap_windows.go create mode 100644 vendor/github.com/blevesearch/segment/.gitignore create mode 100644 vendor/github.com/blevesearch/segment/.travis.yml create mode 100644 vendor/github.com/blevesearch/segment/LICENSE create mode 100644 vendor/github.com/blevesearch/segment/README.md create mode 100644 vendor/github.com/blevesearch/segment/doc.go create mode 100644 vendor/github.com/blevesearch/segment/segment.go create mode 100644 vendor/github.com/blevesearch/segment/segment_fuzz.go create mode 100644 vendor/github.com/blevesearch/segment/segment_words.go create mode 100644 vendor/github.com/blevesearch/segment/segment_words.rl create mode 100644 vendor/github.com/blevesearch/segment/segment_words_prod.go create mode 100644 vendor/github.com/blevesearch/snowballstem/COPYING create mode 100644 vendor/github.com/blevesearch/snowballstem/README.md create mode 100644 vendor/github.com/blevesearch/snowballstem/among.go create mode 100644 vendor/github.com/blevesearch/snowballstem/english/english_stemmer.go create mode 100644 vendor/github.com/blevesearch/snowballstem/env.go create mode 100644 vendor/github.com/blevesearch/snowballstem/gen.go create mode 100644 vendor/github.com/blevesearch/snowballstem/util.go create mode 100644 vendor/github.com/blevesearch/zap/v11/.gitignore create mode 100644 vendor/github.com/blevesearch/zap/v11/LICENSE create mode 100644 vendor/github.com/blevesearch/zap/v11/README.md create mode 100644 vendor/github.com/blevesearch/zap/v11/build.go create mode 100644 vendor/github.com/blevesearch/zap/v11/contentcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v11/count.go create mode 100644 vendor/github.com/blevesearch/zap/v11/dict.go create mode 100644 
vendor/github.com/blevesearch/zap/v11/docvalues.go create mode 100644 vendor/github.com/blevesearch/zap/v11/enumerator.go create mode 100644 vendor/github.com/blevesearch/zap/v11/intcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v11/merge.go create mode 100644 vendor/github.com/blevesearch/zap/v11/new.go create mode 100644 vendor/github.com/blevesearch/zap/v11/plugin.go create mode 100644 vendor/github.com/blevesearch/zap/v11/posting.go create mode 100644 vendor/github.com/blevesearch/zap/v11/read.go create mode 100644 vendor/github.com/blevesearch/zap/v11/segment.go create mode 100644 vendor/github.com/blevesearch/zap/v11/write.go create mode 100644 vendor/github.com/blevesearch/zap/v11/zap.md create mode 100644 vendor/github.com/blevesearch/zap/v12/.gitignore create mode 100644 vendor/github.com/blevesearch/zap/v12/LICENSE create mode 100644 vendor/github.com/blevesearch/zap/v12/README.md create mode 100644 vendor/github.com/blevesearch/zap/v12/build.go create mode 100644 vendor/github.com/blevesearch/zap/v12/chunk.go create mode 100644 vendor/github.com/blevesearch/zap/v12/contentcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v12/count.go create mode 100644 vendor/github.com/blevesearch/zap/v12/dict.go create mode 100644 vendor/github.com/blevesearch/zap/v12/docvalues.go create mode 100644 vendor/github.com/blevesearch/zap/v12/enumerator.go create mode 100644 vendor/github.com/blevesearch/zap/v12/intDecoder.go create mode 100644 vendor/github.com/blevesearch/zap/v12/intcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v12/merge.go create mode 100644 vendor/github.com/blevesearch/zap/v12/new.go create mode 100644 vendor/github.com/blevesearch/zap/v12/plugin.go create mode 100644 vendor/github.com/blevesearch/zap/v12/posting.go create mode 100644 vendor/github.com/blevesearch/zap/v12/read.go create mode 100644 vendor/github.com/blevesearch/zap/v12/segment.go create mode 100644 vendor/github.com/blevesearch/zap/v12/write.go 
create mode 100644 vendor/github.com/blevesearch/zap/v12/zap.md create mode 100644 vendor/github.com/blevesearch/zap/v13/.gitignore create mode 100644 vendor/github.com/blevesearch/zap/v13/LICENSE create mode 100644 vendor/github.com/blevesearch/zap/v13/README.md create mode 100644 vendor/github.com/blevesearch/zap/v13/build.go create mode 100644 vendor/github.com/blevesearch/zap/v13/chunk.go create mode 100644 vendor/github.com/blevesearch/zap/v13/contentcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v13/count.go create mode 100644 vendor/github.com/blevesearch/zap/v13/dict.go create mode 100644 vendor/github.com/blevesearch/zap/v13/docvalues.go create mode 100644 vendor/github.com/blevesearch/zap/v13/enumerator.go create mode 100644 vendor/github.com/blevesearch/zap/v13/intDecoder.go create mode 100644 vendor/github.com/blevesearch/zap/v13/intcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v13/merge.go create mode 100644 vendor/github.com/blevesearch/zap/v13/new.go create mode 100644 vendor/github.com/blevesearch/zap/v13/plugin.go create mode 100644 vendor/github.com/blevesearch/zap/v13/posting.go create mode 100644 vendor/github.com/blevesearch/zap/v13/read.go create mode 100644 vendor/github.com/blevesearch/zap/v13/segment.go create mode 100644 vendor/github.com/blevesearch/zap/v13/write.go create mode 100644 vendor/github.com/blevesearch/zap/v13/zap.md create mode 100644 vendor/github.com/blevesearch/zap/v14/.gitignore create mode 100644 vendor/github.com/blevesearch/zap/v14/LICENSE create mode 100644 vendor/github.com/blevesearch/zap/v14/README.md create mode 100644 vendor/github.com/blevesearch/zap/v14/build.go create mode 100644 vendor/github.com/blevesearch/zap/v14/chunk.go create mode 100644 vendor/github.com/blevesearch/zap/v14/contentcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v14/count.go create mode 100644 vendor/github.com/blevesearch/zap/v14/dict.go create mode 100644 
vendor/github.com/blevesearch/zap/v14/docvalues.go create mode 100644 vendor/github.com/blevesearch/zap/v14/enumerator.go create mode 100644 vendor/github.com/blevesearch/zap/v14/intDecoder.go create mode 100644 vendor/github.com/blevesearch/zap/v14/intcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v14/merge.go create mode 100644 vendor/github.com/blevesearch/zap/v14/new.go create mode 100644 vendor/github.com/blevesearch/zap/v14/plugin.go create mode 100644 vendor/github.com/blevesearch/zap/v14/posting.go create mode 100644 vendor/github.com/blevesearch/zap/v14/read.go create mode 100644 vendor/github.com/blevesearch/zap/v14/segment.go create mode 100644 vendor/github.com/blevesearch/zap/v14/write.go create mode 100644 vendor/github.com/blevesearch/zap/v14/zap.md create mode 100644 vendor/github.com/blevesearch/zap/v15/.gitignore create mode 100644 vendor/github.com/blevesearch/zap/v15/LICENSE create mode 100644 vendor/github.com/blevesearch/zap/v15/README.md create mode 100644 vendor/github.com/blevesearch/zap/v15/build.go create mode 100644 vendor/github.com/blevesearch/zap/v15/chunk.go create mode 100644 vendor/github.com/blevesearch/zap/v15/contentcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v15/count.go create mode 100644 vendor/github.com/blevesearch/zap/v15/dict.go create mode 100644 vendor/github.com/blevesearch/zap/v15/docvalues.go create mode 100644 vendor/github.com/blevesearch/zap/v15/enumerator.go create mode 100644 vendor/github.com/blevesearch/zap/v15/intDecoder.go create mode 100644 vendor/github.com/blevesearch/zap/v15/intcoder.go create mode 100644 vendor/github.com/blevesearch/zap/v15/merge.go create mode 100644 vendor/github.com/blevesearch/zap/v15/new.go create mode 100644 vendor/github.com/blevesearch/zap/v15/plugin.go create mode 100644 vendor/github.com/blevesearch/zap/v15/posting.go create mode 100644 vendor/github.com/blevesearch/zap/v15/read.go create mode 100644 
vendor/github.com/blevesearch/zap/v15/segment.go create mode 100644 vendor/github.com/blevesearch/zap/v15/write.go create mode 100644 vendor/github.com/blevesearch/zap/v15/zap.md create mode 100644 vendor/github.com/cenkalti/backoff/v4/.gitignore create mode 100644 vendor/github.com/cenkalti/backoff/v4/.travis.yml create mode 100644 vendor/github.com/cenkalti/backoff/v4/LICENSE create mode 100644 vendor/github.com/cenkalti/backoff/v4/README.md create mode 100644 vendor/github.com/cenkalti/backoff/v4/backoff.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/context.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/exponential.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/retry.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/ticker.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/timer.go create mode 100644 vendor/github.com/cenkalti/backoff/v4/tries.go create mode 100644 vendor/github.com/clbanning/mxj/v2/.travis.yml create mode 100644 vendor/github.com/clbanning/mxj/v2/LICENSE create mode 100644 vendor/github.com/clbanning/mxj/v2/anyxml.go create mode 100644 vendor/github.com/clbanning/mxj/v2/atomFeedString.xml create mode 100644 vendor/github.com/clbanning/mxj/v2/doc.go create mode 100644 vendor/github.com/clbanning/mxj/v2/escapechars.go create mode 100644 vendor/github.com/clbanning/mxj/v2/exists.go create mode 100644 vendor/github.com/clbanning/mxj/v2/files.go create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test.badjson create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test.badxml create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test.json create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test.xml create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test_dup.json create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test_dup.xml create mode 100644 vendor/github.com/clbanning/mxj/v2/files_test_indent.json create mode 100644 
vendor/github.com/clbanning/mxj/v2/files_test_indent.xml create mode 100644 vendor/github.com/clbanning/mxj/v2/gob.go create mode 100644 vendor/github.com/clbanning/mxj/v2/json.go create mode 100644 vendor/github.com/clbanning/mxj/v2/keyvalues.go create mode 100644 vendor/github.com/clbanning/mxj/v2/leafnode.go create mode 100644 vendor/github.com/clbanning/mxj/v2/misc.go create mode 100644 vendor/github.com/clbanning/mxj/v2/mxj.go create mode 100644 vendor/github.com/clbanning/mxj/v2/newmap.go create mode 100644 vendor/github.com/clbanning/mxj/v2/readme.md create mode 100644 vendor/github.com/clbanning/mxj/v2/remove.go create mode 100644 vendor/github.com/clbanning/mxj/v2/rename.go create mode 100644 vendor/github.com/clbanning/mxj/v2/set.go create mode 100644 vendor/github.com/clbanning/mxj/v2/setfieldsep.go create mode 100644 vendor/github.com/clbanning/mxj/v2/songtext.xml create mode 100644 vendor/github.com/clbanning/mxj/v2/strict.go create mode 100644 vendor/github.com/clbanning/mxj/v2/struct.go create mode 100644 vendor/github.com/clbanning/mxj/v2/updatevalues.go create mode 100644 vendor/github.com/clbanning/mxj/v2/xml.go create mode 100644 vendor/github.com/clbanning/mxj/v2/xmlseq.go create mode 100644 vendor/github.com/clbanning/mxj/v2/xmlseq2.go create mode 100644 vendor/github.com/cli/safeexec/LICENSE create mode 100644 vendor/github.com/cli/safeexec/README.md create mode 100644 vendor/github.com/cli/safeexec/lookpath.go create mode 100644 vendor/github.com/cli/safeexec/lookpath_1.18.go create mode 100644 vendor/github.com/cli/safeexec/lookpath_windows.go create mode 100644 vendor/github.com/couchbase/vellum/.travis.yml create mode 100644 vendor/github.com/couchbase/vellum/CONTRIBUTING.md create mode 100644 vendor/github.com/couchbase/vellum/LICENSE create mode 100644 vendor/github.com/couchbase/vellum/README.md create mode 100644 vendor/github.com/couchbase/vellum/automaton.go create mode 100644 vendor/github.com/couchbase/vellum/builder.go create mode 
100644 vendor/github.com/couchbase/vellum/common.go create mode 100644 vendor/github.com/couchbase/vellum/decoder_v1.go create mode 100644 vendor/github.com/couchbase/vellum/encoder_v1.go create mode 100644 vendor/github.com/couchbase/vellum/encoding.go create mode 100644 vendor/github.com/couchbase/vellum/fst.go create mode 100644 vendor/github.com/couchbase/vellum/fst_iterator.go create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/LICENSE create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/README.md create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/alphabet.go create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/dfa.go create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/levenshtein.go create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/levenshtein_nfa.go create mode 100644 vendor/github.com/couchbase/vellum/levenshtein/parametric_dfa.go create mode 100644 vendor/github.com/couchbase/vellum/merge_iterator.go create mode 100644 vendor/github.com/couchbase/vellum/pack.go create mode 100644 vendor/github.com/couchbase/vellum/regexp/compile.go create mode 100644 vendor/github.com/couchbase/vellum/regexp/dfa.go create mode 100644 vendor/github.com/couchbase/vellum/regexp/inst.go create mode 100644 vendor/github.com/couchbase/vellum/regexp/regexp.go create mode 100644 vendor/github.com/couchbase/vellum/regexp/sparse.go create mode 100644 vendor/github.com/couchbase/vellum/registry.go create mode 100644 vendor/github.com/couchbase/vellum/transducer.go create mode 100644 vendor/github.com/couchbase/vellum/utf8/utf8.go create mode 100644 vendor/github.com/couchbase/vellum/vellum.go create mode 100644 vendor/github.com/couchbase/vellum/vellum_mmap.go create mode 100644 vendor/github.com/couchbase/vellum/vellum_nommap.go create mode 100644 vendor/github.com/couchbase/vellum/writer.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/.gitignore create mode 100644 
vendor/github.com/dgrijalva/jwt-go/.travis.yml create mode 100644 vendor/github.com/dgrijalva/jwt-go/LICENSE create mode 100644 vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md create mode 100644 vendor/github.com/dgrijalva/jwt-go/README.md create mode 100644 vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md create mode 100644 vendor/github.com/dgrijalva/jwt-go/claims.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/doc.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/ecdsa.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/errors.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/hmac.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/map_claims.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/none.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/parser.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/rsa.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/rsa_pss.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/rsa_utils.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/signing_method.go create mode 100644 vendor/github.com/dgrijalva/jwt-go/token.go create mode 100644 vendor/github.com/disintegration/gift/.travis.yml create mode 100644 vendor/github.com/disintegration/gift/LICENSE create mode 100644 vendor/github.com/disintegration/gift/README.md create mode 100644 vendor/github.com/disintegration/gift/colors.go create mode 100644 vendor/github.com/disintegration/gift/convolution.go create mode 100644 vendor/github.com/disintegration/gift/effects.go create mode 100644 vendor/github.com/disintegration/gift/gift.go create mode 100644 vendor/github.com/disintegration/gift/pixels.go create mode 100644 vendor/github.com/disintegration/gift/rank.go create mode 100644 vendor/github.com/disintegration/gift/resize.go create mode 100644 vendor/github.com/disintegration/gift/transform.go create mode 100644 
vendor/github.com/disintegration/gift/utils.go create mode 100644 vendor/github.com/dlclark/regexp2/.gitignore create mode 100644 vendor/github.com/dlclark/regexp2/.travis.yml create mode 100644 vendor/github.com/dlclark/regexp2/ATTRIB create mode 100644 vendor/github.com/dlclark/regexp2/LICENSE create mode 100644 vendor/github.com/dlclark/regexp2/README.md create mode 100644 vendor/github.com/dlclark/regexp2/fastclock.go create mode 100644 vendor/github.com/dlclark/regexp2/match.go create mode 100644 vendor/github.com/dlclark/regexp2/regexp.go create mode 100644 vendor/github.com/dlclark/regexp2/replace.go create mode 100644 vendor/github.com/dlclark/regexp2/runner.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/charclass.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/code.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/escape.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/fuzz.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/parser.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/prefix.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/replacerdata.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/tree.go create mode 100644 vendor/github.com/dlclark/regexp2/syntax/writer.go create mode 100644 vendor/github.com/dlclark/regexp2/testoutput1 create mode 100644 vendor/github.com/fatih/color/LICENSE.md create mode 100644 vendor/github.com/fatih/color/README.md create mode 100644 vendor/github.com/fatih/color/color.go create mode 100644 vendor/github.com/fatih/color/color_windows.go create mode 100644 vendor/github.com/fatih/color/doc.go create mode 100644 vendor/github.com/frankban/quicktest/.gitignore create mode 100644 vendor/github.com/frankban/quicktest/.godocdown.template create mode 100644 vendor/github.com/frankban/quicktest/LICENSE create mode 100644 vendor/github.com/frankban/quicktest/README.md create mode 100644 
vendor/github.com/frankban/quicktest/checker.go create mode 100644 vendor/github.com/frankban/quicktest/checker_err.go create mode 100644 vendor/github.com/frankban/quicktest/comment.go create mode 100644 vendor/github.com/frankban/quicktest/doc.go create mode 100644 vendor/github.com/frankban/quicktest/error.go create mode 100644 vendor/github.com/frankban/quicktest/format.go create mode 100644 vendor/github.com/frankban/quicktest/iter.go create mode 100644 vendor/github.com/frankban/quicktest/mapiter.go create mode 100644 vendor/github.com/frankban/quicktest/patch.go create mode 100644 vendor/github.com/frankban/quicktest/patch_go1.14.go create mode 100644 vendor/github.com/frankban/quicktest/patch_go1.17.go create mode 100644 vendor/github.com/frankban/quicktest/quicktest.go create mode 100644 vendor/github.com/frankban/quicktest/report.go create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/.gitignore create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/LICENSE create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/README.md create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/binary.dat create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/binary.dat.snappy create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/rbuf.go create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/snap.go create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/unenc.txt create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/unenc.txt.snappy create mode 100644 vendor/github.com/glycerine/go-unsnap-stream/unsnap.go create mode 100644 vendor/github.com/go-logr/logr/.golangci.yaml create mode 100644 vendor/github.com/go-logr/logr/CHANGELOG.md create mode 100644 vendor/github.com/go-logr/logr/CONTRIBUTING.md create mode 100644 vendor/github.com/go-logr/logr/LICENSE create mode 100644 vendor/github.com/go-logr/logr/README.md create mode 100644 vendor/github.com/go-logr/logr/SECURITY.md create mode 100644 
vendor/github.com/go-logr/logr/context.go create mode 100644 vendor/github.com/go-logr/logr/context_noslog.go create mode 100644 vendor/github.com/go-logr/logr/context_slog.go create mode 100644 vendor/github.com/go-logr/logr/discard.go create mode 100644 vendor/github.com/go-logr/logr/funcr/funcr.go create mode 100644 vendor/github.com/go-logr/logr/funcr/slogsink.go create mode 100644 vendor/github.com/go-logr/logr/logr.go create mode 100644 vendor/github.com/go-logr/logr/sloghandler.go create mode 100644 vendor/github.com/go-logr/logr/slogr.go create mode 100644 vendor/github.com/go-logr/logr/slogsink.go create mode 100644 vendor/github.com/go-logr/stdr/LICENSE create mode 100644 vendor/github.com/go-logr/stdr/README.md create mode 100644 vendor/github.com/go-logr/stdr/stdr.go create mode 100644 vendor/github.com/go-openapi/analysis/.codecov.yml create mode 100644 vendor/github.com/go-openapi/analysis/.gitattributes create mode 100644 vendor/github.com/go-openapi/analysis/.gitignore create mode 100644 vendor/github.com/go-openapi/analysis/.golangci.yml create mode 100644 vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/analysis/LICENSE create mode 100644 vendor/github.com/go-openapi/analysis/README.md create mode 100644 vendor/github.com/go-openapi/analysis/analyzer.go create mode 100644 vendor/github.com/go-openapi/analysis/debug.go create mode 100644 vendor/github.com/go-openapi/analysis/doc.go create mode 100644 vendor/github.com/go-openapi/analysis/fixer.go create mode 100644 vendor/github.com/go-openapi/analysis/flatten.go create mode 100644 vendor/github.com/go-openapi/analysis/flatten_name.go create mode 100644 vendor/github.com/go-openapi/analysis/flatten_options.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/debug/debug.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go create mode 100644 
vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go create mode 100644 vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go create mode 100644 vendor/github.com/go-openapi/analysis/mixin.go create mode 100644 vendor/github.com/go-openapi/analysis/schema.go create mode 100644 vendor/github.com/go-openapi/errors/.gitattributes create mode 100644 vendor/github.com/go-openapi/errors/.gitignore create mode 100644 vendor/github.com/go-openapi/errors/.golangci.yml create mode 100644 vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/errors/LICENSE create mode 100644 vendor/github.com/go-openapi/errors/README.md create mode 100644 vendor/github.com/go-openapi/errors/api.go create mode 100644 vendor/github.com/go-openapi/errors/auth.go create mode 100644 vendor/github.com/go-openapi/errors/doc.go create mode 100644 vendor/github.com/go-openapi/errors/headers.go create mode 100644 vendor/github.com/go-openapi/errors/middleware.go create mode 100644 vendor/github.com/go-openapi/errors/parsing.go create mode 100644 vendor/github.com/go-openapi/errors/schema.go create mode 100644 vendor/github.com/go-openapi/jsonpointer/.editorconfig create mode 100644 vendor/github.com/go-openapi/jsonpointer/.gitignore create mode 100644 vendor/github.com/go-openapi/jsonpointer/.golangci.yml create mode 100644 vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/jsonpointer/LICENSE create mode 100644 vendor/github.com/go-openapi/jsonpointer/README.md create mode 100644 vendor/github.com/go-openapi/jsonpointer/pointer.go create mode 100644 
vendor/github.com/go-openapi/jsonreference/.gitignore create mode 100644 vendor/github.com/go-openapi/jsonreference/.golangci.yml create mode 100644 vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/jsonreference/LICENSE create mode 100644 vendor/github.com/go-openapi/jsonreference/README.md create mode 100644 vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go create mode 100644 vendor/github.com/go-openapi/jsonreference/reference.go create mode 100644 vendor/github.com/go-openapi/loads/.editorconfig create mode 100644 vendor/github.com/go-openapi/loads/.gitignore create mode 100644 vendor/github.com/go-openapi/loads/.golangci.yml create mode 100644 vendor/github.com/go-openapi/loads/.travis.yml create mode 100644 vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/loads/LICENSE create mode 100644 vendor/github.com/go-openapi/loads/README.md create mode 100644 vendor/github.com/go-openapi/loads/doc.go create mode 100644 vendor/github.com/go-openapi/loads/loaders.go create mode 100644 vendor/github.com/go-openapi/loads/options.go create mode 100644 vendor/github.com/go-openapi/loads/spec.go create mode 100644 vendor/github.com/go-openapi/runtime/.editorconfig create mode 100644 vendor/github.com/go-openapi/runtime/.gitattributes create mode 100644 vendor/github.com/go-openapi/runtime/.gitignore create mode 100644 vendor/github.com/go-openapi/runtime/.golangci.yml create mode 100644 vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/runtime/LICENSE create mode 100644 vendor/github.com/go-openapi/runtime/README.md create mode 100644 vendor/github.com/go-openapi/runtime/bytestream.go create mode 100644 vendor/github.com/go-openapi/runtime/client/auth_info.go create mode 100644 vendor/github.com/go-openapi/runtime/client/keepalive.go create mode 100644 
vendor/github.com/go-openapi/runtime/client/opentelemetry.go create mode 100644 vendor/github.com/go-openapi/runtime/client/opentracing.go create mode 100644 vendor/github.com/go-openapi/runtime/client/request.go create mode 100644 vendor/github.com/go-openapi/runtime/client/response.go create mode 100644 vendor/github.com/go-openapi/runtime/client/runtime.go create mode 100644 vendor/github.com/go-openapi/runtime/client_auth_info.go create mode 100644 vendor/github.com/go-openapi/runtime/client_operation.go create mode 100644 vendor/github.com/go-openapi/runtime/client_request.go create mode 100644 vendor/github.com/go-openapi/runtime/client_response.go create mode 100644 vendor/github.com/go-openapi/runtime/constants.go create mode 100644 vendor/github.com/go-openapi/runtime/csv.go create mode 100644 vendor/github.com/go-openapi/runtime/csv_options.go create mode 100644 vendor/github.com/go-openapi/runtime/discard.go create mode 100644 vendor/github.com/go-openapi/runtime/file.go create mode 100644 vendor/github.com/go-openapi/runtime/headers.go create mode 100644 vendor/github.com/go-openapi/runtime/interfaces.go create mode 100644 vendor/github.com/go-openapi/runtime/json.go create mode 100644 vendor/github.com/go-openapi/runtime/logger/logger.go create mode 100644 vendor/github.com/go-openapi/runtime/logger/standard.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/context.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE create mode 100644 vendor/github.com/go-openapi/runtime/middleware/denco/README.md create mode 100644 vendor/github.com/go-openapi/runtime/middleware/denco/router.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/denco/server.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/denco/util.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/doc.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/header/header.go create 
mode 100644 vendor/github.com/go-openapi/runtime/middleware/negotiate.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/not_implemented.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/operation.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/parameter.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/rapidoc.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/redoc.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/request.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/router.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/security.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/spec.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/swaggerui.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/ui_options.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/untyped/api.go create mode 100644 vendor/github.com/go-openapi/runtime/middleware/validation.go create mode 100644 vendor/github.com/go-openapi/runtime/request.go create mode 100644 vendor/github.com/go-openapi/runtime/security/authenticator.go create mode 100644 vendor/github.com/go-openapi/runtime/security/authorizer.go create mode 100644 vendor/github.com/go-openapi/runtime/statuses.go create mode 100644 vendor/github.com/go-openapi/runtime/text.go create mode 100644 vendor/github.com/go-openapi/runtime/values.go create mode 100644 vendor/github.com/go-openapi/runtime/xml.go create mode 100644 vendor/github.com/go-openapi/runtime/yamlpc/yaml.go create mode 100644 vendor/github.com/go-openapi/spec/.editorconfig create mode 100644 vendor/github.com/go-openapi/spec/.gitignore create mode 100644 vendor/github.com/go-openapi/spec/.golangci.yml create mode 100644 
vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/spec/LICENSE create mode 100644 vendor/github.com/go-openapi/spec/README.md create mode 100644 vendor/github.com/go-openapi/spec/cache.go create mode 100644 vendor/github.com/go-openapi/spec/contact_info.go create mode 100644 vendor/github.com/go-openapi/spec/debug.go create mode 100644 vendor/github.com/go-openapi/spec/embed.go create mode 100644 vendor/github.com/go-openapi/spec/errors.go create mode 100644 vendor/github.com/go-openapi/spec/expander.go create mode 100644 vendor/github.com/go-openapi/spec/external_docs.go create mode 100644 vendor/github.com/go-openapi/spec/header.go create mode 100644 vendor/github.com/go-openapi/spec/info.go create mode 100644 vendor/github.com/go-openapi/spec/items.go create mode 100644 vendor/github.com/go-openapi/spec/license.go create mode 100644 vendor/github.com/go-openapi/spec/normalizer.go create mode 100644 vendor/github.com/go-openapi/spec/normalizer_nonwindows.go create mode 100644 vendor/github.com/go-openapi/spec/normalizer_windows.go create mode 100644 vendor/github.com/go-openapi/spec/operation.go create mode 100644 vendor/github.com/go-openapi/spec/parameter.go create mode 100644 vendor/github.com/go-openapi/spec/path_item.go create mode 100644 vendor/github.com/go-openapi/spec/paths.go create mode 100644 vendor/github.com/go-openapi/spec/properties.go create mode 100644 vendor/github.com/go-openapi/spec/ref.go create mode 100644 vendor/github.com/go-openapi/spec/resolver.go create mode 100644 vendor/github.com/go-openapi/spec/response.go create mode 100644 vendor/github.com/go-openapi/spec/responses.go create mode 100644 vendor/github.com/go-openapi/spec/schema.go create mode 100644 vendor/github.com/go-openapi/spec/schema_loader.go create mode 100644 vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json create mode 100644 vendor/github.com/go-openapi/spec/schemas/v2/schema.json create mode 100644 
vendor/github.com/go-openapi/spec/security_scheme.go create mode 100644 vendor/github.com/go-openapi/spec/spec.go create mode 100644 vendor/github.com/go-openapi/spec/swagger.go create mode 100644 vendor/github.com/go-openapi/spec/tag.go create mode 100644 vendor/github.com/go-openapi/spec/url_go19.go create mode 100644 vendor/github.com/go-openapi/spec/validations.go create mode 100644 vendor/github.com/go-openapi/spec/xml_object.go create mode 100644 vendor/github.com/go-openapi/strfmt/.editorconfig create mode 100644 vendor/github.com/go-openapi/strfmt/.gitattributes create mode 100644 vendor/github.com/go-openapi/strfmt/.gitignore create mode 100644 vendor/github.com/go-openapi/strfmt/.golangci.yml create mode 100644 vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/strfmt/LICENSE create mode 100644 vendor/github.com/go-openapi/strfmt/README.md create mode 100644 vendor/github.com/go-openapi/strfmt/bson.go create mode 100644 vendor/github.com/go-openapi/strfmt/date.go create mode 100644 vendor/github.com/go-openapi/strfmt/default.go create mode 100644 vendor/github.com/go-openapi/strfmt/doc.go create mode 100644 vendor/github.com/go-openapi/strfmt/duration.go create mode 100644 vendor/github.com/go-openapi/strfmt/format.go create mode 100644 vendor/github.com/go-openapi/strfmt/time.go create mode 100644 vendor/github.com/go-openapi/strfmt/ulid.go create mode 100644 vendor/github.com/go-openapi/swag/.editorconfig create mode 100644 vendor/github.com/go-openapi/swag/.gitattributes create mode 100644 vendor/github.com/go-openapi/swag/.gitignore create mode 100644 vendor/github.com/go-openapi/swag/.golangci.yml create mode 100644 vendor/github.com/go-openapi/swag/BENCHMARK.md create mode 100644 vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/swag/LICENSE create mode 100644 vendor/github.com/go-openapi/swag/README.md create mode 100644 
vendor/github.com/go-openapi/swag/convert.go create mode 100644 vendor/github.com/go-openapi/swag/convert_types.go create mode 100644 vendor/github.com/go-openapi/swag/doc.go create mode 100644 vendor/github.com/go-openapi/swag/file.go create mode 100644 vendor/github.com/go-openapi/swag/initialism_index.go create mode 100644 vendor/github.com/go-openapi/swag/json.go create mode 100644 vendor/github.com/go-openapi/swag/loading.go create mode 100644 vendor/github.com/go-openapi/swag/name_lexem.go create mode 100644 vendor/github.com/go-openapi/swag/net.go create mode 100644 vendor/github.com/go-openapi/swag/path.go create mode 100644 vendor/github.com/go-openapi/swag/split.go create mode 100644 vendor/github.com/go-openapi/swag/string_bytes.go create mode 100644 vendor/github.com/go-openapi/swag/util.go create mode 100644 vendor/github.com/go-openapi/swag/yaml.go create mode 100644 vendor/github.com/go-openapi/validate/.editorconfig create mode 100644 vendor/github.com/go-openapi/validate/.gitattributes create mode 100644 vendor/github.com/go-openapi/validate/.gitignore create mode 100644 vendor/github.com/go-openapi/validate/.golangci.yml create mode 100644 vendor/github.com/go-openapi/validate/BENCHMARK.md create mode 100644 vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/go-openapi/validate/LICENSE create mode 100644 vendor/github.com/go-openapi/validate/README.md create mode 100644 vendor/github.com/go-openapi/validate/context.go create mode 100644 vendor/github.com/go-openapi/validate/debug.go create mode 100644 vendor/github.com/go-openapi/validate/default_validator.go create mode 100644 vendor/github.com/go-openapi/validate/doc.go create mode 100644 vendor/github.com/go-openapi/validate/example_validator.go create mode 100644 vendor/github.com/go-openapi/validate/formats.go create mode 100644 vendor/github.com/go-openapi/validate/helpers.go create mode 100644 
vendor/github.com/go-openapi/validate/object_validator.go create mode 100644 vendor/github.com/go-openapi/validate/options.go create mode 100644 vendor/github.com/go-openapi/validate/pools.go create mode 100644 vendor/github.com/go-openapi/validate/pools_debug.go create mode 100644 vendor/github.com/go-openapi/validate/result.go create mode 100644 vendor/github.com/go-openapi/validate/rexp.go create mode 100644 vendor/github.com/go-openapi/validate/schema.go create mode 100644 vendor/github.com/go-openapi/validate/schema_messages.go create mode 100644 vendor/github.com/go-openapi/validate/schema_option.go create mode 100644 vendor/github.com/go-openapi/validate/schema_props.go create mode 100644 vendor/github.com/go-openapi/validate/slice_validator.go create mode 100644 vendor/github.com/go-openapi/validate/spec.go create mode 100644 vendor/github.com/go-openapi/validate/spec_messages.go create mode 100644 vendor/github.com/go-openapi/validate/type.go create mode 100644 vendor/github.com/go-openapi/validate/update-fixtures.sh create mode 100644 vendor/github.com/go-openapi/validate/validator.go create mode 100644 vendor/github.com/go-openapi/validate/values.go create mode 100644 vendor/github.com/gobuffalo/flect/.gitignore create mode 100644 vendor/github.com/gobuffalo/flect/.gometalinter.json create mode 100644 vendor/github.com/gobuffalo/flect/LICENSE create mode 100644 vendor/github.com/gobuffalo/flect/Makefile create mode 100644 vendor/github.com/gobuffalo/flect/README.md create mode 100644 vendor/github.com/gobuffalo/flect/SHOULDERS.md create mode 100644 vendor/github.com/gobuffalo/flect/acronyms.go create mode 100644 vendor/github.com/gobuffalo/flect/camelize.go create mode 100644 vendor/github.com/gobuffalo/flect/capitalize.go create mode 100644 vendor/github.com/gobuffalo/flect/custom_data.go create mode 100644 vendor/github.com/gobuffalo/flect/dasherize.go create mode 100644 vendor/github.com/gobuffalo/flect/flect.go create mode 100644 
vendor/github.com/gobuffalo/flect/humanize.go create mode 100644 vendor/github.com/gobuffalo/flect/ident.go create mode 100644 vendor/github.com/gobuffalo/flect/lower_upper.go create mode 100644 vendor/github.com/gobuffalo/flect/ordinalize.go create mode 100644 vendor/github.com/gobuffalo/flect/pascalize.go create mode 100644 vendor/github.com/gobuffalo/flect/plural_rules.go create mode 100644 vendor/github.com/gobuffalo/flect/pluralize.go create mode 100644 vendor/github.com/gobuffalo/flect/rule.go create mode 100644 vendor/github.com/gobuffalo/flect/singular_rules.go create mode 100644 vendor/github.com/gobuffalo/flect/singularize.go create mode 100644 vendor/github.com/gobuffalo/flect/titleize.go create mode 100644 vendor/github.com/gobuffalo/flect/underscore.go create mode 100644 vendor/github.com/gobuffalo/flect/version.go create mode 100644 vendor/github.com/gobwas/glob/.gitignore create mode 100644 vendor/github.com/gobwas/glob/.travis.yml create mode 100644 vendor/github.com/gobwas/glob/LICENSE create mode 100644 vendor/github.com/gobwas/glob/bench.sh create mode 100644 vendor/github.com/gobwas/glob/compiler/compiler.go create mode 100644 vendor/github.com/gobwas/glob/glob.go create mode 100644 vendor/github.com/gobwas/glob/match/any.go create mode 100644 vendor/github.com/gobwas/glob/match/any_of.go create mode 100644 vendor/github.com/gobwas/glob/match/btree.go create mode 100644 vendor/github.com/gobwas/glob/match/contains.go create mode 100644 vendor/github.com/gobwas/glob/match/every_of.go create mode 100644 vendor/github.com/gobwas/glob/match/list.go create mode 100644 vendor/github.com/gobwas/glob/match/match.go create mode 100644 vendor/github.com/gobwas/glob/match/max.go create mode 100644 vendor/github.com/gobwas/glob/match/min.go create mode 100644 vendor/github.com/gobwas/glob/match/nothing.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_any.go create mode 100644 
vendor/github.com/gobwas/glob/match/prefix_suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/range.go create mode 100644 vendor/github.com/gobwas/glob/match/row.go create mode 100644 vendor/github.com/gobwas/glob/match/segments.go create mode 100644 vendor/github.com/gobwas/glob/match/single.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix_any.go create mode 100644 vendor/github.com/gobwas/glob/match/super.go create mode 100644 vendor/github.com/gobwas/glob/match/text.go create mode 100644 vendor/github.com/gobwas/glob/readme.md create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/ast.go create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/parser.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/lexer.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/token.go create mode 100644 vendor/github.com/gobwas/glob/syntax/syntax.go create mode 100644 vendor/github.com/gobwas/glob/util/runes/runes.go create mode 100644 vendor/github.com/gobwas/glob/util/strings/strings.go create mode 100644 vendor/github.com/gofrs/uuid/.gitignore create mode 100644 vendor/github.com/gofrs/uuid/LICENSE create mode 100644 vendor/github.com/gofrs/uuid/README.md create mode 100644 vendor/github.com/gofrs/uuid/codec.go create mode 100644 vendor/github.com/gofrs/uuid/fuzz.go create mode 100644 vendor/github.com/gofrs/uuid/generator.go create mode 100644 vendor/github.com/gofrs/uuid/sql.go create mode 100644 vendor/github.com/gofrs/uuid/uuid.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/LICENSE create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/bundle.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/bundlefs.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/doc.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/localizer.go create mode 100644 
vendor/github.com/gohugoio/go-i18n/v2/i18n/message.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/message_template.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/i18n/parse.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/doc.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/form.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/operands.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/rule.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/rule_gen.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/plural/rules.go create mode 100644 vendor/github.com/gohugoio/go-i18n/v2/internal/template.go create mode 100644 vendor/github.com/gohugoio/locales/.gitignore create mode 100644 vendor/github.com/gohugoio/locales/.travis.yml create mode 100644 vendor/github.com/gohugoio/locales/LICENSE create mode 100644 vendor/github.com/gohugoio/locales/README.md create mode 100644 vendor/github.com/gohugoio/locales/currency/currency.go create mode 100644 vendor/github.com/gohugoio/locales/logo.png create mode 100644 vendor/github.com/gohugoio/locales/rules.go create mode 100644 vendor/github.com/gohugoio/localescompressed/.gitignore create mode 100644 vendor/github.com/gohugoio/localescompressed/LICENSE create mode 100644 vendor/github.com/gohugoio/localescompressed/README.md create mode 100644 vendor/github.com/gohugoio/localescompressed/currencies.autogen.go create mode 100644 vendor/github.com/gohugoio/localescompressed/localen.go create mode 100644 vendor/github.com/gohugoio/localescompressed/locales.autogen.go create mode 100644 vendor/github.com/gohugoio/localescompressed/locales.go create mode 100644 vendor/github.com/golang/protobuf/AUTHORS create mode 100644 vendor/github.com/golang/protobuf/CONTRIBUTORS create mode 100644 vendor/github.com/golang/protobuf/LICENSE create mode 100644 
vendor/github.com/golang/protobuf/proto/buffer.go create mode 100644 vendor/github.com/golang/protobuf/proto/defaults.go create mode 100644 vendor/github.com/golang/protobuf/proto/deprecated.go create mode 100644 vendor/github.com/golang/protobuf/proto/discard.go create mode 100644 vendor/github.com/golang/protobuf/proto/extensions.go create mode 100644 vendor/github.com/golang/protobuf/proto/properties.go create mode 100644 vendor/github.com/golang/protobuf/proto/proto.go create mode 100644 vendor/github.com/golang/protobuf/proto/registry.go create mode 100644 vendor/github.com/golang/protobuf/proto/text_decode.go create mode 100644 vendor/github.com/golang/protobuf/proto/text_encode.go create mode 100644 vendor/github.com/golang/protobuf/proto/wire.go create mode 100644 vendor/github.com/golang/protobuf/proto/wrappers.go create mode 100644 vendor/github.com/golang/snappy/.gitignore create mode 100644 vendor/github.com/golang/snappy/AUTHORS create mode 100644 vendor/github.com/golang/snappy/CONTRIBUTORS create mode 100644 vendor/github.com/golang/snappy/LICENSE create mode 100644 vendor/github.com/golang/snappy/README create mode 100644 vendor/github.com/golang/snappy/decode.go create mode 100644 vendor/github.com/golang/snappy/decode_amd64.s create mode 100644 vendor/github.com/golang/snappy/decode_arm64.s create mode 100644 vendor/github.com/golang/snappy/decode_asm.go create mode 100644 vendor/github.com/golang/snappy/decode_other.go create mode 100644 vendor/github.com/golang/snappy/encode.go create mode 100644 vendor/github.com/golang/snappy/encode_amd64.s create mode 100644 vendor/github.com/golang/snappy/encode_arm64.s create mode 100644 vendor/github.com/golang/snappy/encode_asm.go create mode 100644 vendor/github.com/golang/snappy/encode_other.go create mode 100644 vendor/github.com/golang/snappy/snappy.go create mode 100644 vendor/github.com/google/go-cmp/LICENSE create mode 100644 vendor/github.com/google/go-cmp/cmp/cmpopts/equate.go create mode 100644 
vendor/github.com/google/go-cmp/cmp/cmpopts/ignore.go create mode 100644 vendor/github.com/google/go-cmp/cmp/cmpopts/sort.go create mode 100644 vendor/github.com/google/go-cmp/cmp/cmpopts/struct_filter.go create mode 100644 vendor/github.com/google/go-cmp/cmp/cmpopts/xform.go create mode 100644 vendor/github.com/google/go-cmp/cmp/compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/export.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/function/func.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/name.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/pointer.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/sort.go create mode 100644 vendor/github.com/google/go-cmp/cmp/options.go create mode 100644 vendor/github.com/google/go-cmp/cmp/path.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_references.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_reflect.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_slices.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_text.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_value.go create mode 100644 vendor/github.com/google/uuid/CHANGELOG.md create mode 100644 vendor/github.com/google/uuid/CONTRIBUTING.md create mode 100644 vendor/github.com/google/uuid/CONTRIBUTORS create mode 100644 vendor/github.com/google/uuid/LICENSE create mode 100644 vendor/github.com/google/uuid/README.md create mode 
100644 vendor/github.com/google/uuid/dce.go create mode 100644 vendor/github.com/google/uuid/doc.go create mode 100644 vendor/github.com/google/uuid/hash.go create mode 100644 vendor/github.com/google/uuid/marshal.go create mode 100644 vendor/github.com/google/uuid/node.go create mode 100644 vendor/github.com/google/uuid/node_js.go create mode 100644 vendor/github.com/google/uuid/node_net.go create mode 100644 vendor/github.com/google/uuid/null.go create mode 100644 vendor/github.com/google/uuid/sql.go create mode 100644 vendor/github.com/google/uuid/time.go create mode 100644 vendor/github.com/google/uuid/util.go create mode 100644 vendor/github.com/google/uuid/uuid.go create mode 100644 vendor/github.com/google/uuid/version1.go create mode 100644 vendor/github.com/google/uuid/version4.go create mode 100644 vendor/github.com/google/uuid/version6.go create mode 100644 vendor/github.com/google/uuid/version7.go create mode 100644 vendor/github.com/gorilla/schema/.editorconfig create mode 100644 vendor/github.com/gorilla/schema/.gitignore create mode 100644 vendor/github.com/gorilla/schema/LICENSE create mode 100644 vendor/github.com/gorilla/schema/Makefile create mode 100644 vendor/github.com/gorilla/schema/README.md create mode 100644 vendor/github.com/gorilla/schema/cache.go create mode 100644 vendor/github.com/gorilla/schema/converter.go create mode 100644 vendor/github.com/gorilla/schema/decoder.go create mode 100644 vendor/github.com/gorilla/schema/doc.go create mode 100644 vendor/github.com/gorilla/schema/encoder.go create mode 100644 vendor/github.com/hashicorp/golang-lru/v2/LICENSE create mode 100644 vendor/github.com/hashicorp/golang-lru/v2/internal/list.go create mode 100644 vendor/github.com/hashicorp/golang-lru/v2/simplelru/LICENSE_list create mode 100644 vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru.go create mode 100644 vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru_interface.go create mode 100644 
vendor/github.com/jdkato/prose/AUTHORS.md create mode 100644 vendor/github.com/jdkato/prose/LICENSE create mode 100644 vendor/github.com/jdkato/prose/internal/util/util.go create mode 100644 vendor/github.com/jdkato/prose/transform/title.go create mode 100644 vendor/github.com/jdkato/prose/transform/transform.go create mode 100644 vendor/github.com/jdkato/prose/transform/transform_fuzz.go create mode 100644 vendor/github.com/josharian/intern/README.md create mode 100644 vendor/github.com/josharian/intern/intern.go create mode 100644 vendor/github.com/josharian/intern/license.md create mode 100644 vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE create mode 100644 vendor/github.com/konsorten/go-windows-terminal-sequences/README.md create mode 100644 vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go create mode 100644 vendor/github.com/konsorten/go-windows-terminal-sequences/sequences_dummy.go create mode 100644 vendor/github.com/kr/pretty/.gitignore create mode 100644 vendor/github.com/kr/pretty/License create mode 100644 vendor/github.com/kr/pretty/Readme create mode 100644 vendor/github.com/kr/pretty/diff.go create mode 100644 vendor/github.com/kr/pretty/formatter.go create mode 100644 vendor/github.com/kr/pretty/pretty.go create mode 100644 vendor/github.com/kr/pretty/zero.go create mode 100644 vendor/github.com/kr/text/License create mode 100644 vendor/github.com/kr/text/Readme create mode 100644 vendor/github.com/kr/text/doc.go create mode 100644 vendor/github.com/kr/text/indent.go create mode 100644 vendor/github.com/kr/text/wrap.go create mode 100644 vendor/github.com/kyokomi/emoji/v2/.gitignore create mode 100644 vendor/github.com/kyokomi/emoji/v2/LICENSE create mode 100644 vendor/github.com/kyokomi/emoji/v2/README.md create mode 100644 vendor/github.com/kyokomi/emoji/v2/emoji.go create mode 100644 vendor/github.com/kyokomi/emoji/v2/emoji_codemap.go create mode 100644 vendor/github.com/mailru/easyjson/LICENSE create mode 
100644 vendor/github.com/mailru/easyjson/buffer/pool.go create mode 100644 vendor/github.com/mailru/easyjson/jlexer/bytestostr.go create mode 100644 vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go create mode 100644 vendor/github.com/mailru/easyjson/jlexer/error.go create mode 100644 vendor/github.com/mailru/easyjson/jlexer/lexer.go create mode 100644 vendor/github.com/mailru/easyjson/jwriter/writer.go create mode 100644 vendor/github.com/marekm4/color-extractor/LICENSE create mode 100644 vendor/github.com/marekm4/color-extractor/README.md create mode 100644 vendor/github.com/marekm4/color-extractor/color_extractor.go create mode 100644 vendor/github.com/mattn/go-colorable/LICENSE create mode 100644 vendor/github.com/mattn/go-colorable/README.md create mode 100644 vendor/github.com/mattn/go-colorable/colorable_appengine.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_others.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_windows.go create mode 100644 vendor/github.com/mattn/go-colorable/go.test.sh create mode 100644 vendor/github.com/mattn/go-colorable/noncolorable.go create mode 100644 vendor/github.com/mattn/go-isatty/LICENSE create mode 100644 vendor/github.com/mattn/go-isatty/README.md create mode 100644 vendor/github.com/mattn/go-isatty/doc.go create mode 100644 vendor/github.com/mattn/go-isatty/go.test.sh create mode 100644 vendor/github.com/mattn/go-isatty/isatty_bsd.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_others.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_plan9.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_solaris.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_tcgets.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_windows.go create mode 100644 vendor/github.com/mitchellh/hashstructure/LICENSE create mode 100644 vendor/github.com/mitchellh/hashstructure/README.md create mode 100644 
vendor/github.com/mitchellh/hashstructure/hashstructure.go create mode 100644 vendor/github.com/mitchellh/hashstructure/include.go create mode 100644 vendor/github.com/mitchellh/mapstructure/CHANGELOG.md create mode 100644 vendor/github.com/mitchellh/mapstructure/LICENSE create mode 100644 vendor/github.com/mitchellh/mapstructure/README.md create mode 100644 vendor/github.com/mitchellh/mapstructure/decode_hooks.go create mode 100644 vendor/github.com/mitchellh/mapstructure/error.go create mode 100644 vendor/github.com/mitchellh/mapstructure/mapstructure.go create mode 100644 vendor/github.com/mschoch/smat/.gitignore create mode 100644 vendor/github.com/mschoch/smat/.travis.yml create mode 100644 vendor/github.com/mschoch/smat/LICENSE create mode 100644 vendor/github.com/mschoch/smat/README.md create mode 100644 vendor/github.com/mschoch/smat/actionseq.go create mode 100644 vendor/github.com/mschoch/smat/smat.go create mode 100644 vendor/github.com/muesli/smartcrop/.gitignore create mode 100644 vendor/github.com/muesli/smartcrop/.travis.yml create mode 100644 vendor/github.com/muesli/smartcrop/LICENSE create mode 100644 vendor/github.com/muesli/smartcrop/README.md create mode 100644 vendor/github.com/muesli/smartcrop/debug.go create mode 100644 vendor/github.com/muesli/smartcrop/options/resizer.go create mode 100644 vendor/github.com/muesli/smartcrop/smartcrop.go create mode 100644 vendor/github.com/netlify/open-api/v2/LICENSE create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/access_token.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_add_member_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_membership.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_membership_capabilities.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_type.go create mode 
100644 vendor/github.com/netlify/open-api/v2/go/models/account_update_member_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_update_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/account_usage_capability.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/asset.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/asset_form.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/asset_public_signature.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/asset_signature.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/audit_log.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/audit_log_payload.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_hook.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_hook_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_log_msg.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_status.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/build_status_minutes.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/create_env_vars_params_body_items.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/deploy.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/deploy_files.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/deploy_key.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/deploy_site_capabilities.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/deployed_branch.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dev_server.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/models/dev_server_hook.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dev_server_hook_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_record.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_record_create.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_records.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_zone.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_zone_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/dns_zones.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/env_var.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/env_var_user.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/env_var_value.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/error.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/excluded_function_route.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/file.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/form.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/function.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/function_config.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/function_route.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/function_schedule.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/hook.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/hook_type.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/member.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/metadata.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/minify_options.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/models/payment_method.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/payment_method_data.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/plugin.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/plugin_params.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/plugin_run.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/plugin_run_all_of1.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/plugin_run_data.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/purge.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/repo_info.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/service.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/service_instance.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/set_env_var_value_params_body.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_default_hooks_data.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_function.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_processing_settings.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_processing_settings_html.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_processing_settings_images.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_setup.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/site_setup_all_of1.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/sni_certificate.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/snippet.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/split_test_setup.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/models/split_test_swagger.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/split_tests.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/submission.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/ticket.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_aggregate_config.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_aggregate_config_keys_items.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_config.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_config_action.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_config_action_config.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/traffic_rules_rate_limit_config.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/update_env_var_params_body.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/user.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/models/user_onboarding_progress.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/doc.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/netlify_client.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/add_member_to_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/add_member_to_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/cancel_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/cancel_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/cancel_site_deploy_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/cancel_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/configure_dns_for_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/configure_dns_for_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_deploy_key_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_deploy_key_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_dns_record_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_dns_record_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_dns_zone_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_dns_zone_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_env_vars_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_env_vars_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_hook_by_site_id_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_hook_by_site_id_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_plugin_run_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_plugin_run_responses.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_service_instance_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_service_instance_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_asset_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_asset_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_build_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_build_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_build_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_build_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_dev_server_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_dev_server_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_dev_server_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_dev_server_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_in_team_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_in_team_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_snippet_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_site_snippet_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_split_test_swagger_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_split_test_swagger_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_ticket_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/create_ticket_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_deploy_key_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_deploy_key_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_dns_record_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_dns_record_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_dns_zone_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_dns_zone_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_env_var_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_env_var_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_env_var_value_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_env_var_value_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_service_instance_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_service_instance_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_asset_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_asset_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_build_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_build_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_dev_server_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_dev_server_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_dev_servers_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_dev_servers_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_form_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_form_responses.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_snippet_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_site_snippet_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_submission_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/delete_submission_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/disable_split_test_swagger_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/disable_split_test_swagger_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/enable_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/enable_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/enable_split_test_swagger_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/enable_split_test_swagger_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/exchange_ticket_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/exchange_ticket_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_build_status_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_build_status_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_member_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_member_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_current_user_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_current_user_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_deploy_key_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_deploy_key_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_for_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_for_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_records_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_records_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_zone_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_zone_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_zones_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_dns_zones_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_env_var_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_env_var_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_env_vars_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_env_vars_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_individual_dns_record_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_individual_dns_record_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_latest_plugin_runs_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_latest_plugin_runs_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_services_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_services_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_asset_info_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_asset_info_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_asset_public_signature_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_asset_public_signature_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_build_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_build_hook_responses.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_build_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_build_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_dev_server_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_dev_server_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_dev_server_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_dev_server_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_env_vars_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_env_vars_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_file_by_path_name_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_file_by_path_name_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_metadata_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_metadata_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_snippet_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_site_snippet_responses.go 
create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_split_test_swagger_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_split_test_swagger_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_split_tests_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/get_split_tests_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_account_audit_events_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_account_audit_events_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_account_types_for_user_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_account_types_for_user_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_accounts_for_user_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_accounts_for_user_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_deploy_keys_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_deploy_keys_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_form_submission_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_form_submission_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_form_submissions_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_form_submissions_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_forms_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_forms_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_hook_types_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_hook_types_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_hooks_by_site_id_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_hooks_by_site_id_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_members_for_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_members_for_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_payment_methods_for_user_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_payment_methods_for_user_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_service_instances_for_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_service_instances_for_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_assets_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_assets_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_build_hooks_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_build_hooks_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_builds_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_builds_responses.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_deployed_branches_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_deployed_branches_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_deploys_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_deploys_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_dev_server_hooks_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_dev_server_hooks_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_dev_servers_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_dev_servers_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_files_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_files_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_forms_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_forms_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_snippets_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_snippets_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_submissions_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_site_submissions_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_sites_for_account_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_sites_for_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_sites_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/list_sites_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/lock_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/lock_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/mark_dev_server_activity_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/mark_dev_server_activity_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/notify_build_start_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/notify_build_start_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/operations_client.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/provision_site_tls_certificate_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/provision_site_tls_certificate_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/purge_cache_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/purge_cache_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/remove_account_member_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/remove_account_member_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/restore_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/restore_site_deploy_responses.go 
create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/rollback_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/rollback_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/search_site_functions_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/search_site_functions_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/set_env_var_value_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/set_env_var_value_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_instance_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_instance_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_manifest_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_manifest_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_service_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_site_tls_certificate_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_site_tls_certificate_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_ticket_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/show_ticket_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/transfer_dns_zone_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/transfer_dns_zone_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/unlink_site_repo_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/unlink_site_repo_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/unlock_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/unlock_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_account_member_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_account_member_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_account_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_account_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_env_var_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_env_var_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_plugin_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_plugin_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_service_instance_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_service_instance_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_asset_parameters.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_asset_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_build_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_build_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_build_log_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_build_log_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_deploy_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_deploy_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_dev_server_hook_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_dev_server_hook_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_metadata_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_metadata_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_snippet_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_site_snippet_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_split_test_swagger_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/update_split_test_swagger_responses.go create mode 100644 
vendor/github.com/netlify/open-api/v2/go/plumbing/operations/upload_deploy_file_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/upload_deploy_file_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/upload_deploy_function_parameters.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/plumbing/operations/upload_deploy_function_responses.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/assets.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/auth.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/context/context.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/deploy.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/deploy_keys.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/deploy_unix.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/deploy_windows.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/forms.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/functions_manifest.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/http/http.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/netlify_client.go create mode 100644 vendor/github.com/netlify/open-api/v2/go/porcelain/site.go create mode 100644 vendor/github.com/nilslice/jwt/LICENSE create mode 100644 vendor/github.com/nilslice/jwt/README.md create mode 100644 vendor/github.com/nilslice/jwt/doc.go create mode 100644 vendor/github.com/nilslice/jwt/jwt.go create mode 100644 vendor/github.com/oklog/ulid/.gitignore create mode 100644 vendor/github.com/oklog/ulid/.travis.yml create mode 100644 vendor/github.com/oklog/ulid/AUTHORS.md create mode 100644 vendor/github.com/oklog/ulid/CHANGELOG.md create mode 100644 vendor/github.com/oklog/ulid/CONTRIBUTING.md create mode 100644 
vendor/github.com/oklog/ulid/Gopkg.lock create mode 100644 vendor/github.com/oklog/ulid/Gopkg.toml create mode 100644 vendor/github.com/oklog/ulid/LICENSE create mode 100644 vendor/github.com/oklog/ulid/README.md create mode 100644 vendor/github.com/oklog/ulid/ulid.go create mode 100644 vendor/github.com/opentracing/opentracing-go/.gitignore create mode 100644 vendor/github.com/opentracing/opentracing-go/.travis.yml create mode 100644 vendor/github.com/opentracing/opentracing-go/CHANGELOG.md create mode 100644 vendor/github.com/opentracing/opentracing-go/LICENSE create mode 100644 vendor/github.com/opentracing/opentracing-go/Makefile create mode 100644 vendor/github.com/opentracing/opentracing-go/README.md create mode 100644 vendor/github.com/opentracing/opentracing-go/ext.go create mode 100644 vendor/github.com/opentracing/opentracing-go/ext/field.go create mode 100644 vendor/github.com/opentracing/opentracing-go/ext/tags.go create mode 100644 vendor/github.com/opentracing/opentracing-go/globaltracer.go create mode 100644 vendor/github.com/opentracing/opentracing-go/gocontext.go create mode 100644 vendor/github.com/opentracing/opentracing-go/log/field.go create mode 100644 vendor/github.com/opentracing/opentracing-go/log/util.go create mode 100644 vendor/github.com/opentracing/opentracing-go/noop.go create mode 100644 vendor/github.com/opentracing/opentracing-go/propagation.go create mode 100644 vendor/github.com/opentracing/opentracing-go/span.go create mode 100644 vendor/github.com/opentracing/opentracing-go/tracer.go create mode 100644 vendor/github.com/pbnjay/memory/LICENSE create mode 100644 vendor/github.com/pbnjay/memory/README.md create mode 100644 vendor/github.com/pbnjay/memory/doc.go create mode 100644 vendor/github.com/pbnjay/memory/memory_bsd.go create mode 100644 vendor/github.com/pbnjay/memory/memory_darwin.go create mode 100644 vendor/github.com/pbnjay/memory/memory_linux.go create mode 100644 vendor/github.com/pbnjay/memory/memory_windows.go 
create mode 100644 vendor/github.com/pbnjay/memory/memsysctl.go create mode 100644 vendor/github.com/pbnjay/memory/stub.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/.dockerignore create mode 100644 vendor/github.com/pelletier/go-toml/v2/.gitattributes create mode 100644 vendor/github.com/pelletier/go-toml/v2/.gitignore create mode 100644 vendor/github.com/pelletier/go-toml/v2/.golangci.toml create mode 100644 vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml create mode 100644 vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md create mode 100644 vendor/github.com/pelletier/go-toml/v2/Dockerfile create mode 100644 vendor/github.com/pelletier/go-toml/v2/LICENSE create mode 100644 vendor/github.com/pelletier/go-toml/v2/README.md create mode 100644 vendor/github.com/pelletier/go-toml/v2/SECURITY.md create mode 100644 vendor/github.com/pelletier/go-toml/v2/ci.sh create mode 100644 vendor/github.com/pelletier/go-toml/v2/decode.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/doc.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/errors.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/characters/utf8.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/localtime.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/marshaler.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/strict.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/toml.abnf create mode 100644 
vendor/github.com/pelletier/go-toml/v2/types.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unmarshaler.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/ast.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/builder.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/doc.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/kind.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/parser.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/scanner.go create mode 100644 vendor/github.com/pelletier/go-toml/v2/unstable/unmarshaler.go create mode 100644 vendor/github.com/philhofer/fwd/LICENSE.md create mode 100644 vendor/github.com/philhofer/fwd/README.md create mode 100644 vendor/github.com/philhofer/fwd/reader.go create mode 100644 vendor/github.com/philhofer/fwd/writer.go create mode 100644 vendor/github.com/philhofer/fwd/writer_appengine.go create mode 100644 vendor/github.com/philhofer/fwd/writer_unsafe.go create mode 100644 vendor/github.com/pkg/errors/.gitignore create mode 100644 vendor/github.com/pkg/errors/.travis.yml create mode 100644 vendor/github.com/pkg/errors/LICENSE create mode 100644 vendor/github.com/pkg/errors/Makefile create mode 100644 vendor/github.com/pkg/errors/README.md create mode 100644 vendor/github.com/pkg/errors/appveyor.yml create mode 100644 vendor/github.com/pkg/errors/errors.go create mode 100644 vendor/github.com/pkg/errors/go113.go create mode 100644 vendor/github.com/pkg/errors/stack.go create mode 100644 vendor/github.com/rogpeppe/go-internal/LICENSE create mode 100644 vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go create mode 100644 vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go create mode 100644 vendor/github.com/rsc/goversion/LICENSE create mode 100644 vendor/github.com/rsc/goversion/version/asm.go create mode 100644 vendor/github.com/rsc/goversion/version/exe.go create mode 100644 
vendor/github.com/rsc/goversion/version/read.go create mode 100644 vendor/github.com/rwcarlsen/goexif/LICENSE create mode 100644 vendor/github.com/rwcarlsen/goexif/exif/README.md create mode 100644 vendor/github.com/rwcarlsen/goexif/exif/exif.go create mode 100644 vendor/github.com/rwcarlsen/goexif/exif/fields.go create mode 100644 vendor/github.com/rwcarlsen/goexif/exif/sample1.jpg create mode 100644 vendor/github.com/rwcarlsen/goexif/tiff/sample1.tif create mode 100644 vendor/github.com/rwcarlsen/goexif/tiff/tag.go create mode 100644 vendor/github.com/rwcarlsen/goexif/tiff/tiff.go create mode 100644 vendor/github.com/sirupsen/logrus/.gitignore create mode 100644 vendor/github.com/sirupsen/logrus/.golangci.yml create mode 100644 vendor/github.com/sirupsen/logrus/.travis.yml create mode 100644 vendor/github.com/sirupsen/logrus/CHANGELOG.md create mode 100644 vendor/github.com/sirupsen/logrus/LICENSE create mode 100644 vendor/github.com/sirupsen/logrus/README.md create mode 100644 vendor/github.com/sirupsen/logrus/alt_exit.go create mode 100644 vendor/github.com/sirupsen/logrus/appveyor.yml create mode 100644 vendor/github.com/sirupsen/logrus/doc.go create mode 100644 vendor/github.com/sirupsen/logrus/entry.go create mode 100644 vendor/github.com/sirupsen/logrus/exported.go create mode 100644 vendor/github.com/sirupsen/logrus/formatter.go create mode 100644 vendor/github.com/sirupsen/logrus/hooks.go create mode 100644 vendor/github.com/sirupsen/logrus/json_formatter.go create mode 100644 vendor/github.com/sirupsen/logrus/logger.go create mode 100644 vendor/github.com/sirupsen/logrus/logrus.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_appengine.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_bsd.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_js.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go create mode 100644 
vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_solaris.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_unix.go create mode 100644 vendor/github.com/sirupsen/logrus/terminal_check_windows.go create mode 100644 vendor/github.com/sirupsen/logrus/text_formatter.go create mode 100644 vendor/github.com/sirupsen/logrus/writer.go create mode 100644 vendor/github.com/spf13/afero/.gitignore create mode 100644 vendor/github.com/spf13/afero/LICENSE.txt create mode 100644 vendor/github.com/spf13/afero/README.md create mode 100644 vendor/github.com/spf13/afero/afero.go create mode 100644 vendor/github.com/spf13/afero/appveyor.yml create mode 100644 vendor/github.com/spf13/afero/basepath.go create mode 100644 vendor/github.com/spf13/afero/cacheOnReadFs.go create mode 100644 vendor/github.com/spf13/afero/const_bsds.go create mode 100644 vendor/github.com/spf13/afero/const_win_unix.go create mode 100644 vendor/github.com/spf13/afero/copyOnWriteFs.go create mode 100644 vendor/github.com/spf13/afero/httpFs.go create mode 100644 vendor/github.com/spf13/afero/internal/common/adapters.go create mode 100644 vendor/github.com/spf13/afero/iofs.go create mode 100644 vendor/github.com/spf13/afero/ioutil.go create mode 100644 vendor/github.com/spf13/afero/lstater.go create mode 100644 vendor/github.com/spf13/afero/match.go create mode 100644 vendor/github.com/spf13/afero/mem/dir.go create mode 100644 vendor/github.com/spf13/afero/mem/dirmap.go create mode 100644 vendor/github.com/spf13/afero/mem/file.go create mode 100644 vendor/github.com/spf13/afero/memmap.go create mode 100644 vendor/github.com/spf13/afero/os.go create mode 100644 vendor/github.com/spf13/afero/path.go create mode 100644 vendor/github.com/spf13/afero/readonlyfs.go create mode 100644 vendor/github.com/spf13/afero/regexpfs.go create mode 100644 vendor/github.com/spf13/afero/symlink.go create mode 100644 
vendor/github.com/spf13/afero/unionFile.go create mode 100644 vendor/github.com/spf13/afero/util.go create mode 100644 vendor/github.com/spf13/cast/.gitignore create mode 100644 vendor/github.com/spf13/cast/LICENSE create mode 100644 vendor/github.com/spf13/cast/Makefile create mode 100644 vendor/github.com/spf13/cast/README.md create mode 100644 vendor/github.com/spf13/cast/cast.go create mode 100644 vendor/github.com/spf13/cast/caste.go create mode 100644 vendor/github.com/spf13/cast/timeformattype_string.go create mode 100644 vendor/github.com/spf13/fsync/.gitignore create mode 100644 vendor/github.com/spf13/fsync/LICENSE create mode 100644 vendor/github.com/spf13/fsync/README.md create mode 100644 vendor/github.com/spf13/fsync/fsync.go create mode 100644 vendor/github.com/steveyen/gtreap/.gitignore create mode 100644 vendor/github.com/steveyen/gtreap/LICENSE create mode 100644 vendor/github.com/steveyen/gtreap/README.md create mode 100644 vendor/github.com/steveyen/gtreap/treap.go create mode 100644 vendor/github.com/tdewolff/minify/v2/.gitattributes create mode 100644 vendor/github.com/tdewolff/minify/v2/.gitignore create mode 100644 vendor/github.com/tdewolff/minify/v2/.golangci.yml create mode 100644 vendor/github.com/tdewolff/minify/v2/Dockerfile create mode 100644 vendor/github.com/tdewolff/minify/v2/LICENSE create mode 100644 vendor/github.com/tdewolff/minify/v2/Makefile create mode 100644 vendor/github.com/tdewolff/minify/v2/README.md create mode 100644 vendor/github.com/tdewolff/minify/v2/common.go create mode 100644 vendor/github.com/tdewolff/minify/v2/css/css.go create mode 100644 vendor/github.com/tdewolff/minify/v2/css/hash.go create mode 100644 vendor/github.com/tdewolff/minify/v2/css/table.go create mode 100644 vendor/github.com/tdewolff/minify/v2/css/util.go create mode 100644 vendor/github.com/tdewolff/minify/v2/html/buffer.go create mode 100644 vendor/github.com/tdewolff/minify/v2/html/hash.go create mode 100644 
vendor/github.com/tdewolff/minify/v2/html/html.go create mode 100644 vendor/github.com/tdewolff/minify/v2/html/table.go create mode 100644 vendor/github.com/tdewolff/minify/v2/js/js.go create mode 100644 vendor/github.com/tdewolff/minify/v2/js/stmtlist.go create mode 100644 vendor/github.com/tdewolff/minify/v2/js/util.go create mode 100644 vendor/github.com/tdewolff/minify/v2/js/vars.go create mode 100644 vendor/github.com/tdewolff/minify/v2/json/json.go create mode 100644 vendor/github.com/tdewolff/minify/v2/minify.go create mode 100644 vendor/github.com/tdewolff/minify/v2/svg/buffer.go create mode 100644 vendor/github.com/tdewolff/minify/v2/svg/hash.go create mode 100644 vendor/github.com/tdewolff/minify/v2/svg/pathdata.go create mode 100644 vendor/github.com/tdewolff/minify/v2/svg/svg.go create mode 100644 vendor/github.com/tdewolff/minify/v2/svg/table.go create mode 100644 vendor/github.com/tdewolff/minify/v2/xml/buffer.go create mode 100644 vendor/github.com/tdewolff/minify/v2/xml/table.go create mode 100644 vendor/github.com/tdewolff/minify/v2/xml/xml.go create mode 100644 vendor/github.com/tdewolff/parse/v2/.gitattributes create mode 100644 vendor/github.com/tdewolff/parse/v2/.gitignore create mode 100644 vendor/github.com/tdewolff/parse/v2/.golangci.yml create mode 100644 vendor/github.com/tdewolff/parse/v2/LICENSE.md create mode 100644 vendor/github.com/tdewolff/parse/v2/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/buffer/buffer.go create mode 100644 vendor/github.com/tdewolff/parse/v2/buffer/lexer.go create mode 100644 vendor/github.com/tdewolff/parse/v2/buffer/reader.go create mode 100644 vendor/github.com/tdewolff/parse/v2/buffer/streamlexer.go create mode 100644 vendor/github.com/tdewolff/parse/v2/buffer/writer.go create mode 100644 vendor/github.com/tdewolff/parse/v2/common.go create mode 100644 vendor/github.com/tdewolff/parse/v2/css/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/css/hash.go create mode 100644 
vendor/github.com/tdewolff/parse/v2/css/lex.go create mode 100644 vendor/github.com/tdewolff/parse/v2/css/parse.go create mode 100644 vendor/github.com/tdewolff/parse/v2/css/util.go create mode 100644 vendor/github.com/tdewolff/parse/v2/error.go create mode 100644 vendor/github.com/tdewolff/parse/v2/html/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/html/hash.go create mode 100644 vendor/github.com/tdewolff/parse/v2/html/lex.go create mode 100644 vendor/github.com/tdewolff/parse/v2/html/parse.go create mode 100644 vendor/github.com/tdewolff/parse/v2/html/util.go create mode 100644 vendor/github.com/tdewolff/parse/v2/input.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/js/ast.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/lex.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/parse.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/table.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/tokentype.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/util.go create mode 100644 vendor/github.com/tdewolff/parse/v2/js/walk.go create mode 100644 vendor/github.com/tdewolff/parse/v2/json/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/json/parse.go create mode 100644 vendor/github.com/tdewolff/parse/v2/position.go create mode 100644 vendor/github.com/tdewolff/parse/v2/strconv/decimal.go create mode 100644 vendor/github.com/tdewolff/parse/v2/strconv/float.go create mode 100644 vendor/github.com/tdewolff/parse/v2/strconv/int.go create mode 100644 vendor/github.com/tdewolff/parse/v2/strconv/number.go create mode 100644 vendor/github.com/tdewolff/parse/v2/util.go create mode 100644 vendor/github.com/tdewolff/parse/v2/xml/README.md create mode 100644 vendor/github.com/tdewolff/parse/v2/xml/lex.go create mode 100644 vendor/github.com/tdewolff/parse/v2/xml/util.go create mode 100644 
vendor/github.com/tidwall/gjson/LICENSE create mode 100644 vendor/github.com/tidwall/gjson/README.md create mode 100644 vendor/github.com/tidwall/gjson/SYNTAX.md create mode 100644 vendor/github.com/tidwall/gjson/gjson.go create mode 100644 vendor/github.com/tidwall/gjson/logo.png create mode 100644 vendor/github.com/tidwall/match/LICENSE create mode 100644 vendor/github.com/tidwall/match/README.md create mode 100644 vendor/github.com/tidwall/match/match.go create mode 100644 vendor/github.com/tidwall/pretty/LICENSE create mode 100644 vendor/github.com/tidwall/pretty/README.md create mode 100644 vendor/github.com/tidwall/pretty/pretty.go create mode 100644 vendor/github.com/tidwall/sjson/LICENSE create mode 100644 vendor/github.com/tidwall/sjson/README.md create mode 100644 vendor/github.com/tidwall/sjson/logo.png create mode 100644 vendor/github.com/tidwall/sjson/sjson.go create mode 100644 vendor/github.com/tinylib/msgp/LICENSE create mode 100644 vendor/github.com/tinylib/msgp/msgp/advise_linux.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/advise_other.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/circular.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/defs.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/edit.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/elsize.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/errors.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/extension.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/file.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/file_port.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/integers.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/json.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/json_bytes.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/number.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/purego.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/read.go 
create mode 100644 vendor/github.com/tinylib/msgp/msgp/read_bytes.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/size.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/unsafe.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/write.go create mode 100644 vendor/github.com/tinylib/msgp/msgp/write_bytes.go create mode 100644 vendor/github.com/willf/bitset/.gitignore create mode 100644 vendor/github.com/willf/bitset/.travis.yml create mode 100644 vendor/github.com/willf/bitset/LICENSE create mode 100644 vendor/github.com/willf/bitset/Makefile create mode 100644 vendor/github.com/willf/bitset/README.md create mode 100644 vendor/github.com/willf/bitset/bitset.go create mode 100644 vendor/github.com/willf/bitset/popcnt.go create mode 100644 vendor/github.com/willf/bitset/popcnt_19.go create mode 100644 vendor/github.com/willf/bitset/popcnt_amd64.go create mode 100644 vendor/github.com/willf/bitset/popcnt_amd64.s create mode 100644 vendor/github.com/willf/bitset/popcnt_generic.go create mode 100644 vendor/github.com/willf/bitset/trailing_zeros_18.go create mode 100644 vendor/github.com/willf/bitset/trailing_zeros_19.go create mode 100644 vendor/github.com/yuin/goldmark-emoji/.gitignore create mode 100644 vendor/github.com/yuin/goldmark-emoji/LICENSE create mode 100644 vendor/github.com/yuin/goldmark-emoji/README.md create mode 100644 vendor/github.com/yuin/goldmark-emoji/ast/emoji.go create mode 100644 vendor/github.com/yuin/goldmark-emoji/definition/definition.go create mode 100644 vendor/github.com/yuin/goldmark-emoji/definition/github.go create mode 100644 vendor/github.com/yuin/goldmark-emoji/emoji.go create mode 100644 vendor/github.com/yuin/goldmark/.gitignore create mode 100644 vendor/github.com/yuin/goldmark/.golangci.yml create mode 100644 vendor/github.com/yuin/goldmark/LICENSE create mode 100644 vendor/github.com/yuin/goldmark/Makefile create mode 100644 vendor/github.com/yuin/goldmark/README.md create mode 100644 
vendor/github.com/yuin/goldmark/ast/ast.go create mode 100644 vendor/github.com/yuin/goldmark/ast/block.go create mode 100644 vendor/github.com/yuin/goldmark/ast/inline.go create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/definition_list.go create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/footnote.go create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/strikethrough.go create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/table.go create mode 100644 vendor/github.com/yuin/goldmark/extension/ast/tasklist.go create mode 100644 vendor/github.com/yuin/goldmark/extension/cjk.go create mode 100644 vendor/github.com/yuin/goldmark/extension/definition_list.go create mode 100644 vendor/github.com/yuin/goldmark/extension/footnote.go create mode 100644 vendor/github.com/yuin/goldmark/extension/gfm.go create mode 100644 vendor/github.com/yuin/goldmark/extension/linkify.go create mode 100644 vendor/github.com/yuin/goldmark/extension/package.go create mode 100644 vendor/github.com/yuin/goldmark/extension/strikethrough.go create mode 100644 vendor/github.com/yuin/goldmark/extension/table.go create mode 100644 vendor/github.com/yuin/goldmark/extension/tasklist.go create mode 100644 vendor/github.com/yuin/goldmark/extension/typographer.go create mode 100644 vendor/github.com/yuin/goldmark/markdown.go create mode 100644 vendor/github.com/yuin/goldmark/parser/attribute.go create mode 100644 vendor/github.com/yuin/goldmark/parser/atx_heading.go create mode 100644 vendor/github.com/yuin/goldmark/parser/auto_link.go create mode 100644 vendor/github.com/yuin/goldmark/parser/blockquote.go create mode 100644 vendor/github.com/yuin/goldmark/parser/code_block.go create mode 100644 vendor/github.com/yuin/goldmark/parser/code_span.go create mode 100644 vendor/github.com/yuin/goldmark/parser/delimiter.go create mode 100644 vendor/github.com/yuin/goldmark/parser/emphasis.go create mode 100644 
vendor/github.com/yuin/goldmark/parser/fcode_block.go create mode 100644 vendor/github.com/yuin/goldmark/parser/html_block.go create mode 100644 vendor/github.com/yuin/goldmark/parser/link.go create mode 100644 vendor/github.com/yuin/goldmark/parser/link_ref.go create mode 100644 vendor/github.com/yuin/goldmark/parser/list.go create mode 100644 vendor/github.com/yuin/goldmark/parser/list_item.go create mode 100644 vendor/github.com/yuin/goldmark/parser/paragraph.go create mode 100644 vendor/github.com/yuin/goldmark/parser/parser.go create mode 100644 vendor/github.com/yuin/goldmark/parser/raw_html.go create mode 100644 vendor/github.com/yuin/goldmark/parser/setext_headings.go create mode 100644 vendor/github.com/yuin/goldmark/parser/thematic_break.go create mode 100644 vendor/github.com/yuin/goldmark/renderer/html/html.go create mode 100644 vendor/github.com/yuin/goldmark/renderer/renderer.go create mode 100644 vendor/github.com/yuin/goldmark/text/package.go create mode 100644 vendor/github.com/yuin/goldmark/text/reader.go create mode 100644 vendor/github.com/yuin/goldmark/text/segment.go create mode 100644 vendor/github.com/yuin/goldmark/util/html5entities.go create mode 100644 vendor/github.com/yuin/goldmark/util/unicode_case_folding.go create mode 100644 vendor/github.com/yuin/goldmark/util/util.go create mode 100644 vendor/github.com/yuin/goldmark/util/util_cjk.go create mode 100644 vendor/github.com/yuin/goldmark/util/util_safe.go create mode 100644 vendor/github.com/yuin/goldmark/util/util_unsafe.go create mode 100644 vendor/go.etcd.io/bbolt/.gitignore create mode 100644 vendor/go.etcd.io/bbolt/LICENSE create mode 100644 vendor/go.etcd.io/bbolt/Makefile create mode 100644 vendor/go.etcd.io/bbolt/README.md create mode 100644 vendor/go.etcd.io/bbolt/bolt_386.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_amd64.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_arm.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_arm64.go create mode 100644 
vendor/go.etcd.io/bbolt/bolt_linux.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_loong64.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_mips64x.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_mipsx.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_openbsd.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_ppc.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_ppc64.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_ppc64le.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_riscv64.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_s390x.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_unix.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_unix_aix.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_unix_solaris.go create mode 100644 vendor/go.etcd.io/bbolt/bolt_windows.go create mode 100644 vendor/go.etcd.io/bbolt/boltsync_unix.go create mode 100644 vendor/go.etcd.io/bbolt/bucket.go create mode 100644 vendor/go.etcd.io/bbolt/compact.go create mode 100644 vendor/go.etcd.io/bbolt/cursor.go create mode 100644 vendor/go.etcd.io/bbolt/db.go create mode 100644 vendor/go.etcd.io/bbolt/doc.go create mode 100644 vendor/go.etcd.io/bbolt/errors.go create mode 100644 vendor/go.etcd.io/bbolt/freelist.go create mode 100644 vendor/go.etcd.io/bbolt/freelist_hmap.go create mode 100644 vendor/go.etcd.io/bbolt/mlock_unix.go create mode 100644 vendor/go.etcd.io/bbolt/mlock_windows.go create mode 100644 vendor/go.etcd.io/bbolt/node.go create mode 100644 vendor/go.etcd.io/bbolt/page.go create mode 100644 vendor/go.etcd.io/bbolt/tx.go create mode 100644 vendor/go.etcd.io/bbolt/tx_check.go create mode 100644 vendor/go.etcd.io/bbolt/unsafe.go create mode 100644 vendor/go.mongodb.org/mongo-driver/LICENSE create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bson.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go create mode 100644 
vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/codec_cache.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go create mode 100644 
vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_wrappers.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/json_scanner.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/mode.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/reader.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_reader.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_writer.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsonrw/writer.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/bsontype/bsontype.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/decoder.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/encoder.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/marshal.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/primitive/decimal.go create mode 100644 
vendor/go.mongodb.org/mongo-driver/bson/primitive/objectid.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/primitive/primitive.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/primitive_codecs.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/raw.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/raw_element.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/raw_value.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/registry.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/types.go create mode 100644 vendor/go.mongodb.org/mongo-driver/bson/unmarshal.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/array.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_arraybuilder.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_documentbuilder.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bsoncore.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document_sequence.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/element.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/tables.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/value.go create mode 100644 vendor/go.opentelemetry.io/otel/.codespellignore create mode 100644 vendor/go.opentelemetry.io/otel/.codespellrc create mode 100644 vendor/go.opentelemetry.io/otel/.gitattributes create mode 100644 vendor/go.opentelemetry.io/otel/.gitignore create mode 100644 vendor/go.opentelemetry.io/otel/.gitmodules create mode 100644 vendor/go.opentelemetry.io/otel/.golangci.yml create mode 100644 vendor/go.opentelemetry.io/otel/.lycheeignore create mode 100644 
vendor/go.opentelemetry.io/otel/.markdownlint.yaml create mode 100644 vendor/go.opentelemetry.io/otel/CHANGELOG.md create mode 100644 vendor/go.opentelemetry.io/otel/CODEOWNERS create mode 100644 vendor/go.opentelemetry.io/otel/CONTRIBUTING.md create mode 100644 vendor/go.opentelemetry.io/otel/LICENSE create mode 100644 vendor/go.opentelemetry.io/otel/Makefile create mode 100644 vendor/go.opentelemetry.io/otel/README.md create mode 100644 vendor/go.opentelemetry.io/otel/RELEASING.md create mode 100644 vendor/go.opentelemetry.io/otel/VERSIONING.md create mode 100644 vendor/go.opentelemetry.io/otel/attribute/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/encoder.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/filter.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/iterator.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/key.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/kv.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/set.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/type_string.go create mode 100644 vendor/go.opentelemetry.io/otel/attribute/value.go create mode 100644 vendor/go.opentelemetry.io/otel/baggage/baggage.go create mode 100644 vendor/go.opentelemetry.io/otel/baggage/context.go create mode 100644 vendor/go.opentelemetry.io/otel/baggage/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/codes/codes.go create mode 100644 vendor/go.opentelemetry.io/otel/codes/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/error_handler.go create mode 100644 vendor/go.opentelemetry.io/otel/get_main_pkgs.sh create mode 100644 vendor/go.opentelemetry.io/otel/handler.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/attribute/attribute.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/baggage/baggage.go create mode 100644 
vendor/go.opentelemetry.io/otel/internal/baggage/context.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/gen.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/handler.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/instruments.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/internal_logging.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/meter.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/propagator.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/state.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/global/trace.go create mode 100644 vendor/go.opentelemetry.io/otel/internal/rawhelpers.go create mode 100644 vendor/go.opentelemetry.io/otel/internal_logging.go create mode 100644 vendor/go.opentelemetry.io/otel/metric.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/LICENSE create mode 100644 vendor/go.opentelemetry.io/otel/metric/asyncfloat64.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/asyncint64.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/config.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/embedded/embedded.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/instrument.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/meter.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/syncfloat64.go create mode 100644 vendor/go.opentelemetry.io/otel/metric/syncint64.go create mode 100644 vendor/go.opentelemetry.io/otel/propagation.go create mode 100644 vendor/go.opentelemetry.io/otel/propagation/baggage.go create mode 100644 vendor/go.opentelemetry.io/otel/propagation/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/propagation/propagation.go create mode 100644 vendor/go.opentelemetry.io/otel/propagation/trace_context.go create mode 100644 
vendor/go.opentelemetry.io/otel/requirements.txt create mode 100644 vendor/go.opentelemetry.io/otel/semconv/internal/v2/http.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/internal/v2/net.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/event.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/exception.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/http.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/httpconv/http.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/resource.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/schema.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.17.0/trace.go create mode 100644 vendor/go.opentelemetry.io/otel/trace.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/LICENSE create mode 100644 vendor/go.opentelemetry.io/otel/trace/config.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/context.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/doc.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/embedded/embedded.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/nonrecording.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/noop.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/trace.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/tracestate.go create mode 100644 vendor/go.opentelemetry.io/otel/verify_examples.sh create mode 100644 vendor/go.opentelemetry.io/otel/version.go create mode 100644 vendor/go.opentelemetry.io/otel/versions.yaml create mode 100644 vendor/golang.org/x/crypto/LICENSE create mode 100644 vendor/golang.org/x/crypto/PATENTS create mode 100644 vendor/golang.org/x/crypto/bcrypt/base64.go create mode 100644 vendor/golang.org/x/crypto/bcrypt/bcrypt.go create mode 100644 vendor/golang.org/x/crypto/blowfish/block.go 
create mode 100644 vendor/golang.org/x/crypto/blowfish/cipher.go create mode 100644 vendor/golang.org/x/crypto/blowfish/const.go create mode 100644 vendor/golang.org/x/exp/LICENSE create mode 100644 vendor/golang.org/x/exp/PATENTS create mode 100644 vendor/golang.org/x/exp/maps/maps.go create mode 100644 vendor/golang.org/x/exp/rand/exp.go create mode 100644 vendor/golang.org/x/exp/rand/normal.go create mode 100644 vendor/golang.org/x/exp/rand/rand.go create mode 100644 vendor/golang.org/x/exp/rand/rng.go create mode 100644 vendor/golang.org/x/exp/rand/zipf.go create mode 100644 vendor/golang.org/x/image/LICENSE create mode 100644 vendor/golang.org/x/image/PATENTS create mode 100644 vendor/golang.org/x/image/bmp/reader.go create mode 100644 vendor/golang.org/x/image/bmp/writer.go create mode 100644 vendor/golang.org/x/image/ccitt/reader.go create mode 100644 vendor/golang.org/x/image/ccitt/table.go create mode 100644 vendor/golang.org/x/image/ccitt/writer.go create mode 100644 vendor/golang.org/x/image/draw/draw.go create mode 100644 vendor/golang.org/x/image/draw/impl.go create mode 100644 vendor/golang.org/x/image/draw/scale.go create mode 100644 vendor/golang.org/x/image/math/f64/f64.go create mode 100644 vendor/golang.org/x/image/tiff/buffer.go create mode 100644 vendor/golang.org/x/image/tiff/compress.go create mode 100644 vendor/golang.org/x/image/tiff/consts.go create mode 100644 vendor/golang.org/x/image/tiff/fuzz.go create mode 100644 vendor/golang.org/x/image/tiff/lzw/reader.go create mode 100644 vendor/golang.org/x/image/tiff/reader.go create mode 100644 vendor/golang.org/x/image/tiff/writer.go create mode 100644 vendor/golang.org/x/mod/LICENSE create mode 100644 vendor/golang.org/x/mod/PATENTS create mode 100644 vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go create mode 100644 vendor/golang.org/x/mod/module/module.go create mode 100644 vendor/golang.org/x/mod/module/pseudo.go create mode 100644 vendor/golang.org/x/mod/semver/semver.go create mode 
100644 vendor/golang.org/x/sync/LICENSE create mode 100644 vendor/golang.org/x/sync/PATENTS create mode 100644 vendor/golang.org/x/sync/errgroup/errgroup.go create mode 100644 vendor/golang.org/x/sync/errgroup/go120.go create mode 100644 vendor/golang.org/x/sync/errgroup/pre_go120.go create mode 100644 vendor/golang.org/x/sys/LICENSE create mode 100644 vendor/golang.org/x/sys/PATENTS create mode 100644 vendor/golang.org/x/sys/unix/.gitignore create mode 100644 vendor/golang.org/x/sys/unix/README.md create mode 100644 vendor/golang.org/x/sys/unix/affinity_linux.go create mode 100644 vendor/golang.org/x/sys/unix/aliases.go create mode 100644 vendor/golang.org/x/sys/unix/asm_aix_ppc64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_386.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_amd64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_arm.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_arm64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_ppc64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_bsd_riscv64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_386.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_amd64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_arm.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_arm64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_loong64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_mips64x.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_mipsx.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_riscv64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_linux_s390x.s create mode 100644 vendor/golang.org/x/sys/unix/asm_openbsd_mips64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_solaris_amd64.s create mode 100644 vendor/golang.org/x/sys/unix/asm_zos_s390x.s create mode 100644 
vendor/golang.org/x/sys/unix/bluetooth_linux.go create mode 100644 vendor/golang.org/x/sys/unix/bpxsvc_zos.go create mode 100644 vendor/golang.org/x/sys/unix/bpxsvc_zos.s create mode 100644 vendor/golang.org/x/sys/unix/cap_freebsd.go create mode 100644 vendor/golang.org/x/sys/unix/constants.go create mode 100644 vendor/golang.org/x/sys/unix/dev_aix_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/dev_aix_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/dev_darwin.go create mode 100644 vendor/golang.org/x/sys/unix/dev_dragonfly.go create mode 100644 vendor/golang.org/x/sys/unix/dev_freebsd.go create mode 100644 vendor/golang.org/x/sys/unix/dev_linux.go create mode 100644 vendor/golang.org/x/sys/unix/dev_netbsd.go create mode 100644 vendor/golang.org/x/sys/unix/dev_openbsd.go create mode 100644 vendor/golang.org/x/sys/unix/dev_zos.go create mode 100644 vendor/golang.org/x/sys/unix/dirent.go create mode 100644 vendor/golang.org/x/sys/unix/endian_big.go create mode 100644 vendor/golang.org/x/sys/unix/endian_little.go create mode 100644 vendor/golang.org/x/sys/unix/env_unix.go create mode 100644 vendor/golang.org/x/sys/unix/fcntl.go create mode 100644 vendor/golang.org/x/sys/unix/fcntl_darwin.go create mode 100644 vendor/golang.org/x/sys/unix/fcntl_linux_32bit.go create mode 100644 vendor/golang.org/x/sys/unix/fdset.go create mode 100644 vendor/golang.org/x/sys/unix/gccgo.go create mode 100644 vendor/golang.org/x/sys/unix/gccgo_c.c create mode 100644 vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ifreq_linux.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_linux.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_signed.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_unsigned.go create mode 100644 vendor/golang.org/x/sys/unix/ioctl_zos.go create mode 100644 vendor/golang.org/x/sys/unix/mkall.sh create mode 100644 vendor/golang.org/x/sys/unix/mkerrors.sh create mode 100644 
vendor/golang.org/x/sys/unix/mmap_nomremap.go create mode 100644 vendor/golang.org/x/sys/unix/mremap.go create mode 100644 vendor/golang.org/x/sys/unix/pagesize_unix.go create mode 100644 vendor/golang.org/x/sys/unix/pledge_openbsd.go create mode 100644 vendor/golang.org/x/sys/unix/ptrace_darwin.go create mode 100644 vendor/golang.org/x/sys/unix/ptrace_ios.go create mode 100644 vendor/golang.org/x/sys/unix/race.go create mode 100644 vendor/golang.org/x/sys/unix/race0.go create mode 100644 vendor/golang.org/x/sys/unix/readdirent_getdents.go create mode 100644 vendor/golang.org/x/sys/unix/readdirent_getdirentries.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_dragonfly.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_linux.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_unix.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_unix_other.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_zos.go create mode 100644 vendor/golang.org/x/sys/unix/symaddr_zos_s390x.s create mode 100644 vendor/golang.org/x/sys/unix/syscall.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_aix.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_aix_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_bsd.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_darwin_libSystem.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_dragonfly.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_dragonfly_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go create mode 
100644 vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_freebsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_hurd.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_hurd_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_illumos.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_alarm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_amd64_gc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_gc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_gc_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_gc_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_gccgo_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_gccgo_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_loong64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_mips64x.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_mipsx.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_ppc64x.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_linux_sparc64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd_386.go create mode 100644 
vendor/golang.org/x/sys/unix/syscall_netbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_netbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_libc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_solaris.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_solaris_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_unix.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_unix_gc.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_unix_gc_ppc64x.go create mode 100644 vendor/golang.org/x/sys/unix/syscall_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/sysvshm_linux.go create mode 100644 vendor/golang.org/x/sys/unix/sysvshm_unix.go create mode 100644 vendor/golang.org/x/sys/unix/sysvshm_unix_other.go create mode 100644 vendor/golang.org/x/sys/unix/timestruct.go create mode 100644 vendor/golang.org/x/sys/unix/unveil_openbsd.go create mode 100644 vendor/golang.org/x/sys/unix/vgetrandom_linux.go create mode 100644 vendor/golang.org/x/sys/unix/vgetrandom_unsupported.go create mode 100644 vendor/golang.org/x/sys/unix/xattr_bsd.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_aix_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_aix_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_amd64.go 
create mode 100644 vendor/golang.org/x/sys/unix/zerrors_darwin_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_dragonfly_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_freebsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_386.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_netbsd_arm64.go create mode 100644 
vendor/golang.org/x/sys/unix/zerrors_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_solaris_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zerrors_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/zptrace_armnn_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zptrace_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zptrace_mipsnn_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zptrace_mipsnnle_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zptrace_x86_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zsymaddr_zos_s390x.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_aix_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gc.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_aix_ppc64_gccgo.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go create mode 100644 
vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_illumos_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_loong64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s create mode 100644 
vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysctl_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_dragonfly_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_freebsd_riscv64.go create mode 100644 
vendor/golang.org/x/sys/unix/zsysnum_linux_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_loong64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mips64le.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_mipsle.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64le.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_linux_sparc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_netbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/zsysnum_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_aix_ppc.go create mode 100644 
vendor/golang.org/x/sys/unix/ztypes_aix_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_dragonfly_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_freebsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_386.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_arm.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_netbsd_arm.go create mode 100644 
vendor/golang.org/x/sys/unix/ztypes_netbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_386.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_arm.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_arm64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_mips64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_ppc64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_openbsd_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_solaris_amd64.go create mode 100644 vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go create mode 100644 vendor/golang.org/x/sys/windows/aliases.go create mode 100644 vendor/golang.org/x/sys/windows/dll_windows.go create mode 100644 vendor/golang.org/x/sys/windows/env_windows.go create mode 100644 vendor/golang.org/x/sys/windows/eventlog.go create mode 100644 vendor/golang.org/x/sys/windows/exec_windows.go create mode 100644 vendor/golang.org/x/sys/windows/memory_windows.go create mode 100644 vendor/golang.org/x/sys/windows/mkerrors.bash create mode 100644 vendor/golang.org/x/sys/windows/mkknownfolderids.bash create mode 100644 vendor/golang.org/x/sys/windows/mksyscall.go create mode 100644 vendor/golang.org/x/sys/windows/race.go create mode 100644 vendor/golang.org/x/sys/windows/race0.go create mode 100644 vendor/golang.org/x/sys/windows/security_windows.go create mode 100644 vendor/golang.org/x/sys/windows/service.go create mode 100644 vendor/golang.org/x/sys/windows/setupapi_windows.go create mode 100644 vendor/golang.org/x/sys/windows/str.go create mode 100644 vendor/golang.org/x/sys/windows/syscall.go create mode 100644 vendor/golang.org/x/sys/windows/syscall_windows.go create mode 100644 vendor/golang.org/x/sys/windows/types_windows.go create mode 100644 vendor/golang.org/x/sys/windows/types_windows_386.go create mode 100644 
vendor/golang.org/x/sys/windows/types_windows_amd64.go create mode 100644 vendor/golang.org/x/sys/windows/types_windows_arm.go create mode 100644 vendor/golang.org/x/sys/windows/types_windows_arm64.go create mode 100644 vendor/golang.org/x/sys/windows/zerrors_windows.go create mode 100644 vendor/golang.org/x/sys/windows/zknownfolderids_windows.go create mode 100644 vendor/golang.org/x/sys/windows/zsyscall_windows.go create mode 100644 vendor/golang.org/x/text/LICENSE create mode 100644 vendor/golang.org/x/text/PATENTS create mode 100644 vendor/golang.org/x/text/cases/cases.go create mode 100644 vendor/golang.org/x/text/cases/context.go create mode 100644 vendor/golang.org/x/text/cases/fold.go create mode 100644 vendor/golang.org/x/text/cases/icu.go create mode 100644 vendor/golang.org/x/text/cases/info.go create mode 100644 vendor/golang.org/x/text/cases/map.go create mode 100644 vendor/golang.org/x/text/cases/tables10.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables11.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables12.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables13.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables15.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables9.0.0.go create mode 100644 vendor/golang.org/x/text/cases/trieval.go create mode 100644 vendor/golang.org/x/text/collate/collate.go create mode 100644 vendor/golang.org/x/text/collate/index.go create mode 100644 vendor/golang.org/x/text/collate/option.go create mode 100644 vendor/golang.org/x/text/collate/sort.go create mode 100644 vendor/golang.org/x/text/collate/tables.go create mode 100644 vendor/golang.org/x/text/internal/colltab/collelem.go create mode 100644 vendor/golang.org/x/text/internal/colltab/colltab.go create mode 100644 vendor/golang.org/x/text/internal/colltab/contract.go create mode 100644 vendor/golang.org/x/text/internal/colltab/iter.go create mode 100644 vendor/golang.org/x/text/internal/colltab/numeric.go 
create mode 100644 vendor/golang.org/x/text/internal/colltab/table.go create mode 100644 vendor/golang.org/x/text/internal/colltab/trie.go create mode 100644 vendor/golang.org/x/text/internal/colltab/weighter.go create mode 100644 vendor/golang.org/x/text/internal/internal.go create mode 100644 vendor/golang.org/x/text/internal/language/common.go create mode 100644 vendor/golang.org/x/text/internal/language/compact.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/compact.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/language.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/parents.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/tables.go create mode 100644 vendor/golang.org/x/text/internal/language/compact/tags.go create mode 100644 vendor/golang.org/x/text/internal/language/compose.go create mode 100644 vendor/golang.org/x/text/internal/language/coverage.go create mode 100644 vendor/golang.org/x/text/internal/language/language.go create mode 100644 vendor/golang.org/x/text/internal/language/lookup.go create mode 100644 vendor/golang.org/x/text/internal/language/match.go create mode 100644 vendor/golang.org/x/text/internal/language/parse.go create mode 100644 vendor/golang.org/x/text/internal/language/tables.go create mode 100644 vendor/golang.org/x/text/internal/language/tags.go create mode 100644 vendor/golang.org/x/text/internal/match.go create mode 100644 vendor/golang.org/x/text/internal/tag/tag.go create mode 100644 vendor/golang.org/x/text/language/coverage.go create mode 100644 vendor/golang.org/x/text/language/doc.go create mode 100644 vendor/golang.org/x/text/language/language.go create mode 100644 vendor/golang.org/x/text/language/match.go create mode 100644 vendor/golang.org/x/text/language/parse.go create mode 100644 vendor/golang.org/x/text/language/tables.go create mode 100644 vendor/golang.org/x/text/language/tags.go create mode 100644 
vendor/golang.org/x/text/runes/cond.go create mode 100644 vendor/golang.org/x/text/runes/runes.go create mode 100644 vendor/golang.org/x/text/transform/transform.go create mode 100644 vendor/golang.org/x/text/unicode/norm/composition.go create mode 100644 vendor/golang.org/x/text/unicode/norm/forminfo.go create mode 100644 vendor/golang.org/x/text/unicode/norm/input.go create mode 100644 vendor/golang.org/x/text/unicode/norm/iter.go create mode 100644 vendor/golang.org/x/text/unicode/norm/normalize.go create mode 100644 vendor/golang.org/x/text/unicode/norm/readwriter.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables10.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables11.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables12.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables13.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables15.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/tables9.0.0.go create mode 100644 vendor/golang.org/x/text/unicode/norm/transform.go create mode 100644 vendor/golang.org/x/text/unicode/norm/trie.go create mode 100644 vendor/golang.org/x/tools/LICENSE create mode 100644 vendor/golang.org/x/tools/PATENTS create mode 100644 vendor/golang.org/x/tools/txtar/archive.go create mode 100644 vendor/golang.org/x/tools/txtar/fs.go create mode 100644 vendor/google.golang.org/protobuf/LICENSE create mode 100644 vendor/google.golang.org/protobuf/PATENTS create mode 100644 vendor/google.golang.org/protobuf/encoding/prototext/decode.go create mode 100644 vendor/google.golang.org/protobuf/encoding/prototext/doc.go create mode 100644 vendor/google.golang.org/protobuf/encoding/prototext/encode.go create mode 100644 vendor/google.golang.org/protobuf/encoding/protowire/wire.go create mode 100644 vendor/google.golang.org/protobuf/internal/descfmt/stringer.go create mode 100644 vendor/google.golang.org/protobuf/internal/descopts/options.go create mode 
100644 vendor/google.golang.org/protobuf/internal/detrand/rand.go create mode 100644 vendor/google.golang.org/protobuf/internal/editiondefaults/defaults.go create mode 100644 vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/defval/default.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/decode.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/decode_number.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/decode_string.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/decode_token.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/doc.go create mode 100644 vendor/google.golang.org/protobuf/internal/encoding/text/encode.go create mode 100644 vendor/google.golang.org/protobuf/internal/errors/errors.go create mode 100644 vendor/google.golang.org/protobuf/internal/errors/is_go112.go create mode 100644 vendor/google.golang.org/protobuf/internal/errors/is_go113.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/build.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/desc.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/editions.go create mode 100644 vendor/google.golang.org/protobuf/internal/filedesc/placeholder.go create mode 100644 
vendor/google.golang.org/protobuf/internal/filetype/build.go create mode 100644 vendor/google.golang.org/protobuf/internal/flags/flags.go create mode 100644 vendor/google.golang.org/protobuf/internal/flags/proto_legacy_disable.go create mode 100644 vendor/google.golang.org/protobuf/internal/flags/proto_legacy_enable.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/any_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/api_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/doc.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/duration_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/empty_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/go_features_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/goname.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/map_entry.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/struct_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/type_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers.go create mode 100644 vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/api_export.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/checkinit.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_extension.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_field.go create mode 100644 
vendor/google.golang.org/protobuf/internal/impl/codec_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_map.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_map_go111.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_map_go112.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_message.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_tables.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/codec_unsafe.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/convert.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/convert_list.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/convert_map.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/decode.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/encode.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/enum.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/extension.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/legacy_enum.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/legacy_export.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/legacy_file.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/legacy_message.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/merge.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/merge_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/message.go create mode 100644 
vendor/google.golang.org/protobuf/internal/impl/message_reflect.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/message_reflect_field.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/message_reflect_gen.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/validate.go create mode 100644 vendor/google.golang.org/protobuf/internal/impl/weak.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/order.go create mode 100644 vendor/google.golang.org/protobuf/internal/order/range.go create mode 100644 vendor/google.golang.org/protobuf/internal/pragma/pragma.go create mode 100644 vendor/google.golang.org/protobuf/internal/set/ints.go create mode 100644 vendor/google.golang.org/protobuf/internal/strs/strings.go create mode 100644 vendor/google.golang.org/protobuf/internal/strs/strings_pure.go create mode 100644 vendor/google.golang.org/protobuf/internal/strs/strings_unsafe_go120.go create mode 100644 vendor/google.golang.org/protobuf/internal/strs/strings_unsafe_go121.go create mode 100644 vendor/google.golang.org/protobuf/internal/version/version.go create mode 100644 vendor/google.golang.org/protobuf/proto/checkinit.go create mode 100644 vendor/google.golang.org/protobuf/proto/decode.go create mode 100644 vendor/google.golang.org/protobuf/proto/decode_gen.go create mode 100644 vendor/google.golang.org/protobuf/proto/doc.go create mode 100644 vendor/google.golang.org/protobuf/proto/encode.go create mode 100644 vendor/google.golang.org/protobuf/proto/encode_gen.go create mode 100644 vendor/google.golang.org/protobuf/proto/equal.go create mode 100644 vendor/google.golang.org/protobuf/proto/extension.go create mode 100644 vendor/google.golang.org/protobuf/proto/merge.go create mode 100644 
vendor/google.golang.org/protobuf/proto/messageset.go create mode 100644 vendor/google.golang.org/protobuf/proto/proto.go create mode 100644 vendor/google.golang.org/protobuf/proto/proto_methods.go create mode 100644 vendor/google.golang.org/protobuf/proto/proto_reflect.go create mode 100644 vendor/google.golang.org/protobuf/proto/reset.go create mode 100644 vendor/google.golang.org/protobuf/proto/size.go create mode 100644 vendor/google.golang.org/protobuf/proto/size_gen.go create mode 100644 vendor/google.golang.org/protobuf/proto/wrappers.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_resolve.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/editions.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protodesc/proto.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/methods.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/source.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/type.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/value.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/value_equal.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/value_pure.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/value_union.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoreflect/value_unsafe_go120.go create mode 100644 
vendor/google.golang.org/protobuf/reflect/protoreflect/value_unsafe_go121.go create mode 100644 vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go create mode 100644 vendor/google.golang.org/protobuf/runtime/protoiface/legacy.go create mode 100644 vendor/google.golang.org/protobuf/runtime/protoiface/methods.go create mode 100644 vendor/google.golang.org/protobuf/runtime/protoimpl/impl.go create mode 100644 vendor/google.golang.org/protobuf/runtime/protoimpl/version.go create mode 100644 vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go create mode 100644 vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.pb.go create mode 100644 vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.proto create mode 100644 vendor/gopkg.in/yaml.v2/.travis.yml create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE create mode 100644 vendor/gopkg.in/yaml.v2/LICENSE.libyaml create mode 100644 vendor/gopkg.in/yaml.v2/NOTICE create mode 100644 vendor/gopkg.in/yaml.v2/README.md create mode 100644 vendor/gopkg.in/yaml.v2/apic.go create mode 100644 vendor/gopkg.in/yaml.v2/decode.go create mode 100644 vendor/gopkg.in/yaml.v2/emitterc.go create mode 100644 vendor/gopkg.in/yaml.v2/encode.go create mode 100644 vendor/gopkg.in/yaml.v2/parserc.go create mode 100644 vendor/gopkg.in/yaml.v2/readerc.go create mode 100644 vendor/gopkg.in/yaml.v2/resolve.go create mode 100644 vendor/gopkg.in/yaml.v2/scannerc.go create mode 100644 vendor/gopkg.in/yaml.v2/sorter.go create mode 100644 vendor/gopkg.in/yaml.v2/writerc.go create mode 100644 vendor/gopkg.in/yaml.v2/yaml.go create mode 100644 vendor/gopkg.in/yaml.v2/yamlh.go create mode 100644 vendor/gopkg.in/yaml.v2/yamlprivateh.go create mode 100644 vendor/gopkg.in/yaml.v3/LICENSE create mode 100644 vendor/gopkg.in/yaml.v3/NOTICE create mode 100644 vendor/gopkg.in/yaml.v3/README.md create mode 100644 vendor/gopkg.in/yaml.v3/apic.go create mode 100644 vendor/gopkg.in/yaml.v3/decode.go create mode 
100644 vendor/gopkg.in/yaml.v3/emitterc.go create mode 100644 vendor/gopkg.in/yaml.v3/encode.go create mode 100644 vendor/gopkg.in/yaml.v3/parserc.go create mode 100644 vendor/gopkg.in/yaml.v3/readerc.go create mode 100644 vendor/gopkg.in/yaml.v3/resolve.go create mode 100644 vendor/gopkg.in/yaml.v3/scannerc.go create mode 100644 vendor/gopkg.in/yaml.v3/sorter.go create mode 100644 vendor/gopkg.in/yaml.v3/writerc.go create mode 100644 vendor/gopkg.in/yaml.v3/yaml.go create mode 100644 vendor/gopkg.in/yaml.v3/yamlh.go create mode 100644 vendor/gopkg.in/yaml.v3/yamlprivateh.go create mode 100644 vendor/modules.txt diff --git a/go.mod b/go.mod index 099c862..91817cb 100644 --- a/go.mod +++ b/go.mod @@ -28,6 +28,7 @@ require ( github.com/gobuffalo/flect v1.0.2 github.com/gobwas/glob v0.2.3 github.com/gofrs/uuid v4.4.0+incompatible + github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e github.com/gohugoio/locales v0.14.0 github.com/gohugoio/localescompressed v1.0.1 github.com/gorilla/schema v1.2.1 @@ -98,7 +99,6 @@ require ( github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect github.com/go-openapi/validate v0.24.0 // indirect - github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/go-cmp v0.6.0 // indirect diff --git a/go.sum b/go.sum index 77562ee..8b5e248 100644 --- a/go.sum +++ b/go.sum @@ -56,6 +56,7 @@ github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VY github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69 h1:+tu3HOoMXB7RXEINRVIpxJCT+KdYiI7LAEAUrOw3dIU= github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69/go.mod h1:L1AbZdiDllfyYH5l5OkAaZtk7VkWe89bPJFmnDBNHxg= +github.com/BurntSushi/toml v0.3.1 
h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= diff --git a/internal/domain/content/entity/query.go b/internal/domain/content/entity/query.go index 129a184..1f58f0a 100644 --- a/internal/domain/content/entity/query.go +++ b/internal/domain/content/entity/query.go @@ -7,26 +7,45 @@ import ( ) func (c *Content) search(contentType string, query string) ([][]byte, error) { - // execute search for query provided, if no index for type send 404 - indices, err := c.Search.TypeQuery(contentType, query, 10, 0) - if errors.Is(err, content.ErrNoIndex) { - c.Log.Errorf("Index for type %s not found", contentType) + const pageSize = 100 // æ¯é¡µæœ€å¤§æŸ¥è¯¢æ•°é‡ + offset := 0 - return nil, err - } - if err != nil { - c.Log.Errorf("Error searching for type %s: %v", contentType, err) - return nil, err - } + var allResults [][]byte - // respond with json formatted results - bb, err := c.GetContents(indices) - if err != nil { - c.Log.Errorf("Error getting content: %v", err) - return nil, err + for { + // execute search for query provided, if no index for type send 404 + indices, err := c.Search.TypeQuery(contentType, query, pageSize, offset) + if errors.Is(err, content.ErrNoIndex) { + c.Log.Errorf("Index for type %s not found", contentType) + + return nil, err + } + if err != nil { + c.Log.Errorf("Error searching for type %s: %v", contentType, err) + return nil, err + } + + if len(indices) == 0 { + break + } + + // respond with json formatted results + bb, err := c.GetContents(indices) + if err != nil { + c.Log.Errorf("Error getting content: %v", err) + return nil, err + } + + allResults = append(allResults, bb...) 
+ + if len(indices) < pageSize { + break + } + + offset += pageSize } - return bb, nil + return allResults, nil } func (c *Content) termSearch(contentType string, keyValue map[string]string) ([][]byte, error) { diff --git a/internal/domain/content/valueobject/site.go b/internal/domain/content/valueobject/site.go index ce506b0..1fa7f73 100644 --- a/internal/domain/content/valueobject/site.go +++ b/internal/domain/content/valueobject/site.go @@ -7,6 +7,7 @@ import ( "github.com/gohugonet/hugoverse/pkg/editor" "github.com/gohugonet/hugoverse/pkg/language" "net/http" + "strings" "text/template" ) @@ -21,6 +22,7 @@ type Site struct { Owner string `json:"owner"` WorkingDir string `json:"working_dir"` Languages []string `json:"languages"` + Menus []string `json:"menus"` } // MarshalEditor writes a buffer of html to edit a Song within the CMS @@ -83,6 +85,13 @@ func (s *Site) MarshalEditor() ([]byte, error) { "placeholder": "Enter the Languages here", }), }, + editor.Field{ + View: editor.Input("Menus", s, map[string]string{ + "label": "Menus", + "type": "text", + "placeholder": "Enter the Menus here", + }), + }, ) if err != nil { @@ -196,6 +205,7 @@ owner = "{{.Owner}}" [[module.imports]] path = "{{.Theme}}" + {{- if .IsMultiLanguages}} [languages] {{- range $index, $lang := .Languages }} @@ -206,6 +216,18 @@ owner = "{{.Owner}}" {{- end }} {{- end }} + +{{- if .HasMenus }} +[menu] +{{- range $index, $menu := .Menus }} + [[menu.after]] + name = "{{ index (split $menu ",") 0 }}" + url = "{{ index (split $menu ",") 1 }}" + weight = {{ add $index 1 }} +{{- end }} +{{- end }} + + [params] {{.Params}} @@ -214,6 +236,7 @@ owner = "{{.Owner}}" "add": func(a, b int) int { return a + b }, + "split": strings.Split, "getLanguageName": language.GetLanguageName, } @@ -234,11 +257,16 @@ func (s *Site) IsMultiLanguages() bool { return len(s.Languages) > 1 } +func (s *Site) HasMenus() bool { + return len(s.Menus) > 0 +} + func (s *Site) UnmarshalJSON(data []byte) error { // Create a 
temporary struct with the same fields type Alias Site temp := &struct { Languages interface{} `json:"languages"` + Menus interface{} `json:"menus"` *Alias }{ Alias: (*Alias)(s), @@ -266,5 +294,22 @@ func (s *Site) UnmarshalJSON(data []byte) error { } } + // Handle the "menus" field + switch v := temp.Menus.(type) { + case nil: + // If it's nil or an empty string, set Menus as an empty array + s.Menus = []string{} + case string: + // If it's a single string, wrap it in an array + s.Menus = []string{v} + case []interface{}: + // If it's an array, convert it into a slice of strings + for _, item := range v { + if str, ok := item.(string); ok { + s.Menus = append(s.Menus, str) + } + } + } + return nil } diff --git a/internal/interfaces/cli/vercurr.go b/internal/interfaces/cli/vercurr.go index ed6c5da..7bf42ce 100644 --- a/internal/interfaces/cli/vercurr.go +++ b/internal/interfaces/cli/vercurr.go @@ -1,8 +1,8 @@ package cli var CurrentVersion = Version{ - Major: 0, - Minor: 1, - PatchLevel: 0, - Suffix: "", + Major: 0, + Minor: 1, + PatchLevel: 1, + Suffix: "", } diff --git a/manifest.json b/manifest.json index 3a1ba32..e6904ef 100644 --- a/manifest.json +++ b/manifest.json @@ -1,5 +1,5 @@ { - "version": "0.1.0", + "version": "0.1.1", "name": "Hugoverse", "description": "Headless CMS for Hugo", "author": "sunwei", diff --git a/vendor/github.com/Azure/go-autorest/autorest/LICENSE b/vendor/github.com/Azure/go-autorest/autorest/LICENSE new file mode 100644 index 0000000..b9d6a27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + Copyright 2015 Microsoft Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/LICENSE b/vendor/github.com/Azure/go-autorest/autorest/adal/LICENSE new file mode 100644 index 0000000..b9d6a27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2015 Microsoft Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/README.md b/vendor/github.com/Azure/go-autorest/autorest/adal/README.md new file mode 100644 index 0000000..fec416a --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/README.md @@ -0,0 +1,292 @@ +# Azure Active Directory authentication for Go + +This is a standalone package for authenticating with Azure Active +Directory from other Go libraries and applications, in particular the [Azure SDK +for Go](https://github.com/Azure/azure-sdk-for-go). + +Note: Despite the package's name it is not related to other "ADAL" libraries +maintained in the [github.com/AzureAD](https://github.com/AzureAD) org. Issues +should be opened in [this repo's](https://github.com/Azure/go-autorest/issues) +or [the SDK's](https://github.com/Azure/azure-sdk-for-go/issues) issue +trackers. + +## Install + +```bash +go get -u github.com/Azure/go-autorest/autorest/adal +``` + +## Usage + +An Active Directory application is required in order to use this library. An application can be registered in the [Azure Portal](https://portal.azure.com/) by following these [guidelines](https://docs.microsoft.com/en-us/azure/active-directory/develop/active-directory-integrating-applications) or using the [Azure CLI](https://github.com/Azure/azure-cli). + +### Register an Azure AD Application with secret + + +1. Register a new application with a `secret` credential + + ``` + az ad app create \ + --display-name example-app \ + --homepage https://example-app/home \ + --identifier-uris https://example-app/app \ + --password secret + ``` + +2. Create a service principal using the `Application ID` from previous step + + ``` + az ad sp create --id "Application ID" + ``` + + * Replace `Application ID` with `appId` from step 1. + +### Register an Azure AD Application with certificate + +1. 
Create a private key + + ``` + openssl genrsa -out "example-app.key" 2048 + ``` + +2. Create the certificate + + ``` + openssl req -new -key "example-app.key" -subj "/CN=example-app" -out "example-app.csr" + openssl x509 -req -in "example-app.csr" -signkey "example-app.key" -out "example-app.crt" -days 10000 + ``` + +3. Create the PKCS12 version of the certificate containing also the private key + + ``` + openssl pkcs12 -export -out "example-app.pfx" -inkey "example-app.key" -in "example-app.crt" -passout pass: + + ``` + +4. Register a new application with the certificate content form `example-app.crt` + + ``` + certificateContents="$(tail -n+2 "example-app.crt" | head -n-1)" + + az ad app create \ + --display-name example-app \ + --homepage https://example-app/home \ + --identifier-uris https://example-app/app \ + --key-usage Verify --end-date 2018-01-01 \ + --key-value "${certificateContents}" + ``` + +5. Create a service principal using the `Application ID` from previous step + + ``` + az ad sp create --id "APPLICATION_ID" + ``` + + * Replace `APPLICATION_ID` with `appId` from step 4. + + +### Grant the necessary permissions + +Azure relies on a Role-Based Access Control (RBAC) model to manage the access to resources at a fine-grained +level. There is a set of [pre-defined roles](https://docs.microsoft.com/en-us/azure/active-directory/role-based-access-built-in-roles) +which can be assigned to a service principal of an Azure AD application depending of your needs. + +``` +az role assignment create --assigner "SERVICE_PRINCIPAL_ID" --role "ROLE_NAME" +``` + +* Replace the `SERVICE_PRINCIPAL_ID` with the `appId` from previous step. +* Replace the `ROLE_NAME` with a role name of your choice. + +It is also possible to define custom role definitions. 
+ +``` +az role definition create --role-definition role-definition.json +``` + +* Check [custom roles](https://docs.microsoft.com/en-us/azure/active-directory/role-based-access-control-custom-roles) for more details regarding the content of `role-definition.json` file. + + +### Acquire Access Token + +The common configuration used by all flows: + +```Go +const activeDirectoryEndpoint = "https://login.microsoftonline.com/" +tenantID := "TENANT_ID" +oauthConfig, err := adal.NewOAuthConfig(activeDirectoryEndpoint, tenantID) + +applicationID := "APPLICATION_ID" + +callback := func(token adal.Token) error { + // This is called after the token is acquired +} + +// The resource for which the token is acquired +resource := "https://management.core.windows.net/" +``` + +* Replace the `TENANT_ID` with your tenant ID. +* Replace the `APPLICATION_ID` with the value from previous section. + +#### Client Credentials + +```Go +applicationSecret := "APPLICATION_SECRET" + +spt, err := adal.NewServicePrincipalToken( + *oauthConfig, + appliationID, + applicationSecret, + resource, + callbacks...) +if err != nil { + return nil, err +} + +// Acquire a new access token +err = spt.Refresh() +if (err == nil) { + token := spt.Token +} +``` + +* Replace the `APPLICATION_SECRET` with the `password` value from previous section. + +#### Client Certificate + +```Go +certificatePath := "./example-app.pfx" + +certData, err := ioutil.ReadFile(certificatePath) +if err != nil { + return nil, fmt.Errorf("failed to read the certificate file (%s): %v", certificatePath, err) +} + +// Get the certificate and private key from pfx file +certificate, rsaPrivateKey, err := decodePkcs12(certData, "") +if err != nil { + return nil, fmt.Errorf("failed to decode pkcs12 certificate while creating spt: %v", err) +} + +spt, err := adal.NewServicePrincipalTokenFromCertificate( + *oauthConfig, + applicationID, + certificate, + rsaPrivateKey, + resource, + callbacks...) 
+ +// Acquire a new access token +err = spt.Refresh() +if (err == nil) { + token := spt.Token +} +``` + +* Update the certificate path to point to the example-app.pfx file which was created in previous section. + + +#### Device Code + +```Go +oauthClient := &http.Client{} + +// Acquire the device code +deviceCode, err := adal.InitiateDeviceAuth( + oauthClient, + *oauthConfig, + applicationID, + resource) +if err != nil { + return nil, fmt.Errorf("Failed to start device auth flow: %s", err) +} + +// Display the authentication message +fmt.Println(*deviceCode.Message) + +// Wait here until the user is authenticated +token, err := adal.WaitForUserCompletion(oauthClient, deviceCode) +if err != nil { + return nil, fmt.Errorf("Failed to finish device auth flow: %s", err) +} + +spt, err := adal.NewServicePrincipalTokenFromManualToken( + *oauthConfig, + applicationID, + resource, + *token, + callbacks...) + +if (err == nil) { + token := spt.Token +} +``` + +#### Username password authenticate + +```Go +spt, err := adal.NewServicePrincipalTokenFromUsernamePassword( + *oauthConfig, + applicationID, + username, + password, + resource, + callbacks...) + +if (err == nil) { + token := spt.Token +} +``` + +#### Authorization code authenticate + +``` Go +spt, err := adal.NewServicePrincipalTokenFromAuthorizationCode( + *oauthConfig, + applicationID, + clientSecret, + authorizationCode, + redirectURI, + resource, + callbacks...) + +err = spt.Refresh() +if (err == nil) { + token := spt.Token +} +``` + +### Command Line Tool + +A command line tool is available in `cmd/adal.go` that can acquire a token for a given resource. It supports all flows mentioned above. 
+ +``` +adal -h + +Usage of ./adal: + -applicationId string + application id + -certificatePath string + path to pk12/PFC application certificate + -mode string + authentication mode (device, secret, cert, refresh) (default "device") + -resource string + resource for which the token is requested + -secret string + application secret + -tenantId string + tenant id + -tokenCachePath string + location of oath token cache (default "/home/cgc/.adal/accessToken.json") +``` + +Example acquire a token for `https://management.core.windows.net/` using device code flow: + +``` +adal -mode device \ + -applicationId "APPLICATION_ID" \ + -tenantId "TENANT_ID" \ + -resource https://management.core.windows.net/ + +``` diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/config.go b/vendor/github.com/Azure/go-autorest/autorest/adal/config.go new file mode 100644 index 0000000..fa59647 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/config.go @@ -0,0 +1,151 @@ +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import ( + "errors" + "fmt" + "net/url" +) + +const ( + activeDirectoryEndpointTemplate = "%s/oauth2/%s%s" +) + +// OAuthConfig represents the endpoints needed +// in OAuth operations +type OAuthConfig struct { + AuthorityEndpoint url.URL `json:"authorityEndpoint"` + AuthorizeEndpoint url.URL `json:"authorizeEndpoint"` + TokenEndpoint url.URL `json:"tokenEndpoint"` + DeviceCodeEndpoint url.URL `json:"deviceCodeEndpoint"` +} + +// IsZero returns true if the OAuthConfig object is zero-initialized. +func (oac OAuthConfig) IsZero() bool { + return oac == OAuthConfig{} +} + +func validateStringParam(param, name string) error { + if len(param) == 0 { + return fmt.Errorf("parameter '" + name + "' cannot be empty") + } + return nil +} + +// NewOAuthConfig returns an OAuthConfig with tenant specific urls +func NewOAuthConfig(activeDirectoryEndpoint, tenantID string) (*OAuthConfig, error) { + apiVer := "1.0" + return NewOAuthConfigWithAPIVersion(activeDirectoryEndpoint, tenantID, &apiVer) +} + +// NewOAuthConfigWithAPIVersion returns an OAuthConfig with tenant specific urls. +// If apiVersion is not nil the "api-version" query parameter will be appended to the endpoint URLs with the specified value. 
+func NewOAuthConfigWithAPIVersion(activeDirectoryEndpoint, tenantID string, apiVersion *string) (*OAuthConfig, error) { + if err := validateStringParam(activeDirectoryEndpoint, "activeDirectoryEndpoint"); err != nil { + return nil, err + } + api := "" + // it's legal for tenantID to be empty so don't validate it + if apiVersion != nil { + if err := validateStringParam(*apiVersion, "apiVersion"); err != nil { + return nil, err + } + api = fmt.Sprintf("?api-version=%s", *apiVersion) + } + u, err := url.Parse(activeDirectoryEndpoint) + if err != nil { + return nil, err + } + authorityURL, err := u.Parse(tenantID) + if err != nil { + return nil, err + } + authorizeURL, err := u.Parse(fmt.Sprintf(activeDirectoryEndpointTemplate, tenantID, "authorize", api)) + if err != nil { + return nil, err + } + tokenURL, err := u.Parse(fmt.Sprintf(activeDirectoryEndpointTemplate, tenantID, "token", api)) + if err != nil { + return nil, err + } + deviceCodeURL, err := u.Parse(fmt.Sprintf(activeDirectoryEndpointTemplate, tenantID, "devicecode", api)) + if err != nil { + return nil, err + } + + return &OAuthConfig{ + AuthorityEndpoint: *authorityURL, + AuthorizeEndpoint: *authorizeURL, + TokenEndpoint: *tokenURL, + DeviceCodeEndpoint: *deviceCodeURL, + }, nil +} + +// MultiTenantOAuthConfig provides endpoints for primary and aulixiary tenant IDs. +type MultiTenantOAuthConfig interface { + PrimaryTenant() *OAuthConfig + AuxiliaryTenants() []*OAuthConfig +} + +// OAuthOptions contains optional OAuthConfig creation arguments. +type OAuthOptions struct { + APIVersion string +} + +func (c OAuthOptions) apiVersion() string { + if c.APIVersion != "" { + return fmt.Sprintf("?api-version=%s", c.APIVersion) + } + return "1.0" +} + +// NewMultiTenantOAuthConfig creates an object that support multitenant OAuth configuration. +// See https://docs.microsoft.com/en-us/azure/azure-resource-manager/authenticate-multi-tenant for more information. 
+func NewMultiTenantOAuthConfig(activeDirectoryEndpoint, primaryTenantID string, auxiliaryTenantIDs []string, options OAuthOptions) (MultiTenantOAuthConfig, error) { + if len(auxiliaryTenantIDs) == 0 || len(auxiliaryTenantIDs) > 3 { + return nil, errors.New("must specify one to three auxiliary tenants") + } + mtCfg := multiTenantOAuthConfig{ + cfgs: make([]*OAuthConfig, len(auxiliaryTenantIDs)+1), + } + apiVer := options.apiVersion() + pri, err := NewOAuthConfigWithAPIVersion(activeDirectoryEndpoint, primaryTenantID, &apiVer) + if err != nil { + return nil, fmt.Errorf("failed to create OAuthConfig for primary tenant: %v", err) + } + mtCfg.cfgs[0] = pri + for i := range auxiliaryTenantIDs { + aux, err := NewOAuthConfig(activeDirectoryEndpoint, auxiliaryTenantIDs[i]) + if err != nil { + return nil, fmt.Errorf("failed to create OAuthConfig for tenant '%s': %v", auxiliaryTenantIDs[i], err) + } + mtCfg.cfgs[i+1] = aux + } + return mtCfg, nil +} + +type multiTenantOAuthConfig struct { + // first config in the slice is the primary tenant + cfgs []*OAuthConfig +} + +func (m multiTenantOAuthConfig) PrimaryTenant() *OAuthConfig { + return m.cfgs[0] +} + +func (m multiTenantOAuthConfig) AuxiliaryTenants() []*OAuthConfig { + return m.cfgs[1:] +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/devicetoken.go b/vendor/github.com/Azure/go-autorest/autorest/adal/devicetoken.go new file mode 100644 index 0000000..914f8af --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/devicetoken.go @@ -0,0 +1,269 @@ +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* + This file is largely based on rjw57/oauth2device's code, with the follow differences: + * scope -> resource, and only allow a single one + * receive "Message" in the DeviceCode struct and show it to users as the prompt + * azure-xplat-cli has the following behavior that this emulates: + - does not send client_secret during the token exchange + - sends resource again in the token exchange request +*/ + +import ( + "context" + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "strings" + "time" +) + +const ( + logPrefix = "autorest/adal/devicetoken:" +) + +var ( + // ErrDeviceGeneric represents an unknown error from the token endpoint when using device flow + ErrDeviceGeneric = fmt.Errorf("%s Error while retrieving OAuth token: Unknown Error", logPrefix) + + // ErrDeviceAccessDenied represents an access denied error from the token endpoint when using device flow + ErrDeviceAccessDenied = fmt.Errorf("%s Error while retrieving OAuth token: Access Denied", logPrefix) + + // ErrDeviceAuthorizationPending represents the server waiting on the user to complete the device flow + ErrDeviceAuthorizationPending = fmt.Errorf("%s Error while retrieving OAuth token: Authorization Pending", logPrefix) + + // ErrDeviceCodeExpired represents the server timing out and expiring the code during device flow + ErrDeviceCodeExpired = fmt.Errorf("%s Error while retrieving OAuth token: Code Expired", logPrefix) + + // ErrDeviceSlowDown represents the service telling us we're polling too often during device flow + ErrDeviceSlowDown = fmt.Errorf("%s 
Error while retrieving OAuth token: Slow Down", logPrefix) + + // ErrDeviceCodeEmpty represents an empty device code from the device endpoint while using device flow + ErrDeviceCodeEmpty = fmt.Errorf("%s Error while retrieving device code: Device Code Empty", logPrefix) + + // ErrOAuthTokenEmpty represents an empty OAuth token from the token endpoint when using device flow + ErrOAuthTokenEmpty = fmt.Errorf("%s Error while retrieving OAuth token: Token Empty", logPrefix) + + errCodeSendingFails = "Error occurred while sending request for Device Authorization Code" + errCodeHandlingFails = "Error occurred while handling response from the Device Endpoint" + errTokenSendingFails = "Error occurred while sending request with device code for a token" + errTokenHandlingFails = "Error occurred while handling response from the Token Endpoint (during device flow)" + errStatusNotOK = "Error HTTP status != 200" +) + +// DeviceCode is the object returned by the device auth endpoint +// It contains information to instruct the user to complete the auth flow +type DeviceCode struct { + DeviceCode *string `json:"device_code,omitempty"` + UserCode *string `json:"user_code,omitempty"` + VerificationURL *string `json:"verification_url,omitempty"` + ExpiresIn *int64 `json:"expires_in,string,omitempty"` + Interval *int64 `json:"interval,string,omitempty"` + + Message *string `json:"message"` // Azure specific + Resource string // store the following, stored when initiating, used when exchanging + OAuthConfig OAuthConfig + ClientID string +} + +// TokenError is the object returned by the token exchange endpoint +// when something is amiss +type TokenError struct { + Error *string `json:"error,omitempty"` + ErrorCodes []int `json:"error_codes,omitempty"` + ErrorDescription *string `json:"error_description,omitempty"` + Timestamp *string `json:"timestamp,omitempty"` + TraceID *string `json:"trace_id,omitempty"` +} + +// DeviceToken is the object return by the token exchange endpoint +// It 
can either look like a Token or an ErrorToken, so put both here +// and check for presence of "Error" to know if we are in error state +type deviceToken struct { + Token + TokenError +} + +// InitiateDeviceAuth initiates a device auth flow. It returns a DeviceCode +// that can be used with CheckForUserCompletion or WaitForUserCompletion. +// Deprecated: use InitiateDeviceAuthWithContext() instead. +func InitiateDeviceAuth(sender Sender, oauthConfig OAuthConfig, clientID, resource string) (*DeviceCode, error) { + return InitiateDeviceAuthWithContext(context.Background(), sender, oauthConfig, clientID, resource) +} + +// InitiateDeviceAuthWithContext initiates a device auth flow. It returns a DeviceCode +// that can be used with CheckForUserCompletion or WaitForUserCompletion. +func InitiateDeviceAuthWithContext(ctx context.Context, sender Sender, oauthConfig OAuthConfig, clientID, resource string) (*DeviceCode, error) { + v := url.Values{ + "client_id": []string{clientID}, + "resource": []string{resource}, + } + + s := v.Encode() + body := ioutil.NopCloser(strings.NewReader(s)) + + req, err := http.NewRequest(http.MethodPost, oauthConfig.DeviceCodeEndpoint.String(), body) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errCodeSendingFails, err.Error()) + } + + req.ContentLength = int64(len(s)) + req.Header.Set(contentType, mimeTypeFormPost) + resp, err := sender.Do(req.WithContext(ctx)) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errCodeSendingFails, err.Error()) + } + defer resp.Body.Close() + + rb, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errCodeHandlingFails, err.Error()) + } + + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errCodeHandlingFails, errStatusNotOK) + } + + if len(strings.Trim(string(rb), " ")) == 0 { + return nil, ErrDeviceCodeEmpty + } + + var code DeviceCode + err = json.Unmarshal(rb, &code) + if err != nil 
{ + return nil, fmt.Errorf("%s %s: %s", logPrefix, errCodeHandlingFails, err.Error()) + } + + code.ClientID = clientID + code.Resource = resource + code.OAuthConfig = oauthConfig + + return &code, nil +} + +// CheckForUserCompletion takes a DeviceCode and checks with the Azure AD OAuth endpoint +// to see if the device flow has: been completed, timed out, or otherwise failed +// Deprecated: use CheckForUserCompletionWithContext() instead. +func CheckForUserCompletion(sender Sender, code *DeviceCode) (*Token, error) { + return CheckForUserCompletionWithContext(context.Background(), sender, code) +} + +// CheckForUserCompletionWithContext takes a DeviceCode and checks with the Azure AD OAuth endpoint +// to see if the device flow has: been completed, timed out, or otherwise failed +func CheckForUserCompletionWithContext(ctx context.Context, sender Sender, code *DeviceCode) (*Token, error) { + v := url.Values{ + "client_id": []string{code.ClientID}, + "code": []string{*code.DeviceCode}, + "grant_type": []string{OAuthGrantTypeDeviceCode}, + "resource": []string{code.Resource}, + } + + s := v.Encode() + body := ioutil.NopCloser(strings.NewReader(s)) + + req, err := http.NewRequest(http.MethodPost, code.OAuthConfig.TokenEndpoint.String(), body) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errTokenSendingFails, err.Error()) + } + + req.ContentLength = int64(len(s)) + req.Header.Set(contentType, mimeTypeFormPost) + resp, err := sender.Do(req.WithContext(ctx)) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errTokenSendingFails, err.Error()) + } + defer resp.Body.Close() + + rb, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errTokenHandlingFails, err.Error()) + } + + if resp.StatusCode != http.StatusOK && len(strings.Trim(string(rb), " ")) == 0 { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errTokenHandlingFails, errStatusNotOK) + } + if len(strings.Trim(string(rb), " ")) == 0 
{ + return nil, ErrOAuthTokenEmpty + } + + var token deviceToken + err = json.Unmarshal(rb, &token) + if err != nil { + return nil, fmt.Errorf("%s %s: %s", logPrefix, errTokenHandlingFails, err.Error()) + } + + if token.Error == nil { + return &token.Token, nil + } + + switch *token.Error { + case "authorization_pending": + return nil, ErrDeviceAuthorizationPending + case "slow_down": + return nil, ErrDeviceSlowDown + case "access_denied": + return nil, ErrDeviceAccessDenied + case "code_expired": + return nil, ErrDeviceCodeExpired + default: + return nil, ErrDeviceGeneric + } +} + +// WaitForUserCompletion calls CheckForUserCompletion repeatedly until a token is granted or an error state occurs. +// This prevents the user from looping and checking against 'ErrDeviceAuthorizationPending'. +// Deprecated: use WaitForUserCompletionWithContext() instead. +func WaitForUserCompletion(sender Sender, code *DeviceCode) (*Token, error) { + return WaitForUserCompletionWithContext(context.Background(), sender, code) +} + +// WaitForUserCompletionWithContext calls CheckForUserCompletion repeatedly until a token is granted or an error +// state occurs. This prevents the user from looping and checking against 'ErrDeviceAuthorizationPending'. +func WaitForUserCompletionWithContext(ctx context.Context, sender Sender, code *DeviceCode) (*Token, error) { + intervalDuration := time.Duration(*code.Interval) * time.Second + waitDuration := intervalDuration + + for { + token, err := CheckForUserCompletionWithContext(ctx, sender, code) + + if err == nil { + return token, nil + } + + switch err { + case ErrDeviceSlowDown: + waitDuration += waitDuration + case ErrDeviceAuthorizationPending: + // noop + default: // everything else is "fatal" to us + return nil, err + } + + if waitDuration > (intervalDuration * 3) { + return nil, fmt.Errorf("%s Error waiting for user to complete device flow. 
Server told us to slow_down too much", logPrefix) + } + + select { + case <-time.After(waitDuration): + // noop + case <-ctx.Done(): + return nil, ctx.Err() + } + } +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go new file mode 100644 index 0000000..28a4bfc --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/go_mod_tidy_hack.go @@ -0,0 +1,24 @@ +// +build modhack + +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of +// the resultant binary. + +// Necessary for safely adding multi-module repo. +// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository +import _ "github.com/Azure/go-autorest/autorest" diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/persist.go b/vendor/github.com/Azure/go-autorest/autorest/adal/persist.go new file mode 100644 index 0000000..9e15f27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/persist.go @@ -0,0 +1,73 @@ +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path/filepath" +) + +// LoadToken restores a Token object from a file located at 'path'. +func LoadToken(path string) (*Token, error) { + file, err := os.Open(path) + if err != nil { + return nil, fmt.Errorf("failed to open file (%s) while loading token: %v", path, err) + } + defer file.Close() + + var token Token + + dec := json.NewDecoder(file) + if err = dec.Decode(&token); err != nil { + return nil, fmt.Errorf("failed to decode contents of file (%s) into Token representation: %v", path, err) + } + return &token, nil +} + +// SaveToken persists an oauth token at the given location on disk. +// It moves the new file into place so it can safely be used to replace an existing file +// that maybe accessed by multiple processes. 
+func SaveToken(path string, mode os.FileMode, token Token) error { + dir := filepath.Dir(path) + err := os.MkdirAll(dir, os.ModePerm) + if err != nil { + return fmt.Errorf("failed to create directory (%s) to store token in: %v", dir, err) + } + + newFile, err := ioutil.TempFile(dir, "token") + if err != nil { + return fmt.Errorf("failed to create the temp file to write the token: %v", err) + } + tempPath := newFile.Name() + + if err := json.NewEncoder(newFile).Encode(token); err != nil { + return fmt.Errorf("failed to encode token to file (%s) while saving token: %v", tempPath, err) + } + if err := newFile.Close(); err != nil { + return fmt.Errorf("failed to close temp file %s: %v", tempPath, err) + } + + // Atomic replace to avoid multi-writer file corruptions + if err := os.Rename(tempPath, path); err != nil { + return fmt.Errorf("failed to move temporary token to desired output location. src=%s dst=%s: %v", tempPath, path, err) + } + if err := os.Chmod(path, mode); err != nil { + return fmt.Errorf("failed to chmod the token file %s: %v", path, err) + } + return nil +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go b/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go new file mode 100644 index 0000000..d7e4372 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/sender.go @@ -0,0 +1,95 @@ +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import ( + "crypto/tls" + "net/http" + "net/http/cookiejar" + "sync" + + "github.com/Azure/go-autorest/tracing" +) + +const ( + contentType = "Content-Type" + mimeTypeFormPost = "application/x-www-form-urlencoded" +) + +var defaultSender Sender +var defaultSenderInit = &sync.Once{} + +// Sender is the interface that wraps the Do method to send HTTP requests. +// +// The standard http.Client conforms to this interface. +type Sender interface { + Do(*http.Request) (*http.Response, error) +} + +// SenderFunc is a method that implements the Sender interface. +type SenderFunc func(*http.Request) (*http.Response, error) + +// Do implements the Sender interface on SenderFunc. +func (sf SenderFunc) Do(r *http.Request) (*http.Response, error) { + return sf(r) +} + +// SendDecorator takes and possibly decorates, by wrapping, a Sender. Decorators may affect the +// http.Request and pass it along or, first, pass the http.Request along then react to the +// http.Response result. +type SendDecorator func(Sender) Sender + +// CreateSender creates, decorates, and returns, as a Sender, the default http.Client. +func CreateSender(decorators ...SendDecorator) Sender { + return DecorateSender(sender(), decorators...) +} + +// DecorateSender accepts a Sender and a, possibly empty, set of SendDecorators, which is applies to +// the Sender. Decorators are applied in the order received, but their affect upon the request +// depends on whether they are a pre-decorator (change the http.Request and then pass it along) or a +// post-decorator (pass the http.Request along and react to the results in http.Response). 
+func DecorateSender(s Sender, decorators ...SendDecorator) Sender { + for _, decorate := range decorators { + s = decorate(s) + } + return s +} + +func sender() Sender { + // note that we can't init defaultSender in init() since it will + // execute before calling code has had a chance to enable tracing + defaultSenderInit.Do(func() { + // Use behaviour compatible with DefaultTransport, but require TLS minimum version. + defaultTransport := http.DefaultTransport.(*http.Transport) + transport := &http.Transport{ + Proxy: defaultTransport.Proxy, + DialContext: defaultTransport.DialContext, + MaxIdleConns: defaultTransport.MaxIdleConns, + IdleConnTimeout: defaultTransport.IdleConnTimeout, + TLSHandshakeTimeout: defaultTransport.TLSHandshakeTimeout, + ExpectContinueTimeout: defaultTransport.ExpectContinueTimeout, + TLSClientConfig: &tls.Config{ + MinVersion: tls.VersionTLS12, + }, + } + var roundTripper http.RoundTripper = transport + if tracing.IsEnabled() { + roundTripper = tracing.NewTransport(transport) + } + j, _ := cookiejar.New(nil) + defaultSender = &http.Client{Jar: j, Transport: roundTripper} + }) + return defaultSender +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/token.go b/vendor/github.com/Azure/go-autorest/autorest/adal/token.go new file mode 100644 index 0000000..b65b2c8 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/token.go @@ -0,0 +1,1135 @@ +package adal + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "context" + "crypto/rand" + "crypto/rsa" + "crypto/sha1" + "crypto/x509" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "math" + "net/http" + "net/url" + "os" + "strings" + "sync" + "time" + + "github.com/Azure/go-autorest/autorest/date" + "github.com/dgrijalva/jwt-go" +) + +const ( + defaultRefresh = 5 * time.Minute + + // OAuthGrantTypeDeviceCode is the "grant_type" identifier used in device flow + OAuthGrantTypeDeviceCode = "device_code" + + // OAuthGrantTypeClientCredentials is the "grant_type" identifier used in credential flows + OAuthGrantTypeClientCredentials = "client_credentials" + + // OAuthGrantTypeUserPass is the "grant_type" identifier used in username and password auth flows + OAuthGrantTypeUserPass = "password" + + // OAuthGrantTypeRefreshToken is the "grant_type" identifier used in refresh token flows + OAuthGrantTypeRefreshToken = "refresh_token" + + // OAuthGrantTypeAuthorizationCode is the "grant_type" identifier used in authorization code flows + OAuthGrantTypeAuthorizationCode = "authorization_code" + + // metadataHeader is the header required by MSI extension + metadataHeader = "Metadata" + + // msiEndpoint is the well known endpoint for getting MSI authentications tokens + msiEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token" + + // the default number of attempts to refresh an MSI authentication token + defaultMaxMSIRefreshAttempts = 5 + + // asMSIEndpointEnv is the environment variable used to store the endpoint on App Service and Functions + asMSIEndpointEnv = "MSI_ENDPOINT" + + // asMSISecretEnv is the environment variable used to store the request secret on App Service and Functions + asMSISecretEnv = "MSI_SECRET" +) + +// OAuthTokenProvider is an interface which should be implemented by an access token retriever +type OAuthTokenProvider interface { + OAuthToken() string 
+} + +// MultitenantOAuthTokenProvider provides tokens used for multi-tenant authorization. +type MultitenantOAuthTokenProvider interface { + PrimaryOAuthToken() string + AuxiliaryOAuthTokens() []string +} + +// TokenRefreshError is an interface used by errors returned during token refresh. +type TokenRefreshError interface { + error + Response() *http.Response +} + +// Refresher is an interface for token refresh functionality +type Refresher interface { + Refresh() error + RefreshExchange(resource string) error + EnsureFresh() error +} + +// RefresherWithContext is an interface for token refresh functionality +type RefresherWithContext interface { + RefreshWithContext(ctx context.Context) error + RefreshExchangeWithContext(ctx context.Context, resource string) error + EnsureFreshWithContext(ctx context.Context) error +} + +// TokenRefreshCallback is the type representing callbacks that will be called after +// a successful token refresh +type TokenRefreshCallback func(Token) error + +// TokenRefresh is a type representing a custom callback to refresh a token +type TokenRefresh func(ctx context.Context, resource string) (*Token, error) + +// Token encapsulates the access token used to authorize Azure requests. +// https://docs.microsoft.com/en-us/azure/active-directory/develop/v1-oauth2-client-creds-grant-flow#service-to-service-access-token-response +type Token struct { + AccessToken string `json:"access_token"` + RefreshToken string `json:"refresh_token"` + + ExpiresIn json.Number `json:"expires_in"` + ExpiresOn json.Number `json:"expires_on"` + NotBefore json.Number `json:"not_before"` + + Resource string `json:"resource"` + Type string `json:"token_type"` +} + +func newToken() Token { + return Token{ + ExpiresIn: "0", + ExpiresOn: "0", + NotBefore: "0", + } +} + +// IsZero returns true if the token object is zero-initialized. +func (t Token) IsZero() bool { + return t == Token{} +} + +// Expires returns the time.Time when the Token expires. 
+func (t Token) Expires() time.Time { + s, err := t.ExpiresOn.Float64() + if err != nil { + s = -3600 + } + + expiration := date.NewUnixTimeFromSeconds(s) + + return time.Time(expiration).UTC() +} + +// IsExpired returns true if the Token is expired, false otherwise. +func (t Token) IsExpired() bool { + return t.WillExpireIn(0) +} + +// WillExpireIn returns true if the Token will expire after the passed time.Duration interval +// from now, false otherwise. +func (t Token) WillExpireIn(d time.Duration) bool { + return !t.Expires().After(time.Now().Add(d)) +} + +//OAuthToken return the current access token +func (t *Token) OAuthToken() string { + return t.AccessToken +} + +// ServicePrincipalSecret is an interface that allows various secret mechanism to fill the form +// that is submitted when acquiring an oAuth token. +type ServicePrincipalSecret interface { + SetAuthenticationValues(spt *ServicePrincipalToken, values *url.Values) error +} + +// ServicePrincipalNoSecret represents a secret type that contains no secret +// meaning it is not valid for fetching a fresh token. This is used by Manual +type ServicePrincipalNoSecret struct { +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret +// It only returns an error for the ServicePrincipalNoSecret type +func (noSecret *ServicePrincipalNoSecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + return fmt.Errorf("Manually created ServicePrincipalToken does not contain secret material to retrieve a new access token") +} + +// MarshalJSON implements the json.Marshaler interface. +func (noSecret ServicePrincipalNoSecret) MarshalJSON() ([]byte, error) { + type tokenType struct { + Type string `json:"type"` + } + return json.Marshal(tokenType{ + Type: "ServicePrincipalNoSecret", + }) +} + +// ServicePrincipalTokenSecret implements ServicePrincipalSecret for client_secret type authorization. 
+type ServicePrincipalTokenSecret struct { + ClientSecret string `json:"value"` +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret. +// It will populate the form submitted during oAuth Token Acquisition using the client_secret. +func (tokenSecret *ServicePrincipalTokenSecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + v.Set("client_secret", tokenSecret.ClientSecret) + return nil +} + +// MarshalJSON implements the json.Marshaler interface. +func (tokenSecret ServicePrincipalTokenSecret) MarshalJSON() ([]byte, error) { + type tokenType struct { + Type string `json:"type"` + Value string `json:"value"` + } + return json.Marshal(tokenType{ + Type: "ServicePrincipalTokenSecret", + Value: tokenSecret.ClientSecret, + }) +} + +// ServicePrincipalCertificateSecret implements ServicePrincipalSecret for generic RSA cert auth with signed JWTs. +type ServicePrincipalCertificateSecret struct { + Certificate *x509.Certificate + PrivateKey *rsa.PrivateKey +} + +// SignJwt returns the JWT signed with the certificate's private key. +func (secret *ServicePrincipalCertificateSecret) SignJwt(spt *ServicePrincipalToken) (string, error) { + hasher := sha1.New() + _, err := hasher.Write(secret.Certificate.Raw) + if err != nil { + return "", err + } + + thumbprint := base64.URLEncoding.EncodeToString(hasher.Sum(nil)) + + // The jti (JWT ID) claim provides a unique identifier for the JWT. 
+ jti := make([]byte, 20) + _, err = rand.Read(jti) + if err != nil { + return "", err + } + + token := jwt.New(jwt.SigningMethodRS256) + token.Header["x5t"] = thumbprint + x5c := []string{base64.StdEncoding.EncodeToString(secret.Certificate.Raw)} + token.Header["x5c"] = x5c + token.Claims = jwt.MapClaims{ + "aud": spt.inner.OauthConfig.TokenEndpoint.String(), + "iss": spt.inner.ClientID, + "sub": spt.inner.ClientID, + "jti": base64.URLEncoding.EncodeToString(jti), + "nbf": time.Now().Unix(), + "exp": time.Now().Add(24 * time.Hour).Unix(), + } + + signedString, err := token.SignedString(secret.PrivateKey) + return signedString, err +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret. +// It will populate the form submitted during oAuth Token Acquisition using a JWT signed with a certificate. +func (secret *ServicePrincipalCertificateSecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + jwt, err := secret.SignJwt(spt) + if err != nil { + return err + } + + v.Set("client_assertion", jwt) + v.Set("client_assertion_type", "urn:ietf:params:oauth:client-assertion-type:jwt-bearer") + return nil +} + +// MarshalJSON implements the json.Marshaler interface. +func (secret ServicePrincipalCertificateSecret) MarshalJSON() ([]byte, error) { + return nil, errors.New("marshalling ServicePrincipalCertificateSecret is not supported") +} + +// ServicePrincipalMSISecret implements ServicePrincipalSecret for machines running the MSI Extension. +type ServicePrincipalMSISecret struct { +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret. +func (msiSecret *ServicePrincipalMSISecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + return nil +} + +// MarshalJSON implements the json.Marshaler interface. 
+func (msiSecret ServicePrincipalMSISecret) MarshalJSON() ([]byte, error) { + return nil, errors.New("marshalling ServicePrincipalMSISecret is not supported") +} + +// ServicePrincipalUsernamePasswordSecret implements ServicePrincipalSecret for username and password auth. +type ServicePrincipalUsernamePasswordSecret struct { + Username string `json:"username"` + Password string `json:"password"` +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret. +func (secret *ServicePrincipalUsernamePasswordSecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + v.Set("username", secret.Username) + v.Set("password", secret.Password) + return nil +} + +// MarshalJSON implements the json.Marshaler interface. +func (secret ServicePrincipalUsernamePasswordSecret) MarshalJSON() ([]byte, error) { + type tokenType struct { + Type string `json:"type"` + Username string `json:"username"` + Password string `json:"password"` + } + return json.Marshal(tokenType{ + Type: "ServicePrincipalUsernamePasswordSecret", + Username: secret.Username, + Password: secret.Password, + }) +} + +// ServicePrincipalAuthorizationCodeSecret implements ServicePrincipalSecret for authorization code auth. +type ServicePrincipalAuthorizationCodeSecret struct { + ClientSecret string `json:"value"` + AuthorizationCode string `json:"authCode"` + RedirectURI string `json:"redirect"` +} + +// SetAuthenticationValues is a method of the interface ServicePrincipalSecret. +func (secret *ServicePrincipalAuthorizationCodeSecret) SetAuthenticationValues(spt *ServicePrincipalToken, v *url.Values) error { + v.Set("code", secret.AuthorizationCode) + v.Set("client_secret", secret.ClientSecret) + v.Set("redirect_uri", secret.RedirectURI) + return nil +} + +// MarshalJSON implements the json.Marshaler interface. 
+func (secret ServicePrincipalAuthorizationCodeSecret) MarshalJSON() ([]byte, error) { + type tokenType struct { + Type string `json:"type"` + Value string `json:"value"` + AuthCode string `json:"authCode"` + Redirect string `json:"redirect"` + } + return json.Marshal(tokenType{ + Type: "ServicePrincipalAuthorizationCodeSecret", + Value: secret.ClientSecret, + AuthCode: secret.AuthorizationCode, + Redirect: secret.RedirectURI, + }) +} + +// ServicePrincipalToken encapsulates a Token created for a Service Principal. +type ServicePrincipalToken struct { + inner servicePrincipalToken + refreshLock *sync.RWMutex + sender Sender + customRefreshFunc TokenRefresh + refreshCallbacks []TokenRefreshCallback + // MaxMSIRefreshAttempts is the maximum number of attempts to refresh an MSI token. + MaxMSIRefreshAttempts int +} + +// MarshalTokenJSON returns the marshalled inner token. +func (spt ServicePrincipalToken) MarshalTokenJSON() ([]byte, error) { + return json.Marshal(spt.inner.Token) +} + +// SetRefreshCallbacks replaces any existing refresh callbacks with the specified callbacks. +func (spt *ServicePrincipalToken) SetRefreshCallbacks(callbacks []TokenRefreshCallback) { + spt.refreshCallbacks = callbacks +} + +// SetCustomRefreshFunc sets a custom refresh function used to refresh the token. +func (spt *ServicePrincipalToken) SetCustomRefreshFunc(customRefreshFunc TokenRefresh) { + spt.customRefreshFunc = customRefreshFunc +} + +// MarshalJSON implements the json.Marshaler interface. +func (spt ServicePrincipalToken) MarshalJSON() ([]byte, error) { + return json.Marshal(spt.inner) +} + +// UnmarshalJSON implements the json.Unmarshaler interface. 
+func (spt *ServicePrincipalToken) UnmarshalJSON(data []byte) error { + // need to determine the token type + raw := map[string]interface{}{} + err := json.Unmarshal(data, &raw) + if err != nil { + return err + } + secret := raw["secret"].(map[string]interface{}) + switch secret["type"] { + case "ServicePrincipalNoSecret": + spt.inner.Secret = &ServicePrincipalNoSecret{} + case "ServicePrincipalTokenSecret": + spt.inner.Secret = &ServicePrincipalTokenSecret{} + case "ServicePrincipalCertificateSecret": + return errors.New("unmarshalling ServicePrincipalCertificateSecret is not supported") + case "ServicePrincipalMSISecret": + return errors.New("unmarshalling ServicePrincipalMSISecret is not supported") + case "ServicePrincipalUsernamePasswordSecret": + spt.inner.Secret = &ServicePrincipalUsernamePasswordSecret{} + case "ServicePrincipalAuthorizationCodeSecret": + spt.inner.Secret = &ServicePrincipalAuthorizationCodeSecret{} + default: + return fmt.Errorf("unrecognized token type '%s'", secret["type"]) + } + err = json.Unmarshal(data, &spt.inner) + if err != nil { + return err + } + // Don't override the refreshLock or the sender if those have been already set. + if spt.refreshLock == nil { + spt.refreshLock = &sync.RWMutex{} + } + if spt.sender == nil { + spt.sender = sender() + } + return nil +} + +// internal type used for marshalling/unmarshalling +type servicePrincipalToken struct { + Token Token `json:"token"` + Secret ServicePrincipalSecret `json:"secret"` + OauthConfig OAuthConfig `json:"oauth"` + ClientID string `json:"clientID"` + Resource string `json:"resource"` + AutoRefresh bool `json:"autoRefresh"` + RefreshWithin time.Duration `json:"refreshWithin"` +} + +func validateOAuthConfig(oac OAuthConfig) error { + if oac.IsZero() { + return fmt.Errorf("parameter 'oauthConfig' cannot be zero-initialized") + } + return nil +} + +// NewServicePrincipalTokenWithSecret create a ServicePrincipalToken using the supplied ServicePrincipalSecret implementation. 
+func NewServicePrincipalTokenWithSecret(oauthConfig OAuthConfig, id string, resource string, secret ServicePrincipalSecret, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(id, "id"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + if secret == nil { + return nil, fmt.Errorf("parameter 'secret' cannot be nil") + } + spt := &ServicePrincipalToken{ + inner: servicePrincipalToken{ + Token: newToken(), + OauthConfig: oauthConfig, + Secret: secret, + ClientID: id, + Resource: resource, + AutoRefresh: true, + RefreshWithin: defaultRefresh, + }, + refreshLock: &sync.RWMutex{}, + sender: sender(), + refreshCallbacks: callbacks, + } + return spt, nil +} + +// NewServicePrincipalTokenFromManualToken creates a ServicePrincipalToken using the supplied token +func NewServicePrincipalTokenFromManualToken(oauthConfig OAuthConfig, clientID string, resource string, token Token, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + if token.IsZero() { + return nil, fmt.Errorf("parameter 'token' cannot be zero-initialized") + } + spt, err := NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + &ServicePrincipalNoSecret{}, + callbacks...) 
+ if err != nil { + return nil, err + } + + spt.inner.Token = token + + return spt, nil +} + +// NewServicePrincipalTokenFromManualTokenSecret creates a ServicePrincipalToken using the supplied token and secret +func NewServicePrincipalTokenFromManualTokenSecret(oauthConfig OAuthConfig, clientID string, resource string, token Token, secret ServicePrincipalSecret, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + if secret == nil { + return nil, fmt.Errorf("parameter 'secret' cannot be nil") + } + if token.IsZero() { + return nil, fmt.Errorf("parameter 'token' cannot be zero-initialized") + } + spt, err := NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + secret, + callbacks...) + if err != nil { + return nil, err + } + + spt.inner.Token = token + + return spt, nil +} + +// NewServicePrincipalToken creates a ServicePrincipalToken from the supplied Service Principal +// credentials scoped to the named resource. 
+func NewServicePrincipalToken(oauthConfig OAuthConfig, clientID string, secret string, resource string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(secret, "secret"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + return NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + &ServicePrincipalTokenSecret{ + ClientSecret: secret, + }, + callbacks..., + ) +} + +// NewServicePrincipalTokenFromCertificate creates a ServicePrincipalToken from the supplied pkcs12 bytes. +func NewServicePrincipalTokenFromCertificate(oauthConfig OAuthConfig, clientID string, certificate *x509.Certificate, privateKey *rsa.PrivateKey, resource string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + if certificate == nil { + return nil, fmt.Errorf("parameter 'certificate' cannot be nil") + } + if privateKey == nil { + return nil, fmt.Errorf("parameter 'privateKey' cannot be nil") + } + return NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + &ServicePrincipalCertificateSecret{ + PrivateKey: privateKey, + Certificate: certificate, + }, + callbacks..., + ) +} + +// NewServicePrincipalTokenFromUsernamePassword creates a ServicePrincipalToken from the username and password. 
+func NewServicePrincipalTokenFromUsernamePassword(oauthConfig OAuthConfig, clientID string, username string, password string, resource string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(username, "username"); err != nil { + return nil, err + } + if err := validateStringParam(password, "password"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + return NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + &ServicePrincipalUsernamePasswordSecret{ + Username: username, + Password: password, + }, + callbacks..., + ) +} + +// NewServicePrincipalTokenFromAuthorizationCode creates a ServicePrincipalToken from the +func NewServicePrincipalTokenFromAuthorizationCode(oauthConfig OAuthConfig, clientID string, clientSecret string, authorizationCode string, redirectURI string, resource string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + + if err := validateOAuthConfig(oauthConfig); err != nil { + return nil, err + } + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(clientSecret, "clientSecret"); err != nil { + return nil, err + } + if err := validateStringParam(authorizationCode, "authorizationCode"); err != nil { + return nil, err + } + if err := validateStringParam(redirectURI, "redirectURI"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + + return NewServicePrincipalTokenWithSecret( + oauthConfig, + clientID, + resource, + &ServicePrincipalAuthorizationCodeSecret{ + ClientSecret: clientSecret, + AuthorizationCode: authorizationCode, + RedirectURI: redirectURI, + }, + 
callbacks..., + ) +} + +// GetMSIVMEndpoint gets the MSI endpoint on Virtual Machines. +func GetMSIVMEndpoint() (string, error) { + return msiEndpoint, nil +} + +func isAppService() bool { + _, asMSIEndpointEnvExists := os.LookupEnv(asMSIEndpointEnv) + _, asMSISecretEnvExists := os.LookupEnv(asMSISecretEnv) + + return asMSIEndpointEnvExists && asMSISecretEnvExists +} + +// GetMSIAppServiceEndpoint get the MSI endpoint for App Service and Functions +func GetMSIAppServiceEndpoint() (string, error) { + asMSIEndpoint, asMSIEndpointEnvExists := os.LookupEnv(asMSIEndpointEnv) + + if asMSIEndpointEnvExists { + return asMSIEndpoint, nil + } + return "", errors.New("MSI endpoint not found") +} + +// GetMSIEndpoint get the appropriate MSI endpoint depending on the runtime environment +func GetMSIEndpoint() (string, error) { + if isAppService() { + return GetMSIAppServiceEndpoint() + } + return GetMSIVMEndpoint() +} + +// NewServicePrincipalTokenFromMSI creates a ServicePrincipalToken via the MSI VM Extension. +// It will use the system assigned identity when creating the token. +func NewServicePrincipalTokenFromMSI(msiEndpoint, resource string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + return newServicePrincipalTokenFromMSI(msiEndpoint, resource, nil, callbacks...) +} + +// NewServicePrincipalTokenFromMSIWithUserAssignedID creates a ServicePrincipalToken via the MSI VM Extension. +// It will use the specified user assigned identity when creating the token. +func NewServicePrincipalTokenFromMSIWithUserAssignedID(msiEndpoint, resource string, userAssignedID string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + return newServicePrincipalTokenFromMSI(msiEndpoint, resource, &userAssignedID, callbacks...) 
+} + +func newServicePrincipalTokenFromMSI(msiEndpoint, resource string, userAssignedID *string, callbacks ...TokenRefreshCallback) (*ServicePrincipalToken, error) { + if err := validateStringParam(msiEndpoint, "msiEndpoint"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + if userAssignedID != nil { + if err := validateStringParam(*userAssignedID, "userAssignedID"); err != nil { + return nil, err + } + } + // We set the oauth config token endpoint to be MSI's endpoint + msiEndpointURL, err := url.Parse(msiEndpoint) + if err != nil { + return nil, err + } + + v := url.Values{} + v.Set("resource", resource) + // App Service MSI currently only supports token API version 2017-09-01 + if isAppService() { + v.Set("api-version", "2017-09-01") + } else { + v.Set("api-version", "2018-02-01") + } + if userAssignedID != nil { + v.Set("client_id", *userAssignedID) + } + msiEndpointURL.RawQuery = v.Encode() + + spt := &ServicePrincipalToken{ + inner: servicePrincipalToken{ + Token: newToken(), + OauthConfig: OAuthConfig{ + TokenEndpoint: *msiEndpointURL, + }, + Secret: &ServicePrincipalMSISecret{}, + Resource: resource, + AutoRefresh: true, + RefreshWithin: defaultRefresh, + }, + refreshLock: &sync.RWMutex{}, + sender: sender(), + refreshCallbacks: callbacks, + MaxMSIRefreshAttempts: defaultMaxMSIRefreshAttempts, + } + + if userAssignedID != nil { + spt.inner.ClientID = *userAssignedID + } + + return spt, nil +} + +// internal type that implements TokenRefreshError +type tokenRefreshError struct { + message string + resp *http.Response +} + +// Error implements the error interface which is part of the TokenRefreshError interface. +func (tre tokenRefreshError) Error() string { + return tre.message +} + +// Response implements the TokenRefreshError interface, it returns the raw HTTP response from the refresh operation. 
+func (tre tokenRefreshError) Response() *http.Response { + return tre.resp +} + +func newTokenRefreshError(message string, resp *http.Response) TokenRefreshError { + return tokenRefreshError{message: message, resp: resp} +} + +// EnsureFresh will refresh the token if it will expire within the refresh window (as set by +// RefreshWithin) and autoRefresh flag is on. This method is safe for concurrent use. +func (spt *ServicePrincipalToken) EnsureFresh() error { + return spt.EnsureFreshWithContext(context.Background()) +} + +// EnsureFreshWithContext will refresh the token if it will expire within the refresh window (as set by +// RefreshWithin) and autoRefresh flag is on. This method is safe for concurrent use. +func (spt *ServicePrincipalToken) EnsureFreshWithContext(ctx context.Context) error { + if spt.inner.AutoRefresh && spt.inner.Token.WillExpireIn(spt.inner.RefreshWithin) { + // take the write lock then check to see if the token was already refreshed + spt.refreshLock.Lock() + defer spt.refreshLock.Unlock() + if spt.inner.Token.WillExpireIn(spt.inner.RefreshWithin) { + return spt.refreshInternal(ctx, spt.inner.Resource) + } + } + return nil +} + +// InvokeRefreshCallbacks calls any TokenRefreshCallbacks that were added to the SPT during initialization +func (spt *ServicePrincipalToken) InvokeRefreshCallbacks(token Token) error { + if spt.refreshCallbacks != nil { + for _, callback := range spt.refreshCallbacks { + err := callback(spt.inner.Token) + if err != nil { + return fmt.Errorf("adal: TokenRefreshCallback handler failed. Error = '%v'", err) + } + } + } + return nil +} + +// Refresh obtains a fresh token for the Service Principal. +// This method is safe for concurrent use. +func (spt *ServicePrincipalToken) Refresh() error { + return spt.RefreshWithContext(context.Background()) +} + +// RefreshWithContext obtains a fresh token for the Service Principal. +// This method is safe for concurrent use. 
+func (spt *ServicePrincipalToken) RefreshWithContext(ctx context.Context) error { + spt.refreshLock.Lock() + defer spt.refreshLock.Unlock() + return spt.refreshInternal(ctx, spt.inner.Resource) +} + +// RefreshExchange refreshes the token, but for a different resource. +// This method is safe for concurrent use. +func (spt *ServicePrincipalToken) RefreshExchange(resource string) error { + return spt.RefreshExchangeWithContext(context.Background(), resource) +} + +// RefreshExchangeWithContext refreshes the token, but for a different resource. +// This method is safe for concurrent use. +func (spt *ServicePrincipalToken) RefreshExchangeWithContext(ctx context.Context, resource string) error { + spt.refreshLock.Lock() + defer spt.refreshLock.Unlock() + return spt.refreshInternal(ctx, resource) +} + +func (spt *ServicePrincipalToken) getGrantType() string { + switch spt.inner.Secret.(type) { + case *ServicePrincipalUsernamePasswordSecret: + return OAuthGrantTypeUserPass + case *ServicePrincipalAuthorizationCodeSecret: + return OAuthGrantTypeAuthorizationCode + default: + return OAuthGrantTypeClientCredentials + } +} + +func isIMDS(u url.URL) bool { + imds, err := url.Parse(msiEndpoint) + if err != nil { + return false + } + return (u.Host == imds.Host && u.Path == imds.Path) || isAppService() +} + +func (spt *ServicePrincipalToken) refreshInternal(ctx context.Context, resource string) error { + if spt.customRefreshFunc != nil { + token, err := spt.customRefreshFunc(ctx, resource) + if err != nil { + return err + } + spt.inner.Token = *token + return spt.InvokeRefreshCallbacks(spt.inner.Token) + } + + req, err := http.NewRequest(http.MethodPost, spt.inner.OauthConfig.TokenEndpoint.String(), nil) + if err != nil { + return fmt.Errorf("adal: Failed to build the refresh request. 
Error = '%v'", err) + } + req.Header.Add("User-Agent", UserAgent()) + // Add header when runtime is on App Service or Functions + if isAppService() { + asMSISecret, _ := os.LookupEnv(asMSISecretEnv) + req.Header.Add("Secret", asMSISecret) + } + req = req.WithContext(ctx) + if !isIMDS(spt.inner.OauthConfig.TokenEndpoint) { + v := url.Values{} + v.Set("client_id", spt.inner.ClientID) + v.Set("resource", resource) + + if spt.inner.Token.RefreshToken != "" { + v.Set("grant_type", OAuthGrantTypeRefreshToken) + v.Set("refresh_token", spt.inner.Token.RefreshToken) + // web apps must specify client_secret when refreshing tokens + // see https://docs.microsoft.com/en-us/azure/active-directory/develop/active-directory-protocols-oauth-code#refreshing-the-access-tokens + if spt.getGrantType() == OAuthGrantTypeAuthorizationCode { + err := spt.inner.Secret.SetAuthenticationValues(spt, &v) + if err != nil { + return err + } + } + } else { + v.Set("grant_type", spt.getGrantType()) + err := spt.inner.Secret.SetAuthenticationValues(spt, &v) + if err != nil { + return err + } + } + + s := v.Encode() + body := ioutil.NopCloser(strings.NewReader(s)) + req.ContentLength = int64(len(s)) + req.Header.Set(contentType, mimeTypeFormPost) + req.Body = body + } + + if _, ok := spt.inner.Secret.(*ServicePrincipalMSISecret); ok { + req.Method = http.MethodGet + req.Header.Set(metadataHeader, "true") + } + + var resp *http.Response + if isIMDS(spt.inner.OauthConfig.TokenEndpoint) { + resp, err = retryForIMDS(spt.sender, req, spt.MaxMSIRefreshAttempts) + } else { + resp, err = spt.sender.Do(req) + } + if err != nil { + // don't return a TokenRefreshError here; this will allow retry logic to apply + return fmt.Errorf("adal: Failed to execute the refresh request. Error = '%v'", err) + } + + defer resp.Body.Close() + rb, err := ioutil.ReadAll(resp.Body) + + if resp.StatusCode != http.StatusOK { + if err != nil { + return newTokenRefreshError(fmt.Sprintf("adal: Refresh request failed. 
Status Code = '%d'. Failed reading response body: %v", resp.StatusCode, err), resp) + } + return newTokenRefreshError(fmt.Sprintf("adal: Refresh request failed. Status Code = '%d'. Response body: %s", resp.StatusCode, string(rb)), resp) + } + + // for the following error cases don't return a TokenRefreshError. the operation succeeded + // but some transient failure happened during deserialization. by returning a generic error + // the retry logic will kick in (we don't retry on TokenRefreshError). + + if err != nil { + return fmt.Errorf("adal: Failed to read a new service principal token during refresh. Error = '%v'", err) + } + if len(strings.Trim(string(rb), " ")) == 0 { + return fmt.Errorf("adal: Empty service principal token received during refresh") + } + var token Token + err = json.Unmarshal(rb, &token) + if err != nil { + return fmt.Errorf("adal: Failed to unmarshal the service principal token during refresh. Error = '%v' JSON = '%s'", err, string(rb)) + } + + spt.inner.Token = token + + return spt.InvokeRefreshCallbacks(token) +} + +// retry logic specific to retrieving a token from the IMDS endpoint +func retryForIMDS(sender Sender, req *http.Request, maxAttempts int) (resp *http.Response, err error) { + // copied from client.go due to circular dependency + retries := []int{ + http.StatusRequestTimeout, // 408 + http.StatusTooManyRequests, // 429 + http.StatusInternalServerError, // 500 + http.StatusBadGateway, // 502 + http.StatusServiceUnavailable, // 503 + http.StatusGatewayTimeout, // 504 + } + // extra retry status codes specific to IMDS + retries = append(retries, + http.StatusNotFound, + http.StatusGone, + // all remaining 5xx + http.StatusNotImplemented, + http.StatusHTTPVersionNotSupported, + http.StatusVariantAlsoNegotiates, + http.StatusInsufficientStorage, + http.StatusLoopDetected, + http.StatusNotExtended, + http.StatusNetworkAuthenticationRequired) + + // see 
https://docs.microsoft.com/en-us/azure/active-directory/managed-service-identity/how-to-use-vm-token#retry-guidance + + const maxDelay time.Duration = 60 * time.Second + + attempt := 0 + delay := time.Duration(0) + + for attempt < maxAttempts { + if resp != nil && resp.Body != nil { + io.Copy(ioutil.Discard, resp.Body) + resp.Body.Close() + } + resp, err = sender.Do(req) + // we want to retry if err is not nil or the status code is in the list of retry codes + if err == nil && !responseHasStatusCode(resp, retries...) { + return + } + + // perform exponential backoff with a cap. + // must increment attempt before calculating delay. + attempt++ + // the base value of 2 is the "delta backoff" as specified in the guidance doc + delay += (time.Duration(math.Pow(2, float64(attempt))) * time.Second) + if delay > maxDelay { + delay = maxDelay + } + + select { + case <-time.After(delay): + // intentionally left blank + case <-req.Context().Done(): + err = req.Context().Err() + return + } + } + return +} + +func responseHasStatusCode(resp *http.Response, codes ...int) bool { + if resp != nil { + for _, i := range codes { + if i == resp.StatusCode { + return true + } + } + } + return false +} + +// SetAutoRefresh enables or disables automatic refreshing of stale tokens. +func (spt *ServicePrincipalToken) SetAutoRefresh(autoRefresh bool) { + spt.inner.AutoRefresh = autoRefresh +} + +// SetRefreshWithin sets the interval within which if the token will expire, EnsureFresh will +// refresh the token. +func (spt *ServicePrincipalToken) SetRefreshWithin(d time.Duration) { + spt.inner.RefreshWithin = d + return +} + +// SetSender sets the http.Client used when obtaining the Service Principal token. An +// undecorated http.Client is used by default. +func (spt *ServicePrincipalToken) SetSender(s Sender) { spt.sender = s } + +// OAuthToken implements the OAuthTokenProvider interface. It returns the current access token. 
+func (spt *ServicePrincipalToken) OAuthToken() string { + spt.refreshLock.RLock() + defer spt.refreshLock.RUnlock() + return spt.inner.Token.OAuthToken() +} + +// Token returns a copy of the current token. +func (spt *ServicePrincipalToken) Token() Token { + spt.refreshLock.RLock() + defer spt.refreshLock.RUnlock() + return spt.inner.Token +} + +// MultiTenantServicePrincipalToken contains tokens for multi-tenant authorization. +type MultiTenantServicePrincipalToken struct { + PrimaryToken *ServicePrincipalToken + AuxiliaryTokens []*ServicePrincipalToken +} + +// PrimaryOAuthToken returns the primary authorization token. +func (mt *MultiTenantServicePrincipalToken) PrimaryOAuthToken() string { + return mt.PrimaryToken.OAuthToken() +} + +// AuxiliaryOAuthTokens returns one to three auxiliary authorization tokens. +func (mt *MultiTenantServicePrincipalToken) AuxiliaryOAuthTokens() []string { + tokens := make([]string, len(mt.AuxiliaryTokens)) + for i := range mt.AuxiliaryTokens { + tokens[i] = mt.AuxiliaryTokens[i].OAuthToken() + } + return tokens +} + +// EnsureFreshWithContext will refresh the token if it will expire within the refresh window (as set by +// RefreshWithin) and autoRefresh flag is on. This method is safe for concurrent use. +func (mt *MultiTenantServicePrincipalToken) EnsureFreshWithContext(ctx context.Context) error { + if err := mt.PrimaryToken.EnsureFreshWithContext(ctx); err != nil { + return fmt.Errorf("failed to refresh primary token: %v", err) + } + for _, aux := range mt.AuxiliaryTokens { + if err := aux.EnsureFreshWithContext(ctx); err != nil { + return fmt.Errorf("failed to refresh auxiliary token: %v", err) + } + } + return nil +} + +// RefreshWithContext obtains a fresh token for the Service Principal. 
+func (mt *MultiTenantServicePrincipalToken) RefreshWithContext(ctx context.Context) error { + if err := mt.PrimaryToken.RefreshWithContext(ctx); err != nil { + return fmt.Errorf("failed to refresh primary token: %v", err) + } + for _, aux := range mt.AuxiliaryTokens { + if err := aux.RefreshWithContext(ctx); err != nil { + return fmt.Errorf("failed to refresh auxiliary token: %v", err) + } + } + return nil +} + +// RefreshExchangeWithContext refreshes the token, but for a different resource. +func (mt *MultiTenantServicePrincipalToken) RefreshExchangeWithContext(ctx context.Context, resource string) error { + if err := mt.PrimaryToken.RefreshExchangeWithContext(ctx, resource); err != nil { + return fmt.Errorf("failed to refresh primary token: %v", err) + } + for _, aux := range mt.AuxiliaryTokens { + if err := aux.RefreshExchangeWithContext(ctx, resource); err != nil { + return fmt.Errorf("failed to refresh auxiliary token: %v", err) + } + } + return nil +} + +// NewMultiTenantServicePrincipalToken creates a new MultiTenantServicePrincipalToken with the specified credentials and resource. 
+func NewMultiTenantServicePrincipalToken(multiTenantCfg MultiTenantOAuthConfig, clientID string, secret string, resource string) (*MultiTenantServicePrincipalToken, error) { + if err := validateStringParam(clientID, "clientID"); err != nil { + return nil, err + } + if err := validateStringParam(secret, "secret"); err != nil { + return nil, err + } + if err := validateStringParam(resource, "resource"); err != nil { + return nil, err + } + auxTenants := multiTenantCfg.AuxiliaryTenants() + m := MultiTenantServicePrincipalToken{ + AuxiliaryTokens: make([]*ServicePrincipalToken, len(auxTenants)), + } + primary, err := NewServicePrincipalToken(*multiTenantCfg.PrimaryTenant(), clientID, secret, resource) + if err != nil { + return nil, fmt.Errorf("failed to create SPT for primary tenant: %v", err) + } + m.PrimaryToken = primary + for i := range auxTenants { + aux, err := NewServicePrincipalToken(*auxTenants[i], clientID, secret, resource) + if err != nil { + return nil, fmt.Errorf("failed to create SPT for auxiliary tenant: %v", err) + } + m.AuxiliaryTokens[i] = aux + } + return &m, nil +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/adal/version.go b/vendor/github.com/Azure/go-autorest/autorest/adal/version.go new file mode 100644 index 0000000..c867b34 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/adal/version.go @@ -0,0 +1,45 @@ +package adal + +import ( + "fmt" + "runtime" +) + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +const number = "v1.0.0" + +var ( + ua = fmt.Sprintf("Go/%s (%s-%s) go-autorest/adal/%s", + runtime.Version(), + runtime.GOARCH, + runtime.GOOS, + number, + ) +) + +// UserAgent returns a string containing the Go version, system architecture and OS, and the adal version. +func UserAgent() string { + return ua +} + +// AddToUserAgent adds an extension to the current user agent +func AddToUserAgent(extension string) error { + if extension != "" { + ua = fmt.Sprintf("%s %s", ua, extension) + return nil + } + return fmt.Errorf("Extension was empty, User Agent remained as '%s'", ua) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/authorization.go b/vendor/github.com/Azure/go-autorest/autorest/authorization.go new file mode 100644 index 0000000..f43e1a6 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/authorization.go @@ -0,0 +1,337 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import ( + "crypto/tls" + "encoding/base64" + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/Azure/go-autorest/autorest/adal" +) + +const ( + bearerChallengeHeader = "Www-Authenticate" + bearer = "Bearer" + tenantID = "tenantID" + apiKeyAuthorizerHeader = "Ocp-Apim-Subscription-Key" + bingAPISdkHeader = "X-BingApis-SDK-Client" + golangBingAPISdkHeaderValue = "Go-SDK" + authorization = "Authorization" + basic = "Basic" +) + +// Authorizer is the interface that provides a PrepareDecorator used to supply request +// authorization. Most often, the Authorizer decorator runs last so it has access to the full +// state of the formed HTTP request. +type Authorizer interface { + WithAuthorization() PrepareDecorator +} + +// NullAuthorizer implements a default, "do nothing" Authorizer. +type NullAuthorizer struct{} + +// WithAuthorization returns a PrepareDecorator that does nothing. +func (na NullAuthorizer) WithAuthorization() PrepareDecorator { + return WithNothing() +} + +// APIKeyAuthorizer implements API Key authorization. +type APIKeyAuthorizer struct { + headers map[string]interface{} + queryParameters map[string]interface{} +} + +// NewAPIKeyAuthorizerWithHeaders creates an ApiKeyAuthorizer with headers. +func NewAPIKeyAuthorizerWithHeaders(headers map[string]interface{}) *APIKeyAuthorizer { + return NewAPIKeyAuthorizer(headers, nil) +} + +// NewAPIKeyAuthorizerWithQueryParameters creates an ApiKeyAuthorizer with query parameters. +func NewAPIKeyAuthorizerWithQueryParameters(queryParameters map[string]interface{}) *APIKeyAuthorizer { + return NewAPIKeyAuthorizer(nil, queryParameters) +} + +// NewAPIKeyAuthorizer creates an ApiKeyAuthorizer with headers. +func NewAPIKeyAuthorizer(headers map[string]interface{}, queryParameters map[string]interface{}) *APIKeyAuthorizer { + return &APIKeyAuthorizer{headers: headers, queryParameters: queryParameters} +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP headers and Query Parameters. 
+func (aka *APIKeyAuthorizer) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return DecoratePreparer(p, WithHeaders(aka.headers), WithQueryParameters(aka.queryParameters)) + } +} + +// CognitiveServicesAuthorizer implements authorization for Cognitive Services. +type CognitiveServicesAuthorizer struct { + subscriptionKey string +} + +// NewCognitiveServicesAuthorizer is +func NewCognitiveServicesAuthorizer(subscriptionKey string) *CognitiveServicesAuthorizer { + return &CognitiveServicesAuthorizer{subscriptionKey: subscriptionKey} +} + +// WithAuthorization is +func (csa *CognitiveServicesAuthorizer) WithAuthorization() PrepareDecorator { + headers := make(map[string]interface{}) + headers[apiKeyAuthorizerHeader] = csa.subscriptionKey + headers[bingAPISdkHeader] = golangBingAPISdkHeaderValue + + return NewAPIKeyAuthorizerWithHeaders(headers).WithAuthorization() +} + +// BearerAuthorizer implements the bearer authorization +type BearerAuthorizer struct { + tokenProvider adal.OAuthTokenProvider +} + +// NewBearerAuthorizer crates a BearerAuthorizer using the given token provider +func NewBearerAuthorizer(tp adal.OAuthTokenProvider) *BearerAuthorizer { + return &BearerAuthorizer{tokenProvider: tp} +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose +// value is "Bearer " followed by the token. +// +// By default, the token will be automatically refreshed through the Refresher interface. 
+func (ba *BearerAuthorizer) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + // the ordering is important here, prefer RefresherWithContext if available + if refresher, ok := ba.tokenProvider.(adal.RefresherWithContext); ok { + err = refresher.EnsureFreshWithContext(r.Context()) + } else if refresher, ok := ba.tokenProvider.(adal.Refresher); ok { + err = refresher.EnsureFresh() + } + if err != nil { + var resp *http.Response + if tokError, ok := err.(adal.TokenRefreshError); ok { + resp = tokError.Response() + } + return r, NewErrorWithError(err, "azure.BearerAuthorizer", "WithAuthorization", resp, + "Failed to refresh the Token for request to %s", r.URL) + } + return Prepare(r, WithHeader(headerAuthorization, fmt.Sprintf("Bearer %s", ba.tokenProvider.OAuthToken()))) + } + return r, err + }) + } +} + +// BearerAuthorizerCallbackFunc is the authentication callback signature. +type BearerAuthorizerCallbackFunc func(tenantID, resource string) (*BearerAuthorizer, error) + +// BearerAuthorizerCallback implements bearer authorization via a callback. +type BearerAuthorizerCallback struct { + sender Sender + callback BearerAuthorizerCallbackFunc +} + +// NewBearerAuthorizerCallback creates a bearer authorization callback. The callback +// is invoked when the HTTP request is submitted. +func NewBearerAuthorizerCallback(s Sender, callback BearerAuthorizerCallbackFunc) *BearerAuthorizerCallback { + if s == nil { + s = sender(tls.RenegotiateNever) + } + return &BearerAuthorizerCallback{sender: s, callback: callback} +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose value +// is "Bearer " followed by the token. The BearerAuthorizer is obtained via a user-supplied callback. +// +// By default, the token will be automatically refreshed through the Refresher interface. 
+func (bacb *BearerAuthorizerCallback) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + // make a copy of the request and remove the body as it's not + // required and avoids us having to create a copy of it. + rCopy := *r + removeRequestBody(&rCopy) + + resp, err := bacb.sender.Do(&rCopy) + if err != nil { + return r, err + } + DrainResponseBody(resp) + if resp.StatusCode == 401 && hasBearerChallenge(resp.Header) { + bc, err := newBearerChallenge(resp.Header) + if err != nil { + return r, err + } + if bacb.callback != nil { + ba, err := bacb.callback(bc.values[tenantID], bc.values["resource"]) + if err != nil { + return r, err + } + return Prepare(r, ba.WithAuthorization()) + } + } + } + return r, err + }) + } +} + +// returns true if the HTTP response contains a bearer challenge +func hasBearerChallenge(header http.Header) bool { + authHeader := header.Get(bearerChallengeHeader) + if len(authHeader) == 0 || strings.Index(authHeader, bearer) < 0 { + return false + } + return true +} + +type bearerChallenge struct { + values map[string]string +} + +func newBearerChallenge(header http.Header) (bc bearerChallenge, err error) { + challenge := strings.TrimSpace(header.Get(bearerChallengeHeader)) + trimmedChallenge := challenge[len(bearer)+1:] + + // challenge is a set of key=value pairs that are comma delimited + pairs := strings.Split(trimmedChallenge, ",") + if len(pairs) < 1 { + err = fmt.Errorf("challenge '%s' contains no pairs", challenge) + return bc, err + } + + bc.values = make(map[string]string) + for i := range pairs { + trimmedPair := strings.TrimSpace(pairs[i]) + pair := strings.Split(trimmedPair, "=") + if len(pair) == 2 { + // remove the enclosing quotes + key := strings.Trim(pair[0], "\"") + value := strings.Trim(pair[1], "\"") + + switch key { + case "authorization", "authorization_uri": + // strip the tenant ID 
from the authorization URL + asURL, err := url.Parse(value) + if err != nil { + return bc, err + } + bc.values[tenantID] = asURL.Path[1:] + default: + bc.values[key] = value + } + } + } + + return bc, err +} + +// EventGridKeyAuthorizer implements authorization for event grid using key authentication. +type EventGridKeyAuthorizer struct { + topicKey string +} + +// NewEventGridKeyAuthorizer creates a new EventGridKeyAuthorizer +// with the specified topic key. +func NewEventGridKeyAuthorizer(topicKey string) EventGridKeyAuthorizer { + return EventGridKeyAuthorizer{topicKey: topicKey} +} + +// WithAuthorization returns a PrepareDecorator that adds the aeg-sas-key authentication header. +func (egta EventGridKeyAuthorizer) WithAuthorization() PrepareDecorator { + headers := map[string]interface{}{ + "aeg-sas-key": egta.topicKey, + } + return NewAPIKeyAuthorizerWithHeaders(headers).WithAuthorization() +} + +// BasicAuthorizer implements basic HTTP authorization by adding the Authorization HTTP header +// with the value "Basic " where is a base64-encoded username:password tuple. +type BasicAuthorizer struct { + userName string + password string +} + +// NewBasicAuthorizer creates a new BasicAuthorizer with the specified username and password. +func NewBasicAuthorizer(userName, password string) *BasicAuthorizer { + return &BasicAuthorizer{ + userName: userName, + password: password, + } +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose +// value is "Basic " followed by the base64-encoded username:password tuple. +func (ba *BasicAuthorizer) WithAuthorization() PrepareDecorator { + headers := make(map[string]interface{}) + headers[authorization] = basic + " " + base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", ba.userName, ba.password))) + + return NewAPIKeyAuthorizerWithHeaders(headers).WithAuthorization() +} + +// MultiTenantServicePrincipalTokenAuthorizer provides authentication across tenants. 
+type MultiTenantServicePrincipalTokenAuthorizer interface { + WithAuthorization() PrepareDecorator +} + +// NewMultiTenantServicePrincipalTokenAuthorizer crates a BearerAuthorizer using the given token provider +func NewMultiTenantServicePrincipalTokenAuthorizer(tp adal.MultitenantOAuthTokenProvider) MultiTenantServicePrincipalTokenAuthorizer { + return &multiTenantSPTAuthorizer{tp: tp} +} + +type multiTenantSPTAuthorizer struct { + tp adal.MultitenantOAuthTokenProvider +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header using the +// primary token along with the auxiliary authorization header using the auxiliary tokens. +// +// By default, the token will be automatically refreshed through the Refresher interface. +func (mt multiTenantSPTAuthorizer) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err != nil { + return r, err + } + if refresher, ok := mt.tp.(adal.RefresherWithContext); ok { + err = refresher.EnsureFreshWithContext(r.Context()) + if err != nil { + var resp *http.Response + if tokError, ok := err.(adal.TokenRefreshError); ok { + resp = tokError.Response() + } + return r, NewErrorWithError(err, "azure.multiTenantSPTAuthorizer", "WithAuthorization", resp, + "Failed to refresh one or more Tokens for request to %s", r.URL) + } + } + r, err = Prepare(r, WithHeader(headerAuthorization, fmt.Sprintf("Bearer %s", mt.tp.PrimaryOAuthToken()))) + if err != nil { + return r, err + } + auxTokens := mt.tp.AuxiliaryOAuthTokens() + for i := range auxTokens { + auxTokens[i] = fmt.Sprintf("Bearer %s", auxTokens[i]) + } + return Prepare(r, WithHeader(headerAuxAuthorization, strings.Join(auxTokens, "; "))) + }) + } +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/authorization_sas.go b/vendor/github.com/Azure/go-autorest/autorest/authorization_sas.go new file mode 100644 index 0000000..89a659c 
--- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/authorization_sas.go @@ -0,0 +1,67 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "fmt" + "net/http" + "strings" +) + +// SASTokenAuthorizer implements an authorization for SAS Token Authentication +// this can be used for interaction with Blob Storage Endpoints +type SASTokenAuthorizer struct { + sasToken string +} + +// NewSASTokenAuthorizer creates a SASTokenAuthorizer using the given credentials +func NewSASTokenAuthorizer(sasToken string) (*SASTokenAuthorizer, error) { + if strings.TrimSpace(sasToken) == "" { + return nil, fmt.Errorf("sasToken cannot be empty") + } + + token := sasToken + if strings.HasPrefix(sasToken, "?") { + token = strings.TrimPrefix(sasToken, "?") + } + + return &SASTokenAuthorizer{ + sasToken: token, + }, nil +} + +// WithAuthorization returns a PrepareDecorator that adds a shared access signature token to the +// URI's query parameters. This can be used for the Blob, Queue, and File Services. 
+// +// See https://docs.microsoft.com/en-us/rest/api/storageservices/delegate-access-with-shared-access-signature +func (sas *SASTokenAuthorizer) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err != nil { + return r, err + } + + if r.URL.RawQuery != "" { + r.URL.RawQuery = fmt.Sprintf("%s&%s", r.URL.RawQuery, sas.sasToken) + } else { + r.URL.RawQuery = sas.sasToken + } + + r.RequestURI = r.URL.String() + return Prepare(r) + }) + } +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/authorization_storage.go b/vendor/github.com/Azure/go-autorest/autorest/authorization_storage.go new file mode 100644 index 0000000..b844a3d --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/authorization_storage.go @@ -0,0 +1,304 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "fmt" + "net/http" + "net/url" + "sort" + "strings" + "time" +) + +// SharedKeyType defines the enumeration for the various shared key types. +// See https://docs.microsoft.com/en-us/rest/api/storageservices/authorize-with-shared-key for details on the shared key types. +type SharedKeyType string + +const ( + // SharedKey is used to authorize against blobs, files and queues services. 
+ SharedKey SharedKeyType = "sharedKey" + + // SharedKeyForTable is used to authorize against the table service. + SharedKeyForTable SharedKeyType = "sharedKeyTable" + + // SharedKeyLite is used to authorize against blobs, files and queues services. It's provided for + // backwards compatibility with API versions before 2009-09-19. Prefer SharedKey instead. + SharedKeyLite SharedKeyType = "sharedKeyLite" + + // SharedKeyLiteForTable is used to authorize against the table service. It's provided for + // backwards compatibility with older table API versions. Prefer SharedKeyForTable instead. + SharedKeyLiteForTable SharedKeyType = "sharedKeyLiteTable" +) + +const ( + headerAccept = "Accept" + headerAcceptCharset = "Accept-Charset" + headerContentEncoding = "Content-Encoding" + headerContentLength = "Content-Length" + headerContentMD5 = "Content-MD5" + headerContentLanguage = "Content-Language" + headerIfModifiedSince = "If-Modified-Since" + headerIfMatch = "If-Match" + headerIfNoneMatch = "If-None-Match" + headerIfUnmodifiedSince = "If-Unmodified-Since" + headerDate = "Date" + headerXMSDate = "X-Ms-Date" + headerXMSVersion = "x-ms-version" + headerRange = "Range" +) + +const storageEmulatorAccountName = "devstoreaccount1" + +// SharedKeyAuthorizer implements an authorization for Shared Key +// this can be used for interaction with Blob, File and Queue Storage Endpoints +type SharedKeyAuthorizer struct { + accountName string + accountKey []byte + keyType SharedKeyType +} + +// NewSharedKeyAuthorizer creates a SharedKeyAuthorizer using the provided credentials and shared key type. 
+func NewSharedKeyAuthorizer(accountName, accountKey string, keyType SharedKeyType) (*SharedKeyAuthorizer, error) { + key, err := base64.StdEncoding.DecodeString(accountKey) + if err != nil { + return nil, fmt.Errorf("malformed storage account key: %v", err) + } + return &SharedKeyAuthorizer{ + accountName: accountName, + accountKey: key, + keyType: keyType, + }, nil +} + +// WithAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose +// value is " " followed by the computed key. +// This can be used for the Blob, Queue, and File Services +// +// from: https://docs.microsoft.com/en-us/rest/api/storageservices/authorize-with-shared-key +// You may use Shared Key authorization to authorize a request made against the +// 2009-09-19 version and later of the Blob and Queue services, +// and version 2014-02-14 and later of the File services. +func (sk *SharedKeyAuthorizer) WithAuthorization() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err != nil { + return r, err + } + + sk, err := buildSharedKey(sk.accountName, sk.accountKey, r, sk.keyType) + if err != nil { + return r, err + } + return Prepare(r, WithHeader(headerAuthorization, sk)) + }) + } +} + +func buildSharedKey(accName string, accKey []byte, req *http.Request, keyType SharedKeyType) (string, error) { + canRes, err := buildCanonicalizedResource(accName, req.URL.String(), keyType) + if err != nil { + return "", err + } + + if req.Header == nil { + req.Header = http.Header{} + } + + // ensure date is set + if req.Header.Get(headerDate) == "" && req.Header.Get(headerXMSDate) == "" { + date := time.Now().UTC().Format(http.TimeFormat) + req.Header.Set(headerXMSDate, date) + } + canString, err := buildCanonicalizedString(req.Method, req.Header, canRes, keyType) + if err != nil { + return "", err + } + return createAuthorizationHeader(accName, accKey, canString, keyType), nil +} + 
+func buildCanonicalizedResource(accountName, uri string, keyType SharedKeyType) (string, error) { + errMsg := "buildCanonicalizedResource error: %s" + u, err := url.Parse(uri) + if err != nil { + return "", fmt.Errorf(errMsg, err.Error()) + } + + cr := bytes.NewBufferString("") + if accountName != storageEmulatorAccountName { + cr.WriteString("/") + cr.WriteString(getCanonicalizedAccountName(accountName)) + } + + if len(u.Path) > 0 { + // Any portion of the CanonicalizedResource string that is derived from + // the resource's URI should be encoded exactly as it is in the URI. + // -- https://msdn.microsoft.com/en-gb/library/azure/dd179428.aspx + cr.WriteString(u.EscapedPath()) + } + + params, err := url.ParseQuery(u.RawQuery) + if err != nil { + return "", fmt.Errorf(errMsg, err.Error()) + } + + // See https://github.com/Azure/azure-storage-net/blob/master/Lib/Common/Core/Util/AuthenticationUtility.cs#L277 + if keyType == SharedKey { + if len(params) > 0 { + cr.WriteString("\n") + + keys := []string{} + for key := range params { + keys = append(keys, key) + } + sort.Strings(keys) + + completeParams := []string{} + for _, key := range keys { + if len(params[key]) > 1 { + sort.Strings(params[key]) + } + + completeParams = append(completeParams, fmt.Sprintf("%s:%s", key, strings.Join(params[key], ","))) + } + cr.WriteString(strings.Join(completeParams, "\n")) + } + } else { + // search for "comp" parameter, if exists then add it to canonicalizedresource + if v, ok := params["comp"]; ok { + cr.WriteString("?comp=" + v[0]) + } + } + + return string(cr.Bytes()), nil +} + +func getCanonicalizedAccountName(accountName string) string { + // since we may be trying to access a secondary storage account, we need to + // remove the -secondary part of the storage name + return strings.TrimSuffix(accountName, "-secondary") +} + +func buildCanonicalizedString(verb string, headers http.Header, canonicalizedResource string, keyType SharedKeyType) (string, error) { + contentLength 
:= headers.Get(headerContentLength) + if contentLength == "0" { + contentLength = "" + } + date := headers.Get(headerDate) + if v := headers.Get(headerXMSDate); v != "" { + if keyType == SharedKey || keyType == SharedKeyLite { + date = "" + } else { + date = v + } + } + var canString string + switch keyType { + case SharedKey: + canString = strings.Join([]string{ + verb, + headers.Get(headerContentEncoding), + headers.Get(headerContentLanguage), + contentLength, + headers.Get(headerContentMD5), + headers.Get(headerContentType), + date, + headers.Get(headerIfModifiedSince), + headers.Get(headerIfMatch), + headers.Get(headerIfNoneMatch), + headers.Get(headerIfUnmodifiedSince), + headers.Get(headerRange), + buildCanonicalizedHeader(headers), + canonicalizedResource, + }, "\n") + case SharedKeyForTable: + canString = strings.Join([]string{ + verb, + headers.Get(headerContentMD5), + headers.Get(headerContentType), + date, + canonicalizedResource, + }, "\n") + case SharedKeyLite: + canString = strings.Join([]string{ + verb, + headers.Get(headerContentMD5), + headers.Get(headerContentType), + date, + buildCanonicalizedHeader(headers), + canonicalizedResource, + }, "\n") + case SharedKeyLiteForTable: + canString = strings.Join([]string{ + date, + canonicalizedResource, + }, "\n") + default: + return "", fmt.Errorf("key type '%s' is not supported", keyType) + } + return canString, nil +} + +func buildCanonicalizedHeader(headers http.Header) string { + cm := make(map[string]string) + + for k := range headers { + headerName := strings.TrimSpace(strings.ToLower(k)) + if strings.HasPrefix(headerName, "x-ms-") { + cm[headerName] = headers.Get(k) + } + } + + if len(cm) == 0 { + return "" + } + + keys := []string{} + for key := range cm { + keys = append(keys, key) + } + + sort.Strings(keys) + + ch := bytes.NewBufferString("") + + for _, key := range keys { + ch.WriteString(key) + ch.WriteRune(':') + ch.WriteString(cm[key]) + ch.WriteRune('\n') + } + + return 
strings.TrimSuffix(string(ch.Bytes()), "\n") +} + +func createAuthorizationHeader(accountName string, accountKey []byte, canonicalizedString string, keyType SharedKeyType) string { + h := hmac.New(sha256.New, accountKey) + h.Write([]byte(canonicalizedString)) + signature := base64.StdEncoding.EncodeToString(h.Sum(nil)) + var key string + switch keyType { + case SharedKey, SharedKeyForTable: + key = "SharedKey" + case SharedKeyLite, SharedKeyLiteForTable: + key = "SharedKeyLite" + } + return fmt.Sprintf("%s %s:%s", key, getCanonicalizedAccountName(accountName), signature) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/autorest.go b/vendor/github.com/Azure/go-autorest/autorest/autorest.go new file mode 100644 index 0000000..aafdf02 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/autorest.go @@ -0,0 +1,150 @@ +/* +Package autorest implements an HTTP request pipeline suitable for use across multiple go-routines +and provides the shared routines relied on by AutoRest (see https://github.com/Azure/autorest/) +generated Go code. + +The package breaks sending and responding to HTTP requests into three phases: Preparing, Sending, +and Responding. A typical pattern is: + + req, err := Prepare(&http.Request{}, + token.WithAuthorization()) + + resp, err := Send(req, + WithLogging(logger), + DoErrorIfStatusCode(http.StatusInternalServerError), + DoCloseIfError(), + DoRetryForAttempts(5, time.Second)) + + err = Respond(resp, + ByDiscardingBody(), + ByClosing()) + +Each phase relies on decorators to modify and / or manage processing. Decorators may first modify +and then pass the data along, pass the data first and then modify the result, or wrap themselves +around passing the data (such as a logger might do). Decorators run in the order provided. 
For +example, the following: + + req, err := Prepare(&http.Request{}, + WithBaseURL("https://microsoft.com/"), + WithPath("a"), + WithPath("b"), + WithPath("c")) + +will set the URL to: + + https://microsoft.com/a/b/c + +Preparers and Responders may be shared and re-used (assuming the underlying decorators support +sharing and re-use). Performant use is obtained by creating one or more Preparers and Responders +shared among multiple go-routines, and a single Sender shared among multiple sending go-routines, +all bound together by means of input / output channels. + +Decorators hold their passed state within a closure (such as the path components in the example +above). Be careful to share Preparers and Responders only in a context where such held state +applies. For example, it may not make sense to share a Preparer that applies a query string from a +fixed set of values. Similarly, sharing a Responder that reads the response body into a passed +struct (e.g., ByUnmarshallingJson) is likely incorrect. + +Lastly, the Swagger specification (https://swagger.io) that drives AutoRest +(https://github.com/Azure/autorest/) precisely defines two date forms: date and date-time. The +github.com/Azure/go-autorest/autorest/date package provides time.Time derivations to ensure +correct parsing and formatting. + +Errors raised by autorest objects and methods will conform to the autorest.Error interface. + +See the included examples for more detail. For details on the suggested use of this package by +generated clients, see the Client described below. +*/ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "context" + "net/http" + "time" +) + +const ( + // HeaderLocation specifies the HTTP Location header. + HeaderLocation = "Location" + + // HeaderRetryAfter specifies the HTTP Retry-After header. + HeaderRetryAfter = "Retry-After" +) + +// ResponseHasStatusCode returns true if the status code in the HTTP Response is in the passed set +// and false otherwise. +func ResponseHasStatusCode(resp *http.Response, codes ...int) bool { + if resp == nil { + return false + } + return containsInt(codes, resp.StatusCode) +} + +// GetLocation retrieves the URL from the Location header of the passed response. +func GetLocation(resp *http.Response) string { + return resp.Header.Get(HeaderLocation) +} + +// GetRetryAfter extracts the retry delay from the Retry-After header of the passed response. If +// the header is absent or is malformed, it will return the supplied default delay time.Duration. +func GetRetryAfter(resp *http.Response, defaultDelay time.Duration) time.Duration { + retry := resp.Header.Get(HeaderRetryAfter) + if retry == "" { + return defaultDelay + } + + d, err := time.ParseDuration(retry + "s") + if err != nil { + return defaultDelay + } + + return d +} + +// NewPollingRequest allocates and returns a new http.Request to poll for the passed response. 
+func NewPollingRequest(resp *http.Response, cancel <-chan struct{}) (*http.Request, error) { + location := GetLocation(resp) + if location == "" { + return nil, NewErrorWithResponse("autorest", "NewPollingRequest", resp, "Location header missing from response that requires polling") + } + + req, err := Prepare(&http.Request{Cancel: cancel}, + AsGet(), + WithBaseURL(location)) + if err != nil { + return nil, NewErrorWithError(err, "autorest", "NewPollingRequest", nil, "Failure creating poll request to %s", location) + } + + return req, nil +} + +// NewPollingRequestWithContext allocates and returns a new http.Request with the specified context to poll for the passed response. +func NewPollingRequestWithContext(ctx context.Context, resp *http.Response) (*http.Request, error) { + location := GetLocation(resp) + if location == "" { + return nil, NewErrorWithResponse("autorest", "NewPollingRequestWithContext", resp, "Location header missing from response that requires polling") + } + + req, err := Prepare((&http.Request{}).WithContext(ctx), + AsGet(), + WithBaseURL(location)) + if err != nil { + return nil, NewErrorWithError(err, "autorest", "NewPollingRequestWithContext", nil, "Failure creating poll request to %s", location) + } + + return req, nil +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/client.go b/vendor/github.com/Azure/go-autorest/autorest/client.go new file mode 100644 index 0000000..e04f9fd --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/client.go @@ -0,0 +1,323 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "crypto/tls" + "fmt" + "io" + "io/ioutil" + "log" + "net/http" + "strings" + "time" + + "github.com/Azure/go-autorest/logger" +) + +const ( + // DefaultPollingDelay is a reasonable delay between polling requests. + DefaultPollingDelay = 60 * time.Second + + // DefaultPollingDuration is a reasonable total polling duration. + DefaultPollingDuration = 15 * time.Minute + + // DefaultRetryAttempts is number of attempts for retry status codes (5xx). + DefaultRetryAttempts = 3 + + // DefaultRetryDuration is the duration to wait between retries. + DefaultRetryDuration = 30 * time.Second +) + +var ( + // StatusCodesForRetry are a defined group of status code for which the client will retry + StatusCodesForRetry = []int{ + http.StatusRequestTimeout, // 408 + http.StatusTooManyRequests, // 429 + http.StatusInternalServerError, // 500 + http.StatusBadGateway, // 502 + http.StatusServiceUnavailable, // 503 + http.StatusGatewayTimeout, // 504 + } +) + +const ( + requestFormat = `HTTP Request Begin =================================================== +%s +===================================================== HTTP Request End +` + responseFormat = `HTTP Response Begin =================================================== +%s +===================================================== HTTP Response End +` +) + +// Response serves as the base for all responses from generated clients. It provides access to the +// last http.Response. 
+type Response struct { + *http.Response `json:"-"` +} + +// IsHTTPStatus returns true if the returned HTTP status code matches the provided status code. +// If there was no response (i.e. the underlying http.Response is nil) the return value is false. +func (r Response) IsHTTPStatus(statusCode int) bool { + if r.Response == nil { + return false + } + return r.Response.StatusCode == statusCode +} + +// HasHTTPStatus returns true if the returned HTTP status code matches one of the provided status codes. +// If there was no response (i.e. the underlying http.Response is nil) or not status codes are provided +// the return value is false. +func (r Response) HasHTTPStatus(statusCodes ...int) bool { + return ResponseHasStatusCode(r.Response, statusCodes...) +} + +// LoggingInspector implements request and response inspectors that log the full request and +// response to a supplied log. +type LoggingInspector struct { + Logger *log.Logger +} + +// WithInspection returns a PrepareDecorator that emits the http.Request to the supplied logger. The +// body is restored after being emitted. +// +// Note: Since it reads the entire Body, this decorator should not be used where body streaming is +// important. It is best used to trace JSON or similar body values. +func (li LoggingInspector) WithInspection() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + var body, b bytes.Buffer + + defer r.Body.Close() + + r.Body = ioutil.NopCloser(io.TeeReader(r.Body, &body)) + if err := r.Write(&b); err != nil { + return nil, fmt.Errorf("Failed to write response: %v", err) + } + + li.Logger.Printf(requestFormat, b.String()) + + r.Body = ioutil.NopCloser(&body) + return p.Prepare(r) + }) + } +} + +// ByInspecting returns a RespondDecorator that emits the http.Response to the supplied logger. The +// body is restored after being emitted. 
+// +// Note: Since it reads the entire Body, this decorator should not be used where body streaming is +// important. It is best used to trace JSON or similar body values. +func (li LoggingInspector) ByInspecting() RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + var body, b bytes.Buffer + defer resp.Body.Close() + resp.Body = ioutil.NopCloser(io.TeeReader(resp.Body, &body)) + if err := resp.Write(&b); err != nil { + return fmt.Errorf("Failed to write response: %v", err) + } + + li.Logger.Printf(responseFormat, b.String()) + + resp.Body = ioutil.NopCloser(&body) + return r.Respond(resp) + }) + } +} + +// Client is the base for autorest generated clients. It provides default, "do nothing" +// implementations of an Authorizer, RequestInspector, and ResponseInspector. It also returns the +// standard, undecorated http.Client as a default Sender. +// +// Generated clients should also use Error (see NewError and NewErrorWithError) for errors and +// return responses that compose with Response. +// +// Most customization of generated clients is best achieved by supplying a custom Authorizer, custom +// RequestInspector, and / or custom ResponseInspector. Users may log requests, implement circuit +// breakers (see https://msdn.microsoft.com/en-us/library/dn589784.aspx) or otherwise influence +// sending the request by providing a decorated Sender. +type Client struct { + Authorizer Authorizer + Sender Sender + RequestInspector PrepareDecorator + ResponseInspector RespondDecorator + + // PollingDelay sets the polling frequency used in absence of a Retry-After HTTP header + PollingDelay time.Duration + + // PollingDuration sets the maximum polling time after which an error is returned. + // Setting this to zero will use the provided context to control the duration. + PollingDuration time.Duration + + // RetryAttempts sets the default number of retry attempts for client. 
+ RetryAttempts int + + // RetryDuration sets the delay duration for retries. + RetryDuration time.Duration + + // UserAgent, if not empty, will be set as the HTTP User-Agent header on all requests sent + // through the Do method. + UserAgent string + + Jar http.CookieJar + + // Set to true to skip attempted registration of resource providers (false by default). + SkipResourceProviderRegistration bool + + // SendDecorators can be used to override the default chain of SendDecorators. + // This can be used to specify things like a custom retry SendDecorator. + // Set this to an empty slice to use no SendDecorators. + SendDecorators []SendDecorator +} + +// NewClientWithUserAgent returns an instance of a Client with the UserAgent set to the passed +// string. +func NewClientWithUserAgent(ua string) Client { + return newClient(ua, tls.RenegotiateNever) +} + +// ClientOptions contains various Client configuration options. +type ClientOptions struct { + // UserAgent is an optional user-agent string to append to the default user agent. + UserAgent string + + // Renegotiation is an optional setting to control client-side TLS renegotiation. + Renegotiation tls.RenegotiationSupport +} + +// NewClientWithOptions returns an instance of a Client with the specified values. 
+func NewClientWithOptions(options ClientOptions) Client { + return newClient(options.UserAgent, options.Renegotiation) +} + +func newClient(ua string, renegotiation tls.RenegotiationSupport) Client { + c := Client{ + PollingDelay: DefaultPollingDelay, + PollingDuration: DefaultPollingDuration, + RetryAttempts: DefaultRetryAttempts, + RetryDuration: DefaultRetryDuration, + UserAgent: UserAgent(), + } + c.Sender = c.sender(renegotiation) + c.AddToUserAgent(ua) + return c +} + +// AddToUserAgent adds an extension to the current user agent +func (c *Client) AddToUserAgent(extension string) error { + if extension != "" { + c.UserAgent = fmt.Sprintf("%s %s", c.UserAgent, extension) + return nil + } + return fmt.Errorf("Extension was empty, User Agent stayed as %s", c.UserAgent) +} + +// Do implements the Sender interface by invoking the active Sender after applying authorization. +// If Sender is not set, it uses a new instance of http.Client. In both cases it will, if UserAgent +// is set, apply set the User-Agent header. +func (c Client) Do(r *http.Request) (*http.Response, error) { + if r.UserAgent() == "" { + r, _ = Prepare(r, + WithUserAgent(c.UserAgent)) + } + // NOTE: c.WithInspection() must be last in the list so that it can inspect all preceding operations + r, err := Prepare(r, + c.WithAuthorization(), + c.WithInspection()) + if err != nil { + var resp *http.Response + if detErr, ok := err.(DetailedError); ok { + // if the authorization failed (e.g. invalid credentials) there will + // be a response associated with the error, be sure to return it. 
+ resp = detErr.Response + } + return resp, NewErrorWithError(err, "autorest/Client", "Do", nil, "Preparing request failed") + } + logger.Instance.WriteRequest(r, logger.Filter{ + Header: func(k string, v []string) (bool, []string) { + // remove the auth token from the log + if strings.EqualFold(k, "Authorization") || strings.EqualFold(k, "Ocp-Apim-Subscription-Key") { + v = []string{"**REDACTED**"} + } + return true, v + }, + }) + resp, err := SendWithSender(c.sender(tls.RenegotiateNever), r) + logger.Instance.WriteResponse(resp, logger.Filter{}) + Respond(resp, c.ByInspecting()) + return resp, err +} + +// sender returns the Sender to which to send requests. +func (c Client) sender(renengotiation tls.RenegotiationSupport) Sender { + if c.Sender == nil { + return sender(renengotiation) + } + return c.Sender +} + +// WithAuthorization is a convenience method that returns the WithAuthorization PrepareDecorator +// from the current Authorizer. If not Authorizer is set, it uses the NullAuthorizer. +func (c Client) WithAuthorization() PrepareDecorator { + return c.authorizer().WithAuthorization() +} + +// authorizer returns the Authorizer to use. +func (c Client) authorizer() Authorizer { + if c.Authorizer == nil { + return NullAuthorizer{} + } + return c.Authorizer +} + +// WithInspection is a convenience method that passes the request to the supplied RequestInspector, +// if present, or returns the WithNothing PrepareDecorator otherwise. +func (c Client) WithInspection() PrepareDecorator { + if c.RequestInspector == nil { + return WithNothing() + } + return c.RequestInspector +} + +// ByInspecting is a convenience method that passes the response to the supplied ResponseInspector, +// if present, or returns the ByIgnoring RespondDecorator otherwise. 
+func (c Client) ByInspecting() RespondDecorator { + if c.ResponseInspector == nil { + return ByIgnoring() + } + return c.ResponseInspector +} + +// Send sends the provided http.Request using the client's Sender or the default sender. +// It returns the http.Response and possible error. It also accepts a, possibly empty, +// default set of SendDecorators used when sending the request. +// SendDecorators have the following precedence: +// 1. In a request's context via WithSendDecorators() +// 2. Specified on the client in SendDecorators +// 3. The default values specified in this method +func (c Client) Send(req *http.Request, decorators ...SendDecorator) (*http.Response, error) { + if c.SendDecorators != nil { + decorators = c.SendDecorators + } + inCtx := req.Context().Value(ctxSendDecorators{}) + if sd, ok := inCtx.([]SendDecorator); ok { + decorators = sd + } + return SendWithSender(c, req, decorators...) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/LICENSE b/vendor/github.com/Azure/go-autorest/autorest/date/LICENSE new file mode 100644 index 0000000..b9d6a27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2015 Microsoft Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/date.go b/vendor/github.com/Azure/go-autorest/autorest/date/date.go new file mode 100644 index 0000000..c457106 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/date.go @@ -0,0 +1,96 @@ +/* +Package date provides time.Time derivatives that conform to the Swagger.io (https://swagger.io/) +defined date formats: Date and DateTime. Both types may, in most cases, be used in lieu of +time.Time types. And both convert to time.Time through a ToTime method. +*/ +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "fmt" + "time" +) + +const ( + fullDate = "2006-01-02" + fullDateJSON = `"2006-01-02"` + dateFormat = "%04d-%02d-%02d" + jsonFormat = `"%04d-%02d-%02d"` +) + +// Date defines a type similar to time.Time but assumes a layout of RFC3339 full-date (i.e., +// 2006-01-02). +type Date struct { + time.Time +} + +// ParseDate create a new Date from the passed string. +func ParseDate(date string) (d Date, err error) { + return parseDate(date, fullDate) +} + +func parseDate(date string, format string) (Date, error) { + d, err := time.Parse(format, date) + return Date{Time: d}, err +} + +// MarshalBinary preserves the Date as a byte array conforming to RFC3339 full-date (i.e., +// 2006-01-02). 
+func (d Date) MarshalBinary() ([]byte, error) { + return d.MarshalText() +} + +// UnmarshalBinary reconstitutes a Date saved as a byte array conforming to RFC3339 full-date (i.e., +// 2006-01-02). +func (d *Date) UnmarshalBinary(data []byte) error { + return d.UnmarshalText(data) +} + +// MarshalJSON preserves the Date as a JSON string conforming to RFC3339 full-date (i.e., +// 2006-01-02). +func (d Date) MarshalJSON() (json []byte, err error) { + return []byte(fmt.Sprintf(jsonFormat, d.Year(), d.Month(), d.Day())), nil +} + +// UnmarshalJSON reconstitutes the Date from a JSON string conforming to RFC3339 full-date (i.e., +// 2006-01-02). +func (d *Date) UnmarshalJSON(data []byte) (err error) { + d.Time, err = time.Parse(fullDateJSON, string(data)) + return err +} + +// MarshalText preserves the Date as a byte array conforming to RFC3339 full-date (i.e., +// 2006-01-02). +func (d Date) MarshalText() (text []byte, err error) { + return []byte(fmt.Sprintf(dateFormat, d.Year(), d.Month(), d.Day())), nil +} + +// UnmarshalText reconstitutes a Date saved as a byte array conforming to RFC3339 full-date (i.e., +// 2006-01-02). +func (d *Date) UnmarshalText(data []byte) (err error) { + d.Time, err = time.Parse(fullDate, string(data)) + return err +} + +// String returns the Date formatted as an RFC3339 full-date string (i.e., 2006-01-02). 
+func (d Date) String() string { + return fmt.Sprintf(dateFormat, d.Year(), d.Month(), d.Day()) +} + +// ToTime returns a Date as a time.Time +func (d Date) ToTime() time.Time { + return d.Time +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go b/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go new file mode 100644 index 0000000..55adf93 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/go_mod_tidy_hack.go @@ -0,0 +1,24 @@ +// +build modhack + +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file, and the github.com/Azure/go-autorest/autorest import, won't actually become part of +// the resultant binary. + +// Necessary for safely adding multi-module repo. +// See: https://github.com/golang/go/wiki/Modules#is-it-possible-to-add-a-module-to-a-multi-module-repository +import _ "github.com/Azure/go-autorest/autorest" diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/time.go b/vendor/github.com/Azure/go-autorest/autorest/date/time.go new file mode 100644 index 0000000..b453fad --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/time.go @@ -0,0 +1,103 @@ +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "regexp" + "time" +) + +// Azure reports time in UTC but it doesn't include the 'Z' time zone suffix in some cases. +const ( + azureUtcFormatJSON = `"2006-01-02T15:04:05.999999999"` + azureUtcFormat = "2006-01-02T15:04:05.999999999" + rfc3339JSON = `"` + time.RFC3339Nano + `"` + rfc3339 = time.RFC3339Nano + tzOffsetRegex = `(Z|z|\+|-)(\d+:\d+)*"*$` +) + +// Time defines a type similar to time.Time but assumes a layout of RFC3339 date-time (i.e., +// 2006-01-02T15:04:05Z). +type Time struct { + time.Time +} + +// MarshalBinary preserves the Time as a byte array conforming to RFC3339 date-time (i.e., +// 2006-01-02T15:04:05Z). +func (t Time) MarshalBinary() ([]byte, error) { + return t.Time.MarshalText() +} + +// UnmarshalBinary reconstitutes a Time saved as a byte array conforming to RFC3339 date-time +// (i.e., 2006-01-02T15:04:05Z). +func (t *Time) UnmarshalBinary(data []byte) error { + return t.UnmarshalText(data) +} + +// MarshalJSON preserves the Time as a JSON string conforming to RFC3339 date-time (i.e., +// 2006-01-02T15:04:05Z). +func (t Time) MarshalJSON() (json []byte, err error) { + return t.Time.MarshalJSON() +} + +// UnmarshalJSON reconstitutes the Time from a JSON string conforming to RFC3339 date-time +// (i.e., 2006-01-02T15:04:05Z). 
+func (t *Time) UnmarshalJSON(data []byte) (err error) { + timeFormat := azureUtcFormatJSON + match, err := regexp.Match(tzOffsetRegex, data) + if err != nil { + return err + } else if match { + timeFormat = rfc3339JSON + } + t.Time, err = ParseTime(timeFormat, string(data)) + return err +} + +// MarshalText preserves the Time as a byte array conforming to RFC3339 date-time (i.e., +// 2006-01-02T15:04:05Z). +func (t Time) MarshalText() (text []byte, err error) { + return t.Time.MarshalText() +} + +// UnmarshalText reconstitutes a Time saved as a byte array conforming to RFC3339 date-time +// (i.e., 2006-01-02T15:04:05Z). +func (t *Time) UnmarshalText(data []byte) (err error) { + timeFormat := azureUtcFormat + match, err := regexp.Match(tzOffsetRegex, data) + if err != nil { + return err + } else if match { + timeFormat = rfc3339 + } + t.Time, err = ParseTime(timeFormat, string(data)) + return err +} + +// String returns the Time formatted as an RFC3339 date-time string (i.e., +// 2006-01-02T15:04:05Z). +func (t Time) String() string { + // Note: time.Time.String does not return an RFC3339 compliant string, time.Time.MarshalText does. + b, err := t.MarshalText() + if err != nil { + return "" + } + return string(b) +} + +// ToTime returns a Time as a time.Time +func (t Time) ToTime() time.Time { + return t.Time +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/timerfc1123.go b/vendor/github.com/Azure/go-autorest/autorest/date/timerfc1123.go new file mode 100644 index 0000000..48fb39b --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/timerfc1123.go @@ -0,0 +1,100 @@ +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "errors" + "time" +) + +const ( + rfc1123JSON = `"` + time.RFC1123 + `"` + rfc1123 = time.RFC1123 +) + +// TimeRFC1123 defines a type similar to time.Time but assumes a layout of RFC1123 date-time (i.e., +// Mon, 02 Jan 2006 15:04:05 MST). +type TimeRFC1123 struct { + time.Time +} + +// UnmarshalJSON reconstitutes the Time from a JSON string conforming to RFC1123 date-time +// (i.e., Mon, 02 Jan 2006 15:04:05 MST). +func (t *TimeRFC1123) UnmarshalJSON(data []byte) (err error) { + t.Time, err = ParseTime(rfc1123JSON, string(data)) + if err != nil { + return err + } + return nil +} + +// MarshalJSON preserves the Time as a JSON string conforming to RFC1123 date-time (i.e., +// Mon, 02 Jan 2006 15:04:05 MST). +func (t TimeRFC1123) MarshalJSON() ([]byte, error) { + if y := t.Year(); y < 0 || y >= 10000 { + return nil, errors.New("Time.MarshalJSON: year outside of range [0,9999]") + } + b := []byte(t.Format(rfc1123JSON)) + return b, nil +} + +// MarshalText preserves the Time as a byte array conforming to RFC1123 date-time (i.e., +// Mon, 02 Jan 2006 15:04:05 MST). +func (t TimeRFC1123) MarshalText() ([]byte, error) { + if y := t.Year(); y < 0 || y >= 10000 { + return nil, errors.New("Time.MarshalText: year outside of range [0,9999]") + } + + b := []byte(t.Format(rfc1123)) + return b, nil +} + +// UnmarshalText reconstitutes a Time saved as a byte array conforming to RFC1123 date-time +// (i.e., Mon, 02 Jan 2006 15:04:05 MST). 
+func (t *TimeRFC1123) UnmarshalText(data []byte) (err error) { + t.Time, err = ParseTime(rfc1123, string(data)) + if err != nil { + return err + } + return nil +} + +// MarshalBinary preserves the Time as a byte array conforming to RFC1123 date-time (i.e., +// Mon, 02 Jan 2006 15:04:05 MST). +func (t TimeRFC1123) MarshalBinary() ([]byte, error) { + return t.MarshalText() +} + +// UnmarshalBinary reconstitutes a Time saved as a byte array conforming to RFC1123 date-time +// (i.e., Mon, 02 Jan 2006 15:04:05 MST). +func (t *TimeRFC1123) UnmarshalBinary(data []byte) error { + return t.UnmarshalText(data) +} + +// ToTime returns a Time as a time.Time +func (t TimeRFC1123) ToTime() time.Time { + return t.Time +} + +// String returns the Time formatted as an RFC1123 date-time string (i.e., +// Mon, 02 Jan 2006 15:04:05 MST). +func (t TimeRFC1123) String() string { + // Note: time.Time.String does not return an RFC1123 compliant string, time.Time.MarshalText does. + b, err := t.MarshalText() + if err != nil { + return "" + } + return string(b) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/unixtime.go b/vendor/github.com/Azure/go-autorest/autorest/date/unixtime.go new file mode 100644 index 0000000..7073959 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/unixtime.go @@ -0,0 +1,123 @@ +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import ( + "bytes" + "encoding/binary" + "encoding/json" + "time" +) + +// unixEpoch is the moment in time that should be treated as timestamp 0. +var unixEpoch = time.Date(1970, time.January, 1, 0, 0, 0, 0, time.UTC) + +// UnixTime marshals and unmarshals a time that is represented as the number +// of seconds (ignoring skip-seconds) since the Unix Epoch. +type UnixTime time.Time + +// Duration returns the time as a Duration since the UnixEpoch. +func (t UnixTime) Duration() time.Duration { + return time.Time(t).Sub(unixEpoch) +} + +// NewUnixTimeFromSeconds creates a UnixTime as a number of seconds from the UnixEpoch. +func NewUnixTimeFromSeconds(seconds float64) UnixTime { + return NewUnixTimeFromDuration(time.Duration(seconds * float64(time.Second))) +} + +// NewUnixTimeFromNanoseconds creates a UnixTime as a number of nanoseconds from the UnixEpoch. +func NewUnixTimeFromNanoseconds(nanoseconds int64) UnixTime { + return NewUnixTimeFromDuration(time.Duration(nanoseconds)) +} + +// NewUnixTimeFromDuration creates a UnixTime as a duration of time since the UnixEpoch. +func NewUnixTimeFromDuration(dur time.Duration) UnixTime { + return UnixTime(unixEpoch.Add(dur)) +} + +// UnixEpoch retreives the moment considered the Unix Epoch. I.e. The time represented by '0' +func UnixEpoch() time.Time { + return unixEpoch +} + +// MarshalJSON preserves the UnixTime as a JSON number conforming to Unix Timestamp requirements. +// (i.e. the number of seconds since midnight January 1st, 1970 not considering leap seconds.) +func (t UnixTime) MarshalJSON() ([]byte, error) { + buffer := &bytes.Buffer{} + enc := json.NewEncoder(buffer) + err := enc.Encode(float64(time.Time(t).UnixNano()) / 1e9) + if err != nil { + return nil, err + } + return buffer.Bytes(), nil +} + +// UnmarshalJSON reconstitures a UnixTime saved as a JSON number of the number of seconds since +// midnight January 1st, 1970. 
+func (t *UnixTime) UnmarshalJSON(text []byte) error { + dec := json.NewDecoder(bytes.NewReader(text)) + + var secondsSinceEpoch float64 + if err := dec.Decode(&secondsSinceEpoch); err != nil { + return err + } + + *t = NewUnixTimeFromSeconds(secondsSinceEpoch) + + return nil +} + +// MarshalText stores the number of seconds since the Unix Epoch as a textual floating point number. +func (t UnixTime) MarshalText() ([]byte, error) { + cast := time.Time(t) + return cast.MarshalText() +} + +// UnmarshalText populates a UnixTime with a value stored textually as a floating point number of seconds since the Unix Epoch. +func (t *UnixTime) UnmarshalText(raw []byte) error { + var unmarshaled time.Time + + if err := unmarshaled.UnmarshalText(raw); err != nil { + return err + } + + *t = UnixTime(unmarshaled) + return nil +} + +// MarshalBinary converts a UnixTime into a binary.LittleEndian float64 of nanoseconds since the epoch. +func (t UnixTime) MarshalBinary() ([]byte, error) { + buf := &bytes.Buffer{} + + payload := int64(t.Duration()) + + if err := binary.Write(buf, binary.LittleEndian, &payload); err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// UnmarshalBinary converts a from a binary.LittleEndian float64 of nanoseconds since the epoch into a UnixTime. 
+func (t *UnixTime) UnmarshalBinary(raw []byte) error { + var nanosecondsSinceEpoch int64 + + if err := binary.Read(bytes.NewReader(raw), binary.LittleEndian, &nanosecondsSinceEpoch); err != nil { + return err + } + *t = NewUnixTimeFromNanoseconds(nanosecondsSinceEpoch) + return nil +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/date/utility.go b/vendor/github.com/Azure/go-autorest/autorest/date/utility.go new file mode 100644 index 0000000..12addf0 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/date/utility.go @@ -0,0 +1,25 @@ +package date + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "strings" + "time" +) + +// ParseTime to parse Time string to specified format. +func ParseTime(format string, t string) (d time.Time, err error) { + return time.Parse(format, strings.ToUpper(t)) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/error.go b/vendor/github.com/Azure/go-autorest/autorest/error.go new file mode 100644 index 0000000..f724f33 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/error.go @@ -0,0 +1,98 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "fmt" + "net/http" +) + +const ( + // UndefinedStatusCode is used when HTTP status code is not available for an error. + UndefinedStatusCode = 0 +) + +// DetailedError encloses a error with details of the package, method, and associated HTTP +// status code (if any). +type DetailedError struct { + Original error + + // PackageType is the package type of the object emitting the error. For types, the value + // matches that produced the the '%T' format specifier of the fmt package. For other elements, + // such as functions, it is just the package name (e.g., "autorest"). + PackageType string + + // Method is the name of the method raising the error. + Method string + + // StatusCode is the HTTP Response StatusCode (if non-zero) that led to the error. + StatusCode interface{} + + // Message is the error message. + Message string + + // Service Error is the response body of failed API in bytes + ServiceError []byte + + // Response is the response object that was returned during failure if applicable. + Response *http.Response +} + +// NewError creates a new Error conforming object from the passed packageType, method, and +// message. message is treated as a format string to which the optional args apply. +func NewError(packageType string, method string, message string, args ...interface{}) DetailedError { + return NewErrorWithError(nil, packageType, method, nil, message, args...) 
+} + +// NewErrorWithResponse creates a new Error conforming object from the passed +// packageType, method, statusCode of the given resp (UndefinedStatusCode if +// resp is nil), and message. message is treated as a format string to which the +// optional args apply. +func NewErrorWithResponse(packageType string, method string, resp *http.Response, message string, args ...interface{}) DetailedError { + return NewErrorWithError(nil, packageType, method, resp, message, args...) +} + +// NewErrorWithError creates a new Error conforming object from the +// passed packageType, method, statusCode of the given resp (UndefinedStatusCode +// if resp is nil), message, and original error. message is treated as a format +// string to which the optional args apply. +func NewErrorWithError(original error, packageType string, method string, resp *http.Response, message string, args ...interface{}) DetailedError { + if v, ok := original.(DetailedError); ok { + return v + } + + statusCode := UndefinedStatusCode + if resp != nil { + statusCode = resp.StatusCode + } + + return DetailedError{ + Original: original, + PackageType: packageType, + Method: method, + StatusCode: statusCode, + Message: fmt.Sprintf(message, args...), + Response: resp, + } +} + +// Error returns a formatted containing all available details (i.e., PackageType, Method, +// StatusCode, Message, and original error (if any)). 
+func (e DetailedError) Error() string { + if e.Original == nil { + return fmt.Sprintf("%s#%s: %s: StatusCode=%d", e.PackageType, e.Method, e.Message, e.StatusCode) + } + return fmt.Sprintf("%s#%s: %s: StatusCode=%d -- Original Error: %v", e.PackageType, e.Method, e.Message, e.StatusCode, e.Original) +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/preparer.go b/vendor/github.com/Azure/go-autorest/autorest/preparer.go new file mode 100644 index 0000000..6e8ed64 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/preparer.go @@ -0,0 +1,550 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "context" + "encoding/json" + "encoding/xml" + "fmt" + "io" + "io/ioutil" + "mime/multipart" + "net/http" + "net/url" + "strings" +) + +const ( + mimeTypeJSON = "application/json" + mimeTypeOctetStream = "application/octet-stream" + mimeTypeFormPost = "application/x-www-form-urlencoded" + + headerAuthorization = "Authorization" + headerAuxAuthorization = "x-ms-authorization-auxiliary" + headerContentType = "Content-Type" + headerUserAgent = "User-Agent" +) + +// used as a key type in context.WithValue() +type ctxPrepareDecorators struct{} + +// WithPrepareDecorators adds the specified PrepareDecorators to the provided context. +// If no PrepareDecorators are provided the context is unchanged. 
+func WithPrepareDecorators(ctx context.Context, prepareDecorator []PrepareDecorator) context.Context { + if len(prepareDecorator) == 0 { + return ctx + } + return context.WithValue(ctx, ctxPrepareDecorators{}, prepareDecorator) +} + +// GetPrepareDecorators returns the PrepareDecorators in the provided context or the provided default PrepareDecorators. +func GetPrepareDecorators(ctx context.Context, defaultPrepareDecorators ...PrepareDecorator) []PrepareDecorator { + inCtx := ctx.Value(ctxPrepareDecorators{}) + if pd, ok := inCtx.([]PrepareDecorator); ok { + return pd + } + return defaultPrepareDecorators +} + +// Preparer is the interface that wraps the Prepare method. +// +// Prepare accepts and possibly modifies an http.Request (e.g., adding Headers). Implementations +// must ensure to not share or hold per-invocation state since Preparers may be shared and re-used. +type Preparer interface { + Prepare(*http.Request) (*http.Request, error) +} + +// PreparerFunc is a method that implements the Preparer interface. +type PreparerFunc func(*http.Request) (*http.Request, error) + +// Prepare implements the Preparer interface on PreparerFunc. +func (pf PreparerFunc) Prepare(r *http.Request) (*http.Request, error) { + return pf(r) +} + +// PrepareDecorator takes and possibly decorates, by wrapping, a Preparer. Decorators may affect the +// http.Request and pass it along or, first, pass the http.Request along then affect the result. +type PrepareDecorator func(Preparer) Preparer + +// CreatePreparer creates, decorates, and returns a Preparer. +// Without decorators, the returned Preparer returns the passed http.Request unmodified. +// Preparers are safe to share and re-use. +func CreatePreparer(decorators ...PrepareDecorator) Preparer { + return DecoratePreparer( + Preparer(PreparerFunc(func(r *http.Request) (*http.Request, error) { return r, nil })), + decorators...) 
+} + +// DecoratePreparer accepts a Preparer and a, possibly empty, set of PrepareDecorators, which it +// applies to the Preparer. Decorators are applied in the order received, but their affect upon the +// request depends on whether they are a pre-decorator (change the http.Request and then pass it +// along) or a post-decorator (pass the http.Request along and alter it on return). +func DecoratePreparer(p Preparer, decorators ...PrepareDecorator) Preparer { + for _, decorate := range decorators { + p = decorate(p) + } + return p +} + +// Prepare accepts an http.Request and a, possibly empty, set of PrepareDecorators. +// It creates a Preparer from the decorators which it then applies to the passed http.Request. +func Prepare(r *http.Request, decorators ...PrepareDecorator) (*http.Request, error) { + if r == nil { + return nil, NewError("autorest", "Prepare", "Invoked without an http.Request") + } + return CreatePreparer(decorators...).Prepare(r) +} + +// WithNothing returns a "do nothing" PrepareDecorator that makes no changes to the passed +// http.Request. +func WithNothing() PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + return p.Prepare(r) + }) + } +} + +// WithHeader returns a PrepareDecorator that sets the specified HTTP header of the http.Request to +// the passed value. It canonicalizes the passed header name (via http.CanonicalHeaderKey) before +// adding the header. +func WithHeader(header string, value string) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set(http.CanonicalHeaderKey(header), value) + } + return r, err + }) + } +} + +// WithHeaders returns a PrepareDecorator that sets the specified HTTP headers of the http.Request to +// the passed value. 
It canonicalizes the passed headers name (via http.CanonicalHeaderKey) before +// adding them. +func WithHeaders(headers map[string]interface{}) PrepareDecorator { + h := ensureValueStrings(headers) + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.Header == nil { + r.Header = make(http.Header) + } + + for name, value := range h { + r.Header.Set(http.CanonicalHeaderKey(name), value) + } + } + return r, err + }) + } +} + +// WithBearerAuthorization returns a PrepareDecorator that adds an HTTP Authorization header whose +// value is "Bearer " followed by the supplied token. +func WithBearerAuthorization(token string) PrepareDecorator { + return WithHeader(headerAuthorization, fmt.Sprintf("Bearer %s", token)) +} + +// AsContentType returns a PrepareDecorator that adds an HTTP Content-Type header whose value +// is the passed contentType. +func AsContentType(contentType string) PrepareDecorator { + return WithHeader(headerContentType, contentType) +} + +// WithUserAgent returns a PrepareDecorator that adds an HTTP User-Agent header whose value is the +// passed string. +func WithUserAgent(ua string) PrepareDecorator { + return WithHeader(headerUserAgent, ua) +} + +// AsFormURLEncoded returns a PrepareDecorator that adds an HTTP Content-Type header whose value is +// "application/x-www-form-urlencoded". +func AsFormURLEncoded() PrepareDecorator { + return AsContentType(mimeTypeFormPost) +} + +// AsJSON returns a PrepareDecorator that adds an HTTP Content-Type header whose value is +// "application/json". +func AsJSON() PrepareDecorator { + return AsContentType(mimeTypeJSON) +} + +// AsOctetStream returns a PrepareDecorator that adds the "application/octet-stream" Content-Type header. +func AsOctetStream() PrepareDecorator { + return AsContentType(mimeTypeOctetStream) +} + +// WithMethod returns a PrepareDecorator that sets the HTTP method of the passed request. 
The +// decorator does not validate that the passed method string is a known HTTP method. +func WithMethod(method string) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r.Method = method + return p.Prepare(r) + }) + } +} + +// AsDelete returns a PrepareDecorator that sets the HTTP method to DELETE. +func AsDelete() PrepareDecorator { return WithMethod("DELETE") } + +// AsGet returns a PrepareDecorator that sets the HTTP method to GET. +func AsGet() PrepareDecorator { return WithMethod("GET") } + +// AsHead returns a PrepareDecorator that sets the HTTP method to HEAD. +func AsHead() PrepareDecorator { return WithMethod("HEAD") } + +// AsMerge returns a PrepareDecorator that sets the HTTP method to MERGE. +func AsMerge() PrepareDecorator { return WithMethod("MERGE") } + +// AsOptions returns a PrepareDecorator that sets the HTTP method to OPTIONS. +func AsOptions() PrepareDecorator { return WithMethod("OPTIONS") } + +// AsPatch returns a PrepareDecorator that sets the HTTP method to PATCH. +func AsPatch() PrepareDecorator { return WithMethod("PATCH") } + +// AsPost returns a PrepareDecorator that sets the HTTP method to POST. +func AsPost() PrepareDecorator { return WithMethod("POST") } + +// AsPut returns a PrepareDecorator that sets the HTTP method to PUT. +func AsPut() PrepareDecorator { return WithMethod("PUT") } + +// WithBaseURL returns a PrepareDecorator that populates the http.Request with a url.URL constructed +// from the supplied baseUrl. 
+func WithBaseURL(baseURL string) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + var u *url.URL + if u, err = url.Parse(baseURL); err != nil { + return r, err + } + if u.Scheme == "" { + err = fmt.Errorf("autorest: No scheme detected in URL %s", baseURL) + } + if err == nil { + r.URL = u + } + } + return r, err + }) + } +} + +// WithBytes returns a PrepareDecorator that takes a list of bytes +// which passes the bytes directly to the body +func WithBytes(input *[]byte) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if input == nil { + return r, fmt.Errorf("Input Bytes was nil") + } + + r.ContentLength = int64(len(*input)) + r.Body = ioutil.NopCloser(bytes.NewReader(*input)) + } + return r, err + }) + } +} + +// WithCustomBaseURL returns a PrepareDecorator that replaces brace-enclosed keys within the +// request base URL (i.e., http.Request.URL) with the corresponding values from the passed map. +func WithCustomBaseURL(baseURL string, urlParameters map[string]interface{}) PrepareDecorator { + parameters := ensureValueStrings(urlParameters) + for key, value := range parameters { + baseURL = strings.Replace(baseURL, "{"+key+"}", value, -1) + } + return WithBaseURL(baseURL) +} + +// WithFormData returns a PrepareDecoratore that "URL encodes" (e.g., bar=baz&foo=quux) into the +// http.Request body. 
+func WithFormData(v url.Values) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + s := v.Encode() + + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set(http.CanonicalHeaderKey(headerContentType), mimeTypeFormPost) + r.ContentLength = int64(len(s)) + r.Body = ioutil.NopCloser(strings.NewReader(s)) + } + return r, err + }) + } +} + +// WithMultiPartFormData returns a PrepareDecoratore that "URL encodes" (e.g., bar=baz&foo=quux) form parameters +// into the http.Request body. +func WithMultiPartFormData(formDataParameters map[string]interface{}) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + var body bytes.Buffer + writer := multipart.NewWriter(&body) + for key, value := range formDataParameters { + if rc, ok := value.(io.ReadCloser); ok { + var fd io.Writer + if fd, err = writer.CreateFormFile(key, key); err != nil { + return r, err + } + if _, err = io.Copy(fd, rc); err != nil { + return r, err + } + } else { + if err = writer.WriteField(key, ensureValueString(value)); err != nil { + return r, err + } + } + } + if err = writer.Close(); err != nil { + return r, err + } + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set(http.CanonicalHeaderKey(headerContentType), writer.FormDataContentType()) + r.Body = ioutil.NopCloser(bytes.NewReader(body.Bytes())) + r.ContentLength = int64(body.Len()) + return r, err + } + return r, err + }) + } +} + +// WithFile returns a PrepareDecorator that sends file in request body. 
+func WithFile(f io.ReadCloser) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + b, err := ioutil.ReadAll(f) + if err != nil { + return r, err + } + r.Body = ioutil.NopCloser(bytes.NewReader(b)) + r.ContentLength = int64(len(b)) + } + return r, err + }) + } +} + +// WithBool returns a PrepareDecorator that encodes the passed bool into the body of the request +// and sets the Content-Length header. +func WithBool(v bool) PrepareDecorator { + return WithString(fmt.Sprintf("%v", v)) +} + +// WithFloat32 returns a PrepareDecorator that encodes the passed float32 into the body of the +// request and sets the Content-Length header. +func WithFloat32(v float32) PrepareDecorator { + return WithString(fmt.Sprintf("%v", v)) +} + +// WithFloat64 returns a PrepareDecorator that encodes the passed float64 into the body of the +// request and sets the Content-Length header. +func WithFloat64(v float64) PrepareDecorator { + return WithString(fmt.Sprintf("%v", v)) +} + +// WithInt32 returns a PrepareDecorator that encodes the passed int32 into the body of the request +// and sets the Content-Length header. +func WithInt32(v int32) PrepareDecorator { + return WithString(fmt.Sprintf("%v", v)) +} + +// WithInt64 returns a PrepareDecorator that encodes the passed int64 into the body of the request +// and sets the Content-Length header. +func WithInt64(v int64) PrepareDecorator { + return WithString(fmt.Sprintf("%v", v)) +} + +// WithString returns a PrepareDecorator that encodes the passed string into the body of the request +// and sets the Content-Length header. 
+func WithString(v string) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + r.ContentLength = int64(len(v)) + r.Body = ioutil.NopCloser(strings.NewReader(v)) + } + return r, err + }) + } +} + +// WithJSON returns a PrepareDecorator that encodes the data passed as JSON into the body of the +// request and sets the Content-Length header. +func WithJSON(v interface{}) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + b, err := json.Marshal(v) + if err == nil { + r.ContentLength = int64(len(b)) + r.Body = ioutil.NopCloser(bytes.NewReader(b)) + } + } + return r, err + }) + } +} + +// WithXML returns a PrepareDecorator that encodes the data passed as XML into the body of the +// request and sets the Content-Length header. +func WithXML(v interface{}) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + b, err := xml.Marshal(v) + if err == nil { + // we have to tack on an XML header + withHeader := xml.Header + string(b) + bytesWithHeader := []byte(withHeader) + + r.ContentLength = int64(len(bytesWithHeader)) + r.Body = ioutil.NopCloser(bytes.NewReader(bytesWithHeader)) + } + } + return r, err + }) + } +} + +// WithPath returns a PrepareDecorator that adds the supplied path to the request URL. If the path +// is absolute (that is, it begins with a "/"), it replaces the existing path. 
+func WithPath(path string) PrepareDecorator { + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.URL == nil { + return r, NewError("autorest", "WithPath", "Invoked with a nil URL") + } + if r.URL, err = parseURL(r.URL, path); err != nil { + return r, err + } + } + return r, err + }) + } +} + +// WithEscapedPathParameters returns a PrepareDecorator that replaces brace-enclosed keys within the +// request path (i.e., http.Request.URL.Path) with the corresponding values from the passed map. The +// values will be escaped (aka URL encoded) before insertion into the path. +func WithEscapedPathParameters(path string, pathParameters map[string]interface{}) PrepareDecorator { + parameters := escapeValueStrings(ensureValueStrings(pathParameters)) + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.URL == nil { + return r, NewError("autorest", "WithEscapedPathParameters", "Invoked with a nil URL") + } + for key, value := range parameters { + path = strings.Replace(path, "{"+key+"}", value, -1) + } + if r.URL, err = parseURL(r.URL, path); err != nil { + return r, err + } + } + return r, err + }) + } +} + +// WithPathParameters returns a PrepareDecorator that replaces brace-enclosed keys within the +// request path (i.e., http.Request.URL.Path) with the corresponding values from the passed map. 
+func WithPathParameters(path string, pathParameters map[string]interface{}) PrepareDecorator { + parameters := ensureValueStrings(pathParameters) + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.URL == nil { + return r, NewError("autorest", "WithPathParameters", "Invoked with a nil URL") + } + for key, value := range parameters { + path = strings.Replace(path, "{"+key+"}", value, -1) + } + + if r.URL, err = parseURL(r.URL, path); err != nil { + return r, err + } + } + return r, err + }) + } +} + +func parseURL(u *url.URL, path string) (*url.URL, error) { + p := strings.TrimRight(u.String(), "/") + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + return url.Parse(p + path) +} + +// WithQueryParameters returns a PrepareDecorators that encodes and applies the query parameters +// given in the supplied map (i.e., key=value). +func WithQueryParameters(queryParameters map[string]interface{}) PrepareDecorator { + parameters := MapToValues(queryParameters) + return func(p Preparer) Preparer { + return PreparerFunc(func(r *http.Request) (*http.Request, error) { + r, err := p.Prepare(r) + if err == nil { + if r.URL == nil { + return r, NewError("autorest", "WithQueryParameters", "Invoked with a nil URL") + } + v := r.URL.Query() + for key, value := range parameters { + for i := range value { + d, err := url.QueryUnescape(value[i]) + if err != nil { + return r, err + } + value[i] = d + } + v[key] = value + } + r.URL.RawQuery = v.Encode() + } + return r, err + }) + } +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/responder.go b/vendor/github.com/Azure/go-autorest/autorest/responder.go new file mode 100644 index 0000000..349e196 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/responder.go @@ -0,0 +1,269 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the 
"License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "encoding/json" + "encoding/xml" + "fmt" + "io" + "io/ioutil" + "net/http" + "strings" +) + +// Responder is the interface that wraps the Respond method. +// +// Respond accepts and reacts to an http.Response. Implementations must ensure to not share or hold +// state since Responders may be shared and re-used. +type Responder interface { + Respond(*http.Response) error +} + +// ResponderFunc is a method that implements the Responder interface. +type ResponderFunc func(*http.Response) error + +// Respond implements the Responder interface on ResponderFunc. +func (rf ResponderFunc) Respond(r *http.Response) error { + return rf(r) +} + +// RespondDecorator takes and possibly decorates, by wrapping, a Responder. Decorators may react to +// the http.Response and pass it along or, first, pass the http.Response along then react. +type RespondDecorator func(Responder) Responder + +// CreateResponder creates, decorates, and returns a Responder. Without decorators, the returned +// Responder returns the passed http.Response unmodified. Responders may or may not be safe to share +// and re-used: It depends on the applied decorators. For example, a standard decorator that closes +// the response body is fine to share whereas a decorator that reads the body into a passed struct +// is not. +// +// To prevent memory leaks, ensure that at least one Responder closes the response body. 
+func CreateResponder(decorators ...RespondDecorator) Responder { + return DecorateResponder( + Responder(ResponderFunc(func(r *http.Response) error { return nil })), + decorators...) +} + +// DecorateResponder accepts a Responder and a, possibly empty, set of RespondDecorators, which it +// applies to the Responder. Decorators are applied in the order received, but their affect upon the +// request depends on whether they are a pre-decorator (react to the http.Response and then pass it +// along) or a post-decorator (pass the http.Response along and then react). +func DecorateResponder(r Responder, decorators ...RespondDecorator) Responder { + for _, decorate := range decorators { + r = decorate(r) + } + return r +} + +// Respond accepts an http.Response and a, possibly empty, set of RespondDecorators. +// It creates a Responder from the decorators it then applies to the passed http.Response. +func Respond(r *http.Response, decorators ...RespondDecorator) error { + if r == nil { + return nil + } + return CreateResponder(decorators...).Respond(r) +} + +// ByIgnoring returns a RespondDecorator that ignores the passed http.Response passing it unexamined +// to the next RespondDecorator. +func ByIgnoring() RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + return r.Respond(resp) + }) + } +} + +// ByCopying copies the contents of the http.Response Body into the passed bytes.Buffer as +// the Body is read. +func ByCopying(b *bytes.Buffer) RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil && resp != nil && resp.Body != nil { + resp.Body = TeeReadCloser(resp.Body, b) + } + return err + }) + } +} + +// ByDiscardingBody returns a RespondDecorator that first invokes the passed Responder after which +// it copies the remaining bytes (if any) in the response body to ioutil.Discard. 
Since the passed +// Responder is invoked prior to discarding the response body, the decorator may occur anywhere +// within the set. +func ByDiscardingBody() RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil && resp != nil && resp.Body != nil { + if _, err := io.Copy(ioutil.Discard, resp.Body); err != nil { + return fmt.Errorf("Error discarding the response body: %v", err) + } + } + return err + }) + } +} + +// ByClosing returns a RespondDecorator that first invokes the passed Responder after which it +// closes the response body. Since the passed Responder is invoked prior to closing the response +// body, the decorator may occur anywhere within the set. +func ByClosing() RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if resp != nil && resp.Body != nil { + if err := resp.Body.Close(); err != nil { + return fmt.Errorf("Error closing the response body: %v", err) + } + } + return err + }) + } +} + +// ByClosingIfError returns a RespondDecorator that first invokes the passed Responder after which +// it closes the response if the passed Responder returns an error and the response body exists. +func ByClosingIfError() RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err != nil && resp != nil && resp.Body != nil { + if err := resp.Body.Close(); err != nil { + return fmt.Errorf("Error closing the response body: %v", err) + } + } + return err + }) + } +} + +// ByUnmarshallingBytes returns a RespondDecorator that copies the Bytes returned in the +// response Body into the value pointed to by v. 
+func ByUnmarshallingBytes(v *[]byte) RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil { + bytes, errInner := ioutil.ReadAll(resp.Body) + if errInner != nil { + err = fmt.Errorf("Error occurred reading http.Response#Body - Error = '%v'", errInner) + } else { + *v = bytes + } + } + return err + }) + } +} + +// ByUnmarshallingJSON returns a RespondDecorator that decodes a JSON document returned in the +// response Body into the value pointed to by v. +func ByUnmarshallingJSON(v interface{}) RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil { + b, errInner := ioutil.ReadAll(resp.Body) + // Some responses might include a BOM, remove for successful unmarshalling + b = bytes.TrimPrefix(b, []byte("\xef\xbb\xbf")) + if errInner != nil { + err = fmt.Errorf("Error occurred reading http.Response#Body - Error = '%v'", errInner) + } else if len(strings.Trim(string(b), " ")) > 0 { + errInner = json.Unmarshal(b, v) + if errInner != nil { + err = fmt.Errorf("Error occurred unmarshalling JSON - Error = '%v' JSON = '%s'", errInner, string(b)) + } + } + } + return err + }) + } +} + +// ByUnmarshallingXML returns a RespondDecorator that decodes a XML document returned in the +// response Body into the value pointed to by v. 
+func ByUnmarshallingXML(v interface{}) RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil { + b, errInner := ioutil.ReadAll(resp.Body) + if errInner != nil { + err = fmt.Errorf("Error occurred reading http.Response#Body - Error = '%v'", errInner) + } else { + errInner = xml.Unmarshal(b, v) + if errInner != nil { + err = fmt.Errorf("Error occurred unmarshalling Xml - Error = '%v' Xml = '%s'", errInner, string(b)) + } + } + } + return err + }) + } +} + +// WithErrorUnlessStatusCode returns a RespondDecorator that emits an error unless the response +// StatusCode is among the set passed. On error, response body is fully read into a buffer and +// presented in the returned error, as well as in the response body. +func WithErrorUnlessStatusCode(codes ...int) RespondDecorator { + return func(r Responder) Responder { + return ResponderFunc(func(resp *http.Response) error { + err := r.Respond(resp) + if err == nil && !ResponseHasStatusCode(resp, codes...) { + derr := NewErrorWithResponse("autorest", "WithErrorUnlessStatusCode", resp, "%v %v failed with %s", + resp.Request.Method, + resp.Request.URL, + resp.Status) + if resp.Body != nil { + defer resp.Body.Close() + b, _ := ioutil.ReadAll(resp.Body) + derr.ServiceError = b + resp.Body = ioutil.NopCloser(bytes.NewReader(b)) + } + err = derr + } + return err + }) + } +} + +// WithErrorUnlessOK returns a RespondDecorator that emits an error if the response StatusCode is +// anything other than HTTP 200. +func WithErrorUnlessOK() RespondDecorator { + return WithErrorUnlessStatusCode(http.StatusOK) +} + +// ExtractHeader extracts all values of the specified header from the http.Response. It returns an +// empty string slice if the passed http.Response is nil or the header does not exist. 
+func ExtractHeader(header string, resp *http.Response) []string { + if resp != nil && resp.Header != nil { + return resp.Header[http.CanonicalHeaderKey(header)] + } + return nil +} + +// ExtractHeaderValue extracts the first value of the specified header from the http.Response. It +// returns an empty string if the passed http.Response is nil or the header does not exist. +func ExtractHeaderValue(header string, resp *http.Response) string { + h := ExtractHeader(header, resp) + if len(h) > 0 { + return h[0] + } + return "" +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/retriablerequest.go b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest.go new file mode 100644 index 0000000..fa11dbe --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest.go @@ -0,0 +1,52 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "io" + "io/ioutil" + "net/http" +) + +// NewRetriableRequest returns a wrapper around an HTTP request that support retry logic. +func NewRetriableRequest(req *http.Request) *RetriableRequest { + return &RetriableRequest{req: req} +} + +// Request returns the wrapped HTTP request. 
+func (rr *RetriableRequest) Request() *http.Request { + return rr.req +} + +func (rr *RetriableRequest) prepareFromByteReader() (err error) { + // fall back to making a copy (only do this once) + b := []byte{} + if rr.req.ContentLength > 0 { + b = make([]byte, rr.req.ContentLength) + _, err = io.ReadFull(rr.req.Body, b) + if err != nil { + return err + } + } else { + b, err = ioutil.ReadAll(rr.req.Body) + if err != nil { + return err + } + } + rr.br = bytes.NewReader(b) + rr.req.Body = ioutil.NopCloser(rr.br) + return err +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.7.go b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.7.go new file mode 100644 index 0000000..7143cc6 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.7.go @@ -0,0 +1,54 @@ +// +build !go1.8 + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package autorest + +import ( + "bytes" + "io/ioutil" + "net/http" +) + +// RetriableRequest provides facilities for retrying an HTTP request. +type RetriableRequest struct { + req *http.Request + br *bytes.Reader +} + +// Prepare signals that the request is about to be sent. 
+func (rr *RetriableRequest) Prepare() (err error) { + // preserve the request body; this is to support retry logic as + // the underlying transport will always close the reqeust body + if rr.req.Body != nil { + if rr.br != nil { + _, err = rr.br.Seek(0, 0 /*io.SeekStart*/) + rr.req.Body = ioutil.NopCloser(rr.br) + } + if err != nil { + return err + } + if rr.br == nil { + // fall back to making a copy (only do this once) + err = rr.prepareFromByteReader() + } + } + return err +} + +func removeRequestBody(req *http.Request) { + req.Body = nil + req.ContentLength = 0 +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.8.go b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.8.go new file mode 100644 index 0000000..ae15c6b --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/retriablerequest_1.8.go @@ -0,0 +1,66 @@ +// +build go1.8 + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package autorest + +import ( + "bytes" + "io" + "io/ioutil" + "net/http" +) + +// RetriableRequest provides facilities for retrying an HTTP request. +type RetriableRequest struct { + req *http.Request + rc io.ReadCloser + br *bytes.Reader +} + +// Prepare signals that the request is about to be sent. 
+func (rr *RetriableRequest) Prepare() (err error) { + // preserve the request body; this is to support retry logic as + // the underlying transport will always close the reqeust body + if rr.req.Body != nil { + if rr.rc != nil { + rr.req.Body = rr.rc + } else if rr.br != nil { + _, err = rr.br.Seek(0, io.SeekStart) + rr.req.Body = ioutil.NopCloser(rr.br) + } + if err != nil { + return err + } + if rr.req.GetBody != nil { + // this will allow us to preserve the body without having to + // make a copy. note we need to do this on each iteration + rr.rc, err = rr.req.GetBody() + if err != nil { + return err + } + } else if rr.br == nil { + // fall back to making a copy (only do this once) + err = rr.prepareFromByteReader() + } + } + return err +} + +func removeRequestBody(req *http.Request) { + req.Body = nil + req.GetBody = nil + req.ContentLength = 0 +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/sender.go b/vendor/github.com/Azure/go-autorest/autorest/sender.go new file mode 100644 index 0000000..704f3e5 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/sender.go @@ -0,0 +1,424 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import ( + "context" + "crypto/tls" + "fmt" + "log" + "math" + "net/http" + "net/http/cookiejar" + "strconv" + "time" + + "github.com/Azure/go-autorest/tracing" +) + +// used as a key type in context.WithValue() +type ctxSendDecorators struct{} + +// WithSendDecorators adds the specified SendDecorators to the provided context. +// If no SendDecorators are provided the context is unchanged. +func WithSendDecorators(ctx context.Context, sendDecorator []SendDecorator) context.Context { + if len(sendDecorator) == 0 { + return ctx + } + return context.WithValue(ctx, ctxSendDecorators{}, sendDecorator) +} + +// GetSendDecorators returns the SendDecorators in the provided context or the provided default SendDecorators. +func GetSendDecorators(ctx context.Context, defaultSendDecorators ...SendDecorator) []SendDecorator { + inCtx := ctx.Value(ctxSendDecorators{}) + if sd, ok := inCtx.([]SendDecorator); ok { + return sd + } + return defaultSendDecorators +} + +// Sender is the interface that wraps the Do method to send HTTP requests. +// +// The standard http.Client conforms to this interface. +type Sender interface { + Do(*http.Request) (*http.Response, error) +} + +// SenderFunc is a method that implements the Sender interface. +type SenderFunc func(*http.Request) (*http.Response, error) + +// Do implements the Sender interface on SenderFunc. +func (sf SenderFunc) Do(r *http.Request) (*http.Response, error) { + return sf(r) +} + +// SendDecorator takes and possibly decorates, by wrapping, a Sender. Decorators may affect the +// http.Request and pass it along or, first, pass the http.Request along then react to the +// http.Response result. +type SendDecorator func(Sender) Sender + +// CreateSender creates, decorates, and returns, as a Sender, the default http.Client. +func CreateSender(decorators ...SendDecorator) Sender { + return DecorateSender(sender(tls.RenegotiateNever), decorators...) 
+} + +// DecorateSender accepts a Sender and a, possibly empty, set of SendDecorators, which is applies to +// the Sender. Decorators are applied in the order received, but their affect upon the request +// depends on whether they are a pre-decorator (change the http.Request and then pass it along) or a +// post-decorator (pass the http.Request along and react to the results in http.Response). +func DecorateSender(s Sender, decorators ...SendDecorator) Sender { + for _, decorate := range decorators { + s = decorate(s) + } + return s +} + +// Send sends, by means of the default http.Client, the passed http.Request, returning the +// http.Response and possible error. It also accepts a, possibly empty, set of SendDecorators which +// it will apply the http.Client before invoking the Do method. +// +// Send is a convenience method and not recommended for production. Advanced users should use +// SendWithSender, passing and sharing their own Sender (e.g., instance of http.Client). +// +// Send will not poll or retry requests. +func Send(r *http.Request, decorators ...SendDecorator) (*http.Response, error) { + return SendWithSender(sender(tls.RenegotiateNever), r, decorators...) +} + +// SendWithSender sends the passed http.Request, through the provided Sender, returning the +// http.Response and possible error. It also accepts a, possibly empty, set of SendDecorators which +// it will apply the http.Client before invoking the Do method. +// +// SendWithSender will not poll or retry requests. +func SendWithSender(s Sender, r *http.Request, decorators ...SendDecorator) (*http.Response, error) { + return DecorateSender(s, decorators...).Do(r) +} + +func sender(renengotiation tls.RenegotiationSupport) Sender { + // Use behaviour compatible with DefaultTransport, but require TLS minimum version. 
+ defaultTransport := http.DefaultTransport.(*http.Transport) + transport := &http.Transport{ + Proxy: defaultTransport.Proxy, + DialContext: defaultTransport.DialContext, + MaxIdleConns: defaultTransport.MaxIdleConns, + IdleConnTimeout: defaultTransport.IdleConnTimeout, + TLSHandshakeTimeout: defaultTransport.TLSHandshakeTimeout, + ExpectContinueTimeout: defaultTransport.ExpectContinueTimeout, + TLSClientConfig: &tls.Config{ + MinVersion: tls.VersionTLS12, + Renegotiation: renengotiation, + }, + } + var roundTripper http.RoundTripper = transport + if tracing.IsEnabled() { + roundTripper = tracing.NewTransport(transport) + } + j, _ := cookiejar.New(nil) + return &http.Client{Jar: j, Transport: roundTripper} +} + +// AfterDelay returns a SendDecorator that delays for the passed time.Duration before +// invoking the Sender. The delay may be terminated by closing the optional channel on the +// http.Request. If canceled, no further Senders are invoked. +func AfterDelay(d time.Duration) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + if !DelayForBackoff(d, 0, r.Context().Done()) { + return nil, fmt.Errorf("autorest: AfterDelay canceled before full delay") + } + return s.Do(r) + }) + } +} + +// AsIs returns a SendDecorator that invokes the passed Sender without modifying the http.Request. +func AsIs() SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + return s.Do(r) + }) + } +} + +// DoCloseIfError returns a SendDecorator that first invokes the passed Sender after which +// it closes the response if the passed Sender returns an error and the response body exists. 
+func DoCloseIfError() SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + resp, err := s.Do(r) + if err != nil { + Respond(resp, ByDiscardingBody(), ByClosing()) + } + return resp, err + }) + } +} + +// DoErrorIfStatusCode returns a SendDecorator that emits an error if the response StatusCode is +// among the set passed. Since these are artificial errors, the response body may still require +// closing. +func DoErrorIfStatusCode(codes ...int) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + resp, err := s.Do(r) + if err == nil && ResponseHasStatusCode(resp, codes...) { + err = NewErrorWithResponse("autorest", "DoErrorIfStatusCode", resp, "%v %v failed with %s", + resp.Request.Method, + resp.Request.URL, + resp.Status) + } + return resp, err + }) + } +} + +// DoErrorUnlessStatusCode returns a SendDecorator that emits an error unless the response +// StatusCode is among the set passed. Since these are artificial errors, the response body +// may still require closing. +func DoErrorUnlessStatusCode(codes ...int) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + resp, err := s.Do(r) + if err == nil && !ResponseHasStatusCode(resp, codes...) { + err = NewErrorWithResponse("autorest", "DoErrorUnlessStatusCode", resp, "%v %v failed with %s", + resp.Request.Method, + resp.Request.URL, + resp.Status) + } + return resp, err + }) + } +} + +// DoPollForStatusCodes returns a SendDecorator that polls if the http.Response contains one of the +// passed status codes. It expects the http.Response to contain a Location header providing the +// URL at which to poll (using GET) and will poll until the time passed is equal to or greater than +// the supplied duration. 
It will delay between requests for the duration specified in the +// RetryAfter header or, if the header is absent, the passed delay. Polling may be canceled by +// closing the optional channel on the http.Request. +func DoPollForStatusCodes(duration time.Duration, delay time.Duration, codes ...int) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (resp *http.Response, err error) { + resp, err = s.Do(r) + + if err == nil && ResponseHasStatusCode(resp, codes...) { + r, err = NewPollingRequestWithContext(r.Context(), resp) + + for err == nil && ResponseHasStatusCode(resp, codes...) { + Respond(resp, + ByDiscardingBody(), + ByClosing()) + resp, err = SendWithSender(s, r, + AfterDelay(GetRetryAfter(resp, delay))) + } + } + + return resp, err + }) + } +} + +// DoRetryForAttempts returns a SendDecorator that retries a failed request for up to the specified +// number of attempts, exponentially backing off between requests using the supplied backoff +// time.Duration (which may be zero). Retrying may be canceled by closing the optional channel on +// the http.Request. +func DoRetryForAttempts(attempts int, backoff time.Duration) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (resp *http.Response, err error) { + rr := NewRetriableRequest(r) + for attempt := 0; attempt < attempts; attempt++ { + err = rr.Prepare() + if err != nil { + return resp, err + } + DrainResponseBody(resp) + resp, err = s.Do(rr.Request()) + if err == nil { + return resp, err + } + if !DelayForBackoff(backoff, attempt, r.Context().Done()) { + return nil, r.Context().Err() + } + } + return resp, err + }) + } +} + +// Count429AsRetry indicates that a 429 response should be included as a retry attempt. +var Count429AsRetry = true + +// Max429Delay is the maximum duration to wait between retries on a 429 if no Retry-After header was received. 
+var Max429Delay time.Duration + +// DoRetryForStatusCodes returns a SendDecorator that retries for specified statusCodes for up to the specified +// number of attempts, exponentially backing off between requests using the supplied backoff +// time.Duration (which may be zero). Retrying may be canceled by cancelling the context on the http.Request. +// NOTE: Code http.StatusTooManyRequests (429) will *not* be counted against the number of attempts. +func DoRetryForStatusCodes(attempts int, backoff time.Duration, codes ...int) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + return doRetryForStatusCodesImpl(s, r, Count429AsRetry, attempts, backoff, 0, codes...) + }) + } +} + +// DoRetryForStatusCodesWithCap returns a SendDecorator that retries for specified statusCodes for up to the +// specified number of attempts, exponentially backing off between requests using the supplied backoff +// time.Duration (which may be zero). To cap the maximum possible delay between iterations specify a value greater +// than zero for cap. Retrying may be canceled by cancelling the context on the http.Request. +func DoRetryForStatusCodesWithCap(attempts int, backoff, cap time.Duration, codes ...int) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + return doRetryForStatusCodesImpl(s, r, Count429AsRetry, attempts, backoff, cap, codes...) + }) + } +} + +func doRetryForStatusCodesImpl(s Sender, r *http.Request, count429 bool, attempts int, backoff, cap time.Duration, codes ...int) (resp *http.Response, err error) { + rr := NewRetriableRequest(r) + // Increment to add the first call (attempts denotes number of retries) + for attempt, delayCount := 0, 0; attempt < attempts+1; { + err = rr.Prepare() + if err != nil { + return + } + DrainResponseBody(resp) + resp, err = s.Do(rr.Request()) + // we want to retry if err is not nil (e.g. 
transient network failure). note that for failed authentication + // resp and err will both have a value, so in this case we don't want to retry as it will never succeed. + if err == nil && !ResponseHasStatusCode(resp, codes...) || IsTokenRefreshError(err) { + return resp, err + } + delayed := DelayWithRetryAfter(resp, r.Context().Done()) + // if this was a 429 set the delay cap as specified. + // applicable only in the absence of a retry-after header. + if resp != nil && resp.StatusCode == http.StatusTooManyRequests { + cap = Max429Delay + } + if !delayed && !DelayForBackoffWithCap(backoff, cap, delayCount, r.Context().Done()) { + return resp, r.Context().Err() + } + // when count429 == false don't count a 429 against the number + // of attempts so that we continue to retry until it succeeds + if count429 || (resp == nil || resp.StatusCode != http.StatusTooManyRequests) { + attempt++ + } + // delay count is tracked separately from attempts to + // ensure that 429 participates in exponential back-off + delayCount++ + } + return resp, err +} + +// DelayWithRetryAfter invokes time.After for the duration specified in the "Retry-After" header. +// The value of Retry-After can be either the number of seconds or a date in RFC1123 format. +// The function returns true after successfully waiting for the specified duration. If there is +// no Retry-After header or the wait is cancelled the return value is false. 
+func DelayWithRetryAfter(resp *http.Response, cancel <-chan struct{}) bool { + if resp == nil { + return false + } + var dur time.Duration + ra := resp.Header.Get("Retry-After") + if retryAfter, _ := strconv.Atoi(ra); retryAfter > 0 { + dur = time.Duration(retryAfter) * time.Second + } else if t, err := time.Parse(time.RFC1123, ra); err == nil { + dur = t.Sub(time.Now()) + } + if dur > 0 { + select { + case <-time.After(dur): + return true + case <-cancel: + return false + } + } + return false +} + +// DoRetryForDuration returns a SendDecorator that retries the request until the total time is equal +// to or greater than the specified duration, exponentially backing off between requests using the +// supplied backoff time.Duration (which may be zero). Retrying may be canceled by closing the +// optional channel on the http.Request. +func DoRetryForDuration(d time.Duration, backoff time.Duration) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (resp *http.Response, err error) { + rr := NewRetriableRequest(r) + end := time.Now().Add(d) + for attempt := 0; time.Now().Before(end); attempt++ { + err = rr.Prepare() + if err != nil { + return resp, err + } + DrainResponseBody(resp) + resp, err = s.Do(rr.Request()) + if err == nil { + return resp, err + } + if !DelayForBackoff(backoff, attempt, r.Context().Done()) { + return nil, r.Context().Err() + } + } + return resp, err + }) + } +} + +// WithLogging returns a SendDecorator that implements simple before and after logging of the +// request. 
+func WithLogging(logger *log.Logger) SendDecorator { + return func(s Sender) Sender { + return SenderFunc(func(r *http.Request) (*http.Response, error) { + logger.Printf("Sending %s %s", r.Method, r.URL) + resp, err := s.Do(r) + if err != nil { + logger.Printf("%s %s received error '%v'", r.Method, r.URL, err) + } else { + logger.Printf("%s %s received %s", r.Method, r.URL, resp.Status) + } + return resp, err + }) + } +} + +// DelayForBackoff invokes time.After for the supplied backoff duration raised to the power of +// passed attempt (i.e., an exponential backoff delay). Backoff duration is in seconds and can set +// to zero for no delay. The delay may be canceled by closing the passed channel. If terminated early, +// returns false. +// Note: Passing attempt 1 will result in doubling "backoff" duration. Treat this as a zero-based attempt +// count. +func DelayForBackoff(backoff time.Duration, attempt int, cancel <-chan struct{}) bool { + return DelayForBackoffWithCap(backoff, 0, attempt, cancel) +} + +// DelayForBackoffWithCap invokes time.After for the supplied backoff duration raised to the power of +// passed attempt (i.e., an exponential backoff delay). Backoff duration is in seconds and can set +// to zero for no delay. To cap the maximum possible delay specify a value greater than zero for cap. +// The delay may be canceled by closing the passed channel. If terminated early, returns false. +// Note: Passing attempt 1 will result in doubling "backoff" duration. Treat this as a zero-based attempt +// count. 
+func DelayForBackoffWithCap(backoff, cap time.Duration, attempt int, cancel <-chan struct{}) bool { + d := time.Duration(backoff.Seconds()*math.Pow(2, float64(attempt))) * time.Second + if cap > 0 && d > cap { + d = cap + } + select { + case <-time.After(d): + return true + case <-cancel: + return false + } +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/utility.go b/vendor/github.com/Azure/go-autorest/autorest/utility.go new file mode 100644 index 0000000..67baab2 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/utility.go @@ -0,0 +1,239 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "encoding/json" + "encoding/xml" + "fmt" + "io" + "io/ioutil" + "net" + "net/http" + "net/url" + "reflect" + "strings" + + "github.com/Azure/go-autorest/autorest/adal" +) + +// EncodedAs is a series of constants specifying various data encodings +type EncodedAs string + +const ( + // EncodedAsJSON states that data is encoded as JSON + EncodedAsJSON EncodedAs = "JSON" + + // EncodedAsXML states that data is encoded as Xml + EncodedAsXML EncodedAs = "XML" +) + +// Decoder defines the decoding method json.Decoder and xml.Decoder share +type Decoder interface { + Decode(v interface{}) error +} + +// NewDecoder creates a new decoder appropriate to the passed encoding. 
+// encodedAs specifies the type of encoding and r supplies the io.Reader containing the +// encoded data. +func NewDecoder(encodedAs EncodedAs, r io.Reader) Decoder { + if encodedAs == EncodedAsJSON { + return json.NewDecoder(r) + } else if encodedAs == EncodedAsXML { + return xml.NewDecoder(r) + } + return nil +} + +// CopyAndDecode decodes the data from the passed io.Reader while making a copy. Having a copy +// is especially useful if there is a chance the data will fail to decode. +// encodedAs specifies the expected encoding, r provides the io.Reader to the data, and v +// is the decoding destination. +func CopyAndDecode(encodedAs EncodedAs, r io.Reader, v interface{}) (bytes.Buffer, error) { + b := bytes.Buffer{} + return b, NewDecoder(encodedAs, io.TeeReader(r, &b)).Decode(v) +} + +// TeeReadCloser returns a ReadCloser that writes to w what it reads from rc. +// It utilizes io.TeeReader to copy the data read and has the same behavior when reading. +// Further, when it is closed, it ensures that rc is closed as well. 
+func TeeReadCloser(rc io.ReadCloser, w io.Writer) io.ReadCloser { + return &teeReadCloser{rc, io.TeeReader(rc, w)} +} + +type teeReadCloser struct { + rc io.ReadCloser + r io.Reader +} + +func (t *teeReadCloser) Read(p []byte) (int, error) { + return t.r.Read(p) +} + +func (t *teeReadCloser) Close() error { + return t.rc.Close() +} + +func containsInt(ints []int, n int) bool { + for _, i := range ints { + if i == n { + return true + } + } + return false +} + +func escapeValueStrings(m map[string]string) map[string]string { + for key, value := range m { + m[key] = url.QueryEscape(value) + } + return m +} + +func ensureValueStrings(mapOfInterface map[string]interface{}) map[string]string { + mapOfStrings := make(map[string]string) + for key, value := range mapOfInterface { + mapOfStrings[key] = ensureValueString(value) + } + return mapOfStrings +} + +func ensureValueString(value interface{}) string { + if value == nil { + return "" + } + switch v := value.(type) { + case string: + return v + case []byte: + return string(v) + default: + return fmt.Sprintf("%v", v) + } +} + +// MapToValues method converts map[string]interface{} to url.Values. +func MapToValues(m map[string]interface{}) url.Values { + v := url.Values{} + for key, value := range m { + x := reflect.ValueOf(value) + if x.Kind() == reflect.Array || x.Kind() == reflect.Slice { + for i := 0; i < x.Len(); i++ { + v.Add(key, ensureValueString(x.Index(i))) + } + } else { + v.Add(key, ensureValueString(value)) + } + } + return v +} + +// AsStringSlice method converts interface{} to []string. +// s must be of type slice or array or an error is returned. +// Each element of s will be converted to its string representation. 
+func AsStringSlice(s interface{}) ([]string, error) { + v := reflect.ValueOf(s) + if v.Kind() != reflect.Slice && v.Kind() != reflect.Array { + return nil, NewError("autorest", "AsStringSlice", "the value's type is not a slice or array.") + } + stringSlice := make([]string, 0, v.Len()) + + for i := 0; i < v.Len(); i++ { + stringSlice = append(stringSlice, fmt.Sprintf("%v", v.Index(i))) + } + return stringSlice, nil +} + +// String method converts interface v to string. If interface is a list, it +// joins list elements using the separator. Note that only sep[0] will be used for +// joining if any separator is specified. +func String(v interface{}, sep ...string) string { + if len(sep) == 0 { + return ensureValueString(v) + } + stringSlice, ok := v.([]string) + if ok == false { + var err error + stringSlice, err = AsStringSlice(v) + if err != nil { + panic(fmt.Sprintf("autorest: Couldn't convert value to a string %s.", err)) + } + } + return ensureValueString(strings.Join(stringSlice, sep[0])) +} + +// Encode method encodes url path and query parameters. +func Encode(location string, v interface{}, sep ...string) string { + s := String(v, sep...) + switch strings.ToLower(location) { + case "path": + return pathEscape(s) + case "query": + return queryEscape(s) + default: + return s + } +} + +func pathEscape(s string) string { + return strings.Replace(url.QueryEscape(s), "+", "%20", -1) +} + +func queryEscape(s string) string { + return url.QueryEscape(s) +} + +// ChangeToGet turns the specified http.Request into a GET (it assumes it wasn't). +// This is mainly useful for long-running operations that use the Azure-AsyncOperation +// header, so we change the initial PUT into a GET to retrieve the final result. 
+func ChangeToGet(req *http.Request) *http.Request { + req.Method = "GET" + req.Body = nil + req.ContentLength = 0 + req.Header.Del("Content-Length") + return req +} + +// IsTokenRefreshError returns true if the specified error implements the TokenRefreshError +// interface. If err is a DetailedError it will walk the chain of Original errors. +func IsTokenRefreshError(err error) bool { + if _, ok := err.(adal.TokenRefreshError); ok { + return true + } + if de, ok := err.(DetailedError); ok { + return IsTokenRefreshError(de.Original) + } + return false +} + +// IsTemporaryNetworkError returns true if the specified error is a temporary network error or false +// if it's not. If the error doesn't implement the net.Error interface the return value is true. +func IsTemporaryNetworkError(err error) bool { + if netErr, ok := err.(net.Error); !ok || (ok && netErr.Temporary()) { + return true + } + return false +} + +// DrainResponseBody reads the response body then closes it. +func DrainResponseBody(resp *http.Response) error { + if resp != nil && resp.Body != nil { + _, err := io.Copy(ioutil.Discard, resp.Body) + resp.Body.Close() + return err + } + return nil +} diff --git a/vendor/github.com/Azure/go-autorest/autorest/version.go b/vendor/github.com/Azure/go-autorest/autorest/version.go new file mode 100644 index 0000000..ee6e48f --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/autorest/version.go @@ -0,0 +1,41 @@ +package autorest + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "fmt" + "runtime" +) + +const number = "v14.1.0" + +var ( + userAgent = fmt.Sprintf("Go/%s (%s-%s) go-autorest/%s", + runtime.Version(), + runtime.GOARCH, + runtime.GOOS, + number, + ) +) + +// UserAgent returns a string containing the Go version, system architecture and OS, and the go-autorest version. +func UserAgent() string { + return userAgent +} + +// Version returns the semantic version (see http://semver.org). +func Version() string { + return number +} diff --git a/vendor/github.com/Azure/go-autorest/logger/LICENSE b/vendor/github.com/Azure/go-autorest/logger/LICENSE new file mode 100644 index 0000000..b9d6a27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/logger/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2015 Microsoft Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/Azure/go-autorest/logger/logger.go b/vendor/github.com/Azure/go-autorest/logger/logger.go new file mode 100644 index 0000000..da09f39 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/logger/logger.go @@ -0,0 +1,328 @@ +package logger + +// Copyright 2017 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "os" + "strings" + "sync" + "time" +) + +// LevelType tells a logger the minimum level to log. When code reports a log entry, +// the LogLevel indicates the level of the log entry. The logger only records entries +// whose level is at least the level it was told to log. See the Log* constants. +// For example, if a logger is configured with LogError, then LogError, LogPanic, +// and LogFatal entries will be logged; lower level entries are ignored. +type LevelType uint32 + +const ( + // LogNone tells a logger not to log any entries passed to it. + LogNone LevelType = iota + + // LogFatal tells a logger to log all LogFatal entries passed to it. + LogFatal + + // LogPanic tells a logger to log all LogPanic and LogFatal entries passed to it. + LogPanic + + // LogError tells a logger to log all LogError, LogPanic and LogFatal entries passed to it. 
+ LogError + + // LogWarning tells a logger to log all LogWarning, LogError, LogPanic and LogFatal entries passed to it. + LogWarning + + // LogInfo tells a logger to log all LogInfo, LogWarning, LogError, LogPanic and LogFatal entries passed to it. + LogInfo + + // LogDebug tells a logger to log all LogDebug, LogInfo, LogWarning, LogError, LogPanic and LogFatal entries passed to it. + LogDebug +) + +const ( + logNone = "NONE" + logFatal = "FATAL" + logPanic = "PANIC" + logError = "ERROR" + logWarning = "WARNING" + logInfo = "INFO" + logDebug = "DEBUG" + logUnknown = "UNKNOWN" +) + +// ParseLevel converts the specified string into the corresponding LevelType. +func ParseLevel(s string) (lt LevelType, err error) { + switch strings.ToUpper(s) { + case logFatal: + lt = LogFatal + case logPanic: + lt = LogPanic + case logError: + lt = LogError + case logWarning: + lt = LogWarning + case logInfo: + lt = LogInfo + case logDebug: + lt = LogDebug + default: + err = fmt.Errorf("bad log level '%s'", s) + } + return +} + +// String implements the stringer interface for LevelType. +func (lt LevelType) String() string { + switch lt { + case LogNone: + return logNone + case LogFatal: + return logFatal + case LogPanic: + return logPanic + case LogError: + return logError + case LogWarning: + return logWarning + case LogInfo: + return logInfo + case LogDebug: + return logDebug + default: + return logUnknown + } +} + +// Filter defines functions for filtering HTTP request/response content. +type Filter struct { + // URL returns a potentially modified string representation of a request URL. + URL func(u *url.URL) string + + // Header returns a potentially modified set of values for the specified key. + // To completely exclude the header key/values return false. + Header func(key string, val []string) (bool, []string) + + // Body returns a potentially modified request/response body. 
+ Body func(b []byte) []byte +} + +func (f Filter) processURL(u *url.URL) string { + if f.URL == nil { + return u.String() + } + return f.URL(u) +} + +func (f Filter) processHeader(k string, val []string) (bool, []string) { + if f.Header == nil { + return true, val + } + return f.Header(k, val) +} + +func (f Filter) processBody(b []byte) []byte { + if f.Body == nil { + return b + } + return f.Body(b) +} + +// Writer defines methods for writing to a logging facility. +type Writer interface { + // Writeln writes the specified message with the standard log entry header and new-line character. + Writeln(level LevelType, message string) + + // Writef writes the specified format specifier with the standard log entry header and no new-line character. + Writef(level LevelType, format string, a ...interface{}) + + // WriteRequest writes the specified HTTP request to the logger if the log level is greater than + // or equal to LogInfo. The request body, if set, is logged at level LogDebug or higher. + // Custom filters can be specified to exclude URL, header, and/or body content from the log. + // By default no request content is excluded. + WriteRequest(req *http.Request, filter Filter) + + // WriteResponse writes the specified HTTP response to the logger if the log level is greater than + // or equal to LogInfo. The response body, if set, is logged at level LogDebug or higher. + // Custom filters can be specified to exclude URL, header, and/or body content from the log. + // By default no response content is excluded. + WriteResponse(resp *http.Response, filter Filter) +} + +// Instance is the default log writer initialized during package init. +// This can be replaced with a custom implementation as required. +var Instance Writer + +// default log level +var logLevel = LogNone + +// Level returns the value specified in AZURE_GO_AUTOREST_LOG_LEVEL. +// If no value was specified the default value is LogNone. 
+// Custom loggers can call this to retrieve the configured log level. +func Level() LevelType { + return logLevel +} + +func init() { + // separated for testing purposes + initDefaultLogger() +} + +func initDefaultLogger() { + // init with nilLogger so callers don't have to do a nil check on Default + Instance = nilLogger{} + llStr := strings.ToLower(os.Getenv("AZURE_GO_SDK_LOG_LEVEL")) + if llStr == "" { + return + } + var err error + logLevel, err = ParseLevel(llStr) + if err != nil { + fmt.Fprintf(os.Stderr, "go-autorest: failed to parse log level: %s\n", err.Error()) + return + } + if logLevel == LogNone { + return + } + // default to stderr + dest := os.Stderr + lfStr := os.Getenv("AZURE_GO_SDK_LOG_FILE") + if strings.EqualFold(lfStr, "stdout") { + dest = os.Stdout + } else if lfStr != "" { + lf, err := os.Create(lfStr) + if err == nil { + dest = lf + } else { + fmt.Fprintf(os.Stderr, "go-autorest: failed to create log file, using stderr: %s\n", err.Error()) + } + } + Instance = fileLogger{ + logLevel: logLevel, + mu: &sync.Mutex{}, + logFile: dest, + } +} + +// the nil logger does nothing +type nilLogger struct{} + +func (nilLogger) Writeln(LevelType, string) {} + +func (nilLogger) Writef(LevelType, string, ...interface{}) {} + +func (nilLogger) WriteRequest(*http.Request, Filter) {} + +func (nilLogger) WriteResponse(*http.Response, Filter) {} + +// A File is used instead of a Logger so the stream can be flushed after every write. 
+type fileLogger struct { + logLevel LevelType + mu *sync.Mutex // for synchronizing writes to logFile + logFile *os.File +} + +func (fl fileLogger) Writeln(level LevelType, message string) { + fl.Writef(level, "%s\n", message) +} + +func (fl fileLogger) Writef(level LevelType, format string, a ...interface{}) { + if fl.logLevel >= level { + fl.mu.Lock() + defer fl.mu.Unlock() + fmt.Fprintf(fl.logFile, "%s %s", entryHeader(level), fmt.Sprintf(format, a...)) + fl.logFile.Sync() + } +} + +func (fl fileLogger) WriteRequest(req *http.Request, filter Filter) { + if req == nil || fl.logLevel < LogInfo { + return + } + b := &bytes.Buffer{} + fmt.Fprintf(b, "%s REQUEST: %s %s\n", entryHeader(LogInfo), req.Method, filter.processURL(req.URL)) + // dump headers + for k, v := range req.Header { + if ok, mv := filter.processHeader(k, v); ok { + fmt.Fprintf(b, "%s: %s\n", k, strings.Join(mv, ",")) + } + } + if fl.shouldLogBody(req.Header, req.Body) { + // dump body + body, err := ioutil.ReadAll(req.Body) + if err == nil { + fmt.Fprintln(b, string(filter.processBody(body))) + if nc, ok := req.Body.(io.Seeker); ok { + // rewind to the beginning + nc.Seek(0, io.SeekStart) + } else { + // recreate the body + req.Body = ioutil.NopCloser(bytes.NewReader(body)) + } + } else { + fmt.Fprintf(b, "failed to read body: %v\n", err) + } + } + fl.mu.Lock() + defer fl.mu.Unlock() + fmt.Fprint(fl.logFile, b.String()) + fl.logFile.Sync() +} + +func (fl fileLogger) WriteResponse(resp *http.Response, filter Filter) { + if resp == nil || fl.logLevel < LogInfo { + return + } + b := &bytes.Buffer{} + fmt.Fprintf(b, "%s RESPONSE: %d %s\n", entryHeader(LogInfo), resp.StatusCode, filter.processURL(resp.Request.URL)) + // dump headers + for k, v := range resp.Header { + if ok, mv := filter.processHeader(k, v); ok { + fmt.Fprintf(b, "%s: %s\n", k, strings.Join(mv, ",")) + } + } + if fl.shouldLogBody(resp.Header, resp.Body) { + // dump body + defer resp.Body.Close() + body, err := ioutil.ReadAll(resp.Body) 
+ if err == nil { + fmt.Fprintln(b, string(filter.processBody(body))) + resp.Body = ioutil.NopCloser(bytes.NewReader(body)) + } else { + fmt.Fprintf(b, "failed to read body: %v\n", err) + } + } + fl.mu.Lock() + defer fl.mu.Unlock() + fmt.Fprint(fl.logFile, b.String()) + fl.logFile.Sync() +} + +// returns true if the provided body should be included in the log +func (fl fileLogger) shouldLogBody(header http.Header, body io.ReadCloser) bool { + ct := header.Get("Content-Type") + return fl.logLevel >= LogDebug && body != nil && !strings.Contains(ct, "application/octet-stream") +} + +// creates standard header for log entries, it contains a timestamp and the log level +func entryHeader(level LevelType) string { + // this format provides a fixed number of digits so the size of the timestamp is constant + return fmt.Sprintf("(%s) %s:", time.Now().Format("2006-01-02T15:04:05.0000000Z07:00"), level.String()) +} diff --git a/vendor/github.com/Azure/go-autorest/tracing/LICENSE b/vendor/github.com/Azure/go-autorest/tracing/LICENSE new file mode 100644 index 0000000..b9d6a27 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/tracing/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2015 Microsoft Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/Azure/go-autorest/tracing/tracing.go b/vendor/github.com/Azure/go-autorest/tracing/tracing.go new file mode 100644 index 0000000..0e7a6e9 --- /dev/null +++ b/vendor/github.com/Azure/go-autorest/tracing/tracing.go @@ -0,0 +1,67 @@ +package tracing + +// Copyright 2018 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import ( + "context" + "net/http" +) + +// Tracer represents an HTTP tracing facility. +type Tracer interface { + NewTransport(base *http.Transport) http.RoundTripper + StartSpan(ctx context.Context, name string) context.Context + EndSpan(ctx context.Context, httpStatusCode int, err error) +} + +var ( + tracer Tracer +) + +// Register will register the provided Tracer. Pass nil to unregister a Tracer. +func Register(t Tracer) { + tracer = t +} + +// IsEnabled returns true if a Tracer has been registered. +func IsEnabled() bool { + return tracer != nil +} + +// NewTransport creates a new instrumenting http.RoundTripper for the +// registered Tracer. If no Tracer has been registered it returns nil. +func NewTransport(base *http.Transport) http.RoundTripper { + if tracer != nil { + return tracer.NewTransport(base) + } + return nil +} + +// StartSpan starts a trace span with the specified name, associating it with the +// provided context. Has no effect if a Tracer has not been registered. 
+func StartSpan(ctx context.Context, name string) context.Context { + if tracer != nil { + return tracer.StartSpan(ctx, name) + } + return ctx +} + +// EndSpan ends a previously started span stored in the context. +// Has no effect if a Tracer has not been registered. +func EndSpan(ctx context.Context, httpStatusCode int, err error) { + if tracer != nil { + tracer.EndSpan(ctx, httpStatusCode, err) + } +} diff --git a/vendor/github.com/BurntSushi/locker/Makefile b/vendor/github.com/BurntSushi/locker/Makefile new file mode 100644 index 0000000..ca043d9 --- /dev/null +++ b/vendor/github.com/BurntSushi/locker/Makefile @@ -0,0 +1,7 @@ +build: + go build + +push: + git push origin master + git push github master + diff --git a/vendor/github.com/BurntSushi/locker/README.md b/vendor/github.com/BurntSushi/locker/README.md new file mode 100644 index 0000000..126b05e --- /dev/null +++ b/vendor/github.com/BurntSushi/locker/README.md @@ -0,0 +1,21 @@ +Package locker is a simple package to manage named ReadWrite mutexes. These +appear to be especially useful for synchronizing access to session based +information in web applications. + +The common use case is to use the package level functions, which use a package +level set of locks (safe to use from multiple goroutines simultaneously). +However, you may also create a new separate set of locks. + +All locks are implemented with read-write mutexes. To use them like a regular +mutex, simply ignore the RLock/RUnlock functions. + + +### Installation + + go get github.com/BurntSushi/locker + + +### Documentation + +http://godoc.org/github.com/BurntSushi/locker + diff --git a/vendor/github.com/BurntSushi/locker/UNLICENSE b/vendor/github.com/BurntSushi/locker/UNLICENSE new file mode 100644 index 0000000..68a49da --- /dev/null +++ b/vendor/github.com/BurntSushi/locker/UNLICENSE @@ -0,0 +1,24 @@ +This is free and unencumbered software released into the public domain. 
+ +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to diff --git a/vendor/github.com/BurntSushi/locker/locker.go b/vendor/github.com/BurntSushi/locker/locker.go new file mode 100644 index 0000000..c7b9b88 --- /dev/null +++ b/vendor/github.com/BurntSushi/locker/locker.go @@ -0,0 +1,108 @@ +/* +Package locker is a simple package to manage named ReadWrite mutexes. These +appear to be especially useful for synchronizing access to session based +information in web applications. + +The common use case is to use the package level functions, which use a package +level set of locks (safe to use from multiple goroutines simultaneously). +However, you may also create a new separate set of locks. + +All locks are implemented with read-write mutexes. To use them like a regular +mutex, simply ignore the RLock/RUnlock functions. 
+*/ +package locker + +// BUG(burntsushi): The locker here can grow without bound in long running +// programs. Since it's intended to be used in web applications, this is a +// major problem. Figure out a way to keep the locker lean. + +import ( + "fmt" + "sync" +) + +// Locker represents the set of named ReadWrite mutexes. It is safe to access +// from multiple goroutines simultaneously. +type Locker struct { + locks map[string]*sync.RWMutex + locksRW *sync.RWMutex +} + +var locker *Locker + +func init() { + locker = NewLocker() +} + +func Lock(key string) { locker.Lock(key) } +func Unlock(key string) { locker.Unlock(key) } +func RLock(key string) { locker.RLock(key) } +func RUnlock(key string) { locker.RUnlock(key) } + +func NewLocker() *Locker { + return &Locker{ + locks: make(map[string]*sync.RWMutex), + locksRW: new(sync.RWMutex), + } +} + +func (lker *Locker) Lock(key string) { + lk, ok := lker.getLock(key) + if !ok { + lk = lker.newLock(key) + } + lk.Lock() +} + +func (lker *Locker) Unlock(key string) { + lk, ok := lker.getLock(key) + if !ok { + panic(fmt.Errorf("BUG: Lock for key '%s' not initialized.", key)) + } + lk.Unlock() +} + +func (lker *Locker) RLock(key string) { + lk, ok := lker.getLock(key) + if !ok { + lk = lker.newLock(key) + } + lk.RLock() +} + +func (lker *Locker) RUnlock(key string) { + lk, ok := lker.getLock(key) + if !ok { + panic(fmt.Errorf("BUG: Lock for key '%s' not initialized.", key)) + } + lk.RUnlock() +} + +func (lker *Locker) newLock(key string) *sync.RWMutex { + lker.locksRW.Lock() + defer lker.locksRW.Unlock() + + if lk, ok := lker.locks[key]; ok { + return lk + } + lk := new(sync.RWMutex) + lker.locks[key] = lk + return lk +} + +func (lker *Locker) getLock(key string) (*sync.RWMutex, bool) { + lker.locksRW.RLock() + defer lker.locksRW.RUnlock() + + lock, ok := lker.locks[key] + return lock, ok +} + +func (lker *Locker) deleteLock(key string) { + lker.locksRW.Lock() + defer lker.locksRW.Unlock() + + if _, ok := lker.locks[key]; 
ok { + delete(lker.locks, key) + } +} diff --git a/vendor/github.com/BurntSushi/locker/session.vim b/vendor/github.com/BurntSushi/locker/session.vim new file mode 100644 index 0000000..562164b --- /dev/null +++ b/vendor/github.com/BurntSushi/locker/session.vim @@ -0,0 +1 @@ +au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/vendor/github.com/RoaringBitmap/roaring/.drone.yml b/vendor/github.com/RoaringBitmap/roaring/.drone.yml new file mode 100644 index 0000000..698cd0e --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/.drone.yml @@ -0,0 +1,20 @@ +kind: pipeline +name: default + +workspace: + base: /go + path: src/github.com/RoaringBitmap/roaring + +steps: +- name: test + image: golang + commands: + - go get -t + - go test + - go test -race -run TestConcurrent* + - go build -tags appengine + - go test -tags appengine + - GOARCH=386 go build + - GOARCH=386 go test + - GOARCH=arm go build + - GOARCH=arm64 go build diff --git a/vendor/github.com/RoaringBitmap/roaring/.gitignore b/vendor/github.com/RoaringBitmap/roaring/.gitignore new file mode 100644 index 0000000..b7943ab --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/.gitignore @@ -0,0 +1,6 @@ +*~ +roaring-fuzz.zip +workdir +coverage.out +testdata/all3.classic +testdata/all3.msgp.snappy diff --git a/vendor/github.com/RoaringBitmap/roaring/.gitmodules b/vendor/github.com/RoaringBitmap/roaring/.gitmodules new file mode 100644 index 0000000..e69de29 diff --git a/vendor/github.com/RoaringBitmap/roaring/.travis.yml b/vendor/github.com/RoaringBitmap/roaring/.travis.yml new file mode 100644 index 0000000..c178043 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/.travis.yml @@ -0,0 +1,37 @@ +language: go +sudo: false +install: +- go get -t github.com/RoaringBitmap/roaring +- go get -t golang.org/x/tools/cmd/cover +- go get -t github.com/mattn/goveralls +- go get -t github.com/mschoch/smat +notifications: + email: false +go: +- "1.7.x" +- "1.8.x" +- "1.9.x" +- "1.10.x" +- 
"1.11.x" +- "1.12.x" +- "1.13.x" +- tip + +# whitelist +branches: + only: + - master +script: +- goveralls -v -service travis-ci -ignore arraycontainer_gen.go,bitmapcontainer_gen.go,rle16_gen.go,rle_gen.go,roaringarray_gen.go,rle.go || go test +- go test -race -run TestConcurrent* +- go build -tags appengine +- go test -tags appengine +- GOARCH=arm64 go build +- GOARCH=386 go build +- GOARCH=386 go test +- GOARCH=arm go build +- GOARCH=arm64 go build + +matrix: + allow_failures: + - go: tip diff --git a/vendor/github.com/RoaringBitmap/roaring/AUTHORS b/vendor/github.com/RoaringBitmap/roaring/AUTHORS new file mode 100644 index 0000000..26ec99d --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/AUTHORS @@ -0,0 +1,11 @@ +# This is the official list of roaring authors for copyright purposes. + +Todd Gruben (@tgruben), +Daniel Lemire (@lemire), +Elliot Murphy (@statik), +Bob Potter (@bpot), +Tyson Maly (@tvmaly), +Will Glynn (@willglynn), +Brent Pedersen (@brentp) +Maciej BiÅ‚as (@maciej), +Joe Nall (@joenall) diff --git a/vendor/github.com/RoaringBitmap/roaring/CONTRIBUTORS b/vendor/github.com/RoaringBitmap/roaring/CONTRIBUTORS new file mode 100644 index 0000000..b1e3a37 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/CONTRIBUTORS @@ -0,0 +1,16 @@ +# This is the official list of roaring contributors + +Todd Gruben (@tgruben), +Daniel Lemire (@lemire), +Elliot Murphy (@statik), +Bob Potter (@bpot), +Tyson Maly (@tvmaly), +Will Glynn (@willglynn), +Brent Pedersen (@brentp), +Jason E. 
Aten (@glycerine), +Vali Malinoiu (@0x4139), +Forud Ghafouri (@fzerorubigd), +Joe Nall (@joenall), +(@fredim), +Edd Robinson (@e-dard), +Alexander Petrov (@alldroll) diff --git a/vendor/github.com/RoaringBitmap/roaring/LICENSE b/vendor/github.com/RoaringBitmap/roaring/LICENSE new file mode 100644 index 0000000..3ccdd00 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/LICENSE @@ -0,0 +1,235 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2016 by the authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +================================================================================ + +Portions of runcontainer.go are from the Go standard library, which is licensed +under: + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/RoaringBitmap/roaring/LICENSE-2.0.txt b/vendor/github.com/RoaringBitmap/roaring/LICENSE-2.0.txt new file mode 100644 index 0000000..aff5f99 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2016 by the authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/RoaringBitmap/roaring/Makefile b/vendor/github.com/RoaringBitmap/roaring/Makefile new file mode 100644 index 0000000..906bd72 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/Makefile @@ -0,0 +1,111 @@ +.PHONY: help all test format fmtcheck vet lint qa deps clean nuke ser fetch-real-roaring-datasets + + + + + + + + +# Display general help about this command +help: + @echo "" + @echo "The following commands are available:" + @echo "" + @echo " make qa : Run all the tests" + @echo " make test : Run the unit tests" + @echo "" + @echo " make format : Format the source code" + @echo " make fmtcheck : Check if the source code has been formatted" + @echo " make vet : Check for suspicious constructs" + @echo " make lint : Check for style errors" + @echo "" + @echo " make deps : Get the dependencies" + @echo " make clean : Remove any build artifact" + @echo " make nuke : Deletes any intermediate file" + @echo "" + @echo " make fuzz-smat : Fuzzy testing with smat" + @echo " make fuzz-stream : Fuzzy testing with stream deserialization" + @echo " make fuzz-buffer : Fuzzy testing with buffer deserialization" + @echo "" + +# Alias for help target +all: help +test: + go test + go test -race -run TestConcurrent* +# Format the source code +format: + @find ./ -type f -name "*.go" -exec gofmt -w {} \; + +# Check if the source code has been formatted +fmtcheck: + @mkdir -p 
target + @find ./ -type f -name "*.go" -exec gofmt -d {} \; | tee target/format.diff + @test ! -s target/format.diff || { echo "ERROR: the source code has not been formatted - please use 'make format' or 'gofmt'"; exit 1; } + +# Check for syntax errors +vet: + GOPATH=$(GOPATH) go vet ./... + +# Check for style errors +lint: + GOPATH=$(GOPATH) PATH=$(GOPATH)/bin:$(PATH) golint ./... + + + + + +# Alias to run all quality-assurance checks +qa: fmtcheck test vet lint + +# --- INSTALL --- + +# Get the dependencies +deps: + GOPATH=$(GOPATH) go get github.com/stretchr/testify + GOPATH=$(GOPATH) go get github.com/willf/bitset + GOPATH=$(GOPATH) go get github.com/golang/lint/golint + GOPATH=$(GOPATH) go get github.com/mschoch/smat + GOPATH=$(GOPATH) go get github.com/dvyukov/go-fuzz/go-fuzz + GOPATH=$(GOPATH) go get github.com/dvyukov/go-fuzz/go-fuzz-build + GOPATH=$(GOPATH) go get github.com/glycerine/go-unsnap-stream + GOPATH=$(GOPATH) go get github.com/philhofer/fwd + GOPATH=$(GOPATH) go get github.com/jtolds/gls + +fuzz-smat: + go test -tags=gofuzz -run=TestGenerateSmatCorpus + go-fuzz-build -func FuzzSmat github.com/RoaringBitmap/roaring + go-fuzz -bin=./roaring-fuzz.zip -workdir=workdir/ -timeout=200 + + +fuzz-stream: + go-fuzz-build -func FuzzSerializationStream github.com/RoaringBitmap/roaring + go-fuzz -bin=./roaring-fuzz.zip -workdir=workdir/ -timeout=200 + + +fuzz-buffer: + go-fuzz-build -func FuzzSerializationBuffer github.com/RoaringBitmap/roaring + go-fuzz -bin=./roaring-fuzz.zip -workdir=workdir/ -timeout=200 + +# Remove any build artifact +clean: + GOPATH=$(GOPATH) go clean ./... + +# Deletes any intermediate file +nuke: + rm -rf ./target + GOPATH=$(GOPATH) go clean -i ./... 
+ + +ser: + go generate + +cover: + go test -coverprofile=coverage.out + go tool cover -html=coverage.out + +fetch-real-roaring-datasets: + # pull github.com/RoaringBitmap/real-roaring-datasets -> testdata/real-roaring-datasets + git submodule init + git submodule update diff --git a/vendor/github.com/RoaringBitmap/roaring/README.md b/vendor/github.com/RoaringBitmap/roaring/README.md new file mode 100644 index 0000000..94fdf05 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/README.md @@ -0,0 +1,253 @@ +roaring [![Build Status](https://travis-ci.org/RoaringBitmap/roaring.png)](https://travis-ci.org/RoaringBitmap/roaring) [![Coverage Status](https://coveralls.io/repos/github/RoaringBitmap/roaring/badge.svg?branch=master)](https://coveralls.io/github/RoaringBitmap/roaring?branch=master) [![GoDoc](https://godoc.org/github.com/RoaringBitmap/roaring?status.svg)](https://godoc.org/github.com/RoaringBitmap/roaring) [![Go Report Card](https://goreportcard.com/badge/RoaringBitmap/roaring)](https://goreportcard.com/report/github.com/RoaringBitmap/roaring) +[![Build Status](https://cloud.drone.io/api/badges/RoaringBitmap/roaring/status.svg)](https://cloud.drone.io/RoaringBitmap/roaring) +============= + +This is a go version of the Roaring bitmap data structure. + + + +Roaring bitmaps are used by several major systems such as [Apache Lucene][lucene] and derivative systems such as [Solr][solr] and +[Elasticsearch][elasticsearch], [Apache Druid (Incubating)][druid], [LinkedIn Pinot][pinot], [Netflix Atlas][atlas], [Apache Spark][spark], [OpenSearchServer][opensearchserver], [Cloud Torrent][cloudtorrent], [Whoosh][whoosh], [Pilosa][pilosa], [Microsoft Visual Studio Team Services (VSTS)][vsts], and eBay's [Apache Kylin][kylin]. 
+ +[lucene]: https://lucene.apache.org/ +[solr]: https://lucene.apache.org/solr/ +[elasticsearch]: https://www.elastic.co/products/elasticsearch +[druid]: https://druid.apache.org/ +[spark]: https://spark.apache.org/ +[opensearchserver]: http://www.opensearchserver.com +[cloudtorrent]: https://github.com/jpillora/cloud-torrent +[whoosh]: https://bitbucket.org/mchaput/whoosh/wiki/Home +[pilosa]: https://www.pilosa.com/ +[kylin]: http://kylin.apache.org/ +[pinot]: http://github.com/linkedin/pinot/wiki +[vsts]: https://www.visualstudio.com/team-services/ +[atlas]: https://github.com/Netflix/atlas + +Roaring bitmaps are found to work well in many important applications: + +> Use Roaring for bitmap compression whenever possible. Do not use other bitmap compression methods ([Wang et al., SIGMOD 2017](http://db.ucsd.edu/wp-content/uploads/2017/03/sidm338-wangA.pdf)) + + +The ``roaring`` Go library is used by +* [Cloud Torrent](https://github.com/jpillora/cloud-torrent) +* [runv](https://github.com/hyperhq/runv) +* [InfluxDB](https://www.influxdata.com) +* [Pilosa](https://www.pilosa.com/) +* [Bleve](http://www.blevesearch.com) +* [lindb](https://github.com/lindb/lindb) +* [Elasticell](https://github.com/deepfabric/elasticell) +* [SourceGraph](https://github.com/sourcegraph/sourcegraph) +* [M3](https://github.com/m3db/m3) +* [trident](https://github.com/NetApp/trident) + + +This library is used in production in several systems, it is part of the [Awesome Go collection](https://awesome-go.com). + + +There are also [Java](https://github.com/RoaringBitmap/RoaringBitmap) and [C/C++](https://github.com/RoaringBitmap/CRoaring) versions. The Java, C, C++ and Go version are binary compatible: e.g, you can save bitmaps +from a Java program and load them back in Go, and vice versa. We have a [format specification](https://github.com/RoaringBitmap/RoaringFormatSpec). + + +This code is licensed under Apache License, Version 2.0 (ASL2.0). + +Copyright 2016-... by the authors. 
+ + +### References + +- Daniel Lemire, Owen Kaser, Nathan Kurz, Luca Deri, Chris O'Hara, François Saint-Jacques, Gregory Ssi-Yan-Kai, Roaring Bitmaps: Implementation of an Optimized Software Library, Software: Practice and Experience 48 (4), 2018 [arXiv:1709.07821](https://arxiv.org/abs/1709.07821) +- Samy Chambi, Daniel Lemire, Owen Kaser, Robert Godin, +Better bitmap performance with Roaring bitmaps, +Software: Practice and Experience 46 (5), 2016. +http://arxiv.org/abs/1402.6407 This paper used data from http://lemire.me/data/realroaring2014.html +- Daniel Lemire, Gregory Ssi-Yan-Kai, Owen Kaser, Consistently faster and smaller compressed bitmaps with Roaring, Software: Practice and Experience 46 (11), 2016. http://arxiv.org/abs/1603.06549 + + +### Dependencies + +Dependencies are fetched automatically by giving the `-t` flag to `go get`. + +they include + - github.com/willf/bitset + - github.com/mschoch/smat + - github.com/glycerine/go-unsnap-stream + - github.com/philhofer/fwd + - github.com/jtolds/gls + +Note that the smat library requires Go 1.6 or better. + +#### Installation + + - go get -t github.com/RoaringBitmap/roaring + + +### Example + +Here is a simplified but complete example: + +```go +package main + +import ( + "fmt" + "github.com/RoaringBitmap/roaring" + "bytes" +) + + +func main() { + // example inspired by https://github.com/fzandona/goroar + fmt.Println("==roaring==") + rb1 := roaring.BitmapOf(1, 2, 3, 4, 5, 100, 1000) + fmt.Println(rb1.String()) + + rb2 := roaring.BitmapOf(3, 4, 1000) + fmt.Println(rb2.String()) + + rb3 := roaring.New() + fmt.Println(rb3.String()) + + fmt.Println("Cardinality: ", rb1.GetCardinality()) + + fmt.Println("Contains 3? 
", rb1.Contains(3)) + + rb1.And(rb2) + + rb3.Add(1) + rb3.Add(5) + + rb3.Or(rb1) + + // computes union of the three bitmaps in parallel using 4 workers + roaring.ParOr(4, rb1, rb2, rb3) + // computes intersection of the three bitmaps in parallel using 4 workers + roaring.ParAnd(4, rb1, rb2, rb3) + + + // prints 1, 3, 4, 5, 1000 + i := rb3.Iterator() + for i.HasNext() { + fmt.Println(i.Next()) + } + fmt.Println() + + // next we include an example of serialization + buf := new(bytes.Buffer) + rb1.WriteTo(buf) // we omit error handling + newrb:= roaring.New() + newrb.ReadFrom(buf) + if rb1.Equals(newrb) { + fmt.Println("I wrote the content to a byte stream and read it back.") + } + // you can iterate over bitmaps using ReverseIterator(), Iterator, ManyIterator() +} +``` + +If you wish to use serialization and handle errors, you might want to +consider the following sample of code: + +```go + rb := BitmapOf(1, 2, 3, 4, 5, 100, 1000) + buf := new(bytes.Buffer) + size,err:=rb.WriteTo(buf) + if err != nil { + t.Errorf("Failed writing") + } + newrb:= New() + size,err=newrb.ReadFrom(buf) + if err != nil { + t.Errorf("Failed reading") + } + if ! rb.Equals(newrb) { + t.Errorf("Cannot retrieve serialized version") + } +``` + +Given N integers in [0,x), then the serialized size in bytes of +a Roaring bitmap should never exceed this bound: + +`` 8 + 9 * ((long)x+65535)/65536 + 2 * N `` + +That is, given a fixed overhead for the universe size (x), Roaring +bitmaps never use more than 2 bytes per integer. You can call +``BoundSerializedSizeInBytes`` for a more precise estimate. + + +### Documentation + +Current documentation is available at http://godoc.org/github.com/RoaringBitmap/roaring + +### Goroutine safety + +In general, it should not generally be considered safe to access +the same bitmaps using different goroutines--they are left +unsynchronized for performance. Should you want to access +a Bitmap from more than one goroutine, you should +provide synchronization. 
Typically this is done by using channels to pass +the *Bitmap around (in Go style; so there is only ever one owner), +or by using `sync.Mutex` to serialize operations on Bitmaps. + +### Coverage + +We test our software. For a report on our test coverage, see + +https://coveralls.io/github/RoaringBitmap/roaring?branch=master + +### Benchmark + +Type + + go test -bench Benchmark -run - + +To run benchmarks on [Real Roaring Datasets](https://github.com/RoaringBitmap/real-roaring-datasets) +run the following: + +```sh +go get github.com/RoaringBitmap/real-roaring-datasets +BENCH_REAL_DATA=1 go test -bench BenchmarkRealData -run - +``` + +### Iterative use + +You can use roaring with gore: + +- go get -u github.com/motemen/gore +- Make sure that ``$GOPATH/bin`` is in your ``$PATH``. +- go get github.com/RoaringBitmap/roaring + +```go +$ gore +gore version 0.2.6 :help for help +gore> :import github.com/RoaringBitmap/roaring +gore> x:=roaring.New() +gore> x.Add(1) +gore> x.String() +"{1}" +``` + + +### Fuzzy testing + +You can help us test further the library with fuzzy testing: + + go get github.com/dvyukov/go-fuzz/go-fuzz + go get github.com/dvyukov/go-fuzz/go-fuzz-build + go test -tags=gofuzz -run=TestGenerateSmatCorpus + go-fuzz-build github.com/RoaringBitmap/roaring + go-fuzz -bin=./roaring-fuzz.zip -workdir=workdir/ -timeout=200 + +Let it run, and if the # of crashers is > 0, check out the reports in +the workdir where you should be able to find the panic goroutine stack +traces. + +### Alternative in Go + +There is a Go version wrapping the C/C++ implementation https://github.com/RoaringBitmap/gocroaring + +For an alternative implementation in Go, see https://github.com/fzandona/goroar +The two versions were written independently. 
+ + +### Mailing list/discussion group + +https://groups.google.com/forum/#!forum/roaring-bitmaps diff --git a/vendor/github.com/RoaringBitmap/roaring/arraycontainer.go b/vendor/github.com/RoaringBitmap/roaring/arraycontainer.go new file mode 100644 index 0000000..eb124f3 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/arraycontainer.go @@ -0,0 +1,980 @@ +package roaring + +import ( + "fmt" +) + +//go:generate msgp -unexported + +type arrayContainer struct { + content []uint16 +} + +func (ac *arrayContainer) String() string { + s := "{" + for it := ac.getShortIterator(); it.hasNext(); { + s += fmt.Sprintf("%v, ", it.next()) + } + return s + "}" +} + +func (ac *arrayContainer) fillLeastSignificant16bits(x []uint32, i int, mask uint32) { + for k := 0; k < len(ac.content); k++ { + x[k+i] = uint32(ac.content[k]) | mask + } +} + +func (ac *arrayContainer) iterate(cb func(x uint16) bool) bool { + iterator := shortIterator{ac.content, 0} + + for iterator.hasNext() { + if !cb(iterator.next()) { + return false + } + } + + return true +} + +func (ac *arrayContainer) getShortIterator() shortPeekable { + return &shortIterator{ac.content, 0} +} + +func (ac *arrayContainer) getReverseIterator() shortIterable { + return &reverseIterator{ac.content, len(ac.content) - 1} +} + +func (ac *arrayContainer) getManyIterator() manyIterable { + return &shortIterator{ac.content, 0} +} + +func (ac *arrayContainer) minimum() uint16 { + return ac.content[0] // assume not empty +} + +func (ac *arrayContainer) maximum() uint16 { + return ac.content[len(ac.content)-1] // assume not empty +} + +func (ac *arrayContainer) getSizeInBytes() int { + return ac.getCardinality() * 2 +} + +func (ac *arrayContainer) serializedSizeInBytes() int { + return ac.getCardinality() * 2 +} + +func arrayContainerSizeInBytes(card int) int { + return card * 2 +} + +// add the values in the range [firstOfRange,endx) +func (ac *arrayContainer) iaddRange(firstOfRange, endx int) container { + if firstOfRange >= 
endx { + return ac + } + indexstart := binarySearch(ac.content, uint16(firstOfRange)) + if indexstart < 0 { + indexstart = -indexstart - 1 + } + indexend := binarySearch(ac.content, uint16(endx-1)) + if indexend < 0 { + indexend = -indexend - 1 + } else { + indexend++ + } + rangelength := endx - firstOfRange + newcardinality := indexstart + (ac.getCardinality() - indexend) + rangelength + if newcardinality > arrayDefaultMaxSize { + a := ac.toBitmapContainer() + return a.iaddRange(firstOfRange, endx) + } + if cap(ac.content) < newcardinality { + tmp := make([]uint16, newcardinality, newcardinality) + copy(tmp[:indexstart], ac.content[:indexstart]) + copy(tmp[indexstart+rangelength:], ac.content[indexend:]) + + ac.content = tmp + } else { + ac.content = ac.content[:newcardinality] + copy(ac.content[indexstart+rangelength:], ac.content[indexend:]) + + } + for k := 0; k < rangelength; k++ { + ac.content[k+indexstart] = uint16(firstOfRange + k) + } + return ac +} + +// remove the values in the range [firstOfRange,endx) +func (ac *arrayContainer) iremoveRange(firstOfRange, endx int) container { + if firstOfRange >= endx { + return ac + } + indexstart := binarySearch(ac.content, uint16(firstOfRange)) + if indexstart < 0 { + indexstart = -indexstart - 1 + } + indexend := binarySearch(ac.content, uint16(endx-1)) + if indexend < 0 { + indexend = -indexend - 1 + } else { + indexend++ + } + rangelength := indexend - indexstart + answer := ac + copy(answer.content[indexstart:], ac.content[indexstart+rangelength:]) + answer.content = answer.content[:ac.getCardinality()-rangelength] + return answer +} + +// flip the values in the range [firstOfRange,endx) +func (ac *arrayContainer) not(firstOfRange, endx int) container { + if firstOfRange >= endx { + return ac.clone() + } + return ac.notClose(firstOfRange, endx-1) // remove everything in [firstOfRange,endx-1] +} + +// flip the values in the range [firstOfRange,lastOfRange] +func (ac *arrayContainer) notClose(firstOfRange, 
lastOfRange int) container { + if firstOfRange > lastOfRange { // unlike add and remove, not uses an inclusive range [firstOfRange,lastOfRange] + return ac.clone() + } + + // determine the span of array indices to be affected^M + startIndex := binarySearch(ac.content, uint16(firstOfRange)) + if startIndex < 0 { + startIndex = -startIndex - 1 + } + lastIndex := binarySearch(ac.content, uint16(lastOfRange)) + if lastIndex < 0 { + lastIndex = -lastIndex - 2 + } + currentValuesInRange := lastIndex - startIndex + 1 + spanToBeFlipped := lastOfRange - firstOfRange + 1 + newValuesInRange := spanToBeFlipped - currentValuesInRange + cardinalityChange := newValuesInRange - currentValuesInRange + newCardinality := len(ac.content) + cardinalityChange + if newCardinality > arrayDefaultMaxSize { + return ac.toBitmapContainer().not(firstOfRange, lastOfRange+1) + } + answer := newArrayContainer() + answer.content = make([]uint16, newCardinality, newCardinality) //a hack for sure + + copy(answer.content, ac.content[:startIndex]) + outPos := startIndex + inPos := startIndex + valInRange := firstOfRange + for ; valInRange <= lastOfRange && inPos <= lastIndex; valInRange++ { + if uint16(valInRange) != ac.content[inPos] { + answer.content[outPos] = uint16(valInRange) + outPos++ + } else { + inPos++ + } + } + + for ; valInRange <= lastOfRange; valInRange++ { + answer.content[outPos] = uint16(valInRange) + outPos++ + } + + for i := lastIndex + 1; i < len(ac.content); i++ { + answer.content[outPos] = ac.content[i] + outPos++ + } + answer.content = answer.content[:newCardinality] + return answer + +} + +func (ac *arrayContainer) equals(o container) bool { + + srb, ok := o.(*arrayContainer) + if ok { + // Check if the containers are the same object. 
+ if ac == srb { + return true + } + + if len(srb.content) != len(ac.content) { + return false + } + + for i, v := range ac.content { + if v != srb.content[i] { + return false + } + } + return true + } + + // use generic comparison + bCard := o.getCardinality() + aCard := ac.getCardinality() + if bCard != aCard { + return false + } + + ait := ac.getShortIterator() + bit := o.getShortIterator() + for ait.hasNext() { + if bit.next() != ait.next() { + return false + } + } + return true +} + +func (ac *arrayContainer) toBitmapContainer() *bitmapContainer { + bc := newBitmapContainer() + bc.loadData(ac) + return bc + +} +func (ac *arrayContainer) iadd(x uint16) (wasNew bool) { + // Special case adding to the end of the container. + l := len(ac.content) + if l > 0 && l < arrayDefaultMaxSize && ac.content[l-1] < x { + ac.content = append(ac.content, x) + return true + } + + loc := binarySearch(ac.content, x) + + if loc < 0 { + s := ac.content + i := -loc - 1 + s = append(s, 0) + copy(s[i+1:], s[i:]) + s[i] = x + ac.content = s + return true + } + return false +} + +func (ac *arrayContainer) iaddReturnMinimized(x uint16) container { + // Special case adding to the end of the container. + l := len(ac.content) + if l > 0 && l < arrayDefaultMaxSize && ac.content[l-1] < x { + ac.content = append(ac.content, x) + return ac + } + + loc := binarySearch(ac.content, x) + + if loc < 0 { + if len(ac.content) >= arrayDefaultMaxSize { + a := ac.toBitmapContainer() + a.iadd(x) + return a + } + s := ac.content + i := -loc - 1 + s = append(s, 0) + copy(s[i+1:], s[i:]) + s[i] = x + ac.content = s + } + return ac +} + +// iremoveReturnMinimized is allowed to change the return type to minimize storage. +func (ac *arrayContainer) iremoveReturnMinimized(x uint16) container { + ac.iremove(x) + return ac +} + +func (ac *arrayContainer) iremove(x uint16) bool { + loc := binarySearch(ac.content, x) + if loc >= 0 { + s := ac.content + s = append(s[:loc], s[loc+1:]...) 
+ ac.content = s + return true + } + return false +} + +func (ac *arrayContainer) remove(x uint16) container { + out := &arrayContainer{make([]uint16, len(ac.content))} + copy(out.content, ac.content[:]) + + loc := binarySearch(out.content, x) + if loc >= 0 { + s := out.content + s = append(s[:loc], s[loc+1:]...) + out.content = s + } + return out +} + +func (ac *arrayContainer) or(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.orArray(x) + case *bitmapContainer: + return x.orArray(ac) + case *runContainer16: + if x.isFull() { + return x.clone() + } + return x.orArray(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) orCardinality(a container) int { + switch x := a.(type) { + case *arrayContainer: + return ac.orArrayCardinality(x) + case *bitmapContainer: + return x.orArrayCardinality(ac) + case *runContainer16: + return x.orArrayCardinality(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) ior(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.iorArray(x) + case *bitmapContainer: + return a.(*bitmapContainer).orArray(ac) + //return ac.iorBitmap(x) // note: this does not make sense + case *runContainer16: + if x.isFull() { + return x.clone() + } + return ac.iorRun16(x) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) iorArray(value2 *arrayContainer) container { + value1 := ac + len1 := value1.getCardinality() + len2 := value2.getCardinality() + maxPossibleCardinality := len1 + len2 + if maxPossibleCardinality > arrayDefaultMaxSize { // it could be a bitmap! 
+ bc := newBitmapContainer() + for k := 0; k < len(value2.content); k++ { + v := value2.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + for k := 0; k < len(ac.content); k++ { + v := ac.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + bc.cardinality = int(popcntSlice(bc.bitmap)) + if bc.cardinality <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc + } + if maxPossibleCardinality > cap(value1.content) { + newcontent := make([]uint16, 0, maxPossibleCardinality) + copy(newcontent[len2:maxPossibleCardinality], ac.content[0:len1]) + ac.content = newcontent + } else { + copy(ac.content[len2:maxPossibleCardinality], ac.content[0:len1]) + } + nl := union2by2(value1.content[len2:maxPossibleCardinality], value2.content, ac.content) + ac.content = ac.content[:nl] // reslice to match actual used capacity + return ac +} + +// Note: such code does not make practical sense, except for lazy evaluations +func (ac *arrayContainer) iorBitmap(bc2 *bitmapContainer) container { + bc1 := ac.toBitmapContainer() + bc1.iorBitmap(bc2) + *ac = *newArrayContainerFromBitmap(bc1) + return ac +} + +func (ac *arrayContainer) iorRun16(rc *runContainer16) container { + bc1 := ac.toBitmapContainer() + bc2 := rc.toBitmapContainer() + bc1.iorBitmap(bc2) + *ac = *newArrayContainerFromBitmap(bc1) + return ac +} + +func (ac *arrayContainer) lazyIOR(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.lazyIorArray(x) + case *bitmapContainer: + return ac.lazyIorBitmap(x) + case *runContainer16: + if x.isFull() { + return x.clone() + } + return ac.lazyIorRun16(x) + + } + panic("unsupported container type") +} + +func (ac *arrayContainer) lazyIorArray(ac2 *arrayContainer) container { + // TODO actually make this lazy + return ac.iorArray(ac2) +} + +func (ac *arrayContainer) lazyIorBitmap(bc *bitmapContainer) container { + // TODO actually make this lazy + return 
ac.iorBitmap(bc) +} + +func (ac *arrayContainer) lazyIorRun16(rc *runContainer16) container { + // TODO actually make this lazy + return ac.iorRun16(rc) +} + +func (ac *arrayContainer) lazyOR(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.lazyorArray(x) + case *bitmapContainer: + return a.lazyOR(ac) + case *runContainer16: + if x.isFull() { + return x.clone() + } + return x.orArray(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) orArray(value2 *arrayContainer) container { + value1 := ac + maxPossibleCardinality := value1.getCardinality() + value2.getCardinality() + if maxPossibleCardinality > arrayDefaultMaxSize { // it could be a bitmap! + bc := newBitmapContainer() + for k := 0; k < len(value2.content); k++ { + v := value2.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + for k := 0; k < len(ac.content); k++ { + v := ac.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + bc.cardinality = int(popcntSlice(bc.bitmap)) + if bc.cardinality <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc + } + answer := newArrayContainerCapacity(maxPossibleCardinality) + nl := union2by2(value1.content, value2.content, answer.content) + answer.content = answer.content[:nl] // reslice to match actual used capacity + return answer +} + +func (ac *arrayContainer) orArrayCardinality(value2 *arrayContainer) int { + return union2by2Cardinality(ac.content, value2.content) +} + +func (ac *arrayContainer) lazyorArray(value2 *arrayContainer) container { + value1 := ac + maxPossibleCardinality := value1.getCardinality() + value2.getCardinality() + if maxPossibleCardinality > arrayLazyLowerBound { // it could be a bitmap!^M + bc := newBitmapContainer() + for k := 0; k < len(value2.content); k++ { + v := value2.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + for k := 0; k < 
len(ac.content); k++ { + v := ac.content[k] + i := uint(v) >> 6 + mask := uint64(1) << (v % 64) + bc.bitmap[i] |= mask + } + bc.cardinality = invalidCardinality + return bc + } + answer := newArrayContainerCapacity(maxPossibleCardinality) + nl := union2by2(value1.content, value2.content, answer.content) + answer.content = answer.content[:nl] // reslice to match actual used capacity + return answer +} + +func (ac *arrayContainer) and(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.andArray(x) + case *bitmapContainer: + return x.and(ac) + case *runContainer16: + if x.isFull() { + return ac.clone() + } + return x.andArray(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) andCardinality(a container) int { + switch x := a.(type) { + case *arrayContainer: + return ac.andArrayCardinality(x) + case *bitmapContainer: + return x.andCardinality(ac) + case *runContainer16: + return x.andArrayCardinality(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) intersects(a container) bool { + switch x := a.(type) { + case *arrayContainer: + return ac.intersectsArray(x) + case *bitmapContainer: + return x.intersects(ac) + case *runContainer16: + return x.intersects(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) iand(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.iandArray(x) + case *bitmapContainer: + return ac.iandBitmap(x) + case *runContainer16: + if x.isFull() { + return ac + } + return x.andArray(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) iandBitmap(bc *bitmapContainer) container { + pos := 0 + c := ac.getCardinality() + for k := 0; k < c; k++ { + // branchless + v := ac.content[k] + ac.content[pos] = v + pos += int(bc.bitValue(v)) + } + ac.content = ac.content[:pos] + return ac + +} + +func (ac *arrayContainer) xor(a container) container { + switch x := a.(type) { + case 
*arrayContainer: + return ac.xorArray(x) + case *bitmapContainer: + return a.xor(ac) + case *runContainer16: + return x.xorArray(ac) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) xorArray(value2 *arrayContainer) container { + value1 := ac + totalCardinality := value1.getCardinality() + value2.getCardinality() + if totalCardinality > arrayDefaultMaxSize { // it could be a bitmap! + bc := newBitmapContainer() + for k := 0; k < len(value2.content); k++ { + v := value2.content[k] + i := uint(v) >> 6 + bc.bitmap[i] ^= (uint64(1) << (v % 64)) + } + for k := 0; k < len(ac.content); k++ { + v := ac.content[k] + i := uint(v) >> 6 + bc.bitmap[i] ^= (uint64(1) << (v % 64)) + } + bc.computeCardinality() + if bc.cardinality <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc + } + desiredCapacity := totalCardinality + answer := newArrayContainerCapacity(desiredCapacity) + length := exclusiveUnion2by2(value1.content, value2.content, answer.content) + answer.content = answer.content[:length] + return answer + +} + +func (ac *arrayContainer) andNot(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.andNotArray(x) + case *bitmapContainer: + return ac.andNotBitmap(x) + case *runContainer16: + return ac.andNotRun16(x) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) andNotRun16(rc *runContainer16) container { + acb := ac.toBitmapContainer() + rcb := rc.toBitmapContainer() + return acb.andNotBitmap(rcb) +} + +func (ac *arrayContainer) iandNot(a container) container { + switch x := a.(type) { + case *arrayContainer: + return ac.iandNotArray(x) + case *bitmapContainer: + return ac.iandNotBitmap(x) + case *runContainer16: + return ac.iandNotRun16(x) + } + panic("unsupported container type") +} + +func (ac *arrayContainer) iandNotRun16(rc *runContainer16) container { + rcb := rc.toBitmapContainer() + acb := ac.toBitmapContainer() + acb.iandNotBitmapSurely(rcb) + *ac = 
*(acb.toArrayContainer()) + return ac +} + +func (ac *arrayContainer) andNotArray(value2 *arrayContainer) container { + value1 := ac + desiredcapacity := value1.getCardinality() + answer := newArrayContainerCapacity(desiredcapacity) + length := difference(value1.content, value2.content, answer.content) + answer.content = answer.content[:length] + return answer +} + +func (ac *arrayContainer) iandNotArray(value2 *arrayContainer) container { + length := difference(ac.content, value2.content, ac.content) + ac.content = ac.content[:length] + return ac +} + +func (ac *arrayContainer) andNotBitmap(value2 *bitmapContainer) container { + desiredcapacity := ac.getCardinality() + answer := newArrayContainerCapacity(desiredcapacity) + answer.content = answer.content[:desiredcapacity] + pos := 0 + for _, v := range ac.content { + answer.content[pos] = v + pos += 1 - int(value2.bitValue(v)) + } + answer.content = answer.content[:pos] + return answer +} + +func (ac *arrayContainer) andBitmap(value2 *bitmapContainer) container { + desiredcapacity := ac.getCardinality() + answer := newArrayContainerCapacity(desiredcapacity) + answer.content = answer.content[:desiredcapacity] + pos := 0 + for _, v := range ac.content { + answer.content[pos] = v + pos += int(value2.bitValue(v)) + } + answer.content = answer.content[:pos] + return answer +} + +func (ac *arrayContainer) iandNotBitmap(value2 *bitmapContainer) container { + pos := 0 + for _, v := range ac.content { + ac.content[pos] = v + pos += 1 - int(value2.bitValue(v)) + } + ac.content = ac.content[:pos] + return ac +} + +func copyOf(array []uint16, size int) []uint16 { + result := make([]uint16, size) + for i, x := range array { + if i == size { + break + } + result[i] = x + } + return result +} + +// flip the values in the range [firstOfRange,endx) +func (ac *arrayContainer) inot(firstOfRange, endx int) container { + if firstOfRange >= endx { + return ac + } + return ac.inotClose(firstOfRange, endx-1) // remove everything in 
[firstOfRange,endx-1] +} + +// flip the values in the range [firstOfRange,lastOfRange] +func (ac *arrayContainer) inotClose(firstOfRange, lastOfRange int) container { + if firstOfRange > lastOfRange { // unlike add and remove, not uses an inclusive range [firstOfRange,lastOfRange] + return ac + } + // determine the span of array indices to be affected + startIndex := binarySearch(ac.content, uint16(firstOfRange)) + if startIndex < 0 { + startIndex = -startIndex - 1 + } + lastIndex := binarySearch(ac.content, uint16(lastOfRange)) + if lastIndex < 0 { + lastIndex = -lastIndex - 1 - 1 + } + currentValuesInRange := lastIndex - startIndex + 1 + spanToBeFlipped := lastOfRange - firstOfRange + 1 + + newValuesInRange := spanToBeFlipped - currentValuesInRange + buffer := make([]uint16, newValuesInRange) + cardinalityChange := newValuesInRange - currentValuesInRange + newCardinality := len(ac.content) + cardinalityChange + if cardinalityChange > 0 { + if newCardinality > len(ac.content) { + if newCardinality > arrayDefaultMaxSize { + bcRet := ac.toBitmapContainer() + bcRet.inot(firstOfRange, lastOfRange+1) + *ac = *bcRet.toArrayContainer() + return bcRet + } + ac.content = copyOf(ac.content, newCardinality) + } + base := lastIndex + 1 + copy(ac.content[lastIndex+1+cardinalityChange:], ac.content[base:base+len(ac.content)-1-lastIndex]) + ac.negateRange(buffer, startIndex, lastIndex, firstOfRange, lastOfRange+1) + } else { // no expansion needed + ac.negateRange(buffer, startIndex, lastIndex, firstOfRange, lastOfRange+1) + if cardinalityChange < 0 { + + for i := startIndex + newValuesInRange; i < newCardinality; i++ { + ac.content[i] = ac.content[i-cardinalityChange] + } + } + } + ac.content = ac.content[:newCardinality] + return ac +} + +func (ac *arrayContainer) negateRange(buffer []uint16, startIndex, lastIndex, startRange, lastRange int) { + // compute the negation into buffer + outPos := 0 + inPos := startIndex // value here always >= valInRange, + // until it is 
exhausted + // n.b., we can start initially exhausted. + + valInRange := startRange + for ; valInRange < lastRange && inPos <= lastIndex; valInRange++ { + if uint16(valInRange) != ac.content[inPos] { + buffer[outPos] = uint16(valInRange) + outPos++ + } else { + inPos++ + } + } + + // if there are extra items (greater than the biggest + // pre-existing one in range), buffer them + for ; valInRange < lastRange; valInRange++ { + buffer[outPos] = uint16(valInRange) + outPos++ + } + + if outPos != len(buffer) { + panic("negateRange: internal bug") + } + + for i, item := range buffer { + ac.content[i+startIndex] = item + } +} + +func (ac *arrayContainer) isFull() bool { + return false +} + +func (ac *arrayContainer) andArray(value2 *arrayContainer) container { + desiredcapacity := minOfInt(ac.getCardinality(), value2.getCardinality()) + answer := newArrayContainerCapacity(desiredcapacity) + length := intersection2by2( + ac.content, + value2.content, + answer.content) + answer.content = answer.content[:length] + return answer +} + +func (ac *arrayContainer) andArrayCardinality(value2 *arrayContainer) int { + return intersection2by2Cardinality( + ac.content, + value2.content) +} + +func (ac *arrayContainer) intersectsArray(value2 *arrayContainer) bool { + return intersects2by2( + ac.content, + value2.content) +} + +func (ac *arrayContainer) iandArray(value2 *arrayContainer) container { + length := intersection2by2( + ac.content, + value2.content, + ac.content) + ac.content = ac.content[:length] + return ac +} + +func (ac *arrayContainer) getCardinality() int { + return len(ac.content) +} + +func (ac *arrayContainer) rank(x uint16) int { + answer := binarySearch(ac.content, x) + if answer >= 0 { + return answer + 1 + } + return -answer - 1 + +} + +func (ac *arrayContainer) selectInt(x uint16) int { + return int(ac.content[x]) +} + +func (ac *arrayContainer) clone() container { + ptr := arrayContainer{make([]uint16, len(ac.content))} + copy(ptr.content, ac.content[:]) + 
return &ptr +} + +func (ac *arrayContainer) contains(x uint16) bool { + return binarySearch(ac.content, x) >= 0 +} + +func (ac *arrayContainer) loadData(bitmapContainer *bitmapContainer) { + ac.content = make([]uint16, bitmapContainer.cardinality, bitmapContainer.cardinality) + bitmapContainer.fillArray(ac.content) +} +func newArrayContainer() *arrayContainer { + p := new(arrayContainer) + return p +} + +func newArrayContainerFromBitmap(bc *bitmapContainer) *arrayContainer { + ac := &arrayContainer{} + ac.loadData(bc) + return ac +} + +func newArrayContainerCapacity(size int) *arrayContainer { + p := new(arrayContainer) + p.content = make([]uint16, 0, size) + return p +} + +func newArrayContainerSize(size int) *arrayContainer { + p := new(arrayContainer) + p.content = make([]uint16, size, size) + return p +} + +func newArrayContainerRange(firstOfRun, lastOfRun int) *arrayContainer { + valuesInRange := lastOfRun - firstOfRun + 1 + this := newArrayContainerCapacity(valuesInRange) + for i := 0; i < valuesInRange; i++ { + this.content = append(this.content, uint16(firstOfRun+i)) + } + return this +} + +func (ac *arrayContainer) numberOfRuns() (nr int) { + n := len(ac.content) + var runlen uint16 + var cur, prev uint16 + + switch n { + case 0: + return 0 + case 1: + return 1 + default: + for i := 1; i < n; i++ { + prev = ac.content[i-1] + cur = ac.content[i] + + if cur == prev+1 { + runlen++ + } else { + if cur < prev { + panic("then fundamental arrayContainer assumption of sorted ac.content was broken") + } + if cur == prev { + panic("then fundamental arrayContainer assumption of deduplicated content was broken") + } else { + nr++ + runlen = 0 + } + } + } + nr++ + } + return +} + +// convert to run or array *if needed* +func (ac *arrayContainer) toEfficientContainer() container { + + numRuns := ac.numberOfRuns() + + sizeAsRunContainer := runContainer16SerializedSizeInBytes(numRuns) + sizeAsBitmapContainer := bitmapContainerSizeInBytes() + card := ac.getCardinality() + 
sizeAsArrayContainer := arrayContainerSizeInBytes(card) + + if sizeAsRunContainer <= minOfInt(sizeAsBitmapContainer, sizeAsArrayContainer) { + return newRunContainer16FromArray(ac) + } + if card <= arrayDefaultMaxSize { + return ac + } + return ac.toBitmapContainer() +} + +func (ac *arrayContainer) containerType() contype { + return arrayContype +} + +func (ac *arrayContainer) addOffset(x uint16) []container { + low := &arrayContainer{} + high := &arrayContainer{} + for _, val := range ac.content { + y := uint32(val) + uint32(x) + if highbits(y) > 0 { + high.content = append(high.content, lowbits(y)) + } else { + low.content = append(low.content, lowbits(y)) + } + } + return []container{low, high} +} diff --git a/vendor/github.com/RoaringBitmap/roaring/arraycontainer_gen.go b/vendor/github.com/RoaringBitmap/roaring/arraycontainer_gen.go new file mode 100644 index 0000000..6ee670e --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/arraycontainer_gen.go @@ -0,0 +1,134 @@ +package roaring + +// NOTE: THIS FILE WAS PRODUCED BY THE +// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp) +// DO NOT EDIT + +import "github.com/tinylib/msgp/msgp" + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *arrayContainer) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zbzg uint32 + zbzg, err = dc.ReadMapHeader() + if err != nil { + return + } + for zbzg > 0 { + zbzg-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "content": + var zbai uint32 + zbai, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.content) >= int(zbai) { + z.content = (z.content)[:zbai] + } else { + z.content = make([]uint16, zbai) + } + for zxvk := range z.content { + z.content[zxvk], err = dc.ReadUint16() + if err != nil { + return + } + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z 
*arrayContainer) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 1 + // write "content" + err = en.Append(0x81, 0xa7, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.content))) + if err != nil { + return + } + for zxvk := range z.content { + err = en.WriteUint16(z.content[zxvk]) + if err != nil { + return + } + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *arrayContainer) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 1 + // string "content" + o = append(o, 0x81, 0xa7, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74) + o = msgp.AppendArrayHeader(o, uint32(len(z.content))) + for zxvk := range z.content { + o = msgp.AppendUint16(o, z.content[zxvk]) + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *arrayContainer) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zcmr uint32 + zcmr, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zcmr > 0 { + zcmr-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "content": + var zajw uint32 + zajw, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.content) >= int(zajw) { + z.content = (z.content)[:zajw] + } else { + z.content = make([]uint16, zajw) + } + for zxvk := range z.content { + z.content[zxvk], bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *arrayContainer) Msgsize() (s int) { + s = 1 + 8 + msgp.ArrayHeaderSize + (len(z.content) * (msgp.Uint16Size)) + return +} diff --git 
a/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer.go b/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer.go new file mode 100644 index 0000000..cd259fd --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer.go @@ -0,0 +1,1098 @@ +package roaring + +import ( + "fmt" + "unsafe" +) + +//go:generate msgp -unexported + +type bitmapContainer struct { + cardinality int + bitmap []uint64 +} + +func (bc bitmapContainer) String() string { + var s string + for it := bc.getShortIterator(); it.hasNext(); { + s += fmt.Sprintf("%v, ", it.next()) + } + return s +} + +func newBitmapContainer() *bitmapContainer { + p := new(bitmapContainer) + size := (1 << 16) / 64 + p.bitmap = make([]uint64, size, size) + return p +} + +func newBitmapContainerwithRange(firstOfRun, lastOfRun int) *bitmapContainer { + bc := newBitmapContainer() + bc.cardinality = lastOfRun - firstOfRun + 1 + if bc.cardinality == maxCapacity { + fill(bc.bitmap, uint64(0xffffffffffffffff)) + } else { + firstWord := firstOfRun / 64 + lastWord := lastOfRun / 64 + zeroPrefixLength := uint64(firstOfRun & 63) + zeroSuffixLength := uint64(63 - (lastOfRun & 63)) + + fillRange(bc.bitmap, firstWord, lastWord+1, uint64(0xffffffffffffffff)) + bc.bitmap[firstWord] ^= ((uint64(1) << zeroPrefixLength) - 1) + blockOfOnes := (uint64(1) << zeroSuffixLength) - 1 + maskOnLeft := blockOfOnes << (uint64(64) - zeroSuffixLength) + bc.bitmap[lastWord] ^= maskOnLeft + } + return bc +} + +func (bc *bitmapContainer) minimum() uint16 { + for i := 0; i < len(bc.bitmap); i++ { + w := bc.bitmap[i] + if w != 0 { + r := countTrailingZeros(w) + return uint16(r + i*64) + } + } + return MaxUint16 +} + +// i should be non-zero +func clz(i uint64) int { + n := 1 + x := uint32(i >> 32) + if x == 0 { + n += 32 + x = uint32(i) + } + if x>>16 == 0 { + n += 16 + x = x << 16 + } + if x>>24 == 0 { + n += 8 + x = x << 8 + } + if x>>28 == 0 { + n += 4 + x = x << 4 + } + if x>>30 == 0 { + n += 2 + x = x << 2 + } + return n - 
int(x>>31) +} + +func (bc *bitmapContainer) maximum() uint16 { + for i := len(bc.bitmap); i > 0; i-- { + w := bc.bitmap[i-1] + if w != 0 { + r := clz(w) + return uint16((i-1)*64 + 63 - r) + } + } + return uint16(0) +} + +func (bc *bitmapContainer) iterate(cb func(x uint16) bool) bool { + iterator := bitmapContainerShortIterator{bc, bc.NextSetBit(0)} + + for iterator.hasNext() { + if !cb(iterator.next()) { + return false + } + } + + return true +} + +type bitmapContainerShortIterator struct { + ptr *bitmapContainer + i int +} + +func (bcsi *bitmapContainerShortIterator) next() uint16 { + j := bcsi.i + bcsi.i = bcsi.ptr.NextSetBit(bcsi.i + 1) + return uint16(j) +} +func (bcsi *bitmapContainerShortIterator) hasNext() bool { + return bcsi.i >= 0 +} + +func (bcsi *bitmapContainerShortIterator) peekNext() uint16 { + return uint16(bcsi.i) +} + +func (bcsi *bitmapContainerShortIterator) advanceIfNeeded(minval uint16) { + if bcsi.hasNext() && bcsi.peekNext() < minval { + bcsi.i = bcsi.ptr.NextSetBit(int(minval)) + } +} + +func newBitmapContainerShortIterator(a *bitmapContainer) *bitmapContainerShortIterator { + return &bitmapContainerShortIterator{a, a.NextSetBit(0)} +} + +func (bc *bitmapContainer) getShortIterator() shortPeekable { + return newBitmapContainerShortIterator(bc) +} + +type reverseBitmapContainerShortIterator struct { + ptr *bitmapContainer + i int +} + +func (bcsi *reverseBitmapContainerShortIterator) next() uint16 { + if bcsi.i == -1 { + panic("reverseBitmapContainerShortIterator.next() going beyond what is available") + } + + j := bcsi.i + bcsi.i = bcsi.ptr.PrevSetBit(bcsi.i - 1) + return uint16(j) +} + +func (bcsi *reverseBitmapContainerShortIterator) hasNext() bool { + return bcsi.i >= 0 +} + +func newReverseBitmapContainerShortIterator(a *bitmapContainer) *reverseBitmapContainerShortIterator { + if a.cardinality == 0 { + return &reverseBitmapContainerShortIterator{a, -1} + } + return &reverseBitmapContainerShortIterator{a, int(a.maximum())} +} + +func 
(bc *bitmapContainer) getReverseIterator() shortIterable { + return newReverseBitmapContainerShortIterator(bc) +} + +type bitmapContainerManyIterator struct { + ptr *bitmapContainer + base int + bitset uint64 +} + +func (bcmi *bitmapContainerManyIterator) nextMany(hs uint32, buf []uint32) int { + n := 0 + base := bcmi.base + bitset := bcmi.bitset + + for n < len(buf) { + if bitset == 0 { + base++ + if base >= len(bcmi.ptr.bitmap) { + bcmi.base = base + bcmi.bitset = bitset + return n + } + bitset = bcmi.ptr.bitmap[base] + continue + } + t := bitset & -bitset + buf[n] = uint32(((base * 64) + int(popcount(t-1)))) | hs + n = n + 1 + bitset ^= t + } + + bcmi.base = base + bcmi.bitset = bitset + return n +} + +func newBitmapContainerManyIterator(a *bitmapContainer) *bitmapContainerManyIterator { + return &bitmapContainerManyIterator{a, -1, 0} +} + +func (bc *bitmapContainer) getManyIterator() manyIterable { + return newBitmapContainerManyIterator(bc) +} + +func (bc *bitmapContainer) getSizeInBytes() int { + return len(bc.bitmap) * 8 // + bcBaseBytes +} + +func (bc *bitmapContainer) serializedSizeInBytes() int { + //return bc.Msgsize()// NOO! 
This breaks GetSerializedSizeInBytes + return len(bc.bitmap) * 8 +} + +const bcBaseBytes = int(unsafe.Sizeof(bitmapContainer{})) + +// bitmapContainer doesn't depend on card, always fully allocated +func bitmapContainerSizeInBytes() int { + return bcBaseBytes + (1<<16)/8 +} + +func bitmapEquals(a, b []uint64) bool { + if len(a) != len(b) { + return false + } + for i, v := range a { + if v != b[i] { + return false + } + } + return true +} + +func (bc *bitmapContainer) fillLeastSignificant16bits(x []uint32, i int, mask uint32) { + // TODO: should be written as optimized assembly + pos := i + base := mask + for k := 0; k < len(bc.bitmap); k++ { + bitset := bc.bitmap[k] + for bitset != 0 { + t := bitset & -bitset + x[pos] = base + uint32(popcount(t-1)) + pos++ + bitset ^= t + } + base += 64 + } +} + +func (bc *bitmapContainer) equals(o container) bool { + srb, ok := o.(*bitmapContainer) + if ok { + if srb.cardinality != bc.cardinality { + return false + } + return bitmapEquals(bc.bitmap, srb.bitmap) + } + + // use generic comparison + if bc.getCardinality() != o.getCardinality() { + return false + } + ait := o.getShortIterator() + bit := bc.getShortIterator() + + for ait.hasNext() { + if bit.next() != ait.next() { + return false + } + } + return true +} + +func (bc *bitmapContainer) iaddReturnMinimized(i uint16) container { + bc.iadd(i) + if bc.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + return bc +} + +func (bc *bitmapContainer) iadd(i uint16) bool { + x := int(i) + previous := bc.bitmap[x/64] + mask := uint64(1) << (uint(x) % 64) + newb := previous | mask + bc.bitmap[x/64] = newb + bc.cardinality += int((previous ^ newb) >> (uint(x) % 64)) + return newb != previous +} + +func (bc *bitmapContainer) iremoveReturnMinimized(i uint16) container { + if bc.iremove(i) { + if bc.cardinality == arrayDefaultMaxSize { + return bc.toArrayContainer() + } + } + return bc +} + +// iremove returns true if i was found. 
+func (bc *bitmapContainer) iremove(i uint16) bool { + if bc.contains(i) { + bc.cardinality-- + bc.bitmap[i/64] &^= (uint64(1) << (i % 64)) + return true + } + return false +} + +func (bc *bitmapContainer) isFull() bool { + return bc.cardinality == int(MaxUint16)+1 +} + +func (bc *bitmapContainer) getCardinality() int { + return bc.cardinality +} + +func (bc *bitmapContainer) clone() container { + ptr := bitmapContainer{bc.cardinality, make([]uint64, len(bc.bitmap))} + copy(ptr.bitmap, bc.bitmap[:]) + return &ptr +} + +// add all values in range [firstOfRange,lastOfRange) +func (bc *bitmapContainer) iaddRange(firstOfRange, lastOfRange int) container { + bc.cardinality += setBitmapRangeAndCardinalityChange(bc.bitmap, firstOfRange, lastOfRange) + return bc +} + +// remove all values in range [firstOfRange,lastOfRange) +func (bc *bitmapContainer) iremoveRange(firstOfRange, lastOfRange int) container { + bc.cardinality += resetBitmapRangeAndCardinalityChange(bc.bitmap, firstOfRange, lastOfRange) + if bc.getCardinality() <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc +} + +// flip all values in range [firstOfRange,endx) +func (bc *bitmapContainer) inot(firstOfRange, endx int) container { + if endx-firstOfRange == maxCapacity { + flipBitmapRange(bc.bitmap, firstOfRange, endx) + bc.cardinality = maxCapacity - bc.cardinality + } else if endx-firstOfRange > maxCapacity/2 { + flipBitmapRange(bc.bitmap, firstOfRange, endx) + bc.computeCardinality() + } else { + bc.cardinality += flipBitmapRangeAndCardinalityChange(bc.bitmap, firstOfRange, endx) + } + if bc.getCardinality() <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc +} + +// flip all values in range [firstOfRange,endx) +func (bc *bitmapContainer) not(firstOfRange, endx int) container { + answer := bc.clone() + return answer.inot(firstOfRange, endx) +} + +func (bc *bitmapContainer) or(a container) container { + switch x := a.(type) { + case *arrayContainer: + return 
bc.orArray(x) + case *bitmapContainer: + return bc.orBitmap(x) + case *runContainer16: + if x.isFull() { + return x.clone() + } + return x.orBitmapContainer(bc) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) orCardinality(a container) int { + switch x := a.(type) { + case *arrayContainer: + return bc.orArrayCardinality(x) + case *bitmapContainer: + return bc.orBitmapCardinality(x) + case *runContainer16: + return x.orBitmapContainerCardinality(bc) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) ior(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.iorArray(x) + case *bitmapContainer: + return bc.iorBitmap(x) + case *runContainer16: + if x.isFull() { + return x.clone() + } + for i := range x.iv { + bc.iaddRange(int(x.iv[i].start), int(x.iv[i].last())+1) + } + if bc.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + //bc.computeCardinality() + return bc + } + panic(fmt.Errorf("unsupported container type %T", a)) +} + +func (bc *bitmapContainer) lazyIOR(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.lazyIORArray(x) + case *bitmapContainer: + return bc.lazyIORBitmap(x) + case *runContainer16: + if x.isFull() { + return x.clone() + } + + // Manually inlined setBitmapRange function + bitmap := bc.bitmap + for _, iv := range x.iv { + start := int(iv.start) + end := int(iv.last()) + 1 + if start >= end { + continue + } + firstword := start / 64 + endword := (end - 1) / 64 + if firstword == endword { + bitmap[firstword] |= (^uint64(0) << uint(start%64)) & (^uint64(0) >> (uint(-end) % 64)) + continue + } + bitmap[firstword] |= ^uint64(0) << uint(start%64) + for i := firstword + 1; i < endword; i++ { + bitmap[i] = ^uint64(0) + } + bitmap[endword] |= ^uint64(0) >> (uint(-end) % 64) + } + bc.cardinality = invalidCardinality + return bc + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) lazyOR(a container) container 
{ + switch x := a.(type) { + case *arrayContainer: + return bc.lazyORArray(x) + case *bitmapContainer: + return bc.lazyORBitmap(x) + case *runContainer16: + if x.isFull() { + return x.clone() + } + // TODO: implement lazy OR + return x.orBitmapContainer(bc) + + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) orArray(value2 *arrayContainer) container { + answer := bc.clone().(*bitmapContainer) + c := value2.getCardinality() + for k := 0; k < c; k++ { + v := value2.content[k] + i := uint(v) >> 6 + bef := answer.bitmap[i] + aft := bef | (uint64(1) << (v % 64)) + answer.bitmap[i] = aft + answer.cardinality += int((bef - aft) >> 63) + } + return answer +} + +func (bc *bitmapContainer) orArrayCardinality(value2 *arrayContainer) int { + answer := 0 + c := value2.getCardinality() + for k := 0; k < c; k++ { + // branchless: + v := value2.content[k] + i := uint(v) >> 6 + bef := bc.bitmap[i] + aft := bef | (uint64(1) << (v % 64)) + answer += int((bef - aft) >> 63) + } + return answer +} + +func (bc *bitmapContainer) orBitmap(value2 *bitmapContainer) container { + answer := newBitmapContainer() + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] | value2.bitmap[k] + } + answer.computeCardinality() + if answer.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + return answer +} + +func (bc *bitmapContainer) orBitmapCardinality(value2 *bitmapContainer) int { + return int(popcntOrSlice(bc.bitmap, value2.bitmap)) +} + +func (bc *bitmapContainer) andBitmapCardinality(value2 *bitmapContainer) int { + return int(popcntAndSlice(bc.bitmap, value2.bitmap)) +} + +func (bc *bitmapContainer) computeCardinality() { + bc.cardinality = int(popcntSlice(bc.bitmap)) +} + +func (bc *bitmapContainer) iorArray(ac *arrayContainer) container { + for k := range ac.content { + vc := ac.content[k] + i := uint(vc) >> 6 + bef := bc.bitmap[i] + aft := bef | (uint64(1) << (vc % 64)) + bc.bitmap[i] = aft + bc.cardinality += int((bef - aft) >> 63) 
+ } + if bc.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + return bc +} + +func (bc *bitmapContainer) iorBitmap(value2 *bitmapContainer) container { + answer := bc + answer.cardinality = 0 + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] | value2.bitmap[k] + } + answer.computeCardinality() + if bc.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + return answer +} + +func (bc *bitmapContainer) lazyIORArray(value2 *arrayContainer) container { + answer := bc + c := value2.getCardinality() + for k := 0; k+3 < c; k += 4 { + content := (*[4]uint16)(unsafe.Pointer(&value2.content[k])) + vc0 := content[0] + i0 := uint(vc0) >> 6 + answer.bitmap[i0] = answer.bitmap[i0] | (uint64(1) << (vc0 % 64)) + + vc1 := content[1] + i1 := uint(vc1) >> 6 + answer.bitmap[i1] = answer.bitmap[i1] | (uint64(1) << (vc1 % 64)) + + vc2 := content[2] + i2 := uint(vc2) >> 6 + answer.bitmap[i2] = answer.bitmap[i2] | (uint64(1) << (vc2 % 64)) + + vc3 := content[3] + i3 := uint(vc3) >> 6 + answer.bitmap[i3] = answer.bitmap[i3] | (uint64(1) << (vc3 % 64)) + } + + for k := c &^ 3; k < c; k++ { + vc := value2.content[k] + i := uint(vc) >> 6 + answer.bitmap[i] = answer.bitmap[i] | (uint64(1) << (vc % 64)) + } + + answer.cardinality = invalidCardinality + return answer +} + +func (bc *bitmapContainer) lazyORArray(value2 *arrayContainer) container { + answer := bc.clone().(*bitmapContainer) + return answer.lazyIORArray(value2) +} + +func (bc *bitmapContainer) lazyIORBitmap(value2 *bitmapContainer) container { + answer := bc + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] | value2.bitmap[k] + } + bc.cardinality = invalidCardinality + return answer +} + +func (bc *bitmapContainer) lazyORBitmap(value2 *bitmapContainer) container { + answer := bc.clone().(*bitmapContainer) + return answer.lazyIORBitmap(value2) +} + +func (bc *bitmapContainer) xor(a container) container { + switch x := a.(type) { + case *arrayContainer: + 
return bc.xorArray(x) + case *bitmapContainer: + return bc.xorBitmap(x) + case *runContainer16: + return x.xorBitmap(bc) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) xorArray(value2 *arrayContainer) container { + answer := bc.clone().(*bitmapContainer) + c := value2.getCardinality() + for k := 0; k < c; k++ { + vc := value2.content[k] + index := uint(vc) >> 6 + abi := answer.bitmap[index] + mask := uint64(1) << (vc % 64) + answer.cardinality += 1 - 2*int((abi&mask)>>(vc%64)) + answer.bitmap[index] = abi ^ mask + } + if answer.cardinality <= arrayDefaultMaxSize { + return answer.toArrayContainer() + } + return answer +} + +func (bc *bitmapContainer) rank(x uint16) int { + // TODO: rewrite in assembly + leftover := (uint(x) + 1) & 63 + if leftover == 0 { + return int(popcntSlice(bc.bitmap[:(uint(x)+1)/64])) + } + return int(popcntSlice(bc.bitmap[:(uint(x)+1)/64]) + popcount(bc.bitmap[(uint(x)+1)/64]<<(64-leftover))) +} + +func (bc *bitmapContainer) selectInt(x uint16) int { + remaining := x + for k := 0; k < len(bc.bitmap); k++ { + w := popcount(bc.bitmap[k]) + if uint16(w) > remaining { + return k*64 + selectBitPosition(bc.bitmap[k], int(remaining)) + } + remaining -= uint16(w) + } + return -1 +} + +func (bc *bitmapContainer) xorBitmap(value2 *bitmapContainer) container { + newCardinality := int(popcntXorSlice(bc.bitmap, value2.bitmap)) + + if newCardinality > arrayDefaultMaxSize { + answer := newBitmapContainer() + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] ^ value2.bitmap[k] + } + answer.cardinality = newCardinality + if answer.isFull() { + return newRunContainer16Range(0, MaxUint16) + } + return answer + } + ac := newArrayContainerSize(newCardinality) + fillArrayXOR(ac.content, bc.bitmap, value2.bitmap) + ac.content = ac.content[:newCardinality] + return ac +} + +func (bc *bitmapContainer) and(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.andArray(x) + case 
*bitmapContainer: + return bc.andBitmap(x) + case *runContainer16: + if x.isFull() { + return bc.clone() + } + return x.andBitmapContainer(bc) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) andCardinality(a container) int { + switch x := a.(type) { + case *arrayContainer: + return bc.andArrayCardinality(x) + case *bitmapContainer: + return bc.andBitmapCardinality(x) + case *runContainer16: + return x.andBitmapContainerCardinality(bc) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) intersects(a container) bool { + switch x := a.(type) { + case *arrayContainer: + return bc.intersectsArray(x) + case *bitmapContainer: + return bc.intersectsBitmap(x) + case *runContainer16: + return x.intersects(bc) + + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) iand(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.iandArray(x) + case *bitmapContainer: + return bc.iandBitmap(x) + case *runContainer16: + if x.isFull() { + return bc.clone() + } + return bc.iandRun16(x) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) iandRun16(rc *runContainer16) container { + rcb := newBitmapContainerFromRun(rc) + return bc.iandBitmap(rcb) +} + +func (bc *bitmapContainer) iandArray(ac *arrayContainer) container { + acb := ac.toBitmapContainer() + return bc.iandBitmap(acb) +} + +func (bc *bitmapContainer) andArray(value2 *arrayContainer) *arrayContainer { + answer := newArrayContainerCapacity(len(value2.content)) + answer.content = answer.content[:cap(answer.content)] + c := value2.getCardinality() + pos := 0 + for k := 0; k < c; k++ { + v := value2.content[k] + answer.content[pos] = v + pos += int(bc.bitValue(v)) + } + answer.content = answer.content[:pos] + return answer +} + +func (bc *bitmapContainer) andArrayCardinality(value2 *arrayContainer) int { + c := value2.getCardinality() + pos := 0 + for k := 0; k < c; k++ { + v := value2.content[k] + pos += 
int(bc.bitValue(v)) + } + return pos +} + +func (bc *bitmapContainer) getCardinalityInRange(start, end uint) int { + if start >= end { + return 0 + } + firstword := start / 64 + endword := (end - 1) / 64 + const allones = ^uint64(0) + if firstword == endword { + return int(popcount(bc.bitmap[firstword] & ((allones << (start % 64)) & (allones >> ((64 - end) & 63))))) + } + answer := popcount(bc.bitmap[firstword] & (allones << (start % 64))) + answer += popcntSlice(bc.bitmap[firstword+1 : endword]) + answer += popcount(bc.bitmap[endword] & (allones >> ((64 - end) & 63))) + return int(answer) +} + +func (bc *bitmapContainer) andBitmap(value2 *bitmapContainer) container { + newcardinality := int(popcntAndSlice(bc.bitmap, value2.bitmap)) + if newcardinality > arrayDefaultMaxSize { + answer := newBitmapContainer() + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] & value2.bitmap[k] + } + answer.cardinality = newcardinality + return answer + } + ac := newArrayContainerSize(newcardinality) + fillArrayAND(ac.content, bc.bitmap, value2.bitmap) + ac.content = ac.content[:newcardinality] //not sure why i need this + return ac + +} + +func (bc *bitmapContainer) intersectsArray(value2 *arrayContainer) bool { + c := value2.getCardinality() + for k := 0; k < c; k++ { + v := value2.content[k] + if bc.contains(v) { + return true + } + } + return false +} + +func (bc *bitmapContainer) intersectsBitmap(value2 *bitmapContainer) bool { + for k := 0; k < len(bc.bitmap); k++ { + if (bc.bitmap[k] & value2.bitmap[k]) != 0 { + return true + } + } + return false + +} + +func (bc *bitmapContainer) iandBitmap(value2 *bitmapContainer) container { + newcardinality := int(popcntAndSlice(bc.bitmap, value2.bitmap)) + for k := 0; k < len(bc.bitmap); k++ { + bc.bitmap[k] = bc.bitmap[k] & value2.bitmap[k] + } + bc.cardinality = newcardinality + + if newcardinality <= arrayDefaultMaxSize { + return newArrayContainerFromBitmap(bc) + } + return bc +} + +func (bc 
*bitmapContainer) andNot(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.andNotArray(x) + case *bitmapContainer: + return bc.andNotBitmap(x) + case *runContainer16: + return bc.andNotRun16(x) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) andNotRun16(rc *runContainer16) container { + rcb := rc.toBitmapContainer() + return bc.andNotBitmap(rcb) +} + +func (bc *bitmapContainer) iandNot(a container) container { + switch x := a.(type) { + case *arrayContainer: + return bc.iandNotArray(x) + case *bitmapContainer: + return bc.iandNotBitmapSurely(x) + case *runContainer16: + return bc.iandNotRun16(x) + } + panic("unsupported container type") +} + +func (bc *bitmapContainer) iandNotArray(ac *arrayContainer) container { + acb := ac.toBitmapContainer() + return bc.iandNotBitmapSurely(acb) +} + +func (bc *bitmapContainer) iandNotRun16(rc *runContainer16) container { + rcb := rc.toBitmapContainer() + return bc.iandNotBitmapSurely(rcb) +} + +func (bc *bitmapContainer) andNotArray(value2 *arrayContainer) container { + answer := bc.clone().(*bitmapContainer) + c := value2.getCardinality() + for k := 0; k < c; k++ { + vc := value2.content[k] + i := uint(vc) >> 6 + oldv := answer.bitmap[i] + newv := oldv &^ (uint64(1) << (vc % 64)) + answer.bitmap[i] = newv + answer.cardinality -= int((oldv ^ newv) >> (vc % 64)) + } + if answer.cardinality <= arrayDefaultMaxSize { + return answer.toArrayContainer() + } + return answer +} + +func (bc *bitmapContainer) andNotBitmap(value2 *bitmapContainer) container { + newCardinality := int(popcntMaskSlice(bc.bitmap, value2.bitmap)) + if newCardinality > arrayDefaultMaxSize { + answer := newBitmapContainer() + for k := 0; k < len(answer.bitmap); k++ { + answer.bitmap[k] = bc.bitmap[k] &^ value2.bitmap[k] + } + answer.cardinality = newCardinality + return answer + } + ac := newArrayContainerSize(newCardinality) + fillArrayANDNOT(ac.content, bc.bitmap, value2.bitmap) + return ac +} + 
+func (bc *bitmapContainer) iandNotBitmapSurely(value2 *bitmapContainer) container { + newCardinality := int(popcntMaskSlice(bc.bitmap, value2.bitmap)) + for k := 0; k < len(bc.bitmap); k++ { + bc.bitmap[k] = bc.bitmap[k] &^ value2.bitmap[k] + } + bc.cardinality = newCardinality + if bc.getCardinality() <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc +} + +func (bc *bitmapContainer) contains(i uint16) bool { //testbit + x := uint(i) + w := bc.bitmap[x>>6] + mask := uint64(1) << (x & 63) + return (w & mask) != 0 +} + +func (bc *bitmapContainer) bitValue(i uint16) uint64 { + x := uint(i) + w := bc.bitmap[x>>6] + return (w >> (x & 63)) & 1 +} + +func (bc *bitmapContainer) loadData(arrayContainer *arrayContainer) { + bc.cardinality = arrayContainer.getCardinality() + c := arrayContainer.getCardinality() + for k := 0; k < c; k++ { + x := arrayContainer.content[k] + i := int(x) / 64 + bc.bitmap[i] |= (uint64(1) << uint(x%64)) + } +} + +func (bc *bitmapContainer) toArrayContainer() *arrayContainer { + ac := &arrayContainer{} + ac.loadData(bc) + return ac +} + +func (bc *bitmapContainer) fillArray(container []uint16) { + //TODO: rewrite in assembly + pos := 0 + base := 0 + for k := 0; k < len(bc.bitmap); k++ { + bitset := bc.bitmap[k] + for bitset != 0 { + t := bitset & -bitset + container[pos] = uint16((base + int(popcount(t-1)))) + pos = pos + 1 + bitset ^= t + } + base += 64 + } +} + +func (bc *bitmapContainer) NextSetBit(i int) int { + x := i / 64 + if x >= len(bc.bitmap) { + return -1 + } + w := bc.bitmap[x] + w = w >> uint(i%64) + if w != 0 { + return i + countTrailingZeros(w) + } + x++ + for ; x < len(bc.bitmap); x++ { + if bc.bitmap[x] != 0 { + return (x * 64) + countTrailingZeros(bc.bitmap[x]) + } + } + return -1 +} + +func (bc *bitmapContainer) PrevSetBit(i int) int { + if i < 0 { + return -1 + } + x := i / 64 + if x >= len(bc.bitmap) { + return -1 + } + + w := bc.bitmap[x] + + b := i % 64 + + w = w << uint(63-b) + if w != 0 { + return i - 
countLeadingZeros(w) + } + x-- + for ; x >= 0; x-- { + if bc.bitmap[x] != 0 { + return (x * 64) + 63 - countLeadingZeros(bc.bitmap[x]) + } + } + return -1 +} + +// reference the java implementation +// https://github.com/RoaringBitmap/RoaringBitmap/blob/master/src/main/java/org/roaringbitmap/BitmapContainer.java#L875-L892 +// +func (bc *bitmapContainer) numberOfRuns() int { + if bc.cardinality == 0 { + return 0 + } + + var numRuns uint64 + nextWord := bc.bitmap[0] + + for i := 0; i < len(bc.bitmap)-1; i++ { + word := nextWord + nextWord = bc.bitmap[i+1] + numRuns += popcount((^word)&(word<<1)) + ((word >> 63) &^ nextWord) + } + + word := nextWord + numRuns += popcount((^word) & (word << 1)) + if (word & 0x8000000000000000) != 0 { + numRuns++ + } + + return int(numRuns) +} + +// convert to run or array *if needed* +func (bc *bitmapContainer) toEfficientContainer() container { + + numRuns := bc.numberOfRuns() + + sizeAsRunContainer := runContainer16SerializedSizeInBytes(numRuns) + sizeAsBitmapContainer := bitmapContainerSizeInBytes() + card := bc.getCardinality() + sizeAsArrayContainer := arrayContainerSizeInBytes(card) + + if sizeAsRunContainer <= minOfInt(sizeAsBitmapContainer, sizeAsArrayContainer) { + return newRunContainer16FromBitmapContainer(bc) + } + if card <= arrayDefaultMaxSize { + return bc.toArrayContainer() + } + return bc +} + +func newBitmapContainerFromRun(rc *runContainer16) *bitmapContainer { + + if len(rc.iv) == 1 { + return newBitmapContainerwithRange(int(rc.iv[0].start), int(rc.iv[0].last())) + } + + bc := newBitmapContainer() + for i := range rc.iv { + setBitmapRange(bc.bitmap, int(rc.iv[i].start), int(rc.iv[i].last())+1) + bc.cardinality += int(rc.iv[i].last()) + 1 - int(rc.iv[i].start) + } + //bc.computeCardinality() + return bc +} + +func (bc *bitmapContainer) containerType() contype { + return bitmapContype +} + +func (bc *bitmapContainer) addOffset(x uint16) []container { + low := newBitmapContainer() + high := newBitmapContainer() + b := 
uint32(x) >> 6 + i := uint32(x) % 64 + end := uint32(1024) - b + if i == 0 { + copy(low.bitmap[b:], bc.bitmap[:end]) + copy(high.bitmap[:b], bc.bitmap[end:]) + } else { + low.bitmap[b] = bc.bitmap[0] << i + for k := uint32(1); k < end; k++ { + newval := bc.bitmap[k] << i + if newval == 0 { + newval = bc.bitmap[k-1] >> (64 - i) + } + low.bitmap[b+k] = newval + } + for k := end; k < 1024; k++ { + newval := bc.bitmap[k] << i + if newval == 0 { + newval = bc.bitmap[k-1] >> (64 - i) + } + high.bitmap[k-end] = newval + } + high.bitmap[b] = bc.bitmap[1023] >> (64 - i) + } + low.computeCardinality() + high.computeCardinality() + return []container{low, high} +} diff --git a/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer_gen.go b/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer_gen.go new file mode 100644 index 0000000..9b5a465 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/bitmapcontainer_gen.go @@ -0,0 +1,415 @@ +package roaring + +// NOTE: THIS FILE WAS PRODUCED BY THE +// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp) +// DO NOT EDIT + +import "github.com/tinylib/msgp/msgp" + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *bitmapContainer) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zbzg uint32 + zbzg, err = dc.ReadMapHeader() + if err != nil { + return + } + for zbzg > 0 { + zbzg-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "cardinality": + z.cardinality, err = dc.ReadInt() + if err != nil { + return + } + case "bitmap": + var zbai uint32 + zbai, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.bitmap) >= int(zbai) { + z.bitmap = (z.bitmap)[:zbai] + } else { + z.bitmap = make([]uint64, zbai) + } + for zxvk := range z.bitmap { + z.bitmap[zxvk], err = dc.ReadUint64() + if err != nil { + return + } + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg 
implements msgp.Encodable +func (z *bitmapContainer) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 2 + // write "cardinality" + err = en.Append(0x82, 0xab, 0x63, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79) + if err != nil { + return err + } + err = en.WriteInt(z.cardinality) + if err != nil { + return + } + // write "bitmap" + err = en.Append(0xa6, 0x62, 0x69, 0x74, 0x6d, 0x61, 0x70) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.bitmap))) + if err != nil { + return + } + for zxvk := range z.bitmap { + err = en.WriteUint64(z.bitmap[zxvk]) + if err != nil { + return + } + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *bitmapContainer) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 2 + // string "cardinality" + o = append(o, 0x82, 0xab, 0x63, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79) + o = msgp.AppendInt(o, z.cardinality) + // string "bitmap" + o = append(o, 0xa6, 0x62, 0x69, 0x74, 0x6d, 0x61, 0x70) + o = msgp.AppendArrayHeader(o, uint32(len(z.bitmap))) + for zxvk := range z.bitmap { + o = msgp.AppendUint64(o, z.bitmap[zxvk]) + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *bitmapContainer) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zcmr uint32 + zcmr, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zcmr > 0 { + zcmr-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "cardinality": + z.cardinality, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + return + } + case "bitmap": + var zajw uint32 + zajw, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.bitmap) >= int(zajw) { + z.bitmap = (z.bitmap)[:zajw] + } else { + z.bitmap = make([]uint64, zajw) + } + for zxvk := range z.bitmap { + 
z.bitmap[zxvk], bts, err = msgp.ReadUint64Bytes(bts) + if err != nil { + return + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *bitmapContainer) Msgsize() (s int) { + s = 1 + 12 + msgp.IntSize + 7 + msgp.ArrayHeaderSize + (len(z.bitmap) * (msgp.Uint64Size)) + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *bitmapContainerShortIterator) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zhct uint32 + zhct, err = dc.ReadMapHeader() + if err != nil { + return + } + for zhct > 0 { + zhct-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "ptr": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + return + } + z.ptr = nil + } else { + if z.ptr == nil { + z.ptr = new(bitmapContainer) + } + var zcua uint32 + zcua, err = dc.ReadMapHeader() + if err != nil { + return + } + for zcua > 0 { + zcua-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "cardinality": + z.ptr.cardinality, err = dc.ReadInt() + if err != nil { + return + } + case "bitmap": + var zxhx uint32 + zxhx, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.ptr.bitmap) >= int(zxhx) { + z.ptr.bitmap = (z.ptr.bitmap)[:zxhx] + } else { + z.ptr.bitmap = make([]uint64, zxhx) + } + for zwht := range z.ptr.bitmap { + z.ptr.bitmap[zwht], err = dc.ReadUint64() + if err != nil { + return + } + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + case "i": + z.i, err = dc.ReadInt() + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z *bitmapContainerShortIterator) EncodeMsg(en *msgp.Writer) (err error) { + 
// map header, size 2 + // write "ptr" + err = en.Append(0x82, 0xa3, 0x70, 0x74, 0x72) + if err != nil { + return err + } + if z.ptr == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + // map header, size 2 + // write "cardinality" + err = en.Append(0x82, 0xab, 0x63, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79) + if err != nil { + return err + } + err = en.WriteInt(z.ptr.cardinality) + if err != nil { + return + } + // write "bitmap" + err = en.Append(0xa6, 0x62, 0x69, 0x74, 0x6d, 0x61, 0x70) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.ptr.bitmap))) + if err != nil { + return + } + for zwht := range z.ptr.bitmap { + err = en.WriteUint64(z.ptr.bitmap[zwht]) + if err != nil { + return + } + } + } + // write "i" + err = en.Append(0xa1, 0x69) + if err != nil { + return err + } + err = en.WriteInt(z.i) + if err != nil { + return + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *bitmapContainerShortIterator) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 2 + // string "ptr" + o = append(o, 0x82, 0xa3, 0x70, 0x74, 0x72) + if z.ptr == nil { + o = msgp.AppendNil(o) + } else { + // map header, size 2 + // string "cardinality" + o = append(o, 0x82, 0xab, 0x63, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x74, 0x79) + o = msgp.AppendInt(o, z.ptr.cardinality) + // string "bitmap" + o = append(o, 0xa6, 0x62, 0x69, 0x74, 0x6d, 0x61, 0x70) + o = msgp.AppendArrayHeader(o, uint32(len(z.ptr.bitmap))) + for zwht := range z.ptr.bitmap { + o = msgp.AppendUint64(o, z.ptr.bitmap[zwht]) + } + } + // string "i" + o = append(o, 0xa1, 0x69) + o = msgp.AppendInt(o, z.i) + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *bitmapContainerShortIterator) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zlqf uint32 + zlqf, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != 
nil { + return + } + for zlqf > 0 { + zlqf-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "ptr": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.ptr = nil + } else { + if z.ptr == nil { + z.ptr = new(bitmapContainer) + } + var zdaf uint32 + zdaf, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zdaf > 0 { + zdaf-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "cardinality": + z.ptr.cardinality, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + return + } + case "bitmap": + var zpks uint32 + zpks, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.ptr.bitmap) >= int(zpks) { + z.ptr.bitmap = (z.ptr.bitmap)[:zpks] + } else { + z.ptr.bitmap = make([]uint64, zpks) + } + for zwht := range z.ptr.bitmap { + z.ptr.bitmap[zwht], bts, err = msgp.ReadUint64Bytes(bts) + if err != nil { + return + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + case "i": + z.i, bts, err = msgp.ReadIntBytes(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *bitmapContainerShortIterator) Msgsize() (s int) { + s = 1 + 4 + if z.ptr == nil { + s += msgp.NilSize + } else { + s += 1 + 12 + msgp.IntSize + 7 + msgp.ArrayHeaderSize + (len(z.ptr.bitmap) * (msgp.Uint64Size)) + } + s += 2 + msgp.IntSize + return +} diff --git a/vendor/github.com/RoaringBitmap/roaring/byte_input.go b/vendor/github.com/RoaringBitmap/roaring/byte_input.go new file mode 100644 index 0000000..f7a98a1 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/byte_input.go @@ -0,0 +1,161 @@ +package roaring + +import ( + 
"encoding/binary" + "io" +) + +type byteInput interface { + // next returns a slice containing the next n bytes from the buffer, + // advancing the buffer as if the bytes had been returned by Read. + next(n int) ([]byte, error) + // readUInt32 reads uint32 with LittleEndian order + readUInt32() (uint32, error) + // readUInt16 reads uint16 with LittleEndian order + readUInt16() (uint16, error) + // getReadBytes returns read bytes + getReadBytes() int64 + // skipBytes skips exactly n bytes + skipBytes(n int) error +} + +func newByteInputFromReader(reader io.Reader) byteInput { + return &byteInputAdapter{ + r: reader, + readBytes: 0, + } +} + +func newByteInput(buf []byte) byteInput { + return &byteBuffer{ + buf: buf, + off: 0, + } +} + +type byteBuffer struct { + buf []byte + off int +} + +// next returns a slice containing the next n bytes from the reader +// If there are fewer bytes than the given n, io.ErrUnexpectedEOF will be returned +func (b *byteBuffer) next(n int) ([]byte, error) { + m := len(b.buf) - b.off + + if n > m { + return nil, io.ErrUnexpectedEOF + } + + data := b.buf[b.off : b.off+n] + b.off += n + + return data, nil +} + +// readUInt32 reads uint32 with LittleEndian order +func (b *byteBuffer) readUInt32() (uint32, error) { + if len(b.buf)-b.off < 4 { + return 0, io.ErrUnexpectedEOF + } + + v := binary.LittleEndian.Uint32(b.buf[b.off:]) + b.off += 4 + + return v, nil +} + +// readUInt16 reads uint16 with LittleEndian order +func (b *byteBuffer) readUInt16() (uint16, error) { + if len(b.buf)-b.off < 2 { + return 0, io.ErrUnexpectedEOF + } + + v := binary.LittleEndian.Uint16(b.buf[b.off:]) + b.off += 2 + + return v, nil +} + +// getReadBytes returns read bytes +func (b *byteBuffer) getReadBytes() int64 { + return int64(b.off) +} + +// skipBytes skips exactly n bytes +func (b *byteBuffer) skipBytes(n int) error { + m := len(b.buf) - b.off + + if n > m { + return io.ErrUnexpectedEOF + } + + b.off += n + + return nil +} + +// reset resets the given 
buffer with a new byte slice +func (b *byteBuffer) reset(buf []byte) { + b.buf = buf + b.off = 0 +} + +type byteInputAdapter struct { + r io.Reader + readBytes int +} + +// next returns a slice containing the next n bytes from the buffer, +// advancing the buffer as if the bytes had been returned by Read. +func (b *byteInputAdapter) next(n int) ([]byte, error) { + buf := make([]byte, n) + m, err := io.ReadAtLeast(b.r, buf, n) + b.readBytes += m + + if err != nil { + return nil, err + } + + return buf, nil +} + +// readUInt32 reads uint32 with LittleEndian order +func (b *byteInputAdapter) readUInt32() (uint32, error) { + buf, err := b.next(4) + + if err != nil { + return 0, err + } + + return binary.LittleEndian.Uint32(buf), nil +} + +// readUInt16 reads uint16 with LittleEndian order +func (b *byteInputAdapter) readUInt16() (uint16, error) { + buf, err := b.next(2) + + if err != nil { + return 0, err + } + + return binary.LittleEndian.Uint16(buf), nil +} + +// getReadBytes returns read bytes +func (b *byteInputAdapter) getReadBytes() int64 { + return int64(b.readBytes) +} + +// skipBytes skips exactly n bytes +func (b *byteInputAdapter) skipBytes(n int) error { + _, err := b.next(n) + + return err +} + +// reset resets the given buffer with a new stream +func (b *byteInputAdapter) reset(stream io.Reader) { + b.r = stream + b.readBytes = 0 +} diff --git a/vendor/github.com/RoaringBitmap/roaring/clz.go b/vendor/github.com/RoaringBitmap/roaring/clz.go new file mode 100644 index 0000000..bcd80d3 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/clz.go @@ -0,0 +1,11 @@ +// +build go1.9 +// "go1.9", from Go version 1.9 onward +// See https://golang.org/pkg/go/build/#hdr-Build_Constraints + +package roaring + +import "math/bits" + +func countLeadingZeros(x uint64) int { + return bits.LeadingZeros64(x) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/clz_compat.go b/vendor/github.com/RoaringBitmap/roaring/clz_compat.go new file mode 100644 index 
0000000..eeef4de --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/clz_compat.go @@ -0,0 +1,36 @@ +// +build !go1.9 + +package roaring + +// LeadingZeroBits returns the number of consecutive most significant zero +// bits of x. +func countLeadingZeros(i uint64) int { + if i == 0 { + return 64 + } + n := 1 + x := uint32(i >> 32) + if x == 0 { + n += 32 + x = uint32(i) + } + if (x >> 16) == 0 { + n += 16 + x <<= 16 + } + if (x >> 24) == 0 { + n += 8 + x <<= 8 + } + if x>>28 == 0 { + n += 4 + x <<= 4 + } + if x>>30 == 0 { + n += 2 + x <<= 2 + + } + n -= int(x >> 31) + return n +} diff --git a/vendor/github.com/RoaringBitmap/roaring/ctz.go b/vendor/github.com/RoaringBitmap/roaring/ctz.go new file mode 100644 index 0000000..e399ddd --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/ctz.go @@ -0,0 +1,11 @@ +// +build go1.9 +// "go1.9", from Go version 1.9 onward +// See https://golang.org/pkg/go/build/#hdr-Build_Constraints + +package roaring + +import "math/bits" + +func countTrailingZeros(x uint64) int { + return bits.TrailingZeros64(x) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/ctz_compat.go b/vendor/github.com/RoaringBitmap/roaring/ctz_compat.go new file mode 100644 index 0000000..80220e6 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/ctz_compat.go @@ -0,0 +1,71 @@ +// +build !go1.9 + +package roaring + +// Reuse of portions of go/src/math/big standard lib code +// under this license: +/* +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +const deBruijn32 = 0x077CB531 + +var deBruijn32Lookup = []byte{ + 0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, + 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9, +} + +const deBruijn64 = 0x03f79d71b4ca8b09 + +var deBruijn64Lookup = []byte{ + 0, 1, 56, 2, 57, 49, 28, 3, 61, 58, 42, 50, 38, 29, 17, 4, + 62, 47, 59, 36, 45, 43, 51, 22, 53, 39, 33, 30, 24, 18, 12, 5, + 63, 55, 48, 27, 60, 41, 37, 16, 46, 35, 44, 21, 52, 32, 23, 11, + 54, 26, 40, 15, 34, 20, 31, 10, 25, 14, 19, 9, 13, 8, 7, 6, +} + +// trailingZeroBits returns the number of consecutive least significant zero +// bits of x. +func countTrailingZeros(x uint64) int { + // x & -x leaves only the right-most bit set in the word. Let k be the + // index of that bit. Since only a single bit is set, the value is two + // to the power of k. 
Multiplying by a power of two is equivalent to + // left shifting, in this case by k bits. The de Bruijn constant is + // such that all six bit, consecutive substrings are distinct. + // Therefore, if we have a left shifted version of this constant we can + // find by how many bits it was shifted by looking at which six bit + // substring ended up at the top of the word. + // (Knuth, volume 4, section 7.3.1) + if x == 0 { + // We have to special case 0; the fomula + // below doesn't work for 0. + return 64 + } + return int(deBruijn64Lookup[((x&-x)*(deBruijn64))>>58]) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/fastaggregation.go b/vendor/github.com/RoaringBitmap/roaring/fastaggregation.go new file mode 100644 index 0000000..762e500 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/fastaggregation.go @@ -0,0 +1,215 @@ +package roaring + +import ( + "container/heap" +) + +// Or function that requires repairAfterLazy +func lazyOR(x1, x2 *Bitmap) *Bitmap { + answer := NewBitmap() + pos1 := 0 + pos2 := 0 + length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() +main: + for (pos1 < length1) && (pos2 < length2) { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + + for { + if s1 < s2 { + answer.highlowcontainer.appendCopy(x1.highlowcontainer, pos1) + pos1++ + if pos1 == length1 { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 > s2 { + answer.highlowcontainer.appendCopy(x2.highlowcontainer, pos2) + pos2++ + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { + c1 := x1.highlowcontainer.getContainerAtIndex(pos1) + switch t := c1.(type) { + case *arrayContainer: + c1 = t.toBitmapContainer() + case *runContainer16: + if !t.isFull() { + c1 = t.toBitmapContainer() + } + } + + answer.highlowcontainer.appendContainer(s1, c1.lazyOR(x2.highlowcontainer.getContainerAtIndex(pos2)), false) + pos1++ + pos2++ + if (pos1 
== length1) || (pos2 == length2) { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } + if pos1 == length1 { + answer.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } else if pos2 == length2 { + answer.highlowcontainer.appendCopyMany(x1.highlowcontainer, pos1, length1) + } + return answer +} + +// In-place Or function that requires repairAfterLazy +func (x1 *Bitmap) lazyOR(x2 *Bitmap) *Bitmap { + pos1 := 0 + pos2 := 0 + length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() +main: + for (pos1 < length1) && (pos2 < length2) { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + + for { + if s1 < s2 { + pos1++ + if pos1 == length1 { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 > s2 { + x1.highlowcontainer.insertNewKeyValueAt(pos1, s2, x2.highlowcontainer.getContainerAtIndex(pos2).clone()) + pos2++ + pos1++ + length1++ + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { + c1 := x1.highlowcontainer.getContainerAtIndex(pos1) + switch t := c1.(type) { + case *arrayContainer: + c1 = t.toBitmapContainer() + case *runContainer16: + if !t.isFull() { + c1 = t.toBitmapContainer() + } + case *bitmapContainer: + c1 = x1.highlowcontainer.getWritableContainerAtIndex(pos1) + } + + x1.highlowcontainer.containers[pos1] = c1.lazyIOR(x2.highlowcontainer.getContainerAtIndex(pos2)) + x1.highlowcontainer.needCopyOnWrite[pos1] = false + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } + if pos1 == length1 { + x1.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } + return x1 +} + +// to be called after lazy aggregates +func (x1 *Bitmap) repairAfterLazy() { + for pos := 0; pos < 
x1.highlowcontainer.size(); pos++ { + c := x1.highlowcontainer.getContainerAtIndex(pos) + switch c.(type) { + case *bitmapContainer: + if c.(*bitmapContainer).cardinality == invalidCardinality { + c = x1.highlowcontainer.getWritableContainerAtIndex(pos) + c.(*bitmapContainer).computeCardinality() + if c.(*bitmapContainer).getCardinality() <= arrayDefaultMaxSize { + x1.highlowcontainer.setContainerAtIndex(pos, c.(*bitmapContainer).toArrayContainer()) + } else if c.(*bitmapContainer).isFull() { + x1.highlowcontainer.setContainerAtIndex(pos, newRunContainer16Range(0, MaxUint16)) + } + } + } + } +} + +// FastAnd computes the intersection between many bitmaps quickly +// Compared to the And function, it can take many bitmaps as input, thus saving the trouble +// of manually calling "And" many times. +func FastAnd(bitmaps ...*Bitmap) *Bitmap { + if len(bitmaps) == 0 { + return NewBitmap() + } else if len(bitmaps) == 1 { + return bitmaps[0].Clone() + } + answer := And(bitmaps[0], bitmaps[1]) + for _, bm := range bitmaps[2:] { + answer.And(bm) + } + return answer +} + +// FastOr computes the union between many bitmaps quickly, as opposed to having to call Or repeatedly. +// It might also be faster than calling Or repeatedly. +func FastOr(bitmaps ...*Bitmap) *Bitmap { + if len(bitmaps) == 0 { + return NewBitmap() + } else if len(bitmaps) == 1 { + return bitmaps[0].Clone() + } + answer := lazyOR(bitmaps[0], bitmaps[1]) + for _, bm := range bitmaps[2:] { + answer = answer.lazyOR(bm) + } + // here is where repairAfterLazy is called. + answer.repairAfterLazy() + return answer +} + +// HeapOr computes the union between many bitmaps quickly using a heap. +// It might be faster than calling Or repeatedly. 
+func HeapOr(bitmaps ...*Bitmap) *Bitmap { + if len(bitmaps) == 0 { + return NewBitmap() + } + // TODO: for better speed, we could do the operation lazily, see Java implementation + pq := make(priorityQueue, len(bitmaps)) + for i, bm := range bitmaps { + pq[i] = &item{bm, i} + } + heap.Init(&pq) + + for pq.Len() > 1 { + x1 := heap.Pop(&pq).(*item) + x2 := heap.Pop(&pq).(*item) + heap.Push(&pq, &item{Or(x1.value, x2.value), 0}) + } + return heap.Pop(&pq).(*item).value +} + +// HeapXor computes the symmetric difference between many bitmaps quickly (as opposed to calling Xor repeated). +// Internally, this function uses a heap. +// It might be faster than calling Xor repeatedly. +func HeapXor(bitmaps ...*Bitmap) *Bitmap { + if len(bitmaps) == 0 { + return NewBitmap() + } + + pq := make(priorityQueue, len(bitmaps)) + for i, bm := range bitmaps { + pq[i] = &item{bm, i} + } + heap.Init(&pq) + + for pq.Len() > 1 { + x1 := heap.Pop(&pq).(*item) + x2 := heap.Pop(&pq).(*item) + heap.Push(&pq, &item{Xor(x1.value, x2.value), 0}) + } + return heap.Pop(&pq).(*item).value +} diff --git a/vendor/github.com/RoaringBitmap/roaring/manyiterator.go b/vendor/github.com/RoaringBitmap/roaring/manyiterator.go new file mode 100644 index 0000000..3007563 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/manyiterator.go @@ -0,0 +1,18 @@ +package roaring + +type manyIterable interface { + nextMany(hs uint32, buf []uint32) int +} + +func (si *shortIterator) nextMany(hs uint32, buf []uint32) int { + n := 0 + l := si.loc + s := si.slice + for n < len(buf) && l < len(s) { + buf[n] = uint32(s[l]) | hs + l++ + n++ + } + si.loc = l + return n +} diff --git a/vendor/github.com/RoaringBitmap/roaring/parallel.go b/vendor/github.com/RoaringBitmap/roaring/parallel.go new file mode 100644 index 0000000..2af1aed --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/parallel.go @@ -0,0 +1,613 @@ +package roaring + +import ( + "container/heap" + "fmt" + "runtime" + "sync" +) + +var 
defaultWorkerCount = runtime.NumCPU() + +type bitmapContainerKey struct { + key uint16 + idx int + bitmap *Bitmap +} + +type multipleContainers struct { + key uint16 + containers []container + idx int +} + +type keyedContainer struct { + key uint16 + container container + idx int +} + +type bitmapContainerHeap []bitmapContainerKey + +func (h bitmapContainerHeap) Len() int { return len(h) } +func (h bitmapContainerHeap) Less(i, j int) bool { return h[i].key < h[j].key } +func (h bitmapContainerHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] } + +func (h *bitmapContainerHeap) Push(x interface{}) { + // Push and Pop use pointer receivers because they modify the slice's length, + // not just its contents. + *h = append(*h, x.(bitmapContainerKey)) +} + +func (h *bitmapContainerHeap) Pop() interface{} { + old := *h + n := len(old) + x := old[n-1] + *h = old[0 : n-1] + return x +} + +func (h bitmapContainerHeap) Peek() bitmapContainerKey { + return h[0] +} + +func (h *bitmapContainerHeap) popIncrementing() (key uint16, container container) { + k := h.Peek() + key = k.key + container = k.bitmap.highlowcontainer.containers[k.idx] + + newIdx := k.idx + 1 + if newIdx < k.bitmap.highlowcontainer.size() { + k = bitmapContainerKey{ + k.bitmap.highlowcontainer.keys[newIdx], + newIdx, + k.bitmap, + } + (*h)[0] = k + heap.Fix(h, 0) + } else { + heap.Pop(h) + } + + return +} + +func (h *bitmapContainerHeap) Next(containers []container) multipleContainers { + if h.Len() == 0 { + return multipleContainers{} + } + + key, container := h.popIncrementing() + containers = append(containers, container) + + for h.Len() > 0 && key == h.Peek().key { + _, container = h.popIncrementing() + containers = append(containers, container) + } + + return multipleContainers{ + key, + containers, + -1, + } +} + +func newBitmapContainerHeap(bitmaps ...*Bitmap) bitmapContainerHeap { + // Initialize heap + var h bitmapContainerHeap = make([]bitmapContainerKey, 0, len(bitmaps)) + for _, bitmap := range bitmaps 
{ + if !bitmap.IsEmpty() { + key := bitmapContainerKey{ + bitmap.highlowcontainer.keys[0], + 0, + bitmap, + } + h = append(h, key) + } + } + + heap.Init(&h) + + return h +} + +func repairAfterLazy(c container) container { + switch t := c.(type) { + case *bitmapContainer: + if t.cardinality == invalidCardinality { + t.computeCardinality() + } + + if t.getCardinality() <= arrayDefaultMaxSize { + return t.toArrayContainer() + } else if c.(*bitmapContainer).isFull() { + return newRunContainer16Range(0, MaxUint16) + } + } + + return c +} + +func toBitmapContainer(c container) container { + switch t := c.(type) { + case *arrayContainer: + return t.toBitmapContainer() + case *runContainer16: + if !t.isFull() { + return t.toBitmapContainer() + } + } + return c +} + +func appenderRoutine(bitmapChan chan<- *Bitmap, resultChan <-chan keyedContainer, expectedKeysChan <-chan int) { + expectedKeys := -1 + appendedKeys := 0 + var keys []uint16 + var containers []container + for appendedKeys != expectedKeys { + select { + case item := <-resultChan: + if len(keys) <= item.idx { + keys = append(keys, make([]uint16, item.idx-len(keys)+1)...) + containers = append(containers, make([]container, item.idx-len(containers)+1)...) 
+ } + keys[item.idx] = item.key + containers[item.idx] = item.container + + appendedKeys++ + case msg := <-expectedKeysChan: + expectedKeys = msg + } + } + answer := &Bitmap{ + roaringArray{ + make([]uint16, 0, expectedKeys), + make([]container, 0, expectedKeys), + make([]bool, 0, expectedKeys), + false, + nil, + }, + } + for i := range keys { + if containers[i] != nil { // in case a resulting container was empty, see ParAnd function + answer.highlowcontainer.appendContainer(keys[i], containers[i], false) + } + } + + bitmapChan <- answer +} + +// ParHeapOr computes the union (OR) of all provided bitmaps in parallel, +// where the parameter "parallelism" determines how many workers are to be used +// (if it is set to 0, a default number of workers is chosen) +// ParHeapOr uses a heap to compute the union. For rare cases it might be faster than ParOr +func ParHeapOr(parallelism int, bitmaps ...*Bitmap) *Bitmap { + + bitmapCount := len(bitmaps) + if bitmapCount == 0 { + return NewBitmap() + } else if bitmapCount == 1 { + return bitmaps[0].Clone() + } + + if parallelism == 0 { + parallelism = defaultWorkerCount + } + + h := newBitmapContainerHeap(bitmaps...) 
+ + bitmapChan := make(chan *Bitmap) + inputChan := make(chan multipleContainers, 128) + resultChan := make(chan keyedContainer, 32) + expectedKeysChan := make(chan int) + + pool := sync.Pool{ + New: func() interface{} { + return make([]container, 0, len(bitmaps)) + }, + } + + orFunc := func() { + // Assumes only structs with >=2 containers are passed + for input := range inputChan { + c := toBitmapContainer(input.containers[0]).lazyOR(input.containers[1]) + for _, next := range input.containers[2:] { + c = c.lazyIOR(next) + } + c = repairAfterLazy(c) + kx := keyedContainer{ + input.key, + c, + input.idx, + } + resultChan <- kx + pool.Put(input.containers[:0]) + } + } + + go appenderRoutine(bitmapChan, resultChan, expectedKeysChan) + + for i := 0; i < parallelism; i++ { + go orFunc() + } + + idx := 0 + for h.Len() > 0 { + ck := h.Next(pool.Get().([]container)) + if len(ck.containers) == 1 { + resultChan <- keyedContainer{ + ck.key, + ck.containers[0], + idx, + } + pool.Put(ck.containers[:0]) + } else { + ck.idx = idx + inputChan <- ck + } + idx++ + } + expectedKeysChan <- idx + + bitmap := <-bitmapChan + + close(inputChan) + close(resultChan) + close(expectedKeysChan) + + return bitmap +} + +// ParAnd computes the intersection (AND) of all provided bitmaps in parallel, +// where the parameter "parallelism" determines how many workers are to be used +// (if it is set to 0, a default number of workers is chosen) +func ParAnd(parallelism int, bitmaps ...*Bitmap) *Bitmap { + bitmapCount := len(bitmaps) + if bitmapCount == 0 { + return NewBitmap() + } else if bitmapCount == 1 { + return bitmaps[0].Clone() + } + + if parallelism == 0 { + parallelism = defaultWorkerCount + } + + h := newBitmapContainerHeap(bitmaps...) 
+ + bitmapChan := make(chan *Bitmap) + inputChan := make(chan multipleContainers, 128) + resultChan := make(chan keyedContainer, 32) + expectedKeysChan := make(chan int) + + andFunc := func() { + // Assumes only structs with >=2 containers are passed + for input := range inputChan { + c := input.containers[0].and(input.containers[1]) + for _, next := range input.containers[2:] { + if c.getCardinality() == 0 { + break + } + c = c.iand(next) + } + + // Send a nil explicitly if the result of the intersection is an empty container + if c.getCardinality() == 0 { + c = nil + } + + kx := keyedContainer{ + input.key, + c, + input.idx, + } + resultChan <- kx + } + } + + go appenderRoutine(bitmapChan, resultChan, expectedKeysChan) + + for i := 0; i < parallelism; i++ { + go andFunc() + } + + idx := 0 + for h.Len() > 0 { + ck := h.Next(make([]container, 0, 4)) + if len(ck.containers) == bitmapCount { + ck.idx = idx + inputChan <- ck + idx++ + } + } + expectedKeysChan <- idx + + bitmap := <-bitmapChan + + close(inputChan) + close(resultChan) + close(expectedKeysChan) + + return bitmap +} + +// ParOr computes the union (OR) of all provided bitmaps in parallel, +// where the parameter "parallelism" determines how many workers are to be used +// (if it is set to 0, a default number of workers is chosen) +func ParOr(parallelism int, bitmaps ...*Bitmap) *Bitmap { + var lKey uint16 = MaxUint16 + var hKey uint16 + + bitmapsFiltered := bitmaps[:0] + for _, b := range bitmaps { + if !b.IsEmpty() { + bitmapsFiltered = append(bitmapsFiltered, b) + } + } + bitmaps = bitmapsFiltered + + for _, b := range bitmaps { + lKey = minOfUint16(lKey, b.highlowcontainer.keys[0]) + hKey = maxOfUint16(hKey, b.highlowcontainer.keys[b.highlowcontainer.size()-1]) + } + + if lKey == MaxUint16 && hKey == 0 { + return New() + } else if len(bitmaps) == 1 { + return bitmaps[0] + } + + keyRange := hKey - lKey + 1 + if keyRange == 1 { + // revert to FastOr. 
Since the key range is 0 + // no container-level aggregation parallelism is achievable + return FastOr(bitmaps...) + } + + if parallelism == 0 { + parallelism = defaultWorkerCount + } + + var chunkSize int + var chunkCount int + if parallelism*4 > int(keyRange) { + chunkSize = 1 + chunkCount = int(keyRange) + } else { + chunkCount = parallelism * 4 + chunkSize = (int(keyRange) + chunkCount - 1) / chunkCount + } + + if chunkCount*chunkSize < int(keyRange) { + // it's fine to panic to indicate an implementation error + panic(fmt.Sprintf("invariant check failed: chunkCount * chunkSize < keyRange, %d * %d < %d", chunkCount, chunkSize, keyRange)) + } + + chunks := make([]*roaringArray, chunkCount) + + chunkSpecChan := make(chan parChunkSpec, minOfInt(maxOfInt(64, 2*parallelism), int(chunkCount))) + chunkChan := make(chan parChunk, minOfInt(32, int(chunkCount))) + + orFunc := func() { + for spec := range chunkSpecChan { + ra := lazyOrOnRange(&bitmaps[0].highlowcontainer, &bitmaps[1].highlowcontainer, spec.start, spec.end) + for _, b := range bitmaps[2:] { + ra = lazyIOrOnRange(ra, &b.highlowcontainer, spec.start, spec.end) + } + + for i, c := range ra.containers { + ra.containers[i] = repairAfterLazy(c) + } + + chunkChan <- parChunk{ra, spec.idx} + } + } + + for i := 0; i < parallelism; i++ { + go orFunc() + } + + go func() { + for i := 0; i < chunkCount; i++ { + spec := parChunkSpec{ + start: uint16(int(lKey) + i*chunkSize), + end: uint16(minOfInt(int(lKey)+(i+1)*chunkSize-1, int(hKey))), + idx: int(i), + } + chunkSpecChan <- spec + } + }() + + chunksRemaining := chunkCount + for chunk := range chunkChan { + chunks[chunk.idx] = chunk.ra + chunksRemaining-- + if chunksRemaining == 0 { + break + } + } + close(chunkChan) + close(chunkSpecChan) + + containerCount := 0 + for _, chunk := range chunks { + containerCount += chunk.size() + } + + result := Bitmap{ + roaringArray{ + containers: make([]container, containerCount), + keys: make([]uint16, containerCount), + 
needCopyOnWrite: make([]bool, containerCount), + }, + } + + resultOffset := 0 + for _, chunk := range chunks { + copy(result.highlowcontainer.containers[resultOffset:], chunk.containers) + copy(result.highlowcontainer.keys[resultOffset:], chunk.keys) + copy(result.highlowcontainer.needCopyOnWrite[resultOffset:], chunk.needCopyOnWrite) + resultOffset += chunk.size() + } + + return &result +} + +type parChunkSpec struct { + start uint16 + end uint16 + idx int +} + +type parChunk struct { + ra *roaringArray + idx int +} + +func (c parChunk) size() int { + return c.ra.size() +} + +func parNaiveStartAt(ra *roaringArray, start uint16, last uint16) int { + for idx, key := range ra.keys { + if key >= start && key <= last { + return idx + } else if key > last { + break + } + } + return ra.size() +} + +func lazyOrOnRange(ra1, ra2 *roaringArray, start, last uint16) *roaringArray { + answer := newRoaringArray() + length1 := ra1.size() + length2 := ra2.size() + + idx1 := parNaiveStartAt(ra1, start, last) + idx2 := parNaiveStartAt(ra2, start, last) + + var key1 uint16 + var key2 uint16 + if idx1 < length1 && idx2 < length2 { + key1 = ra1.getKeyAtIndex(idx1) + key2 = ra2.getKeyAtIndex(idx2) + + for key1 <= last && key2 <= last { + + if key1 < key2 { + answer.appendCopy(*ra1, idx1) + idx1++ + if idx1 == length1 { + break + } + key1 = ra1.getKeyAtIndex(idx1) + } else if key1 > key2 { + answer.appendCopy(*ra2, idx2) + idx2++ + if idx2 == length2 { + break + } + key2 = ra2.getKeyAtIndex(idx2) + } else { + c1 := ra1.getFastContainerAtIndex(idx1, false) + + answer.appendContainer(key1, c1.lazyOR(ra2.getContainerAtIndex(idx2)), false) + idx1++ + idx2++ + if idx1 == length1 || idx2 == length2 { + break + } + + key1 = ra1.getKeyAtIndex(idx1) + key2 = ra2.getKeyAtIndex(idx2) + } + } + } + + if idx2 < length2 { + key2 = ra2.getKeyAtIndex(idx2) + for key2 <= last { + answer.appendCopy(*ra2, idx2) + idx2++ + if idx2 == length2 { + break + } + key2 = ra2.getKeyAtIndex(idx2) + } + } + + if idx1 
< length1 { + key1 = ra1.getKeyAtIndex(idx1) + for key1 <= last { + answer.appendCopy(*ra1, idx1) + idx1++ + if idx1 == length1 { + break + } + key1 = ra1.getKeyAtIndex(idx1) + } + } + return answer +} + +func lazyIOrOnRange(ra1, ra2 *roaringArray, start, last uint16) *roaringArray { + length1 := ra1.size() + length2 := ra2.size() + + idx1 := 0 + idx2 := parNaiveStartAt(ra2, start, last) + + var key1 uint16 + var key2 uint16 + if idx1 < length1 && idx2 < length2 { + key1 = ra1.getKeyAtIndex(idx1) + key2 = ra2.getKeyAtIndex(idx2) + + for key1 <= last && key2 <= last { + if key1 < key2 { + idx1++ + if idx1 >= length1 { + break + } + key1 = ra1.getKeyAtIndex(idx1) + } else if key1 > key2 { + ra1.insertNewKeyValueAt(idx1, key2, ra2.getContainerAtIndex(idx2)) + ra1.needCopyOnWrite[idx1] = true + idx2++ + idx1++ + length1++ + if idx2 >= length2 { + break + } + key2 = ra2.getKeyAtIndex(idx2) + } else { + c1 := ra1.getFastContainerAtIndex(idx1, true) + + ra1.containers[idx1] = c1.lazyIOR(ra2.getContainerAtIndex(idx2)) + ra1.needCopyOnWrite[idx1] = false + idx1++ + idx2++ + if idx1 >= length1 || idx2 >= length2 { + break + } + + key1 = ra1.getKeyAtIndex(idx1) + key2 = ra2.getKeyAtIndex(idx2) + } + } + } + if idx2 < length2 { + key2 = ra2.getKeyAtIndex(idx2) + for key2 <= last { + ra1.appendCopy(*ra2, idx2) + idx2++ + if idx2 >= length2 { + break + } + key2 = ra2.getKeyAtIndex(idx2) + } + } + return ra1 +} diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt.go b/vendor/github.com/RoaringBitmap/roaring/popcnt.go new file mode 100644 index 0000000..9d99508 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt.go @@ -0,0 +1,11 @@ +// +build go1.9 +// "go1.9", from Go version 1.9 onward +// See https://golang.org/pkg/go/build/#hdr-Build_Constraints + +package roaring + +import "math/bits" + +func popcount(x uint64) uint64 { + return uint64(bits.OnesCount64(x)) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt_amd64.s 
b/vendor/github.com/RoaringBitmap/roaring/popcnt_amd64.s new file mode 100644 index 0000000..1f13fa2 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt_amd64.s @@ -0,0 +1,103 @@ +// +build amd64,!appengine,!go1.9 + +TEXT ·hasAsm(SB),4,$0-1 +MOVQ $1, AX +CPUID +SHRQ $23, CX +ANDQ $1, CX +MOVB CX, ret+0(FP) +RET + +#define POPCNTQ_DX_DX BYTE $0xf3; BYTE $0x48; BYTE $0x0f; BYTE $0xb8; BYTE $0xd2 + +TEXT ·popcntSliceAsm(SB),4,$0-32 +XORQ AX, AX +MOVQ s+0(FP), SI +MOVQ s_len+8(FP), CX +TESTQ CX, CX +JZ popcntSliceEnd +popcntSliceLoop: +BYTE $0xf3; BYTE $0x48; BYTE $0x0f; BYTE $0xb8; BYTE $0x16 // POPCNTQ (SI), DX +ADDQ DX, AX +ADDQ $8, SI +LOOP popcntSliceLoop +popcntSliceEnd: +MOVQ AX, ret+24(FP) +RET + +TEXT ·popcntMaskSliceAsm(SB),4,$0-56 +XORQ AX, AX +MOVQ s+0(FP), SI +MOVQ s_len+8(FP), CX +TESTQ CX, CX +JZ popcntMaskSliceEnd +MOVQ m+24(FP), DI +popcntMaskSliceLoop: +MOVQ (DI), DX +NOTQ DX +ANDQ (SI), DX +POPCNTQ_DX_DX +ADDQ DX, AX +ADDQ $8, SI +ADDQ $8, DI +LOOP popcntMaskSliceLoop +popcntMaskSliceEnd: +MOVQ AX, ret+48(FP) +RET + +TEXT ·popcntAndSliceAsm(SB),4,$0-56 +XORQ AX, AX +MOVQ s+0(FP), SI +MOVQ s_len+8(FP), CX +TESTQ CX, CX +JZ popcntAndSliceEnd +MOVQ m+24(FP), DI +popcntAndSliceLoop: +MOVQ (DI), DX +ANDQ (SI), DX +POPCNTQ_DX_DX +ADDQ DX, AX +ADDQ $8, SI +ADDQ $8, DI +LOOP popcntAndSliceLoop +popcntAndSliceEnd: +MOVQ AX, ret+48(FP) +RET + +TEXT ·popcntOrSliceAsm(SB),4,$0-56 +XORQ AX, AX +MOVQ s+0(FP), SI +MOVQ s_len+8(FP), CX +TESTQ CX, CX +JZ popcntOrSliceEnd +MOVQ m+24(FP), DI +popcntOrSliceLoop: +MOVQ (DI), DX +ORQ (SI), DX +POPCNTQ_DX_DX +ADDQ DX, AX +ADDQ $8, SI +ADDQ $8, DI +LOOP popcntOrSliceLoop +popcntOrSliceEnd: +MOVQ AX, ret+48(FP) +RET + +TEXT ·popcntXorSliceAsm(SB),4,$0-56 +XORQ AX, AX +MOVQ s+0(FP), SI +MOVQ s_len+8(FP), CX +TESTQ CX, CX +JZ popcntXorSliceEnd +MOVQ m+24(FP), DI +popcntXorSliceLoop: +MOVQ (DI), DX +XORQ (SI), DX +POPCNTQ_DX_DX +ADDQ DX, AX +ADDQ $8, SI +ADDQ $8, DI +LOOP popcntXorSliceLoop +popcntXorSliceEnd: 
+MOVQ AX, ret+48(FP) +RET diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt_asm.go b/vendor/github.com/RoaringBitmap/roaring/popcnt_asm.go new file mode 100644 index 0000000..882d7f4 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt_asm.go @@ -0,0 +1,67 @@ +// +build amd64,!appengine,!go1.9 + +package roaring + +// *** the following functions are defined in popcnt_amd64.s + +//go:noescape + +func hasAsm() bool + +// useAsm is a flag used to select the GO or ASM implementation of the popcnt function +var useAsm = hasAsm() + +//go:noescape + +func popcntSliceAsm(s []uint64) uint64 + +//go:noescape + +func popcntMaskSliceAsm(s, m []uint64) uint64 + +//go:noescape + +func popcntAndSliceAsm(s, m []uint64) uint64 + +//go:noescape + +func popcntOrSliceAsm(s, m []uint64) uint64 + +//go:noescape + +func popcntXorSliceAsm(s, m []uint64) uint64 + +func popcntSlice(s []uint64) uint64 { + if useAsm { + return popcntSliceAsm(s) + } + return popcntSliceGo(s) +} + +func popcntMaskSlice(s, m []uint64) uint64 { + if useAsm { + return popcntMaskSliceAsm(s, m) + } + return popcntMaskSliceGo(s, m) +} + +func popcntAndSlice(s, m []uint64) uint64 { + if useAsm { + return popcntAndSliceAsm(s, m) + } + return popcntAndSliceGo(s, m) +} + +func popcntOrSlice(s, m []uint64) uint64 { + if useAsm { + return popcntOrSliceAsm(s, m) + } + return popcntOrSliceGo(s, m) +} + +func popcntXorSlice(s, m []uint64) uint64 { + if useAsm { + return popcntXorSliceAsm(s, m) + } + return popcntXorSliceGo(s, m) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt_compat.go b/vendor/github.com/RoaringBitmap/roaring/popcnt_compat.go new file mode 100644 index 0000000..7ae82d4 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt_compat.go @@ -0,0 +1,17 @@ +// +build !go1.9 + +package roaring + +// bit population count, take from +// https://code.google.com/p/go/issues/detail?id=4988#c11 +// credit: https://code.google.com/u/arnehormann/ +// credit: 
https://play.golang.org/p/U7SogJ7psJ +// credit: http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetParallel +func popcount(x uint64) uint64 { + x -= (x >> 1) & 0x5555555555555555 + x = (x>>2)&0x3333333333333333 + x&0x3333333333333333 + x += x >> 4 + x &= 0x0f0f0f0f0f0f0f0f + x *= 0x0101010101010101 + return x >> 56 +} diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt_generic.go b/vendor/github.com/RoaringBitmap/roaring/popcnt_generic.go new file mode 100644 index 0000000..edf2083 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt_generic.go @@ -0,0 +1,23 @@ +// +build !amd64 appengine go1.9 + +package roaring + +func popcntSlice(s []uint64) uint64 { + return popcntSliceGo(s) +} + +func popcntMaskSlice(s, m []uint64) uint64 { + return popcntMaskSliceGo(s, m) +} + +func popcntAndSlice(s, m []uint64) uint64 { + return popcntAndSliceGo(s, m) +} + +func popcntOrSlice(s, m []uint64) uint64 { + return popcntOrSliceGo(s, m) +} + +func popcntXorSlice(s, m []uint64) uint64 { + return popcntXorSliceGo(s, m) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/popcnt_slices.go b/vendor/github.com/RoaringBitmap/roaring/popcnt_slices.go new file mode 100644 index 0000000..d27c5f3 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/popcnt_slices.go @@ -0,0 +1,41 @@ +package roaring + +func popcntSliceGo(s []uint64) uint64 { + cnt := uint64(0) + for _, x := range s { + cnt += popcount(x) + } + return cnt +} + +func popcntMaskSliceGo(s, m []uint64) uint64 { + cnt := uint64(0) + for i := range s { + cnt += popcount(s[i] &^ m[i]) + } + return cnt +} + +func popcntAndSliceGo(s, m []uint64) uint64 { + cnt := uint64(0) + for i := range s { + cnt += popcount(s[i] & m[i]) + } + return cnt +} + +func popcntOrSliceGo(s, m []uint64) uint64 { + cnt := uint64(0) + for i := range s { + cnt += popcount(s[i] | m[i]) + } + return cnt +} + +func popcntXorSliceGo(s, m []uint64) uint64 { + cnt := uint64(0) + for i := range s { + cnt += popcount(s[i] 
^ m[i]) + } + return cnt +} diff --git a/vendor/github.com/RoaringBitmap/roaring/priorityqueue.go b/vendor/github.com/RoaringBitmap/roaring/priorityqueue.go new file mode 100644 index 0000000..9259a68 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/priorityqueue.go @@ -0,0 +1,101 @@ +package roaring + +import "container/heap" + +///////////// +// The priorityQueue is used to keep Bitmaps sorted. +//////////// + +type item struct { + value *Bitmap + index int +} + +type priorityQueue []*item + +func (pq priorityQueue) Len() int { return len(pq) } + +func (pq priorityQueue) Less(i, j int) bool { + return pq[i].value.GetSizeInBytes() < pq[j].value.GetSizeInBytes() +} + +func (pq priorityQueue) Swap(i, j int) { + pq[i], pq[j] = pq[j], pq[i] + pq[i].index = i + pq[j].index = j +} + +func (pq *priorityQueue) Push(x interface{}) { + n := len(*pq) + item := x.(*item) + item.index = n + *pq = append(*pq, item) +} + +func (pq *priorityQueue) Pop() interface{} { + old := *pq + n := len(old) + item := old[n-1] + item.index = -1 // for safety + *pq = old[0 : n-1] + return item +} + +func (pq *priorityQueue) update(item *item, value *Bitmap) { + item.value = value + heap.Fix(pq, item.index) +} + +///////////// +// The containerPriorityQueue is used to keep the containers of various Bitmaps sorted. 
+//////////// + +type containeritem struct { + value *Bitmap + keyindex int + index int +} + +type containerPriorityQueue []*containeritem + +func (pq containerPriorityQueue) Len() int { return len(pq) } + +func (pq containerPriorityQueue) Less(i, j int) bool { + k1 := pq[i].value.highlowcontainer.getKeyAtIndex(pq[i].keyindex) + k2 := pq[j].value.highlowcontainer.getKeyAtIndex(pq[j].keyindex) + if k1 != k2 { + return k1 < k2 + } + c1 := pq[i].value.highlowcontainer.getContainerAtIndex(pq[i].keyindex) + c2 := pq[j].value.highlowcontainer.getContainerAtIndex(pq[j].keyindex) + + return c1.getCardinality() > c2.getCardinality() +} + +func (pq containerPriorityQueue) Swap(i, j int) { + pq[i], pq[j] = pq[j], pq[i] + pq[i].index = i + pq[j].index = j +} + +func (pq *containerPriorityQueue) Push(x interface{}) { + n := len(*pq) + item := x.(*containeritem) + item.index = n + *pq = append(*pq, item) +} + +func (pq *containerPriorityQueue) Pop() interface{} { + old := *pq + n := len(old) + item := old[n-1] + item.index = -1 // for safety + *pq = old[0 : n-1] + return item +} + +//func (pq *containerPriorityQueue) update(item *containeritem, value *Bitmap, keyindex int) { +// item.value = value +// item.keyindex = keyindex +// heap.Fix(pq, item.index) +//} diff --git a/vendor/github.com/RoaringBitmap/roaring/roaring.go b/vendor/github.com/RoaringBitmap/roaring/roaring.go new file mode 100644 index 0000000..ed75d58 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/roaring.go @@ -0,0 +1,1557 @@ +// Package roaring is an implementation of Roaring Bitmaps in Go. +// They provide fast compressed bitmap data structures (also called bitset). +// They are ideally suited to represent sets of integers over +// relatively small ranges. +// See http://roaringbitmap.org for details. +package roaring + +import ( + "bytes" + "encoding/base64" + "fmt" + "io" + "strconv" + "sync" +) + +// Bitmap represents a compressed bitmap where you can add integers. 
+type Bitmap struct { + highlowcontainer roaringArray +} + +// ToBase64 serializes a bitmap as Base64 +func (rb *Bitmap) ToBase64() (string, error) { + buf := new(bytes.Buffer) + _, err := rb.WriteTo(buf) + return base64.StdEncoding.EncodeToString(buf.Bytes()), err + +} + +// FromBase64 deserializes a bitmap from Base64 +func (rb *Bitmap) FromBase64(str string) (int64, error) { + data, err := base64.StdEncoding.DecodeString(str) + if err != nil { + return 0, err + } + buf := bytes.NewBuffer(data) + + return rb.ReadFrom(buf) +} + +// WriteTo writes a serialized version of this bitmap to stream. +// The format is compatible with other RoaringBitmap +// implementations (Java, C) and is documented here: +// https://github.com/RoaringBitmap/RoaringFormatSpec +func (rb *Bitmap) WriteTo(stream io.Writer) (int64, error) { + return rb.highlowcontainer.writeTo(stream) +} + +// ToBytes returns an array of bytes corresponding to what is written +// when calling WriteTo +func (rb *Bitmap) ToBytes() ([]byte, error) { + return rb.highlowcontainer.toBytes() +} + +// Deprecated: WriteToMsgpack writes a msgpack2/snappy-streaming compressed serialized +// version of this bitmap to stream. The format is not +// compatible with the WriteTo() format, and is +// experimental: it may produce smaller on disk +// footprint and/or be faster to read, depending +// on your content. Currently only the Go roaring +// implementation supports this format. +func (rb *Bitmap) WriteToMsgpack(stream io.Writer) (int64, error) { + return 0, rb.highlowcontainer.writeToMsgpack(stream) +} + +// ReadFrom reads a serialized version of this bitmap from stream. 
+// The format is compatible with other RoaringBitmap +// implementations (Java, C) and is documented here: +// https://github.com/RoaringBitmap/RoaringFormatSpec +func (rb *Bitmap) ReadFrom(reader io.Reader) (p int64, err error) { + stream := byteInputAdapterPool.Get().(*byteInputAdapter) + stream.reset(reader) + + p, err = rb.highlowcontainer.readFrom(stream) + byteInputAdapterPool.Put(stream) + + return +} + +// FromBuffer creates a bitmap from its serialized version stored in buffer +// +// The format specification is available here: +// https://github.com/RoaringBitmap/RoaringFormatSpec +// +// The provided byte array (buf) is expected to be a constant. +// The function makes the best effort attempt not to copy data. +// You should take care not to modify buff as it will +// likely result in unexpected program behavior. +// +// Resulting bitmaps are effectively immutable in the following sense: +// a copy-on-write marker is used so that when you modify the resulting +// bitmap, copies of selected data (containers) are made. +// You should *not* change the copy-on-write status of the resulting +// bitmaps (SetCopyOnWrite). +// +// If buf becomes unavailable, then a bitmap created with +// FromBuffer would be effectively broken. Furthermore, any +// bitmap derived from this bitmap (e.g., via Or, And) might +// also be broken. Thus, before making buf unavailable, you should +// call CloneCopyOnWriteContainers on all such bitmaps. 
+// +func (rb *Bitmap) FromBuffer(buf []byte) (p int64, err error) { + stream := byteBufferPool.Get().(*byteBuffer) + stream.reset(buf) + + p, err = rb.highlowcontainer.readFrom(stream) + byteBufferPool.Put(stream) + + return +} + +var ( + byteBufferPool = sync.Pool{ + New: func() interface{} { + return &byteBuffer{} + }, + } + + byteInputAdapterPool = sync.Pool{ + New: func() interface{} { + return &byteInputAdapter{} + }, + } +) + +// RunOptimize attempts to further compress the runs of consecutive values found in the bitmap +func (rb *Bitmap) RunOptimize() { + rb.highlowcontainer.runOptimize() +} + +// HasRunCompression returns true if the bitmap benefits from run compression +func (rb *Bitmap) HasRunCompression() bool { + return rb.highlowcontainer.hasRunCompression() +} + +// Deprecated: ReadFromMsgpack reads a msgpack2/snappy-streaming serialized +// version of this bitmap from stream. The format is +// expected is that written by the WriteToMsgpack() +// call; see additional notes there. 
+func (rb *Bitmap) ReadFromMsgpack(stream io.Reader) (int64, error) { + return 0, rb.highlowcontainer.readFromMsgpack(stream) +} + +// MarshalBinary implements the encoding.BinaryMarshaler interface for the bitmap +// (same as ToBytes) +func (rb *Bitmap) MarshalBinary() ([]byte, error) { + return rb.ToBytes() +} + +// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface for the bitmap +func (rb *Bitmap) UnmarshalBinary(data []byte) error { + r := bytes.NewReader(data) + _, err := rb.ReadFrom(r) + return err +} + +// NewBitmap creates a new empty Bitmap (see also New) +func NewBitmap() *Bitmap { + return &Bitmap{} +} + +// New creates a new empty Bitmap (same as NewBitmap) +func New() *Bitmap { + return &Bitmap{} +} + +// Clear resets the Bitmap to be logically empty, but may retain +// some memory allocations that may speed up future operations +func (rb *Bitmap) Clear() { + rb.highlowcontainer.clear() +} + +// ToArray creates a new slice containing all of the integers stored in the Bitmap in sorted order +func (rb *Bitmap) ToArray() []uint32 { + array := make([]uint32, rb.GetCardinality()) + pos := 0 + pos2 := 0 + + for pos < rb.highlowcontainer.size() { + hs := uint32(rb.highlowcontainer.getKeyAtIndex(pos)) << 16 + c := rb.highlowcontainer.getContainerAtIndex(pos) + pos++ + c.fillLeastSignificant16bits(array, pos2, hs) + pos2 += c.getCardinality() + } + return array +} + +// GetSizeInBytes estimates the memory usage of the Bitmap. Note that this +// might differ slightly from the amount of bytes required for persistent storage +func (rb *Bitmap) GetSizeInBytes() uint64 { + size := uint64(8) + for _, c := range rb.highlowcontainer.containers { + size += uint64(2) + uint64(c.getSizeInBytes()) + } + return size +} + +// GetSerializedSizeInBytes computes the serialized size in bytes +// of the Bitmap. It should correspond to the +// number of bytes written when invoking WriteTo. 
You can expect +// that this function is much cheaper computationally than WriteTo. +func (rb *Bitmap) GetSerializedSizeInBytes() uint64 { + return rb.highlowcontainer.serializedSizeInBytes() +} + +// BoundSerializedSizeInBytes returns an upper bound on the serialized size in bytes +// assuming that one wants to store "cardinality" integers in [0, universe_size) +func BoundSerializedSizeInBytes(cardinality uint64, universeSize uint64) uint64 { + contnbr := (universeSize + uint64(65535)) / uint64(65536) + if contnbr > cardinality { + contnbr = cardinality + // we can't have more containers than we have values + } + headermax := 8*contnbr + 4 + if 4 > (contnbr+7)/8 { + headermax += 4 + } else { + headermax += (contnbr + 7) / 8 + } + valsarray := uint64(arrayContainerSizeInBytes(int(cardinality))) + valsbitmap := contnbr * uint64(bitmapContainerSizeInBytes()) + valsbest := valsarray + if valsbest > valsbitmap { + valsbest = valsbitmap + } + return valsbest + headermax +} + +// IntIterable allows you to iterate over the values in a Bitmap +type IntIterable interface { + HasNext() bool + Next() uint32 +} + +// IntPeekable allows you to look at the next value without advancing and +// advance as long as the next value is smaller than minval +type IntPeekable interface { + IntIterable + // PeekNext peeks the next value without advancing the iterator + PeekNext() uint32 + // AdvanceIfNeeded advances as long as the next value is smaller than minval + AdvanceIfNeeded(minval uint32) +} + +type intIterator struct { + pos int + hs uint32 + iter shortPeekable + highlowcontainer *roaringArray +} + +// HasNext returns true if there are more integers to iterate over +func (ii *intIterator) HasNext() bool { + return ii.pos < ii.highlowcontainer.size() +} + +func (ii *intIterator) init() { + if ii.highlowcontainer.size() > ii.pos { + ii.iter = ii.highlowcontainer.getContainerAtIndex(ii.pos).getShortIterator() + ii.hs = uint32(ii.highlowcontainer.getKeyAtIndex(ii.pos)) << 16 + } +} + 
// Next returns the next integer
func (ii *intIterator) Next() uint32 {
	// Combine the container key (high 16 bits) with the next low-16-bit value.
	x := uint32(ii.iter.next()) | ii.hs
	if !ii.iter.hasNext() {
		// Current container exhausted: move to the next one.
		ii.pos = ii.pos + 1
		ii.init()
	}
	return x
}

// PeekNext peeks the next value without advancing the iterator
func (ii *intIterator) PeekNext() uint32 {
	return uint32(ii.iter.peekNext()&maxLowBit) | ii.hs
}

// AdvanceIfNeeded advances as long as the next value is smaller than minval
func (ii *intIterator) AdvanceIfNeeded(minval uint32) {
	// Target container key for minval.
	to := minval >> 16

	// Skip whole containers whose key is below the target.
	for ii.HasNext() && (ii.hs>>16) < to {
		ii.pos++
		ii.init()
	}

	// Within the matching container, advance past values below minval's low bits.
	if ii.HasNext() && (ii.hs>>16) == to {
		ii.iter.advanceIfNeeded(lowbits(minval))

		if !ii.iter.hasNext() {
			ii.pos++
			ii.init()
		}
	}
}

// newIntIterator returns a forward iterator positioned at the first value of a.
func newIntIterator(a *Bitmap) *intIterator {
	p := new(intIterator)
	p.pos = 0
	p.highlowcontainer = &a.highlowcontainer
	p.init()
	return p
}

// intReverseIterator walks the containers from the last key to the first.
type intReverseIterator struct {
	pos              int
	hs               uint32
	iter             shortIterable
	highlowcontainer *roaringArray
}

// HasNext returns true if there are more integers to iterate over
func (ii *intReverseIterator) HasNext() bool {
	return ii.pos >= 0
}

// init positions the iterator on the container at ii.pos (reverse order),
// or clears it when the iteration is finished.
func (ii *intReverseIterator) init() {
	if ii.pos >= 0 {
		ii.iter = ii.highlowcontainer.getContainerAtIndex(ii.pos).getReverseIterator()
		ii.hs = uint32(ii.highlowcontainer.getKeyAtIndex(ii.pos)) << 16
	} else {
		ii.iter = nil
	}
}

// Next returns the next integer
func (ii *intReverseIterator) Next() uint32 {
	x := uint32(ii.iter.next()) | ii.hs
	if !ii.iter.hasNext() {
		// Move backwards to the previous container.
		ii.pos = ii.pos - 1
		ii.init()
	}
	return x
}

// newIntReverseIterator returns an iterator positioned at the last value of a.
func newIntReverseIterator(a *Bitmap) *intReverseIterator {
	p := new(intReverseIterator)
	p.highlowcontainer = &a.highlowcontainer
	p.pos = a.highlowcontainer.size() - 1
	p.init()
	return p
}

// ManyIntIterable allows you to iterate over the values in a Bitmap
type ManyIntIterable interface {
	// pass in a buffer to fill up with values, returns how many values were returned
	NextMany([]uint32) int
}

// manyIntIterator implements ManyIntIterable over a roaringArray.
type manyIntIterator struct {
	pos              int
	hs               uint32
	iter             manyIterable
	highlowcontainer *roaringArray
}

// init positions the iterator on the container at ii.pos; a nil iter marks
// the end of the iteration.
func (ii *manyIntIterator) init() {
	if ii.highlowcontainer.size() > ii.pos {
		ii.iter = ii.highlowcontainer.getContainerAtIndex(ii.pos).getManyIterator()
		ii.hs = uint32(ii.highlowcontainer.getKeyAtIndex(ii.pos)) << 16
	} else {
		ii.iter = nil
	}
}

// NextMany fills buf with up to len(buf) values and returns how many were written.
func (ii *manyIntIterator) NextMany(buf []uint32) int {
	n := 0
	for n < len(buf) {
		if ii.iter == nil {
			break
		}
		moreN := ii.iter.nextMany(ii.hs, buf[n:])
		n += moreN
		if moreN == 0 {
			// Current container exhausted: step to the next one.
			ii.pos = ii.pos + 1
			ii.init()
		}
	}

	return n
}

// newManyIntIterator returns a bulk iterator positioned at the start of a.
func newManyIntIterator(a *Bitmap) *manyIntIterator {
	p := new(manyIntIterator)
	p.pos = 0
	p.highlowcontainer = &a.highlowcontainer
	p.init()
	return p
}

// String creates a string representation of the Bitmap
func (rb *Bitmap) String() string {
	// inspired by https://github.com/fzandona/goroar/
	var buffer bytes.Buffer
	start := []byte("{")
	buffer.Write(start)
	i := rb.Iterator()
	counter := 0
	if i.HasNext() {
		counter = counter + 1
		buffer.WriteString(strconv.FormatInt(int64(i.Next()), 10))
	}
	for i.HasNext() {
		buffer.WriteString(",")
		counter = counter + 1
		// to avoid exhausting the memory
		if counter > 0x40000 {
			buffer.WriteString("...")
			break
		}
		buffer.WriteString(strconv.FormatInt(int64(i.Next()), 10))
	}
	buffer.WriteString("}")
	return buffer.String()
}

// Iterate iterates over the bitmap, calling the given callback with each value in the bitmap. If the callback returns
// false, the iteration is halted.
// The iteration results are undefined if the bitmap is modified (e.g., with Add or Remove).
// There is no guarantee as to what order the values will be iterated
func (rb *Bitmap) Iterate(cb func(x uint32) bool) {
	for i := 0; i < rb.highlowcontainer.size(); i++ {
		hs := uint32(rb.highlowcontainer.getKeyAtIndex(i)) << 16
		c := rb.highlowcontainer.getContainerAtIndex(i)

		var shouldContinue bool
		// This is hacky but it avoids allocations from invoking an interface method with a closure
		switch t := c.(type) {
		case *arrayContainer:
			shouldContinue = t.iterate(func(x uint16) bool {
				return cb(uint32(x) | hs)
			})
		case *runContainer16:
			shouldContinue = t.iterate(func(x uint16) bool {
				return cb(uint32(x) | hs)
			})
		case *bitmapContainer:
			shouldContinue = t.iterate(func(x uint16) bool {
				return cb(uint32(x) | hs)
			})
		}

		if !shouldContinue {
			break
		}
	}
}

// Iterator creates a new IntPeekable to iterate over the integers contained in the bitmap, in sorted order;
// the iterator becomes invalid if the bitmap is modified (e.g., with Add or Remove).
func (rb *Bitmap) Iterator() IntPeekable {
	return newIntIterator(rb)
}

// ReverseIterator creates a new IntIterable to iterate over the integers contained in the bitmap, in sorted order;
// the iterator becomes invalid if the bitmap is modified (e.g., with Add or Remove).
func (rb *Bitmap) ReverseIterator() IntIterable {
	return newIntReverseIterator(rb)
}

// ManyIterator creates a new ManyIntIterable to iterate over the integers contained in the bitmap, in sorted order;
// the iterator becomes invalid if the bitmap is modified (e.g., with Add or Remove).
+func (rb *Bitmap) ManyIterator() ManyIntIterable { + return newManyIntIterator(rb) +} + +// Clone creates a copy of the Bitmap +func (rb *Bitmap) Clone() *Bitmap { + ptr := new(Bitmap) + ptr.highlowcontainer = *rb.highlowcontainer.clone() + return ptr +} + +// Minimum get the smallest value stored in this roaring bitmap, assumes that it is not empty +func (rb *Bitmap) Minimum() uint32 { + return uint32(rb.highlowcontainer.containers[0].minimum()) | (uint32(rb.highlowcontainer.keys[0]) << 16) +} + +// Maximum get the largest value stored in this roaring bitmap, assumes that it is not empty +func (rb *Bitmap) Maximum() uint32 { + lastindex := len(rb.highlowcontainer.containers) - 1 + return uint32(rb.highlowcontainer.containers[lastindex].maximum()) | (uint32(rb.highlowcontainer.keys[lastindex]) << 16) +} + +// Contains returns true if the integer is contained in the bitmap +func (rb *Bitmap) Contains(x uint32) bool { + hb := highbits(x) + c := rb.highlowcontainer.getContainer(hb) + return c != nil && c.contains(lowbits(x)) +} + +// ContainsInt returns true if the integer is contained in the bitmap (this is a convenience method, the parameter is casted to uint32 and Contains is called) +func (rb *Bitmap) ContainsInt(x int) bool { + return rb.Contains(uint32(x)) +} + +// Equals returns true if the two bitmaps contain the same integers +func (rb *Bitmap) Equals(o interface{}) bool { + srb, ok := o.(*Bitmap) + if ok { + return srb.highlowcontainer.equals(rb.highlowcontainer) + } + return false +} + +// AddOffset adds the value 'offset' to each and every value in a bitmap, generating a new bitmap in the process +func AddOffset(x *Bitmap, offset uint32) (answer *Bitmap) { + return AddOffset64(x, int64(offset)) +} + +// AddOffset64 adds the value 'offset' to each and every value in a bitmap, generating a new bitmap in the process +// If offset + element is outside of the range [0,2^32), that the element will be dropped +func AddOffset64(x *Bitmap, offset int64) (answer 
*Bitmap) { + // we need "offset" to be a long because we want to support values + // between -0xFFFFFFFF up to +-0xFFFFFFFF + var containerOffset64 int64 + + if offset < 0 { + containerOffset64 = (offset - (1 << 16) + 1) / (1 << 16) + } else { + containerOffset64 = offset >> 16 + } + + if containerOffset64 >= (1<<16) || containerOffset64 <= -(1<<16) { + return New() + } + + containerOffset := int32(containerOffset64) + inOffset := (uint16)(offset - containerOffset64*(1<<16)) + + if inOffset == 0 { + answer = x.Clone() + for pos := 0; pos < answer.highlowcontainer.size(); pos++ { + key := int32(answer.highlowcontainer.getKeyAtIndex(pos)) + key += containerOffset + + if key >= 0 && key <= MaxUint16 { + answer.highlowcontainer.keys[pos] = uint16(key) + } + } + } else { + answer = New() + + for pos := 0; pos < x.highlowcontainer.size(); pos++ { + key := int32(x.highlowcontainer.getKeyAtIndex(pos)) + key += containerOffset + + c := x.highlowcontainer.getContainerAtIndex(pos) + offsetted := c.addOffset(inOffset) + + if offsetted[0].getCardinality() > 0 && (key >= 0 && key <= MaxUint16) { + curSize := answer.highlowcontainer.size() + lastkey := int32(0) + + if curSize > 0 { + lastkey = int32(answer.highlowcontainer.getKeyAtIndex(curSize - 1)) + } + + if curSize > 0 && lastkey == key { + prev := answer.highlowcontainer.getContainerAtIndex(curSize - 1) + orrseult := prev.ior(offsetted[0]) + answer.highlowcontainer.setContainerAtIndex(curSize-1, orrseult) + } else { + answer.highlowcontainer.appendContainer(uint16(key), offsetted[0], false) + } + } + + if offsetted[1].getCardinality() > 0 && ((key+1) >= 0 && (key+1) <= MaxUint16) { + answer.highlowcontainer.appendContainer(uint16(key+1), offsetted[1], false) + } + } + } + + return answer +} + +// Add the integer x to the bitmap +func (rb *Bitmap) Add(x uint32) { + hb := highbits(x) + ra := &rb.highlowcontainer + i := ra.getIndex(hb) + if i >= 0 { + var c container + c = 
ra.getWritableContainerAtIndex(i).iaddReturnMinimized(lowbits(x)) + rb.highlowcontainer.setContainerAtIndex(i, c) + } else { + newac := newArrayContainer() + rb.highlowcontainer.insertNewKeyValueAt(-i-1, hb, newac.iaddReturnMinimized(lowbits(x))) + } +} + +// add the integer x to the bitmap, return the container and its index +func (rb *Bitmap) addwithptr(x uint32) (int, container) { + hb := highbits(x) + ra := &rb.highlowcontainer + i := ra.getIndex(hb) + var c container + if i >= 0 { + c = ra.getWritableContainerAtIndex(i).iaddReturnMinimized(lowbits(x)) + rb.highlowcontainer.setContainerAtIndex(i, c) + return i, c + } + newac := newArrayContainer() + c = newac.iaddReturnMinimized(lowbits(x)) + rb.highlowcontainer.insertNewKeyValueAt(-i-1, hb, c) + return -i - 1, c +} + +// CheckedAdd adds the integer x to the bitmap and return true if it was added (false if the integer was already present) +func (rb *Bitmap) CheckedAdd(x uint32) bool { + // TODO: add unit tests for this method + hb := highbits(x) + i := rb.highlowcontainer.getIndex(hb) + if i >= 0 { + C := rb.highlowcontainer.getWritableContainerAtIndex(i) + oldcard := C.getCardinality() + C = C.iaddReturnMinimized(lowbits(x)) + rb.highlowcontainer.setContainerAtIndex(i, C) + return C.getCardinality() > oldcard + } + newac := newArrayContainer() + rb.highlowcontainer.insertNewKeyValueAt(-i-1, hb, newac.iaddReturnMinimized(lowbits(x))) + return true + +} + +// AddInt adds the integer x to the bitmap (convenience method: the parameter is casted to uint32 and we call Add) +func (rb *Bitmap) AddInt(x int) { + rb.Add(uint32(x)) +} + +// Remove the integer x from the bitmap +func (rb *Bitmap) Remove(x uint32) { + hb := highbits(x) + i := rb.highlowcontainer.getIndex(hb) + if i >= 0 { + c := rb.highlowcontainer.getWritableContainerAtIndex(i).iremoveReturnMinimized(lowbits(x)) + rb.highlowcontainer.setContainerAtIndex(i, c) + if rb.highlowcontainer.getContainerAtIndex(i).getCardinality() == 0 { + 
rb.highlowcontainer.removeAtIndex(i) + } + } +} + +// CheckedRemove removes the integer x from the bitmap and return true if the integer was effectively remove (and false if the integer was not present) +func (rb *Bitmap) CheckedRemove(x uint32) bool { + // TODO: add unit tests for this method + hb := highbits(x) + i := rb.highlowcontainer.getIndex(hb) + if i >= 0 { + C := rb.highlowcontainer.getWritableContainerAtIndex(i) + oldcard := C.getCardinality() + C = C.iremoveReturnMinimized(lowbits(x)) + rb.highlowcontainer.setContainerAtIndex(i, C) + if rb.highlowcontainer.getContainerAtIndex(i).getCardinality() == 0 { + rb.highlowcontainer.removeAtIndex(i) + return true + } + return C.getCardinality() < oldcard + } + return false + +} + +// IsEmpty returns true if the Bitmap is empty (it is faster than doing (GetCardinality() == 0)) +func (rb *Bitmap) IsEmpty() bool { + return rb.highlowcontainer.size() == 0 +} + +// GetCardinality returns the number of integers contained in the bitmap +func (rb *Bitmap) GetCardinality() uint64 { + size := uint64(0) + for _, c := range rb.highlowcontainer.containers { + size += uint64(c.getCardinality()) + } + return size +} + +// Rank returns the number of integers that are smaller or equal to x (Rank(infinity) would be GetCardinality()) +func (rb *Bitmap) Rank(x uint32) uint64 { + size := uint64(0) + for i := 0; i < rb.highlowcontainer.size(); i++ { + key := rb.highlowcontainer.getKeyAtIndex(i) + if key > highbits(x) { + return size + } + if key < highbits(x) { + size += uint64(rb.highlowcontainer.getContainerAtIndex(i).getCardinality()) + } else { + return size + uint64(rb.highlowcontainer.getContainerAtIndex(i).rank(lowbits(x))) + } + } + return size +} + +// Select returns the xth integer in the bitmap +func (rb *Bitmap) Select(x uint32) (uint32, error) { + if rb.GetCardinality() <= uint64(x) { + return 0, fmt.Errorf("can't find %dth integer in a bitmap with only %d items", x, rb.GetCardinality()) + } + + remaining := x + for i := 
0; i < rb.highlowcontainer.size(); i++ { + c := rb.highlowcontainer.getContainerAtIndex(i) + if remaining >= uint32(c.getCardinality()) { + remaining -= uint32(c.getCardinality()) + } else { + key := rb.highlowcontainer.getKeyAtIndex(i) + return uint32(key)<<16 + uint32(c.selectInt(uint16(remaining))), nil + } + } + return 0, fmt.Errorf("can't find %dth integer in a bitmap with only %d items", x, rb.GetCardinality()) +} + +// And computes the intersection between two bitmaps and stores the result in the current bitmap +func (rb *Bitmap) And(x2 *Bitmap) { + pos1 := 0 + pos2 := 0 + intersectionsize := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + +main: + for { + if pos1 < length1 && pos2 < length2 { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 == s2 { + c1 := rb.highlowcontainer.getWritableContainerAtIndex(pos1) + c2 := x2.highlowcontainer.getContainerAtIndex(pos2) + diff := c1.iand(c2) + if diff.getCardinality() > 0 { + rb.highlowcontainer.replaceKeyAndContainerAtIndex(intersectionsize, s1, diff, false) + intersectionsize++ + } + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else if s1 < s2 { + pos1 = rb.highlowcontainer.advanceUntil(s2, pos1) + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else { //s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + rb.highlowcontainer.resize(intersectionsize) +} + +// OrCardinality returns the cardinality of the union between two bitmaps, bitmaps are not modified +func (rb *Bitmap) OrCardinality(x2 *Bitmap) uint64 { + pos1 := 0 + pos2 := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + answer 
:= uint64(0) +main: + for { + if (pos1 < length1) && (pos2 < length2) { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + + for { + if s1 < s2 { + answer += uint64(rb.highlowcontainer.getContainerAtIndex(pos1).getCardinality()) + pos1++ + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 > s2 { + answer += uint64(x2.highlowcontainer.getContainerAtIndex(pos2).getCardinality()) + pos2++ + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { + // TODO: could be faster if we did not have to materialize the container + answer += uint64(rb.highlowcontainer.getContainerAtIndex(pos1).or(x2.highlowcontainer.getContainerAtIndex(pos2)).getCardinality()) + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + for ; pos1 < length1; pos1++ { + answer += uint64(rb.highlowcontainer.getContainerAtIndex(pos1).getCardinality()) + } + for ; pos2 < length2; pos2++ { + answer += uint64(x2.highlowcontainer.getContainerAtIndex(pos2).getCardinality()) + } + return answer +} + +// AndCardinality returns the cardinality of the intersection between two bitmaps, bitmaps are not modified +func (rb *Bitmap) AndCardinality(x2 *Bitmap) uint64 { + pos1 := 0 + pos2 := 0 + answer := uint64(0) + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + +main: + for { + if pos1 < length1 && pos2 < length2 { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 == s2 { + c1 := rb.highlowcontainer.getContainerAtIndex(pos1) + c2 := x2.highlowcontainer.getContainerAtIndex(pos2) + answer += uint64(c1.andCardinality(c2)) + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = 
rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else if s1 < s2 { + pos1 = rb.highlowcontainer.advanceUntil(s2, pos1) + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else { //s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + return answer +} + +// Intersects checks whether two bitmap intersects, bitmaps are not modified +func (rb *Bitmap) Intersects(x2 *Bitmap) bool { + pos1 := 0 + pos2 := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + +main: + for { + if pos1 < length1 && pos2 < length2 { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 == s2 { + c1 := rb.highlowcontainer.getContainerAtIndex(pos1) + c2 := x2.highlowcontainer.getContainerAtIndex(pos2) + if c1.intersects(c2) { + return true + } + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else if s1 < s2 { + pos1 = rb.highlowcontainer.advanceUntil(s2, pos1) + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else { //s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + return false +} + +// Xor computes the symmetric difference between two bitmaps and stores the result in the current bitmap +func (rb *Bitmap) Xor(x2 *Bitmap) { + pos1 := 0 + pos2 := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + for { + if (pos1 < length1) && (pos2 < length2) { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + if s1 < s2 { + pos1 = 
rb.highlowcontainer.advanceUntil(s2, pos1) + if pos1 == length1 { + break + } + } else if s1 > s2 { + c := x2.highlowcontainer.getWritableContainerAtIndex(pos2) + rb.highlowcontainer.insertNewKeyValueAt(pos1, x2.highlowcontainer.getKeyAtIndex(pos2), c) + length1++ + pos1++ + pos2++ + } else { + // TODO: couple be computed in-place for reduced memory usage + c := rb.highlowcontainer.getContainerAtIndex(pos1).xor(x2.highlowcontainer.getContainerAtIndex(pos2)) + if c.getCardinality() > 0 { + rb.highlowcontainer.setContainerAtIndex(pos1, c) + pos1++ + } else { + rb.highlowcontainer.removeAtIndex(pos1) + length1-- + } + pos2++ + } + } else { + break + } + } + if pos1 == length1 { + rb.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } +} + +// Or computes the union between two bitmaps and stores the result in the current bitmap +func (rb *Bitmap) Or(x2 *Bitmap) { + pos1 := 0 + pos2 := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() +main: + for (pos1 < length1) && (pos2 < length2) { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + + for { + if s1 < s2 { + pos1++ + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 > s2 { + rb.highlowcontainer.insertNewKeyValueAt(pos1, s2, x2.highlowcontainer.getContainerAtIndex(pos2).clone()) + pos1++ + length1++ + pos2++ + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { + rb.highlowcontainer.replaceKeyAndContainerAtIndex(pos1, s1, rb.highlowcontainer.getWritableContainerAtIndex(pos1).ior(x2.highlowcontainer.getContainerAtIndex(pos2)), false) + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } + if pos1 == length1 { + rb.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } +} + +// AndNot 
computes the difference between two bitmaps and stores the result in the current bitmap +func (rb *Bitmap) AndNot(x2 *Bitmap) { + pos1 := 0 + pos2 := 0 + intersectionsize := 0 + length1 := rb.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + +main: + for { + if pos1 < length1 && pos2 < length2 { + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 == s2 { + c1 := rb.highlowcontainer.getWritableContainerAtIndex(pos1) + c2 := x2.highlowcontainer.getContainerAtIndex(pos2) + diff := c1.iandNot(c2) + if diff.getCardinality() > 0 { + rb.highlowcontainer.replaceKeyAndContainerAtIndex(intersectionsize, s1, diff, false) + intersectionsize++ + } + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else if s1 < s2 { + c1 := rb.highlowcontainer.getContainerAtIndex(pos1) + mustCopyOnWrite := rb.highlowcontainer.needsCopyOnWrite(pos1) + rb.highlowcontainer.replaceKeyAndContainerAtIndex(intersectionsize, s1, c1, mustCopyOnWrite) + intersectionsize++ + pos1++ + if pos1 == length1 { + break main + } + s1 = rb.highlowcontainer.getKeyAtIndex(pos1) + } else { //s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + // TODO:implement as a copy + for pos1 < length1 { + c1 := rb.highlowcontainer.getContainerAtIndex(pos1) + s1 := rb.highlowcontainer.getKeyAtIndex(pos1) + mustCopyOnWrite := rb.highlowcontainer.needsCopyOnWrite(pos1) + rb.highlowcontainer.replaceKeyAndContainerAtIndex(intersectionsize, s1, c1, mustCopyOnWrite) + intersectionsize++ + pos1++ + } + rb.highlowcontainer.resize(intersectionsize) +} + +// Or computes the union between two bitmaps and returns the result +func Or(x1, x2 *Bitmap) *Bitmap { + answer := NewBitmap() + pos1 := 0 + pos2 := 0 + 
length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() +main: + for (pos1 < length1) && (pos2 < length2) { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + + for { + if s1 < s2 { + answer.highlowcontainer.appendCopy(x1.highlowcontainer, pos1) + pos1++ + if pos1 == length1 { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 > s2 { + answer.highlowcontainer.appendCopy(x2.highlowcontainer, pos2) + pos2++ + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { + + answer.highlowcontainer.appendContainer(s1, x1.highlowcontainer.getContainerAtIndex(pos1).or(x2.highlowcontainer.getContainerAtIndex(pos2)), false) + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } + if pos1 == length1 { + answer.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } else if pos2 == length2 { + answer.highlowcontainer.appendCopyMany(x1.highlowcontainer, pos1, length1) + } + return answer +} + +// And computes the intersection between two bitmaps and returns the result +func And(x1, x2 *Bitmap) *Bitmap { + answer := NewBitmap() + pos1 := 0 + pos2 := 0 + length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() +main: + for pos1 < length1 && pos2 < length2 { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 == s2 { + C := x1.highlowcontainer.getContainerAtIndex(pos1) + C = C.and(x2.highlowcontainer.getContainerAtIndex(pos2)) + + if C.getCardinality() > 0 { + answer.highlowcontainer.appendContainer(s1, C, false) + } + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else if s1 < s2 { + pos1 = 
x1.highlowcontainer.advanceUntil(s2, pos1) + if pos1 == length1 { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + } else { // s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } + return answer +} + +// Xor computes the symmetric difference between two bitmaps and returns the result +func Xor(x1, x2 *Bitmap) *Bitmap { + answer := NewBitmap() + pos1 := 0 + pos2 := 0 + length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + for { + if (pos1 < length1) && (pos2 < length2) { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + if s1 < s2 { + answer.highlowcontainer.appendCopy(x1.highlowcontainer, pos1) + pos1++ + } else if s1 > s2 { + answer.highlowcontainer.appendCopy(x2.highlowcontainer, pos2) + pos2++ + } else { + c := x1.highlowcontainer.getContainerAtIndex(pos1).xor(x2.highlowcontainer.getContainerAtIndex(pos2)) + if c.getCardinality() > 0 { + answer.highlowcontainer.appendContainer(s1, c, false) + } + pos1++ + pos2++ + } + } else { + break + } + } + if pos1 == length1 { + answer.highlowcontainer.appendCopyMany(x2.highlowcontainer, pos2, length2) + } else if pos2 == length2 { + answer.highlowcontainer.appendCopyMany(x1.highlowcontainer, pos1, length1) + } + return answer +} + +// AndNot computes the difference between two bitmaps and returns the result +func AndNot(x1, x2 *Bitmap) *Bitmap { + answer := NewBitmap() + pos1 := 0 + pos2 := 0 + length1 := x1.highlowcontainer.size() + length2 := x2.highlowcontainer.size() + +main: + for { + if pos1 < length1 && pos2 < length2 { + s1 := x1.highlowcontainer.getKeyAtIndex(pos1) + s2 := x2.highlowcontainer.getKeyAtIndex(pos2) + for { + if s1 < s2 { + answer.highlowcontainer.appendCopy(x1.highlowcontainer, pos1) + pos1++ + if pos1 == length1 { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + } else if s1 == s2 { + c1 
:= x1.highlowcontainer.getContainerAtIndex(pos1) + c2 := x2.highlowcontainer.getContainerAtIndex(pos2) + diff := c1.andNot(c2) + if diff.getCardinality() > 0 { + answer.highlowcontainer.appendContainer(s1, diff, false) + } + pos1++ + pos2++ + if (pos1 == length1) || (pos2 == length2) { + break main + } + s1 = x1.highlowcontainer.getKeyAtIndex(pos1) + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } else { //s1 > s2 + pos2 = x2.highlowcontainer.advanceUntil(s1, pos2) + if pos2 == length2 { + break main + } + s2 = x2.highlowcontainer.getKeyAtIndex(pos2) + } + } + } else { + break + } + } + if pos2 == length2 { + answer.highlowcontainer.appendCopyMany(x1.highlowcontainer, pos1, length1) + } + return answer +} + +// AddMany add all of the values in dat +func (rb *Bitmap) AddMany(dat []uint32) { + if len(dat) == 0 { + return + } + prev := dat[0] + idx, c := rb.addwithptr(prev) + for _, i := range dat[1:] { + if highbits(prev) == highbits(i) { + c = c.iaddReturnMinimized(lowbits(i)) + rb.highlowcontainer.setContainerAtIndex(idx, c) + } else { + idx, c = rb.addwithptr(i) + } + prev = i + } +} + +// BitmapOf generates a new bitmap filled with the specified integers +func BitmapOf(dat ...uint32) *Bitmap { + ans := NewBitmap() + ans.AddMany(dat) + return ans +} + +// Flip negates the bits in the given range (i.e., [rangeStart,rangeEnd)), any integer present in this range and in the bitmap is removed, +// and any integer present in the range and not in the bitmap is added. +// The function uses 64-bit parameters even though a Bitmap stores 32-bit values because it is allowed and meaningful to use [0,uint64(0x100000000)) as a range +// while uint64(0x100000000) cannot be represented as a 32-bit value. 
+func (rb *Bitmap) Flip(rangeStart, rangeEnd uint64) { + + if rangeEnd > MaxUint32+1 { + panic("rangeEnd > MaxUint32+1") + } + if rangeStart > MaxUint32+1 { + panic("rangeStart > MaxUint32+1") + } + + if rangeStart >= rangeEnd { + return + } + + hbStart := uint32(highbits(uint32(rangeStart))) + lbStart := uint32(lowbits(uint32(rangeStart))) + hbLast := uint32(highbits(uint32(rangeEnd - 1))) + lbLast := uint32(lowbits(uint32(rangeEnd - 1))) + + var max uint32 = maxLowBit + for hb := hbStart; hb <= hbLast; hb++ { + var containerStart uint32 + if hb == hbStart { + containerStart = uint32(lbStart) + } + containerLast := max + if hb == hbLast { + containerLast = uint32(lbLast) + } + + i := rb.highlowcontainer.getIndex(uint16(hb)) + + if i >= 0 { + c := rb.highlowcontainer.getWritableContainerAtIndex(i).inot(int(containerStart), int(containerLast)+1) + if c.getCardinality() > 0 { + rb.highlowcontainer.setContainerAtIndex(i, c) + } else { + rb.highlowcontainer.removeAtIndex(i) + } + } else { // *think* the range of ones must never be + // empty. + rb.highlowcontainer.insertNewKeyValueAt(-i-1, uint16(hb), rangeOfOnes(int(containerStart), int(containerLast))) + } + } +} + +// FlipInt calls Flip after casting the parameters (convenience method) +func (rb *Bitmap) FlipInt(rangeStart, rangeEnd int) { + rb.Flip(uint64(rangeStart), uint64(rangeEnd)) +} + +// AddRange adds the integers in [rangeStart, rangeEnd) to the bitmap. +// The function uses 64-bit parameters even though a Bitmap stores 32-bit values because it is allowed and meaningful to use [0,uint64(0x100000000)) as a range +// while uint64(0x100000000) cannot be represented as a 32-bit value. 
+func (rb *Bitmap) AddRange(rangeStart, rangeEnd uint64) { + if rangeStart >= rangeEnd { + return + } + if rangeEnd-1 > MaxUint32 { + panic("rangeEnd-1 > MaxUint32") + } + hbStart := uint32(highbits(uint32(rangeStart))) + lbStart := uint32(lowbits(uint32(rangeStart))) + hbLast := uint32(highbits(uint32(rangeEnd - 1))) + lbLast := uint32(lowbits(uint32(rangeEnd - 1))) + + var max uint32 = maxLowBit + for hb := hbStart; hb <= hbLast; hb++ { + containerStart := uint32(0) + if hb == hbStart { + containerStart = lbStart + } + containerLast := max + if hb == hbLast { + containerLast = lbLast + } + + i := rb.highlowcontainer.getIndex(uint16(hb)) + + if i >= 0 { + c := rb.highlowcontainer.getWritableContainerAtIndex(i).iaddRange(int(containerStart), int(containerLast)+1) + rb.highlowcontainer.setContainerAtIndex(i, c) + } else { // *think* the range of ones must never be + // empty. + rb.highlowcontainer.insertNewKeyValueAt(-i-1, uint16(hb), rangeOfOnes(int(containerStart), int(containerLast))) + } + } +} + +// RemoveRange removes the integers in [rangeStart, rangeEnd) from the bitmap. +// The function uses 64-bit parameters even though a Bitmap stores 32-bit values because it is allowed and meaningful to use [0,uint64(0x100000000)) as a range +// while uint64(0x100000000) cannot be represented as a 32-bit value. 
func (rb *Bitmap) RemoveRange(rangeStart, rangeEnd uint64) {
	if rangeStart >= rangeEnd {
		// Empty interval: nothing to remove.
		return
	}
	if rangeEnd-1 > MaxUint32 {
		// logically, we should assume that the user wants to
		// remove all values from rangeStart to infinity
		// see https://github.com/RoaringBitmap/roaring/issues/141
		rangeEnd = uint64(0x100000000)
	}
	hbStart := uint32(highbits(uint32(rangeStart)))
	lbStart := uint32(lowbits(uint32(rangeStart)))
	hbLast := uint32(highbits(uint32(rangeEnd - 1)))
	lbLast := uint32(lowbits(uint32(rangeEnd - 1)))

	var max uint32 = maxLowBit

	if hbStart == hbLast {
		// Fast path: the whole interval falls inside a single container.
		i := rb.highlowcontainer.getIndex(uint16(hbStart))
		if i < 0 {
			return
		}
		c := rb.highlowcontainer.getWritableContainerAtIndex(i).iremoveRange(int(lbStart), int(lbLast+1))
		if c.getCardinality() > 0 {
			rb.highlowcontainer.setContainerAtIndex(i, c)
		} else {
			// Removal emptied the container: drop it.
			rb.highlowcontainer.removeAtIndex(i)
		}
		return
	}
	// General case: trim the first and last containers, then delete every
	// container strictly between them via removeIndexRange(ifirst, ilast).
	// NOTE: a negative getIndex result encodes the insertion point as
	// -(pos)-1, as evidenced by the -ifirst-1 / -ilast-1 decoding below.
	ifirst := rb.highlowcontainer.getIndex(uint16(hbStart))
	ilast := rb.highlowcontainer.getIndex(uint16(hbLast))

	if ifirst >= 0 {
		if lbStart != 0 {
			// Partial removal from the first container; keep it (and step
			// past it) only if something survives.
			c := rb.highlowcontainer.getWritableContainerAtIndex(ifirst).iremoveRange(int(lbStart), int(max+1))
			if c.getCardinality() > 0 {
				rb.highlowcontainer.setContainerAtIndex(ifirst, c)
				ifirst++
			}
		}
	} else {
		ifirst = -ifirst - 1
	}
	if ilast >= 0 {
		if lbLast != max {
			// Partial removal from the last container; exclude it from the
			// bulk deletion only if something survives.
			c := rb.highlowcontainer.getWritableContainerAtIndex(ilast).iremoveRange(int(0), int(lbLast+1))
			if c.getCardinality() > 0 {
				rb.highlowcontainer.setContainerAtIndex(ilast, c)
			} else {
				ilast++
			}
		} else {
			// The entire last container is covered: include it in the deletion.
			ilast++
		}
	} else {
		ilast = -ilast - 1
	}
	rb.highlowcontainer.removeIndexRange(ifirst, ilast)
}

// Flip negates the bits in the given range (i.e., [rangeStart,rangeEnd)), any integer present in this range and in the bitmap is removed,
// and any integer present in the range and not in the bitmap is added, a new bitmap is returned leaving
// the current bitmap unchanged.
// The function uses 64-bit parameters even though a Bitmap stores 32-bit values because it is allowed and meaningful to use [0,uint64(0x100000000)) as a range
// while uint64(0x100000000) cannot be represented as a 32-bit value.
func Flip(bm *Bitmap, rangeStart, rangeEnd uint64) *Bitmap {
	if rangeStart >= rangeEnd {
		// Empty interval: the result is simply a copy of the input.
		return bm.Clone()
	}

	if rangeStart > MaxUint32 {
		panic("rangeStart > MaxUint32")
	}
	if rangeEnd-1 > MaxUint32 {
		panic("rangeEnd-1 > MaxUint32")
	}

	answer := NewBitmap()
	hbStart := uint32(highbits(uint32(rangeStart)))
	lbStart := uint32(lowbits(uint32(rangeStart)))
	hbLast := uint32(highbits(uint32(rangeEnd - 1)))
	lbLast := uint32(lowbits(uint32(rangeEnd - 1)))

	// copy the containers before the active area
	answer.highlowcontainer.appendCopiesUntil(bm.highlowcontainer, uint16(hbStart))

	var max uint32 = maxLowBit
	for hb := hbStart; hb <= hbLast; hb++ {
		// Sub-range of this 2^16 chunk that lies inside [rangeStart, rangeEnd).
		var containerStart uint32
		if hb == hbStart {
			containerStart = uint32(lbStart)
		}
		containerLast := max
		if hb == hbLast {
			containerLast = uint32(lbLast)
		}

		i := bm.highlowcontainer.getIndex(uint16(hb))
		j := answer.highlowcontainer.getIndex(uint16(hb))

		if i >= 0 {
			// Flip out-of-place; only keep the result if it is non-empty.
			c := bm.highlowcontainer.getContainerAtIndex(i).not(int(containerStart), int(containerLast)+1)
			if c.getCardinality() > 0 {
				answer.highlowcontainer.insertNewKeyValueAt(-j-1, uint16(hb), c)
			}

		} else { // *think* the range of ones must never be
			// empty.
			answer.highlowcontainer.insertNewKeyValueAt(-j-1, uint16(hb),
				rangeOfOnes(int(containerStart), int(containerLast)))
		}
	}
	// copy the containers after the active area.
	answer.highlowcontainer.appendCopiesAfter(bm.highlowcontainer, uint16(hbLast))

	return answer
}

// SetCopyOnWrite sets this bitmap to use copy-on-write so that copies are fast and memory conscious
// if the parameter is true, otherwise we leave the default where hard copies are made
// (copy-on-write requires extra care in a threaded context).
// Calling SetCopyOnWrite(true) on a bitmap created with FromBuffer is unsafe.
func (rb *Bitmap) SetCopyOnWrite(val bool) {
	rb.highlowcontainer.copyOnWrite = val
}

// GetCopyOnWrite gets this bitmap's copy-on-write property
func (rb *Bitmap) GetCopyOnWrite() (val bool) {
	return rb.highlowcontainer.copyOnWrite
}

// CloneCopyOnWriteContainers clones all containers which have
// needCopyOnWrite set to true.
// This can be used to make sure it is safe to munmap a []byte
// that the roaring array may still have a reference to, after
// calling FromBuffer.
// More generally this function is useful if you call FromBuffer
// to construct a bitmap with a backing array buf
// and then later discard the buf array. Note that you should call
// CloneCopyOnWriteContainers on all bitmaps that were derived
// from the 'FromBuffer' bitmap since they may have dependencies
// on the buf array as well.
func (rb *Bitmap) CloneCopyOnWriteContainers() {
	rb.highlowcontainer.cloneCopyOnWriteContainers()
}

// FlipInt calls Flip after casting the parameters (convenience method)
func FlipInt(bm *Bitmap, rangeStart, rangeEnd int) *Bitmap {
	return Flip(bm, uint64(rangeStart), uint64(rangeEnd))
}

// Statistics provides details on the container types in use.
+type Statistics struct { + Cardinality uint64 + Containers uint64 + + ArrayContainers uint64 + ArrayContainerBytes uint64 + ArrayContainerValues uint64 + + BitmapContainers uint64 + BitmapContainerBytes uint64 + BitmapContainerValues uint64 + + RunContainers uint64 + RunContainerBytes uint64 + RunContainerValues uint64 +} + +// Stats returns details on container type usage in a Statistics struct. +func (rb *Bitmap) Stats() Statistics { + stats := Statistics{} + stats.Containers = uint64(len(rb.highlowcontainer.containers)) + for _, c := range rb.highlowcontainer.containers { + stats.Cardinality += uint64(c.getCardinality()) + + switch c.(type) { + case *arrayContainer: + stats.ArrayContainers++ + stats.ArrayContainerBytes += uint64(c.getSizeInBytes()) + stats.ArrayContainerValues += uint64(c.getCardinality()) + case *bitmapContainer: + stats.BitmapContainers++ + stats.BitmapContainerBytes += uint64(c.getSizeInBytes()) + stats.BitmapContainerValues += uint64(c.getCardinality()) + case *runContainer16: + stats.RunContainers++ + stats.RunContainerBytes += uint64(c.getSizeInBytes()) + stats.RunContainerValues += uint64(c.getCardinality()) + } + } + return stats +} diff --git a/vendor/github.com/RoaringBitmap/roaring/roaringarray.go b/vendor/github.com/RoaringBitmap/roaring/roaringarray.go new file mode 100644 index 0000000..3dddbff --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/roaringarray.go @@ -0,0 +1,834 @@ +package roaring + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + + snappy "github.com/glycerine/go-unsnap-stream" + "github.com/tinylib/msgp/msgp" +) + +//go:generate msgp -unexported + +type container interface { + addOffset(uint16) []container + + clone() container + and(container) container + andCardinality(container) int + iand(container) container // i stands for inplace + andNot(container) container + iandNot(container) container // i stands for inplace + getCardinality() int + // rank returns the number of integers that are + // 
smaller or equal to x. rank(infinity) would be getCardinality(). + rank(uint16) int + + iadd(x uint16) bool // inplace, returns true if x was new. + iaddReturnMinimized(uint16) container // may change return type to minimize storage. + + //addRange(start, final int) container // range is [firstOfRange,lastOfRange) (unused) + iaddRange(start, endx int) container // i stands for inplace, range is [firstOfRange,endx) + + iremove(x uint16) bool // inplace, returns true if x was present. + iremoveReturnMinimized(uint16) container // may change return type to minimize storage. + + not(start, final int) container // range is [firstOfRange,lastOfRange) + inot(firstOfRange, endx int) container // i stands for inplace, range is [firstOfRange,endx) + xor(r container) container + getShortIterator() shortPeekable + iterate(cb func(x uint16) bool) bool + getReverseIterator() shortIterable + getManyIterator() manyIterable + contains(i uint16) bool + maximum() uint16 + minimum() uint16 + + // equals is now logical equals; it does not require the + // same underlying container types, but compares across + // any of the implementations. 
+ equals(r container) bool + + fillLeastSignificant16bits(array []uint32, i int, mask uint32) + or(r container) container + orCardinality(r container) int + isFull() bool + ior(r container) container // i stands for inplace + intersects(r container) bool // whether the two containers intersect + lazyOR(r container) container + lazyIOR(r container) container + getSizeInBytes() int + //removeRange(start, final int) container // range is [firstOfRange,lastOfRange) (unused) + iremoveRange(start, final int) container // i stands for inplace, range is [firstOfRange,lastOfRange) + selectInt(x uint16) int // selectInt returns the xth integer in the container + serializedSizeInBytes() int + writeTo(io.Writer) (int, error) + + numberOfRuns() int + toEfficientContainer() container + String() string + containerType() contype +} + +type contype uint8 + +const ( + bitmapContype contype = iota + arrayContype + run16Contype + run32Contype +) + +// careful: range is [firstOfRange,lastOfRange] +func rangeOfOnes(start, last int) container { + if start > MaxUint16 { + panic("rangeOfOnes called with start > MaxUint16") + } + if last > MaxUint16 { + panic("rangeOfOnes called with last > MaxUint16") + } + if start < 0 { + panic("rangeOfOnes called with start < 0") + } + if last < 0 { + panic("rangeOfOnes called with last < 0") + } + return newRunContainer16Range(uint16(start), uint16(last)) +} + +type roaringArray struct { + keys []uint16 + containers []container `msg:"-"` // don't try to serialize directly. + needCopyOnWrite []bool + copyOnWrite bool + + // conserz is used at serialization time + // to serialize containers. Otherwise empty. + conserz []containerSerz +} + +// containerSerz facilitates serializing container (tricky to +// serialize because it is an interface) by providing a +// light wrapper with a type identifier. 
+type containerSerz struct { + t contype `msg:"t"` // type + r msgp.Raw `msg:"r"` // Raw msgpack of the actual container type +} + +func newRoaringArray() *roaringArray { + return &roaringArray{} +} + +// runOptimize compresses the element containers to minimize space consumed. +// Q: how does this interact with copyOnWrite and needCopyOnWrite? +// A: since we aren't changing the logical content, just the representation, +// we don't bother to check the needCopyOnWrite bits. We replace +// (possibly all) elements of ra.containers in-place with space +// optimized versions. +func (ra *roaringArray) runOptimize() { + for i := range ra.containers { + ra.containers[i] = ra.containers[i].toEfficientContainer() + } +} + +func (ra *roaringArray) appendContainer(key uint16, value container, mustCopyOnWrite bool) { + ra.keys = append(ra.keys, key) + ra.containers = append(ra.containers, value) + ra.needCopyOnWrite = append(ra.needCopyOnWrite, mustCopyOnWrite) +} + +func (ra *roaringArray) appendWithoutCopy(sa roaringArray, startingindex int) { + mustCopyOnWrite := sa.needCopyOnWrite[startingindex] + ra.appendContainer(sa.keys[startingindex], sa.containers[startingindex], mustCopyOnWrite) +} + +func (ra *roaringArray) appendCopy(sa roaringArray, startingindex int) { + // cow only if the two request it, or if we already have a lightweight copy + copyonwrite := (ra.copyOnWrite && sa.copyOnWrite) || sa.needsCopyOnWrite(startingindex) + if !copyonwrite { + // since there is no copy-on-write, we need to clone the container (this is important) + ra.appendContainer(sa.keys[startingindex], sa.containers[startingindex].clone(), copyonwrite) + } else { + ra.appendContainer(sa.keys[startingindex], sa.containers[startingindex], copyonwrite) + if !sa.needsCopyOnWrite(startingindex) { + sa.setNeedsCopyOnWrite(startingindex) + } + } +} + +func (ra *roaringArray) appendWithoutCopyMany(sa roaringArray, startingindex, end int) { + for i := startingindex; i < end; i++ { + 
ra.appendWithoutCopy(sa, i) + } +} + +func (ra *roaringArray) appendCopyMany(sa roaringArray, startingindex, end int) { + for i := startingindex; i < end; i++ { + ra.appendCopy(sa, i) + } +} + +func (ra *roaringArray) appendCopiesUntil(sa roaringArray, stoppingKey uint16) { + // cow only if the two request it, or if we already have a lightweight copy + copyonwrite := ra.copyOnWrite && sa.copyOnWrite + + for i := 0; i < sa.size(); i++ { + if sa.keys[i] >= stoppingKey { + break + } + thiscopyonewrite := copyonwrite || sa.needsCopyOnWrite(i) + if thiscopyonewrite { + ra.appendContainer(sa.keys[i], sa.containers[i], thiscopyonewrite) + if !sa.needsCopyOnWrite(i) { + sa.setNeedsCopyOnWrite(i) + } + + } else { + // since there is no copy-on-write, we need to clone the container (this is important) + ra.appendContainer(sa.keys[i], sa.containers[i].clone(), thiscopyonewrite) + + } + } +} + +func (ra *roaringArray) appendCopiesAfter(sa roaringArray, beforeStart uint16) { + // cow only if the two request it, or if we already have a lightweight copy + copyonwrite := ra.copyOnWrite && sa.copyOnWrite + + startLocation := sa.getIndex(beforeStart) + if startLocation >= 0 { + startLocation++ + } else { + startLocation = -startLocation - 1 + } + + for i := startLocation; i < sa.size(); i++ { + thiscopyonewrite := copyonwrite || sa.needsCopyOnWrite(i) + if thiscopyonewrite { + ra.appendContainer(sa.keys[i], sa.containers[i], thiscopyonewrite) + if !sa.needsCopyOnWrite(i) { + sa.setNeedsCopyOnWrite(i) + } + } else { + // since there is no copy-on-write, we need to clone the container (this is important) + ra.appendContainer(sa.keys[i], sa.containers[i].clone(), thiscopyonewrite) + + } + } +} + +func (ra *roaringArray) removeIndexRange(begin, end int) { + if end <= begin { + return + } + + r := end - begin + + copy(ra.keys[begin:], ra.keys[end:]) + copy(ra.containers[begin:], ra.containers[end:]) + copy(ra.needCopyOnWrite[begin:], ra.needCopyOnWrite[end:]) + + ra.resize(len(ra.keys) - 
r) +} + +func (ra *roaringArray) resize(newsize int) { + for k := newsize; k < len(ra.containers); k++ { + ra.containers[k] = nil + } + + ra.keys = ra.keys[:newsize] + ra.containers = ra.containers[:newsize] + ra.needCopyOnWrite = ra.needCopyOnWrite[:newsize] +} + +func (ra *roaringArray) clear() { + ra.resize(0) + ra.copyOnWrite = false + ra.conserz = nil +} + +func (ra *roaringArray) clone() *roaringArray { + + sa := roaringArray{} + sa.copyOnWrite = ra.copyOnWrite + + // this is where copyOnWrite is used. + if ra.copyOnWrite { + sa.keys = make([]uint16, len(ra.keys)) + copy(sa.keys, ra.keys) + sa.containers = make([]container, len(ra.containers)) + copy(sa.containers, ra.containers) + sa.needCopyOnWrite = make([]bool, len(ra.needCopyOnWrite)) + + ra.markAllAsNeedingCopyOnWrite() + sa.markAllAsNeedingCopyOnWrite() + + // sa.needCopyOnWrite is shared + } else { + // make a full copy + + sa.keys = make([]uint16, len(ra.keys)) + copy(sa.keys, ra.keys) + + sa.containers = make([]container, len(ra.containers)) + for i := range sa.containers { + sa.containers[i] = ra.containers[i].clone() + } + + sa.needCopyOnWrite = make([]bool, len(ra.needCopyOnWrite)) + } + return &sa +} + +// clone all containers which have needCopyOnWrite set to true +// This can be used to make sure it is safe to munmap a []byte +// that the roaring array may still have a reference to. 
+func (ra *roaringArray) cloneCopyOnWriteContainers() { + for i, needCopyOnWrite := range ra.needCopyOnWrite { + if needCopyOnWrite { + ra.containers[i] = ra.containers[i].clone() + ra.needCopyOnWrite[i] = false + } + } +} + +// unused function: +//func (ra *roaringArray) containsKey(x uint16) bool { +// return (ra.binarySearch(0, int64(len(ra.keys)), x) >= 0) +//} + +func (ra *roaringArray) getContainer(x uint16) container { + i := ra.binarySearch(0, int64(len(ra.keys)), x) + if i < 0 { + return nil + } + return ra.containers[i] +} + +func (ra *roaringArray) getContainerAtIndex(i int) container { + return ra.containers[i] +} + +func (ra *roaringArray) getFastContainerAtIndex(i int, needsWriteable bool) container { + c := ra.getContainerAtIndex(i) + switch t := c.(type) { + case *arrayContainer: + c = t.toBitmapContainer() + case *runContainer16: + if !t.isFull() { + c = t.toBitmapContainer() + } + case *bitmapContainer: + if needsWriteable && ra.needCopyOnWrite[i] { + c = ra.containers[i].clone() + } + } + return c +} + +func (ra *roaringArray) getWritableContainerAtIndex(i int) container { + if ra.needCopyOnWrite[i] { + ra.containers[i] = ra.containers[i].clone() + ra.needCopyOnWrite[i] = false + } + return ra.containers[i] +} + +func (ra *roaringArray) getIndex(x uint16) int { + // before the binary search, we optimize for frequent cases + size := len(ra.keys) + if (size == 0) || (ra.keys[size-1] == x) { + return size - 1 + } + return ra.binarySearch(0, int64(size), x) +} + +func (ra *roaringArray) getKeyAtIndex(i int) uint16 { + return ra.keys[i] +} + +func (ra *roaringArray) insertNewKeyValueAt(i int, key uint16, value container) { + ra.keys = append(ra.keys, 0) + ra.containers = append(ra.containers, nil) + + copy(ra.keys[i+1:], ra.keys[i:]) + copy(ra.containers[i+1:], ra.containers[i:]) + + ra.keys[i] = key + ra.containers[i] = value + + ra.needCopyOnWrite = append(ra.needCopyOnWrite, false) + copy(ra.needCopyOnWrite[i+1:], ra.needCopyOnWrite[i:]) + 
ra.needCopyOnWrite[i] = false +} + +func (ra *roaringArray) remove(key uint16) bool { + i := ra.binarySearch(0, int64(len(ra.keys)), key) + if i >= 0 { // if a new key + ra.removeAtIndex(i) + return true + } + return false +} + +func (ra *roaringArray) removeAtIndex(i int) { + copy(ra.keys[i:], ra.keys[i+1:]) + copy(ra.containers[i:], ra.containers[i+1:]) + + copy(ra.needCopyOnWrite[i:], ra.needCopyOnWrite[i+1:]) + + ra.resize(len(ra.keys) - 1) +} + +func (ra *roaringArray) setContainerAtIndex(i int, c container) { + ra.containers[i] = c +} + +func (ra *roaringArray) replaceKeyAndContainerAtIndex(i int, key uint16, c container, mustCopyOnWrite bool) { + ra.keys[i] = key + ra.containers[i] = c + ra.needCopyOnWrite[i] = mustCopyOnWrite +} + +func (ra *roaringArray) size() int { + return len(ra.keys) +} + +func (ra *roaringArray) binarySearch(begin, end int64, ikey uint16) int { + low := begin + high := end - 1 + for low+16 <= high { + middleIndex := low + (high-low)/2 // avoid overflow + middleValue := ra.keys[middleIndex] + + if middleValue < ikey { + low = middleIndex + 1 + } else if middleValue > ikey { + high = middleIndex - 1 + } else { + return int(middleIndex) + } + } + for ; low <= high; low++ { + val := ra.keys[low] + if val >= ikey { + if val == ikey { + return int(low) + } + break + } + } + return -int(low + 1) +} + +func (ra *roaringArray) equals(o interface{}) bool { + srb, ok := o.(roaringArray) + if ok { + + if srb.size() != ra.size() { + return false + } + for i, k := range ra.keys { + if k != srb.keys[i] { + return false + } + } + + for i, c := range ra.containers { + if !c.equals(srb.containers[i]) { + return false + } + } + return true + } + return false +} + +func (ra *roaringArray) headerSize() uint64 { + size := uint64(len(ra.keys)) + if ra.hasRunCompression() { + if size < noOffsetThreshold { // for small bitmaps, we omit the offsets + return 4 + (size+7)/8 + 4*size + } + return 4 + (size+7)/8 + 8*size // - 4 because we pack the size with the 
cookie + } + return 4 + 4 + 8*size + +} + +// should be dirt cheap +func (ra *roaringArray) serializedSizeInBytes() uint64 { + answer := ra.headerSize() + for _, c := range ra.containers { + answer += uint64(c.serializedSizeInBytes()) + } + return answer +} + +// +// spec: https://github.com/RoaringBitmap/RoaringFormatSpec +// +func (ra *roaringArray) writeTo(w io.Writer) (n int64, err error) { + hasRun := ra.hasRunCompression() + isRunSizeInBytes := 0 + cookieSize := 8 + if hasRun { + cookieSize = 4 + isRunSizeInBytes = (len(ra.keys) + 7) / 8 + } + descriptiveHeaderSize := 4 * len(ra.keys) + preambleSize := cookieSize + isRunSizeInBytes + descriptiveHeaderSize + + buf := make([]byte, preambleSize+4*len(ra.keys)) + + nw := 0 + + if hasRun { + binary.LittleEndian.PutUint16(buf[0:], uint16(serialCookie)) + nw += 2 + binary.LittleEndian.PutUint16(buf[2:], uint16(len(ra.keys)-1)) + nw += 2 + // compute isRun bitmap without temporary allocation + var runbitmapslice = buf[nw:nw+isRunSizeInBytes] + for i, c := range ra.containers { + switch c.(type) { + case *runContainer16: + runbitmapslice[i / 8] |= 1<<(uint(i)%8) + } + } + nw += isRunSizeInBytes + } else { + binary.LittleEndian.PutUint32(buf[0:], uint32(serialCookieNoRunContainer)) + nw += 4 + binary.LittleEndian.PutUint32(buf[4:], uint32(len(ra.keys))) + nw += 4 + } + + // descriptive header + for i, key := range ra.keys { + binary.LittleEndian.PutUint16(buf[nw:], key) + nw += 2 + c := ra.containers[i] + binary.LittleEndian.PutUint16(buf[nw:], uint16(c.getCardinality()-1)) + nw += 2 + } + + startOffset := int64(preambleSize + 4*len(ra.keys)) + if !hasRun || (len(ra.keys) >= noOffsetThreshold) { + // offset header + for _, c := range ra.containers { + binary.LittleEndian.PutUint32(buf[nw:], uint32(startOffset)) + nw += 4 + switch rc := c.(type) { + case *runContainer16: + startOffset += 2 + int64(len(rc.iv))*4 + default: + startOffset += int64(getSizeInBytesFromCardinality(c.getCardinality())) + } + } + } + + written, 
err := w.Write(buf[:nw]) + if err != nil { + return n, err + } + n += int64(written) + + for _, c := range ra.containers { + written, err := c.writeTo(w) + if err != nil { + return n, err + } + n += int64(written) + } + return n, nil +} + +// +// spec: https://github.com/RoaringBitmap/RoaringFormatSpec +// +func (ra *roaringArray) toBytes() ([]byte, error) { + var buf bytes.Buffer + _, err := ra.writeTo(&buf) + return buf.Bytes(), err +} + +func (ra *roaringArray) readFrom(stream byteInput) (int64, error) { + cookie, err := stream.readUInt32() + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("error in roaringArray.readFrom: could not read initial cookie: %s", err) + } + + var size uint32 + var isRunBitmap []byte + + if cookie&0x0000FFFF == serialCookie { + size = uint32(uint16(cookie>>16) + 1) + // create is-run-container bitmap + isRunBitmapSize := (int(size) + 7) / 8 + isRunBitmap, err = stream.next(isRunBitmapSize) + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("malformed bitmap, failed to read is-run bitmap, got: %s", err) + } + } else if cookie == serialCookieNoRunContainer { + size, err = stream.readUInt32() + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("malformed bitmap, failed to read a bitmap size: %s", err) + } + } else { + return stream.getReadBytes(), fmt.Errorf("error in roaringArray.readFrom: did not find expected serialCookie in header") + } + + if size > (1 << 16) { + return stream.getReadBytes(), fmt.Errorf("it is logically impossible to have more than (1<<16) containers") + } + + // descriptive header + buf, err := stream.next(2 * 2 * int(size)) + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("failed to read descriptive header: %s", err) + } + + keycard := byteSliceAsUint16Slice(buf) + + if isRunBitmap == nil || size >= noOffsetThreshold { + if err := stream.skipBytes(int(size) * 4); err != nil { + return stream.getReadBytes(), fmt.Errorf("failed to skip bytes: %s", err) + } + } + + // 
Allocate slices upfront as number of containers is known + if cap(ra.containers) >= int(size) { + ra.containers = ra.containers[:size] + } else { + ra.containers = make([]container, size) + } + + if cap(ra.keys) >= int(size) { + ra.keys = ra.keys[:size] + } else { + ra.keys = make([]uint16, size) + } + + if cap(ra.needCopyOnWrite) >= int(size) { + ra.needCopyOnWrite = ra.needCopyOnWrite[:size] + } else { + ra.needCopyOnWrite = make([]bool, size) + } + + for i := uint32(0); i < size; i++ { + key := keycard[2*i] + card := int(keycard[2*i+1]) + 1 + ra.keys[i] = key + ra.needCopyOnWrite[i] = true + + if isRunBitmap != nil && isRunBitmap[i/8]&(1<<(i%8)) != 0 { + // run container + nr, err := stream.readUInt16() + + if err != nil { + return 0, fmt.Errorf("failed to read runtime container size: %s", err) + } + + buf, err := stream.next(int(nr) * 4) + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("failed to read runtime container content: %s", err) + } + + nb := runContainer16{ + iv: byteSliceAsInterval16Slice(buf), + card: int64(card), + } + + ra.containers[i] = &nb + } else if card > arrayDefaultMaxSize { + // bitmap container + buf, err := stream.next(arrayDefaultMaxSize * 2) + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("failed to read bitmap container: %s", err) + } + + nb := bitmapContainer{ + cardinality: card, + bitmap: byteSliceAsUint64Slice(buf), + } + + ra.containers[i] = &nb + } else { + // array container + buf, err := stream.next(card * 2) + + if err != nil { + return stream.getReadBytes(), fmt.Errorf("failed to read array container: %s", err) + } + + nb := arrayContainer{ + byteSliceAsUint16Slice(buf), + } + + ra.containers[i] = &nb + } + } + + return stream.getReadBytes(), nil +} + +func (ra *roaringArray) hasRunCompression() bool { + for _, c := range ra.containers { + switch c.(type) { + case *runContainer16: + return true + } + } + return false +} + +func (ra *roaringArray) writeToMsgpack(stream io.Writer) error { + + 
ra.conserz = make([]containerSerz, len(ra.containers)) + for i, v := range ra.containers { + switch cn := v.(type) { + case *bitmapContainer: + bts, err := cn.MarshalMsg(nil) + if err != nil { + return err + } + ra.conserz[i].t = bitmapContype + ra.conserz[i].r = bts + case *arrayContainer: + bts, err := cn.MarshalMsg(nil) + if err != nil { + return err + } + ra.conserz[i].t = arrayContype + ra.conserz[i].r = bts + case *runContainer16: + bts, err := cn.MarshalMsg(nil) + if err != nil { + return err + } + ra.conserz[i].t = run16Contype + ra.conserz[i].r = bts + default: + panic(fmt.Errorf("Unrecognized container implementation: %T", cn)) + } + } + w := snappy.NewWriter(stream) + err := msgp.Encode(w, ra) + ra.conserz = nil + return err +} + +func (ra *roaringArray) readFromMsgpack(stream io.Reader) error { + r := snappy.NewReader(stream) + err := msgp.Decode(r, ra) + if err != nil { + return err + } + + if len(ra.containers) != len(ra.keys) { + ra.containers = make([]container, len(ra.keys)) + } + + for i, v := range ra.conserz { + switch v.t { + case bitmapContype: + c := &bitmapContainer{} + _, err = c.UnmarshalMsg(v.r) + if err != nil { + return err + } + ra.containers[i] = c + case arrayContype: + c := &arrayContainer{} + _, err = c.UnmarshalMsg(v.r) + if err != nil { + return err + } + ra.containers[i] = c + case run16Contype: + c := &runContainer16{} + _, err = c.UnmarshalMsg(v.r) + if err != nil { + return err + } + ra.containers[i] = c + default: + return fmt.Errorf("unrecognized contype serialization code: '%v'", v.t) + } + } + ra.conserz = nil + return nil +} + +func (ra *roaringArray) advanceUntil(min uint16, pos int) int { + lower := pos + 1 + + if lower >= len(ra.keys) || ra.keys[lower] >= min { + return lower + } + + spansize := 1 + + for lower+spansize < len(ra.keys) && ra.keys[lower+spansize] < min { + spansize *= 2 + } + var upper int + if lower+spansize < len(ra.keys) { + upper = lower + spansize + } else { + upper = len(ra.keys) - 1 + } + + if 
ra.keys[upper] == min { + return upper + } + + if ra.keys[upper] < min { + // means + // array + // has no + // item + // >= min + // pos = array.length; + return len(ra.keys) + } + + // we know that the next-smallest span was too small + lower += (spansize >> 1) + + mid := 0 + for lower+1 != upper { + mid = (lower + upper) >> 1 + if ra.keys[mid] == min { + return mid + } else if ra.keys[mid] < min { + lower = mid + } else { + upper = mid + } + } + return upper +} + +func (ra *roaringArray) markAllAsNeedingCopyOnWrite() { + for i := range ra.needCopyOnWrite { + ra.needCopyOnWrite[i] = true + } +} + +func (ra *roaringArray) needsCopyOnWrite(i int) bool { + return ra.needCopyOnWrite[i] +} + +func (ra *roaringArray) setNeedsCopyOnWrite(i int) { + ra.needCopyOnWrite[i] = true +} diff --git a/vendor/github.com/RoaringBitmap/roaring/roaringarray_gen.go b/vendor/github.com/RoaringBitmap/roaring/roaringarray_gen.go new file mode 100644 index 0000000..dcd7187 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/roaringarray_gen.go @@ -0,0 +1,529 @@ +package roaring + +// NOTE: THIS FILE WAS PRODUCED BY THE +// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp) +// DO NOT EDIT + +import ( + "github.com/tinylib/msgp/msgp" +) + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *containerSerz) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zxvk uint32 + zxvk, err = dc.ReadMapHeader() + if err != nil { + return + } + for zxvk > 0 { + zxvk-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "t": + { + var zbzg uint8 + zbzg, err = dc.ReadUint8() + z.t = contype(zbzg) + } + if err != nil { + return + } + case "r": + err = z.r.DecodeMsg(dc) + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z *containerSerz) EncodeMsg(en *msgp.Writer) (err error) { + // map 
header, size 2 + // write "t" + err = en.Append(0x82, 0xa1, 0x74) + if err != nil { + return err + } + err = en.WriteUint8(uint8(z.t)) + if err != nil { + return + } + // write "r" + err = en.Append(0xa1, 0x72) + if err != nil { + return err + } + err = z.r.EncodeMsg(en) + if err != nil { + return + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *containerSerz) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 2 + // string "t" + o = append(o, 0x82, 0xa1, 0x74) + o = msgp.AppendUint8(o, uint8(z.t)) + // string "r" + o = append(o, 0xa1, 0x72) + o, err = z.r.MarshalMsg(o) + if err != nil { + return + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *containerSerz) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zbai uint32 + zbai, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zbai > 0 { + zbai-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "t": + { + var zcmr uint8 + zcmr, bts, err = msgp.ReadUint8Bytes(bts) + z.t = contype(zcmr) + } + if err != nil { + return + } + case "r": + bts, err = z.r.UnmarshalMsg(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *containerSerz) Msgsize() (s int) { + s = 1 + 2 + msgp.Uint8Size + 2 + z.r.Msgsize() + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *contype) DecodeMsg(dc *msgp.Reader) (err error) { + { + var zajw uint8 + zajw, err = dc.ReadUint8() + (*z) = contype(zajw) + } + if err != nil { + return + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z contype) EncodeMsg(en *msgp.Writer) (err error) { + err = 
en.WriteUint8(uint8(z)) + if err != nil { + return + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z contype) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendUint8(o, uint8(z)) + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *contype) UnmarshalMsg(bts []byte) (o []byte, err error) { + { + var zwht uint8 + zwht, bts, err = msgp.ReadUint8Bytes(bts) + (*z) = contype(zwht) + } + if err != nil { + return + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z contype) Msgsize() (s int) { + s = msgp.Uint8Size + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *roaringArray) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zlqf uint32 + zlqf, err = dc.ReadMapHeader() + if err != nil { + return + } + for zlqf > 0 { + zlqf-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "keys": + var zdaf uint32 + zdaf, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.keys) >= int(zdaf) { + z.keys = (z.keys)[:zdaf] + } else { + z.keys = make([]uint16, zdaf) + } + for zhct := range z.keys { + z.keys[zhct], err = dc.ReadUint16() + if err != nil { + return + } + } + case "needCopyOnWrite": + var zpks uint32 + zpks, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.needCopyOnWrite) >= int(zpks) { + z.needCopyOnWrite = (z.needCopyOnWrite)[:zpks] + } else { + z.needCopyOnWrite = make([]bool, zpks) + } + for zcua := range z.needCopyOnWrite { + z.needCopyOnWrite[zcua], err = dc.ReadBool() + if err != nil { + return + } + } + case "copyOnWrite": + z.copyOnWrite, err = dc.ReadBool() + if err != nil { + return + } + case "conserz": + var zjfb uint32 + zjfb, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.conserz) >= int(zjfb) { 
+ z.conserz = (z.conserz)[:zjfb] + } else { + z.conserz = make([]containerSerz, zjfb) + } + for zxhx := range z.conserz { + var zcxo uint32 + zcxo, err = dc.ReadMapHeader() + if err != nil { + return + } + for zcxo > 0 { + zcxo-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "t": + { + var zeff uint8 + zeff, err = dc.ReadUint8() + z.conserz[zxhx].t = contype(zeff) + } + if err != nil { + return + } + case "r": + err = z.conserz[zxhx].r.DecodeMsg(dc) + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z *roaringArray) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 4 + // write "keys" + err = en.Append(0x84, 0xa4, 0x6b, 0x65, 0x79, 0x73) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.keys))) + if err != nil { + return + } + for zhct := range z.keys { + err = en.WriteUint16(z.keys[zhct]) + if err != nil { + return + } + } + // write "needCopyOnWrite" + err = en.Append(0xaf, 0x6e, 0x65, 0x65, 0x64, 0x43, 0x6f, 0x70, 0x79, 0x4f, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.needCopyOnWrite))) + if err != nil { + return + } + for zcua := range z.needCopyOnWrite { + err = en.WriteBool(z.needCopyOnWrite[zcua]) + if err != nil { + return + } + } + // write "copyOnWrite" + err = en.Append(0xab, 0x63, 0x6f, 0x70, 0x79, 0x4f, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65) + if err != nil { + return err + } + err = en.WriteBool(z.copyOnWrite) + if err != nil { + return + } + // write "conserz" + err = en.Append(0xa7, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x72, 0x7a) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.conserz))) + if err != nil { + return + } + for zxhx := range z.conserz { + // map header, size 2 + 
// write "t" + err = en.Append(0x82, 0xa1, 0x74) + if err != nil { + return err + } + err = en.WriteUint8(uint8(z.conserz[zxhx].t)) + if err != nil { + return + } + // write "r" + err = en.Append(0xa1, 0x72) + if err != nil { + return err + } + err = z.conserz[zxhx].r.EncodeMsg(en) + if err != nil { + return + } + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *roaringArray) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 4 + // string "keys" + o = append(o, 0x84, 0xa4, 0x6b, 0x65, 0x79, 0x73) + o = msgp.AppendArrayHeader(o, uint32(len(z.keys))) + for zhct := range z.keys { + o = msgp.AppendUint16(o, z.keys[zhct]) + } + // string "needCopyOnWrite" + o = append(o, 0xaf, 0x6e, 0x65, 0x65, 0x64, 0x43, 0x6f, 0x70, 0x79, 0x4f, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65) + o = msgp.AppendArrayHeader(o, uint32(len(z.needCopyOnWrite))) + for zcua := range z.needCopyOnWrite { + o = msgp.AppendBool(o, z.needCopyOnWrite[zcua]) + } + // string "copyOnWrite" + o = append(o, 0xab, 0x63, 0x6f, 0x70, 0x79, 0x4f, 0x6e, 0x57, 0x72, 0x69, 0x74, 0x65) + o = msgp.AppendBool(o, z.copyOnWrite) + // string "conserz" + o = append(o, 0xa7, 0x63, 0x6f, 0x6e, 0x73, 0x65, 0x72, 0x7a) + o = msgp.AppendArrayHeader(o, uint32(len(z.conserz))) + for zxhx := range z.conserz { + // map header, size 2 + // string "t" + o = append(o, 0x82, 0xa1, 0x74) + o = msgp.AppendUint8(o, uint8(z.conserz[zxhx].t)) + // string "r" + o = append(o, 0xa1, 0x72) + o, err = z.conserz[zxhx].r.MarshalMsg(o) + if err != nil { + return + } + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *roaringArray) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zrsw uint32 + zrsw, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zrsw > 0 { + zrsw-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + 
case "keys": + var zxpk uint32 + zxpk, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.keys) >= int(zxpk) { + z.keys = (z.keys)[:zxpk] + } else { + z.keys = make([]uint16, zxpk) + } + for zhct := range z.keys { + z.keys[zhct], bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + } + case "needCopyOnWrite": + var zdnj uint32 + zdnj, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.needCopyOnWrite) >= int(zdnj) { + z.needCopyOnWrite = (z.needCopyOnWrite)[:zdnj] + } else { + z.needCopyOnWrite = make([]bool, zdnj) + } + for zcua := range z.needCopyOnWrite { + z.needCopyOnWrite[zcua], bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + return + } + } + case "copyOnWrite": + z.copyOnWrite, bts, err = msgp.ReadBoolBytes(bts) + if err != nil { + return + } + case "conserz": + var zobc uint32 + zobc, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.conserz) >= int(zobc) { + z.conserz = (z.conserz)[:zobc] + } else { + z.conserz = make([]containerSerz, zobc) + } + for zxhx := range z.conserz { + var zsnv uint32 + zsnv, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zsnv > 0 { + zsnv-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "t": + { + var zkgt uint8 + zkgt, bts, err = msgp.ReadUint8Bytes(bts) + z.conserz[zxhx].t = contype(zkgt) + } + if err != nil { + return + } + case "r": + bts, err = z.conserz[zxhx].r.UnmarshalMsg(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *roaringArray) Msgsize() (s int) { + s = 1 + 5 + msgp.ArrayHeaderSize + (len(z.keys) * 
(msgp.Uint16Size)) + 16 + msgp.ArrayHeaderSize + (len(z.needCopyOnWrite) * (msgp.BoolSize)) + 12 + msgp.BoolSize + 8 + msgp.ArrayHeaderSize + for zxhx := range z.conserz { + s += 1 + 2 + msgp.Uint8Size + 2 + z.conserz[zxhx].r.Msgsize() + } + return +} diff --git a/vendor/github.com/RoaringBitmap/roaring/runcontainer.go b/vendor/github.com/RoaringBitmap/roaring/runcontainer.go new file mode 100644 index 0000000..5a0f985 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/runcontainer.go @@ -0,0 +1,2526 @@ +package roaring + +// +// Copyright (c) 2016 by the roaring authors. +// Licensed under the Apache License, Version 2.0. +// +// We derive a few lines of code from the sort.Search +// function in the golang standard library. That function +// is Copyright 2009 The Go Authors, and licensed +// under the following BSD-style license. +/* +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +import ( + "fmt" + "sort" + "unsafe" +) + +//go:generate msgp -unexported + +// runContainer16 does run-length encoding of sets of +// uint16 integers. +type runContainer16 struct { + iv []interval16 + card int64 + + // avoid allocation during search + myOpts searchOptions `msg:"-"` +} + +// interval16 is the internal to runContainer16 +// structure that maintains the individual [start, last] +// closed intervals. +type interval16 struct { + start uint16 + length uint16 // length minus 1 +} + +func newInterval16Range(start, last uint16) interval16 { + if last < start { + panic(fmt.Sprintf("last (%d) cannot be smaller than start (%d)", last, start)) + } + + return interval16{ + start, + last - start, + } +} + +// runlen returns the count of integers in the interval. +func (iv interval16) runlen() int64 { + return int64(iv.length) + 1 +} + +func (iv interval16) last() uint16 { + return iv.start + iv.length +} + +// String produces a human viewable string of the contents. +func (iv interval16) String() string { + return fmt.Sprintf("[%d, %d]", iv.start, iv.length) +} + +func ivalString16(iv []interval16) string { + var s string + var j int + var p interval16 + for j, p = range iv { + s += fmt.Sprintf("%v:[%d, %d], ", j, p.start, p.last()) + } + return s +} + +// String produces a human viewable string of the contents. 
+func (rc *runContainer16) String() string { + if len(rc.iv) == 0 { + return "runContainer16{}" + } + is := ivalString16(rc.iv) + return `runContainer16{` + is + `}` +} + +// uint16Slice is a sort.Sort convenience method +type uint16Slice []uint16 + +// Len returns the length of p. +func (p uint16Slice) Len() int { return len(p) } + +// Less returns p[i] < p[j] +func (p uint16Slice) Less(i, j int) bool { return p[i] < p[j] } + +// Swap swaps elements i and j. +func (p uint16Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +//msgp:ignore addHelper + +// addHelper helps build a runContainer16. +type addHelper16 struct { + runstart uint16 + runlen uint16 + actuallyAdded uint16 + m []interval16 + rc *runContainer16 +} + +func (ah *addHelper16) storeIval(runstart, runlen uint16) { + mi := interval16{start: runstart, length: runlen} + ah.m = append(ah.m, mi) +} + +func (ah *addHelper16) add(cur, prev uint16, i int) { + if cur == prev+1 { + ah.runlen++ + ah.actuallyAdded++ + } else { + if cur < prev { + panic(fmt.Sprintf("newRunContainer16FromVals sees "+ + "unsorted vals; vals[%v]=cur=%v < prev=%v. Sort your vals"+ + " before calling us with alreadySorted == true.", i, cur, prev)) + } + if cur == prev { + // ignore duplicates + } else { + ah.actuallyAdded++ + ah.storeIval(ah.runstart, ah.runlen) + ah.runstart = cur + ah.runlen = 0 + } + } +} + +// newRunContainerRange makes a new container made of just the specified closed interval [rangestart,rangelast] +func newRunContainer16Range(rangestart uint16, rangelast uint16) *runContainer16 { + rc := &runContainer16{} + rc.iv = append(rc.iv, newInterval16Range(rangestart, rangelast)) + return rc +} + +// newRunContainer16FromVals makes a new container from vals. +// +// For efficiency, vals should be sorted in ascending order. +// Ideally vals should not contain duplicates, but we detect and +// ignore them. If vals is already sorted in ascending order, then +// pass alreadySorted = true. 
Otherwise, for !alreadySorted, +// we will sort vals before creating a runContainer16 of them. +// We sort the original vals, so this will change what the +// caller sees in vals as a side effect. +func newRunContainer16FromVals(alreadySorted bool, vals ...uint16) *runContainer16 { + // keep this in sync with newRunContainer16FromArray below + + rc := &runContainer16{} + ah := addHelper16{rc: rc} + + if !alreadySorted { + sort.Sort(uint16Slice(vals)) + } + n := len(vals) + var cur, prev uint16 + switch { + case n == 0: + // nothing more + case n == 1: + ah.m = append(ah.m, newInterval16Range(vals[0], vals[0])) + ah.actuallyAdded++ + default: + ah.runstart = vals[0] + ah.actuallyAdded++ + for i := 1; i < n; i++ { + prev = vals[i-1] + cur = vals[i] + ah.add(cur, prev, i) + } + ah.storeIval(ah.runstart, ah.runlen) + } + rc.iv = ah.m + rc.card = int64(ah.actuallyAdded) + return rc +} + +// newRunContainer16FromBitmapContainer makes a new run container from bc, +// somewhat efficiently. For reference, see the Java +// https://github.com/RoaringBitmap/RoaringBitmap/blob/master/src/main/java/org/roaringbitmap/RunContainer.java#L145-L192 +func newRunContainer16FromBitmapContainer(bc *bitmapContainer) *runContainer16 { + + rc := &runContainer16{} + nbrRuns := bc.numberOfRuns() + if nbrRuns == 0 { + return rc + } + rc.iv = make([]interval16, nbrRuns) + + longCtr := 0 // index of current long in bitmap + curWord := bc.bitmap[0] // its value + runCount := 0 + for { + // potentially multiword advance to first 1 bit + for curWord == 0 && longCtr < len(bc.bitmap)-1 { + longCtr++ + curWord = bc.bitmap[longCtr] + } + + if curWord == 0 { + // wrap up, no more runs + return rc + } + localRunStart := countTrailingZeros(curWord) + runStart := localRunStart + 64*longCtr + // stuff 1s into number's LSBs + curWordWith1s := curWord | (curWord - 1) + + // find the next 0, potentially in a later word + runEnd := 0 + for curWordWith1s == maxWord && longCtr < len(bc.bitmap)-1 { + longCtr++ + 
curWordWith1s = bc.bitmap[longCtr] + } + + if curWordWith1s == maxWord { + // a final unterminated run of 1s + runEnd = wordSizeInBits + longCtr*64 + rc.iv[runCount].start = uint16(runStart) + rc.iv[runCount].length = uint16(runEnd) - uint16(runStart) - 1 + return rc + } + localRunEnd := countTrailingZeros(^curWordWith1s) + runEnd = localRunEnd + longCtr*64 + rc.iv[runCount].start = uint16(runStart) + rc.iv[runCount].length = uint16(runEnd) - 1 - uint16(runStart) + runCount++ + // now, zero out everything right of runEnd. + curWord = curWordWith1s & (curWordWith1s + 1) + // We've lathered and rinsed, so repeat... + } + +} + +// +// newRunContainer16FromArray populates a new +// runContainer16 from the contents of arr. +// +func newRunContainer16FromArray(arr *arrayContainer) *runContainer16 { + // keep this in sync with newRunContainer16FromVals above + + rc := &runContainer16{} + ah := addHelper16{rc: rc} + + n := arr.getCardinality() + var cur, prev uint16 + switch { + case n == 0: + // nothing more + case n == 1: + ah.m = append(ah.m, newInterval16Range(arr.content[0], arr.content[0])) + ah.actuallyAdded++ + default: + ah.runstart = arr.content[0] + ah.actuallyAdded++ + for i := 1; i < n; i++ { + prev = arr.content[i-1] + cur = arr.content[i] + ah.add(cur, prev, i) + } + ah.storeIval(ah.runstart, ah.runlen) + } + rc.iv = ah.m + rc.card = int64(ah.actuallyAdded) + return rc +} + +// set adds the integers in vals to the set. Vals +// must be sorted in increasing order; if not, you should set +// alreadySorted to false, and we will sort them in place for you. +// (Be aware of this side effect -- it will affect the callers +// view of vals). +// +// If you have a small number of additions to an already +// big runContainer16, calling Add() may be faster. +func (rc *runContainer16) set(alreadySorted bool, vals ...uint16) { + + rc2 := newRunContainer16FromVals(alreadySorted, vals...) 
+ un := rc.union(rc2) + rc.iv = un.iv + rc.card = 0 +} + +// canMerge returns true iff the intervals +// a and b either overlap or they are +// contiguous and so can be merged into +// a single interval. +func canMerge16(a, b interval16) bool { + if int64(a.last())+1 < int64(b.start) { + return false + } + return int64(b.last())+1 >= int64(a.start) +} + +// haveOverlap differs from canMerge in that +// it tells you if the intersection of a +// and b would contain an element (otherwise +// it would be the empty set, and we return +// false). +func haveOverlap16(a, b interval16) bool { + if int64(a.last())+1 <= int64(b.start) { + return false + } + return int64(b.last())+1 > int64(a.start) +} + +// mergeInterval16s joins a and b into a +// new interval, and panics if it cannot. +func mergeInterval16s(a, b interval16) (res interval16) { + if !canMerge16(a, b) { + panic(fmt.Sprintf("cannot merge %#v and %#v", a, b)) + } + + if b.start < a.start { + res.start = b.start + } else { + res.start = a.start + } + + if b.last() > a.last() { + res.length = b.last() - res.start + } else { + res.length = a.last() - res.start + } + + return +} + +// intersectInterval16s returns the intersection +// of a and b. The isEmpty flag will be true if +// a and b were disjoint. +func intersectInterval16s(a, b interval16) (res interval16, isEmpty bool) { + if !haveOverlap16(a, b) { + isEmpty = true + return + } + if b.start > a.start { + res.start = b.start + } else { + res.start = a.start + } + + bEnd := b.last() + aEnd := a.last() + var resEnd uint16 + + if bEnd < aEnd { + resEnd = bEnd + } else { + resEnd = aEnd + } + res.length = resEnd - res.start + return +} + +// union merges two runContainer16s, producing +// a new runContainer16 with the union of rc and b. +func (rc *runContainer16) union(b *runContainer16) *runContainer16 { + + // rc is also known as 'a' here, but golint insisted we + // call it rc for consistency with the rest of the methods. 
+ + var m []interval16 + + alim := int64(len(rc.iv)) + blim := int64(len(b.iv)) + + var na int64 // next from a + var nb int64 // next from b + + // merged holds the current merge output, which might + // get additional merges before being appended to m. + var merged interval16 + var mergedUsed bool // is merged being used at the moment? + + var cura interval16 // currently considering this interval16 from a + var curb interval16 // currently considering this interval16 from b + + pass := 0 + for na < alim && nb < blim { + pass++ + cura = rc.iv[na] + curb = b.iv[nb] + + if mergedUsed { + mergedUpdated := false + if canMerge16(cura, merged) { + merged = mergeInterval16s(cura, merged) + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + mergedUpdated = true + } + if canMerge16(curb, merged) { + merged = mergeInterval16s(curb, merged) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + mergedUpdated = true + } + if !mergedUpdated { + // we know that merged is disjoint from cura and curb + m = append(m, merged) + mergedUsed = false + } + continue + + } else { + // !mergedUsed + if !canMerge16(cura, curb) { + if cura.start < curb.start { + m = append(m, cura) + na++ + } else { + m = append(m, curb) + nb++ + } + } else { + merged = mergeInterval16s(cura, curb) + mergedUsed = true + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + } + } + } + var aDone, bDone bool + if na >= alim { + aDone = true + } + if nb >= blim { + bDone = true + } + // finish by merging anything remaining into merged we can: + if mergedUsed { + if !aDone { + aAdds: + for na < alim { + cura = rc.iv[na] + if canMerge16(cura, merged) { + merged = mergeInterval16s(cura, merged) + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + } else { + break aAdds + } + } + + } + + if !bDone { + bAdds: + for nb < blim { + curb = b.iv[nb] + if canMerge16(curb, merged) { + merged = 
mergeInterval16s(curb, merged) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + } else { + break bAdds + } + } + + } + + m = append(m, merged) + } + if na < alim { + m = append(m, rc.iv[na:]...) + } + if nb < blim { + m = append(m, b.iv[nb:]...) + } + + res := &runContainer16{iv: m} + return res +} + +// unionCardinality returns the cardinality of the merger of two runContainer16s, the union of rc and b. +func (rc *runContainer16) unionCardinality(b *runContainer16) uint64 { + + // rc is also known as 'a' here, but golint insisted we + // call it rc for consistency with the rest of the methods. + answer := uint64(0) + + alim := int64(len(rc.iv)) + blim := int64(len(b.iv)) + + var na int64 // next from a + var nb int64 // next from b + + // merged holds the current merge output, which might + // get additional merges before being appended to m. + var merged interval16 + var mergedUsed bool // is merged being used at the moment? + + var cura interval16 // currently considering this interval16 from a + var curb interval16 // currently considering this interval16 from b + + pass := 0 + for na < alim && nb < blim { + pass++ + cura = rc.iv[na] + curb = b.iv[nb] + + if mergedUsed { + mergedUpdated := false + if canMerge16(cura, merged) { + merged = mergeInterval16s(cura, merged) + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + mergedUpdated = true + } + if canMerge16(curb, merged) { + merged = mergeInterval16s(curb, merged) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + mergedUpdated = true + } + if !mergedUpdated { + // we know that merged is disjoint from cura and curb + //m = append(m, merged) + answer += uint64(merged.last()) - uint64(merged.start) + 1 + mergedUsed = false + } + continue + + } else { + // !mergedUsed + if !canMerge16(cura, curb) { + if cura.start < curb.start { + answer += uint64(cura.last()) - uint64(cura.start) + 1 + //m = append(m, cura) + na++ + } else { + answer += uint64(curb.last()) - 
uint64(curb.start) + 1 + //m = append(m, curb) + nb++ + } + } else { + merged = mergeInterval16s(cura, curb) + mergedUsed = true + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + } + } + } + var aDone, bDone bool + if na >= alim { + aDone = true + } + if nb >= blim { + bDone = true + } + // finish by merging anything remaining into merged we can: + if mergedUsed { + if !aDone { + aAdds: + for na < alim { + cura = rc.iv[na] + if canMerge16(cura, merged) { + merged = mergeInterval16s(cura, merged) + na = rc.indexOfIntervalAtOrAfter(int64(merged.last())+1, na+1) + } else { + break aAdds + } + } + + } + + if !bDone { + bAdds: + for nb < blim { + curb = b.iv[nb] + if canMerge16(curb, merged) { + merged = mergeInterval16s(curb, merged) + nb = b.indexOfIntervalAtOrAfter(int64(merged.last())+1, nb+1) + } else { + break bAdds + } + } + + } + + //m = append(m, merged) + answer += uint64(merged.last()) - uint64(merged.start) + 1 + } + for _, r := range rc.iv[na:] { + answer += uint64(r.last()) - uint64(r.start) + 1 + } + for _, r := range b.iv[nb:] { + answer += uint64(r.last()) - uint64(r.start) + 1 + } + return answer +} + +// indexOfIntervalAtOrAfter is a helper for union. +func (rc *runContainer16) indexOfIntervalAtOrAfter(key int64, startIndex int64) int64 { + rc.myOpts.startIndex = startIndex + rc.myOpts.endxIndex = 0 + + w, already, _ := rc.search(key, &rc.myOpts) + if already { + return w + } + return w + 1 +} + +// intersect returns a new runContainer16 holding the +// intersection of rc (also known as 'a') and b. 
+func (rc *runContainer16) intersect(b *runContainer16) *runContainer16 { + + a := rc + numa := int64(len(a.iv)) + numb := int64(len(b.iv)) + res := &runContainer16{} + if numa == 0 || numb == 0 { + return res + } + + if numa == 1 && numb == 1 { + if !haveOverlap16(a.iv[0], b.iv[0]) { + return res + } + } + + var output []interval16 + + var acuri int64 + var bcuri int64 + + astart := int64(a.iv[acuri].start) + bstart := int64(b.iv[bcuri].start) + + var intersection interval16 + var leftoverstart int64 + var isOverlap, isLeftoverA, isLeftoverB bool + var done bool +toploop: + for acuri < numa && bcuri < numb { + + isOverlap, isLeftoverA, isLeftoverB, leftoverstart, intersection = + intersectWithLeftover16(astart, int64(a.iv[acuri].last()), bstart, int64(b.iv[bcuri].last())) + + if !isOverlap { + switch { + case astart < bstart: + acuri, done = a.findNextIntervalThatIntersectsStartingFrom(acuri+1, bstart) + if done { + break toploop + } + astart = int64(a.iv[acuri].start) + + case astart > bstart: + bcuri, done = b.findNextIntervalThatIntersectsStartingFrom(bcuri+1, astart) + if done { + break toploop + } + bstart = int64(b.iv[bcuri].start) + + //default: + // panic("impossible that astart == bstart, since !isOverlap") + } + + } else { + // isOverlap + output = append(output, intersection) + switch { + case isLeftoverA: + // note that we change astart without advancing acuri, + // since we need to capture any 2ndary intersections with a.iv[acuri] + astart = leftoverstart + bcuri++ + if bcuri >= numb { + break toploop + } + bstart = int64(b.iv[bcuri].start) + case isLeftoverB: + // note that we change bstart without advancing bcuri, + // since we need to capture any 2ndary intersections with b.iv[bcuri] + bstart = leftoverstart + acuri++ + if acuri >= numa { + break toploop + } + astart = int64(a.iv[acuri].start) + default: + // neither had leftover, both completely consumed + // optionally, assert for sanity: + //if a.iv[acuri].endx != b.iv[bcuri].endx { + // 
panic("huh? should only be possible that endx agree now!") + //} + + // advance to next a interval + acuri++ + if acuri >= numa { + break toploop + } + astart = int64(a.iv[acuri].start) + + // advance to next b interval + bcuri++ + if bcuri >= numb { + break toploop + } + bstart = int64(b.iv[bcuri].start) + } + } + } // end for toploop + + if len(output) == 0 { + return res + } + + res.iv = output + return res +} + +// intersectCardinality returns the cardinality of the +// intersection of rc (also known as 'a') and b. +func (rc *runContainer16) intersectCardinality(b *runContainer16) int64 { + answer := int64(0) + + a := rc + numa := int64(len(a.iv)) + numb := int64(len(b.iv)) + if numa == 0 || numb == 0 { + return 0 + } + + if numa == 1 && numb == 1 { + if !haveOverlap16(a.iv[0], b.iv[0]) { + return 0 + } + } + + var acuri int64 + var bcuri int64 + + astart := int64(a.iv[acuri].start) + bstart := int64(b.iv[bcuri].start) + + var intersection interval16 + var leftoverstart int64 + var isOverlap, isLeftoverA, isLeftoverB bool + var done bool + pass := 0 +toploop: + for acuri < numa && bcuri < numb { + pass++ + + isOverlap, isLeftoverA, isLeftoverB, leftoverstart, intersection = + intersectWithLeftover16(astart, int64(a.iv[acuri].last()), bstart, int64(b.iv[bcuri].last())) + + if !isOverlap { + switch { + case astart < bstart: + acuri, done = a.findNextIntervalThatIntersectsStartingFrom(acuri+1, bstart) + if done { + break toploop + } + astart = int64(a.iv[acuri].start) + + case astart > bstart: + bcuri, done = b.findNextIntervalThatIntersectsStartingFrom(bcuri+1, astart) + if done { + break toploop + } + bstart = int64(b.iv[bcuri].start) + + //default: + // panic("impossible that astart == bstart, since !isOverlap") + } + + } else { + // isOverlap + answer += int64(intersection.last()) - int64(intersection.start) + 1 + switch { + case isLeftoverA: + // note that we change astart without advancing acuri, + // since we need to capture any 2ndary intersections with 
a.iv[acuri] + astart = leftoverstart + bcuri++ + if bcuri >= numb { + break toploop + } + bstart = int64(b.iv[bcuri].start) + case isLeftoverB: + // note that we change bstart without advancing bcuri, + // since we need to capture any 2ndary intersections with b.iv[bcuri] + bstart = leftoverstart + acuri++ + if acuri >= numa { + break toploop + } + astart = int64(a.iv[acuri].start) + default: + // neither had leftover, both completely consumed + // optionally, assert for sanity: + //if a.iv[acuri].endx != b.iv[bcuri].endx { + // panic("huh? should only be possible that endx agree now!") + //} + + // advance to next a interval + acuri++ + if acuri >= numa { + break toploop + } + astart = int64(a.iv[acuri].start) + + // advance to next b interval + bcuri++ + if bcuri >= numb { + break toploop + } + bstart = int64(b.iv[bcuri].start) + } + } + } // end for toploop + + return answer +} + +// get returns true iff key is in the container. +func (rc *runContainer16) contains(key uint16) bool { + _, in, _ := rc.search(int64(key), nil) + return in +} + +// numIntervals returns the count of intervals in the container. +func (rc *runContainer16) numIntervals() int { + return len(rc.iv) +} + +// searchOptions allows us to accelerate search with +// prior knowledge of (mostly lower) bounds. This is used by Union +// and Intersect. +type searchOptions struct { + // start here instead of at 0 + startIndex int64 + + // upper bound instead of len(rc.iv); + // endxIndex == 0 means ignore the bound and use + // endxIndex == n ==len(rc.iv) which is also + // naturally the default for search() + // when opt = nil. + endxIndex int64 +} + +// search returns alreadyPresent to indicate if the +// key is already in one of our interval16s. +// +// If key is alreadyPresent, then whichInterval16 tells +// you where. 
+// +// If key is not already present, then whichInterval16 is +// set as follows: +// +// a) whichInterval16 == len(rc.iv)-1 if key is beyond our +// last interval16 in rc.iv; +// +// b) whichInterval16 == -1 if key is before our first +// interval16 in rc.iv; +// +// c) whichInterval16 is set to the minimum index of rc.iv +// which comes strictly before the key; +// so rc.iv[whichInterval16].last < key, +// and if whichInterval16+1 exists, then key < rc.iv[whichInterval16+1].start +// (Note that whichInterval16+1 won't exist when +// whichInterval16 is the last interval.) +// +// runContainer16.search always returns whichInterval16 < len(rc.iv). +// +// If not nil, opts can be used to further restrict +// the search space. +// +func (rc *runContainer16) search(key int64, opts *searchOptions) (whichInterval16 int64, alreadyPresent bool, numCompares int) { + n := int64(len(rc.iv)) + if n == 0 { + return -1, false, 0 + } + + startIndex := int64(0) + endxIndex := n + if opts != nil { + startIndex = opts.startIndex + + // let endxIndex == 0 mean no effect + if opts.endxIndex > 0 { + endxIndex = opts.endxIndex + } + } + + // sort.Search returns the smallest index i + // in [0, n) at which f(i) is true, assuming that on the range [0, n), + // f(i) == true implies f(i+1) == true. + // If there is no such index, Search returns n. + + // For correctness, this began as verbatim snippet from + // sort.Search in the Go standard lib. + // We inline our comparison function for speed, and + // annotate with numCompares + // to observe and test that extra bounds are utilized. + i, j := startIndex, endxIndex + for i < j { + h := i + (j-i)/2 // avoid overflow when computing h as the bisector + // i <= h < j + numCompares++ + if !(key < int64(rc.iv[h].start)) { + i = h + 1 + } else { + j = h + } + } + below := i + // end std lib snippet. 
+ + // The above is a simple in-lining and annotation of: + /* below := sort.Search(n, + func(i int) bool { + return key < rc.iv[i].start + }) + */ + whichInterval16 = below - 1 + + if below == n { + // all falses => key is >= start of all interval16s + // ... so does it belong to the last interval16? + if key < int64(rc.iv[n-1].last())+1 { + // yes, it belongs to the last interval16 + alreadyPresent = true + return + } + // no, it is beyond the last interval16. + // leave alreadyPreset = false + return + } + + // INVAR: key is below rc.iv[below] + if below == 0 { + // key is before the first first interval16. + // leave alreadyPresent = false + return + } + + // INVAR: key is >= rc.iv[below-1].start and + // key is < rc.iv[below].start + + // is key in below-1 interval16? + if key >= int64(rc.iv[below-1].start) && key < int64(rc.iv[below-1].last())+1 { + // yes, it is. key is in below-1 interval16. + alreadyPresent = true + return + } + + // INVAR: key >= rc.iv[below-1].endx && key < rc.iv[below].start + // leave alreadyPresent = false + return +} + +// cardinality returns the count of the integers stored in the +// runContainer16. +func (rc *runContainer16) cardinality() int64 { + if len(rc.iv) == 0 { + rc.card = 0 + return 0 + } + if rc.card > 0 { + return rc.card // already cached + } + // have to compute it + var n int64 + for _, p := range rc.iv { + n += p.runlen() + } + rc.card = n // cache it + return n +} + +// AsSlice decompresses the contents into a []uint16 slice. +func (rc *runContainer16) AsSlice() []uint16 { + s := make([]uint16, rc.cardinality()) + j := 0 + for _, p := range rc.iv { + for i := p.start; i <= p.last(); i++ { + s[j] = i + j++ + } + } + return s +} + +// newRunContainer16 creates an empty run container. +func newRunContainer16() *runContainer16 { + return &runContainer16{} +} + +// newRunContainer16CopyIv creates a run container, initializing +// with a copy of the supplied iv slice. 
+// +func newRunContainer16CopyIv(iv []interval16) *runContainer16 { + rc := &runContainer16{ + iv: make([]interval16, len(iv)), + } + copy(rc.iv, iv) + return rc +} + +func (rc *runContainer16) Clone() *runContainer16 { + rc2 := newRunContainer16CopyIv(rc.iv) + return rc2 +} + +// newRunContainer16TakeOwnership returns a new runContainer16 +// backed by the provided iv slice, which we will +// assume exclusive control over from now on. +// +func newRunContainer16TakeOwnership(iv []interval16) *runContainer16 { + rc := &runContainer16{ + iv: iv, + } + return rc +} + +const baseRc16Size = int(unsafe.Sizeof(runContainer16{})) +const perIntervalRc16Size = int(unsafe.Sizeof(interval16{})) + +const baseDiskRc16Size = int(unsafe.Sizeof(uint16(0))) + +// see also runContainer16SerializedSizeInBytes(numRuns int) int + +// getSizeInBytes returns the number of bytes of memory +// required by this runContainer16. +func (rc *runContainer16) getSizeInBytes() int { + return perIntervalRc16Size*len(rc.iv) + baseRc16Size +} + +// runContainer16SerializedSizeInBytes returns the number of bytes of disk +// required to hold numRuns in a runContainer16. +func runContainer16SerializedSizeInBytes(numRuns int) int { + return perIntervalRc16Size*numRuns + baseDiskRc16Size +} + +// Add adds a single value k to the set. 
+func (rc *runContainer16) Add(k uint16) (wasNew bool) { + // TODO comment from runContainer16.java: + // it might be better and simpler to do return + // toBitmapOrArrayContainer(getCardinality()).add(k) + // but note that some unit tests use this method to build up test + // runcontainers without calling runOptimize + + k64 := int64(k) + + index, present, _ := rc.search(k64, nil) + if present { + return // already there + } + wasNew = true + + // increment card if it is cached already + if rc.card > 0 { + rc.card++ + } + n := int64(len(rc.iv)) + if index == -1 { + // we may need to extend the first run + if n > 0 { + if rc.iv[0].start == k+1 { + rc.iv[0].start = k + rc.iv[0].length++ + return + } + } + // nope, k stands alone, starting the new first interval16. + rc.iv = append([]interval16{newInterval16Range(k, k)}, rc.iv...) + return + } + + // are we off the end? handle both index == n and index == n-1: + if index >= n-1 { + if int64(rc.iv[n-1].last())+1 == k64 { + rc.iv[n-1].length++ + return + } + rc.iv = append(rc.iv, newInterval16Range(k, k)) + return + } + + // INVAR: index and index+1 both exist, and k goes between them. + // + // Now: add k into the middle, + // possibly fusing with index or index+1 interval16 + // and possibly resulting in fusing of two interval16s + // that had a one integer gap. + + left := index + right := index + 1 + + // are we fusing left and right by adding k? + if int64(rc.iv[left].last())+1 == k64 && int64(rc.iv[right].start) == k64+1 { + // fuse into left + rc.iv[left].length = rc.iv[right].last() - rc.iv[left].start + // remove redundant right + rc.iv = append(rc.iv[:left+1], rc.iv[right+1:]...) + return + } + + // are we an addition to left? + if int64(rc.iv[left].last())+1 == k64 { + // yes + rc.iv[left].length++ + return + } + + // are we an addition to right? 
+ if int64(rc.iv[right].start) == k64+1 { + // yes + rc.iv[right].start = k + rc.iv[right].length++ + return + } + + // k makes a standalone new interval16, inserted in the middle + tail := append([]interval16{newInterval16Range(k, k)}, rc.iv[right:]...) + rc.iv = append(rc.iv[:left+1], tail...) + return +} + +//msgp:ignore runIterator + +// runIterator16 advice: you must call hasNext() +// before calling next()/peekNext() to insure there are contents. +type runIterator16 struct { + rc *runContainer16 + curIndex int64 + curPosInIndex uint16 +} + +// newRunIterator16 returns a new empty run container. +func (rc *runContainer16) newRunIterator16() *runIterator16 { + return &runIterator16{rc: rc, curIndex: 0, curPosInIndex: 0} +} + +func (rc *runContainer16) iterate(cb func(x uint16) bool) bool { + iterator := runIterator16{rc, 0, 0} + + for iterator.hasNext() { + if !cb(iterator.next()) { + return false + } + } + + return true +} + +// hasNext returns false if calling next will panic. It +// returns true when there is at least one more value +// available in the iteration sequence. +func (ri *runIterator16) hasNext() bool { + return int64(len(ri.rc.iv)) > ri.curIndex+1 || + (int64(len(ri.rc.iv)) == ri.curIndex+1 && ri.rc.iv[ri.curIndex].length >= ri.curPosInIndex) +} + +// next returns the next value in the iteration sequence. 
+func (ri *runIterator16) next() uint16 { + next := ri.rc.iv[ri.curIndex].start + ri.curPosInIndex + + if ri.curPosInIndex == ri.rc.iv[ri.curIndex].length { + ri.curPosInIndex = 0 + ri.curIndex++ + } else { + ri.curPosInIndex++ + } + + return next +} + +// peekNext returns the next value in the iteration sequence without advancing the iterator +func (ri *runIterator16) peekNext() uint16 { + return ri.rc.iv[ri.curIndex].start + ri.curPosInIndex +} + +// advanceIfNeeded advances as long as the next value is smaller than minval +func (ri *runIterator16) advanceIfNeeded(minval uint16) { + if !ri.hasNext() || ri.peekNext() >= minval { + return + } + + opt := &searchOptions{ + startIndex: ri.curIndex, + endxIndex: int64(len(ri.rc.iv)), + } + + // interval cannot be -1 because of minval > peekNext + interval, isPresent, _ := ri.rc.search(int64(minval), opt) + + // if the minval is present, set the curPosIndex at the right position + if isPresent { + ri.curIndex = interval + ri.curPosInIndex = minval - ri.rc.iv[ri.curIndex].start + } else { + // otherwise interval is set to to the minimum index of rc.iv + // which comes strictly before the key, that's why we set the next interval + ri.curIndex = interval + 1 + ri.curPosInIndex = 0 + } +} + +// runReverseIterator16 advice: you must call hasNext() +// before calling next() to insure there are contents. +type runReverseIterator16 struct { + rc *runContainer16 + curIndex int64 // index into rc.iv + curPosInIndex uint16 // offset in rc.iv[curIndex] +} + +// newRunReverseIterator16 returns a new empty run iterator. +func (rc *runContainer16) newRunReverseIterator16() *runReverseIterator16 { + index := int64(len(rc.iv)) - 1 + pos := uint16(0) + + if index >= 0 { + pos = rc.iv[index].length + } + + return &runReverseIterator16{ + rc: rc, + curIndex: index, + curPosInIndex: pos, + } +} + +// hasNext returns false if calling next will panic. 
It +// returns true when there is at least one more value +// available in the iteration sequence. +func (ri *runReverseIterator16) hasNext() bool { + return ri.curIndex > 0 || ri.curIndex == 0 && ri.curPosInIndex >= 0 +} + +// next returns the next value in the iteration sequence. +func (ri *runReverseIterator16) next() uint16 { + next := ri.rc.iv[ri.curIndex].start + ri.curPosInIndex + + if ri.curPosInIndex > 0 { + ri.curPosInIndex-- + } else { + ri.curIndex-- + + if ri.curIndex >= 0 { + ri.curPosInIndex = ri.rc.iv[ri.curIndex].length + } + } + + return next +} + +func (rc *runContainer16) newManyRunIterator16() *runIterator16 { + return rc.newRunIterator16() +} + +// hs are the high bits to include to avoid needing to reiterate over the buffer in NextMany +func (ri *runIterator16) nextMany(hs uint32, buf []uint32) int { + n := 0 + + if !ri.hasNext() { + return n + } + + // start and end are inclusive + for n < len(buf) { + moreVals := 0 + + if ri.rc.iv[ri.curIndex].length >= ri.curPosInIndex { + // add as many as you can from this seq + moreVals = minOfInt(int(ri.rc.iv[ri.curIndex].length-ri.curPosInIndex)+1, len(buf)-n) + base := uint32(ri.rc.iv[ri.curIndex].start+ri.curPosInIndex) | hs + + // allows BCE + buf2 := buf[n : n+moreVals] + for i := range buf2 { + buf2[i] = base + uint32(i) + } + + // update values + n += moreVals + } + + if moreVals+int(ri.curPosInIndex) > int(ri.rc.iv[ri.curIndex].length) { + ri.curPosInIndex = 0 + ri.curIndex++ + + if ri.curIndex == int64(len(ri.rc.iv)) { + break + } + } else { + ri.curPosInIndex += uint16(moreVals) //moreVals always fits in uint16 + } + } + + return n +} + +// remove removes key from the container. +func (rc *runContainer16) removeKey(key uint16) (wasPresent bool) { + + var index int64 + index, wasPresent, _ = rc.search(int64(key), nil) + if !wasPresent { + return // already removed, nothing to do. 
+ } + pos := key - rc.iv[index].start + rc.deleteAt(&index, &pos) + return +} + +// internal helper functions + +func (rc *runContainer16) deleteAt(curIndex *int64, curPosInIndex *uint16) { + rc.card-- + ci := *curIndex + pos := *curPosInIndex + + // are we first, last, or in the middle of our interval16? + switch { + case pos == 0: + if int64(rc.iv[ci].length) == 0 { + // our interval disappears + rc.iv = append(rc.iv[:ci], rc.iv[ci+1:]...) + // curIndex stays the same, since the delete did + // the advance for us. + *curPosInIndex = 0 + } else { + rc.iv[ci].start++ // no longer overflowable + rc.iv[ci].length-- + } + case pos == rc.iv[ci].length: + // length + rc.iv[ci].length-- + // our interval16 cannot disappear, else we would have been pos == 0, case first above. + *curPosInIndex-- + // if we leave *curIndex alone, then Next() will work properly even after the delete. + default: + //middle + // split into two, adding an interval16 + new0 := newInterval16Range(rc.iv[ci].start, rc.iv[ci].start+*curPosInIndex-1) + + new1start := int64(rc.iv[ci].start+*curPosInIndex) + 1 + if new1start > int64(MaxUint16) { + panic("overflow?!?!") + } + new1 := newInterval16Range(uint16(new1start), rc.iv[ci].last()) + tail := append([]interval16{new0, new1}, rc.iv[ci+1:]...) + rc.iv = append(rc.iv[:ci], tail...) 
+ // update curIndex and curPosInIndex + *curIndex++ + *curPosInIndex = 0 + } + +} + +func have4Overlap16(astart, alast, bstart, blast int64) bool { + if alast+1 <= bstart { + return false + } + return blast+1 > astart +} + +func intersectWithLeftover16(astart, alast, bstart, blast int64) (isOverlap, isLeftoverA, isLeftoverB bool, leftoverstart int64, intersection interval16) { + if !have4Overlap16(astart, alast, bstart, blast) { + return + } + isOverlap = true + + // do the intersection: + if bstart > astart { + intersection.start = uint16(bstart) + } else { + intersection.start = uint16(astart) + } + + switch { + case blast < alast: + isLeftoverA = true + leftoverstart = blast + 1 + intersection.length = uint16(blast) - intersection.start + case alast < blast: + isLeftoverB = true + leftoverstart = alast + 1 + intersection.length = uint16(alast) - intersection.start + default: + // alast == blast + intersection.length = uint16(alast) - intersection.start + } + + return +} + +func (rc *runContainer16) findNextIntervalThatIntersectsStartingFrom(startIndex int64, key int64) (index int64, done bool) { + + rc.myOpts.startIndex = startIndex + rc.myOpts.endxIndex = 0 + + w, _, _ := rc.search(key, &rc.myOpts) + // rc.search always returns w < len(rc.iv) + if w < startIndex { + // not found and comes before lower bound startIndex, + // so just use the lower bound. + if startIndex == int64(len(rc.iv)) { + // also this bump up means that we are done + return startIndex, true + } + return startIndex, false + } + + return w, false +} + +func sliceToString16(m []interval16) string { + s := "" + for i := range m { + s += fmt.Sprintf("%v: %s, ", i, m[i]) + } + return s +} + +// selectInt16 returns the j-th value in the container. +// We panic of j is out of bounds. 
+func (rc *runContainer16) selectInt16(j uint16) int { + n := rc.cardinality() + if int64(j) > n { + panic(fmt.Sprintf("Cannot select %v since Cardinality is %v", j, n)) + } + + var offset int64 + for k := range rc.iv { + nextOffset := offset + rc.iv[k].runlen() + if nextOffset > int64(j) { + return int(int64(rc.iv[k].start) + (int64(j) - offset)) + } + offset = nextOffset + } + panic(fmt.Sprintf("Cannot select %v since Cardinality is %v", j, n)) +} + +// helper for invert +func (rc *runContainer16) invertlastInterval(origin uint16, lastIdx int) []interval16 { + cur := rc.iv[lastIdx] + if cur.last() == MaxUint16 { + if cur.start == origin { + return nil // empty container + } + return []interval16{newInterval16Range(origin, cur.start-1)} + } + if cur.start == origin { + return []interval16{newInterval16Range(cur.last()+1, MaxUint16)} + } + // invert splits + return []interval16{ + newInterval16Range(origin, cur.start-1), + newInterval16Range(cur.last()+1, MaxUint16), + } +} + +// invert returns a new container (not inplace), that is +// the inversion of rc. For each bit b in rc, the +// returned value has !b +func (rc *runContainer16) invert() *runContainer16 { + ni := len(rc.iv) + var m []interval16 + switch ni { + case 0: + return &runContainer16{iv: []interval16{newInterval16Range(0, MaxUint16)}} + case 1: + return &runContainer16{iv: rc.invertlastInterval(0, 0)} + } + var invstart int64 + ult := ni - 1 + for i, cur := range rc.iv { + if i == ult { + // invertlastInteval will add both intervals (b) and (c) in + // diagram below. + m = append(m, rc.invertlastInterval(uint16(invstart), i)...) + break + } + // INVAR: i and cur are not the last interval, there is a next at i+1 + // + // ........[cur.start, cur.last] ...... [next.start, next.last].... + // ^ ^ ^ + // (a) (b) (c) + // + // Now: we add interval (a); but if (a) is empty, for cur.start==0, we skip it. 
+ if cur.start > 0 { + m = append(m, newInterval16Range(uint16(invstart), cur.start-1)) + } + invstart = int64(cur.last() + 1) + } + return &runContainer16{iv: m} +} + +func (iv interval16) equal(b interval16) bool { + return iv.start == b.start && iv.length == b.length +} + +func (iv interval16) isSuperSetOf(b interval16) bool { + return iv.start <= b.start && b.last() <= iv.last() +} + +func (iv interval16) subtractInterval(del interval16) (left []interval16, delcount int64) { + isect, isEmpty := intersectInterval16s(iv, del) + + if isEmpty { + return nil, 0 + } + if del.isSuperSetOf(iv) { + return nil, iv.runlen() + } + + switch { + case isect.start > iv.start && isect.last() < iv.last(): + new0 := newInterval16Range(iv.start, isect.start-1) + new1 := newInterval16Range(isect.last()+1, iv.last()) + return []interval16{new0, new1}, isect.runlen() + case isect.start == iv.start: + return []interval16{newInterval16Range(isect.last()+1, iv.last())}, isect.runlen() + default: + return []interval16{newInterval16Range(iv.start, isect.start-1)}, isect.runlen() + } +} + +func (rc *runContainer16) isubtract(del interval16) { + origiv := make([]interval16, len(rc.iv)) + copy(origiv, rc.iv) + n := int64(len(rc.iv)) + if n == 0 { + return // already done. + } + + _, isEmpty := intersectInterval16s(newInterval16Range(rc.iv[0].start, rc.iv[n-1].last()), del) + if isEmpty { + return // done + } + + // INVAR there is some intersection between rc and del + istart, startAlready, _ := rc.search(int64(del.start), nil) + ilast, lastAlready, _ := rc.search(int64(del.last()), nil) + rc.card = -1 + if istart == -1 { + if ilast == n-1 && !lastAlready { + rc.iv = nil + return + } + } + // some intervals will remain + switch { + case startAlready && lastAlready: + res0, _ := rc.iv[istart].subtractInterval(del) + + // would overwrite values in iv b/c res0 can have len 2. so + // write to origiv instead. 
+ lost := 1 + ilast - istart + changeSize := int64(len(res0)) - lost + newSize := int64(len(rc.iv)) + changeSize + + // rc.iv = append(pre, caboose...) + // return + + if ilast != istart { + res1, _ := rc.iv[ilast].subtractInterval(del) + res0 = append(res0, res1...) + changeSize = int64(len(res0)) - lost + newSize = int64(len(rc.iv)) + changeSize + } + switch { + case changeSize < 0: + // shrink + copy(rc.iv[istart+int64(len(res0)):], rc.iv[ilast+1:]) + copy(rc.iv[istart:istart+int64(len(res0))], res0) + rc.iv = rc.iv[:newSize] + return + case changeSize == 0: + // stay the same + copy(rc.iv[istart:istart+int64(len(res0))], res0) + return + default: + // changeSize > 0 is only possible when ilast == istart. + // Hence we now know: changeSize == 1 and len(res0) == 2 + rc.iv = append(rc.iv, interval16{}) + // len(rc.iv) is correct now, no need to rc.iv = rc.iv[:newSize] + + // copy the tail into place + copy(rc.iv[ilast+2:], rc.iv[ilast+1:]) + // copy the new item(s) into place + copy(rc.iv[istart:istart+2], res0) + return + } + + case !startAlready && !lastAlready: + // we get to discard whole intervals + + // from the search() definition: + + // if del.start is not present, then istart is + // set as follows: + // + // a) istart == n-1 if del.start is beyond our + // last interval16 in rc.iv; + // + // b) istart == -1 if del.start is before our first + // interval16 in rc.iv; + // + // c) istart is set to the minimum index of rc.iv + // which comes strictly before the del.start; + // so del.start > rc.iv[istart].last, + // and if istart+1 exists, then del.start < rc.iv[istart+1].startx + + // if del.last is not present, then ilast is + // set as follows: + // + // a) ilast == n-1 if del.last is beyond our + // last interval16 in rc.iv; + // + // b) ilast == -1 if del.last is before our first + // interval16 in rc.iv; + // + // c) ilast is set to the minimum index of rc.iv + // which comes strictly before the del.last; + // so del.last > rc.iv[ilast].last, + // and 
if ilast+1 exists, then del.last < rc.iv[ilast+1].start + + // INVAR: istart >= 0 + pre := rc.iv[:istart+1] + if ilast == n-1 { + rc.iv = pre + return + } + // INVAR: ilast < n-1 + lost := ilast - istart + changeSize := -lost + newSize := int64(len(rc.iv)) + changeSize + if changeSize != 0 { + copy(rc.iv[ilast+1+changeSize:], rc.iv[ilast+1:]) + } + rc.iv = rc.iv[:newSize] + return + + case startAlready && !lastAlready: + // we can only shrink or stay the same size + // i.e. we either eliminate the whole interval, + // or just cut off the right side. + res0, _ := rc.iv[istart].subtractInterval(del) + if len(res0) > 0 { + // len(res) must be 1 + rc.iv[istart] = res0[0] + } + lost := 1 + (ilast - istart) + changeSize := int64(len(res0)) - lost + newSize := int64(len(rc.iv)) + changeSize + if changeSize != 0 { + copy(rc.iv[ilast+1+changeSize:], rc.iv[ilast+1:]) + } + rc.iv = rc.iv[:newSize] + return + + case !startAlready && lastAlready: + // we can only shrink or stay the same size + res1, _ := rc.iv[ilast].subtractInterval(del) + lost := ilast - istart + changeSize := int64(len(res1)) - lost + newSize := int64(len(rc.iv)) + changeSize + if changeSize != 0 { + // move the tail first to make room for res1 + copy(rc.iv[ilast+1+changeSize:], rc.iv[ilast+1:]) + } + copy(rc.iv[istart+1:], res1) + rc.iv = rc.iv[:newSize] + return + } +} + +// compute rc minus b, and return the result as a new value (not inplace). +// port of run_container_andnot from CRoaring... 
+// https://github.com/RoaringBitmap/CRoaring/blob/master/src/containers/run.c#L435-L496 +func (rc *runContainer16) AndNotRunContainer16(b *runContainer16) *runContainer16 { + + if len(b.iv) == 0 || len(rc.iv) == 0 { + return rc + } + + dst := newRunContainer16() + apos := 0 + bpos := 0 + + a := rc + + astart := a.iv[apos].start + alast := a.iv[apos].last() + bstart := b.iv[bpos].start + blast := b.iv[bpos].last() + + alen := len(a.iv) + blen := len(b.iv) + + for apos < alen && bpos < blen { + switch { + case alast < bstart: + // output the first run + dst.iv = append(dst.iv, newInterval16Range(astart, alast)) + apos++ + if apos < alen { + astart = a.iv[apos].start + alast = a.iv[apos].last() + } + case blast < astart: + // exit the second run + bpos++ + if bpos < blen { + bstart = b.iv[bpos].start + blast = b.iv[bpos].last() + } + default: + // a: [ ] + // b: [ ] + // alast >= bstart + // blast >= astart + if astart < bstart { + dst.iv = append(dst.iv, newInterval16Range(astart, bstart-1)) + } + if alast > blast { + astart = blast + 1 + } else { + apos++ + if apos < alen { + astart = a.iv[apos].start + alast = a.iv[apos].last() + } + } + } + } + if apos < alen { + dst.iv = append(dst.iv, newInterval16Range(astart, alast)) + apos++ + if apos < alen { + dst.iv = append(dst.iv, a.iv[apos:]...) + } + } + + return dst +} + +func (rc *runContainer16) numberOfRuns() (nr int) { + return len(rc.iv) +} + +func (rc *runContainer16) containerType() contype { + return run16Contype +} + +func (rc *runContainer16) equals16(srb *runContainer16) bool { + // Check if the containers are the same object. 
+ if rc == srb { + return true + } + + if len(srb.iv) != len(rc.iv) { + return false + } + + for i, v := range rc.iv { + if v != srb.iv[i] { + return false + } + } + return true +} + +// compile time verify we meet interface requirements +var _ container = &runContainer16{} + +func (rc *runContainer16) clone() container { + return newRunContainer16CopyIv(rc.iv) +} + +func (rc *runContainer16) minimum() uint16 { + return rc.iv[0].start // assume not empty +} + +func (rc *runContainer16) maximum() uint16 { + return rc.iv[len(rc.iv)-1].last() // assume not empty +} + +func (rc *runContainer16) isFull() bool { + return (len(rc.iv) == 1) && ((rc.iv[0].start == 0) && (rc.iv[0].last() == MaxUint16)) +} + +func (rc *runContainer16) and(a container) container { + if rc.isFull() { + return a.clone() + } + switch c := a.(type) { + case *runContainer16: + return rc.intersect(c) + case *arrayContainer: + return rc.andArray(c) + case *bitmapContainer: + return rc.andBitmapContainer(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) andCardinality(a container) int { + switch c := a.(type) { + case *runContainer16: + return int(rc.intersectCardinality(c)) + case *arrayContainer: + return rc.andArrayCardinality(c) + case *bitmapContainer: + return rc.andBitmapContainerCardinality(c) + } + panic("unsupported container type") +} + +// andBitmapContainer finds the intersection of rc and b. 
+func (rc *runContainer16) andBitmapContainer(bc *bitmapContainer) container { + bc2 := newBitmapContainerFromRun(rc) + return bc2.andBitmap(bc) +} + +func (rc *runContainer16) andArrayCardinality(ac *arrayContainer) int { + pos := 0 + answer := 0 + maxpos := ac.getCardinality() + if maxpos == 0 { + return 0 // won't happen in actual code + } + v := ac.content[pos] +mainloop: + for _, p := range rc.iv { + for v < p.start { + pos++ + if pos == maxpos { + break mainloop + } + v = ac.content[pos] + } + for v <= p.last() { + answer++ + pos++ + if pos == maxpos { + break mainloop + } + v = ac.content[pos] + } + } + return answer +} + +func (rc *runContainer16) iand(a container) container { + if rc.isFull() { + return a.clone() + } + switch c := a.(type) { + case *runContainer16: + return rc.inplaceIntersect(c) + case *arrayContainer: + return rc.andArray(c) + case *bitmapContainer: + return rc.iandBitmapContainer(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) inplaceIntersect(rc2 *runContainer16) container { + // TODO: optimize by doing less allocation, possibly? 
+ // sect will be new + sect := rc.intersect(rc2) + *rc = *sect + return rc +} + +func (rc *runContainer16) iandBitmapContainer(bc *bitmapContainer) container { + isect := rc.andBitmapContainer(bc) + *rc = *newRunContainer16FromContainer(isect) + return rc +} + +func (rc *runContainer16) andArray(ac *arrayContainer) container { + if len(rc.iv) == 0 { + return newArrayContainer() + } + + acCardinality := ac.getCardinality() + c := newArrayContainerCapacity(acCardinality) + + for rlePos, arrayPos := 0, 0; arrayPos < acCardinality; { + iv := rc.iv[rlePos] + arrayVal := ac.content[arrayPos] + + for iv.last() < arrayVal { + rlePos++ + if rlePos == len(rc.iv) { + return c + } + iv = rc.iv[rlePos] + } + + if iv.start > arrayVal { + arrayPos = advanceUntil(ac.content, arrayPos, len(ac.content), iv.start) + } else { + c.content = append(c.content, arrayVal) + arrayPos++ + } + } + return c +} + +func (rc *runContainer16) andNot(a container) container { + switch c := a.(type) { + case *arrayContainer: + return rc.andNotArray(c) + case *bitmapContainer: + return rc.andNotBitmap(c) + case *runContainer16: + return rc.andNotRunContainer16(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) fillLeastSignificant16bits(x []uint32, i int, mask uint32) { + k := 0 + var val int64 + for _, p := range rc.iv { + n := p.runlen() + for j := int64(0); j < n; j++ { + val = int64(p.start) + j + x[k+i] = uint32(val) | mask + k++ + } + } +} + +func (rc *runContainer16) getShortIterator() shortPeekable { + return rc.newRunIterator16() +} + +func (rc *runContainer16) getReverseIterator() shortIterable { + return rc.newRunReverseIterator16() +} + +func (rc *runContainer16) getManyIterator() manyIterable { + return rc.newManyRunIterator16() +} + +// add the values in the range [firstOfRange, endx). endx +// is still abe to express 2^16 because it is an int not an uint16. 
+func (rc *runContainer16) iaddRange(firstOfRange, endx int) container { + + if firstOfRange >= endx { + panic(fmt.Sprintf("invalid %v = endx >= firstOfRange", endx)) + } + addme := newRunContainer16TakeOwnership([]interval16{ + { + start: uint16(firstOfRange), + length: uint16(endx - 1 - firstOfRange), + }, + }) + *rc = *rc.union(addme) + return rc +} + +// remove the values in the range [firstOfRange,endx) +func (rc *runContainer16) iremoveRange(firstOfRange, endx int) container { + if firstOfRange >= endx { + panic(fmt.Sprintf("request to iremove empty set [%v, %v),"+ + " nothing to do.", firstOfRange, endx)) + //return rc + } + x := newInterval16Range(uint16(firstOfRange), uint16(endx-1)) + rc.isubtract(x) + return rc +} + +// not flip the values in the range [firstOfRange,endx) +func (rc *runContainer16) not(firstOfRange, endx int) container { + if firstOfRange >= endx { + panic(fmt.Sprintf("invalid %v = endx >= firstOfRange = %v", endx, firstOfRange)) + } + + return rc.Not(firstOfRange, endx) +} + +// Not flips the values in the range [firstOfRange,endx). +// This is not inplace. Only the returned value has the flipped bits. +// +// Currently implemented as (!A intersect B) union (A minus B), +// where A is rc, and B is the supplied [firstOfRange, endx) interval. +// +// TODO(time optimization): convert this to a single pass +// algorithm by copying AndNotRunContainer16() and modifying it. +// Current routine is correct but +// makes 2 more passes through the arrays than should be +// strictly necessary. Measure both ways though--this may not matter. 
+// +func (rc *runContainer16) Not(firstOfRange, endx int) *runContainer16 { + + if firstOfRange >= endx { + panic(fmt.Sprintf("invalid %v = endx >= firstOfRange == %v", endx, firstOfRange)) + } + + if firstOfRange >= endx { + return rc.Clone() + } + + a := rc + // algo: + // (!A intersect B) union (A minus B) + + nota := a.invert() + + bs := []interval16{newInterval16Range(uint16(firstOfRange), uint16(endx-1))} + b := newRunContainer16TakeOwnership(bs) + + notAintersectB := nota.intersect(b) + + aMinusB := a.AndNotRunContainer16(b) + + rc2 := notAintersectB.union(aMinusB) + return rc2 +} + +// equals is now logical equals; it does not require the +// same underlying container type. +func (rc *runContainer16) equals(o container) bool { + srb, ok := o.(*runContainer16) + + if !ok { + // maybe value instead of pointer + val, valok := o.(*runContainer16) + if valok { + srb = val + ok = true + } + } + if ok { + // Check if the containers are the same object. + if rc == srb { + return true + } + + if len(srb.iv) != len(rc.iv) { + return false + } + + for i, v := range rc.iv { + if v != srb.iv[i] { + return false + } + } + return true + } + + // use generic comparison + if o.getCardinality() != rc.getCardinality() { + return false + } + rit := rc.getShortIterator() + bit := o.getShortIterator() + + //k := 0 + for rit.hasNext() { + if bit.next() != rit.next() { + return false + } + //k++ + } + return true +} + +func (rc *runContainer16) iaddReturnMinimized(x uint16) container { + rc.Add(x) + return rc +} + +func (rc *runContainer16) iadd(x uint16) (wasNew bool) { + return rc.Add(x) +} + +func (rc *runContainer16) iremoveReturnMinimized(x uint16) container { + rc.removeKey(x) + return rc +} + +func (rc *runContainer16) iremove(x uint16) bool { + return rc.removeKey(x) +} + +func (rc *runContainer16) or(a container) container { + if rc.isFull() { + return rc.clone() + } + switch c := a.(type) { + case *runContainer16: + return rc.union(c) + case *arrayContainer: + return 
rc.orArray(c) + case *bitmapContainer: + return rc.orBitmapContainer(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) orCardinality(a container) int { + switch c := a.(type) { + case *runContainer16: + return int(rc.unionCardinality(c)) + case *arrayContainer: + return rc.orArrayCardinality(c) + case *bitmapContainer: + return rc.orBitmapContainerCardinality(c) + } + panic("unsupported container type") +} + +// orBitmapContainer finds the union of rc and bc. +func (rc *runContainer16) orBitmapContainer(bc *bitmapContainer) container { + bc2 := newBitmapContainerFromRun(rc) + return bc2.iorBitmap(bc) +} + +func (rc *runContainer16) andBitmapContainerCardinality(bc *bitmapContainer) int { + answer := 0 + for i := range rc.iv { + answer += bc.getCardinalityInRange(uint(rc.iv[i].start), uint(rc.iv[i].last())+1) + } + //bc.computeCardinality() + return answer +} + +func (rc *runContainer16) orBitmapContainerCardinality(bc *bitmapContainer) int { + return rc.getCardinality() + bc.getCardinality() - rc.andBitmapContainerCardinality(bc) +} + +// orArray finds the union of rc and ac. +func (rc *runContainer16) orArray(ac *arrayContainer) container { + bc1 := newBitmapContainerFromRun(rc) + bc2 := ac.toBitmapContainer() + return bc1.orBitmap(bc2) +} + +// orArray finds the union of rc and ac. 
+func (rc *runContainer16) orArrayCardinality(ac *arrayContainer) int { + return ac.getCardinality() + rc.getCardinality() - rc.andArrayCardinality(ac) +} + +func (rc *runContainer16) ior(a container) container { + if rc.isFull() { + return rc + } + switch c := a.(type) { + case *runContainer16: + return rc.inplaceUnion(c) + case *arrayContainer: + return rc.iorArray(c) + case *bitmapContainer: + return rc.iorBitmapContainer(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) inplaceUnion(rc2 *runContainer16) container { + for _, p := range rc2.iv { + last := int64(p.last()) + for i := int64(p.start); i <= last; i++ { + rc.Add(uint16(i)) + } + } + return rc +} + +func (rc *runContainer16) iorBitmapContainer(bc *bitmapContainer) container { + + it := bc.getShortIterator() + for it.hasNext() { + rc.Add(it.next()) + } + return rc +} + +func (rc *runContainer16) iorArray(ac *arrayContainer) container { + it := ac.getShortIterator() + for it.hasNext() { + rc.Add(it.next()) + } + return rc +} + +// lazyIOR is described (not yet implemented) in +// this nice note from @lemire on +// https://github.com/RoaringBitmap/roaring/pull/70#issuecomment-263613737 +// +// Description of lazyOR and lazyIOR from @lemire: +// +// Lazy functions are optional and can be simply +// wrapper around non-lazy functions. +// +// The idea of "laziness" is as follows. It is +// inspired by the concept of lazy evaluation +// you might be familiar with (functional programming +// and all that). So a roaring bitmap is +// such that all its containers are, in some +// sense, chosen to use as little memory as +// possible. This is nice. Also, all bitsets +// are "cardinality aware" so that you can do +// fast rank/select queries, or query the +// cardinality of the whole bitmap... very fast, +// without latency. +// +// However, imagine that you are aggregating 100 +// bitmaps together. So you OR the first two, then OR +// that with the third one and so forth. 
Clearly, +// intermediate bitmaps don't need to be as +// compressed as possible, right? They can be +// in a "dirty state". You only need the end +// result to be in a nice state... which you +// can achieve by calling repairAfterLazy at the end. +// +// The Java/C code does something special for +// the in-place lazy OR runs. The idea is that +// instead of taking two run containers and +// generating a new one, we actually try to +// do the computation in-place through a +// technique invented by @gssiyankai (pinging him!). +// What you do is you check whether the host +// run container has lots of extra capacity. +// If it does, you move its data at the end of +// the backing array, and then you write +// the answer at the beginning. What this +// trick does is minimize memory allocations. +// +func (rc *runContainer16) lazyIOR(a container) container { + // not lazy at the moment + return rc.ior(a) +} + +// lazyOR is described above in lazyIOR. +func (rc *runContainer16) lazyOR(a container) container { + // not lazy at the moment + return rc.or(a) +} + +func (rc *runContainer16) intersects(a container) bool { + // TODO: optimize by doing inplace/less allocation, possibly? 
+ isect := rc.and(a) + return isect.getCardinality() > 0 +} + +func (rc *runContainer16) xor(a container) container { + switch c := a.(type) { + case *arrayContainer: + return rc.xorArray(c) + case *bitmapContainer: + return rc.xorBitmap(c) + case *runContainer16: + return rc.xorRunContainer16(c) + } + panic("unsupported container type") +} + +func (rc *runContainer16) iandNot(a container) container { + switch c := a.(type) { + case *arrayContainer: + return rc.iandNotArray(c) + case *bitmapContainer: + return rc.iandNotBitmap(c) + case *runContainer16: + return rc.iandNotRunContainer16(c) + } + panic("unsupported container type") +} + +// flip the values in the range [firstOfRange,endx) +func (rc *runContainer16) inot(firstOfRange, endx int) container { + if firstOfRange >= endx { + panic(fmt.Sprintf("invalid %v = endx >= firstOfRange = %v", endx, firstOfRange)) + } + // TODO: minimize copies, do it all inplace; not() makes a copy. + rc = rc.Not(firstOfRange, endx) + return rc +} + +func (rc *runContainer16) getCardinality() int { + return int(rc.cardinality()) +} + +func (rc *runContainer16) rank(x uint16) int { + n := int64(len(rc.iv)) + xx := int64(x) + w, already, _ := rc.search(xx, nil) + if w < 0 { + return 0 + } + if !already && w == n-1 { + return rc.getCardinality() + } + var rnk int64 + if !already { + for i := int64(0); i <= w; i++ { + rnk += rc.iv[i].runlen() + } + return int(rnk) + } + for i := int64(0); i < w; i++ { + rnk += rc.iv[i].runlen() + } + rnk += int64(x-rc.iv[w].start) + 1 + return int(rnk) +} + +func (rc *runContainer16) selectInt(x uint16) int { + return rc.selectInt16(x) +} + +func (rc *runContainer16) andNotRunContainer16(b *runContainer16) container { + return rc.AndNotRunContainer16(b) +} + +func (rc *runContainer16) andNotArray(ac *arrayContainer) container { + rcb := rc.toBitmapContainer() + acb := ac.toBitmapContainer() + return rcb.andNotBitmap(acb) +} + +func (rc *runContainer16) andNotBitmap(bc *bitmapContainer) container { + 
rcb := rc.toBitmapContainer() + return rcb.andNotBitmap(bc) +} + +func (rc *runContainer16) toBitmapContainer() *bitmapContainer { + bc := newBitmapContainer() + for i := range rc.iv { + bc.iaddRange(int(rc.iv[i].start), int(rc.iv[i].last())+1) + } + bc.computeCardinality() + return bc +} + +func (rc *runContainer16) iandNotRunContainer16(x2 *runContainer16) container { + rcb := rc.toBitmapContainer() + x2b := x2.toBitmapContainer() + rcb.iandNotBitmapSurely(x2b) + // TODO: check size and optimize the return value + // TODO: is inplace modification really required? If not, elide the copy. + rc2 := newRunContainer16FromBitmapContainer(rcb) + *rc = *rc2 + return rc +} + +func (rc *runContainer16) iandNotArray(ac *arrayContainer) container { + rcb := rc.toBitmapContainer() + acb := ac.toBitmapContainer() + rcb.iandNotBitmapSurely(acb) + // TODO: check size and optimize the return value + // TODO: is inplace modification really required? If not, elide the copy. + rc2 := newRunContainer16FromBitmapContainer(rcb) + *rc = *rc2 + return rc +} + +func (rc *runContainer16) iandNotBitmap(bc *bitmapContainer) container { + rcb := rc.toBitmapContainer() + rcb.iandNotBitmapSurely(bc) + // TODO: check size and optimize the return value + // TODO: is inplace modification really required? If not, elide the copy. 
+ rc2 := newRunContainer16FromBitmapContainer(rcb) + *rc = *rc2 + return rc +} + +func (rc *runContainer16) xorRunContainer16(x2 *runContainer16) container { + rcb := rc.toBitmapContainer() + x2b := x2.toBitmapContainer() + return rcb.xorBitmap(x2b) +} + +func (rc *runContainer16) xorArray(ac *arrayContainer) container { + rcb := rc.toBitmapContainer() + acb := ac.toBitmapContainer() + return rcb.xorBitmap(acb) +} + +func (rc *runContainer16) xorBitmap(bc *bitmapContainer) container { + rcb := rc.toBitmapContainer() + return rcb.xorBitmap(bc) +} + +// convert to bitmap or array *if needed* +func (rc *runContainer16) toEfficientContainer() container { + + // runContainer16SerializedSizeInBytes(numRuns) + sizeAsRunContainer := rc.getSizeInBytes() + sizeAsBitmapContainer := bitmapContainerSizeInBytes() + card := int(rc.cardinality()) + sizeAsArrayContainer := arrayContainerSizeInBytes(card) + if sizeAsRunContainer <= minOfInt(sizeAsBitmapContainer, sizeAsArrayContainer) { + return rc + } + if card <= arrayDefaultMaxSize { + return rc.toArrayContainer() + } + bc := newBitmapContainerFromRun(rc) + return bc +} + +func (rc *runContainer16) toArrayContainer() *arrayContainer { + ac := newArrayContainer() + for i := range rc.iv { + ac.iaddRange(int(rc.iv[i].start), int(rc.iv[i].last())+1) + } + return ac +} + +func newRunContainer16FromContainer(c container) *runContainer16 { + + switch x := c.(type) { + case *runContainer16: + return x.Clone() + case *arrayContainer: + return newRunContainer16FromArray(x) + case *bitmapContainer: + return newRunContainer16FromBitmapContainer(x) + } + panic("unsupported container type") +} + +// And finds the intersection of rc and b. +func (rc *runContainer16) And(b *Bitmap) *Bitmap { + out := NewBitmap() + for _, p := range rc.iv { + plast := p.last() + for i := p.start; i <= plast; i++ { + if b.Contains(uint32(i)) { + out.Add(uint32(i)) + } + } + } + return out +} + +// Xor returns the exclusive-or of rc and b. 
+func (rc *runContainer16) Xor(b *Bitmap) *Bitmap { + out := b.Clone() + for _, p := range rc.iv { + plast := p.last() + for v := p.start; v <= plast; v++ { + w := uint32(v) + if out.Contains(w) { + out.RemoveRange(uint64(w), uint64(w+1)) + } else { + out.Add(w) + } + } + } + return out +} + +// Or returns the union of rc and b. +func (rc *runContainer16) Or(b *Bitmap) *Bitmap { + out := b.Clone() + for _, p := range rc.iv { + plast := p.last() + for v := p.start; v <= plast; v++ { + out.Add(uint32(v)) + } + } + return out +} + +// serializedSizeInBytes returns the number of bytes of memory +// required by this runContainer16. This is for the +// Roaring format, as specified https://github.com/RoaringBitmap/RoaringFormatSpec/ +func (rc *runContainer16) serializedSizeInBytes() int { + // number of runs in one uint16, then each run + // needs two more uint16 + return 2 + len(rc.iv)*4 +} + +func (rc *runContainer16) addOffset(x uint16) []container { + low := newRunContainer16() + high := newRunContainer16() + + for _, iv := range rc.iv { + val := int(iv.start) + int(x) + finalVal := int(val) + int(iv.length) + if val <= 0xffff { + if finalVal <= 0xffff { + low.iv = append(low.iv, interval16{uint16(val), iv.length}) + } else { + low.iv = append(low.iv, interval16{uint16(val), uint16(0xffff - val)}) + high.iv = append(high.iv, interval16{uint16(0), uint16(finalVal & 0xffff)}) + } + } else { + high.iv = append(high.iv, interval16{uint16(val & 0xffff), iv.length}) + } + } + return []container{low, high} +} diff --git a/vendor/github.com/RoaringBitmap/roaring/runcontainer_gen.go b/vendor/github.com/RoaringBitmap/roaring/runcontainer_gen.go new file mode 100644 index 0000000..84537d0 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/runcontainer_gen.go @@ -0,0 +1,1104 @@ +package roaring + +// NOTE: THIS FILE WAS PRODUCED BY THE +// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp) +// DO NOT EDIT + +import "github.com/tinylib/msgp/msgp" + +// Deprecated: 
DecodeMsg implements msgp.Decodable +func (z *addHelper16) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zbai uint32 + zbai, err = dc.ReadMapHeader() + if err != nil { + return + } + for zbai > 0 { + zbai-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "runstart": + z.runstart, err = dc.ReadUint16() + if err != nil { + return + } + case "runlen": + z.runlen, err = dc.ReadUint16() + if err != nil { + return + } + case "actuallyAdded": + z.actuallyAdded, err = dc.ReadUint16() + if err != nil { + return + } + case "m": + var zcmr uint32 + zcmr, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.m) >= int(zcmr) { + z.m = (z.m)[:zcmr] + } else { + z.m = make([]interval16, zcmr) + } + for zxvk := range z.m { + var zajw uint32 + zajw, err = dc.ReadMapHeader() + if err != nil { + return + } + for zajw > 0 { + zajw-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.m[zxvk].start, err = dc.ReadUint16() + if err != nil { + return + } + case "last": + z.m[zxvk].length, err = dc.ReadUint16() + z.m[zxvk].length -= z.m[zxvk].start + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + case "rc": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + return + } + z.rc = nil + } else { + if z.rc == nil { + z.rc = new(runContainer16) + } + var zwht uint32 + zwht, err = dc.ReadMapHeader() + if err != nil { + return + } + for zwht > 0 { + zwht-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "iv": + var zhct uint32 + zhct, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.rc.iv) >= int(zhct) { + z.rc.iv = (z.rc.iv)[:zhct] + } else { + z.rc.iv = make([]interval16, zhct) + } + for zbzg := range z.rc.iv { + var zcua uint32 + zcua, err = dc.ReadMapHeader() + if err != nil { + 
return + } + for zcua > 0 { + zcua-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.rc.iv[zbzg].start, err = dc.ReadUint16() + if err != nil { + return + } + case "last": + z.rc.iv[zbzg].length, err = dc.ReadUint16() + z.rc.iv[zbzg].length -= z.rc.iv[zbzg].start + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + case "card": + z.rc.card, err = dc.ReadInt64() + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z *addHelper16) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 5 + // write "runstart" + err = en.Append(0x85, 0xa8, 0x72, 0x75, 0x6e, 0x73, 0x74, 0x61, 0x72, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.runstart) + if err != nil { + return + } + // write "runlen" + err = en.Append(0xa6, 0x72, 0x75, 0x6e, 0x6c, 0x65, 0x6e) + if err != nil { + return err + } + err = en.WriteUint16(z.runlen) + if err != nil { + return + } + // write "actuallyAdded" + err = en.Append(0xad, 0x61, 0x63, 0x74, 0x75, 0x61, 0x6c, 0x6c, 0x79, 0x41, 0x64, 0x64, 0x65, 0x64) + if err != nil { + return err + } + err = en.WriteUint16(z.actuallyAdded) + if err != nil { + return + } + // write "m" + err = en.Append(0xa1, 0x6d) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.m))) + if err != nil { + return + } + for zxvk := range z.m { + // map header, size 2 + // write "start" + err = en.Append(0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.m[zxvk].start) + if err != nil { + return + } + // write "last" + err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.m[zxvk].last()) + if err != nil { + return + } + } + 
// write "rc" + err = en.Append(0xa2, 0x72, 0x63) + if err != nil { + return err + } + if z.rc == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + // map header, size 2 + // write "iv" + err = en.Append(0x82, 0xa2, 0x69, 0x76) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.rc.iv))) + if err != nil { + return + } + for zbzg := range z.rc.iv { + // map header, size 2 + // write "start" + err = en.Append(0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.rc.iv[zbzg].start) + if err != nil { + return + } + // write "last" + err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.rc.iv[zbzg].last()) + if err != nil { + return + } + } + // write "card" + err = en.Append(0xa4, 0x63, 0x61, 0x72, 0x64) + if err != nil { + return err + } + err = en.WriteInt64(z.rc.card) + if err != nil { + return + } + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *addHelper16) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 5 + // string "runstart" + o = append(o, 0x85, 0xa8, 0x72, 0x75, 0x6e, 0x73, 0x74, 0x61, 0x72, 0x74) + o = msgp.AppendUint16(o, z.runstart) + // string "runlen" + o = append(o, 0xa6, 0x72, 0x75, 0x6e, 0x6c, 0x65, 0x6e) + o = msgp.AppendUint16(o, z.runlen) + // string "actuallyAdded" + o = append(o, 0xad, 0x61, 0x63, 0x74, 0x75, 0x61, 0x6c, 0x6c, 0x79, 0x41, 0x64, 0x64, 0x65, 0x64) + o = msgp.AppendUint16(o, z.actuallyAdded) + // string "m" + o = append(o, 0xa1, 0x6d) + o = msgp.AppendArrayHeader(o, uint32(len(z.m))) + for zxvk := range z.m { + // map header, size 2 + // string "start" + o = append(o, 0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + o = msgp.AppendUint16(o, z.m[zxvk].start) + // string "last" + o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74) + o = msgp.AppendUint16(o, z.m[zxvk].last()) + } + // string "rc" + o = append(o, 0xa2, 
0x72, 0x63) + if z.rc == nil { + o = msgp.AppendNil(o) + } else { + // map header, size 2 + // string "iv" + o = append(o, 0x82, 0xa2, 0x69, 0x76) + o = msgp.AppendArrayHeader(o, uint32(len(z.rc.iv))) + for zbzg := range z.rc.iv { + // map header, size 2 + // string "start" + o = append(o, 0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + o = msgp.AppendUint16(o, z.rc.iv[zbzg].start) + // string "last" + o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74) + o = msgp.AppendUint16(o, z.rc.iv[zbzg].last()) + } + // string "card" + o = append(o, 0xa4, 0x63, 0x61, 0x72, 0x64) + o = msgp.AppendInt64(o, z.rc.card) + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *addHelper16) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zxhx uint32 + zxhx, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zxhx > 0 { + zxhx-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "runstart": + z.runstart, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "runlen": + z.runlen, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "actuallyAdded": + z.actuallyAdded, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "m": + var zlqf uint32 + zlqf, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.m) >= int(zlqf) { + z.m = (z.m)[:zlqf] + } else { + z.m = make([]interval16, zlqf) + } + for zxvk := range z.m { + var zdaf uint32 + zdaf, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zdaf > 0 { + zdaf-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.m[zxvk].start, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "last": + z.m[zxvk].length, bts, err = msgp.ReadUint16Bytes(bts) + z.m[zxvk].length -= 
z.m[zxvk].start + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + case "rc": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + z.rc = nil + } else { + if z.rc == nil { + z.rc = new(runContainer16) + } + var zpks uint32 + zpks, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zpks > 0 { + zpks-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "iv": + var zjfb uint32 + zjfb, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.rc.iv) >= int(zjfb) { + z.rc.iv = (z.rc.iv)[:zjfb] + } else { + z.rc.iv = make([]interval16, zjfb) + } + for zbzg := range z.rc.iv { + var zcxo uint32 + zcxo, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zcxo > 0 { + zcxo-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.rc.iv[zbzg].start, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "last": + z.rc.iv[zbzg].length, bts, err = msgp.ReadUint16Bytes(bts) + z.rc.iv[zbzg].length -= z.rc.iv[zbzg].start + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + case "card": + z.rc.card, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *addHelper16) Msgsize() (s int) { + s = 1 + 9 + msgp.Uint16Size + 7 + msgp.Uint16Size + 14 + msgp.Uint16Size + 2 + msgp.ArrayHeaderSize + (len(z.m) * (12 + msgp.Uint16Size + msgp.Uint16Size)) + 3 + if z.rc == nil { + s += 
msgp.NilSize
+	} else {
+		s += 1 + 3 + msgp.ArrayHeaderSize + (len(z.rc.iv) * (12 + msgp.Uint16Size + msgp.Uint16Size)) + 5 + msgp.Int64Size
+	}
+	return
+}
+
+// Deprecated: DecodeMsg implements msgp.Decodable
+func (z *interval16) DecodeMsg(dc *msgp.Reader) (err error) {
+	var field []byte
+	_ = field
+	var zeff uint32
+	zeff, err = dc.ReadMapHeader()
+	if err != nil {
+		return
+	}
+	for zeff > 0 {
+		zeff--
+		field, err = dc.ReadMapKeyPtr()
+		if err != nil {
+			return
+		}
+		switch msgp.UnsafeString(field) {
+		case "start":
+			z.start, err = dc.ReadUint16()
+			if err != nil {
+				return
+			}
+		case "last":
+			z.length, err = dc.ReadUint16()
+			z.length -= z.start
+			if err != nil {
+				return
+			}
+		default:
+			err = dc.Skip()
+			if err != nil {
+				return
+			}
+		}
+	}
+	return
+}
+
+// Deprecated: EncodeMsg implements msgp.Encodable
+func (z interval16) EncodeMsg(en *msgp.Writer) (err error) {
+	// map header, size 2
+	// write "start"
+	err = en.Append(0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74)
+	if err != nil {
+		return err
+	}
+	err = en.WriteUint16(z.start)
+	if err != nil {
+		return
+	}
+	// write "last"
+	err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74)
+	if err != nil {
+		return err
+	}
+	err = en.WriteUint16(z.last())
+	if err != nil {
+		return
+	}
+	return
+}
+
+// Deprecated: MarshalMsg implements msgp.Marshaler
+func (z interval16) MarshalMsg(b []byte) (o []byte, err error) {
+	o = msgp.Require(b, z.Msgsize())
+	// map header, size 2
+	// string "start"
+	o = append(o, 0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74)
+	o = msgp.AppendUint16(o, z.start)
+	// string "last"
+	o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74)
+	o = msgp.AppendUint16(o, z.last())
+	return
+}
+
+// Deprecated: UnmarshalMsg implements msgp.Unmarshaler
+func (z *interval16) UnmarshalMsg(bts []byte) (o []byte, err error) {
+	var field []byte
+	_ = field
+	var zrsw uint32
+	zrsw, bts, err = msgp.ReadMapHeaderBytes(bts)
+	if err != nil {
+		return
+	}
+	for zrsw > 0 {
+		zrsw--
+		field, bts, err =
msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.start, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "last": + z.length, bts, err = msgp.ReadUint16Bytes(bts) + z.length -= z.start + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z interval16) Msgsize() (s int) { + s = 1 + 6 + msgp.Uint16Size + 5 + msgp.Uint16Size + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *runContainer16) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zdnj uint32 + zdnj, err = dc.ReadMapHeader() + if err != nil { + return + } + for zdnj > 0 { + zdnj-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "iv": + var zobc uint32 + zobc, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap(z.iv) >= int(zobc) { + z.iv = (z.iv)[:zobc] + } else { + z.iv = make([]interval16, zobc) + } + for zxpk := range z.iv { + var zsnv uint32 + zsnv, err = dc.ReadMapHeader() + if err != nil { + return + } + for zsnv > 0 { + zsnv-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.iv[zxpk].start, err = dc.ReadUint16() + if err != nil { + return + } + case "last": + z.iv[zxpk].length, err = dc.ReadUint16() + z.iv[zxpk].length -= z.iv[zxpk].start + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + } + case "card": + z.card, err = dc.ReadInt64() + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z *runContainer16) EncodeMsg(en *msgp.Writer) (err error) { + // map 
header, size 2 + // write "iv" + err = en.Append(0x82, 0xa2, 0x69, 0x76) + if err != nil { + return err + } + err = en.WriteArrayHeader(uint32(len(z.iv))) + if err != nil { + return + } + for zxpk := range z.iv { + // map header, size 2 + // write "start" + err = en.Append(0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.iv[zxpk].start) + if err != nil { + return + } + // write "last" + err = en.Append(0xa4, 0x6c, 0x61, 0x73, 0x74) + if err != nil { + return err + } + err = en.WriteUint16(z.iv[zxpk].last()) + if err != nil { + return + } + } + // write "card" + err = en.Append(0xa4, 0x63, 0x61, 0x72, 0x64) + if err != nil { + return err + } + err = en.WriteInt64(z.card) + if err != nil { + return + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *runContainer16) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 2 + // string "iv" + o = append(o, 0x82, 0xa2, 0x69, 0x76) + o = msgp.AppendArrayHeader(o, uint32(len(z.iv))) + for zxpk := range z.iv { + // map header, size 2 + // string "start" + o = append(o, 0x82, 0xa5, 0x73, 0x74, 0x61, 0x72, 0x74) + o = msgp.AppendUint16(o, z.iv[zxpk].start) + // string "last" + o = append(o, 0xa4, 0x6c, 0x61, 0x73, 0x74) + o = msgp.AppendUint16(o, z.iv[zxpk].last()) + } + // string "card" + o = append(o, 0xa4, 0x63, 0x61, 0x72, 0x64) + o = msgp.AppendInt64(o, z.card) + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *runContainer16) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zkgt uint32 + zkgt, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zkgt > 0 { + zkgt-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "iv": + var zema uint32 + zema, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap(z.iv) >= 
int(zema) { + z.iv = (z.iv)[:zema] + } else { + z.iv = make([]interval16, zema) + } + for zxpk := range z.iv { + var zpez uint32 + zpez, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zpez > 0 { + zpez-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "start": + z.iv[zxpk].start, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + case "last": + z.iv[zxpk].length, bts, err = msgp.ReadUint16Bytes(bts) + z.iv[zxpk].length -= z.iv[zxpk].start + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + } + case "card": + z.card, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *runContainer16) Msgsize() (s int) { + s = 1 + 3 + msgp.ArrayHeaderSize + (len(z.iv) * (12 + msgp.Uint16Size + msgp.Uint16Size)) + 5 + msgp.Int64Size + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *runIterator16) DecodeMsg(dc *msgp.Reader) (err error) { + var field []byte + _ = field + var zqke uint32 + zqke, err = dc.ReadMapHeader() + if err != nil { + return + } + for zqke > 0 { + zqke-- + field, err = dc.ReadMapKeyPtr() + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "rc": + if dc.IsNil() { + err = dc.ReadNil() + if err != nil { + return + } + z.rc = nil + } else { + if z.rc == nil { + z.rc = new(runContainer16) + } + err = z.rc.DecodeMsg(dc) + if err != nil { + return + } + } + case "curIndex": + z.curIndex, err = dc.ReadInt64() + if err != nil { + return + } + case "curPosInIndex": + z.curPosInIndex, err = dc.ReadUint16() + if err != nil { + return + } + default: + err = dc.Skip() + if err != nil { + return + } + } + } + return +} + 
+// Deprecated: EncodeMsg implements msgp.Encodable +func (z *runIterator16) EncodeMsg(en *msgp.Writer) (err error) { + // map header, size 3 + // write "rc" + err = en.Append(0x83, 0xa2, 0x72, 0x63) + if err != nil { + return err + } + if z.rc == nil { + err = en.WriteNil() + if err != nil { + return + } + } else { + err = z.rc.EncodeMsg(en) + if err != nil { + return + } + } + // write "curIndex" + err = en.Append(0xa8, 0x63, 0x75, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78) + if err != nil { + return err + } + err = en.WriteInt64(z.curIndex) + if err != nil { + return + } + // write "curPosInIndex" + err = en.Append(0xad, 0x63, 0x75, 0x72, 0x50, 0x6f, 0x73, 0x49, 0x6e, 0x49, 0x6e, 0x64, 0x65, 0x78) + if err != nil { + return err + } + err = en.WriteUint16(z.curPosInIndex) + if err != nil { + return + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z *runIterator16) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + // map header, size 3 + // string "rc" + o = append(o, 0x83, 0xa2, 0x72, 0x63) + if z.rc == nil { + o = msgp.AppendNil(o) + } else { + o, err = z.rc.MarshalMsg(o) + if err != nil { + return + } + } + // string "curIndex" + o = append(o, 0xa8, 0x63, 0x75, 0x72, 0x49, 0x6e, 0x64, 0x65, 0x78) + o = msgp.AppendInt64(o, z.curIndex) + // string "curPosInIndex" + o = append(o, 0xad, 0x63, 0x75, 0x72, 0x50, 0x6f, 0x73, 0x49, 0x6e, 0x49, 0x6e, 0x64, 0x65, 0x78) + o = msgp.AppendUint16(o, z.curPosInIndex) + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *runIterator16) UnmarshalMsg(bts []byte) (o []byte, err error) { + var field []byte + _ = field + var zqyh uint32 + zqyh, bts, err = msgp.ReadMapHeaderBytes(bts) + if err != nil { + return + } + for zqyh > 0 { + zqyh-- + field, bts, err = msgp.ReadMapKeyZC(bts) + if err != nil { + return + } + switch msgp.UnsafeString(field) { + case "rc": + if msgp.IsNil(bts) { + bts, err = msgp.ReadNilBytes(bts) + if err != nil { + return + } + 
z.rc = nil + } else { + if z.rc == nil { + z.rc = new(runContainer16) + } + bts, err = z.rc.UnmarshalMsg(bts) + if err != nil { + return + } + } + case "curIndex": + z.curIndex, bts, err = msgp.ReadInt64Bytes(bts) + if err != nil { + return + } + case "curPosInIndex": + z.curPosInIndex, bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + default: + bts, err = msgp.Skip(bts) + if err != nil { + return + } + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z *runIterator16) Msgsize() (s int) { + s = 1 + 3 + if z.rc == nil { + s += msgp.NilSize + } else { + s += z.rc.Msgsize() + } + s += 9 + msgp.Int64Size + 14 + msgp.Uint16Size + return +} + +// Deprecated: DecodeMsg implements msgp.Decodable +func (z *uint16Slice) DecodeMsg(dc *msgp.Reader) (err error) { + var zjpj uint32 + zjpj, err = dc.ReadArrayHeader() + if err != nil { + return + } + if cap((*z)) >= int(zjpj) { + (*z) = (*z)[:zjpj] + } else { + (*z) = make(uint16Slice, zjpj) + } + for zywj := range *z { + (*z)[zywj], err = dc.ReadUint16() + if err != nil { + return + } + } + return +} + +// Deprecated: EncodeMsg implements msgp.Encodable +func (z uint16Slice) EncodeMsg(en *msgp.Writer) (err error) { + err = en.WriteArrayHeader(uint32(len(z))) + if err != nil { + return + } + for zzpf := range z { + err = en.WriteUint16(z[zzpf]) + if err != nil { + return + } + } + return +} + +// Deprecated: MarshalMsg implements msgp.Marshaler +func (z uint16Slice) MarshalMsg(b []byte) (o []byte, err error) { + o = msgp.Require(b, z.Msgsize()) + o = msgp.AppendArrayHeader(o, uint32(len(z))) + for zzpf := range z { + o = msgp.AppendUint16(o, z[zzpf]) + } + return +} + +// Deprecated: UnmarshalMsg implements msgp.Unmarshaler +func (z *uint16Slice) UnmarshalMsg(bts []byte) (o []byte, err error) { + var zgmo uint32 + zgmo, bts, err = msgp.ReadArrayHeaderBytes(bts) + if err != nil { + return + } + if cap((*z)) >= 
int(zgmo) { + (*z) = (*z)[:zgmo] + } else { + (*z) = make(uint16Slice, zgmo) + } + for zrfe := range *z { + (*z)[zrfe], bts, err = msgp.ReadUint16Bytes(bts) + if err != nil { + return + } + } + o = bts + return +} + +// Deprecated: Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message +func (z uint16Slice) Msgsize() (s int) { + s = msgp.ArrayHeaderSize + (len(z) * (msgp.Uint16Size)) + return +} diff --git a/vendor/github.com/RoaringBitmap/roaring/serialization.go b/vendor/github.com/RoaringBitmap/roaring/serialization.go new file mode 100644 index 0000000..7b7ed29 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/serialization.go @@ -0,0 +1,34 @@ +package roaring + +import ( + "encoding/binary" + "io" + + "github.com/tinylib/msgp/msgp" +) + +// writeTo for runContainer16 follows this +// spec: https://github.com/RoaringBitmap/RoaringFormatSpec +// +func (b *runContainer16) writeTo(stream io.Writer) (int, error) { + buf := make([]byte, 2+4*len(b.iv)) + binary.LittleEndian.PutUint16(buf[0:], uint16(len(b.iv))) + for i, v := range b.iv { + binary.LittleEndian.PutUint16(buf[2+i*4:], v.start) + binary.LittleEndian.PutUint16(buf[2+2+i*4:], v.length) + } + return stream.Write(buf) +} + +func (b *runContainer16) writeToMsgpack(stream io.Writer) (int, error) { + bts, err := b.MarshalMsg(nil) + if err != nil { + return 0, err + } + return stream.Write(bts) +} + +func (b *runContainer16) readFromMsgpack(stream io.Reader) (int, error) { + err := msgp.Decode(stream, b) + return 0, err +} diff --git a/vendor/github.com/RoaringBitmap/roaring/serialization_generic.go b/vendor/github.com/RoaringBitmap/roaring/serialization_generic.go new file mode 100644 index 0000000..4b9d9e3 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/serialization_generic.go @@ -0,0 +1,133 @@ +// +build !amd64,!386 appengine + +package roaring + +import ( + "encoding/binary" + "errors" + "io" +) + +func (b *arrayContainer) writeTo(stream 
io.Writer) (int, error) { + buf := make([]byte, 2*len(b.content)) + for i, v := range b.content { + base := i * 2 + buf[base] = byte(v) + buf[base+1] = byte(v >> 8) + } + return stream.Write(buf) +} + +func (b *arrayContainer) readFrom(stream io.Reader) (int, error) { + err := binary.Read(stream, binary.LittleEndian, b.content) + if err != nil { + return 0, err + } + return 2 * len(b.content), nil +} + +func (b *bitmapContainer) writeTo(stream io.Writer) (int, error) { + if b.cardinality <= arrayDefaultMaxSize { + return 0, errors.New("refusing to write bitmap container with cardinality of array container") + } + + // Write set + buf := make([]byte, 8*len(b.bitmap)) + for i, v := range b.bitmap { + base := i * 8 + buf[base] = byte(v) + buf[base+1] = byte(v >> 8) + buf[base+2] = byte(v >> 16) + buf[base+3] = byte(v >> 24) + buf[base+4] = byte(v >> 32) + buf[base+5] = byte(v >> 40) + buf[base+6] = byte(v >> 48) + buf[base+7] = byte(v >> 56) + } + return stream.Write(buf) +} + +func (b *bitmapContainer) readFrom(stream io.Reader) (int, error) { + err := binary.Read(stream, binary.LittleEndian, b.bitmap) + if err != nil { + return 0, err + } + b.computeCardinality() + return 8 * len(b.bitmap), nil +} + +func (bc *bitmapContainer) asLittleEndianByteSlice() []byte { + by := make([]byte, len(bc.bitmap)*8) + for i := range bc.bitmap { + binary.LittleEndian.PutUint64(by[i*8:], bc.bitmap[i]) + } + return by +} + +func uint64SliceAsByteSlice(slice []uint64) []byte { + by := make([]byte, len(slice)*8) + + for i, v := range slice { + binary.LittleEndian.PutUint64(by[i*8:], v) + } + + return by +} + +func uint16SliceAsByteSlice(slice []uint16) []byte { + by := make([]byte, len(slice)*2) + + for i, v := range slice { + binary.LittleEndian.PutUint16(by[i*2:], v) + } + + return by +} + +func byteSliceAsUint16Slice(slice []byte) []uint16 { + if len(slice)%2 != 0 { + panic("Slice size should be divisible by 2") + } + + b := make([]uint16, len(slice)/2) + + for i := range b { + b[i] = 
binary.LittleEndian.Uint16(slice[2*i:]) + } + + return b +} + +func byteSliceAsUint64Slice(slice []byte) []uint64 { + if len(slice)%8 != 0 { + panic("Slice size should be divisible by 8") + } + + b := make([]uint64, len(slice)/8) + + for i := range b { + b[i] = binary.LittleEndian.Uint64(slice[8*i:]) + } + + return b +} + +// Converts a byte slice to a interval16 slice. +// The function assumes that the slice byte buffer is run container data +// encoded according to Roaring Format Spec +func byteSliceAsInterval16Slice(byteSlice []byte) []interval16 { + if len(byteSlice)%4 != 0 { + panic("Slice size should be divisible by 4") + } + + intervalSlice := make([]interval16, len(byteSlice)/4) + + for i := range intervalSlice { + intervalSlice[i] = interval16{ + start: binary.LittleEndian.Uint16(byteSlice[i*4:]), + length: binary.LittleEndian.Uint16(byteSlice[i*4+2:]), + } + } + + return intervalSlice +} diff --git a/vendor/github.com/RoaringBitmap/roaring/serialization_littleendian.go b/vendor/github.com/RoaringBitmap/roaring/serialization_littleendian.go new file mode 100644 index 0000000..818a06c --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/serialization_littleendian.go @@ -0,0 +1,134 @@ +// +build 386 amd64,!appengine + +package roaring + +import ( + "errors" + "io" + "reflect" + "runtime" + "unsafe" +) + +func (ac *arrayContainer) writeTo(stream io.Writer) (int, error) { + buf := uint16SliceAsByteSlice(ac.content) + return stream.Write(buf) +} + +func (bc *bitmapContainer) writeTo(stream io.Writer) (int, error) { + if bc.cardinality <= arrayDefaultMaxSize { + return 0, errors.New("refusing to write bitmap container with cardinality of array container") + } + buf := uint64SliceAsByteSlice(bc.bitmap) + return stream.Write(buf) +} + +func uint64SliceAsByteSlice(slice []uint64) []byte { + // make a new slice header + header := *(*reflect.SliceHeader)(unsafe.Pointer(&slice)) + + // update its capacity and length + header.Len *= 8 + header.Cap *= 8 + + // 
instantiate result and use KeepAlive so data isn't unmapped. + result := *(*[]byte)(unsafe.Pointer(&header)) + runtime.KeepAlive(&slice) + + // return it + return result +} + +func uint16SliceAsByteSlice(slice []uint16) []byte { + // make a new slice header + header := *(*reflect.SliceHeader)(unsafe.Pointer(&slice)) + + // update its capacity and length + header.Len *= 2 + header.Cap *= 2 + + // instantiate result and use KeepAlive so data isn't unmapped. + result := *(*[]byte)(unsafe.Pointer(&header)) + runtime.KeepAlive(&slice) + + // return it + return result +} + +func (bc *bitmapContainer) asLittleEndianByteSlice() []byte { + return uint64SliceAsByteSlice(bc.bitmap) +} + +// Deserialization code follows + +//// +// These methods (byteSliceAsUint16Slice,...) do not make copies, +// they are pointer-based (unsafe). The caller is responsible to +// ensure that the input slice does not get garbage collected, deleted +// or modified while you hold the returned slince. +//// +func byteSliceAsUint16Slice(slice []byte) (result []uint16) { // here we create a new slice holder + if len(slice)%2 != 0 { + panic("Slice size should be divisible by 2") + } + // reference: https://go101.org/article/unsafe.html + + // make a new slice header + bHeader := (*reflect.SliceHeader)(unsafe.Pointer(&slice)) + rHeader := (*reflect.SliceHeader)(unsafe.Pointer(&result)) + + // transfer the data from the given slice to a new variable (our result) + rHeader.Data = bHeader.Data + rHeader.Len = bHeader.Len / 2 + rHeader.Cap = bHeader.Cap / 2 + + // instantiate result and use KeepAlive so data isn't unmapped. 
+ runtime.KeepAlive(&slice) // it is still crucial, GC can free it) + + // return result + return +} + +func byteSliceAsUint64Slice(slice []byte) (result []uint64) { + if len(slice)%8 != 0 { + panic("Slice size should be divisible by 8") + } + // reference: https://go101.org/article/unsafe.html + + // make a new slice header + bHeader := (*reflect.SliceHeader)(unsafe.Pointer(&slice)) + rHeader := (*reflect.SliceHeader)(unsafe.Pointer(&result)) + + // transfer the data from the given slice to a new variable (our result) + rHeader.Data = bHeader.Data + rHeader.Len = bHeader.Len / 8 + rHeader.Cap = bHeader.Cap / 8 + + // instantiate result and use KeepAlive so data isn't unmapped. + runtime.KeepAlive(&slice) // it is still crucial, GC can free it) + + // return result + return +} + +func byteSliceAsInterval16Slice(slice []byte) (result []interval16) { + if len(slice)%4 != 0 { + panic("Slice size should be divisible by 4") + } + // reference: https://go101.org/article/unsafe.html + + // make a new slice header + bHeader := (*reflect.SliceHeader)(unsafe.Pointer(&slice)) + rHeader := (*reflect.SliceHeader)(unsafe.Pointer(&result)) + + // transfer the data from the given slice to a new variable (our result) + rHeader.Data = bHeader.Data + rHeader.Len = bHeader.Len / 4 + rHeader.Cap = bHeader.Cap / 4 + + // instantiate result and use KeepAlive so data isn't unmapped. 
+ runtime.KeepAlive(&slice) // it is still crucial, GC can free it) + + // return result + return +} diff --git a/vendor/github.com/RoaringBitmap/roaring/serializationfuzz.go b/vendor/github.com/RoaringBitmap/roaring/serializationfuzz.go new file mode 100644 index 0000000..5eaa222 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/serializationfuzz.go @@ -0,0 +1,21 @@ +// +build gofuzz + +package roaring + +import "bytes" + +func FuzzSerializationStream(data []byte) int { + newrb := NewBitmap() + if _, err := newrb.ReadFrom(bytes.NewReader(data)); err != nil { + return 0 + } + return 1 +} + +func FuzzSerializationBuffer(data []byte) int { + newrb := NewBitmap() + if _, err := newrb.FromBuffer(data); err != nil { + return 0 + } + return 1 +} diff --git a/vendor/github.com/RoaringBitmap/roaring/setutil.go b/vendor/github.com/RoaringBitmap/roaring/setutil.go new file mode 100644 index 0000000..2fe8151 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/setutil.go @@ -0,0 +1,610 @@ +package roaring + +func equal(a, b []uint16) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if a[i] != b[i] { + return false + } + } + return true +} + +func difference(set1 []uint16, set2 []uint16, buffer []uint16) int { + if 0 == len(set2) { + buffer = buffer[:len(set1)] + for k := 0; k < len(set1); k++ { + buffer[k] = set1[k] + } + return len(set1) + } + if 0 == len(set1) { + return 0 + } + pos := 0 + k1 := 0 + k2 := 0 + buffer = buffer[:cap(buffer)] + s1 := set1[k1] + s2 := set2[k2] + for { + if s1 < s2 { + buffer[pos] = s1 + pos++ + k1++ + if k1 >= len(set1) { + break + } + s1 = set1[k1] + } else if s1 == s2 { + k1++ + k2++ + if k1 >= len(set1) { + break + } + s1 = set1[k1] + if k2 >= len(set2) { + for ; k1 < len(set1); k1++ { + buffer[pos] = set1[k1] + pos++ + } + break + } + s2 = set2[k2] + } else { // if (val1>val2) + k2++ + if k2 >= len(set2) { + for ; k1 < len(set1); k1++ { + buffer[pos] = set1[k1] + pos++ + } + break + } + s2 = set2[k2] 
+ } + } + return pos + +} + +func exclusiveUnion2by2(set1 []uint16, set2 []uint16, buffer []uint16) int { + if 0 == len(set2) { + buffer = buffer[:len(set1)] + copy(buffer, set1[:]) + return len(set1) + } + if 0 == len(set1) { + buffer = buffer[:len(set2)] + copy(buffer, set2[:]) + return len(set2) + } + pos := 0 + k1 := 0 + k2 := 0 + s1 := set1[k1] + s2 := set2[k2] + buffer = buffer[:cap(buffer)] + for { + if s1 < s2 { + buffer[pos] = s1 + pos++ + k1++ + if k1 >= len(set1) { + for ; k2 < len(set2); k2++ { + buffer[pos] = set2[k2] + pos++ + } + break + } + s1 = set1[k1] + } else if s1 == s2 { + k1++ + k2++ + if k1 >= len(set1) { + for ; k2 < len(set2); k2++ { + buffer[pos] = set2[k2] + pos++ + } + break + } + if k2 >= len(set2) { + for ; k1 < len(set1); k1++ { + buffer[pos] = set1[k1] + pos++ + } + break + } + s1 = set1[k1] + s2 = set2[k2] + } else { // if (val1>val2) + buffer[pos] = s2 + pos++ + k2++ + if k2 >= len(set2) { + for ; k1 < len(set1); k1++ { + buffer[pos] = set1[k1] + pos++ + } + break + } + s2 = set2[k2] + } + } + return pos +} + +func union2by2(set1 []uint16, set2 []uint16, buffer []uint16) int { + pos := 0 + k1 := 0 + k2 := 0 + if 0 == len(set2) { + buffer = buffer[:len(set1)] + copy(buffer, set1[:]) + return len(set1) + } + if 0 == len(set1) { + buffer = buffer[:len(set2)] + copy(buffer, set2[:]) + return len(set2) + } + s1 := set1[k1] + s2 := set2[k2] + buffer = buffer[:cap(buffer)] + for { + if s1 < s2 { + buffer[pos] = s1 + pos++ + k1++ + if k1 >= len(set1) { + copy(buffer[pos:], set2[k2:]) + pos += len(set2) - k2 + break + } + s1 = set1[k1] + } else if s1 == s2 { + buffer[pos] = s1 + pos++ + k1++ + k2++ + if k1 >= len(set1) { + copy(buffer[pos:], set2[k2:]) + pos += len(set2) - k2 + break + } + if k2 >= len(set2) { + copy(buffer[pos:], set1[k1:]) + pos += len(set1) - k1 + break + } + s1 = set1[k1] + s2 = set2[k2] + } else { // if (set1[k1]>set2[k2]) + buffer[pos] = s2 + pos++ + k2++ + if k2 >= len(set2) { + copy(buffer[pos:], set1[k1:]) + pos 
+= len(set1) - k1 + break + } + s2 = set2[k2] + } + } + return pos +} + +func union2by2Cardinality(set1 []uint16, set2 []uint16) int { + pos := 0 + k1 := 0 + k2 := 0 + if 0 == len(set2) { + return len(set1) + } + if 0 == len(set1) { + return len(set2) + } + s1 := set1[k1] + s2 := set2[k2] + for { + if s1 < s2 { + pos++ + k1++ + if k1 >= len(set1) { + pos += len(set2) - k2 + break + } + s1 = set1[k1] + } else if s1 == s2 { + pos++ + k1++ + k2++ + if k1 >= len(set1) { + pos += len(set2) - k2 + break + } + if k2 >= len(set2) { + pos += len(set1) - k1 + break + } + s1 = set1[k1] + s2 = set2[k2] + } else { // if (set1[k1]>set2[k2]) + pos++ + k2++ + if k2 >= len(set2) { + pos += len(set1) - k1 + break + } + s2 = set2[k2] + } + } + return pos +} + +func intersection2by2( + set1 []uint16, + set2 []uint16, + buffer []uint16) int { + + if len(set1)*64 < len(set2) { + return onesidedgallopingintersect2by2(set1, set2, buffer) + } else if len(set2)*64 < len(set1) { + return onesidedgallopingintersect2by2(set2, set1, buffer) + } else { + return localintersect2by2(set1, set2, buffer) + } +} + +func intersection2by2Cardinality( + set1 []uint16, + set2 []uint16) int { + + if len(set1)*64 < len(set2) { + return onesidedgallopingintersect2by2Cardinality(set1, set2) + } else if len(set2)*64 < len(set1) { + return onesidedgallopingintersect2by2Cardinality(set2, set1) + } else { + return localintersect2by2Cardinality(set1, set2) + } +} + +func intersects2by2( + set1 []uint16, + set2 []uint16) bool { + // could be optimized if one set is much larger than the other one + if (0 == len(set1)) || (0 == len(set2)) { + return false + } + k1 := 0 + k2 := 0 + s1 := set1[k1] + s2 := set2[k2] +mainwhile: + for { + + if s2 < s1 { + for { + k2++ + if k2 == len(set2) { + break mainwhile + } + s2 = set2[k2] + if s2 >= s1 { + break + } + } + } + if s1 < s2 { + for { + k1++ + if k1 == len(set1) { + break mainwhile + } + s1 = set1[k1] + if s1 >= s2 { + break + } + } + + } else { + // (set2[k2] == 
set1[k1]) + return true + } + } + return false +} + +func localintersect2by2( + set1 []uint16, + set2 []uint16, + buffer []uint16) int { + + if (0 == len(set1)) || (0 == len(set2)) { + return 0 + } + k1 := 0 + k2 := 0 + pos := 0 + buffer = buffer[:cap(buffer)] + s1 := set1[k1] + s2 := set2[k2] +mainwhile: + for { + if s2 < s1 { + for { + k2++ + if k2 == len(set2) { + break mainwhile + } + s2 = set2[k2] + if s2 >= s1 { + break + } + } + } + if s1 < s2 { + for { + k1++ + if k1 == len(set1) { + break mainwhile + } + s1 = set1[k1] + if s1 >= s2 { + break + } + } + + } else { + // (set2[k2] == set1[k1]) + buffer[pos] = s1 + pos++ + k1++ + if k1 == len(set1) { + break + } + s1 = set1[k1] + k2++ + if k2 == len(set2) { + break + } + s2 = set2[k2] + } + } + return pos +} + +func localintersect2by2Cardinality( + set1 []uint16, + set2 []uint16) int { + + if (0 == len(set1)) || (0 == len(set2)) { + return 0 + } + k1 := 0 + k2 := 0 + pos := 0 + s1 := set1[k1] + s2 := set2[k2] +mainwhile: + for { + if s2 < s1 { + for { + k2++ + if k2 == len(set2) { + break mainwhile + } + s2 = set2[k2] + if s2 >= s1 { + break + } + } + } + if s1 < s2 { + for { + k1++ + if k1 == len(set1) { + break mainwhile + } + s1 = set1[k1] + if s1 >= s2 { + break + } + } + + } else { + // (set2[k2] == set1[k1]) + pos++ + k1++ + if k1 == len(set1) { + break + } + s1 = set1[k1] + k2++ + if k2 == len(set2) { + break + } + s2 = set2[k2] + } + } + return pos +} + +func advanceUntil( + array []uint16, + pos int, + length int, + min uint16) int { + lower := pos + 1 + + if lower >= length || array[lower] >= min { + return lower + } + + spansize := 1 + + for lower+spansize < length && array[lower+spansize] < min { + spansize *= 2 + } + var upper int + if lower+spansize < length { + upper = lower + spansize + } else { + upper = length - 1 + } + + if array[upper] == min { + return upper + } + + if array[upper] < min { + // means + // array + // has no + // item + // >= min + // pos = array.length; + return length + } + 
+ // we know that the next-smallest span was too small + lower += (spansize >> 1) + + mid := 0 + for lower+1 != upper { + mid = (lower + upper) >> 1 + if array[mid] == min { + return mid + } else if array[mid] < min { + lower = mid + } else { + upper = mid + } + } + return upper + +} + +func onesidedgallopingintersect2by2( + smallset []uint16, + largeset []uint16, + buffer []uint16) int { + + if 0 == len(smallset) { + return 0 + } + buffer = buffer[:cap(buffer)] + k1 := 0 + k2 := 0 + pos := 0 + s1 := largeset[k1] + s2 := smallset[k2] +mainwhile: + + for { + if s1 < s2 { + k1 = advanceUntil(largeset, k1, len(largeset), s2) + if k1 == len(largeset) { + break mainwhile + } + s1 = largeset[k1] + } + if s2 < s1 { + k2++ + if k2 == len(smallset) { + break mainwhile + } + s2 = smallset[k2] + } else { + + buffer[pos] = s2 + pos++ + k2++ + if k2 == len(smallset) { + break + } + s2 = smallset[k2] + k1 = advanceUntil(largeset, k1, len(largeset), s2) + if k1 == len(largeset) { + break mainwhile + } + s1 = largeset[k1] + } + + } + return pos +} + +func onesidedgallopingintersect2by2Cardinality( + smallset []uint16, + largeset []uint16) int { + + if 0 == len(smallset) { + return 0 + } + k1 := 0 + k2 := 0 + pos := 0 + s1 := largeset[k1] + s2 := smallset[k2] +mainwhile: + + for { + if s1 < s2 { + k1 = advanceUntil(largeset, k1, len(largeset), s2) + if k1 == len(largeset) { + break mainwhile + } + s1 = largeset[k1] + } + if s2 < s1 { + k2++ + if k2 == len(smallset) { + break mainwhile + } + s2 = smallset[k2] + } else { + + pos++ + k2++ + if k2 == len(smallset) { + break + } + s2 = smallset[k2] + k1 = advanceUntil(largeset, k1, len(largeset), s2) + if k1 == len(largeset) { + break mainwhile + } + s1 = largeset[k1] + } + + } + return pos +} + +func binarySearch(array []uint16, ikey uint16) int { + low := 0 + high := len(array) - 1 + for low+16 <= high { + middleIndex := int(uint32(low+high) >> 1) + middleValue := array[middleIndex] + if middleValue < ikey { + low = middleIndex + 1 + 
} else if middleValue > ikey { + high = middleIndex - 1 + } else { + return middleIndex + } + } + for ; low <= high; low++ { + val := array[low] + if val >= ikey { + if val == ikey { + return low + } + break + } + } + return -(low + 1) +} diff --git a/vendor/github.com/RoaringBitmap/roaring/shortiterator.go b/vendor/github.com/RoaringBitmap/roaring/shortiterator.go new file mode 100644 index 0000000..15b78bd --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/shortiterator.go @@ -0,0 +1,52 @@ +package roaring + +type shortIterable interface { + hasNext() bool + next() uint16 +} + +type shortPeekable interface { + shortIterable + peekNext() uint16 + advanceIfNeeded(minval uint16) +} + +type shortIterator struct { + slice []uint16 + loc int +} + +func (si *shortIterator) hasNext() bool { + return si.loc < len(si.slice) +} + +func (si *shortIterator) next() uint16 { + a := si.slice[si.loc] + si.loc++ + return a +} + +func (si *shortIterator) peekNext() uint16 { + return si.slice[si.loc] +} + +func (si *shortIterator) advanceIfNeeded(minval uint16) { + if si.hasNext() && si.peekNext() < minval { + si.loc = advanceUntil(si.slice, si.loc, len(si.slice), minval) + } +} + +type reverseIterator struct { + slice []uint16 + loc int +} + +func (si *reverseIterator) hasNext() bool { + return si.loc >= 0 +} + +func (si *reverseIterator) next() uint16 { + a := si.slice[si.loc] + si.loc-- + return a +} diff --git a/vendor/github.com/RoaringBitmap/roaring/smat.go b/vendor/github.com/RoaringBitmap/roaring/smat.go new file mode 100644 index 0000000..9da4756 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/smat.go @@ -0,0 +1,383 @@ +// +build gofuzz + +/* +# Instructions for smat testing for roaring + +[smat](https://github.com/mschoch/smat) is a framework that provides +state machine assisted fuzz testing. + +To run the smat tests for roaring... 
+ +## Prerequisites + + $ go get github.com/dvyukov/go-fuzz/go-fuzz + $ go get github.com/dvyukov/go-fuzz/go-fuzz-build + +## Steps + +1. Generate initial smat corpus: +``` + go test -tags=gofuzz -run=TestGenerateSmatCorpus +``` + +2. Build go-fuzz test program with instrumentation: +``` + go-fuzz-build -func FuzzSmat github.com/RoaringBitmap/roaring +``` + +3. Run go-fuzz: +``` + go-fuzz -bin=./roaring-fuzz.zip -workdir=workdir/ -timeout=200 +``` + +You should see output like... +``` +2016/09/16 13:58:35 slaves: 8, corpus: 1 (3s ago), crashers: 0, restarts: 1/0, execs: 0 (0/sec), cover: 0, uptime: 3s +2016/09/16 13:58:38 slaves: 8, corpus: 1 (6s ago), crashers: 0, restarts: 1/0, execs: 0 (0/sec), cover: 0, uptime: 6s +2016/09/16 13:58:41 slaves: 8, corpus: 1 (9s ago), crashers: 0, restarts: 1/44, execs: 44 (5/sec), cover: 0, uptime: 9s +2016/09/16 13:58:44 slaves: 8, corpus: 1 (12s ago), crashers: 0, restarts: 1/45, execs: 45 (4/sec), cover: 0, uptime: 12s +2016/09/16 13:58:47 slaves: 8, corpus: 1 (15s ago), crashers: 0, restarts: 1/46, execs: 46 (3/sec), cover: 0, uptime: 15s +2016/09/16 13:58:50 slaves: 8, corpus: 1 (18s ago), crashers: 0, restarts: 1/47, execs: 47 (3/sec), cover: 0, uptime: 18s +2016/09/16 13:58:53 slaves: 8, corpus: 1 (21s ago), crashers: 0, restarts: 1/63, execs: 63 (3/sec), cover: 0, uptime: 21s +2016/09/16 13:58:56 slaves: 8, corpus: 1 (24s ago), crashers: 0, restarts: 1/65, execs: 65 (3/sec), cover: 0, uptime: 24s +2016/09/16 13:58:59 slaves: 8, corpus: 1 (27s ago), crashers: 0, restarts: 1/66, execs: 66 (2/sec), cover: 0, uptime: 27s +2016/09/16 13:59:02 slaves: 8, corpus: 1 (30s ago), crashers: 0, restarts: 1/67, execs: 67 (2/sec), cover: 0, uptime: 30s +2016/09/16 13:59:05 slaves: 8, corpus: 1 (33s ago), crashers: 0, restarts: 1/83, execs: 83 (3/sec), cover: 0, uptime: 33s +2016/09/16 13:59:08 slaves: 8, corpus: 1 (36s ago), crashers: 0, restarts: 1/84, execs: 84 (2/sec), cover: 0, uptime: 36s +2016/09/16 13:59:11 slaves: 8, corpus: 2 
(0s ago), crashers: 0, restarts: 1/85, execs: 85 (2/sec), cover: 0, uptime: 39s +2016/09/16 13:59:14 slaves: 8, corpus: 17 (2s ago), crashers: 0, restarts: 1/86, execs: 86 (2/sec), cover: 480, uptime: 42s +2016/09/16 13:59:17 slaves: 8, corpus: 17 (5s ago), crashers: 0, restarts: 1/66, execs: 132 (3/sec), cover: 487, uptime: 45s +2016/09/16 13:59:20 slaves: 8, corpus: 17 (8s ago), crashers: 0, restarts: 1/440, execs: 2645 (55/sec), cover: 487, uptime: 48s + +``` + +Let it run, and if the # of crashers is > 0, check out the reports in +the workdir where you should be able to find the panic goroutine stack +traces. +*/ + +package roaring + +import ( + "fmt" + "sort" + + "github.com/mschoch/smat" + "github.com/willf/bitset" +) + +// fuzz test using state machine driven by byte stream. +func FuzzSmat(data []byte) int { + return smat.Fuzz(&smatContext{}, smat.ActionID('S'), smat.ActionID('T'), + smatActionMap, data) +} + +var smatDebug = false + +func smatLog(prefix, format string, args ...interface{}) { + if smatDebug { + fmt.Print(prefix) + fmt.Printf(format, args...) + } +} + +type smatContext struct { + pairs []*smatPair + + // Two registers, x & y. 
+ x int + y int + + actions int +} + +type smatPair struct { + bm *Bitmap + bs *bitset.BitSet +} + +// ------------------------------------------------------------------ + +var smatActionMap = smat.ActionMap{ + smat.ActionID('X'): smatAction("x++", smatWrap(func(c *smatContext) { c.x++ })), + smat.ActionID('x'): smatAction("x--", smatWrap(func(c *smatContext) { c.x-- })), + smat.ActionID('Y'): smatAction("y++", smatWrap(func(c *smatContext) { c.y++ })), + smat.ActionID('y'): smatAction("y--", smatWrap(func(c *smatContext) { c.y-- })), + smat.ActionID('*'): smatAction("x*y", smatWrap(func(c *smatContext) { c.x = c.x * c.y })), + smat.ActionID('<'): smatAction("x<<", smatWrap(func(c *smatContext) { c.x = c.x << 1 })), + + smat.ActionID('^'): smatAction("swap", smatWrap(func(c *smatContext) { c.x, c.y = c.y, c.x })), + + smat.ActionID('['): smatAction(" pushPair", smatWrap(smatPushPair)), + smat.ActionID(']'): smatAction(" popPair", smatWrap(smatPopPair)), + + smat.ActionID('B'): smatAction(" setBit", smatWrap(smatSetBit)), + smat.ActionID('b'): smatAction(" removeBit", smatWrap(smatRemoveBit)), + + smat.ActionID('o'): smatAction(" or", smatWrap(smatOr)), + smat.ActionID('a'): smatAction(" and", smatWrap(smatAnd)), + + smat.ActionID('#'): smatAction(" cardinality", smatWrap(smatCardinality)), + + smat.ActionID('O'): smatAction(" orCardinality", smatWrap(smatOrCardinality)), + smat.ActionID('A'): smatAction(" andCardinality", smatWrap(smatAndCardinality)), + + smat.ActionID('c'): smatAction(" clear", smatWrap(smatClear)), + smat.ActionID('r'): smatAction(" runOptimize", smatWrap(smatRunOptimize)), + + smat.ActionID('e'): smatAction(" isEmpty", smatWrap(smatIsEmpty)), + + smat.ActionID('i'): smatAction(" intersects", smatWrap(smatIntersects)), + + smat.ActionID('f'): smatAction(" flip", smatWrap(smatFlip)), + + smat.ActionID('-'): smatAction(" difference", smatWrap(smatDifference)), +} + +var smatRunningPercentActions []smat.PercentAction + +func init() { + var ids 
[]int + for actionId := range smatActionMap { + ids = append(ids, int(actionId)) + } + sort.Ints(ids) + + pct := 100 / len(smatActionMap) + for _, actionId := range ids { + smatRunningPercentActions = append(smatRunningPercentActions, + smat.PercentAction{pct, smat.ActionID(actionId)}) + } + + smatActionMap[smat.ActionID('S')] = smatAction("SETUP", smatSetupFunc) + smatActionMap[smat.ActionID('T')] = smatAction("TEARDOWN", smatTeardownFunc) +} + +// We only have one smat state: running. +func smatRunning(next byte) smat.ActionID { + return smat.PercentExecute(next, smatRunningPercentActions...) +} + +func smatAction(name string, f func(ctx smat.Context) (smat.State, error)) func(smat.Context) (smat.State, error) { + return func(ctx smat.Context) (smat.State, error) { + c := ctx.(*smatContext) + c.actions++ + + smatLog(" ", "%s\n", name) + + return f(ctx) + } +} + +// Creates an smat action func based on a simple callback. +func smatWrap(cb func(c *smatContext)) func(smat.Context) (next smat.State, err error) { + return func(ctx smat.Context) (next smat.State, err error) { + c := ctx.(*smatContext) + cb(c) + return smatRunning, nil + } +} + +// Invokes a callback function with the input v bounded to len(c.pairs). 
+func (c *smatContext) withPair(v int, cb func(*smatPair)) { + if len(c.pairs) > 0 { + if v < 0 { + v = -v + } + v = v % len(c.pairs) + cb(c.pairs[v]) + } +} + +// ------------------------------------------------------------------ + +func smatSetupFunc(ctx smat.Context) (next smat.State, err error) { + return smatRunning, nil +} + +func smatTeardownFunc(ctx smat.Context) (next smat.State, err error) { + return nil, err +} + +// ------------------------------------------------------------------ + +func smatPushPair(c *smatContext) { + c.pairs = append(c.pairs, &smatPair{ + bm: NewBitmap(), + bs: bitset.New(100), + }) +} + +func smatPopPair(c *smatContext) { + if len(c.pairs) > 0 { + c.pairs = c.pairs[0 : len(c.pairs)-1] + } +} + +func smatSetBit(c *smatContext) { + c.withPair(c.x, func(p *smatPair) { + y := uint32(c.y) + p.bm.AddInt(int(y)) + p.bs.Set(uint(y)) + p.checkEquals() + }) +} + +func smatRemoveBit(c *smatContext) { + c.withPair(c.x, func(p *smatPair) { + y := uint32(c.y) + p.bm.Remove(y) + p.bs.Clear(uint(y)) + p.checkEquals() + }) +} + +func smatAnd(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + px.bm.And(py.bm) + px.bs = px.bs.Intersection(py.bs) + px.checkEquals() + py.checkEquals() + }) + }) +} + +func smatOr(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + px.bm.Or(py.bm) + px.bs = px.bs.Union(py.bs) + px.checkEquals() + py.checkEquals() + }) + }) +} + +func smatAndCardinality(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + c0 := px.bm.AndCardinality(py.bm) + c1 := px.bs.IntersectionCardinality(py.bs) + if c0 != uint64(c1) { + panic("expected same add cardinality") + } + px.checkEquals() + py.checkEquals() + }) + }) +} + +func smatOrCardinality(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + c0 := px.bm.OrCardinality(py.bm) + c1 := 
px.bs.UnionCardinality(py.bs) + if c0 != uint64(c1) { + panic("expected same or cardinality") + } + px.checkEquals() + py.checkEquals() + }) + }) +} + +func smatRunOptimize(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + px.bm.RunOptimize() + px.checkEquals() + }) +} + +func smatClear(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + px.bm.Clear() + px.bs = px.bs.ClearAll() + px.checkEquals() + }) +} + +func smatCardinality(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c0 := px.bm.GetCardinality() + c1 := px.bs.Count() + if c0 != uint64(c1) { + panic("expected same cardinality") + } + }) +} + +func smatIsEmpty(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c0 := px.bm.IsEmpty() + c1 := px.bs.None() + if c0 != c1 { + panic("expected same is empty") + } + }) +} + +func smatIntersects(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + v0 := px.bm.Intersects(py.bm) + v1 := px.bs.IntersectionCardinality(py.bs) > 0 + if v0 != v1 { + panic("intersects not equal") + } + + px.checkEquals() + py.checkEquals() + }) + }) +} + +func smatFlip(c *smatContext) { + c.withPair(c.x, func(p *smatPair) { + y := uint32(c.y) + p.bm.Flip(uint64(y), uint64(y)+1) + p.bs = p.bs.Flip(uint(y)) + p.checkEquals() + }) +} + +func smatDifference(c *smatContext) { + c.withPair(c.x, func(px *smatPair) { + c.withPair(c.y, func(py *smatPair) { + px.bm.AndNot(py.bm) + px.bs = px.bs.Difference(py.bs) + px.checkEquals() + py.checkEquals() + }) + }) +} + +func (p *smatPair) checkEquals() { + if !p.equalsBitSet(p.bs, p.bm) { + panic("bitset mismatch") + } +} + +func (p *smatPair) equalsBitSet(a *bitset.BitSet, b *Bitmap) bool { + for i, e := a.NextSet(0); e; i, e = a.NextSet(i + 1) { + if !b.ContainsInt(int(i)) { + fmt.Printf("in a bitset, not b bitmap, i: %d\n", i) + fmt.Printf(" a bitset: %s\n b bitmap: %s\n", + a.String(), b.String()) + return false + } + } + + i := b.Iterator() + for i.HasNext() { + v := 
i.Next() + if !a.Test(uint(v)) { + fmt.Printf("in b bitmap, not a bitset, v: %d\n", v) + fmt.Printf(" a bitset: %s\n b bitmap: %s\n", + a.String(), b.String()) + return false + } + } + + return true +} diff --git a/vendor/github.com/RoaringBitmap/roaring/util.go b/vendor/github.com/RoaringBitmap/roaring/util.go new file mode 100644 index 0000000..6763033 --- /dev/null +++ b/vendor/github.com/RoaringBitmap/roaring/util.go @@ -0,0 +1,304 @@ +package roaring + +import ( + "math/rand" + "sort" +) + +const ( + arrayDefaultMaxSize = 4096 // containers with 4096 or fewer integers should be array containers. + arrayLazyLowerBound = 1024 + maxCapacity = 1 << 16 + serialCookieNoRunContainer = 12346 // only arrays and bitmaps + invalidCardinality = -1 + serialCookie = 12347 // runs, arrays, and bitmaps + noOffsetThreshold = 4 + + // MaxUint32 is the largest uint32 value. + MaxUint32 = 4294967295 + + // MaxRange is One more than the maximum allowed bitmap bit index. For use as an upper + // bound for ranges. + MaxRange uint64 = MaxUint32 + 1 + + // MaxUint16 is the largest 16 bit unsigned int. + // This is the largest value an interval16 can store. + MaxUint16 = 65535 + + // Compute wordSizeInBytes, the size of a word in bytes. 
+ _m = ^uint64(0) + _logS = _m>>8&1 + _m>>16&1 + _m>>32&1 + wordSizeInBytes = 1 << _logS + + // other constants used in ctz_generic.go + wordSizeInBits = wordSizeInBytes << 3 // word size in bits +) + +const maxWord = 1< arrayDefaultMaxSize { + // bitmapContainer + return maxCapacity / 8 + } + // arrayContainer + return 2 * card +} + +func fill(arr []uint64, val uint64) { + for i := range arr { + arr[i] = val + } +} +func fillRange(arr []uint64, start, end int, val uint64) { + for i := start; i < end; i++ { + arr[i] = val + } +} + +func fillArrayAND(container []uint16, bitmap1, bitmap2 []uint64) { + if len(bitmap1) != len(bitmap2) { + panic("array lengths don't match") + } + // TODO: rewrite in assembly + pos := 0 + for k := range bitmap1 { + bitset := bitmap1[k] & bitmap2[k] + for bitset != 0 { + t := bitset & -bitset + container[pos] = uint16((k*64 + int(popcount(t-1)))) + pos = pos + 1 + bitset ^= t + } + } +} + +func fillArrayANDNOT(container []uint16, bitmap1, bitmap2 []uint64) { + if len(bitmap1) != len(bitmap2) { + panic("array lengths don't match") + } + // TODO: rewrite in assembly + pos := 0 + for k := range bitmap1 { + bitset := bitmap1[k] &^ bitmap2[k] + for bitset != 0 { + t := bitset & -bitset + container[pos] = uint16((k*64 + int(popcount(t-1)))) + pos = pos + 1 + bitset ^= t + } + } +} + +func fillArrayXOR(container []uint16, bitmap1, bitmap2 []uint64) { + if len(bitmap1) != len(bitmap2) { + panic("array lengths don't match") + } + // TODO: rewrite in assembly + pos := 0 + for k := 0; k < len(bitmap1); k++ { + bitset := bitmap1[k] ^ bitmap2[k] + for bitset != 0 { + t := bitset & -bitset + container[pos] = uint16((k*64 + int(popcount(t-1)))) + pos = pos + 1 + bitset ^= t + } + } +} + +func highbits(x uint32) uint16 { + return uint16(x >> 16) +} +func lowbits(x uint32) uint16 { + return uint16(x & maxLowBit) +} + +const maxLowBit = 0xFFFF + +func flipBitmapRange(bitmap []uint64, start int, end int) { + if start >= end { + return + } + firstword := 
start / 64 + endword := (end - 1) / 64 + bitmap[firstword] ^= ^(^uint64(0) << uint(start%64)) + for i := firstword; i < endword; i++ { + bitmap[i] = ^bitmap[i] + } + bitmap[endword] ^= ^uint64(0) >> (uint(-end) % 64) +} + +func resetBitmapRange(bitmap []uint64, start int, end int) { + if start >= end { + return + } + firstword := start / 64 + endword := (end - 1) / 64 + if firstword == endword { + bitmap[firstword] &= ^((^uint64(0) << uint(start%64)) & (^uint64(0) >> (uint(-end) % 64))) + return + } + bitmap[firstword] &= ^(^uint64(0) << uint(start%64)) + for i := firstword + 1; i < endword; i++ { + bitmap[i] = 0 + } + bitmap[endword] &= ^(^uint64(0) >> (uint(-end) % 64)) + +} + +func setBitmapRange(bitmap []uint64, start int, end int) { + if start >= end { + return + } + firstword := start / 64 + endword := (end - 1) / 64 + if firstword == endword { + bitmap[firstword] |= (^uint64(0) << uint(start%64)) & (^uint64(0) >> (uint(-end) % 64)) + return + } + bitmap[firstword] |= ^uint64(0) << uint(start%64) + for i := firstword + 1; i < endword; i++ { + bitmap[i] = ^uint64(0) + } + bitmap[endword] |= ^uint64(0) >> (uint(-end) % 64) +} + +func flipBitmapRangeAndCardinalityChange(bitmap []uint64, start int, end int) int { + before := wordCardinalityForBitmapRange(bitmap, start, end) + flipBitmapRange(bitmap, start, end) + after := wordCardinalityForBitmapRange(bitmap, start, end) + return int(after - before) +} + +func resetBitmapRangeAndCardinalityChange(bitmap []uint64, start int, end int) int { + before := wordCardinalityForBitmapRange(bitmap, start, end) + resetBitmapRange(bitmap, start, end) + after := wordCardinalityForBitmapRange(bitmap, start, end) + return int(after - before) +} + +func setBitmapRangeAndCardinalityChange(bitmap []uint64, start int, end int) int { + before := wordCardinalityForBitmapRange(bitmap, start, end) + setBitmapRange(bitmap, start, end) + after := wordCardinalityForBitmapRange(bitmap, start, end) + return int(after - before) +} + +func 
wordCardinalityForBitmapRange(bitmap []uint64, start int, end int) uint64 { + answer := uint64(0) + if start >= end { + return answer + } + firstword := start / 64 + endword := (end - 1) / 64 + for i := firstword; i <= endword; i++ { + answer += popcount(bitmap[i]) + } + return answer +} + +func selectBitPosition(w uint64, j int) int { + seen := 0 + + // Divide 64bit + part := w & 0xFFFFFFFF + n := popcount(part) + if n <= uint64(j) { + part = w >> 32 + seen += 32 + j -= int(n) + } + w = part + + // Divide 32bit + part = w & 0xFFFF + n = popcount(part) + if n <= uint64(j) { + part = w >> 16 + seen += 16 + j -= int(n) + } + w = part + + // Divide 16bit + part = w & 0xFF + n = popcount(part) + if n <= uint64(j) { + part = w >> 8 + seen += 8 + j -= int(n) + } + w = part + + // Lookup in final byte + var counter uint + for counter = 0; counter < 8; counter++ { + j -= int((w >> counter) & 1) + if j < 0 { + break + } + } + return seen + int(counter) + +} + +func panicOn(err error) { + if err != nil { + panic(err) + } +} + +type ph struct { + orig int + rand int +} + +type pha []ph + +func (p pha) Len() int { return len(p) } +func (p pha) Less(i, j int) bool { return p[i].rand < p[j].rand } +func (p pha) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func getRandomPermutation(n int) []int { + r := make([]ph, n) + for i := 0; i < n; i++ { + r[i].orig = i + r[i].rand = rand.Intn(1 << 29) + } + sort.Sort(pha(r)) + m := make([]int, n) + for i := range m { + m[i] = r[i].orig + } + return m +} + +func minOfInt(a, b int) int { + if a < b { + return a + } + return b +} + +func maxOfInt(a, b int) int { + if a > b { + return a + } + return b +} + +func maxOfUint16(a, b uint16) uint16 { + if a > b { + return a + } + return b +} + +func minOfUint16(a, b uint16) uint16 { + if a < b { + return a + } + return b +} diff --git a/vendor/github.com/alecthomas/chroma/v2/.editorconfig b/vendor/github.com/alecthomas/chroma/v2/.editorconfig new file mode 100644 index 0000000..cfb2c66 --- 
/dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/.editorconfig @@ -0,0 +1,17 @@ +root = true + +[*] +indent_style = tab +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.xml] +indent_style = space +indent_size = 2 +insert_final_newline = false + +[*.yml] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/alecthomas/chroma/v2/.gitignore b/vendor/github.com/alecthomas/chroma/v2/.gitignore new file mode 100644 index 0000000..8cbdd75 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/.gitignore @@ -0,0 +1,25 @@ +# Binaries for programs and plugins +.git +.idea +.vscode +.hermit +*.exe +*.dll +*.so +*.dylib +/cmd/chroma/chroma + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +_models/ + +_examples/ +*.min.* +build/ diff --git a/vendor/github.com/alecthomas/chroma/v2/.golangci.yml b/vendor/github.com/alecthomas/chroma/v2/.golangci.yml new file mode 100644 index 0000000..668be37 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/.golangci.yml @@ -0,0 +1,92 @@ +run: + tests: true + skip-dirs: + - _examples + +output: + print-issued-lines: false + +linters: + enable-all: true + disable: + - maligned + - megacheck + - lll + - gocyclo + - dupl + - gochecknoglobals + - funlen + - godox + - wsl + - gomnd + - gocognit + - goerr113 + - nolintlint + - testpackage + - godot + - nestif + - paralleltest + - nlreturn + - cyclop + - exhaustivestruct + - gci + - gofumpt + - errorlint + - exhaustive + - ifshort + - wrapcheck + - stylecheck + - thelper + - nonamedreturns + - revive + - dupword + - exhaustruct + - varnamelen + - forcetypeassert + - ireturn + - maintidx + - govet + - nosnakecase + - testableexamples + - musttag + - depguard + - goconst + +linters-settings: + govet: + check-shadowing: true + gocyclo: + 
min-complexity: 10 + dupl: + threshold: 100 + goconst: + min-len: 8 + min-occurrences: 3 + forbidigo: + #forbid: + # - (Must)?NewLexer$ + exclude_godoc_examples: false + + +issues: + max-per-linter: 0 + max-same: 0 + exclude-use-default: false + exclude: + # Captured by errcheck. + - '^(G104|G204):' + # Very commonly not checked. + - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked' + - 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported' + - 'composite literal uses unkeyed fields' + - 'declaration of "err" shadows declaration' + - 'should not use dot imports' + - 'Potential file inclusion via variable' + - 'should have comment or be unexported' + - 'comment on exported var .* should be of the form' + - 'at least one file in a package should have a package comment' + - 'string literal contains the Unicode' + - 'methods on the same type should have the same receiver name' + - '_TokenType_name should be _TokenTypeName' + - '`_TokenType_map` should be `_TokenTypeMap`' + - 'rewrite if-else to switch statement' diff --git a/vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml b/vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml new file mode 100644 index 0000000..8cd7592 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml @@ -0,0 +1,37 @@ +project_name: chroma +release: + github: + owner: alecthomas + name: chroma +brews: + - + install: bin.install "chroma" +env: + - CGO_ENABLED=0 +builds: +- goos: + - linux + - darwin + - windows + goarch: + - arm64 + - amd64 + - "386" + goarm: + - "6" + dir: ./cmd/chroma + main: . 
+ ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} + binary: chroma +archives: + - + format: tar.gz + name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ + .Arm }}{{ end }}' + files: + - COPYING + - README* +snapshot: + name_template: SNAPSHOT-{{ .Commit }} +checksum: + name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' diff --git a/vendor/github.com/alecthomas/chroma/v2/Bitfile b/vendor/github.com/alecthomas/chroma/v2/Bitfile new file mode 100644 index 0000000..bf15863 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/Bitfile @@ -0,0 +1,24 @@ +VERSION = %(git describe --tags --dirty --always)% +export CGOENABLED = 0 + +tokentype_enumer.go: types.go + build: go generate + +# Regenerate the list of lexers in the README +README.md: lexers/*.go lexers/*/*.xml table.py + build: ./table.py + -clean + +implicit %{1}%{2}.min.%{3}: **/*.{css,js} + build: esbuild --bundle %{IN} --minify --outfile=%{OUT} + +implicit build/%{1}: cmd/* + cd cmd/%{1} + inputs: cmd/%{1}/**/* **/*.go + build: go build -ldflags="-X 'main.version=%{VERSION}'" -o ../../build/%{1} . 
+ +#upload: chromad +# build: +# scp chromad root@swapoff.org: +# ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' +# touch upload diff --git a/vendor/github.com/alecthomas/chroma/v2/COPYING b/vendor/github.com/alecthomas/chroma/v2/COPYING new file mode 100644 index 0000000..92dc39f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2017 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/alecthomas/chroma/v2/Makefile b/vendor/github.com/alecthomas/chroma/v2/Makefile new file mode 100644 index 0000000..e2ff762 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/Makefile @@ -0,0 +1,23 @@ +.PHONY: chromad upload all + +VERSION ?= $(shell git describe --tags --dirty --always) +export GOOS ?= linux +export GOARCH ?= amd64 + +all: README.md tokentype_string.go + +README.md: lexers/*/*.go + ./table.py + +tokentype_string.go: types.go + go generate + +chromad: + rm -rf build + esbuild --bundle cmd/chromad/static/index.js --minify --outfile=cmd/chromad/static/index.min.js + esbuild --bundle cmd/chromad/static/index.css --minify --outfile=cmd/chromad/static/index.min.css + (export CGOENABLED=0 ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../build/chromad .) + +upload: build/chromad + scp build/chromad root@swapoff.org: && \ + ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart' diff --git a/vendor/github.com/alecthomas/chroma/v2/README.md b/vendor/github.com/alecthomas/chroma/v2/README.md new file mode 100644 index 0000000..775d3af --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/README.md @@ -0,0 +1,297 @@ +# Chroma — A general purpose syntax highlighter in pure Go + +[![Golang Documentation](https://godoc.org/github.com/alecthomas/chroma?status.svg)](https://godoc.org/github.com/alecthomas/chroma) [![CI](https://github.com/alecthomas/chroma/actions/workflows/ci.yml/badge.svg)](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [![Slack chat](https://img.shields.io/static/v1?logo=slack&style=flat&label=slack&color=green&message=gophers)](https://invite.slack.golangbridge.org/) + +Chroma takes source code and other structured text and converts it into syntax +highlighted HTML, ANSI-coloured text, etc. + +Chroma is based heavily on [Pygments](http://pygments.org/), and includes +translators for Pygments lexers and styles. 
+ +## Table of Contents + + + +1. [Supported languages](#supported-languages) +2. [Try it](#try-it) +3. [Using the library](#using-the-library) + 1. [Quick start](#quick-start) + 2. [Identifying the language](#identifying-the-language) + 3. [Formatting the output](#formatting-the-output) + 4. [The HTML formatter](#the-html-formatter) +4. [More detail](#more-detail) + 1. [Lexers](#lexers) + 2. [Formatters](#formatters) + 3. [Styles](#styles) +5. [Command-line interface](#command-line-interface) +6. [Testing lexers](#testing-lexers) +7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments) + + + +## Supported languages + +| Prefix | Language | +| :----: | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk | +| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck | +| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython | +| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan | +| E | EBNF, Elixir, Elm, EmacsLisp, Erlang | +| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp | +| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy | +| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy | +| I | Idris, Igor, INI, Io, ISCdhcpd | +| J | J, Java, JavaScript, JSON, Julia, Jungle | +| K | Kotlin | +| L | Lighttpd configuration file, LLVM, 
Lua | +| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, mcfunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL | +| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix | +| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode | +| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 | +| Q | QBasic, QML | +| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust | +| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog | +| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData | +| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue | +| W | WDTE, WebGPU Shading Language, Whiley | +| X | XML, Xorg | +| Y | YAML, YANG | +| Z | Z80 Assembly, Zed, Zig | + +_I will attempt to keep this section up to date, but an authoritative list can be +displayed with `chroma --list`._ + +## Try it + +Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/). + +## Using the library + +This is version 2 of Chroma, use the import path: + +```go +import "github.com/alecthomas/chroma/v2" +``` + +Chroma, like Pygments, has the concepts of +[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), +[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and +[styles](https://github.com/alecthomas/chroma/tree/master/styles). 
+ +Lexers convert source text into a stream of tokens, styles specify how token +types are mapped to colours, and formatters convert tokens and styles into +formatted output. + +A package exists for each of these, containing a global `Registry` variable +with all of the registered implementations. There are also helper functions +for using the registry in each package, such as looking up lexers by name or +matching filenames, etc. + +In all cases, if a lexer, formatter or style can not be determined, `nil` will +be returned. In this situation you may want to default to the `Fallback` +value in each respective package, which provides sane defaults. + +### Quick start + +A convenience function exists that can be used to simply format some source +text, without any effort: + +```go +err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") +``` + +### Identifying the language + +To highlight code, you'll first have to identify what language the code is +written in. There are three primary ways to do that: + +1. Detect the language from its filename. + + ```go + lexer := lexers.Match("foo.go") + ``` + +2. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). + + ```go + lexer := lexers.Get("go") + ``` + +3. Detect the language from its content. + + ```go + lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") + ``` + +In all cases, `nil` will be returned if the language can not be identified. + +```go +if lexer == nil { + lexer = lexers.Fallback +} +``` + +At this point, it should be noted that some lexers can be extremely chatty. To +mitigate this, you can use the coalescing lexer to coalesce runs of identical +token types into a single token: + +```go +lexer = chroma.Coalesce(lexer) +``` + +### Formatting the output + +Once a language is identified you will need to pick a formatter and a style (theme). 
+ +```go +style := styles.Get("swapoff") +if style == nil { + style = styles.Fallback +} +formatter := formatters.Get("html") +if formatter == nil { + formatter = formatters.Fallback +} +``` + +Then obtain an iterator over the tokens: + +```go +contents, err := ioutil.ReadAll(r) +iterator, err := lexer.Tokenise(nil, string(contents)) +``` + +And finally, format the tokens from the iterator: + +```go +err := formatter.Format(w, style, iterator) +``` + +### The HTML formatter + +By default the `html` registered formatter generates standalone HTML with +embedded CSS. More flexibility is available through the `formatters/html` package. + +Firstly, the output generated by the formatter can be customised with the +following constructor options: + +- `Standalone()` - generate standalone HTML with embedded CSS. +- `WithClasses()` - use classes rather than inlined style attributes. +- `ClassPrefix(prefix)` - prefix each generated CSS class. +- `TabWidth(width)` - Set the rendered tab width, in characters. +- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). +- `WithLinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves. +- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). +- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. + +If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: + +```go +formatter := html.New(html.WithClasses(true)) +err := formatter.WriteCSS(w, style) +``` + +## More detail + +### Lexers + +See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) +for details on implementing lexers. Most concepts apply directly to Chroma, +but see existing lexer implementations for real examples. + +In many cases lexers can be automatically converted directly from Pygments by +using the included Python 3 script `pygments2chroma_xml.py`. 
I use something like +the following: + +```sh +python3 _tools/pygments2chroma_xml.py \ + pygments.lexers.jvm.KotlinLexer \ + > lexers/embedded/kotlin.xml +``` + +See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) +for a list of lexers, and notes on some of the issues importing them. + +### Formatters + +Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. + +A `noop` formatter is included that outputs the token text only, and a `tokens` +formatter outputs raw tokens. The latter is useful for debugging lexers. + +### Styles + +Chroma styles are defined in XML. The style entries use the +[same syntax](http://pygments.org/docs/styles/) as Pygments. + +All Pygments styles have been converted to Chroma using the `_tools/style.py` +script. + +When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles), +know that the `Background` token type provides the default style for tokens. It does so +by defining a foreground color and background color. + +For example, this gives each token name not defined in the style a default color +of `#f8f8f8` and uses `#000000` for the highlighted code block's background: + +```xml + +``` + +Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color. + +For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). + +## Command-line interface + +A command-line interface to Chroma is included. + +Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases). 
+ +The CLI can be used as a preprocessor to colorise output of `less(1)`, +see documentation for the `LESSOPEN` environment variable. + +The `--fail` flag can be used to suppress output and return with exit status +1 to facilitate falling back to some other preprocessor in case chroma +does not resolve a specific lexer to use for the given file. For example: + +```shell +export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"' +``` + +Replace `cat` with your favourite fallback preprocessor. + +When invoked as `.lessfilter`, the `--fail` flag is automatically turned +on under the hood for easy integration with [lesspipe shipping with +Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS); +for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`. + +## Testing lexers + +If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run: + +```shell +go run . +``` + +A Link will be printed. Open it in your Browser. Now you can test on the Playground with your local changes. + +If you want to run the tests and the lexers, open a shell in the root directory and run: + +```shell +go test ./lexers +``` + +When updating or adding a lexer, please add tests. See [lexers/README.md](lexers/README.md) for more. + +## What's missing compared to Pygments? + +- Quite a few lexers, for various reasons (pull-requests welcome): + - Pygments lexers for complex languages often include custom code to + handle certain aspects, such as Raku's ability to nest code inside + regular expressions. These require time and effort to convert. + - I mostly only converted languages I had heard of, to reduce the porting cost. +- Some more esoteric features of Pygments are omitted for simplicity. +- Though the Chroma API supports content detection, very few languages support them. + I have plans to implement a statistical analyser at some point, but not enough time. 
diff --git a/vendor/github.com/alecthomas/chroma/v2/coalesce.go b/vendor/github.com/alecthomas/chroma/v2/coalesce.go new file mode 100644 index 0000000..f504895 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/coalesce.go @@ -0,0 +1,35 @@ +package chroma + +// Coalesce is a Lexer interceptor that collapses runs of common types into a single token. +func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} } + +type coalescer struct{ Lexer } + +func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + var prev Token + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + return func() Token { + for token := it(); token != (EOF); token = it() { + if len(token.Value) == 0 { + continue + } + if prev == EOF { + prev = token + } else { + if prev.Type == token.Type && len(prev.Value) < 8192 { + prev.Value += token.Value + } else { + out := prev + prev = token + return out + } + } + } + out := prev + prev = EOF + return out + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/v2/colour.go b/vendor/github.com/alecthomas/chroma/v2/colour.go new file mode 100644 index 0000000..b7fd6e0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/colour.go @@ -0,0 +1,192 @@ +package chroma + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values. +var ANSI2RGB = map[string]string{ + "#ansiblack": "000000", + "#ansidarkred": "7f0000", + "#ansidarkgreen": "007f00", + "#ansibrown": "7f7fe0", + "#ansidarkblue": "00007f", + "#ansipurple": "7f007f", + "#ansiteal": "007f7f", + "#ansilightgray": "e5e5e5", + // Normal + "#ansidarkgray": "555555", + "#ansired": "ff0000", + "#ansigreen": "00ff00", + "#ansiyellow": "ffff00", + "#ansiblue": "0000ff", + "#ansifuchsia": "ff00ff", + "#ansiturquoise": "00ffff", + "#ansiwhite": "ffffff", + + // Aliases without the "ansi" prefix, because...why? 
+ "#black": "000000", + "#darkred": "7f0000", + "#darkgreen": "007f00", + "#brown": "7f7fe0", + "#darkblue": "00007f", + "#purple": "7f007f", + "#teal": "007f7f", + "#lightgray": "e5e5e5", + // Normal + "#darkgray": "555555", + "#red": "ff0000", + "#green": "00ff00", + "#yellow": "ffff00", + "#blue": "0000ff", + "#fuchsia": "ff00ff", + "#turquoise": "00ffff", + "#white": "ffffff", +} + +// Colour represents an RGB colour. +type Colour int32 + +// NewColour creates a Colour directly from RGB values. +func NewColour(r, g, b uint8) Colour { + return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b)) +} + +// Distance between this colour and another. +// +// This uses the approach described here (https://www.compuphase.com/cmetric.htm). +// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs. +func (c Colour) Distance(e2 Colour) float64 { + ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue()) + br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue()) + rmean := (ar + br) / 2 + r := ar - br + g := ag - bg + b := ab - bb + return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8))) +} + +// Brighten returns a copy of this colour with its brightness adjusted. +// +// If factor is negative, the colour is darkened. +// +// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html). +func (c Colour) Brighten(factor float64) Colour { + r := float64(c.Red()) + g := float64(c.Green()) + b := float64(c.Blue()) + + if factor < 0 { + factor++ + r *= factor + g *= factor + b *= factor + } else { + r = (255-r)*factor + r + g = (255-g)*factor + g + b = (255-b)*factor + b + } + return NewColour(uint8(r), uint8(g), uint8(b)) +} + +// BrightenOrDarken brightens a colour if it is < 0.5 brightness or darkens if > 0.5 brightness. 
+func (c Colour) BrightenOrDarken(factor float64) Colour { + if c.Brightness() < 0.5 { + return c.Brighten(factor) + } + return c.Brighten(-factor) +} + +// ClampBrightness returns a copy of this colour with its brightness adjusted such that +// it falls within the range [min, max] (or very close to it due to rounding errors). +// The supplied values use the same [0.0, 1.0] range as Brightness. +func (c Colour) ClampBrightness(min, max float64) Colour { + if !c.IsSet() { + return c + } + + min = math.Max(min, 0) + max = math.Min(max, 1) + current := c.Brightness() + target := math.Min(math.Max(current, min), max) + if current == target { + return c + } + + r := float64(c.Red()) + g := float64(c.Green()) + b := float64(c.Blue()) + rgb := r + g + b + if target > current { + // Solve for x: target == ((255-r)*x + r + (255-g)*x + g + (255-b)*x + b) / 255 / 3 + return c.Brighten((target*255*3 - rgb) / (255*3 - rgb)) + } + // Solve for x: target == (r*(x+1) + g*(x+1) + b*(x+1)) / 255 / 3 + return c.Brighten((target*255*3)/rgb - 1) +} + +// Brightness of the colour (roughly) in the range 0.0 to 1.0. +func (c Colour) Brightness() float64 { + return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0 +} + +// ParseColour in the forms #rgb, #rrggbb, #ansi, or #. +// Will return an "unset" colour if invalid. +func ParseColour(colour string) Colour { + colour = normaliseColour(colour) + n, err := strconv.ParseUint(colour, 16, 32) + if err != nil { + return 0 + } + return Colour(n + 1) +} + +// MustParseColour is like ParseColour except it panics if the colour is invalid. +// +// Will panic if colour is in an invalid format. +func MustParseColour(colour string) Colour { + parsed := ParseColour(colour) + if !parsed.IsSet() { + panic(fmt.Errorf("invalid colour %q", colour)) + } + return parsed +} + +// IsSet returns true if the colour is set. 
+func (c Colour) IsSet() bool { return c != 0 } + +func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) } +func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) } + +// Red component of colour. +func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) } + +// Green component of colour. +func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) } + +// Blue component of colour. +func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) } + +// Colours is an orderable set of colours. +type Colours []Colour + +func (c Colours) Len() int { return len(c) } +func (c Colours) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c Colours) Less(i, j int) bool { return c[i] < c[j] } + +// Convert colours to #rrggbb. +func normaliseColour(colour string) string { + if ansi, ok := ANSI2RGB[colour]; ok { + return ansi + } + if strings.HasPrefix(colour, "#") { + colour = colour[1:] + if len(colour) == 3 { + return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3] + } + } + return colour +} diff --git a/vendor/github.com/alecthomas/chroma/v2/delegate.go b/vendor/github.com/alecthomas/chroma/v2/delegate.go new file mode 100644 index 0000000..f848194 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/delegate.go @@ -0,0 +1,152 @@ +package chroma + +import ( + "bytes" +) + +type delegatingLexer struct { + root Lexer + language Lexer +} + +// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP +// inside HTML or PHP inside plain text. +// +// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language +// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer. +// Finally, these two sets of tokens are merged. +// +// The lexers from the template lexer package use this base lexer. 
+func DelegatingLexer(root Lexer, language Lexer) Lexer { + return &delegatingLexer{ + root: root, + language: language, + } +} + +func (d *delegatingLexer) AnalyseText(text string) float32 { + return d.root.AnalyseText(text) +} + +func (d *delegatingLexer) SetAnalyser(analyser func(text string) float32) Lexer { + d.root.SetAnalyser(analyser) + return d +} + +func (d *delegatingLexer) SetRegistry(r *LexerRegistry) Lexer { + d.root.SetRegistry(r) + d.language.SetRegistry(r) + return d +} + +func (d *delegatingLexer) Config() *Config { + return d.language.Config() +} + +// An insertion is the character range where language tokens should be inserted. +type insertion struct { + start, end int + tokens []Token +} + +func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit + tokens, err := Tokenise(Coalesce(d.language), options, text) + if err != nil { + return nil, err + } + // Compute insertions and gather "Other" tokens. + others := &bytes.Buffer{} + insertions := []*insertion{} + var insert *insertion + offset := 0 + var last Token + for _, t := range tokens { + if t.Type == Other { + if last != EOF && insert != nil && last.Type != Other { + insert.end = offset + } + others.WriteString(t.Value) + } else { + if last == EOF || last.Type == Other { + insert = &insertion{start: offset} + insertions = append(insertions, insert) + } + insert.tokens = append(insert.tokens, t) + } + last = t + offset += len(t.Value) + } + + if len(insertions) == 0 { + return d.root.Tokenise(options, text) + } + + // Lex the other tokens. + rootTokens, err := Tokenise(Coalesce(d.root), options, others.String()) + if err != nil { + return nil, err + } + + // Interleave the two sets of tokens. + var out []Token + offset = 0 // Offset into text. 
+ tokenIndex := 0 + nextToken := func() Token { + if tokenIndex >= len(rootTokens) { + return EOF + } + t := rootTokens[tokenIndex] + tokenIndex++ + return t + } + insertionIndex := 0 + nextInsertion := func() *insertion { + if insertionIndex >= len(insertions) { + return nil + } + i := insertions[insertionIndex] + insertionIndex++ + return i + } + t := nextToken() + i := nextInsertion() + for t != EOF || i != nil { + // fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...)) + if t == EOF || (i != nil && i.start < offset+len(t.Value)) { + var l Token + l, t = splitToken(t, i.start-offset) + if l != EOF { + out = append(out, l) + offset += len(l.Value) + } + out = append(out, i.tokens...) + offset += i.end - i.start + if t == EOF { + t = nextToken() + } + i = nextInsertion() + } else { + out = append(out, t) + offset += len(t.Value) + t = nextToken() + } + } + return Literator(out...), nil +} + +func splitToken(t Token, offset int) (l Token, r Token) { + if t == EOF { + return EOF, EOF + } + if offset == 0 { + return EOF, t + } + if offset == len(t.Value) { + return t, EOF + } + l = t.Clone() + r = t.Clone() + l.Value = l.Value[:offset] + r.Value = r.Value[offset:] + return +} diff --git a/vendor/github.com/alecthomas/chroma/v2/doc.go b/vendor/github.com/alecthomas/chroma/v2/doc.go new file mode 100644 index 0000000..4dde77c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/doc.go @@ -0,0 +1,7 @@ +// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI- +// coloured text, etc. +// +// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles. 
+// +// For more information, go here: https://github.com/alecthomas/chroma +package chroma diff --git a/vendor/github.com/alecthomas/chroma/v2/emitters.go b/vendor/github.com/alecthomas/chroma/v2/emitters.go new file mode 100644 index 0000000..0788b5b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/emitters.go @@ -0,0 +1,218 @@ +package chroma + +import ( + "fmt" +) + +// An Emitter takes group matches and returns tokens. +type Emitter interface { + // Emit tokens for the given regex groups. + Emit(groups []string, state *LexerState) Iterator +} + +// SerialisableEmitter is an Emitter that can be serialised and deserialised to/from JSON. +type SerialisableEmitter interface { + Emitter + EmitterKind() string +} + +// EmitterFunc is a function that is an Emitter. +type EmitterFunc func(groups []string, state *LexerState) Iterator + +// Emit tokens for groups. +func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator { + return e(groups, state) +} + +type Emitters []Emitter + +type byGroupsEmitter struct { + Emitters +} + +// ByGroups emits a token for each matching group in the rule's regex. +func ByGroups(emitters ...Emitter) Emitter { + return &byGroupsEmitter{Emitters: emitters} +} + +func (b *byGroupsEmitter) EmitterKind() string { return "bygroups" } + +func (b *byGroupsEmitter) Emit(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(groups)-1) + if len(b.Emitters) != len(groups)-1 { + iterators = append(iterators, Error.Emit(groups, state)) + // panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters)) + } else { + for i, group := range groups[1:] { + if b.Emitters[i] != nil { + iterators = append(iterators, b.Emitters[i].Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) +} + +// ByGroupNames emits a token for each named matching group in the rule's regex. 
+func ByGroupNames(emitters map[string]Emitter) Emitter { + return EmitterFunc(func(groups []string, state *LexerState) Iterator { + iterators := make([]Iterator, 0, len(state.NamedGroups)-1) + if len(state.NamedGroups)-1 == 0 { + if emitter, ok := emitters[`0`]; ok { + iterators = append(iterators, emitter.Emit(groups, state)) + } else { + iterators = append(iterators, Error.Emit(groups, state)) + } + } else { + ruleRegex := state.Rules[state.State][state.Rule].Regexp + for i := 1; i < len(state.NamedGroups); i++ { + groupName := ruleRegex.GroupNameFromNumber(i) + group := state.NamedGroups[groupName] + if emitter, ok := emitters[groupName]; ok { + if emitter != nil { + iterators = append(iterators, emitter.Emit([]string{group}, state)) + } + } else { + iterators = append(iterators, Error.Emit([]string{group}, state)) + } + } + } + return Concaterator(iterators...) + }) +} + +// UsingByGroup emits tokens for the matched groups in the regex using a +// sublexer. Used when lexing code blocks where the name of a sublexer is +// contained within the block, for example on a Markdown text block or SQL +// language block. +// +// An attempt to load the sublexer will be made using the captured value from +// the text of the matched sublexerNameGroup. If a sublexer matching the +// sublexerNameGroup is available, then tokens for the matched codeGroup will +// be emitted using the sublexer. Otherwise, if no sublexer is available, then +// tokens will be emitted from the passed emitter. +// +// Example: +// +// var Markdown = internal.Register(MustNewLexer( +// &Config{ +// Name: "markdown", +// Aliases: []string{"md", "mkd"}, +// Filenames: []string{"*.md", "*.mkd", "*.markdown"}, +// MimeTypes: []string{"text/x-markdown"}, +// }, +// Rules{ +// "root": { +// {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", +// UsingByGroup( +// 2, 4, +// String, String, String, Text, String, +// ), +// nil, +// }, +// }, +// }, +// )) +// +// See the lexers/markdown.go for the complete example. 
+// +// Note: panic's if the number of emitters does not equal the number of matched +// groups in the regex. +func UsingByGroup(sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter { + return &usingByGroup{ + SublexerNameGroup: sublexerNameGroup, + CodeGroup: codeGroup, + Emitters: emitters, + } +} + +type usingByGroup struct { + SublexerNameGroup int `xml:"sublexer_name_group"` + CodeGroup int `xml:"code_group"` + Emitters Emitters `xml:"emitters"` +} + +func (u *usingByGroup) EmitterKind() string { return "usingbygroup" } +func (u *usingByGroup) Emit(groups []string, state *LexerState) Iterator { + // bounds check + if len(u.Emitters) != len(groups)-1 { + panic("UsingByGroup expects number of emitters to be the same as len(groups)-1") + } + + // grab sublexer + sublexer := state.Registry.Get(groups[u.SublexerNameGroup]) + + // build iterators + iterators := make([]Iterator, len(groups)-1) + for i, group := range groups[1:] { + if i == u.CodeGroup-1 && sublexer != nil { + var err error + iterators[i], err = sublexer.Tokenise(nil, groups[u.CodeGroup]) + if err != nil { + panic(err) + } + } else if u.Emitters[i] != nil { + iterators[i] = u.Emitters[i].Emit([]string{group}, state) + } + } + return Concaterator(iterators...) +} + +// UsingLexer returns an Emitter that uses a given Lexer for parsing and emitting. +// +// This Emitter is not serialisable. 
+func UsingLexer(lexer Lexer) Emitter { + return EmitterFunc(func(groups []string, _ *LexerState) Iterator { + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +type usingEmitter struct { + Lexer string `xml:"lexer,attr"` +} + +func (u *usingEmitter) EmitterKind() string { return "using" } + +func (u *usingEmitter) Emit(groups []string, state *LexerState) Iterator { + if state.Registry == nil { + panic(fmt.Sprintf("no LexerRegistry available for Using(%q)", u.Lexer)) + } + lexer := state.Registry.Get(u.Lexer) + if lexer == nil { + panic(fmt.Sprintf("no such lexer %q", u.Lexer)) + } + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it +} + +// Using returns an Emitter that uses a given Lexer reference for parsing and emitting. +// +// The referenced lexer must be stored in the same LexerRegistry. +func Using(lexer string) Emitter { + return &usingEmitter{Lexer: lexer} +} + +type usingSelfEmitter struct { + State string `xml:"state,attr"` +} + +func (u *usingSelfEmitter) EmitterKind() string { return "usingself" } + +func (u *usingSelfEmitter) Emit(groups []string, state *LexerState) Iterator { + it, err := state.Lexer.Tokenise(&TokeniseOptions{State: u.State, Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it +} + +// UsingSelf is like Using, but uses the current Lexer. +func UsingSelf(stateName string) Emitter { + return &usingSelfEmitter{stateName} +} diff --git a/vendor/github.com/alecthomas/chroma/v2/formatter.go b/vendor/github.com/alecthomas/chroma/v2/formatter.go new file mode 100644 index 0000000..00dd5d8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/formatter.go @@ -0,0 +1,43 @@ +package chroma + +import ( + "io" +) + +// A Formatter for Chroma lexers. +type Formatter interface { + // Format returns a formatting function for tokens. 
+ // + // If the iterator panics, the Formatter should recover. + Format(w io.Writer, style *Style, iterator Iterator) error +} + +// A FormatterFunc is a Formatter implemented as a function. +// +// Guards against iterator panics. +type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error + +func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return f(w, s, it) +} + +type recoveringFormatter struct { + Formatter +} + +func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return r.Formatter.Format(w, s, it) +} + +// RecoveringFormatter wraps a formatter with panic recovery. +func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} } diff --git a/vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go b/vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go new file mode 100644 index 0000000..92d784c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go @@ -0,0 +1,623 @@ +package html + +import ( + "fmt" + "html" + "io" + "sort" + "strconv" + "strings" + "sync" + + "github.com/alecthomas/chroma/v2" +) + +// Option sets an option of the HTML formatter. +type Option func(f *Formatter) + +// Standalone configures the HTML formatter for generating a standalone HTML document. +func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } } + +// ClassPrefix sets the CSS class prefix. +func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } } + +// WithClasses emits HTML using CSS classes, rather than inline styles. +func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } } + +// WithAllClasses disables an optimisation that omits redundant CSS classes. 
+func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } } + +// WithCustomCSS sets user's custom CSS styles. +func WithCustomCSS(css map[chroma.TokenType]string) Option { + return func(f *Formatter) { + f.customCSS = css + } +} + +// TabWidth sets the number of characters for a tab. Defaults to 8. +func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } } + +// PreventSurroundingPre prevents the surrounding pre tags around the generated code. +func PreventSurroundingPre(b bool) Option { + return func(f *Formatter) { + f.preventSurroundingPre = b + + if b { + f.preWrapper = nopPreWrapper + } else { + f.preWrapper = defaultPreWrapper + } + } +} + +// InlineCode creates inline code wrapped in a code tag. +func InlineCode(b bool) Option { + return func(f *Formatter) { + f.inlineCode = b + f.preWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { + if code { + return fmt.Sprintf(``, styleAttr) + } + + return `` + }, + end: func(code bool) string { + if code { + return `` + } + + return `` + }, + } + } +} + +// WithPreWrapper allows control of the surrounding pre tags. +func WithPreWrapper(wrapper PreWrapper) Option { + return func(f *Formatter) { + f.preWrapper = wrapper + } +} + +// WrapLongLines wraps long lines. +func WrapLongLines(b bool) Option { + return func(f *Formatter) { + f.wrapLongLines = b + } +} + +// WithLineNumbers formats output with line numbers. +func WithLineNumbers(b bool) Option { + return func(f *Formatter) { + f.lineNumbers = b + } +} + +// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers +// and code in table td's, which make them copy-and-paste friendly. +func LineNumbersInTable(b bool) Option { + return func(f *Formatter) { + f.lineNumbersInTable = b + } +} + +// WithLinkableLineNumbers decorates the line numbers HTML elements with an "id" +// attribute so they can be linked. 
+func WithLinkableLineNumbers(b bool, prefix string) Option { + return func(f *Formatter) { + f.linkableLineNumbers = b + f.lineNumbersIDPrefix = prefix + } +} + +// HighlightLines higlights the given line ranges with the Highlight style. +// +// A range is the beginning and ending of a range as 1-based line numbers, inclusive. +func HighlightLines(ranges [][2]int) Option { + return func(f *Formatter) { + f.highlightRanges = ranges + sort.Sort(f.highlightRanges) + } +} + +// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1. +func BaseLineNumber(n int) Option { + return func(f *Formatter) { + f.baseLineNumber = n + } +} + +// New HTML formatter. +func New(options ...Option) *Formatter { + f := &Formatter{ + baseLineNumber: 1, + preWrapper: defaultPreWrapper, + } + f.styleCache = newStyleCache(f) + for _, option := range options { + option(f) + } + return f +} + +// PreWrapper defines the operations supported in WithPreWrapper. +type PreWrapper interface { + // Start is called to write a start
<pre> element.
+	// The code flag tells whether this block surrounds
+	// highlighted code. This will be false when surrounding
+	// line numbers.
+	Start(code bool, styleAttr string) string
+
+	// End is called to write the end </pre>
element. + End(code bool) string +} + +type preWrapper struct { + start func(code bool, styleAttr string) string + end func(code bool) string +} + +func (p preWrapper) Start(code bool, styleAttr string) string { + return p.start(code, styleAttr) +} + +func (p preWrapper) End(code bool) string { + return p.end(code) +} + +var ( + nopPreWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { return "" }, + end: func(code bool) string { return "" }, + } + defaultPreWrapper = preWrapper{ + start: func(code bool, styleAttr string) string { + if code { + return fmt.Sprintf(``, styleAttr) + } + + return fmt.Sprintf(``, styleAttr) + }, + end: func(code bool) string { + if code { + return `` + } + + return `` + }, + } +) + +// Formatter that generates HTML. +type Formatter struct { + styleCache *styleCache + standalone bool + prefix string + Classes bool // Exported field to detect when classes are being used + allClasses bool + customCSS map[chroma.TokenType]string + preWrapper PreWrapper + inlineCode bool + preventSurroundingPre bool + tabWidth int + wrapLongLines bool + lineNumbers bool + lineNumbersInTable bool + linkableLineNumbers bool + lineNumbersIDPrefix string + highlightRanges highlightRanges + baseLineNumber int +} + +type highlightRanges [][2]int + +func (h highlightRanges) Len() int { return len(h) } +func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] } +func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] } + +func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) { + return f.writeHTML(w, style, iterator.Tokens()) +} + +// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked). +// +// OTOH we need to be super careful about correct escaping... 
+func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo + css := f.styleCache.get(style, true) + if f.standalone { + fmt.Fprint(w, "\n") + if f.Classes { + fmt.Fprint(w, "") + } + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.Background)) + } + + wrapInTable := f.lineNumbers && f.lineNumbersInTable + + lines := chroma.SplitTokensIntoLines(tokens) + lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1)) + highlightIndex := 0 + + if wrapInTable { + // List line numbers in its own + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.PreWrapper)) + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineTable)) + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD)) + fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper))) + for index := range lines { + line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + if highlight { + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineHighlight)) + } + + fmt.Fprintf(w, "%s\n", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line)) + + if highlight { + fmt.Fprintf(w, "") + } + } + fmt.Fprint(w, f.preWrapper.End(false)) + fmt.Fprint(w, "\n") + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD, "width:100%")) + } + + fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper))) + + highlightIndex = 0 + for index, tokens := range lines { + // 1-based line number. 
+ line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + + if !(f.preventSurroundingPre || f.inlineCode) { + // Start of Line + fmt.Fprint(w, ``) + } else { + fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line)) + } + + // Line number + if f.lineNumbers && !wrapInTable { + fmt.Fprintf(w, "%s", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line)) + } + + fmt.Fprintf(w, ``, f.styleAttr(css, chroma.CodeLine)) + } + + for _, token := range tokens { + html := html.EscapeString(token.String()) + attr := f.styleAttr(css, token.Type) + if attr != "" { + html = fmt.Sprintf("%s", attr, html) + } + fmt.Fprint(w, html) + } + + if !(f.preventSurroundingPre || f.inlineCode) { + fmt.Fprint(w, ``) // End of CodeLine + + fmt.Fprint(w, ``) // End of Line + } + } + fmt.Fprintf(w, "%s", f.preWrapper.End(true)) + + if wrapInTable { + fmt.Fprint(w, "\n") + fmt.Fprint(w, "\n") + } + + if f.standalone { + fmt.Fprint(w, "\n\n") + fmt.Fprint(w, "\n") + } + + return nil +} + +func (f *Formatter) lineIDAttribute(line int) string { + if !f.linkableLineNumbers { + return "" + } + return fmt.Sprintf(" id=\"%s\"", f.lineID(line)) +} + +func (f *Formatter) lineTitleWithLinkIfNeeded(css map[chroma.TokenType]string, lineDigits, line int) string { + title := fmt.Sprintf("%*d", lineDigits, line) + if !f.linkableLineNumbers { + return title + } + return fmt.Sprintf("%s", f.styleAttr(css, chroma.LineLink), f.lineID(line), title) +} + +func (f *Formatter) lineID(line int) string { + return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line) +} + +func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { + next := false + for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] { + highlightIndex++ + next = true + } + if highlightIndex < len(f.highlightRanges) { + hrange := f.highlightRanges[highlightIndex] + if line >= 
hrange[0] && line <= hrange[1] { + return true, next + } + } + return false, next +} + +func (f *Formatter) class(t chroma.TokenType) string { + for t != 0 { + if cls, ok := chroma.StandardTypes[t]; ok { + if cls != "" { + return f.prefix + cls + } + return "" + } + t = t.Parent() + } + if cls := chroma.StandardTypes[t]; cls != "" { + return f.prefix + cls + } + return "" +} + +func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string { + if f.Classes { + cls := f.class(tt) + if cls == "" { + return "" + } + return fmt.Sprintf(` class="%s"`, cls) + } + if _, ok := styles[tt]; !ok { + tt = tt.SubCategory() + if _, ok := styles[tt]; !ok { + tt = tt.Category() + if _, ok := styles[tt]; !ok { + return "" + } + } + } + css := []string{styles[tt]} + css = append(css, extraCSS...) + return fmt.Sprintf(` style="%s"`, strings.Join(css, ";")) +} + +func (f *Formatter) tabWidthStyle() string { + if f.tabWidth != 0 && f.tabWidth != 8 { + return fmt.Sprintf("-moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d;", f.tabWidth) + } + return "" +} + +// WriteCSS writes CSS style definitions (without any surrounding HTML). +func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error { + css := f.styleCache.get(style, false) + // Special-case background as it is mapped to the outer ".chroma" class. + if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil { + return err + } + // Special-case PreWrapper as it is the ".chroma" class. + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil { + return err + } + // Special-case code column of table to expand width. 
+ if f.lineNumbers && f.lineNumbersInTable { + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }", + chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil { + return err + } + } + // Special-case line number highlighting when targeted. + if f.lineNumbers || f.lineNumbersInTable { + targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight)) + for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} { + fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS) + } + } + tts := []int{} + for tt := range css { + tts = append(tts, int(tt)) + } + sort.Ints(tts) + for _, ti := range tts { + tt := chroma.TokenType(ti) + switch tt { + case chroma.Background, chroma.PreWrapper: + continue + } + class := f.class(tt) + if class == "" { + continue + } + styles := css[tt] + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil { + return err + } + } + return nil +} + +func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string { + classes := map[chroma.TokenType]string{} + bg := style.Get(chroma.Background) + // Convert the style. 
+ for t := range chroma.StandardTypes { + entry := style.Get(t) + if t != chroma.Background { + entry = entry.Sub(bg) + } + + // Inherit from custom CSS provided by user + tokenCategory := t.Category() + tokenSubCategory := t.SubCategory() + if t != tokenCategory { + if css, ok := f.customCSS[tokenCategory]; ok { + classes[t] = css + } + } + if tokenCategory != tokenSubCategory { + if css, ok := f.customCSS[tokenSubCategory]; ok { + classes[t] += css + } + } + // Add custom CSS provided by user + if css, ok := f.customCSS[t]; ok { + classes[t] += css + } + + if !f.allClasses && entry.IsZero() && classes[t] == `` { + continue + } + + styleEntryCSS := StyleEntryToCSS(entry) + if styleEntryCSS != `` && classes[t] != `` { + styleEntryCSS += `;` + } + classes[t] = styleEntryCSS + classes[t] + } + classes[chroma.Background] += `;` + f.tabWidthStyle() + classes[chroma.PreWrapper] += classes[chroma.Background] + // Make PreWrapper a grid to show highlight style with full width. + if len(f.highlightRanges) > 0 && f.customCSS[chroma.PreWrapper] == `` { + classes[chroma.PreWrapper] += `display: grid;` + } + // Make PreWrapper wrap long lines. 
+ if f.wrapLongLines { + classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;` + } + lineNumbersStyle := `white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;` + // All rules begin with default rules followed by user provided rules + classes[chroma.Line] = `display: flex;` + classes[chroma.Line] + classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers] + classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable] + classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable] + classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD] + classes[chroma.LineLink] = "outline: none; text-decoration: none; color: inherit" + classes[chroma.LineLink] + return classes +} + +// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes. +func StyleEntryToCSS(e chroma.StyleEntry) string { + styles := []string{} + if e.Colour.IsSet() { + styles = append(styles, "color: "+e.Colour.String()) + } + if e.Background.IsSet() { + styles = append(styles, "background-color: "+e.Background.String()) + } + if e.Bold == chroma.Yes { + styles = append(styles, "font-weight: bold") + } + if e.Italic == chroma.Yes { + styles = append(styles, "font-style: italic") + } + if e.Underline == chroma.Yes { + styles = append(styles, "text-decoration: underline") + } + return strings.Join(styles, "; ") +} + +// Compress CSS attributes - remove spaces, transform 6-digit colours to 3. 
+func compressStyle(s string) string { + parts := strings.Split(s, ";") + out := []string{} + for _, p := range parts { + p = strings.Join(strings.Fields(p), " ") + p = strings.Replace(p, ": ", ":", 1) + if strings.Contains(p, "#") { + c := p[len(p)-6:] + if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] { + p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5] + } + } + out = append(out, p) + } + return strings.Join(out, ";") +} + +const styleCacheLimit = 32 + +type styleCacheEntry struct { + style *chroma.Style + compressed bool + cache map[chroma.TokenType]string +} + +type styleCache struct { + mu sync.Mutex + // LRU cache of compiled (and possibly compressed) styles. This is a slice + // because the cache size is small, and a slice is sufficiently fast for + // small N. + cache []styleCacheEntry + f *Formatter +} + +func newStyleCache(f *Formatter) *styleCache { + return &styleCache{f: f} +} + +func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string { + l.mu.Lock() + defer l.mu.Unlock() + + // Look for an existing entry. + for i := len(l.cache) - 1; i >= 0; i-- { + entry := l.cache[i] + if entry.style == style && entry.compressed == compress { + // Top of the cache, no need to adjust the order. + if i == len(l.cache)-1 { + return entry.cache + } + // Move this entry to the end of the LRU + copy(l.cache[i:], l.cache[i+1:]) + l.cache[len(l.cache)-1] = entry + return entry.cache + } + } + + // No entry, create one. + cached := l.f.styleToCSS(style) + if !l.f.Classes { + for t, style := range cached { + cached[t] = compressStyle(style) + } + } + if compress { + for t, style := range cached { + cached[t] = compressStyle(style) + } + } + // Evict the oldest entry. 
+ if len(l.cache) >= styleCacheLimit { + l.cache = l.cache[0:copy(l.cache, l.cache[1:])] + } + l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress}) + return cached +} diff --git a/vendor/github.com/alecthomas/chroma/v2/iterator.go b/vendor/github.com/alecthomas/chroma/v2/iterator.go new file mode 100644 index 0000000..d5175de --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/iterator.go @@ -0,0 +1,76 @@ +package chroma + +import "strings" + +// An Iterator across tokens. +// +// EOF will be returned at the end of the Token stream. +// +// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. +type Iterator func() Token + +// Tokens consumes all tokens from the iterator and returns them as a slice. +func (i Iterator) Tokens() []Token { + var out []Token + for t := i(); t != EOF; t = i() { + out = append(out, t) + } + return out +} + +// Concaterator concatenates tokens from a series of iterators. +func Concaterator(iterators ...Iterator) Iterator { + return func() Token { + for len(iterators) > 0 { + t := iterators[0]() + if t != EOF { + return t + } + iterators = iterators[1:] + } + return EOF + } +} + +// Literator converts a sequence of literal Tokens into an Iterator. +func Literator(tokens ...Token) Iterator { + return func() Token { + if len(tokens) == 0 { + return EOF + } + token := tokens[0] + tokens = tokens[1:] + return token + } +} + +// SplitTokensIntoLines splits tokens containing newlines in two. +func SplitTokensIntoLines(tokens []Token) (out [][]Token) { + var line []Token // nolint: prealloc + for _, token := range tokens { + for strings.Contains(token.Value, "\n") { + parts := strings.SplitAfterN(token.Value, "\n", 2) + // Token becomes the tail. + token.Value = parts[1] + + // Append the head to the line and flush the line. 
+ clone := token.Clone() + clone.Value = parts[0] + line = append(line, clone) + out = append(out, line) + line = nil + } + line = append(line, token) + } + if len(line) > 0 { + out = append(out, line) + } + // Strip empty trailing token line. + if len(out) > 0 { + last := out[len(out)-1] + if len(last) == 1 && last[0].Value == "" { + out = out[:len(out)-1] + } + } + return +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexer.go b/vendor/github.com/alecthomas/chroma/v2/lexer.go new file mode 100644 index 0000000..eb027bf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexer.go @@ -0,0 +1,162 @@ +package chroma + +import ( + "fmt" + "strings" +) + +var ( + defaultOptions = &TokeniseOptions{ + State: "root", + EnsureLF: true, + } +) + +// Config for a lexer. +type Config struct { + // Name of the lexer. + Name string `xml:"name,omitempty"` + + // Shortcuts for the lexer + Aliases []string `xml:"alias,omitempty"` + + // File name globs + Filenames []string `xml:"filename,omitempty"` + + // Secondary file name globs + AliasFilenames []string `xml:"alias_filename,omitempty"` + + // MIME types + MimeTypes []string `xml:"mime_type,omitempty"` + + // Regex matching is case-insensitive. + CaseInsensitive bool `xml:"case_insensitive,omitempty"` + + // Regex matches all characters. + DotAll bool `xml:"dot_all,omitempty"` + + // Regex does not match across lines ($ matches EOL). + // + // Defaults to multiline. + NotMultiline bool `xml:"not_multiline,omitempty"` + + // Don't strip leading and trailing newlines from the input. + // DontStripNL bool + + // Strip all leading and trailing whitespace from the input + // StripAll bool + + // Make sure that the input ends with a newline. This + // is required for some lexers that consume input linewise. + EnsureNL bool `xml:"ensure_nl,omitempty"` + + // If given and greater than 0, expand tabs in the input. + // TabSize int + + // Priority of lexer. + // + // If this is 0 it will be treated as a default of 1. 
+ Priority float32 `xml:"priority,omitempty"` + + // Analyse is a list of regexes to match against the input. + // + // If a match is found, the score is returned if single attribute is set to true, + // otherwise the sum of all the score of matching patterns will be + // used as the final score. + Analyse *AnalyseConfig `xml:"analyse,omitempty"` +} + +// AnalyseConfig defines the list of regexes analysers. +type AnalyseConfig struct { + Regexes []RegexConfig `xml:"regex,omitempty"` + // If true, the first matching score is returned. + First bool `xml:"first,attr"` +} + +// RegexConfig defines a single regex pattern and its score in case of match. +type RegexConfig struct { + Pattern string `xml:"pattern,attr"` + Score float32 `xml:"score,attr"` +} + +// Token output to formatter. +type Token struct { + Type TokenType `json:"type"` + Value string `json:"value"` +} + +func (t *Token) String() string { return t.Value } +func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) } + +// Clone returns a clone of the Token. +func (t *Token) Clone() Token { + return *t +} + +// EOF is returned by lexers at the end of input. +var EOF Token + +// TokeniseOptions contains options for tokenisers. +type TokeniseOptions struct { + // State to start tokenisation in. Defaults to "root". + State string + // Nested tokenisation. + Nested bool + + // If true, all EOLs are converted into LF + // by replacing CRLF and CR + EnsureLF bool +} + +// A Lexer for tokenising source code. +type Lexer interface { + // Config describing the features of the Lexer. + Config() *Config + // Tokenise returns an Iterator over tokens in text. + Tokenise(options *TokeniseOptions, text string) (Iterator, error) + // SetRegistry sets the registry this Lexer is associated with. + // + // The registry should be used by the Lexer if it needs to look up other + // lexers. 
+ SetRegistry(registry *LexerRegistry) Lexer + // SetAnalyser sets a function the Lexer should use for scoring how + // likely a fragment of text is to match this lexer, between 0.0 and 1.0. + // A value of 1 indicates high confidence. + // + // Lexers may ignore this if they implement their own analysers. + SetAnalyser(analyser func(text string) float32) Lexer + // AnalyseText scores how likely a fragment of text is to match + // this lexer, between 0.0 and 1.0. A value of 1 indicates high confidence. + AnalyseText(text string) float32 +} + +// Lexers is a slice of lexers sortable by name. +type Lexers []Lexer + +func (l Lexers) Len() int { return len(l) } +func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l Lexers) Less(i, j int) bool { + return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name) +} + +// PrioritisedLexers is a slice of lexers sortable by priority. +type PrioritisedLexers []Lexer + +func (l PrioritisedLexers) Len() int { return len(l) } +func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l PrioritisedLexers) Less(i, j int) bool { + ip := l[i].Config().Priority + if ip == 0 { + ip = 1 + } + jp := l[j].Config().Priority + if jp == 0 { + jp = 1 + } + return ip > jp +} + +// Analyser determines how appropriate this lexer is for the given text. +type Analyser interface { + AnalyseText(text string) float32 +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/README.md b/vendor/github.com/alecthomas/chroma/v2/lexers/README.md new file mode 100644 index 0000000..60a0055 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/README.md @@ -0,0 +1,46 @@ +# Chroma lexers + +All lexers in Chroma should now be defined in XML unless they require custom code. + +## Lexer tests + +The tests in this directory feed a known input `testdata/.actual` into the parser for `` and check +that its output matches `.expected`. 
+ +It is also possible to perform several tests on a same parser ``, by placing know inputs `*.actual` into a +directory `testdata//`. + +### Running the tests + +Run the tests as normal: +```go +go test ./lexers +``` + +### Update existing tests + +When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer. + +To regenerate all tests, type in your terminal: + +```go +RECORD=true go test ./lexers +``` + +This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project. + +(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.) + +#### Windows users + +Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell. + +Instead we have to perform both steps separately: + +- Set the `RECORD` environment variable to `true`. + + In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more. + + In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more. + + You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how. +- When the environment variable is set, run `go test ./lexers`. + +Chroma will now regenerate the test files and print its results to the console window. 
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go b/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go new file mode 100644 index 0000000..82a7efa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go @@ -0,0 +1,275 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Matcher token stub for docs, or +// Named matcher: @name, or +// Path matcher: /foo, or +// Wildcard path matcher: * +// nolint: gosec +var caddyfileMatcherTokenRegexp = `(\[\\]|@[^\s]+|/[^\s]+|\*)` + +// Comment at start of line, or +// Comment preceded by whitespace +var caddyfileCommentRegexp = `(^|\s+)#.*\n` + +// caddyfileCommon are the rules common to both of the lexer variants +func caddyfileCommonRules() Rules { + return Rules{ + "site_block_common": { + Include("site_body"), + // Any other directive + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "site_body": { + // Import keyword + {`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")}, + // Matcher definition + {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")}, + // Matcher token stub for docs + {`\[\\]`, NameDecorator, Push("matcher")}, + // These cannot have matchers but may have things that look like + // matchers in their arguments, so we just parse as a subdirective. 
+ {`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")}, + // These are special, they can nest more directives + {`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")}, + // uri directive has special syntax + {`\b(uri)\b`, Keyword, Push("uri_directive")}, + }, + "matcher": { + {`\{`, Punctuation, Push("block")}, + // Not can be one-liner + {`not`, Keyword, Push("deep_not_matcher")}, + // Heredoc for CEL expression + Include("heredoc"), + // Backtick for CEL expression + {"`", StringBacktick, Push("backticks")}, + // Any other same-line matcher + {`[^\s#]+`, Keyword, Push("arguments")}, + // Terminators + {`\s*\n`, Text, Pop(1)}, + {`\}`, Punctuation, Pop(1)}, + Include("base"), + }, + "block": { + {`\}`, Punctuation, Pop(2)}, + // Using double quotes doesn't stop at spaces + {`"`, StringDouble, Push("double_quotes")}, + // Using backticks doesn't stop at spaces + {"`", StringBacktick, Push("backticks")}, + // Not can be one-liner + {`not`, Keyword, Push("not_matcher")}, + // Directives & matcher definitions + Include("site_body"), + // Any directive + {`[^\s#]+`, Keyword, Push("subdirective")}, + Include("base"), + }, + "nested_block": { + {`\}`, Punctuation, Pop(2)}, + // Using double quotes doesn't stop at spaces + {`"`, StringDouble, Push("double_quotes")}, + // Using backticks doesn't stop at spaces + {"`", StringBacktick, Push("backticks")}, + // Not can be one-liner + {`not`, Keyword, Push("not_matcher")}, + // Directives & matcher definitions + Include("site_body"), + // Any other subdirective + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "not_matcher": { + {`\}`, Punctuation, Pop(2)}, + {`\{(?=\s)`, Punctuation, Push("block")}, + {`[^\s#]+`, Keyword, Push("arguments")}, + {`\s+`, Text, nil}, + }, + "deep_not_matcher": { + {`\}`, Punctuation, Pop(2)}, + {`\{(?=\s)`, Punctuation, Push("block")}, + {`[^\s#]+`, Keyword, Push("deep_subdirective")}, + {`\s+`, Text, nil}, 
+ }, + "directive": { + {`\{(?=\s)`, Punctuation, Push("block")}, + {caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")}, + {caddyfileCommentRegexp, CommentSingle, Pop(1)}, + {`\s*\n`, Text, Pop(1)}, + Include("base"), + }, + "nested_directive": { + {`\{(?=\s)`, Punctuation, Push("nested_block")}, + {caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")}, + {caddyfileCommentRegexp, CommentSingle, Pop(1)}, + {`\s*\n`, Text, Pop(1)}, + Include("base"), + }, + "subdirective": { + {`\{(?=\s)`, Punctuation, Push("block")}, + {caddyfileCommentRegexp, CommentSingle, Pop(1)}, + {`\s*\n`, Text, Pop(1)}, + Include("base"), + }, + "arguments": { + {`\{(?=\s)`, Punctuation, Push("block")}, + {caddyfileCommentRegexp, CommentSingle, Pop(2)}, + {`\\\n`, Text, nil}, // Skip escaped newlines + {`\s*\n`, Text, Pop(2)}, + Include("base"), + }, + "nested_arguments": { + {`\{(?=\s)`, Punctuation, Push("nested_block")}, + {caddyfileCommentRegexp, CommentSingle, Pop(2)}, + {`\\\n`, Text, nil}, // Skip escaped newlines + {`\s*\n`, Text, Pop(2)}, + Include("base"), + }, + "deep_subdirective": { + {`\{(?=\s)`, Punctuation, Push("block")}, + {caddyfileCommentRegexp, CommentSingle, Pop(3)}, + {`\s*\n`, Text, Pop(3)}, + Include("base"), + }, + "uri_directive": { + {`\{(?=\s)`, Punctuation, Push("block")}, + {caddyfileMatcherTokenRegexp, NameDecorator, nil}, + {`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")}, + {caddyfileCommentRegexp, CommentSingle, Pop(1)}, + {`\s*\n`, Text, Pop(1)}, + Include("base"), + }, + "double_quotes": { + Include("placeholder"), + {`\\"`, StringDouble, nil}, + {`[^"]`, StringDouble, nil}, + {`"`, StringDouble, Pop(1)}, + }, + "backticks": { + Include("placeholder"), + {"\\\\`", StringBacktick, nil}, + {"[^`]", StringBacktick, nil}, + {"`", StringBacktick, Pop(1)}, + }, + "optional": { + // Docs syntax for showing optional parts with [ ] + {`\[`, Punctuation, Push("optional")}, + Include("name_constants"), + 
{`\|`, Punctuation, nil}, + {`[^\[\]\|]+`, String, nil}, + {`\]`, Punctuation, Pop(1)}, + }, + "heredoc": { + {`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil}, + }, + "name_constants": { + {`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil}, + }, + "placeholder": { + // Placeholder with dots, colon for default value, brackets for args[0:] + {`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil}, + // Handle opening brackets with no matching closing one + {`\{[^\}\s]*\b`, String, nil}, + }, + "base": { + {caddyfileCommentRegexp, CommentSingle, nil}, + {`\[\\]`, NameDecorator, nil}, + Include("name_constants"), + Include("heredoc"), + {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil}, + {`\[`, Punctuation, Push("optional")}, + {"`", StringBacktick, Push("backticks")}, + {`"`, StringDouble, Push("double_quotes")}, + Include("placeholder"), + {`[a-z-]+/[a-z-+]+`, String, nil}, + {`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil}, + {`[^\s\n#\{]+`, String, nil}, + {`/[^\s#]*`, Name, nil}, + {`\s+`, Text, nil}, + }, + } +} + +// Caddyfile lexer. 
+var Caddyfile = Register(MustNewLexer( + &Config{ + Name: "Caddyfile", + Aliases: []string{"caddyfile", "caddy"}, + Filenames: []string{"Caddyfile*"}, + MimeTypes: []string{}, + }, + caddyfileRules, +)) + +func caddyfileRules() Rules { + return Rules{ + "root": { + {caddyfileCommentRegexp, CommentSingle, nil}, + // Global options block + {`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")}, + // Top level import + {`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil}, + // Snippets + {`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")}, + // Site label + {`[^#{(\s,]+`, GenericHeading, Push("label")}, + // Site label with placeholder + {`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")}, + {`\s+`, Text, nil}, + }, + "globals": { + {`\}`, Punctuation, Pop(1)}, + // Global options are parsed as subdirectives (no matcher) + {`[^\s#]+`, Keyword, Push("subdirective")}, + Include("base"), + }, + "snippet": { + {`\}`, Punctuation, Pop(1)}, + Include("site_body"), + // Any other directive + {`[^\s#]+`, Keyword, Push("directive")}, + Include("base"), + }, + "label": { + // Allow multiple labels, comma separated, newlines after + // a comma means another label is coming + {`,\s*\n?`, Text, nil}, + {` `, Text, nil}, + // Site label with placeholder + Include("placeholder"), + // Site label + {`[^#{(\s,]+`, GenericHeading, nil}, + // Comment after non-block label (hack because comments end in \n) + {`#.*\n`, CommentSingle, Push("site_block")}, + // Note: if \n, we'll never pop out of the site_block, it's valid + {`\{(?=\s)|\n`, Punctuation, Push("site_block")}, + }, + "site_block": { + {`\}`, Punctuation, Pop(2)}, + Include("site_block_common"), + }, + }.Merge(caddyfileCommonRules()) +} + +// Caddyfile directive-only lexer. 
+var CaddyfileDirectives = Register(MustNewLexer( + &Config{ + Name: "Caddyfile Directives", + Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + caddyfileDirectivesRules, +)) + +func caddyfileDirectivesRules() Rules { + return Rules{ + // Same as "site_block" in Caddyfile + "root": { + Include("site_block_common"), + }, + }.Merge(caddyfileCommonRules()) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/cl.go b/vendor/github.com/alecthomas/chroma/v2/lexers/cl.go new file mode 100644 index 0000000..3eb0c23 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/cl.go @@ -0,0 +1,243 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +var ( + clBuiltinFunctions = []string{ + "<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+", + "abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin", + "adjustable-array-p", "adjust-array", "allocate-instance", + "alpha-char-p", "alphanumericp", "append", "apply", "apropos", + "apropos-list", "aref", "arithmetic-error-operands", + "arithmetic-error-operation", "array-dimension", "array-dimensions", + "array-displacement", "array-element-type", "array-has-fill-pointer-p", + "array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index", + "array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if", + "assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1", + "bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not", + "bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole", + "both-case-p", "boundp", "break", "broadcast-stream-streams", + "butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr", + "caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr", + "cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car", + "cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar", + "cddaar", "cddadr", "cddar", 
"cdddar", "cddddr", "cdddr", "cddr", "cdr", + "ceiling", "cell-error-name", "cerror", "change-class", "char", "char<", + "char<=", "char=", "char>", "char>=", "char/=", "character", + "characterp", "char-code", "char-downcase", "char-equal", + "char-greaterp", "char-int", "char-lessp", "char-name", + "char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase", + "cis", "class-name", "class-of", "clear-input", "clear-output", + "close", "clrhash", "code-char", "coerce", "compile", + "compiled-function-p", "compile-file", "compile-file-pathname", + "compiler-macro-function", "complement", "complex", "complexp", + "compute-applicable-methods", "compute-restarts", "concatenate", + "concatenated-stream-streams", "conjugate", "cons", "consp", + "constantly", "constantp", "continue", "copy-alist", "copy-list", + "copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure", + "copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if", + "count-if-not", "decode-float", "decode-universal-time", "delete", + "delete-duplicates", "delete-file", "delete-if", "delete-if-not", + "delete-package", "denominator", "deposit-field", "describe", + "describe-object", "digit-char", "digit-char-p", "directory", + "directory-namestring", "disassemble", "documentation", "dpb", + "dribble", "echo-stream-input-stream", "echo-stream-output-stream", + "ed", "eighth", "elt", "encode-universal-time", "endp", + "enough-namestring", "ensure-directories-exist", + "ensure-generic-function", "eq", "eql", "equal", "equalp", "error", + "eval", "evenp", "every", "exp", "export", "expt", "fboundp", + "fceiling", "fdefinition", "ffloor", "fifth", "file-author", + "file-error-pathname", "file-length", "file-namestring", + "file-position", "file-string-length", "file-write-date", + "fill", "fill-pointer", "find", "find-all-symbols", "find-class", + "find-if", "find-if-not", "find-method", "find-package", "find-restart", + "find-symbol", "finish-output", "first", "float", 
"float-digits", + "floatp", "float-precision", "float-radix", "float-sign", "floor", + "fmakunbound", "force-output", "format", "fourth", "fresh-line", + "fround", "ftruncate", "funcall", "function-keywords", + "function-lambda-expression", "functionp", "gcd", "gensym", "gentemp", + "get", "get-decoded-time", "get-dispatch-macro-character", "getf", + "gethash", "get-internal-real-time", "get-internal-run-time", + "get-macro-character", "get-output-stream-string", "get-properties", + "get-setf-expansion", "get-universal-time", "graphic-char-p", + "hash-table-count", "hash-table-p", "hash-table-rehash-size", + "hash-table-rehash-threshold", "hash-table-size", "hash-table-test", + "host-namestring", "identity", "imagpart", "import", + "initialize-instance", "input-stream-p", "inspect", + "integer-decode-float", "integer-length", "integerp", + "interactive-stream-p", "intern", "intersection", + "invalid-method-error", "invoke-debugger", "invoke-restart", + "invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm", + "ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type", + "lisp-implementation-version", "list", "list*", "list-all-packages", + "listen", "list-length", "listp", "load", + "load-logical-pathname-translations", "log", "logand", "logandc1", + "logandc2", "logbitp", "logcount", "logeqv", "logical-pathname", + "logical-pathname-translations", "logior", "lognand", "lognor", + "lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name", + "lower-case-p", "machine-instance", "machine-type", "machine-version", + "macroexpand", "macroexpand-1", "macro-function", "make-array", + "make-broadcast-stream", "make-concatenated-stream", "make-condition", + "make-dispatch-macro-character", "make-echo-stream", "make-hash-table", + "make-instance", "make-instances-obsolete", "make-list", + "make-load-form", "make-load-form-saving-slots", "make-package", + "make-pathname", "make-random-state", "make-sequence", "make-string", + 
"make-string-input-stream", "make-string-output-stream", "make-symbol", + "make-synonym-stream", "make-two-way-stream", "makunbound", "map", + "mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl", + "maplist", "mask-field", "max", "member", "member-if", "member-if-not", + "merge", "merge-pathnames", "method-combination-error", + "method-qualifiers", "min", "minusp", "mismatch", "mod", + "muffle-warning", "name-char", "namestring", "nbutlast", "nconc", + "next-method-p", "nintersection", "ninth", "no-applicable-method", + "no-next-method", "not", "notany", "notevery", "nreconc", "nreverse", + "nset-difference", "nset-exclusive-or", "nstring-capitalize", + "nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if", + "nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not", + "nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp", + "open", "open-stream-p", "output-stream-p", "package-error-package", + "package-name", "package-nicknames", "packagep", + "package-shadowing-symbols", "package-used-by-list", "package-use-list", + "pairlis", "parse-integer", "parse-namestring", "pathname", + "pathname-device", "pathname-directory", "pathname-host", + "pathname-match-p", "pathname-name", "pathnamep", "pathname-type", + "pathname-version", "peek-char", "phase", "plusp", "position", + "position-if", "position-if-not", "pprint", "pprint-dispatch", + "pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline", + "pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ", + "princ-to-string", "print", "print-object", "probe-file", "proclaim", + "provide", "random", "random-state-p", "rassoc", "rassoc-if", + "rassoc-if-not", "rational", "rationalize", "rationalp", "read", + "read-byte", "read-char", "read-char-no-hang", "read-delimited-list", + "read-from-string", "read-line", "read-preserving-whitespace", + "read-sequence", "readtable-case", "readtablep", "realp", "realpart", + "reduce", "reinitialize-instance", 
"rem", "remhash", "remove", + "remove-duplicates", "remove-if", "remove-if-not", "remove-method", + "remprop", "rename-file", "rename-package", "replace", "require", + "rest", "restart-name", "revappend", "reverse", "room", "round", + "row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar", + "search", "second", "set", "set-difference", + "set-dispatch-macro-character", "set-exclusive-or", + "set-macro-character", "set-pprint-dispatch", "set-syntax-from-char", + "seventh", "shadow", "shadowing-import", "shared-initialize", + "short-site-name", "signal", "signum", "simple-bit-vector-p", + "simple-condition-format-arguments", "simple-condition-format-control", + "simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep", + "slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing", + "slot-unbound", "slot-value", "software-type", "software-version", + "some", "sort", "special-operator-p", "sqrt", "stable-sort", + "standard-char-p", "store-value", "stream-element-type", + "stream-error-stream", "stream-external-format", "streamp", "string", + "string<", "string<=", "string=", "string>", "string>=", "string/=", + "string-capitalize", "string-downcase", "string-equal", + "string-greaterp", "string-left-trim", "string-lessp", + "string-not-equal", "string-not-greaterp", "string-not-lessp", + "stringp", "string-right-trim", "string-trim", "string-upcase", + "sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not", + "substitute", "substitute-if", "substitute-if-not", "subtypep", "svref", + "sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package", + "symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:", + "tailp", "tan", "tanh", "tenth", "terpri", "third", + "translate-logical-pathname", "translate-pathname", "tree-equal", + "truename", "truncate", "two-way-stream-input-stream", + "two-way-stream-output-stream", "type-error-datum", + "type-error-expected-type", "type-of", "typep", "unbound-slot-instance", 
+ "unexport", "unintern", "union", "unread-char", "unuse-package", + "update-instance-for-different-class", + "update-instance-for-redefined-class", "upgraded-array-element-type", + "upgraded-complex-part-type", "upper-case-p", "use-package", + "user-homedir-pathname", "use-value", "values", "values-list", "vector", + "vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn", + "wild-pathname-p", "write", "write-byte", "write-char", "write-line", + "write-sequence", "write-string", "write-to-string", "yes-or-no-p", + "y-or-n-p", "zerop", + } + + clSpecialForms = []string{ + "block", "catch", "declare", "eval-when", "flet", "function", "go", "if", + "labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet", + "multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote", + "return-from", "setq", "symbol-macrolet", "tagbody", "the", "throw", + "unwind-protect", + } + + clMacros = []string{ + "and", "assert", "call-method", "case", "ccase", "check-type", "cond", + "ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric", + "define-compiler-macro", "define-condition", "define-method-combination", + "define-modify-macro", "define-setf-expander", "define-symbol-macro", + "defmacro", "defmethod", "defpackage", "defparameter", "defsetf", + "defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do", + "do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols", + "dotimes", "ecase", "etypecase", "formatter", "handler-bind", + "handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop", + "loop-finish", "make-method", "multiple-value-bind", "multiple-value-list", + "multiple-value-setq", "nth-value", "or", "pop", + "pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop", + "print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf", + "psetq", "push", "pushnew", "remf", "restart-bind", "restart-case", + "return", "rotatef", "setf", "shiftf", "step", "time", 
"trace", "typecase", + "unless", "untrace", "when", "with-accessors", "with-compilation-unit", + "with-condition-restarts", "with-hash-table-iterator", + "with-input-from-string", "with-open-file", "with-open-stream", + "with-output-to-string", "with-package-iterator", "with-simple-restart", + "with-slots", "with-standard-io-syntax", + } + + clLambdaListKeywords = []string{ + "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional", + "&rest", "&whole", + } + + clDeclarations = []string{ + "dynamic-extent", "ignore", "optimize", "ftype", "inline", "special", + "ignorable", "notinline", "type", + } + + clBuiltinTypes = []string{ + "atom", "boolean", "base-char", "base-string", "bignum", "bit", + "compiled-function", "extended-char", "fixnum", "keyword", "nil", + "signed-byte", "short-float", "single-float", "double-float", "long-float", + "simple-array", "simple-base-string", "simple-bit-vector", "simple-string", + "simple-vector", "standard-char", "unsigned-byte", + + // Condition Types + "arithmetic-error", "cell-error", "condition", "control-error", + "division-by-zero", "end-of-file", "error", "file-error", + "floating-point-inexact", "floating-point-overflow", + "floating-point-underflow", "floating-point-invalid-operation", + "parse-error", "package-error", "print-not-readable", "program-error", + "reader-error", "serious-condition", "simple-condition", "simple-error", + "simple-type-error", "simple-warning", "stream-error", "storage-condition", + "style-warning", "type-error", "unbound-variable", "unbound-slot", + "undefined-function", "warning", + } + + clBuiltinClasses = []string{ + "array", "broadcast-stream", "bit-vector", "built-in-class", "character", + "class", "complex", "concatenated-stream", "cons", "echo-stream", + "file-stream", "float", "function", "generic-function", "hash-table", + "integer", "list", "logical-pathname", "method-combination", "method", + "null", "number", "package", "pathname", "ratio", "rational", "readtable", 
+ "real", "random-state", "restart", "sequence", "standard-class", + "standard-generic-function", "standard-method", "standard-object", + "string-stream", "stream", "string", "structure-class", "structure-object", + "symbol", "synonym-stream", "t", "two-way-stream", "vector", + } +) + +// Common Lisp lexer. +var CommonLisp = Register(TypeRemappingLexer(MustNewXMLLexer( + embedded, + "embedded/common_lisp.xml", +), TypeMapping{ + {NameVariable, NameFunction, clBuiltinFunctions}, + {NameVariable, Keyword, clSpecialForms}, + {NameVariable, NameBuiltin, clMacros}, + {NameVariable, Keyword, clLambdaListKeywords}, + {NameVariable, Keyword, clDeclarations}, + {NameVariable, KeywordType, clBuiltinTypes}, + {NameVariable, NameClass, clBuiltinClasses}, +})) diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go b/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go new file mode 100644 index 0000000..7e69962 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/dns.go @@ -0,0 +1,17 @@ +package lexers + +import ( + "regexp" +) + +// TODO(moorereason): can this be factored away? +var zoneAnalyserRe = regexp.MustCompile(`(?m)^@\s+IN\s+SOA\s+`) + +func init() { // nolint: gochecknoinits + Get("dns").SetAnalyser(func(text string) float32 { + if zoneAnalyserRe.FindString(text) != "" { + return 1.0 + } + return 0.0 + }) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go b/vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go new file mode 100644 index 0000000..869b0f3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go @@ -0,0 +1,533 @@ +package lexers + +import ( + . 
"github.com/alecthomas/chroma/v2" // nolint +) + +var ( + emacsMacros = []string{ + "atomic-change-group", "case", "block", "cl-block", "cl-callf", "cl-callf2", + "cl-case", "cl-decf", "cl-declaim", "cl-declare", + "cl-define-compiler-macro", "cl-defmacro", "cl-defstruct", + "cl-defsubst", "cl-deftype", "cl-defun", "cl-destructuring-bind", + "cl-do", "cl-do*", "cl-do-all-symbols", "cl-do-symbols", "cl-dolist", + "cl-dotimes", "cl-ecase", "cl-etypecase", "eval-when", "cl-eval-when", "cl-flet", + "cl-flet*", "cl-function", "cl-incf", "cl-labels", "cl-letf", + "cl-letf*", "cl-load-time-value", "cl-locally", "cl-loop", + "cl-macrolet", "cl-multiple-value-bind", "cl-multiple-value-setq", + "cl-progv", "cl-psetf", "cl-psetq", "cl-pushnew", "cl-remf", + "cl-return", "cl-return-from", "cl-rotatef", "cl-shiftf", + "cl-symbol-macrolet", "cl-tagbody", "cl-the", "cl-typecase", + "combine-after-change-calls", "condition-case-unless-debug", "decf", + "declaim", "declare", "declare-function", "def-edebug-spec", + "defadvice", "defclass", "defcustom", "defface", "defgeneric", + "defgroup", "define-advice", "define-alternatives", + "define-compiler-macro", "define-derived-mode", "define-generic-mode", + "define-global-minor-mode", "define-globalized-minor-mode", + "define-minor-mode", "define-modify-macro", + "define-obsolete-face-alias", "define-obsolete-function-alias", + "define-obsolete-variable-alias", "define-setf-expander", + "define-skeleton", "defmacro", "defmethod", "defsetf", "defstruct", + "defsubst", "deftheme", "deftype", "defun", "defvar-local", + "delay-mode-hooks", "destructuring-bind", "do", "do*", + "do-all-symbols", "do-symbols", "dolist", "dont-compile", "dotimes", + "dotimes-with-progress-reporter", "ecase", "ert-deftest", "etypecase", + "eval-and-compile", "eval-when-compile", "flet", "ignore-errors", + "incf", "labels", "lambda", "letrec", "lexical-let", "lexical-let*", + "loop", "multiple-value-bind", "multiple-value-setq", "noreturn", + "oref", 
"oref-default", "oset", "oset-default", "pcase", + "pcase-defmacro", "pcase-dolist", "pcase-exhaustive", "pcase-let", + "pcase-let*", "pop", "psetf", "psetq", "push", "pushnew", "remf", + "return", "rotatef", "rx", "save-match-data", "save-selected-window", + "save-window-excursion", "setf", "setq-local", "shiftf", + "track-mouse", "typecase", "unless", "use-package", "when", + "while-no-input", "with-case-table", "with-category-table", + "with-coding-priority", "with-current-buffer", "with-demoted-errors", + "with-eval-after-load", "with-file-modes", "with-local-quit", + "with-output-to-string", "with-output-to-temp-buffer", + "with-parsed-tramp-file-name", "with-selected-frame", + "with-selected-window", "with-silent-modifications", "with-slots", + "with-syntax-table", "with-temp-buffer", "with-temp-file", + "with-temp-message", "with-timeout", "with-tramp-connection-property", + "with-tramp-file-property", "with-tramp-progress-reporter", + "with-wrapper-hook", "load-time-value", "locally", "macrolet", "progv", + "return-from", + } + + emacsSpecialForms = []string{ + "and", "catch", "cond", "condition-case", "defconst", "defvar", + "function", "if", "interactive", "let", "let*", "or", "prog1", + "prog2", "progn", "quote", "save-current-buffer", "save-excursion", + "save-restriction", "setq", "setq-default", "subr-arity", + "unwind-protect", "while", + } + + emacsBuiltinFunction = []string{ + "%", "*", "+", "-", "/", "/=", "1+", "1-", "<", "<=", "=", ">", ">=", + "Snarf-documentation", "abort-recursive-edit", "abs", + "accept-process-output", "access-file", "accessible-keymaps", "acos", + "active-minibuffer-window", "add-face-text-property", + "add-name-to-file", "add-text-properties", "all-completions", + "append", "apply", "apropos-internal", "aref", "arrayp", "aset", + "ash", "asin", "assoc", "assoc-string", "assq", "atan", "atom", + "autoload", "autoload-do-load", "backtrace", "backtrace--locals", + "backtrace-debug", "backtrace-eval", "backtrace-frame", + 
"backward-char", "backward-prefix-chars", "barf-if-buffer-read-only", + "base64-decode-region", "base64-decode-string", + "base64-encode-region", "base64-encode-string", "beginning-of-line", + "bidi-find-overridden-directionality", "bidi-resolved-levels", + "bitmap-spec-p", "bobp", "bolp", "bool-vector", + "bool-vector-count-consecutive", "bool-vector-count-population", + "bool-vector-exclusive-or", "bool-vector-intersection", + "bool-vector-not", "bool-vector-p", "bool-vector-set-difference", + "bool-vector-subsetp", "bool-vector-union", "boundp", + "buffer-base-buffer", "buffer-chars-modified-tick", + "buffer-enable-undo", "buffer-file-name", "buffer-has-markers-at", + "buffer-list", "buffer-live-p", "buffer-local-value", + "buffer-local-variables", "buffer-modified-p", "buffer-modified-tick", + "buffer-name", "buffer-size", "buffer-string", "buffer-substring", + "buffer-substring-no-properties", "buffer-swap-text", "bufferp", + "bury-buffer-internal", "byte-code", "byte-code-function-p", + "byte-to-position", "byte-to-string", "byteorder", + "call-interactively", "call-last-kbd-macro", "call-process", + "call-process-region", "cancel-kbd-macro-events", "capitalize", + "capitalize-region", "capitalize-word", "car", "car-less-than-car", + "car-safe", "case-table-p", "category-docstring", + "category-set-mnemonics", "category-table", "category-table-p", + "ccl-execute", "ccl-execute-on-string", "ccl-program-p", "cdr", + "cdr-safe", "ceiling", "char-after", "char-before", + "char-category-set", "char-charset", "char-equal", "char-or-string-p", + "char-resolve-modifiers", "char-syntax", "char-table-extra-slot", + "char-table-p", "char-table-parent", "char-table-range", + "char-table-subtype", "char-to-string", "char-width", "characterp", + "charset-after", "charset-id-internal", "charset-plist", + "charset-priority-list", "charsetp", "check-coding-system", + "check-coding-systems-region", "clear-buffer-auto-save-failure", + "clear-charset-maps", "clear-face-cache", 
"clear-font-cache", + "clear-image-cache", "clear-string", "clear-this-command-keys", + "close-font", "clrhash", "coding-system-aliases", + "coding-system-base", "coding-system-eol-type", "coding-system-p", + "coding-system-plist", "coding-system-priority-list", + "coding-system-put", "color-distance", "color-gray-p", + "color-supported-p", "combine-after-change-execute", + "command-error-default-function", "command-remapping", "commandp", + "compare-buffer-substrings", "compare-strings", + "compare-window-configurations", "completing-read", + "compose-region-internal", "compose-string-internal", + "composition-get-gstring", "compute-motion", "concat", "cons", + "consp", "constrain-to-field", "continue-process", + "controlling-tty-p", "coordinates-in-window-p", "copy-alist", + "copy-category-table", "copy-file", "copy-hash-table", "copy-keymap", + "copy-marker", "copy-sequence", "copy-syntax-table", "copysign", + "cos", "current-active-maps", "current-bidi-paragraph-direction", + "current-buffer", "current-case-table", "current-column", + "current-global-map", "current-idle-time", "current-indentation", + "current-input-mode", "current-local-map", "current-message", + "current-minor-mode-maps", "current-time", "current-time-string", + "current-time-zone", "current-window-configuration", + "cygwin-convert-file-name-from-windows", + "cygwin-convert-file-name-to-windows", "daemon-initialized", + "daemonp", "dbus--init-bus", "dbus-get-unique-name", + "dbus-message-internal", "debug-timer-check", "declare-equiv-charset", + "decode-big5-char", "decode-char", "decode-coding-region", + "decode-coding-string", "decode-sjis-char", "decode-time", + "default-boundp", "default-file-modes", "default-printer-name", + "default-toplevel-value", "default-value", "define-category", + "define-charset-alias", "define-charset-internal", + "define-coding-system-alias", "define-coding-system-internal", + "define-fringe-bitmap", "define-hash-table-test", "define-key", + 
"define-prefix-command", "delete", + "delete-all-overlays", "delete-and-extract-region", "delete-char", + "delete-directory-internal", "delete-field", "delete-file", + "delete-frame", "delete-other-windows-internal", "delete-overlay", + "delete-process", "delete-region", "delete-terminal", + "delete-window-internal", "delq", "describe-buffer-bindings", + "describe-vector", "destroy-fringe-bitmap", "detect-coding-region", + "detect-coding-string", "ding", "directory-file-name", + "directory-files", "directory-files-and-attributes", "discard-input", + "display-supports-face-attributes-p", "do-auto-save", "documentation", + "documentation-property", "downcase", "downcase-region", + "downcase-word", "draw-string", "dump-colors", "dump-emacs", + "dump-face", "dump-frame-glyph-matrix", "dump-glyph-matrix", + "dump-glyph-row", "dump-redisplay-history", "dump-tool-bar-row", + "elt", "emacs-pid", "encode-big5-char", "encode-char", + "encode-coding-region", "encode-coding-string", "encode-sjis-char", + "encode-time", "end-kbd-macro", "end-of-line", "eobp", "eolp", "eq", + "eql", "equal", "equal-including-properties", "erase-buffer", + "error-message-string", "eval", "eval-buffer", "eval-region", + "event-convert-list", "execute-kbd-macro", "exit-recursive-edit", + "exp", "expand-file-name", "expt", "external-debugging-output", + "face-attribute-relative-p", "face-attributes-as-vector", "face-font", + "fboundp", "fceiling", "fetch-bytecode", "ffloor", + "field-beginning", "field-end", "field-string", + "field-string-no-properties", "file-accessible-directory-p", + "file-acl", "file-attributes", "file-attributes-lessp", + "file-directory-p", "file-executable-p", "file-exists-p", + "file-locked-p", "file-modes", "file-name-absolute-p", + "file-name-all-completions", "file-name-as-directory", + "file-name-completion", "file-name-directory", + "file-name-nondirectory", "file-newer-than-file-p", "file-readable-p", + "file-regular-p", "file-selinux-context", "file-symlink-p", + 
"file-system-info", "file-system-info", "file-writable-p", + "fillarray", "find-charset-region", "find-charset-string", + "find-coding-systems-region-internal", "find-composition-internal", + "find-file-name-handler", "find-font", "find-operation-coding-system", + "float", "float-time", "floatp", "floor", "fmakunbound", + "following-char", "font-at", "font-drive-otf", "font-face-attributes", + "font-family-list", "font-get", "font-get-glyphs", + "font-get-system-font", "font-get-system-normal-font", "font-info", + "font-match-p", "font-otf-alternates", "font-put", + "font-shape-gstring", "font-spec", "font-variation-glyphs", + "font-xlfd-name", "fontp", "fontset-font", "fontset-info", + "fontset-list", "fontset-list-all", "force-mode-line-update", + "force-window-update", "format", "format-mode-line", + "format-network-address", "format-time-string", "forward-char", + "forward-comment", "forward-line", "forward-word", + "frame-border-width", "frame-bottom-divider-width", + "frame-can-run-window-configuration-change-hook", "frame-char-height", + "frame-char-width", "frame-face-alist", "frame-first-window", + "frame-focus", "frame-font-cache", "frame-fringe-width", "frame-list", + "frame-live-p", "frame-or-buffer-changed-p", "frame-parameter", + "frame-parameters", "frame-pixel-height", "frame-pixel-width", + "frame-pointer-visible-p", "frame-right-divider-width", + "frame-root-window", "frame-scroll-bar-height", + "frame-scroll-bar-width", "frame-selected-window", "frame-terminal", + "frame-text-cols", "frame-text-height", "frame-text-lines", + "frame-text-width", "frame-total-cols", "frame-total-lines", + "frame-visible-p", "framep", "frexp", "fringe-bitmaps-at-pos", + "fround", "fset", "ftruncate", "funcall", "funcall-interactively", + "function-equal", "functionp", "gap-position", "gap-size", + "garbage-collect", "gc-status", "generate-new-buffer-name", "get", + "get-buffer", "get-buffer-create", "get-buffer-process", + "get-buffer-window", "get-byte", 
"get-char-property", + "get-char-property-and-overlay", "get-file-buffer", "get-file-char", + "get-internal-run-time", "get-load-suffixes", "get-pos-property", + "get-process", "get-screen-color", "get-text-property", + "get-unicode-property-internal", "get-unused-category", + "get-unused-iso-final-char", "getenv-internal", "gethash", + "gfile-add-watch", "gfile-rm-watch", "global-key-binding", + "gnutls-available-p", "gnutls-boot", "gnutls-bye", "gnutls-deinit", + "gnutls-error-fatalp", "gnutls-error-string", "gnutls-errorp", + "gnutls-get-initstage", "gnutls-peer-status", + "gnutls-peer-status-warning-describe", "goto-char", "gpm-mouse-start", + "gpm-mouse-stop", "group-gid", "group-real-gid", + "handle-save-session", "handle-switch-frame", "hash-table-count", + "hash-table-p", "hash-table-rehash-size", + "hash-table-rehash-threshold", "hash-table-size", "hash-table-test", + "hash-table-weakness", "iconify-frame", "identity", "image-flush", + "image-mask-p", "image-metadata", "image-size", "imagemagick-types", + "imagep", "indent-to", "indirect-function", "indirect-variable", + "init-image-library", "inotify-add-watch", "inotify-rm-watch", + "input-pending-p", "insert", "insert-and-inherit", + "insert-before-markers", "insert-before-markers-and-inherit", + "insert-buffer-substring", "insert-byte", "insert-char", + "insert-file-contents", "insert-startup-screen", "int86", + "integer-or-marker-p", "integerp", "interactive-form", "intern", + "intern-soft", "internal--track-mouse", "internal-char-font", + "internal-complete-buffer", "internal-copy-lisp-face", + "internal-default-process-filter", + "internal-default-process-sentinel", "internal-describe-syntax-value", + "internal-event-symbol-parse-modifiers", + "internal-face-x-get-resource", "internal-get-lisp-face-attribute", + "internal-lisp-face-attribute-values", "internal-lisp-face-empty-p", + "internal-lisp-face-equal-p", "internal-lisp-face-p", + "internal-make-lisp-face", "internal-make-var-non-special", + 
"internal-merge-in-global-face", + "internal-set-alternative-font-family-alist", + "internal-set-alternative-font-registry-alist", + "internal-set-font-selection-order", + "internal-set-lisp-face-attribute", + "internal-set-lisp-face-attribute-from-resource", + "internal-show-cursor", "internal-show-cursor-p", "interrupt-process", + "invisible-p", "invocation-directory", "invocation-name", "isnan", + "iso-charset", "key-binding", "key-description", + "keyboard-coding-system", "keymap-parent", "keymap-prompt", "keymapp", + "keywordp", "kill-all-local-variables", "kill-buffer", "kill-emacs", + "kill-local-variable", "kill-process", "last-nonminibuffer-frame", + "lax-plist-get", "lax-plist-put", "ldexp", "length", + "libxml-parse-html-region", "libxml-parse-xml-region", + "line-beginning-position", "line-end-position", "line-pixel-height", + "list", "list-fonts", "list-system-processes", "listp", "load", + "load-average", "local-key-binding", "local-variable-if-set-p", + "local-variable-p", "locale-info", "locate-file-internal", + "lock-buffer", "log", "logand", "logb", "logior", "lognot", "logxor", + "looking-at", "lookup-image", "lookup-image-map", "lookup-key", + "lower-frame", "lsh", "macroexpand", "make-bool-vector", + "make-byte-code", "make-category-set", "make-category-table", + "make-char", "make-char-table", "make-directory-internal", + "make-frame-invisible", "make-frame-visible", "make-hash-table", + "make-indirect-buffer", "make-keymap", "make-list", + "make-local-variable", "make-marker", "make-network-process", + "make-overlay", "make-serial-process", "make-sparse-keymap", + "make-string", "make-symbol", "make-symbolic-link", "make-temp-name", + "make-terminal-frame", "make-variable-buffer-local", + "make-variable-frame-local", "make-vector", "makunbound", + "map-char-table", "map-charset-chars", "map-keymap", + "map-keymap-internal", "mapatoms", "mapc", "mapcar", "mapconcat", + "maphash", "mark-marker", "marker-buffer", "marker-insertion-type", + 
"marker-position", "markerp", "match-beginning", "match-data", + "match-end", "matching-paren", "max", "max-char", "md5", "member", + "memory-info", "memory-limit", "memory-use-counts", "memq", "memql", + "menu-bar-menu-at-x-y", "menu-or-popup-active-p", + "menu-or-popup-active-p", "merge-face-attribute", "message", + "message-box", "message-or-box", "min", + "minibuffer-completion-contents", "minibuffer-contents", + "minibuffer-contents-no-properties", "minibuffer-depth", + "minibuffer-prompt", "minibuffer-prompt-end", + "minibuffer-selected-window", "minibuffer-window", "minibufferp", + "minor-mode-key-binding", "mod", "modify-category-entry", + "modify-frame-parameters", "modify-syntax-entry", + "mouse-pixel-position", "mouse-position", "move-overlay", + "move-point-visually", "move-to-column", "move-to-window-line", + "msdos-downcase-filename", "msdos-long-file-names", "msdos-memget", + "msdos-memput", "msdos-mouse-disable", "msdos-mouse-enable", + "msdos-mouse-init", "msdos-mouse-p", "msdos-remember-default-colors", + "msdos-set-keyboard", "msdos-set-mouse-buttons", + "multibyte-char-to-unibyte", "multibyte-string-p", "narrow-to-region", + "natnump", "nconc", "network-interface-info", + "network-interface-list", "new-fontset", "newline-cache-check", + "next-char-property-change", "next-frame", "next-overlay-change", + "next-property-change", "next-read-file-uses-dialog-p", + "next-single-char-property-change", "next-single-property-change", + "next-window", "nlistp", "nreverse", "nth", "nthcdr", "null", + "number-or-marker-p", "number-to-string", "numberp", + "open-dribble-file", "open-font", "open-termscript", + "optimize-char-table", "other-buffer", "other-window-for-scrolling", + "overlay-buffer", "overlay-end", "overlay-get", "overlay-lists", + "overlay-properties", "overlay-put", "overlay-recenter", + "overlay-start", "overlayp", "overlays-at", "overlays-in", + "parse-partial-sexp", "play-sound-internal", "plist-get", + "plist-member", "plist-put", 
"point", "point-marker", "point-max", + "point-max-marker", "point-min", "point-min-marker", + "pos-visible-in-window-p", "position-bytes", "posix-looking-at", + "posix-search-backward", "posix-search-forward", "posix-string-match", + "posn-at-point", "posn-at-x-y", "preceding-char", + "prefix-numeric-value", "previous-char-property-change", + "previous-frame", "previous-overlay-change", + "previous-property-change", "previous-single-char-property-change", + "previous-single-property-change", "previous-window", "prin1", + "prin1-to-string", "princ", "print", "process-attributes", + "process-buffer", "process-coding-system", "process-command", + "process-connection", "process-contact", "process-datagram-address", + "process-exit-status", "process-filter", "process-filter-multibyte-p", + "process-id", "process-inherit-coding-system-flag", "process-list", + "process-mark", "process-name", "process-plist", + "process-query-on-exit-flag", "process-running-child-p", + "process-send-eof", "process-send-region", "process-send-string", + "process-sentinel", "process-status", "process-tty-name", + "process-type", "processp", "profiler-cpu-log", + "profiler-cpu-running-p", "profiler-cpu-start", "profiler-cpu-stop", + "profiler-memory-log", "profiler-memory-running-p", + "profiler-memory-start", "profiler-memory-stop", "propertize", + "purecopy", "put", "put-text-property", + "put-unicode-property-internal", "puthash", "query-font", + "query-fontset", "quit-process", "raise-frame", "random", "rassoc", + "rassq", "re-search-backward", "re-search-forward", "read", + "read-buffer", "read-char", "read-char-exclusive", + "read-coding-system", "read-command", "read-event", + "read-from-minibuffer", "read-from-string", "read-function", + "read-key-sequence", "read-key-sequence-vector", + "read-no-blanks-input", "read-non-nil-coding-system", "read-string", + "read-variable", "recent-auto-save-p", "recent-doskeys", + "recent-keys", "recenter", "recursion-depth", "recursive-edit", + 
"redirect-debugging-output", "redirect-frame-focus", "redisplay", + "redraw-display", "redraw-frame", "regexp-quote", "region-beginning", + "region-end", "register-ccl-program", "register-code-conversion-map", + "remhash", "remove-list-of-text-properties", "remove-text-properties", + "rename-buffer", "rename-file", "replace-match", + "reset-this-command-lengths", "resize-mini-window-internal", + "restore-buffer-modified-p", "resume-tty", "reverse", "round", + "run-hook-with-args", "run-hook-with-args-until-failure", + "run-hook-with-args-until-success", "run-hook-wrapped", "run-hooks", + "run-window-configuration-change-hook", "run-window-scroll-functions", + "safe-length", "scan-lists", "scan-sexps", "scroll-down", + "scroll-left", "scroll-other-window", "scroll-right", "scroll-up", + "search-backward", "search-forward", "secure-hash", "select-frame", + "select-window", "selected-frame", "selected-window", + "self-insert-command", "send-string-to-terminal", "sequencep", + "serial-process-configure", "set", "set-buffer", + "set-buffer-auto-saved", "set-buffer-major-mode", + "set-buffer-modified-p", "set-buffer-multibyte", "set-case-table", + "set-category-table", "set-char-table-extra-slot", + "set-char-table-parent", "set-char-table-range", "set-charset-plist", + "set-charset-priority", "set-coding-system-priority", + "set-cursor-size", "set-default", "set-default-file-modes", + "set-default-toplevel-value", "set-file-acl", "set-file-modes", + "set-file-selinux-context", "set-file-times", "set-fontset-font", + "set-frame-height", "set-frame-position", "set-frame-selected-window", + "set-frame-size", "set-frame-width", "set-fringe-bitmap-face", + "set-input-interrupt-mode", "set-input-meta-mode", "set-input-mode", + "set-keyboard-coding-system-internal", "set-keymap-parent", + "set-marker", "set-marker-insertion-type", "set-match-data", + "set-message-beep", "set-minibuffer-window", + "set-mouse-pixel-position", "set-mouse-position", + "set-network-process-option", 
"set-output-flow-control", + "set-process-buffer", "set-process-coding-system", + "set-process-datagram-address", "set-process-filter", + "set-process-filter-multibyte", + "set-process-inherit-coding-system-flag", "set-process-plist", + "set-process-query-on-exit-flag", "set-process-sentinel", + "set-process-window-size", "set-quit-char", + "set-safe-terminal-coding-system-internal", "set-screen-color", + "set-standard-case-table", "set-syntax-table", + "set-terminal-coding-system-internal", "set-terminal-local-value", + "set-terminal-parameter", "set-text-properties", "set-time-zone-rule", + "set-visited-file-modtime", "set-window-buffer", + "set-window-combination-limit", "set-window-configuration", + "set-window-dedicated-p", "set-window-display-table", + "set-window-fringes", "set-window-hscroll", "set-window-margins", + "set-window-new-normal", "set-window-new-pixel", + "set-window-new-total", "set-window-next-buffers", + "set-window-parameter", "set-window-point", "set-window-prev-buffers", + "set-window-redisplay-end-trigger", "set-window-scroll-bars", + "set-window-start", "set-window-vscroll", "setcar", "setcdr", + "setplist", "show-face-resources", "signal", "signal-process", "sin", + "single-key-description", "skip-chars-backward", "skip-chars-forward", + "skip-syntax-backward", "skip-syntax-forward", "sleep-for", "sort", + "sort-charsets", "special-variable-p", "split-char", + "split-window-internal", "sqrt", "standard-case-table", + "standard-category-table", "standard-syntax-table", "start-kbd-macro", + "start-process", "stop-process", "store-kbd-macro-event", "string", + "string-as-multibyte", "string-as-unibyte", "string-bytes", + "string-collate-equalp", "string-collate-lessp", "string-equal", + "string-lessp", "string-make-multibyte", "string-make-unibyte", + "string-match", "string-to-char", "string-to-multibyte", + "string-to-number", "string-to-syntax", "string-to-unibyte", + "string-width", "stringp", "subr-name", "subrp", + 
"subst-char-in-region", "substitute-command-keys", + "substitute-in-file-name", "substring", "substring-no-properties", + "suspend-emacs", "suspend-tty", "suspicious-object", "sxhash", + "symbol-function", "symbol-name", "symbol-plist", "symbol-value", + "symbolp", "syntax-table", "syntax-table-p", "system-groups", + "system-move-file-to-trash", "system-name", "system-users", "tan", + "terminal-coding-system", "terminal-list", "terminal-live-p", + "terminal-local-value", "terminal-name", "terminal-parameter", + "terminal-parameters", "terpri", "test-completion", + "text-char-description", "text-properties-at", "text-property-any", + "text-property-not-all", "this-command-keys", + "this-command-keys-vector", "this-single-command-keys", + "this-single-command-raw-keys", "time-add", "time-less-p", + "time-subtract", "tool-bar-get-system-style", "tool-bar-height", + "tool-bar-pixel-width", "top-level", "trace-redisplay", + "trace-to-stderr", "translate-region-internal", "transpose-regions", + "truncate", "try-completion", "tty-display-color-cells", + "tty-display-color-p", "tty-no-underline", + "tty-suppress-bold-inverse-default-colors", "tty-top-frame", + "tty-type", "type-of", "undo-boundary", "unencodable-char-position", + "unhandled-file-name-directory", "unibyte-char-to-multibyte", + "unibyte-string", "unicode-property-table-internal", "unify-charset", + "unintern", "unix-sync", "unlock-buffer", "upcase", "upcase-initials", + "upcase-initials-region", "upcase-region", "upcase-word", + "use-global-map", "use-local-map", "user-full-name", + "user-login-name", "user-real-login-name", "user-real-uid", + "user-uid", "variable-binding-locus", "vconcat", "vector", + "vector-or-char-table-p", "vectorp", "verify-visited-file-modtime", + "vertical-motion", "visible-frame-list", "visited-file-modtime", + "w16-get-clipboard-data", "w16-selection-exists-p", + "w16-set-clipboard-data", "w32-battery-status", + "w32-default-color-map", "w32-define-rgb-color", + 
"w32-display-monitor-attributes-list", "w32-frame-menu-bar-size", + "w32-frame-rect", "w32-get-clipboard-data", + "w32-get-codepage-charset", "w32-get-console-codepage", + "w32-get-console-output-codepage", "w32-get-current-locale-id", + "w32-get-default-locale-id", "w32-get-keyboard-layout", + "w32-get-locale-info", "w32-get-valid-codepages", + "w32-get-valid-keyboard-layouts", "w32-get-valid-locale-ids", + "w32-has-winsock", "w32-long-file-name", "w32-reconstruct-hot-key", + "w32-register-hot-key", "w32-registered-hot-keys", + "w32-selection-exists-p", "w32-send-sys-command", + "w32-set-clipboard-data", "w32-set-console-codepage", + "w32-set-console-output-codepage", "w32-set-current-locale", + "w32-set-keyboard-layout", "w32-set-process-priority", + "w32-shell-execute", "w32-short-file-name", "w32-toggle-lock-key", + "w32-unload-winsock", "w32-unregister-hot-key", "w32-window-exists-p", + "w32notify-add-watch", "w32notify-rm-watch", + "waiting-for-user-input-p", "where-is-internal", "widen", + "widget-apply", "widget-get", "widget-put", + "window-absolute-pixel-edges", "window-at", "window-body-height", + "window-body-width", "window-bottom-divider-width", "window-buffer", + "window-combination-limit", "window-configuration-frame", + "window-configuration-p", "window-dedicated-p", + "window-display-table", "window-edges", "window-end", "window-frame", + "window-fringes", "window-header-line-height", "window-hscroll", + "window-inside-absolute-pixel-edges", "window-inside-edges", + "window-inside-pixel-edges", "window-left-child", + "window-left-column", "window-line-height", "window-list", + "window-list-1", "window-live-p", "window-margins", + "window-minibuffer-p", "window-mode-line-height", "window-new-normal", + "window-new-pixel", "window-new-total", "window-next-buffers", + "window-next-sibling", "window-normal-size", "window-old-point", + "window-parameter", "window-parameters", "window-parent", + "window-pixel-edges", "window-pixel-height", 
"window-pixel-left", + "window-pixel-top", "window-pixel-width", "window-point", + "window-prev-buffers", "window-prev-sibling", + "window-redisplay-end-trigger", "window-resize-apply", + "window-resize-apply-total", "window-right-divider-width", + "window-scroll-bar-height", "window-scroll-bar-width", + "window-scroll-bars", "window-start", "window-system", + "window-text-height", "window-text-pixel-size", "window-text-width", + "window-top-child", "window-top-line", "window-total-height", + "window-total-width", "window-use-time", "window-valid-p", + "window-vscroll", "windowp", "write-char", "write-region", + "x-backspace-delete-keys-p", "x-change-window-property", + "x-change-window-property", "x-close-connection", + "x-close-connection", "x-create-frame", "x-create-frame", + "x-delete-window-property", "x-delete-window-property", + "x-disown-selection-internal", "x-display-backing-store", + "x-display-backing-store", "x-display-color-cells", + "x-display-color-cells", "x-display-grayscale-p", + "x-display-grayscale-p", "x-display-list", "x-display-list", + "x-display-mm-height", "x-display-mm-height", "x-display-mm-width", + "x-display-mm-width", "x-display-monitor-attributes-list", + "x-display-pixel-height", "x-display-pixel-height", + "x-display-pixel-width", "x-display-pixel-width", "x-display-planes", + "x-display-planes", "x-display-save-under", "x-display-save-under", + "x-display-screens", "x-display-screens", "x-display-visual-class", + "x-display-visual-class", "x-family-fonts", "x-file-dialog", + "x-file-dialog", "x-file-dialog", "x-focus-frame", "x-frame-geometry", + "x-frame-geometry", "x-get-atom-name", "x-get-resource", + "x-get-selection-internal", "x-hide-tip", "x-hide-tip", + "x-list-fonts", "x-load-color-file", "x-menu-bar-open-internal", + "x-menu-bar-open-internal", "x-open-connection", "x-open-connection", + "x-own-selection-internal", "x-parse-geometry", "x-popup-dialog", + "x-popup-menu", "x-register-dnd-atom", "x-select-font", + 
"x-select-font", "x-selection-exists-p", "x-selection-owner-p", + "x-send-client-message", "x-server-max-request-size", + "x-server-max-request-size", "x-server-vendor", "x-server-vendor", + "x-server-version", "x-server-version", "x-show-tip", "x-show-tip", + "x-synchronize", "x-synchronize", "x-uses-old-gtk-dialog", + "x-window-property", "x-window-property", "x-wm-set-size-hint", + "xw-color-defined-p", "xw-color-defined-p", "xw-color-values", + "xw-color-values", "xw-display-color-p", "xw-display-color-p", + "yes-or-no-p", "zlib-available-p", "zlib-decompress-region", + "forward-point", + } + + emacsBuiltinFunctionHighlighted = []string{ + "defvaralias", "provide", "require", + "with-no-warnings", "define-widget", "with-electric-help", + "throw", "defalias", "featurep", + } + + emacsLambdaListKeywords = []string{ + "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional", + "&rest", "&whole", + } + + emacsErrorKeywords = []string{ + "cl-assert", "cl-check-type", "error", "signal", + "user-error", "warn", + } +) + +// EmacsLisp lexer. 
+var EmacsLisp = Register(TypeRemappingLexer(MustNewXMLLexer( + embedded, + "embedded/emacslisp.xml", +), TypeMapping{ + {NameVariable, NameFunction, emacsBuiltinFunction}, + {NameVariable, NameBuiltin, emacsSpecialForms}, + {NameVariable, NameException, emacsErrorKeywords}, + {NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)}, + {NameVariable, KeywordPseudo, emacsLambdaListKeywords}, +})) diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml new file mode 100644 index 0000000..e8140b7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml @@ -0,0 +1,154 @@ + + + ABAP + abap + *.abap + *.ABAP + text/x-abap + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml new file mode 100644 index 0000000..3ffd51c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml @@ -0,0 +1,66 @@ + + + ABNF + abnf + *.abnf + text/x-abnf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml new file mode 100644 index 0000000..d6727a1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml @@ -0,0 +1,68 @@ + + + ActionScript + as + actionscript + *.as + application/x-actionscript + text/x-actionscript + text/actionscript 
+ true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml new file mode 100644 index 0000000..e5f6538 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml @@ -0,0 +1,163 @@ + + + ActionScript 3 + as3 + actionscript3 + *.as + application/x-actionscript3 + text/x-actionscript3 + text/actionscript3 + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml new file mode 100644 index 0000000..5854a20 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml @@ -0,0 +1,321 @@ + + + Ada + ada + ada95 + ada2005 + *.adb + *.ads + *.ada + text/x-ada + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml new file mode 100644 index 0000000..6f2b2d5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml @@ -0,0 +1,66 @@ + + + Agda + agda + *.agda + text/x-agda + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml new file mode 100644 index 0000000..30bad5a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml @@ -0,0 +1,75 @@ + + + AL + al + *.al + *.dal + text/x-al + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml new file mode 100644 index 0000000..1de9ea6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml @@ -0,0 +1,58 @@ + + + + Alloy + alloy + *.als + text/x-alloy + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml new file mode 100644 index 0000000..84fe20b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml @@ -0,0 +1,108 @@ + + + Angular2 + ng2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml new file mode 100644 index 0000000..e57edd4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml @@ -0,0 +1,317 @@ + + + ANTLR + antlr + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml new file mode 100644 index 0000000..7643541 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml @@ -0,0 +1,74 @@ + + + ApacheConf + apacheconf + aconf + apache + .htaccess + apache.conf + apache2.conf + text/x-apacheconf + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml new file mode 100644 index 0000000..959448c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml @@ -0,0 +1,59 @@ + + + APL + apl + *.apl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml new file mode 100644 index 0000000..1de6c67 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml @@ -0,0 +1,130 @@ + + + AppleScript + applescript + *.applescript + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml new file mode 100644 index 0000000..e711973 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml @@ -0,0 +1,174 @@ + + + ArangoDB AQL + aql + *.aql + text/x-aql + true + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml new file mode 100644 index 0000000..00399c2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml @@ -0,0 +1,309 @@ + + + Arduino + arduino + *.ino + text/x-arduino + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml new file mode 100644 index 0000000..e5966cf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml @@ -0,0 +1,126 @@ + + + ArmAsm + armasm + *.s + *.S + text/x-armasm + text/x-asm + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml new file mode 100644 index 0000000..6ec94ed --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml @@ -0,0 +1,78 @@ + + + + AutoHotkey + autohotkey + ahk + *.ahk + *.ahkl + text/x-autohotkey + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml new file mode 100644 index 0000000..1f7e15d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml @@ -0,0 +1,70 @@ + + + + AutoIt + autoit + *.au3 + text/x-autoit + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml new file mode 100644 index 0000000..07476ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml @@ -0,0 +1,95 @@ + + + Awk + awk + gawk + mawk + nawk + *.awk + application/x-awk + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml new file mode 100644 index 0000000..d13c123 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml @@ -0,0 +1,97 @@ + + + Ballerina + ballerina + *.bal + text/x-ballerina + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml new file mode 100644 index 0000000..d704a8f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml @@ -0,0 +1,220 @@ + + + Bash + bash + sh + ksh + zsh + shell + *.sh + *.ksh + *.bash + *.ebuild + *.eclass + .env + *.env + *.exheres-0 + *.exlib + *.zsh + *.zshrc + .bashrc + bashrc + .bash_* + bash_* + zshrc + .zshrc + PKGBUILD + application/x-sh + application/x-shellscript + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml new file mode 100644 index 0000000..82c5fd6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml @@ -0,0 +1,25 @@ + + + Bash Session + bash-session + console + shell-session + *.sh-session + text/x-sh + true + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml new file mode 100644 index 0000000..d3e0627 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml @@ -0,0 +1,660 @@ + + + Batchfile + bat + batch + dosbatch + winbatch + *.bat + *.cmd + application/x-dos-batch + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml new file mode 100644 index 0000000..8fde161 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml @@ -0,0 +1,152 @@ + + + BibTeX + bib + bibtex + *.bib + text/x-bibtex + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml new file mode 100644 index 0000000..db90f31 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml @@ -0,0 +1,84 @@ + + + Bicep + bicep + *.bicep + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml new file mode 100644 index 0000000..591b1ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml @@ -0,0 +1,141 @@ + + + BlitzBasic + blitzbasic + b3d + bplus + *.bb + *.decls + text/x-bb + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml new file mode 100644 index 0000000..5c98424 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml @@ -0,0 +1,28 @@ + + + BNF + bnf + *.bnf + text/x-bnf + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml new file mode 100644 index 0000000..c1090ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml @@ -0,0 +1,83 @@ + + + BQN + bqn + *.bqn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml new file mode 100644 index 0000000..4c84c33 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml @@ -0,0 +1,51 @@ + + + Brainfuck + brainfuck + bf + *.bf + *.b + application/x-brainfuck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml new file mode 100644 index 0000000..f1e21db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml @@ -0,0 +1,121 @@ + + + C# + csharp + c# + *.cs + text/x-csharp + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml new file mode 100644 index 0000000..680a19a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml @@ -0,0 +1,331 @@ + + + C++ + cpp + c++ + *.cpp + *.hpp + *.c++ + *.h++ + *.cc + *.hh + *.cxx + *.hxx + *.C + *.H + *.cp + *.CPP + *.tpp + text/x-c++hdr + text/x-c++src + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml new file mode 100644 index 0000000..35ee32d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml @@ -0,0 +1,260 @@ + + + C + c + *.c + *.h + *.idc + *.x[bp]m + text/x-chdr + text/x-csrc + image/x-xbitmap + image/x-xpixmap + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml new file mode 100644 index 0000000..3e7d147 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml @@ -0,0 +1,122 @@ + + + Cap'n Proto + capnp + *.capnp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml new file mode 100644 index 0000000..1a78f99 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml @@ -0,0 +1,137 @@ + + + Cassandra CQL + cassandra + cql + *.cql + text/x-cql + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + 6 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml new file mode 100644 index 0000000..4c41218 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml @@ -0,0 +1,151 @@ + + + Ceylon + ceylon + *.ceylon + text/x-ceylon + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml new file mode 100644 index 0000000..4950305 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml @@ -0,0 +1,197 @@ + + + CFEngine3 + cfengine3 + cf3 + *.cf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml new file mode 100644 index 0000000..46a84cf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml @@ -0,0 +1,92 @@ + + + cfstatement + cfs + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml new file mode 100644 index 0000000..860439a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml @@ -0,0 +1,134 @@ + + + ChaiScript + chai + chaiscript + *.chai + text/x-chaiscript + application/x-chaiscript + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff 
--git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml new file mode 100644 index 0000000..c89cafc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml @@ -0,0 +1,143 @@ + + + Chapel + chapel + chpl + *.chpl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml new file mode 100644 index 0000000..284457c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml @@ -0,0 +1,55 @@ + + + Cheetah + cheetah + spitfire + *.tmpl + *.spt + application/x-cheetah + application/x-spitfire + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml new file mode 100644 index 0000000..967ba39 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml @@ -0,0 +1,71 @@ + + + Clojure + clojure + clj + edn + *.clj + *.edn + text/x-clojure + application/x-clojure + application/edn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml new file mode 100644 index 0000000..b041cfd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml @@ -0,0 +1,90 @@ + + + CMake + cmake + *.cmake + 
CMakeLists.txt + text/x-cmake + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml new file mode 100644 index 0000000..a8a8029 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml @@ -0,0 +1,90 @@ + + + COBOL + cobol + *.cob + *.COB + *.cpy + *.CPY + text/x-cobol + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml new file mode 100644 index 0000000..e29722f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml @@ -0,0 +1,210 @@ + + + CoffeeScript + coffee-script + coffeescript + coffee + *.coffee + text/coffeescript + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml new file mode 100644 index 0000000..0fb9a7a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml @@ -0,0 +1,184 @@ + + + Common Lisp + common-lisp + cl + lisp + *.cl + *.lisp + 
text/x-common-lisp + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml new file mode 100644 index 0000000..62f64ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml @@ -0,0 +1,136 @@ + + + Coq + coq + *.v + text/x-coq + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml new file mode 100644 index 0000000..94853db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml @@ -0,0 +1,762 @@ + + + Crystal + cr + crystal + *.cr + text/x-crystal + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml new file mode 100644 index 0000000..6e370c7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml @@ -0,0 +1,323 @@ + + + CSS + css + *.css + text/css + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml new file mode 100644 index 0000000..d6c3ea7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml @@ -0,0 +1,85 @@ + + + CUE + cue + *.cue + text/x-cue + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml new file mode 100644 index 0000000..15dfe4d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml @@ -0,0 +1,372 @@ + + + Cython + cython + pyx + pyrex + *.pyx + *.pxd + *.pxi + text/x-cython + application/x-cython + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml new file mode 100644 index 0000000..3c030e2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml @@ -0,0 +1,133 @@ + + + D + d + *.d + *.di + text/x-d + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml new file mode 100644 index 0000000..f1b454f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml @@ -0,0 +1,213 @@ + + + Dart + dart + *.dart + text/x-dart + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml new file mode 100644 index 0000000..2bb3a1a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml @@ -0,0 +1,39 @@ + + + Dax + dax + *.dax + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml new file mode 100644 index 0000000..ad71ad4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml @@ -0,0 +1,17 @@ + + + Desktop file + desktop + desktop_entry + *.desktop + application/x-desktop + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml new file mode 100644 index 0000000..dc0beb7 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml @@ -0,0 +1,52 @@ + + + Diff + diff + udiff + *.diff + *.patch + text/x-diff + text/x-patch + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml new file mode 100644 index 0000000..3c97c22 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml @@ -0,0 +1,153 @@ + + + Django/Jinja + django + jinja + application/x-django-templating + application/x-jinja + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml new file mode 100644 index 0000000..ef8f663 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml @@ -0,0 +1,44 @@ + + + + dns + zone + bind + *.zone + text/dns + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml new file mode 100644 index 0000000..a73c52c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml @@ -0,0 +1,57 @@ + + + Docker + docker + dockerfile + Dockerfile + Dockerfile.* + *.Dockerfile + *.docker + text/x-dockerfile-config + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml new file mode 100644 index 0000000..0edbbde --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml @@ -0,0 +1,168 @@ + + + DTD + dtd + *.dtd + application/xml-dtd + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml new file mode 100644 index 0000000..3660d14 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml @@ -0,0 +1,176 @@ + + + Dylan + dylan + *.dylan + *.dyl + *.intr + text/x-dylan + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml new file mode 100644 index 0000000..df5d62f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml @@ -0,0 +1,90 @@ + + + EBNF + ebnf + *.ebnf + text/x-ebnf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml new file mode 100644 index 0000000..286f53a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml @@ -0,0 +1,744 @@ + + + Elixir + elixir + ex + exs + *.ex + *.eex + *.exs + text/x-elixir + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml new 
file mode 100644 index 0000000..ed65efc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml @@ -0,0 +1,119 @@ + + + Elm + elm + *.elm + text/x-elm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml new file mode 100644 index 0000000..668bc62 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml @@ -0,0 +1,132 @@ + + + EmacsLisp + emacs + elisp + emacs-lisp + *.el + text/x-elisp + application/x-elisp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml new file mode 100644 index 0000000..b186588 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml @@ -0,0 +1,166 @@ + + + Erlang + erlang + *.erl + *.hrl + *.es + *.escript + text/x-erlang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml new file mode 100644 index 0000000..4743b9a 
--- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml @@ -0,0 +1,412 @@ + + + Factor + factor + *.factor + text/x-factor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml new file mode 100644 index 0000000..b9b6d59 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml @@ -0,0 +1,68 @@ + + + Fennel + fennel + fnl + *.fennel + text/x-fennel + application/x-fennel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml new file mode 100644 index 0000000..deb7814 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml @@ -0,0 +1,159 @@ + + + Fish + fish + fishshell + *.fish + *.load + application/x-fish + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml new file mode 100644 index 0000000..31096a2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml @@ -0,0 +1,78 @@ + + + Forth + forth + *.frt + *.fth + *.fs + application/x-forth + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml new file mode 100644 index 0000000..6140e70 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml @@ -0,0 +1,102 @@ + + + Fortran + fortran + f90 + *.f03 + *.f90 + *.f95 + *.F03 + *.F90 + *.F95 + text/x-fortran + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml new file mode 100644 index 0000000..11343c0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml @@ -0,0 +1,71 @@ + + + FortranFixed + fortranfixed + *.f + *.F + text/x-fortran + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml 
new file mode 100644 index 0000000..e1c19ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml @@ -0,0 +1,245 @@ + + + FSharp + fsharp + *.fs + *.fsi + text/x-fsharp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml new file mode 100644 index 0000000..7557bce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml @@ -0,0 +1,150 @@ + + + GAS + gas + asm + *.s + *.S + text/x-gas + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml new file mode 100644 index 0000000..811f38d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml @@ -0,0 +1,259 @@ + + + GDScript + gdscript + gd + *.gd + text/x-gdscript + application/x-gdscript + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml new file mode 100644 index 0000000..b50c9dd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml @@ -0,0 +1,270 @@ + + + GDScript3 + gdscript3 + gd3 + *.gd + text/x-gdscript + application/x-gdscript + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml new file mode 100644 index 0000000..c53a2cb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml @@ -0,0 +1,263 @@ + + + Gherkin + cucumber + Cucumber + gherkin + Gherkin + *.feature + *.FEATURE + text/x-gherkin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
\ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml new file mode 100644 index 0000000..ca0b696 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml @@ -0,0 +1,65 @@ + + + GLSL + glsl + *.vert + *.frag + *.geo + text/x-glslsrc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml new file mode 100644 index 0000000..ee6a245 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml @@ -0,0 +1,289 @@ + + + Gnuplot + gnuplot + *.plot + *.plt + text/x-gnuplot + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml new file mode 100644 index 0000000..36f737b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml @@ -0,0 +1,114 @@ + + + Go Template + go-template + *.gotmpl + *.go.tmpl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml new file mode 100644 index 0000000..b062273 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml @@ -0,0 +1,88 @@ + + + GraphQL + graphql + graphqls + gql + *.graphql + *.graphqls + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml new file mode 100644 index 0000000..3af0a43 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml @@ -0,0 +1,90 @@ + + + Groff + groff + nroff + man + *.[1-9] + *.1p + *.3pm + *.man + application/x-troff + text/troff + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml new file mode 100644 index 0000000..3cca2e9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml @@ -0,0 +1,135 @@ + + + Groovy + groovy + *.groovy + *.gradle + text/x-groovy + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml new file mode 100644 
index 0000000..7cf2a64 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml @@ -0,0 +1,147 @@ + + + Handlebars + handlebars + hbs + *.handlebars + *.hbs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml new file mode 100644 index 0000000..ea63642 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml @@ -0,0 +1,98 @@ + + + Hare + hare + *.ha + text/x-hare + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml new file mode 100644 index 0000000..5f805d6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml @@ -0,0 +1,272 @@ + + + Haskell + haskell + hs + *.hs + text/x-haskell + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml new file mode 100644 index 0000000..d3ed208 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml @@ -0,0 +1,143 @@ + + + HCL + hcl + *.hcl + application/x-hcl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml new file mode 100644 index 0000000..a6f28ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml @@ -0,0 +1,189 @@ + + + Hexdump + hexdump + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml new file mode 100644 index 0000000..64e667d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml @@ -0,0 +1,149 @@ + + + HLB + hlb + *.hlb + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml new file mode 100644 index 0000000..41ab323 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml @@ -0,0 +1,110 @@ + + + HLSL + hlsl + *.hlsl + *.hlsli + *.cginc + *.fx + *.fxh + text/x-hlsl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml new file mode 100644 index 0000000..cd2d9d1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml @@ -0,0 +1,252 @@ + + + HolyC + holyc + *.HC + *.hc + *.HH + *.hh + *.hc.z + *.HC.Z + text/x-chdr + text/x-csrc + image/x-xbitmap + image/x-xpixmap + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml new file mode 100644 index 0000000..2f1a8a9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml @@ -0,0 +1,159 @@ + + + HTML + html + *.html + *.htm + *.xhtml + *.xslt + text/html + application/xhtml+xml + true + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml new file mode 100644 index 0000000..a0dae46 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml @@ -0,0 +1,104 @@ + + + Hy + hylang + *.hy + text/x-hy + application/x-hy + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml new file mode 100644 index 0000000..9592d88 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml @@ -0,0 +1,216 @@ + + + Idris + idris + idr + *.idr + text/x-idris + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml new file mode 100644 index 0000000..1cc0205 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml @@ -0,0 +1,47 @@ + + + Igor + igor + igorpro + *.ipf + text/ipf + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml new file mode 100644 index 0000000..08f3870 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml @@ -0,0 +1,45 @@ + + + INI + ini + cfg + dosini + *.ini + *.cfg + *.inf + *.service + *.socket + .gitconfig + .editorconfig + pylintrc + .pylintrc + text/x-ini + text/inf + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml new file mode 100644 index 0000000..9ad94fa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml @@ -0,0 +1,71 @@ + + + Io + io + *.io + text/x-iosrc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml new file mode 100644 index 0000000..645cb05 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml @@ -0,0 +1,96 @@ + + + ISCdhcpd + iscdhcpd + dhcpd.conf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml new file mode 100644 index 0000000..872d081 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml @@ -0,0 +1,157 @@ + + + J + j + *.ijs + text/x-j + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml new file mode 100644 index 0000000..3ce33ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml @@ -0,0 +1,193 @@ + + + Java + java + *.java + text/x-java + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml new file mode 100644 index 0000000..efe80ed --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml @@ -0,0 +1,160 @@ + + + JavaScript + js + javascript + *.js + *.jsm + *.mjs + *.cjs + application/javascript + application/x-javascript + text/x-javascript + text/javascript + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml new file mode 100644 index 0000000..3473cfd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml @@ -0,0 +1,111 @@ + + + JSON + json + *.json + *.avsc + application/json + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml new file mode 100644 index 0000000..776dcdb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml @@ -0,0 +1,400 @@ + + + Julia + julia + jl + *.jl + text/x-julia + application/x-julia + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml new file mode 100644 index 0000000..92c785d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml @@ -0,0 +1,98 @@ + + + Jungle + jungle + *.jungle + text/x-jungle + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml new file mode 100644 index 0000000..09c638a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml @@ -0,0 +1,223 @@ + + + Kotlin + kotlin + *.kt + text/x-kotlin + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml new file mode 100644 index 0000000..1319e5c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml @@ -0,0 +1,42 @@ + + + Lighttpd configuration file + lighty + lighttpd + text/x-lighttpd-conf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml new file mode 100644 index 0000000..f24f152 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml @@ -0,0 +1,73 @@ + + + LLVM + llvm + *.ll + text/x-llvm + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml new file mode 100644 index 0000000..903d458 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml @@ -0,0 +1,158 @@ + + + Lua + lua + *.lua + *.wlua + text/x-lua + application/x-lua + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml new file mode 100644 index 0000000..a82a7f8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml @@ -0,0 +1,131 @@ + + + Makefile + make + makefile + mf + bsdmake + *.mak + *.mk + Makefile + makefile + Makefile.* + GNUmakefile + BSDmakefile + Justfile + justfile + .justfile + text/x-makefile + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml new file mode 100644 index 0000000..7824140 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml @@ -0,0 +1,120 @@ + + + Mako + mako + *.mao + application/x-mako + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml new file mode 100644 index 0000000..5873f2a --- 
/dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml @@ -0,0 +1,89 @@ + + + Mason + mason + *.m + *.mhtml + *.mc + *.mi + autohandler + dhandler + application/x-mason + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml new file mode 100644 index 0000000..7b22a46 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml @@ -0,0 +1,155 @@ + + + Materialize SQL dialect + materialize + mzsql + text/x-materializesql + true + true + + + + + + + + + + + + + + + + + + + 6 + 12 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 12 + 4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml new file mode 100644 index 0000000..0b8dfb6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml @@ -0,0 +1,60 @@ + + + Mathematica + mathematica + mma + nb + *.cdf + *.m + *.ma + *.mt + *.mx + *.nb + *.nbp + *.wl + application/mathematica + application/vnd.wolfram.mathematica + application/vnd.wolfram.mathematica.package + application/vnd.wolfram.cdf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml new file mode 100644 index 
0000000..ebb4e2c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml @@ -0,0 +1,114 @@ + + + Matlab + matlab + *.m + text/matlab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml new file mode 100644 index 0000000..3310520 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml @@ -0,0 +1,182 @@ + + + mcfunction + mcfunction + *.mcfunction + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml new file mode 100644 index 0000000..130047d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml @@ -0,0 +1,85 @@ + + + Meson + meson + meson.build + meson.build + meson_options.txt + text/x-meson + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml new file mode 100644 index 0000000..62d04ba --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml @@ -0,0 +1,270 @@ + + + Metal + metal + 
*.metal + text/x-metal + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml new file mode 100644 index 0000000..1ad6860 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml @@ -0,0 +1,82 @@ + + + MiniZinc + minizinc + MZN + mzn + *.mzn + *.dzn + *.fzn + text/minizinc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml new file mode 100644 index 0000000..025c3dc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml @@ -0,0 +1,73 @@ + + + MLIR + mlir + *.mlir + text/x-mlir + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml new file mode 100644 index 0000000..0bf37bc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml @@ -0,0 +1,245 @@ + + + Modula-2 + modula2 + m2 + *.def + *.mod + text/x-modula2 + true + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml new file mode 100644 index 0000000..7445a63 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml @@ -0,0 +1,153 @@ + + + MonkeyC + monkeyc + *.mc + text/x-monkeyc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml new file mode 100644 index 0000000..724a19f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml @@ -0,0 +1,90 @@ + + + MorrowindScript + morrowind + mwscript + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml new file mode 100644 index 0000000..6d03917 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml @@ -0,0 +1,77 @@ + + + Myghty + myghty + *.myt + autodelegate + application/x-myghty + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml new file mode 100644 index 0000000..b6c2046 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml @@ -0,0 +1,121 @@ + + + MySQL + mysql + mariadb + *.sql + text/x-mysql + text/x-mariadb + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml new file mode 100644 index 0000000..defe65b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml @@ -0,0 +1,126 @@ + + + NASM + nasm + *.asm + *.ASM + *.nasm + text/x-nasm + true + 1.0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml new file mode 100644 index 0000000..707252b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml @@ -0,0 +1,143 @@ + + + Natural + natural + *.NSN + *.NSP + *.NSS + *.NSH + *.NSG + *.NSL + *.NSA + *.NSM + *.NSC + *.NS7 + text/x-natural + true + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml new file mode 100644 index 0000000..74d443b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml @@ -0,0 +1,123 @@ + + + NDISASM + ndisasm + text/x-disasm + true + 0.5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml new file mode 100644 index 0000000..b932657 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml @@ -0,0 +1,121 @@ + + + Newspeak + newspeak + *.ns2 + text/x-newspeak + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml new file mode 100644 index 0000000..46bdf57 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml @@ -0,0 +1,98 @@ + + + Nginx configuration file + nginx + nginx.conf + text/x-nginx-conf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml new file mode 100644 index 0000000..bfdd615 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml @@ -0,0 +1,211 @@ + + + Nim + nim + nimrod + *.nim + *.nimrod + text/x-nim + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml new file mode 100644 index 0000000..0ed040c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml @@ -0,0 +1,258 @@ + + + Nix + nixos + nix + *.nix + text/x-nix + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml new file mode 100644 index 0000000..0dc9328 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml @@ -0,0 +1,510 @@ + + + Objective-C + objective-c + objectivec + obj-c + objc + *.m + *.h + text/x-objective-c + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml new file mode 100644 index 0000000..12af64b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml @@ -0,0 +1,145 @@ + + + ObjectPascal + objectpascal + *.pas + *.pp + *.inc + *.dpr + *.dpk + *.lpr + *.lpk + text/x-pascal + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml new file mode 100644 index 0000000..77f67ac --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml @@ -0,0 +1,145 @@ + + + OCaml + ocaml + *.ml + *.mli + *.mll + *.mly + text/x-ocaml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml new file mode 100644 index 0000000..0515d28 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml @@ -0,0 +1,101 @@ + + + Octave + octave + *.m + text/octave + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml new file mode 100644 index 0000000..b984263 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml @@ -0,0 +1,113 @@ + + + Odin + odin + *.odin + text/odin + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml new file mode 100644 index 0000000..530bad7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml @@ -0,0 +1,92 @@ + + 
+ OnesEnterprise + ones + onesenterprise + 1S + 1S:Enterprise + *.EPF + *.epf + *.ERF + *.erf + application/octet-stream + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml new file mode 100644 index 0000000..04a80f3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml @@ -0,0 +1,101 @@ + + + OpenEdge ABL + openedge + abl + progress + openedgeabl + *.p + *.cls + *.w + *.i + text/x-openedge + application/x-openedge + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml new file mode 100644 index 0000000..84d0fe1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml @@ -0,0 +1,96 @@ + + + OpenSCAD + openscad + *.scad + text/x-scad + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml new file mode 100644 index 0000000..3f227ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml @@ -0,0 +1,329 @@ + + + Org Mode + org + orgmode + *.org + text/org + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2 + 4 + + + + + + + + + + + + 2 + 4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml new file mode 100644 index 0000000..caf7236 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml @@ -0,0 +1,37 @@ + + + PacmanConf + pacmanconf + pacman.conf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml new file mode 100644 index 0000000..8ac02ab --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml @@ -0,0 +1,400 @@ + + + Perl + perl + pl + *.pl + *.pm + *.t + text/x-perl + application/x-perl + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml new file mode 100644 index 0000000..c9e22ea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml @@ -0,0 +1,212 @@ + + + PHP + php + php3 + php4 + php5 + *.php + *.php[345] + *.inc + text/x-php + true + true + true + 3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml new file mode 100644 index 0000000..5acd773 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml @@ -0,0 +1,105 @@ + + + Pig + pig + *.pig + text/x-pig + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml new file mode 100644 index 0000000..875dcba --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml @@ -0,0 +1,73 @@ + + + PkgConfig + pkgconfig + *.pc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at 
end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml new file mode 100644 index 0000000..e3e813a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml @@ -0,0 +1,119 @@ + + + PL/pgSQL + plpgsql + text/x-plpgsql + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml new file mode 100644 index 0000000..d5e3243 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml @@ -0,0 +1,21 @@ + + + plaintext + text + plain + no-highlight + *.txt + text/plain + -1 + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml new file mode 100644 index 0000000..4ff5a97 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml @@ -0,0 +1,105 @@ + + + Plutus Core + plutus-core + plc + *.plc + text/x-plutus-core + application/x-plutus-core + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml new file mode 100644 index 0000000..4efa9db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml @@ -0,0 +1,135 @@ + + + Pony + pony + *.pony + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml new file mode 100644 index 0000000..e901c18 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml @@ -0,0 +1,155 @@ + + + PostgreSQL SQL dialect + postgresql + postgres + text/x-postgresql + true + true + + + + + + + + + + + + + + + + + + + 6 + 12 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 12 + 4 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml new file mode 100644 index 0000000..15a3422 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml @@ -0,0 +1,89 @@ + + + PostScript + postscript + postscr + *.ps + *.eps + application/postscript + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml new file mode 100644 index 0000000..f37dab9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml @@ -0,0 +1,58 @@ + + + POVRay + pov + *.pov + *.inc + text/x-povray + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml new file mode 100644 index 0000000..0ff1e35 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml @@ -0,0 +1,51 @@ + + + PowerQuery + powerquery + pq + *.pq + text/x-powerquery + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml new file mode 100644 index 0000000..b63a150 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml @@ -0,0 +1,230 @@ + + + PowerShell + powershell + posh + ps1 + psm1 + psd1 + pwsh + *.ps1 + *.psm1 + *.psd1 + text/x-powershell + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml new file mode 100644 index 0000000..391bae3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml @@ -0,0 +1,115 @@ + + + Prolog + prolog + *.ecl + *.prolog + *.pro + *.pl + text/x-prolog + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml new file mode 100644 index 0000000..84558c3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml @@ -0,0 +1,119 @@ + + + + Promela + promela + *.pml + *.prom + *.prm + *.promela + *.pr + *.pm + text/x-promela + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml new file mode 100644 index 0000000..e95e333 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml @@ -0,0 +1,123 @@ + + + PromQL + promql + *.promql + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml new file mode 100644 index 0000000..d5ae0a2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml @@ -0,0 +1,45 @@ + + + properties + java-properties + *.properties + text/x-java-properties + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml new file mode 100644 index 0000000..157d321 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml @@ -0,0 +1,118 @@ + + + Protocol Buffer + protobuf + proto + *.proto + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml new file mode 100644 index 0000000..21f21c6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml @@ -0,0 +1,161 @@ + + + PRQL + prql + *.prql + application/prql + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml new file mode 100644 index 0000000..ab375da --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml @@ -0,0 +1,213 @@ + + + PSL + psl + *.psl + *.BATCH + *.TRIG + *.PROC + text/x-psl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml new file mode 100644 
index 0000000..fbb587c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml @@ -0,0 +1,100 @@ + + + Puppet + puppet + *.pp + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml new file mode 100644 index 0000000..3c6af86 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml @@ -0,0 +1,589 @@ + + + Python + python + py + sage + python3 + py3 + *.py + *.pyi + *.pyw + *.jy + *.sage + *.sc + SConstruct + SConscript + *.bzl + BUCK + BUILD + BUILD.bazel + WORKSPACE + *.tac + text/x-python + application/x-python + text/x-python3 + application/x-python3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml new file mode 100644 index 0000000..3297a22 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml @@ -0,0 +1,356 @@ + + + Python 2 + python2 + py2 + text/x-python2 + application/x-python2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml new file mode 100644 index 0000000..193fe18 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml @@ -0,0 +1,173 @@ + + + QBasic + qbasic + basic + *.BAS + *.bas + text/basic + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml new file mode 100644 index 0000000..43eb3eb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml @@ -0,0 +1,113 @@ + + + QML + qml + qbs + *.qml + *.qbs + application/x-qml + application/x-qt.qbs+qml + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml new file mode 100644 index 0000000..c1fba4e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml @@ -0,0 +1,128 @@ + + + R + splus + s + r + *.S + *.R + *.r + .Rhistory + .Rprofile + .Renviron + text/S-plus + text/S + text/x-r-source + text/x-r + text/x-R + text/x-r-history + text/x-r-profile + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml new file mode 100644 index 0000000..6cdd303 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml @@ -0,0 +1,260 @@ + + + Racket + racket + rkt + *.rkt + *.rktd + *.rktl + text/x-racket + application/x-racket + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml new file mode 100644 index 0000000..69638d2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml @@ -0,0 +1,149 @@ + + + Ragel + ragel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml new file mode 100644 index 0000000..a4109b0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml @@ -0,0 +1,236 @@ + + + react + jsx + react + *.jsx + *.react + text/jsx + text/typescript-jsx + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml new file mode 100644 index 0000000..8b7bcc5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml @@ -0,0 +1,147 @@ + + + ReasonML + reason + reasonml + *.re + *.rei 
+ text/x-reasonml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml new file mode 100644 index 0000000..501d380 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml @@ -0,0 +1,68 @@ + + + reg + registry + *.reg + text/x-windows-registry + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml new file mode 100644 index 0000000..517b713 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml @@ -0,0 +1,94 @@ + + + Rego + rego + *.rego + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml new file mode 100644 index 0000000..e682500 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml @@ -0,0 +1,127 @@ + + + Rexx + rexx + arexx + *.rexx + *.rex + *.rx + *.arexx + text/x-rexx + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml new file mode 100644 index 0000000..8362772 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml @@ -0,0 +1,58 @@ + + + + RPMSpec + spec + *.spec + text/x-rpm-spec + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml new file mode 100644 index 0000000..baa7e43 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml @@ -0,0 +1,724 @@ + + + Ruby + rb + ruby + duby + *.rb + *.rbw + Rakefile + *.rake + *.gemspec + *.rbx + *.duby + Gemfile + Vagrantfile + text/x-ruby + application/x-ruby + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml new file mode 100644 index 0000000..083b96f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml @@ -0,0 +1,375 @@ + + + Rust + rust + rs + *.rs + *.rs.in + text/rust + text/x-rust + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml new file mode 100644 index 0000000..af1107b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml @@ -0,0 +1,191 @@ + + + SAS + sas + *.SAS + *.sas + text/x-sas + text/sas + application/x-sas + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml new file mode 100644 index 0000000..f801594 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml @@ -0,0 +1,362 @@ + + + Sass + sass + *.sass + text/x-sass + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml new file mode 100644 index 0000000..2f8ddd4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml @@ -0,0 +1,274 @@ + + + Scala + scala + *.scala + text/x-scala + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml new file mode 100644 index 0000000..0198bd7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml @@ -0,0 +1,106 @@ + + + Scheme + scheme + scm + *.scm + *.ss + text/x-scheme + application/x-scheme + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml new file mode 100644 index 0000000..9e10949 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml @@ -0,0 +1,98 @@ + + + Scilab + scilab + *.sci + *.sce + *.tst + text/scilab + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml new file mode 100644 index 0000000..ee060fc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml @@ -0,0 +1,373 @@ + + + SCSS + scss + *.scss + text/x-scss + true + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml new file mode 100644 index 0000000..2209aa7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml @@ -0,0 +1,28 @@ + + + Sed + sed + gsed + ssed + *.sed + *.[gs]sed + text/x-sed + + + + + + + + + + + + + + None + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml new file mode 100644 index 0000000..fc60563 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml @@ -0,0 +1,61 @@ + + + Sieve + sieve + *.siv + *.sieve + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml new file mode 100644 index 0000000..e468766 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml @@ -0,0 +1,73 @@ + + + + Smali + smali + *.smali + text/smali + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml new file mode 100644 index 0000000..0027111 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml @@ -0,0 +1,294 @@ + + + Smalltalk + smalltalk + squeak + st + *.st + text/x-smalltalk + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml new file mode 100644 index 0000000..dd7752c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml @@ -0,0 +1,79 @@ + + + Smarty + smarty + *.tpl + application/x-smarty + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml new file mode 100644 index 0000000..f53dbcb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml @@ -0,0 +1,95 @@ + + + Snobol + snobol + *.snobol + text/x-snobol + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml new file mode 
100644 index 0000000..04403c8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml @@ -0,0 +1,279 @@ + + + Solidity + sol + solidity + *.sol + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml new file mode 100644 index 0000000..caca401 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml @@ -0,0 +1,59 @@ + + + SourcePawn + sp + *.sp + *.inc + text/x-sourcepawn + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml new file mode 100644 index 0000000..7dc65af --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml @@ -0,0 +1,160 @@ + + + SPARQL + sparql + *.rq + *.sparql + application/sparql-query + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml new file mode 100644 index 0000000..b542b65 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml @@ -0,0 +1,90 @@ + + + SQL + sql + *.sql + text/x-sql + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml new file mode 100644 index 0000000..cbd8dbc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml @@ -0,0 +1,63 @@ + + + SquidConf + squidconf + squid.conf + squid + squid.conf + text/x-squidconf + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml new file mode 100644 index 0000000..39cf4f2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml @@ -0,0 +1,548 @@ + + + Standard ML + sml + *.sml + *.sig + *.fun + text/x-standardml + application/x-standardml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml new file mode 100644 index 0000000..56b4f92 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml @@ -0,0 +1,85 @@ + + + stas + *.stas + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml new file mode 100644 index 0000000..c2d8807 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml @@ -0,0 +1,132 @@ + + + Stylus + stylus + *.styl + text/x-styl + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml new file mode 100644 index 0000000..416bf90 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml @@ -0,0 +1,207 @@ + + + Swift + swift + *.swift + text/x-swift + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml new file mode 100644 index 0000000..e31bfc2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml @@ -0,0 +1,63 @@ + + + SYSTEMD + systemd + *.automount + *.device + *.dnssd + *.link + *.mount + *.netdev + *.network + *.path + *.scope + *.service + *.slice + *.socket + *.swap + *.target + *.timer + text/plain + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml new file mode 100644 index 0000000..fac3da2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml @@ -0,0 +1,181 @@ + + + systemverilog + systemverilog + sv + *.sv + *.svh + text/x-systemverilog + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml new file mode 100644 index 0000000..a020ce8 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml @@ -0,0 +1,69 @@ + + + TableGen + tablegen + *.td + text/x-tablegen + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml new file mode 100644 index 0000000..a071d4c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml @@ -0,0 +1,43 @@ + + + + Tal + tal + uxntal + *.tal + text/x-uxntal + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml new file mode 100644 index 0000000..1347f53 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml @@ -0,0 +1,135 @@ + + + TASM + tasm + *.asm + *.ASM + *.tasm + text/x-tasm + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml new file mode 100644 index 0000000..7ed69bc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml @@ -0,0 +1,272 @@ + + + Tcl + tcl + *.tcl + *.rvt + text/x-tcl + text/x-script.tcl + application/x-tcl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml new file mode 100644 index 0000000..9895643 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml @@ -0,0 +1,121 @@ + + + Tcsh + tcsh + csh + *.tcsh + *.csh + application/x-csh + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml new file mode 100644 index 0000000..e863bbd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml @@ -0,0 +1,75 @@ + + + Termcap + termcap + termcap + termcap.src + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml new file mode 100644 index 0000000..9e8f56e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml @@ -0,0 +1,84 @@ + + + Terminfo + terminfo + terminfo + terminfo.src + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml new file mode 100644 index 0000000..452f211 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml @@ -0,0 +1,140 @@ + + + Terraform + terraform + tf + *.tf + application/x-tf + application/x-terraform + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml new file mode 100644 index 0000000..809bb9a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml @@ -0,0 +1,113 @@ + + + TeX + tex + latex + *.tex + *.aux + *.toc + text/x-tex + text/x-latex + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml new file mode 100644 index 0000000..f14257d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml @@ -0,0 +1,154 @@ + + + Thrift + thrift + *.thrift + application/x-thrift + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml new file mode 100644 index 0000000..9c98ba5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml @@ -0,0 +1,44 @@ + + + TOML + toml + *.toml + Pipfile + poetry.lock + text/x-toml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml new file mode 100644 index 0000000..3671f61 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml @@ -0,0 +1,81 @@ + + + TradingView + tradingview + tv + *.tv + text/x-tradingview + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml new file mode 100644 index 0000000..b0490aa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml @@ -0,0 +1,137 @@ + + + Transact-SQL + tsql + t-sql + text/x-tsql + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml new file mode 100644 index 0000000..4eab69b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml @@ -0,0 +1,82 @@ + + + Turing + turing + *.turing + *.tu + text/x-turing + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml new file mode 100644 index 0000000..7c572f9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml @@ -0,0 +1,170 @@ + + + Turtle + turtle + *.ttl + text/turtle + application/x-turtle + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml new file mode 100644 index 0000000..de95c5f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml @@ -0,0 +1,155 @@ + + + Twig + twig + *.twig + application/x-twig + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml new file mode 100644 index 0000000..d49241e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml @@ -0,0 +1,263 @@ + + + TypeScript + ts + tsx + typescript + *.ts + *.tsx + *.mts + *.cts + text/x-typescript + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml new file mode 100644 index 0000000..bc416d4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml @@ -0,0 +1,178 @@ + + + TypoScript + typoscript + *.ts + text/x-typoscript + true + 0.1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml new file mode 100644 index 0000000..62c42c1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml @@ -0,0 +1,52 @@ + + + TypoScriptCssData + typoscriptcssdata + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml new file mode 100644 index 0000000..1b0af3a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml @@ -0,0 
+1,52 @@ + + + TypoScriptHtmlData + typoscripthtmldata + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml new file mode 100644 index 0000000..054fa89 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml @@ -0,0 +1,147 @@ + + + ucode + *.uc + application/x.ucode + text/x.ucode + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml new file mode 100644 index 0000000..e1af3d1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml @@ -0,0 +1,355 @@ + + + V + v + vlang + *.v + *.vv + v.mod + text/x-v + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml new file mode 100644 index 0000000..34ce610 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml @@ -0,0 +1,365 @@ + + + V shell + vsh + vshell + *.vsh + text/x-vsh + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml new file mode 100644 index 0000000..17c1acf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml @@ -0,0 +1,72 @@ + + + + Vala + vala + vapi + *.vala + *.vapi + text/x-vala + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml new file mode 100644 index 0000000..9f85afd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml @@ -0,0 +1,162 @@ + + + VB.net + vb.net + vbnet + *.vb + *.bas + text/x-vbnet + text/x-vba + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml new file mode 100644 index 0000000..cd4b9ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml @@ -0,0 +1,158 @@ + + + verilog + verilog + v + *.v + text/x-verilog + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml new file mode 100644 index 0000000..aa42044 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml @@ -0,0 +1,171 @@ + + + VHDL + vhdl + *.vhdl + *.vhd + text/x-vhdl + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml new file mode 100644 index 0000000..ee84d12 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml @@ -0,0 +1,48 @@ + + + VHS + vhs + tape + cassette + *.tape + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff 
--git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml new file mode 100644 index 0000000..43e6bfa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml @@ -0,0 +1,85 @@ + + + VimL + vim + *.vim + .vimrc + .exrc + .gvimrc + _vimrc + _exrc + _gvimrc + vimrc + gvimrc + text/x-vim + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml new file mode 100644 index 0000000..7518020 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml @@ -0,0 +1,305 @@ + + + vue + vue + vuejs + *.vue + text/x-vue + application/x-vue + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml new file mode 100644 index 0000000..c663ee2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml @@ -0,0 +1,43 @@ + + + WDTE + *.wdte + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml new file mode 100644 index 0000000..ea2b6e1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml @@ -0,0 +1,142 @@ + + + WebGPU Shading Language + wgsl + *.wgsl + text/wgsl + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml new file mode 100644 index 0000000..1762c96 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml @@ -0,0 +1,57 @@ + + + Whiley + whiley + *.whiley + text/x-whiley + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml new file mode 100644 index 0000000..2c6a4d9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml @@ -0,0 +1,95 @@ + + + XML + xml + *.xml + *.xsl + *.rss + *.xslt + *.xsd + *.wsdl + *.wsf + *.svg + *.csproj + *.vcxproj + *.fsproj + text/xml + application/xml + image/svg+xml + application/rss+xml + application/atom+xml + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml new file mode 100644 index 0000000..53bf432 
--- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml @@ -0,0 +1,35 @@ + + + Xorg + xorg.conf + xorg.conf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml new file mode 100644 index 0000000..97a0b6e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml @@ -0,0 +1,122 @@ + + + YAML + yaml + *.yaml + *.yml + text/x-yaml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml new file mode 100644 index 0000000..f3da7ce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml @@ -0,0 +1,99 @@ + + + YANG + yang + *.yang + application/yang + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml new file mode 100644 index 0000000..5bb77a9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml @@ -0,0 +1,74 @@ + + + Z80 Assembly + z80 + *.z80 + *.asm + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml 
b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml new file mode 100644 index 0000000..929f495 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml @@ -0,0 +1,51 @@ + + + Zed + zed + *.zed + text/zed + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml new file mode 100644 index 0000000..fb51cc1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml @@ -0,0 +1,112 @@ + + + Zig + zig + *.zig + text/zig + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go b/vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go new file mode 100644 index 0000000..7f396f4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go @@ -0,0 +1,118 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Genshi Text lexer. 
+var GenshiText = Register(MustNewLexer( + &Config{ + Name: "Genshi Text", + Aliases: []string{"genshitext"}, + Filenames: []string{}, + MimeTypes: []string{"application/x-genshi-text", "text/x-genshi"}, + }, + genshiTextRules, +)) + +func genshiTextRules() Rules { + return Rules{ + "root": { + {`[^#$\s]+`, Other, nil}, + {`^(\s*)(##.*)$`, ByGroups(Text, Comment), nil}, + {`^(\s*)(#)`, ByGroups(Text, CommentPreproc), Push("directive")}, + Include("variable"), + {`[#$\s]`, Other, nil}, + }, + "directive": { + {`\n`, Text, Pop(1)}, + {`(?:def|for|if)\s+.*`, Using("Python"), Pop(1)}, + {`(choose|when|with)([^\S\n]+)(.*)`, ByGroups(Keyword, Text, Using("Python")), Pop(1)}, + {`(choose|otherwise)\b`, Keyword, Pop(1)}, + {`(end\w*)([^\S\n]*)(.*)`, ByGroups(Keyword, Text, Comment), Pop(1)}, + }, + "variable": { + {`(?)`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil}, + {`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil}, + {`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")}, + {`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")}, + Include("variable"), + {`[<$]`, Other, nil}, + }, + "pytag": { + {`\s+`, Text, nil}, + {`[\w:-]+\s*=`, NameAttribute, Push("pyattr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "pyattr": { + {`(")(.*?)(")`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)}, + {`(')(.*?)(')`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + "tag": { + {`\s+`, Text, nil}, + {`py:[\w-]+\s*=`, NameAttribute, Push("pyattr")}, + {`[\w:-]+\s*=`, NameAttribute, Push("attr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "attr": { + {`"`, LiteralString, Push("attr-dstring")}, + {`'`, LiteralString, Push("attr-sstring")}, + {`[^\s>]*`, LiteralString, Pop(1)}, + }, + "attr-dstring": { + {`"`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "attr-sstring": { + {`'`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "strings": { + 
{`[^"'$]+`, LiteralString, nil}, + Include("variable"), + }, + "variable": { + {`(?>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil}, + {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil}, + {`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + } +} + +var GoHTMLTemplate = Register(DelegatingLexer(HTML, MustNewXMLLexer( + embedded, + "embedded/go_template.xml", +).SetConfig( + &Config{ + Name: "Go HTML Template", + Aliases: []string{"go-html-template"}, + }, +))) + +var GoTextTemplate = Register(MustNewXMLLexer( + embedded, + "embedded/go_template.xml", +).SetConfig( + &Config{ + Name: "Go Text Template", + Aliases: []string{"go-text-template"}, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go b/vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go new file mode 100644 index 0000000..9a72de8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go @@ -0,0 +1,647 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Haxe lexer. 
+var Haxe = Register(MustNewLexer( + &Config{ + Name: "Haxe", + Aliases: []string{"hx", "haxe", "hxsl"}, + Filenames: []string{"*.hx", "*.hxsl"}, + MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"}, + DotAll: true, + }, + haxeRules, +)) + +func haxeRules() Rules { + return Rules{ + "root": { + Include("spaces"), + Include("meta"), + {`(?:package)\b`, KeywordNamespace, Push("semicolon", "package")}, + {`(?:import)\b`, KeywordNamespace, Push("semicolon", "import")}, + {`(?:using)\b`, KeywordNamespace, Push("semicolon", "using")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("typedef")}, + {`(?=.)`, Text, Push("expr-statement")}, + }, + "spaces": { + {`\s+`, Text, nil}, + {`//[^\n\r]*`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(#)(if|elseif|else|end|error)\b`, CommentPreproc, MutatorFunc(haxePreProcMutator)}, + }, + "string-single-interpol": { + {`\$\{`, LiteralStringInterpol, Push("string-interpol-close", "expr")}, + {`\$\$`, LiteralStringEscape, nil}, + {`\$(?=(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, LiteralStringInterpol, Push("ident")}, + Include("string-single"), + }, + "string-single": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringSingle, nil}, + }, + "string-double": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringDouble, nil}, + }, + "string-interpol-close": { + {`\$(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, LiteralStringInterpol, nil}, + {`\}`, LiteralStringInterpol, Pop(1)}, + }, + "package": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "import": { + 
Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\*`, Keyword, nil}, + {`\.`, Punctuation, Push("import-ident")}, + {`in`, KeywordNamespace, Push("ident")}, + Default(Pop(1)), + }, + "import-ident": { + Include("spaces"), + {`\*`, Keyword, Pop(1)}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, Pop(1)}, + }, + "using": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "preproc-error": { + {`\s+`, CommentPreproc, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + Default(Pop(1)), + }, + "preproc-expr": { + {`\s+`, CommentPreproc, nil}, + {`\!`, CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Pop(1)}, + {`\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "preproc-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\)`, CommentPreproc, Pop(1)}, + Default(Push("preproc-expr-in-parenthesis")), + }, + "preproc-expr-chain": { + {`\s+`, CommentPreproc, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, CommentPreproc, Push("#pop", "preproc-expr-in-parenthesis")}, + Default(Pop(1)), + }, + "preproc-expr-in-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\!`, 
CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-expr-chain", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Push("#pop", "preproc-expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "preproc-expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "preproc-expr-chain", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "preproc-expr-chain", "string-double")}, + }, + "abstract": { + Include("spaces"), + Default(Pop(1), Push("abstract-body"), Push("abstract-relation"), Push("abstract-opaque"), Push("type-param-constraint"), Push("type-name")), + }, + "abstract-body": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "class-body")}, + }, + "abstract-opaque": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "type")}, + Default(Pop(1)), + }, + "abstract-relation": { + Include("spaces"), + {`(?:to|from)`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "meta": { + Include("spaces"), + {`@`, NameDecorator, Push("meta-body", "meta-ident", "meta-colon")}, + }, + "meta-colon": { + Include("spaces"), + {`:`, NameDecorator, Pop(1)}, + Default(Pop(1)), + }, + "meta-ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameDecorator, Pop(1)}, + }, + "meta-body": { + Include("spaces"), + {`\(`, NameDecorator, Push("#pop", "meta-call")}, + Default(Pop(1)), + }, + 
"meta-call": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + Default(Pop(1), Push("meta-call-sep"), Push("expr")), + }, + "meta-call-sep": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + {`,`, Punctuation, Push("#pop", "meta-call")}, + }, + "typedef": { + Include("spaces"), + Default(Pop(1), Push("typedef-body"), Push("type-param-constraint"), Push("type-name")), + }, + "typedef-body": { + Include("spaces"), + {`=`, Operator, Push("#pop", "optional-semicolon", "type")}, + }, + "enum": { + Include("spaces"), + Default(Pop(1), Push("enum-body"), Push("bracket-open"), Push("type-param-constraint"), Push("type-name")), + }, + "enum-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("enum-member", "type-param-constraint")}, + }, + "enum-member": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "semicolon", "flag", "function-param")}, + Default(Pop(1), Push("semicolon"), Push("flag")), + }, + "class": { + Include("spaces"), + Default(Pop(1), Push("class-body"), Push("bracket-open"), Push("extends"), Push("type-param-constraint"), Push("type-name")), + }, + "extends": { + Include("spaces"), + {`(?:extends|implements)\b`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "bracket-open": { + Include("spaces"), + {`\{`, Punctuation, Pop(1)}, + }, + "bracket-close": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + }, + "class-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + {`(?:static|public|private|override|dynamic|inline|macro)\b`, KeywordDeclaration, nil}, + Default(Push("class-member")), + }, + 
"class-member": { + Include("spaces"), + {`(var)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "var")}, + {`(function)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "class-method")}, + }, + "function-local": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + Default(Pop(1), Push("optional-expr"), Push("flag"), Push("function-param"), Push("parenthesis-open"), Push("type-param-constraint")), + }, + "optional-expr": { + Include("spaces"), + Include("expr"), + Default(Pop(1)), + }, + "class-method": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + }, + "function-param": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "function-param-sep", "assign", "flag")}, + }, + "function-param-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "function-param")}, + }, + "prop-get-set": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "prop-get-set-opt", "comma", "prop-get-set-opt")}, + Default(Pop(1)), + }, + "prop-get-set-opt": { + Include("spaces"), + 
{`(?:default|null|never|dynamic|get|set)\b`, Keyword, Pop(1)}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Pop(1)}, + }, + "expr-statement": { + Include("spaces"), + Default(Pop(1), Push("optional-semicolon"), Push("expr")), + }, + "expr": { + Include("spaces"), + {`@`, NameDecorator, Push("#pop", "optional-expr", "meta-body", "meta-ident", "meta-colon")}, + {`(?:\+\+|\-\-|~(?!/)|!|\-)`, Operator, nil}, + {`\(`, Punctuation, Push("#pop", "expr-chain", "parenthesis")}, + {`(?:static|public|private|override|dynamic|inline)\b`, KeywordDeclaration, nil}, + {`(?:function)\b`, KeywordDeclaration, Push("#pop", "expr-chain", "function-local")}, + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket")}, + {`(?:true|false|null)\b`, KeywordConstant, Push("#pop", "expr-chain")}, + {`(?:this)\b`, Keyword, Push("#pop", "expr-chain")}, + {`(?:cast)\b`, Keyword, Push("#pop", "expr-chain", "cast")}, + {`(?:try)\b`, Keyword, Push("#pop", "catch", "expr")}, + {`(?:var)\b`, KeywordDeclaration, Push("#pop", "var")}, + {`(?:new)\b`, Keyword, Push("#pop", "expr-chain", "new")}, + {`(?:switch)\b`, Keyword, Push("#pop", "switch")}, + {`(?:if)\b`, Keyword, Push("#pop", "if")}, + {`(?:do)\b`, Keyword, Push("#pop", "do")}, + {`(?:while)\b`, Keyword, Push("#pop", "while")}, + {`(?:for)\b`, Keyword, Push("#pop", "for")}, + {`(?:untyped|throw)\b`, Keyword, nil}, + {`(?:return)\b`, Keyword, Push("#pop", "optional-expr")}, + {`(?:macro)\b`, Keyword, Push("#pop", "macro")}, + {`(?:continue|break)\b`, Keyword, Pop(1)}, + {`(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)))`, Name, Push("#pop", "dollar")}, + 
{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "expr-chain", "string-single-interpol")}, + {`"`, LiteralStringDouble, Push("#pop", "expr-chain", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gimsu]*`, LiteralStringRegex, Push("#pop", "expr-chain")}, + {`\[`, Punctuation, Push("#pop", "expr-chain", "array-decl")}, + }, + "expr-chain": { + Include("spaces"), + {`(?:\+\+|\-\-)`, Operator, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, Operator, Push("#pop", "expr")}, + {`(?:in)\b`, Keyword, Push("#pop", "expr")}, + {`\?`, Operator, Push("#pop", "expr", "ternary", "expr")}, + {`(\.)((?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, ByGroups(Punctuation, Name), nil}, + {`\[`, Punctuation, Push("array-access")}, + {`\(`, 
Punctuation, Push("call")}, + Default(Pop(1)), + }, + "macro": { + Include("spaces"), + Include("meta"), + {`:`, Punctuation, Push("#pop", "type")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "macro-class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "typedef")}, + Default(Pop(1), Push("expr")), + }, + "macro-class": { + {`\{`, Punctuation, Push("#pop", "class-body")}, + Include("class"), + }, + "cast": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "cast-type", "expr")}, + Default(Pop(1), Push("expr")), + }, + "cast-type": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "catch": { + Include("spaces"), + {`(?:catch)\b`, Keyword, Push("expr", "function-param", "parenthesis-open")}, + Default(Pop(1)), + }, + "do": { + Include("spaces"), + Default(Pop(1), Push("do-while"), Push("expr")), + }, + "do-while": { + Include("spaces"), + {`(?:while)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + }, + "while": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "for": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "if": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "else", "optional-semicolon", "expr", "parenthesis")}, + }, + "else": { + Include("spaces"), + {`(?:else)\b`, Keyword, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "switch": { + Include("spaces"), + Default(Pop(1), Push("switch-body"), Push("bracket-open"), Push("expr")), + }, + "switch-body": { + Include("spaces"), + {`(?:case|default)\b`, Keyword, Push("case-block", "case")}, + {`\}`, Punctuation, Pop(1)}, + }, + "case": { + 
Include("spaces"), + {`:`, Punctuation, Pop(1)}, + Default(Pop(1), Push("case-sep"), Push("case-guard"), Push("expr")), + }, + "case-sep": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "case")}, + }, + "case-guard": { + Include("spaces"), + {`(?:if)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + Default(Pop(1)), + }, + "case-block": { + Include("spaces"), + {`(?!(?:case|default)\b|\})`, Keyword, Push("expr-statement")}, + Default(Pop(1)), + }, + "new": { + Include("spaces"), + Default(Pop(1), Push("call"), Push("parenthesis-open"), Push("type")), + }, + "array-decl": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + Default(Pop(1), Push("array-decl-sep"), Push("expr")), + }, + "array-decl-sep": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "array-decl")}, + }, + "array-access": { + Include("spaces"), + Default(Pop(1), Push("array-access-close"), Push("expr")), + }, + "array-access-close": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + }, + "comma": { + Include("spaces"), + {`,`, Punctuation, Pop(1)}, + }, + "colon": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + }, + "semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + }, + "optional-semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + Default(Pop(1)), + }, + "ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + }, + "dollar": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket-close", "expr")}, + Default(Pop(1), Push("expr-chain")), + }, + "type-name": { + Include("spaces"), + {`_*[A-Z]\w*`, Name, Pop(1)}, + }, + "type-full-name": { + Include("spaces"), + {`\.`, Punctuation, Push("ident")}, + Default(Pop(1)), + }, + "type": { + Include("spaces"), + {`\?`, Punctuation, nil}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-check", "type-full-name")}, + {`\{`, 
Punctuation, Push("#pop", "type-check", "type-struct")}, + {`\(`, Punctuation, Push("#pop", "type-check", "type-parenthesis")}, + }, + "type-parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("type")), + }, + "type-check": { + Include("spaces"), + {`->`, Punctuation, Push("#pop", "type")}, + {`<(?!=)`, Punctuation, Push("type-param")}, + Default(Pop(1)), + }, + "type-struct": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`>`, Punctuation, Push("comma", "type")}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-struct-sep", "type", "colon")}, + Include("class-body"), + }, + "type-struct-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-struct")}, + }, + "type-param-type": { + {`\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Pop(1)}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Pop(1)}, + {`[0-9]+`, LiteralNumberInteger, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, Pop(1)}, + {`\[`, Operator, Push("#pop", "array-decl")}, + Include("type"), + }, + "type-param": { + Include("spaces"), + Default(Pop(1), Push("type-param-sep"), Push("type-param-type")), + }, + "type-param-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", 
"type-param")}, + }, + "type-param-constraint": { + Include("spaces"), + {`<(?!=)`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + Default(Pop(1)), + }, + "type-param-constraint-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + }, + "type-param-constraint-flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type-param-constraint-flag-type")}, + Default(Pop(1)), + }, + "type-param-constraint-flag-type": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "type-param-constraint-flag-type-sep", "type")}, + Default(Pop(1), Push("type")), + }, + "type-param-constraint-flag-type-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("type")}, + }, + "parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("flag"), Push("expr")), + }, + "parenthesis-open": { + Include("spaces"), + {`\(`, Punctuation, Pop(1)}, + }, + "parenthesis-close": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + }, + "var": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Push("#pop", "var-sep", "assign", "flag", "prop-get-set")}, + }, + "var-sep": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "var")}, + Default(Pop(1)), + }, + "assign": { + Include("spaces"), + {`=`, Operator, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "ternary": { + Include("spaces"), + {`:`, Operator, Pop(1)}, + }, + "call": { + Include("spaces"), + {`\)`, 
Punctuation, Pop(1)}, + Default(Pop(1), Push("call-sep"), Push("expr")), + }, + "call-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "call")}, + }, + "bracket": { + Include("spaces"), + {`(?!(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))))(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "bracket-check")}, + {`'`, LiteralStringSingle, Push("#pop", "bracket-check", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "bracket-check", "string-double")}, + Default(Pop(1), Push("block")), + }, + "bracket-check": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "object-sep", "expr")}, + Default(Pop(1), Push("block"), Push("optional-semicolon"), Push("expr-chain")), + }, + "block": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Push("expr-statement")), + }, + "object": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Pop(1), Push("object-sep"), Push("expr"), Push("colon"), Push("ident-or-string")), + }, + "ident-or-string": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "object-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "object")}, + }, + } +} + +func haxePreProcMutator(state 
*LexerState) error { + stack, ok := state.Get("haxe-pre-proc").([][]string) + if !ok { + stack = [][]string{} + } + + proc := state.Groups[2] + switch proc { + case "if": + stack = append(stack, state.Stack) + case "else", "elseif": + if len(stack) > 0 { + state.Stack = stack[len(stack)-1] + } + case "end": + if len(stack) > 0 { + stack = stack[:len(stack)-1] + } + } + + if proc == "if" || proc == "elseif" { + state.Stack = append(state.Stack, "preproc-expr") + } + + if proc == "error" { + state.Stack = append(state.Stack, "preproc-error") + } + state.Set("haxe-pre-proc", stack) + return nil +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/html.go b/vendor/github.com/alecthomas/chroma/v2/lexers/html.go new file mode 100644 index 0000000..c858042 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/html.go @@ -0,0 +1,8 @@ +package lexers + +import ( + "github.com/alecthomas/chroma/v2" +) + +// HTML lexer. +var HTML = chroma.MustNewXMLLexer(embedded, "embedded/html.xml") diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/http.go b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go new file mode 100644 index 0000000..b57cb1b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go @@ -0,0 +1,131 @@ +package lexers + +import ( + "strings" + + . "github.com/alecthomas/chroma/v2" // nolint +) + +// HTTP lexer. 
+var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer( + &Config{ + Name: "HTTP", + Aliases: []string{"http"}, + Filenames: []string{}, + MimeTypes: []string{}, + NotMultiline: true, + DotAll: true, + }, + httpRules, +))) + +func httpRules() Rules { + return Rules{ + "root": { + {`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([123](?:\.[01])?)(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, + {`(HTTP)(/)([123](?:\.[01])?)( +)(\d{3})( *)([^\r\n]*)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, + }, + "headers": { + {`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil}, + {`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil}, + {`\r?\n`, Text, Push("content")}, + }, + "content": { + {`.+`, EmitterFunc(httpContentBlock), nil}, + }, + } +} + +func httpContentBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Generic, groups[0]}, + } + return Literator(tokens...) +} + +func httpHeaderBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Name, groups[1]}, + {Text, groups[2]}, + {Operator, groups[3]}, + {Text, groups[4]}, + {Literal, groups[5]}, + {Text, groups[6]}, + } + return Literator(tokens...) +} + +func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator { + tokens := []Token{ + {Text, groups[1]}, + {Literal, groups[2]}, + {Text, groups[3]}, + } + return Literator(tokens...) 
+} + +func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} } + +type httpBodyContentTyper struct{ Lexer } + +func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit + var contentType string + var isContentType bool + var subIterator Iterator + + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + + return func() Token { + token := it() + + if token == EOF { + if subIterator != nil { + return subIterator() + } + return EOF + } + + switch { + case token.Type == Name && strings.ToLower(token.Value) == "content-type": + { + isContentType = true + } + case token.Type == Literal && isContentType: + { + isContentType = false + contentType = strings.TrimSpace(token.Value) + pos := strings.Index(contentType, ";") + if pos > 0 { + contentType = strings.TrimSpace(contentType[:pos]) + } + } + case token.Type == Generic && contentType != "": + { + lexer := MatchMimeType(contentType) + + // application/calendar+xml can be treated as application/xml + // if there's not a better match. 
+ if lexer == nil && strings.Contains(contentType, "+") { + slashPos := strings.Index(contentType, "/") + plusPos := strings.LastIndex(contentType, "+") + contentType = contentType[:slashPos+1] + contentType[plusPos+1:] + lexer = MatchMimeType(contentType) + } + + if lexer == nil { + token.Type = Text + } else { + subIterator, err = lexer.Tokenise(nil, token.Value) + if err != nil { + panic(err) + } + return EOF + } + } + } + return token + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go b/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go new file mode 100644 index 0000000..4fa35ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/lexers.go @@ -0,0 +1,79 @@ +package lexers + +import ( + "embed" + "io/fs" + + "github.com/alecthomas/chroma/v2" +) + +//go:embed embedded +var embedded embed.FS + +// GlobalLexerRegistry is the global LexerRegistry of Lexers. +var GlobalLexerRegistry = func() *chroma.LexerRegistry { + reg := chroma.NewLexerRegistry() + // index(reg) + paths, err := fs.Glob(embedded, "embedded/*.xml") + if err != nil { + panic(err) + } + for _, path := range paths { + reg.Register(chroma.MustNewXMLLexer(embedded, path)) + } + return reg +}() + +// Names of all lexers, optionally including aliases. +func Names(withAliases bool) []string { + return GlobalLexerRegistry.Names(withAliases) +} + +// Get a Lexer by name, alias or file extension. +// +// Note that this if there isn't an exact match on name or alias, this will +// call Match(), so it is not efficient. +func Get(name string) chroma.Lexer { + return GlobalLexerRegistry.Get(name) +} + +// MatchMimeType attempts to find a lexer for the given MIME type. +func MatchMimeType(mimeType string) chroma.Lexer { + return GlobalLexerRegistry.MatchMimeType(mimeType) +} + +// Match returns the first lexer matching filename. +// +// Note that this iterates over all file patterns in all lexers, so it's not +// particularly efficient. 
+func Match(filename string) chroma.Lexer { + return GlobalLexerRegistry.Match(filename) +} + +// Register a Lexer with the global registry. +func Register(lexer chroma.Lexer) chroma.Lexer { + return GlobalLexerRegistry.Register(lexer) +} + +// Analyse text content and return the "best" lexer.. +func Analyse(text string) chroma.Lexer { + return GlobalLexerRegistry.Analyse(text) +} + +// PlaintextRules is used for the fallback lexer as well as the explicit +// plaintext lexer. +func PlaintextRules() chroma.Rules { + return chroma.Rules{ + "root": []chroma.Rule{ + {`.+`, chroma.Text, nil}, + {`\n`, chroma.Text, nil}, + }, + } +} + +// Fallback lexer if no other is found. +var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{ + Name: "fallback", + Filenames: []string{"*"}, + Priority: -1, +}, PlaintextRules) diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go b/vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go new file mode 100644 index 0000000..1fb9f5b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/markdown.go @@ -0,0 +1,47 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Markdown lexer. 
+var Markdown = Register(DelegatingLexer(HTML, MustNewLexer( + &Config{ + Name: "markdown", + Aliases: []string{"md", "mkd"}, + Filenames: []string{"*.md", "*.mkd", "*.markdown"}, + MimeTypes: []string{"text/x-markdown"}, + }, + markdownRules, +))) + +func markdownRules() Rules { + return Rules{ + "root": { + {`^(#[^#].+\n)`, ByGroups(GenericHeading), nil}, + {`^(#{2,6}.+\n)`, ByGroups(GenericSubheading), nil}, + {`^(\s*)([*-] )(\[[ xX]\])( .+\n)`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^(\s*)([*-])(\s)(.+\n)`, ByGroups(Text, Keyword, Text, UsingSelf("inline")), nil}, + {`^(\s*)([0-9]+\.)( .+\n)`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + {`^(\s*>\s)(.+\n)`, ByGroups(Keyword, GenericEmph), nil}, + {"^(```\\n)([\\w\\W]*?)(^```$)", ByGroups(String, Text, String), nil}, + { + "^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", + UsingByGroup(2, 4, String, String, String, Text, String), + nil, + }, + Include("inline"), + }, + "inline": { + {`\\.`, Text, nil}, + {`(\s)(\*|_)((?:(?!\2).)*)(\2)((?=\W|\n))`, ByGroups(Text, GenericEmph, GenericEmph, GenericEmph, Text), nil}, + {`(\s)((\*\*|__).*?)\3((?=\W|\n))`, ByGroups(Text, GenericStrong, GenericStrong, Text), nil}, + {`(\s)(~~[^~]+~~)((?=\W|\n))`, ByGroups(Text, GenericDeleted, Text), nil}, + {"`[^`]+`", LiteralStringBacktick, nil}, + {`[@#][\w/:]+`, NameEntity, nil}, + {`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil}, + {`[^\\\s]+`, Other, nil}, + {`.|\n`, Other, nil}, + }, + } +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go b/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go new file mode 100644 index 0000000..32e94c2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/mysql.go @@ -0,0 +1,33 @@ +package lexers + +import ( + "regexp" +) + +var ( + mysqlAnalyserNameBetweenBacktickRe = regexp.MustCompile("`[a-zA-Z_]\\w*`") + mysqlAnalyserNameBetweenBracketRe = regexp.MustCompile(`\[[a-zA-Z_]\w*\]`) +) + 
+func init() { // nolint: gochecknoinits + Get("mysql"). + SetAnalyser(func(text string) float32 { + nameBetweenBacktickCount := len(mysqlAnalyserNameBetweenBacktickRe.FindAllString(text, -1)) + nameBetweenBracketCount := len(mysqlAnalyserNameBetweenBracketRe.FindAllString(text, -1)) + + var result float32 + + // Same logic as above in the TSQL analysis. + dialectNameCount := nameBetweenBacktickCount + nameBetweenBracketCount + if dialectNameCount >= 1 && nameBetweenBacktickCount >= (2*nameBetweenBracketCount) { + // Found at least twice as many `name` as [name]. + result += 0.5 + } else if nameBetweenBacktickCount > nameBetweenBracketCount { + result += 0.2 + } else if nameBetweenBacktickCount > 0 { + result += 0.1 + } + + return result + }) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/php.go b/vendor/github.com/alecthomas/chroma/v2/lexers/php.go new file mode 100644 index 0000000..ff82f6e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/php.go @@ -0,0 +1,37 @@ +package lexers + +import ( + "strings" + + . "github.com/alecthomas/chroma/v2" // nolint +) + +// phtml lexer is PHP in HTML. +var _ = Register(DelegatingLexer(HTML, MustNewLexer( + &Config{ + Name: "PHTML", + Aliases: []string{"phtml"}, + Filenames: []string{"*.phtml", "*.php", "*.php[345]", "*.inc"}, + MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5", "text/x-php"}, + DotAll: true, + CaseInsensitive: true, + EnsureNL: true, + Priority: 2, + }, + func() Rules { + return Get("PHP").(*RegexLexer).MustRules(). + Rename("root", "php"). 
+ Merge(Rules{ + "root": { + {`<\?(php)?`, CommentPreproc, Push("php")}, + {`[^<]+`, Other, nil}, + {`<`, Other, nil}, + }, + }) + }, +).SetAnalyser(func(text string) float32 { + if strings.Contains(text, ">|>|»|\)|\]|\})` + colonPairPattern = `(?:)(?\w[\w'-]*)(?` + colonPairOpeningBrackets + `)` + colonPairLookahead = `(?=(:['\w-]+` + + colonPairOpeningBrackets + `.+?` + colonPairClosingBrackets + `)?` + namePattern = `(?:(?!` + colonPairPattern + `)(?:::|[\w':-]))+` + variablePattern = `[$@%&]+[.^:?=!~]?` + namePattern + globalVariablePattern = `[$@%&]+\*` + namePattern + ) + + keywords := []string{ + `BEGIN`, `CATCH`, `CHECK`, `CLOSE`, `CONTROL`, `DOC`, `END`, `ENTER`, `FIRST`, `INIT`, + `KEEP`, `LAST`, `LEAVE`, `NEXT`, `POST`, `PRE`, `QUIT`, `UNDO`, `anon`, `augment`, `but`, + `class`, `constant`, `default`, `does`, `else`, `elsif`, `enum`, `for`, `gather`, `given`, + `grammar`, `has`, `if`, `import`, `is`, `of`, `let`, `loop`, `made`, `make`, `method`, + `module`, `multi`, `my`, `need`, `orwith`, `our`, `proceed`, `proto`, `repeat`, `require`, + `where`, `return`, `return-rw`, `returns`, `->`, `-->`, `role`, `state`, `sub`, `no`, + `submethod`, `subset`, `succeed`, `supersede`, `try`, `unit`, `unless`, `until`, + `use`, `when`, `while`, `with`, `without`, `export`, `native`, `repr`, `required`, `rw`, + `symbol`, `default`, `cached`, `DEPRECATED`, `dynamic`, `hidden-from-backtrace`, `nodal`, + `pure`, `raw`, `start`, `react`, `supply`, `whenever`, `also`, `rule`, `token`, `regex`, + `dynamic-scope`, `built`, `temp`, + } + + keywordsPattern := Words(`(?)`, `(>=)`, `minmax`, `notandthen`, `S`, + } + + wordOperatorsPattern := Words(`(?<=^|\b|\s)`, `(?=$|\b|\s)`, wordOperators...) 
+ + operators := []string{ + `++`, `--`, `-`, `**`, `!`, `+`, `~`, `?`, `+^`, `~^`, `?^`, `^`, `*`, `/`, `%`, `%%`, `+&`, + `+<`, `+>`, `~&`, `~<`, `~>`, `?&`, `+|`, `+^`, `~|`, `~^`, `?`, `?|`, `?^`, `&`, `^`, + `<=>`, `^…^`, `^…`, `…^`, `…`, `...`, `...^`, `^...`, `^...^`, `..`, `..^`, `^..`, `^..^`, + `::=`, `:=`, `!=`, `==`, `<=`, `<`, `>=`, `>`, `~~`, `===`, `&&`, `||`, `|`, `^^`, `//`, + `??`, `!!`, `^fff^`, `^ff^`, `<==`, `==>`, `<<==`, `==>>`, `=>`, `=`, `<<`, `«`, `>>`, `»`, + `,`, `>>.`, `».`, `.&`, `.=`, `.^`, `.?`, `.+`, `.*`, `.`, `∘`, `∩`, `âŠ`, `∪`, `⊎`, `∖`, + `⊖`, `≠`, `≤`, `≥`, `=:=`, `=~=`, `≅`, `∈`, `∉`, `≡`, `≢`, `∋`, `∌`, `⊂`, `⊄`, `⊆`, `⊈`, + `⊃`, `⊅`, `⊇`, `⊉`, `:`, `!!!`, `???`, `¯`, `×`, `÷`, `−`, `âº`, `â»`, + } + + operatorsPattern := Words(``, ``, operators...) + + builtinTypes := []string{ + `False`, `True`, `Order`, `More`, `Less`, `Same`, `Any`, `Array`, `Associative`, `AST`, + `atomicint`, `Attribute`, `Backtrace`, `Backtrace::Frame`, `Bag`, `Baggy`, `BagHash`, + `Blob`, `Block`, `Bool`, `Buf`, `Callable`, `CallFrame`, `Cancellation`, `Capture`, + `CArray`, `Channel`, `Code`, `compiler`, `Complex`, `ComplexStr`, `CompUnit`, + `CompUnit::PrecompilationRepository`, `CompUnit::Repository`, `Empty`, + `CompUnit::Repository::FileSystem`, `CompUnit::Repository::Installation`, `Cool`, + `CurrentThreadScheduler`, `CX::Warn`, `CX::Take`, `CX::Succeed`, `CX::Return`, `CX::Redo`, + `CX::Proceed`, `CX::Next`, `CX::Last`, `CX::Emit`, `CX::Done`, `Cursor`, `Date`, `Dateish`, + `DateTime`, `Distribution`, `Distribution::Hash`, `Distribution::Locally`, + `Distribution::Path`, `Distribution::Resource`, `Distro`, `Duration`, `Encoding`, + `Encoding::GlobalLexerRegistry`, `Endian`, `Enumeration`, `Exception`, `Failure`, `FatRat`, `Grammar`, + `Hash`, `HyperWhatever`, `Instant`, `Int`, `int`, `int16`, `int32`, `int64`, `int8`, `str`, + `IntStr`, `IO`, `IO::ArgFiles`, `IO::CatHandle`, `IO::Handle`, `IO::Notification`, + `IO::Notification::Change`, 
`IO::Path`, `IO::Path::Cygwin`, `IO::Path::Parts`, + `IO::Path::QNX`, `IO::Path::Unix`, `IO::Path::Win32`, `IO::Pipe`, `IO::Socket`, + `IO::Socket::Async`, `IO::Socket::Async::ListenSocket`, `IO::Socket::INET`, `IO::Spec`, + `IO::Spec::Cygwin`, `IO::Spec::QNX`, `IO::Spec::Unix`, `IO::Spec::Win32`, `IO::Special`, + `Iterable`, `Iterator`, `Junction`, `Kernel`, `Label`, `List`, `Lock`, `Lock::Async`, + `Lock::ConditionVariable`, `long`, `longlong`, `Macro`, `Map`, `Match`, + `Metamodel::AttributeContainer`, `Metamodel::C3MRO`, `Metamodel::ClassHOW`, + `Metamodel::ConcreteRoleHOW`, `Metamodel::CurriedRoleHOW`, `Metamodel::DefiniteHOW`, + `Metamodel::Documenting`, `Metamodel::EnumHOW`, `Metamodel::Finalization`, + `Metamodel::MethodContainer`, `Metamodel::Mixins`, `Metamodel::MROBasedMethodDispatch`, + `Metamodel::MultipleInheritance`, `Metamodel::Naming`, `Metamodel::Primitives`, + `Metamodel::PrivateMethodContainer`, `Metamodel::RoleContainer`, `Metamodel::RolePunning`, + `Metamodel::Stashing`, `Metamodel::Trusting`, `Metamodel::Versioning`, `Method`, `Mix`, + `MixHash`, `Mixy`, `Mu`, `NFC`, `NFD`, `NFKC`, `NFKD`, `Nil`, `Num`, `num32`, `num64`, + `Numeric`, `NumStr`, `ObjAt`, `Order`, `Pair`, `Parameter`, `Perl`, `Pod::Block`, + `Pod::Block::Code`, `Pod::Block::Comment`, `Pod::Block::Declarator`, `Pod::Block::Named`, + `Pod::Block::Para`, `Pod::Block::Table`, `Pod::Heading`, `Pod::Item`, `Pointer`, + `Positional`, `PositionalBindFailover`, `Proc`, `Proc::Async`, `Promise`, `Proxy`, + `PseudoStash`, `QuantHash`, `RaceSeq`, `Raku`, `Range`, `Rat`, `Rational`, `RatStr`, + `Real`, `Regex`, `Routine`, `Routine::WrapHandle`, `Scalar`, `Scheduler`, `Semaphore`, + `Seq`, `Sequence`, `Set`, `SetHash`, `Setty`, `Signature`, `size_t`, `Slip`, `Stash`, + `Str`, `StrDistance`, `Stringy`, `Sub`, `Submethod`, `Supplier`, `Supplier::Preserving`, + `Supply`, `Systemic`, `Tap`, `Telemetry`, `Telemetry::Instrument::Thread`, + `Telemetry::Instrument::ThreadPool`, 
`Telemetry::Instrument::Usage`, `Telemetry::Period`, + `Telemetry::Sampler`, `Thread`, `Test`, `ThreadPoolScheduler`, `UInt`, `uint16`, `uint32`, + `uint64`, `uint8`, `Uni`, `utf8`, `ValueObjAt`, `Variable`, `Version`, `VM`, `Whatever`, + `WhateverCode`, `WrapHandle`, `NativeCall`, + // Pragmas + `precompilation`, `experimental`, `worries`, `MONKEY-TYPING`, `MONKEY-SEE-NO-EVAL`, + `MONKEY-GUTS`, `fatal`, `lib`, `isms`, `newline`, `nqp`, `soft`, + `strict`, `trace`, `variables`, + } + + builtinTypesPattern := Words(`(? 0 { + if tokenClass == rakuPod { + match, err := podRegex.FindRunesMatchStartingAt(text, searchPos+nChars) + if err == nil { + closingChars = match.Runes() + nextClosePos = match.Index + } else { + nextClosePos = -1 + } + } else { + nextClosePos = indexAt(text, closingChars, searchPos+nChars) + } + + nextOpenPos := indexAt(text, openingChars, searchPos+nChars) + + switch { + case nextClosePos == -1: + nextClosePos = len(text) + nestingLevel = 0 + case nextOpenPos != -1 && nextOpenPos < nextClosePos: + nestingLevel++ + nChars = len(openingChars) + searchPos = nextOpenPos + default: // next_close_pos < next_open_pos + nestingLevel-- + nChars = len(closingChars) + searchPos = nextClosePos + } + } + + endPos = nextClosePos + } + + if endPos < 0 { + // if we didn't find a closer, just highlight the + // rest of the text in this class + endPos = len(text) + } + + adverbre := regexp.MustCompile(`:to\b|:heredoc\b`) + var heredocTerminator []rune + var endHeredocPos int + if adverbre.MatchString(string(adverbs)) { + if endPos != len(text) { + heredocTerminator = text[state.Pos:endPos] + nChars = len(heredocTerminator) + } else { + endPos = state.Pos + 1 + heredocTerminator = []rune{} + nChars = 0 + } + + if nChars > 0 { + endHeredocPos = indexAt(text[endPos:], heredocTerminator, 0) + if endHeredocPos > -1 { + endPos += endHeredocPos + } else { + endPos = len(text) + } + } + } + + textBetweenBrackets := string(text[state.Pos:endPos]) + switch tokenClass { + 
case rakuPod, rakuPodDeclaration, rakuNameAttribute: + state.NamedGroups[`value`] = textBetweenBrackets + state.NamedGroups[`closing_delimiters`] = string(closingChars) + case rakuQuote: + if len(heredocTerminator) > 0 { + // Length of heredoc terminator + closing chars + `;` + heredocFristPunctuationLen := nChars + len(openingChars) + 1 + + state.NamedGroups[`opening_delimiters`] = string(openingChars) + + string(text[state.Pos:state.Pos+heredocFristPunctuationLen]) + + state.NamedGroups[`value`] = + string(text[state.Pos+heredocFristPunctuationLen : endPos]) + + if endHeredocPos > -1 { + state.NamedGroups[`closing_delimiters`] = string(heredocTerminator) + } + } else { + state.NamedGroups[`value`] = textBetweenBrackets + if nChars > 0 { + state.NamedGroups[`closing_delimiters`] = string(closingChars) + } + } + default: + state.Groups = []string{state.Groups[0] + string(text[state.Pos:endPos+nChars])} + } + + state.Pos = endPos + nChars + + return nil + } + } + + // Raku rules + // Empty capture groups are placeholders and will be replaced by mutators + // DO NOT REMOVE THEM! + return Rules{ + "root": { + // Placeholder, will be overwritten by mutators, DO NOT REMOVE! + {`\A\z`, nil, nil}, + Include("common"), + {`{`, Punctuation, Push(`root`)}, + {`\(`, Punctuation, Push(`root`)}, + {`[)}]`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`\[|\]`, Operator, nil}, + {`.+?`, Text, nil}, + }, + "common": { + {`^#![^\n]*$`, CommentHashbang, nil}, + Include("pod"), + // Multi-line, Embedded comment + { + "#`(?(?" 
+ bracketsPattern + `)\k*)`, + CommentMultiline, + findBrackets(rakuMultilineComment), + }, + {`#[^\n]*$`, CommentSingle, nil}, + // /regex/ + { + `(?<=(?:^|\(|=|:|~~|\[|{|,|=>)\s*)(/)(?!\]|\))((?:\\\\|\\/|.)*?)((?>)(\S+?)(<<)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(»)(\S+?)(«)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // Hyperoperator | «*« + {`(<<)(\S+?)(<<)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(«)(\S+?)(«)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // Hyperoperator | »*» + {`(>>)(\S+?)(>>)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + {`(»)(\S+?)(»)`, ByGroups(Operator, UsingSelf("root"), Operator), nil}, + // <> + {`(?>)[^\n])+?[},;] *\n)(?!(?:(?!>>).)+?>>\S+?>>)`, Punctuation, Push("<<")}, + // «quoted words» + {`(? operators | something < onething > something + { + `(?<=[$@%&]?\w[\w':-]* +)(<=?)( *[^ ]+? *)(>=?)(?= *[$@%&]?\w[\w':-]*)`, + ByGroups(Operator, UsingSelf("root"), Operator), + nil, + }, + // + { + `(?])+?)(>)(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?\w[\w':-]*[^(]|\s+\[))`, + ByGroups(Punctuation, String, Punctuation), + nil, + }, + {`C?X::['\w:-]+`, NameException, nil}, + Include("metaoperator"), + // Pair | key => value + { + `(\w[\w'-]*)(\s*)(=>)`, + ByGroups(String, Text, Operator), + nil, + }, + Include("colon-pair"), + // Token + { + `(?<=(?:^|\s)(?:regex|token|rule)(\s+))` + namePattern + colonPairLookahead + `\s*[({])`, + NameFunction, + Push("token", "name-adverb"), + }, + // Substitution + {`(?<=^|\b|\s)(?(?:qq|q|Q))(?(?::?(?:heredoc|to|qq|ww|q|w|s|a|h|f|c|b|to|v|x))*)(?\s*)(?(?[^0-9a-zA-Z:\s])\k*)`, + EmitterFunc(quote), + findBrackets(rakuQuote), + }, + // Function + { + `\b` + namePattern + colonPairLookahead + `\()`, + NameFunction, + Push("name-adverb"), + }, + // Method + { + `(?(?[^\w:\s])\k*)`, + ByGroupNames( + map[string]Emitter{ + `opening_delimiters`: Punctuation, + `delimiter`: nil, + }, + ), + findBrackets(rakuMatchRegex), + }, + }, + 
"substitution": { + Include("colon-pair-attribute"), + // Substitution | s{regex} = value + { + `(?(?` + bracketsPattern + `)\k*)`, + ByGroupNames(map[string]Emitter{ + `opening_delimiters`: Punctuation, + `delimiter`: nil, + }), + findBrackets(rakuMatchRegex), + }, + // Substitution | s/regex/string/ + { + `(?[^\w:\s])`, + Punctuation, + findBrackets(rakuSubstitutionRegex), + }, + }, + "number": { + {`0_?[0-7]+(_[0-7]+)*`, LiteralNumberOct, nil}, + {`0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*`, LiteralNumberHex, nil}, + {`0b[01]+(_[01]+)*`, LiteralNumberBin, nil}, + { + `(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?`, + LiteralNumberFloat, + nil, + }, + {`(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*`, LiteralNumberFloat, nil}, + {`(?<=\d+)i`, NameConstant, nil}, + {`\d+(_\d+)*`, LiteralNumberInteger, nil}, + }, + "name-adverb": { + Include("colon-pair-attribute-keyvalue"), + Default(Pop(1)), + }, + "colon-pair": { + // :key(value) + {colonPairPattern, colonPair(String), findBrackets(rakuNameAttribute)}, + // :123abc + { + `(:)(\d+)(\w[\w'-]*)`, + ByGroups(Punctuation, UsingSelf("number"), String), + nil, + }, + // :key + {`(:)(!?)(\w[\w'-]*)`, ByGroups(Punctuation, Operator, String), nil}, + {`\s+`, Text, nil}, + }, + "colon-pair-attribute": { + // :key(value) + {colonPairPattern, colonPair(NameAttribute), findBrackets(rakuNameAttribute)}, + // :123abc + { + `(:)(\d+)(\w[\w'-]*)`, + ByGroups(Punctuation, UsingSelf("number"), NameAttribute), + nil, + }, + // :key + {`(:)(!?)(\w[\w'-]*)`, ByGroups(Punctuation, Operator, NameAttribute), nil}, + {`\s+`, Text, nil}, + }, + "colon-pair-attribute-keyvalue": { + // :key(value) + {colonPairPattern, colonPair(NameAttribute), findBrackets(rakuNameAttribute)}, + }, + "escape-qq": { + { + `(? 
+ { + `(?`), + tokenType: Punctuation, + stateName: `root`, + pushState: true, + }), + }, + // {code} + Include(`closure`), + // Properties + {`(:)(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, + // Operator + {`\|\||\||&&|&|\.\.|\*\*|%%|%|:|!|<<|«|>>|»|\+|\*\*|\*|\?|=|~|<~~>`, Operator, nil}, + // Anchors + {`\^\^|\^|\$\$|\$`, NameEntity, nil}, + {`\.`, NameEntity, nil}, + {`#[^\n]*\n`, CommentSingle, nil}, + // Lookaround + { + `(?`), + tokenType: Punctuation, + stateName: `regex`, + pushState: true, + }), + }, + { + `(?)`, + ByGroups(Punctuation, Operator, OperatorWord, Punctuation), + nil, + }, + // <$variable> + { + `(?)`, + ByGroups(Punctuation, Operator, NameVariable, Punctuation), + nil, + }, + // Capture markers + {`(?`, Operator, nil}, + { + `(? + {`(?`, Punctuation, Pop(1)}, + // + { + `\(`, + Punctuation, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`)>`), + tokenType: Punctuation, + stateName: `root`, + popState: true, + pushState: true, + }), + }, + // + { + `\s+`, + StringRegex, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`>`), + tokenType: Punctuation, + stateName: `regex`, + popState: true, + pushState: true, + }), + }, + // + { + `:`, + Punctuation, + replaceRule(ruleReplacingConfig{ + delimiter: []rune(`>`), + tokenType: Punctuation, + stateName: `root`, + popState: true, + pushState: true, + }), + }, + }, + "regex-variable": { + Include(`regex-starting-operators`), + // + {`(&)?(\w[\w':-]*)(>)`, ByGroups(Operator, NameFunction, Punctuation), Pop(1)}, + // `, Punctuation, Pop(1)}, + Include("regex-class-builtin"), + Include("variable"), + Include(`regex-starting-operators`), + Include("colon-pair-attribute"), + {`(?] 
+ { + `\b([RZX]+)\b(\[)([^\s\]]+?)(\])`, + ByGroups(OperatorWord, Punctuation, UsingSelf("root"), Punctuation), + nil, + }, + // Z=> + {`\b([RZX]+)\b([^\s\]]+)`, ByGroups(OperatorWord, UsingSelf("operator")), nil}, + }, + "operator": { + // Word Operator + {wordOperatorsPattern, OperatorWord, nil}, + // Operator + {operatorsPattern, Operator, nil}, + }, + "pod": { + // Single-line pod declaration + {`(#[|=])\s`, Keyword, Push("pod-single")}, + // Multi-line pod declaration + { + "(?#[|=])(?(?" + bracketsPattern + `)\k*)(?)(?)`, + ByGroupNames( + map[string]Emitter{ + `keyword`: Keyword, + `opening_delimiters`: Punctuation, + `delimiter`: nil, + `value`: UsingSelf("pod-declaration"), + `closing_delimiters`: Punctuation, + }), + findBrackets(rakuPodDeclaration), + }, + Include("pod-blocks"), + }, + "pod-blocks": { + // =begin code + { + `(?<=^ *)(? *)(?=begin)(? +)(?code)(?[^\n]*)(?.*?)(?^\k)(?=end)(? +)\k`, + EmitterFunc(podCode), + nil, + }, + // =begin + { + `(?<=^ *)(? *)(?=begin)(? +)(?!code)(?\w[\w'-]*)(?[^\n]*)(?)(?)`, + ByGroupNames( + map[string]Emitter{ + `ws`: Comment, + `keyword`: Keyword, + `ws2`: StringDoc, + `name`: Keyword, + `config`: EmitterFunc(podConfig), + `value`: UsingSelf("pod-begin"), + `closing_delimiters`: Keyword, + }), + findBrackets(rakuPod), + }, + // =for ... + { + `(?<=^ *)(? *)(?=(?:for|defn))(? +)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, EmitterFunc(podConfig)), + Push("pod-paragraph"), + }, + // =config + { + `(?<=^ *)(? *)(?=config)(? +)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, EmitterFunc(podConfig)), + nil, + }, + // =alias + { + `(?<=^ *)(? *)(?=alias)(? +)(?\w[\w'-]*)(?[^\n]*\n)`, + ByGroups(Comment, Keyword, StringDoc, Keyword, StringDoc), + nil, + }, + // =encoding + { + `(?<=^ *)(? *)(?=encoding)(? +)(?[^\n]+)`, + ByGroups(Comment, Keyword, StringDoc, Name), + nil, + }, + // =para ... + { + `(?<=^ *)(? *)(?=(?:para|table|pod))(?(? *)(?=head\d+)(? 
*)(?#?)`, + ByGroups(Comment, Keyword, GenericHeading, Keyword), + Push("pod-heading"), + }, + // =item ... + { + `(?<=^ *)(? *)(?=(?:item\d*|comment|data|[A-Z]+))(? *)(?#?)`, + ByGroups(Comment, Keyword, StringDoc, Keyword), + Push("pod-paragraph"), + }, + { + `(?<=^ *)(? *)(?=finish)(?[^\n]*)`, + ByGroups(Comment, Keyword, EmitterFunc(podConfig)), + Push("pod-finish"), + }, + // ={custom} ... + { + `(?<=^ *)(? *)(?=\w[\w'-]*)(? *)(?#?)`, + ByGroups(Comment, Name, StringDoc, Keyword), + Push("pod-paragraph"), + }, + // = podconfig + { + `(?<=^ *)(? *=)(? *)(?(?::\w[\w'-]*(?:` + colonPairOpeningBrackets + `.+?` + + colonPairClosingBrackets + `) *)*\n)`, + ByGroups(Keyword, StringDoc, EmitterFunc(podConfig)), + nil, + }, + }, + "pod-begin": { + Include("pod-blocks"), + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-declaration": { + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-paragraph": { + {`\n *\n|\n(?=^ *=)`, StringDoc, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-single": { + {`\n`, StringDoc, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pod-heading": { + {`\n *\n|\n(?=^ *=)`, GenericHeading, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, GenericHeading, nil}, + }, + "pod-finish": { + {`\z`, nil, Pop(1)}, + Include("pre-pod-formatter"), + {`.+?`, StringDoc, nil}, + }, + "pre-pod-formatter": { + // C, B, ... + { + `(?[CBIUDTKRPAELZVMSXN])(?<+|«)`, + ByGroups(Keyword, Punctuation), + findBrackets(rakuPodFormatter), + }, + }, + "pod-formatter": { + // Placeholder rule, will be replaced by mutators. DO NOT REMOVE! + {`>`, Punctuation, Pop(1)}, + Include("pre-pod-formatter"), + // Placeholder rule, will be replaced by mutators. DO NOT REMOVE! 
+ {`.+?`, StringOther, nil}, + }, + "variable": { + {variablePattern, NameVariable, Push("name-adverb")}, + {globalVariablePattern, NameVariableGlobal, Push("name-adverb")}, + {`[$@]<[^>]+>`, NameVariable, nil}, + {`\$[/!¢]`, NameVariable, nil}, + {`[$@%]`, NameVariable, nil}, + }, + "single-quote": { + {`(?>(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?[\w':-]+|\s+\[))`, Punctuation, Pop(1)}, + Include("ww"), + }, + "«": { + {`»(?!\s*(?:\d+|\.(?:Int|Numeric)|[$@%]\*?[\w':-]+|\s+\[))`, Punctuation, Pop(1)}, + Include("ww"), + }, + "ww": { + Include("single-quote"), + Include("qq"), + }, + "qq": { + Include("qq-variable"), + Include("closure"), + Include(`escape-char`), + Include("escape-hexadecimal"), + Include("escape-c-name"), + Include("escape-qq"), + {`.+?`, StringDouble, nil}, + }, + "qq-variable": { + { + `(?\.)(?` + namePattern + `)` + colonPairLookahead + `\()`, + ByGroupNames(map[string]Emitter{ + `operator`: Operator, + `method_name`: NameFunction, + }), + Push(`name-adverb`), + }, + // Function/Signature + { + `\(`, Punctuation, replaceRule( + ruleReplacingConfig{ + delimiter: []rune(`)`), + tokenType: Punctuation, + stateName: `root`, + pushState: true, + }), + }, + Default(Pop(1)), + }, + "Q": { + Include("escape-qq"), + {`.+?`, String, nil}, + }, + "Q-closure": { + Include("escape-qq"), + Include("closure"), + {`.+?`, String, nil}, + }, + "Q-variable": { + Include("escape-qq"), + Include("qq-variable"), + {`.+?`, String, nil}, + }, + "closure": { + {`(? 
-1 { + idx = utf8.RuneCountInString(text[:idx]) + + // Search again if the substr is escaped with backslash + if (idx > 1 && strFromPos[idx-1] == '\\' && strFromPos[idx-2] != '\\') || + (idx == 1 && strFromPos[idx-1] == '\\') { + idx = indexAt(str[pos:], substr, idx+1) + + idx = utf8.RuneCountInString(text[:idx]) + + if idx < 0 { + return idx + } + } + idx += pos + } + + return idx +} + +// Tells if an array of string contains a string +func contains(s []string, e string) bool { + for _, value := range s { + if value == e { + return true + } + } + return false +} + +type rulePosition int + +const ( + topRule rulePosition = 0 + bottomRule = -1 +) + +type ruleMakingConfig struct { + delimiter []rune + pattern string + tokenType Emitter + mutator Mutator + numberOfDelimiterChars int +} + +type ruleReplacingConfig struct { + delimiter []rune + pattern string + tokenType Emitter + numberOfDelimiterChars int + mutator Mutator + appendMutator Mutator + rulePosition rulePosition + stateName string + pop bool + popState bool + pushState bool +} + +// Pops rule from state-stack and replaces the rule with the previous rule +func popRule(rule ruleReplacingConfig) MutatorFunc { + return func(state *LexerState) error { + stackName := genStackName(rule.stateName, rule.rulePosition) + + stack, ok := state.Get(stackName).([]ruleReplacingConfig) + + if ok && len(stack) > 0 { + // Pop from stack + stack = stack[:len(stack)-1] + lastRule := stack[len(stack)-1] + lastRule.pushState = false + lastRule.popState = false + lastRule.pop = true + state.Set(stackName, stack) + + // Call replaceRule to use the last rule + err := replaceRule(lastRule)(state) + if err != nil { + panic(err) + } + } + + return nil + } +} + +// Replaces a state's rule based on the rule config and position +func replaceRule(rule ruleReplacingConfig) MutatorFunc { + return func(state *LexerState) error { + stateName := rule.stateName + stackName := genStackName(rule.stateName, rule.rulePosition) + + stack, ok := 
state.Get(stackName).([]ruleReplacingConfig) + if !ok { + stack = []ruleReplacingConfig{} + } + + // If state-stack is empty fill it with the placeholder rule + if len(stack) == 0 { + stack = []ruleReplacingConfig{ + { + // Placeholder, will be overwritten by mutators, DO NOT REMOVE! + pattern: `\A\z`, + tokenType: nil, + mutator: nil, + stateName: stateName, + rulePosition: rule.rulePosition, + }, + } + state.Set(stackName, stack) + } + + var mutator Mutator + mutators := []Mutator{} + + switch { + case rule.rulePosition == topRule && rule.mutator == nil: + // Default mutator for top rule + mutators = []Mutator{Pop(1), popRule(rule)} + case rule.rulePosition == topRule && rule.mutator != nil: + // Default mutator for top rule, when rule.mutator is set + mutators = []Mutator{rule.mutator, popRule(rule)} + case rule.mutator != nil: + mutators = []Mutator{rule.mutator} + } + + if rule.appendMutator != nil { + mutators = append(mutators, rule.appendMutator) + } + + if len(mutators) > 0 { + mutator = Mutators(mutators...) + } else { + mutator = nil + } + + ruleConfig := ruleMakingConfig{ + pattern: rule.pattern, + delimiter: rule.delimiter, + numberOfDelimiterChars: rule.numberOfDelimiterChars, + tokenType: rule.tokenType, + mutator: mutator, + } + + cRule := makeRule(ruleConfig) + + switch rule.rulePosition { + case topRule: + state.Rules[stateName][0] = cRule + case bottomRule: + state.Rules[stateName][len(state.Rules[stateName])-1] = cRule + } + + // Pop state name from stack if asked. 
State should be popped first before Pushing + if rule.popState { + err := Pop(1).Mutate(state) + if err != nil { + panic(err) + } + } + + // Push state name to stack if asked + if rule.pushState { + err := Push(stateName).Mutate(state) + if err != nil { + panic(err) + } + } + + if !rule.pop { + state.Set(stackName, append(stack, rule)) + } + + return nil + } +} + +// Generates rule replacing stack using state name and rule position +func genStackName(stateName string, rulePosition rulePosition) (stackName string) { + switch rulePosition { + case topRule: + stackName = stateName + `-top-stack` + case bottomRule: + stackName = stateName + `-bottom-stack` + } + return +} + +// Makes a compiled rule and returns it +func makeRule(config ruleMakingConfig) *CompiledRule { + var rePattern string + + if len(config.delimiter) > 0 { + delimiter := string(config.delimiter) + + if config.numberOfDelimiterChars > 1 { + delimiter = strings.Repeat(delimiter, config.numberOfDelimiterChars) + } + + rePattern = `(? 1 { + lang = langMatch[1] + } + + // Tokenise code based on lang property + sublexer := Get(lang) + if sublexer != nil { + iterator, err := sublexer.Tokenise(nil, state.NamedGroups[`value`]) + + if err != nil { + panic(err) + } else { + iterators = append(iterators, iterator) + } + } else { + iterators = append(iterators, Literator(tokens[4])) + } + + // Append the rest of the tokens + iterators = append(iterators, Literator(tokens[5:]...)) + + return Concaterator(iterators...) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/rst.go b/vendor/github.com/alecthomas/chroma/v2/lexers/rst.go new file mode 100644 index 0000000..66ec03c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/rst.go @@ -0,0 +1,89 @@ +package lexers + +import ( + "strings" + + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Restructuredtext lexer. 
+var Restructuredtext = Register(MustNewLexer( + &Config{ + Name: "reStructuredText", + Aliases: []string{"rst", "rest", "restructuredtext"}, + Filenames: []string{"*.rst", "*.rest"}, + MimeTypes: []string{"text/x-rst", "text/prs.fallenstein.rst"}, + }, + restructuredtextRules, +)) + +func restructuredtextRules() Rules { + return Rules{ + "root": { + {"^(=+|-+|`+|:+|\\.+|\\'+|\"+|~+|\\^+|_+|\\*+|\\++|#+)([ \\t]*\\n)(.+)(\\n)(\\1)(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text, GenericHeading, Text), nil}, + {"^(\\S.*)(\\n)(={3,}|-{3,}|`{3,}|:{3,}|\\.{3,}|\\'{3,}|\"{3,}|~{3,}|\\^{3,}|_{3,}|\\*{3,}|\\+{3,}|#{3,})(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text), nil}, + {`^(\s*)([-*+])( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\|)( .+\n(?:\| .+\n)*)`, ByGroups(Text, Operator, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)`, EmitterFunc(rstCodeBlock), nil}, + {`^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\[.+\])(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, NameTag, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^ 
*\.\..*(\n( +.*\n|\n)+)?`, CommentPreproc, nil}, + {`^( *)(:[a-zA-Z-]+:)(\s*)$`, ByGroups(Text, NameClass, Text), nil}, + {`^( *)(:.*?:)([ \t]+)(.*?)$`, ByGroups(Text, NameClass, Text, NameFunction), nil}, + {`^(\S.*(?)(`__?)", ByGroups(LiteralString, LiteralStringInterpol, LiteralString), nil}, + {"`.+?`__?", LiteralString, nil}, + {"(`.+?`)(:[a-zA-Z0-9:-]+?:)?", ByGroups(NameVariable, NameAttribute), nil}, + {"(:[a-zA-Z0-9:-]+?:)(`.+?`)", ByGroups(NameAttribute, NameVariable), nil}, + {`\*\*.+?\*\*`, GenericStrong, nil}, + {`\*.+?\*`, GenericEmph, nil}, + {`\[.*?\]_`, LiteralString, nil}, + {`<.+?>`, NameTag, nil}, + {"[^\\\\\\n\\[*`:]+", Text, nil}, + {`.`, Text, nil}, + }, + "literal": { + {"[^`]+", LiteralString, nil}, + {"``((?=$)|(?=[-/:.,; \\n\\x00\\\u2010\\\u2011\\\u2012\\\u2013\\\u2014\\\u00a0\\'\\\"\\)\\]\\}\\>\\\u2019\\\u201d\\\u00bb\\!\\?]))", LiteralString, Pop(1)}, + {"`", LiteralString, nil}, + }, + } +} + +func rstCodeBlock(groups []string, state *LexerState) Iterator { + iterators := []Iterator{} + tokens := []Token{ + {Punctuation, groups[1]}, + {Text, groups[2]}, + {OperatorWord, groups[3]}, + {Punctuation, groups[4]}, + {Text, groups[5]}, + {Keyword, groups[6]}, + {Text, groups[7]}, + } + code := strings.Join(groups[8:], "") + lexer := Get(groups[6]) + if lexer == nil { + tokens = append(tokens, Token{String, code}) + iterators = append(iterators, Literator(tokens...)) + } else { + sub, err := lexer.Tokenise(nil, code) + if err != nil { + panic(err) + } + iterators = append(iterators, Literator(tokens...), sub) + } + return Concaterator(iterators...) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/svelte.go b/vendor/github.com/alecthomas/chroma/v2/lexers/svelte.go new file mode 100644 index 0000000..39211c4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/lexers/svelte.go @@ -0,0 +1,70 @@ +package lexers + +import ( + . "github.com/alecthomas/chroma/v2" // nolint +) + +// Svelte lexer. 
+var Svelte = Register(DelegatingLexer(HTML, MustNewLexer( + &Config{ + Name: "Svelte", + Aliases: []string{"svelte"}, + Filenames: []string{"*.svelte"}, + MimeTypes: []string{"application/x-svelte"}, + DotAll: true, + }, + svelteRules, +))) + +func svelteRules() Rules { + return Rules{ + "root": { + // Let HTML handle the comments, including comments containing script and style tags + {``, Other, Pop(1)}, + {`.+?`, Other, nil}, + }, + "templates": { + {`}`, Punctuation, Pop(1)}, + // Let TypeScript handle strings and the curly braces inside them + {`(?]*>`, Using("TypoScriptHTMLData"), nil}, + {`&[^;\n]*;`, LiteralString, nil}, + {`(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))`, ByGroups(NameClass, Text, LiteralStringSymbol, Using("TypoScriptCSSData")), nil}, + }, + "literal": { + {`0x[0-9A-Fa-f]+t?`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`(###\w+###)`, NameConstant, nil}, + }, + "label": { + {`(EXT|FILE|LLL):[^}\n"]*`, LiteralString, nil}, + {`(?![^\w\-])([\w\-]+(?:/[\w\-]+)+/?)(\S*\n)`, ByGroups(LiteralString, LiteralString), nil}, + }, + "punctuation": { + {`[,.]`, Punctuation, nil}, + }, + "operator": { + {`[<>,:=.*%+|]`, Operator, nil}, + }, + "structure": { + {`[{}()\[\]\\]`, LiteralStringSymbol, nil}, + }, + "constant": { + {`(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})`, ByGroups(LiteralStringSymbol, Operator, NameConstant, NameConstant, LiteralStringSymbol), nil}, + {`(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})`, ByGroups(LiteralStringSymbol, NameConstant, Operator, NameConstant, LiteralStringSymbol), nil}, + {`(#[a-fA-F0-9]{6}\b|#[a-fA-F0-9]{3}\b)`, LiteralStringChar, nil}, + }, + "comment": { + {`(? 
lexers.txt + +kotlin: + invalid unicode escape sequences + FIXED: Have to disable wide Unicode characters in unistring.py + +pygments.lexers.ambient.AmbientTalkLexer +pygments.lexers.ampl.AmplLexer +pygments.lexers.actionscript.ActionScriptLexer +pygments.lexers.actionscript.ActionScript3Lexer +pygments.lexers.actionscript.MxmlLexer +pygments.lexers.algebra.GAPLexer +pygments.lexers.algebra.MathematicaLexer +pygments.lexers.algebra.MuPADLexer +pygments.lexers.algebra.BCLexer +pygments.lexers.apl.APLLexer +pygments.lexers.bibtex.BibTeXLexer +pygments.lexers.bibtex.BSTLexer +pygments.lexers.basic.BlitzMaxLexer +pygments.lexers.basic.BlitzBasicLexer +pygments.lexers.basic.MonkeyLexer +pygments.lexers.basic.CbmBasicV2Lexer +pygments.lexers.basic.QBasicLexer +pygments.lexers.automation.AutohotkeyLexer +pygments.lexers.automation.AutoItLexer +pygments.lexers.archetype.AtomsLexer +pygments.lexers.c_like.ClayLexer +pygments.lexers.c_like.ValaLexer +pygments.lexers.asm.GasLexer +pygments.lexers.asm.ObjdumpLexer +pygments.lexers.asm.HsailLexer +pygments.lexers.asm.LlvmLexer +pygments.lexers.asm.NasmLexer +pygments.lexers.asm.TasmLexer +pygments.lexers.asm.Ca65Lexer +pygments.lexers.business.CobolLexer +pygments.lexers.business.ABAPLexer +pygments.lexers.business.OpenEdgeLexer +pygments.lexers.business.GoodDataCLLexer +pygments.lexers.business.MaqlLexer +pygments.lexers.capnproto.CapnProtoLexer +pygments.lexers.chapel.ChapelLexer +pygments.lexers.clean.CleanLexer +pygments.lexers.c_cpp.CFamilyLexer +pygments.lexers.console.VCTreeStatusLexer +pygments.lexers.console.PyPyLogLexer +pygments.lexers.csound.CsoundLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.crystal.CrystalLexer +pygments.lexers.dalvik.SmaliLexer +pygments.lexers.css.CssLexer +pygments.lexers.css.SassLexer +pygments.lexers.css.ScssLexer +pygments.lexers.configs.IniLexer +pygments.lexers.configs.RegeditLexer +pygments.lexers.configs.PropertiesLexer 
+pygments.lexers.configs.KconfigLexer +pygments.lexers.configs.Cfengine3Lexer +pygments.lexers.configs.ApacheConfLexer +pygments.lexers.configs.SquidConfLexer +pygments.lexers.configs.NginxConfLexer +pygments.lexers.configs.LighttpdConfLexer +pygments.lexers.configs.DockerLexer +pygments.lexers.configs.TerraformLexer +pygments.lexers.configs.TermcapLexer +pygments.lexers.configs.TerminfoLexer +pygments.lexers.configs.PkgConfigLexer +pygments.lexers.configs.PacmanConfLexer +pygments.lexers.data.YamlLexer +pygments.lexers.data.JsonLexer +pygments.lexers.diff.DiffLexer +pygments.lexers.diff.DarcsPatchLexer +pygments.lexers.diff.WDiffLexer +pygments.lexers.dotnet.CSharpLexer +pygments.lexers.dotnet.NemerleLexer +pygments.lexers.dotnet.BooLexer +pygments.lexers.dotnet.VbNetLexer +pygments.lexers.dotnet.GenericAspxLexer +pygments.lexers.dotnet.FSharpLexer +pygments.lexers.dylan.DylanLexer +pygments.lexers.dylan.DylanLidLexer +pygments.lexers.ecl.ECLLexer +pygments.lexers.eiffel.EiffelLexer +pygments.lexers.dsls.ProtoBufLexer +pygments.lexers.dsls.ThriftLexer +pygments.lexers.dsls.BroLexer +pygments.lexers.dsls.PuppetLexer +pygments.lexers.dsls.RslLexer +pygments.lexers.dsls.MscgenLexer +pygments.lexers.dsls.VGLLexer +pygments.lexers.dsls.AlloyLexer +pygments.lexers.dsls.PanLexer +pygments.lexers.dsls.CrmshLexer +pygments.lexers.dsls.FlatlineLexer +pygments.lexers.dsls.SnowballLexer +pygments.lexers.elm.ElmLexer +pygments.lexers.erlang.ErlangLexer +pygments.lexers.erlang.ElixirLexer +pygments.lexers.ezhil.EzhilLexer +pygments.lexers.esoteric.BrainfuckLexer +pygments.lexers.esoteric.BefungeLexer +pygments.lexers.esoteric.CAmkESLexer +pygments.lexers.esoteric.CapDLLexer +pygments.lexers.esoteric.RedcodeLexer +pygments.lexers.esoteric.AheuiLexer +pygments.lexers.factor.FactorLexer +pygments.lexers.fantom.FantomLexer +pygments.lexers.felix.FelixLexer +pygments.lexers.forth.ForthLexer +pygments.lexers.fortran.FortranLexer +pygments.lexers.fortran.FortranFixedLexer 
+pygments.lexers.go.GoLexer +pygments.lexers.foxpro.FoxProLexer +pygments.lexers.graph.CypherLexer +pygments.lexers.grammar_notation.BnfLexer +pygments.lexers.grammar_notation.AbnfLexer +pygments.lexers.grammar_notation.JsgfLexer +pygments.lexers.graphics.GLShaderLexer +pygments.lexers.graphics.PostScriptLexer +pygments.lexers.graphics.AsymptoteLexer +pygments.lexers.graphics.GnuplotLexer +pygments.lexers.graphics.PovrayLexer +pygments.lexers.hexdump.HexdumpLexer +pygments.lexers.haskell.HaskellLexer +pygments.lexers.haskell.IdrisLexer +pygments.lexers.haskell.AgdaLexer +pygments.lexers.haskell.CryptolLexer +pygments.lexers.haskell.KokaLexer +pygments.lexers.haxe.HaxeLexer +pygments.lexers.haxe.HxmlLexer +pygments.lexers.hdl.VerilogLexer +pygments.lexers.hdl.SystemVerilogLexer +pygments.lexers.hdl.VhdlLexer +pygments.lexers.idl.IDLLexer +pygments.lexers.inferno.LimboLexer +pygments.lexers.igor.IgorLexer +pygments.lexers.html.HtmlLexer +pygments.lexers.html.DtdLexer +pygments.lexers.html.XmlLexer +pygments.lexers.html.HamlLexer +pygments.lexers.html.ScamlLexer +pygments.lexers.html.PugLexer +pygments.lexers.installers.NSISLexer +pygments.lexers.installers.RPMSpecLexer +pygments.lexers.installers.SourcesListLexer +pygments.lexers.installers.DebianControlLexer +pygments.lexers.iolang.IoLexer +pygments.lexers.julia.JuliaLexer +pygments.lexers.int_fiction.Inform6Lexer +pygments.lexers.int_fiction.Inform7Lexer +pygments.lexers.int_fiction.Tads3Lexer +pygments.lexers.make.BaseMakefileLexer +pygments.lexers.make.CMakeLexer +pygments.lexers.javascript.JavascriptLexer +pygments.lexers.javascript.KalLexer +pygments.lexers.javascript.LiveScriptLexer +pygments.lexers.javascript.DartLexer +pygments.lexers.javascript.TypeScriptLexer +pygments.lexers.javascript.LassoLexer +pygments.lexers.javascript.ObjectiveJLexer +pygments.lexers.javascript.CoffeeScriptLexer +pygments.lexers.javascript.MaskLexer +pygments.lexers.javascript.EarlGreyLexer +pygments.lexers.javascript.JuttleLexer 
+pygments.lexers.jvm.JavaLexer +pygments.lexers.jvm.ScalaLexer +pygments.lexers.jvm.GosuLexer +pygments.lexers.jvm.GroovyLexer +pygments.lexers.jvm.IokeLexer +pygments.lexers.jvm.ClojureLexer +pygments.lexers.jvm.TeaLangLexer +pygments.lexers.jvm.CeylonLexer +pygments.lexers.jvm.KotlinLexer +pygments.lexers.jvm.XtendLexer +pygments.lexers.jvm.PigLexer +pygments.lexers.jvm.GoloLexer +pygments.lexers.jvm.JasminLexer +pygments.lexers.markup.BBCodeLexer +pygments.lexers.markup.MoinWikiLexer +pygments.lexers.markup.RstLexer +pygments.lexers.markup.TexLexer +pygments.lexers.markup.GroffLexer +pygments.lexers.markup.MozPreprocHashLexer +pygments.lexers.markup.MarkdownLexer +pygments.lexers.ml.SMLLexer +pygments.lexers.ml.OcamlLexer +pygments.lexers.ml.OpaLexer +pygments.lexers.modeling.ModelicaLexer +pygments.lexers.modeling.BugsLexer +pygments.lexers.modeling.JagsLexer +pygments.lexers.modeling.StanLexer +pygments.lexers.matlab.MatlabLexer +pygments.lexers.matlab.OctaveLexer +pygments.lexers.matlab.ScilabLexer +pygments.lexers.monte.MonteLexer +pygments.lexers.lisp.SchemeLexer +pygments.lexers.lisp.CommonLispLexer +pygments.lexers.lisp.HyLexer +pygments.lexers.lisp.RacketLexer +pygments.lexers.lisp.NewLispLexer +pygments.lexers.lisp.EmacsLispLexer +pygments.lexers.lisp.ShenLexer +pygments.lexers.lisp.XtlangLexer +pygments.lexers.modula2.Modula2Lexer +pygments.lexers.ncl.NCLLexer +pygments.lexers.nim.NimLexer +pygments.lexers.nit.NitLexer +pygments.lexers.nix.NixLexer +pygments.lexers.oberon.ComponentPascalLexer +pygments.lexers.ooc.OocLexer +pygments.lexers.objective.SwiftLexer +pygments.lexers.parasail.ParaSailLexer +pygments.lexers.pawn.SourcePawnLexer +pygments.lexers.pawn.PawnLexer +pygments.lexers.pascal.AdaLexer +pygments.lexers.parsers.RagelLexer +pygments.lexers.parsers.RagelEmbeddedLexer +pygments.lexers.parsers.AntlrLexer +pygments.lexers.parsers.TreetopBaseLexer +pygments.lexers.parsers.EbnfLexer +pygments.lexers.php.ZephirLexer +pygments.lexers.php.PhpLexer 
+pygments.lexers.perl.PerlLexer +pygments.lexers.perl.Perl6Lexer +pygments.lexers.praat.PraatLexer +pygments.lexers.prolog.PrologLexer +pygments.lexers.prolog.LogtalkLexer +pygments.lexers.qvt.QVToLexer +pygments.lexers.rdf.SparqlLexer +pygments.lexers.rdf.TurtleLexer +pygments.lexers.python.PythonLexer +pygments.lexers.python.Python3Lexer +pygments.lexers.python.PythonTracebackLexer +pygments.lexers.python.Python3TracebackLexer +pygments.lexers.python.CythonLexer +pygments.lexers.python.DgLexer +pygments.lexers.rebol.RebolLexer +pygments.lexers.rebol.RedLexer +pygments.lexers.resource.ResourceLexer +pygments.lexers.rnc.RNCCompactLexer +pygments.lexers.roboconf.RoboconfGraphLexer +pygments.lexers.roboconf.RoboconfInstancesLexer +pygments.lexers.rust.RustLexer +pygments.lexers.ruby.RubyLexer +pygments.lexers.ruby.FancyLexer +pygments.lexers.sas.SASLexer +pygments.lexers.smalltalk.SmalltalkLexer +pygments.lexers.smalltalk.NewspeakLexer +pygments.lexers.smv.NuSMVLexer +pygments.lexers.shell.BashLexer +pygments.lexers.shell.BatchLexer +pygments.lexers.shell.TcshLexer +pygments.lexers.shell.PowerShellLexer +pygments.lexers.shell.FishShellLexer +pygments.lexers.snobol.SnobolLexer +pygments.lexers.scripting.LuaLexer +pygments.lexers.scripting.ChaiscriptLexer +pygments.lexers.scripting.LSLLexer +pygments.lexers.scripting.AppleScriptLexer +pygments.lexers.scripting.RexxLexer +pygments.lexers.scripting.MOOCodeLexer +pygments.lexers.scripting.HybrisLexer +pygments.lexers.scripting.EasytrieveLexer +pygments.lexers.scripting.JclLexer +pygments.lexers.supercollider.SuperColliderLexer +pygments.lexers.stata.StataLexer +pygments.lexers.tcl.TclLexer +pygments.lexers.sql.PostgresLexer +pygments.lexers.sql.PlPgsqlLexer +pygments.lexers.sql.PsqlRegexLexer +pygments.lexers.sql.SqlLexer +pygments.lexers.sql.TransactSqlLexer +pygments.lexers.sql.MySqlLexer +pygments.lexers.sql.RqlLexer +pygments.lexers.testing.GherkinLexer +pygments.lexers.testing.TAPLexer 
+pygments.lexers.textedit.AwkLexer +pygments.lexers.textedit.VimLexer +pygments.lexers.textfmts.IrcLogsLexer +pygments.lexers.textfmts.GettextLexer +pygments.lexers.textfmts.HttpLexer +pygments.lexers.textfmts.TodotxtLexer +pygments.lexers.trafficscript.RtsLexer +pygments.lexers.theorem.CoqLexer +pygments.lexers.theorem.IsabelleLexer +pygments.lexers.theorem.LeanLexer +pygments.lexers.templates.SmartyLexer +pygments.lexers.templates.VelocityLexer +pygments.lexers.templates.DjangoLexer +pygments.lexers.templates.MyghtyLexer +pygments.lexers.templates.MasonLexer +pygments.lexers.templates.MakoLexer +pygments.lexers.templates.CheetahLexer +pygments.lexers.templates.GenshiTextLexer +pygments.lexers.templates.GenshiMarkupLexer +pygments.lexers.templates.JspRootLexer +pygments.lexers.templates.EvoqueLexer +pygments.lexers.templates.ColdfusionLexer +pygments.lexers.templates.ColdfusionMarkupLexer +pygments.lexers.templates.TeaTemplateRootLexer +pygments.lexers.templates.HandlebarsLexer +pygments.lexers.templates.LiquidLexer +pygments.lexers.templates.TwigLexer +pygments.lexers.templates.Angular2Lexer +pygments.lexers.urbi.UrbiscriptLexer +pygments.lexers.typoscript.TypoScriptCssDataLexer +pygments.lexers.typoscript.TypoScriptHtmlDataLexer +pygments.lexers.typoscript.TypoScriptLexer +pygments.lexers.varnish.VCLLexer +pygments.lexers.verification.BoogieLexer +pygments.lexers.verification.SilverLexer +pygments.lexers.x10.X10Lexer +pygments.lexers.whiley.WhileyLexer +pygments.lexers.xorg.XorgLexer +pygments.lexers.webmisc.DuelLexer +pygments.lexers.webmisc.XQueryLexer +pygments.lexers.webmisc.QmlLexer +pygments.lexers.webmisc.CirruLexer +pygments.lexers.webmisc.SlimLexer diff --git a/vendor/github.com/alecthomas/chroma/v2/regexp.go b/vendor/github.com/alecthomas/chroma/v2/regexp.go new file mode 100644 index 0000000..0dcb077 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/regexp.go @@ -0,0 +1,483 @@ +package chroma + +import ( + "fmt" + "os" + "path/filepath" + 
"regexp" + "sort" + "strings" + "sync" + "time" + "unicode/utf8" + + "github.com/dlclark/regexp2" +) + +// A Rule is the fundamental matching unit of the Regex lexer state machine. +type Rule struct { + Pattern string + Type Emitter + Mutator Mutator +} + +// Words creates a regex that matches any of the given literal words. +func Words(prefix, suffix string, words ...string) string { + sort.Slice(words, func(i, j int) bool { + return len(words[j]) < len(words[i]) + }) + for i, word := range words { + words[i] = regexp.QuoteMeta(word) + } + return prefix + `(` + strings.Join(words, `|`) + `)` + suffix +} + +// Tokenise text using lexer, returning tokens as a slice. +func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error) { + var out []Token + it, err := lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + for t := it(); t != EOF; t = it() { + out = append(out, t) + } + return out, nil +} + +// Rules maps from state to a sequence of Rules. +type Rules map[string][]Rule + +// Rename clones rules then a rule. +func (r Rules) Rename(oldRule, newRule string) Rules { + r = r.Clone() + r[newRule] = r[oldRule] + delete(r, oldRule) + return r +} + +// Clone returns a clone of the Rules. +func (r Rules) Clone() Rules { + out := map[string][]Rule{} + for key, rules := range r { + out[key] = make([]Rule, len(rules)) + copy(out[key], rules) + } + return out +} + +// Merge creates a clone of "r" then merges "rules" into the clone. +func (r Rules) Merge(rules Rules) Rules { + out := r.Clone() + for k, v := range rules.Clone() { + out[k] = v + } + return out +} + +// MustNewLexer creates a new Lexer with deferred rules generation or panics. +func MustNewLexer(config *Config, rules func() Rules) *RegexLexer { + lexer, err := NewLexer(config, rules) + if err != nil { + panic(err) + } + return lexer +} + +// NewLexer creates a new regex-based Lexer. +// +// "rules" is a state machine transition map. Each key is a state. 
Values are sets of rules +// that match input, optionally modify lexer state, and output tokens. +func NewLexer(config *Config, rulesFunc func() Rules) (*RegexLexer, error) { + if config == nil { + config = &Config{} + } + for _, glob := range append(config.Filenames, config.AliasFilenames...) { + _, err := filepath.Match(glob, "") + if err != nil { + return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err) + } + } + r := &RegexLexer{ + config: config, + fetchRulesFunc: func() (Rules, error) { return rulesFunc(), nil }, + } + // One-off code to generate XML lexers in the Chroma source tree. + // var nameCleanRe = regexp.MustCompile(`[^-+A-Za-z0-9_]`) + // name := strings.ToLower(nameCleanRe.ReplaceAllString(config.Name, "_")) + // data, err := Marshal(r) + // if err != nil { + // if errors.Is(err, ErrNotSerialisable) { + // fmt.Fprintf(os.Stderr, "warning: %q: %s\n", name, err) + // return r, nil + // } + // return nil, err + // } + // _, file, _, ok := runtime.Caller(2) + // if !ok { + // panic("??") + // } + // fmt.Println(file) + // if strings.Contains(file, "/lexers/") { + // dir := filepath.Join(filepath.Dir(file), "embedded") + // err = os.MkdirAll(dir, 0700) + // if err != nil { + // return nil, err + // } + // filename := filepath.Join(dir, name) + ".xml" + // fmt.Println(filename) + // err = ioutil.WriteFile(filename, data, 0600) + // if err != nil { + // return nil, err + // } + // } + return r, nil +} + +// Trace enables debug tracing. +func (r *RegexLexer) Trace(trace bool) *RegexLexer { + r.trace = trace + return r +} + +// A CompiledRule is a Rule with a pre-compiled regex. +// +// Note that regular expressions are lazily compiled on first use of the lexer. +type CompiledRule struct { + Rule + Regexp *regexp2.Regexp + flags string +} + +// CompiledRules is a map of rule name to sequence of compiled rules in that rule. +type CompiledRules map[string][]*CompiledRule + +// LexerState contains the state for a single lex. 
+type LexerState struct { + Lexer *RegexLexer + Registry *LexerRegistry + Text []rune + Pos int + Rules CompiledRules + Stack []string + State string + Rule int + // Group matches. + Groups []string + // Named Group matches. + NamedGroups map[string]string + // Custum context for mutators. + MutatorContext map[interface{}]interface{} + iteratorStack []Iterator + options *TokeniseOptions + newlineAdded bool +} + +// Set mutator context. +func (l *LexerState) Set(key interface{}, value interface{}) { + l.MutatorContext[key] = value +} + +// Get mutator context. +func (l *LexerState) Get(key interface{}) interface{} { + return l.MutatorContext[key] +} + +// Iterator returns the next Token from the lexer. +func (l *LexerState) Iterator() Token { // nolint: gocognit + end := len(l.Text) + if l.newlineAdded { + end-- + } + for l.Pos < end && len(l.Stack) > 0 { + // Exhaust the iterator stack, if any. + for len(l.iteratorStack) > 0 { + n := len(l.iteratorStack) - 1 + t := l.iteratorStack[n]() + if t == EOF { + l.iteratorStack = l.iteratorStack[:n] + continue + } + return t + } + + l.State = l.Stack[len(l.Stack)-1] + if l.Lexer.trace { + fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q\n", l.State, l.Pos, string(l.Text[l.Pos:])) + } + selectedRule, ok := l.Rules[l.State] + if !ok { + panic("unknown state " + l.State) + } + ruleIndex, rule, groups, namedGroups := matchRules(l.Text, l.Pos, selectedRule) + // No match. + if groups == nil { + // From Pygments :\ + // + // If the RegexLexer encounters a newline that is flagged as an error token, the stack is + // emptied and the lexer continues scanning in the 'root' state. This can help producing + // error-tolerant highlighting for erroneous input, e.g. when a single-line string is not + // closed. 
+ if l.Text[l.Pos] == '\n' && l.State != l.options.State { + l.Stack = []string{l.options.State} + continue + } + l.Pos++ + return Token{Error, string(l.Text[l.Pos-1 : l.Pos])} + } + l.Rule = ruleIndex + l.Groups = groups + l.NamedGroups = namedGroups + l.Pos += utf8.RuneCountInString(groups[0]) + if rule.Mutator != nil { + if err := rule.Mutator.Mutate(l); err != nil { + panic(err) + } + } + if rule.Type != nil { + l.iteratorStack = append(l.iteratorStack, rule.Type.Emit(l.Groups, l)) + } + } + // Exhaust the IteratorStack, if any. + // Duplicate code, but eh. + for len(l.iteratorStack) > 0 { + n := len(l.iteratorStack) - 1 + t := l.iteratorStack[n]() + if t == EOF { + l.iteratorStack = l.iteratorStack[:n] + continue + } + return t + } + + // If we get to here and we still have text, return it as an error. + if l.Pos != len(l.Text) && len(l.Stack) == 0 { + value := string(l.Text[l.Pos:]) + l.Pos = len(l.Text) + return Token{Type: Error, Value: value} + } + return EOF +} + +// RegexLexer is the default lexer implementation used in Chroma. +type RegexLexer struct { + registry *LexerRegistry // The LexerRegistry this Lexer is associated with, if any. + config *Config + analyser func(text string) float32 + trace bool + + mu sync.Mutex + compiled bool + rawRules Rules + rules map[string][]*CompiledRule + fetchRulesFunc func() (Rules, error) + compileOnce sync.Once +} + +func (r *RegexLexer) String() string { + return r.config.Name +} + +// Rules in the Lexer. +func (r *RegexLexer) Rules() (Rules, error) { + if err := r.needRules(); err != nil { + return nil, err + } + return r.rawRules, nil +} + +// SetRegistry the lexer will use to lookup other lexers if necessary. +func (r *RegexLexer) SetRegistry(registry *LexerRegistry) Lexer { + r.registry = registry + return r +} + +// SetAnalyser sets the analyser function used to perform content inspection. 
+func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) Lexer { + r.analyser = analyser + return r +} + +// AnalyseText scores how likely a fragment of text is to match this lexer, between 0.0 and 1.0. +func (r *RegexLexer) AnalyseText(text string) float32 { + if r.analyser != nil { + return r.analyser(text) + } + return 0 +} + +// SetConfig replaces the Config for this Lexer. +func (r *RegexLexer) SetConfig(config *Config) *RegexLexer { + r.config = config + return r +} + +// Config returns the Config for this Lexer. +func (r *RegexLexer) Config() *Config { + return r.config +} + +// Regex compilation is deferred until the lexer is used. This is to avoid significant init() time costs. +func (r *RegexLexer) maybeCompile() (err error) { + r.mu.Lock() + defer r.mu.Unlock() + if r.compiled { + return nil + } + for state, rules := range r.rules { + for i, rule := range rules { + if rule.Regexp == nil { + pattern := "(?:" + rule.Pattern + ")" + if rule.flags != "" { + pattern = "(?" + rule.flags + ")" + pattern + } + pattern = `\G` + pattern + rule.Regexp, err = regexp2.Compile(pattern, 0) + if err != nil { + return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err) + } + rule.Regexp.MatchTimeout = time.Millisecond * 250 + } + } + } +restart: + seen := map[LexerMutator]bool{} + for state := range r.rules { + for i := 0; i < len(r.rules[state]); i++ { + rule := r.rules[state][i] + if compile, ok := rule.Mutator.(LexerMutator); ok { + if seen[compile] { + return fmt.Errorf("saw mutator %T twice; this should not happen", compile) + } + seen[compile] = true + if err := compile.MutateLexer(r.rules, state, i); err != nil { + return err + } + // Process the rules again in case the mutator added/removed rules. + // + // This sounds bad, but shouldn't be significant in practice. 
+ goto restart + } + } + } + r.compiled = true + return nil +} + +func (r *RegexLexer) fetchRules() error { + rules, err := r.fetchRulesFunc() + if err != nil { + return fmt.Errorf("%s: failed to compile rules: %w", r.config.Name, err) + } + if _, ok := rules["root"]; !ok { + return fmt.Errorf("no \"root\" state") + } + compiledRules := map[string][]*CompiledRule{} + for state, rules := range rules { + compiledRules[state] = nil + for _, rule := range rules { + flags := "" + if !r.config.NotMultiline { + flags += "m" + } + if r.config.CaseInsensitive { + flags += "i" + } + if r.config.DotAll { + flags += "s" + } + compiledRules[state] = append(compiledRules[state], &CompiledRule{Rule: rule, flags: flags}) + } + } + + r.rawRules = rules + r.rules = compiledRules + return nil +} + +func (r *RegexLexer) needRules() error { + var err error + if r.fetchRulesFunc != nil { + r.compileOnce.Do(func() { + err = r.fetchRules() + }) + } + if err := r.maybeCompile(); err != nil { + return err + } + return err +} + +// Tokenise text using lexer, returning an iterator. +func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + err := r.needRules() + if err != nil { + return nil, err + } + if options == nil { + options = defaultOptions + } + if options.EnsureLF { + text = ensureLF(text) + } + newlineAdded := false + if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") { + text += "\n" + newlineAdded = true + } + state := &LexerState{ + Registry: r.registry, + newlineAdded: newlineAdded, + options: options, + Lexer: r, + Text: []rune(text), + Stack: []string{options.State}, + Rules: r.rules, + MutatorContext: map[interface{}]interface{}{}, + } + return state.Iterator, nil +} + +// MustRules is like Rules() but will panic on error. 
+func (r *RegexLexer) MustRules() Rules { + rules, err := r.Rules() + if err != nil { + panic(err) + } + return rules +} + +func matchRules(text []rune, pos int, rules []*CompiledRule) (int, *CompiledRule, []string, map[string]string) { + for i, rule := range rules { + match, err := rule.Regexp.FindRunesMatchStartingAt(text, pos) + if match != nil && err == nil && match.Index == pos { + groups := []string{} + namedGroups := make(map[string]string) + for _, g := range match.Groups() { + namedGroups[g.Name] = g.String() + groups = append(groups, g.String()) + } + return i, rule, groups, namedGroups + } + } + return 0, &CompiledRule{}, nil, nil +} + +// replace \r and \r\n with \n +// same as strings.ReplaceAll but more efficient +func ensureLF(text string) string { + buf := make([]byte, len(text)) + var j int + for i := 0; i < len(text); i++ { + c := text[i] + if c == '\r' { + if i < len(text)-1 && text[i+1] == '\n' { + continue + } + c = '\n' + } + buf[j] = c + j++ + } + return string(buf[:j]) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/registry.go b/vendor/github.com/alecthomas/chroma/v2/registry.go new file mode 100644 index 0000000..4742e8c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/registry.go @@ -0,0 +1,210 @@ +package chroma + +import ( + "path/filepath" + "sort" + "strings" +) + +var ( + ignoredSuffixes = [...]string{ + // Editor backups + "~", ".bak", ".old", ".orig", + // Debian and derivatives apt/dpkg/ucf backups + ".dpkg-dist", ".dpkg-old", ".ucf-dist", ".ucf-new", ".ucf-old", + // Red Hat and derivatives rpm backups + ".rpmnew", ".rpmorig", ".rpmsave", + // Build system input/template files + ".in", + } +) + +// LexerRegistry is a registry of Lexers. +type LexerRegistry struct { + Lexers Lexers + byName map[string]Lexer + byAlias map[string]Lexer +} + +// NewLexerRegistry creates a new LexerRegistry of Lexers. 
+func NewLexerRegistry() *LexerRegistry { + return &LexerRegistry{ + byName: map[string]Lexer{}, + byAlias: map[string]Lexer{}, + } +} + +// Names of all lexers, optionally including aliases. +func (l *LexerRegistry) Names(withAliases bool) []string { + out := []string{} + for _, lexer := range l.Lexers { + config := lexer.Config() + out = append(out, config.Name) + if withAliases { + out = append(out, config.Aliases...) + } + } + sort.Strings(out) + return out +} + +// Get a Lexer by name, alias or file extension. +func (l *LexerRegistry) Get(name string) Lexer { + if lexer := l.byName[name]; lexer != nil { + return lexer + } + if lexer := l.byAlias[name]; lexer != nil { + return lexer + } + if lexer := l.byName[strings.ToLower(name)]; lexer != nil { + return lexer + } + if lexer := l.byAlias[strings.ToLower(name)]; lexer != nil { + return lexer + } + + candidates := PrioritisedLexers{} + // Try file extension. + if lexer := l.Match("filename." + name); lexer != nil { + candidates = append(candidates, lexer) + } + // Try exact filename. + if lexer := l.Match(name); lexer != nil { + candidates = append(candidates, lexer) + } + if len(candidates) == 0 { + return nil + } + sort.Sort(candidates) + return candidates[0] +} + +// MatchMimeType attempts to find a lexer for the given MIME type. +func (l *LexerRegistry) MatchMimeType(mimeType string) Lexer { + matched := PrioritisedLexers{} + for _, l := range l.Lexers { + for _, lmt := range l.Config().MimeTypes { + if mimeType == lmt { + matched = append(matched, l) + } + } + } + if len(matched) != 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Match returns the first lexer matching filename. +// +// Note that this iterates over all file patterns in all lexers, so is not fast. +func (l *LexerRegistry) Match(filename string) Lexer { + filename = filepath.Base(filename) + matched := PrioritisedLexers{} + // First, try primary filename matches. 
+ for _, lexer := range l.Lexers { + config := lexer.Config() + for _, glob := range config.Filenames { + ok, err := filepath.Match(glob, filename) + if err != nil { // nolint + panic(err) + } else if ok { + matched = append(matched, lexer) + } else { + for _, suf := range &ignoredSuffixes { + ok, err := filepath.Match(glob+suf, filename) + if err != nil { + panic(err) + } else if ok { + matched = append(matched, lexer) + break + } + } + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + matched = nil + // Next, try filename aliases. + for _, lexer := range l.Lexers { + config := lexer.Config() + for _, glob := range config.AliasFilenames { + ok, err := filepath.Match(glob, filename) + if err != nil { // nolint + panic(err) + } else if ok { + matched = append(matched, lexer) + } else { + for _, suf := range &ignoredSuffixes { + ok, err := filepath.Match(glob+suf, filename) + if err != nil { + panic(err) + } else if ok { + matched = append(matched, lexer) + break + } + } + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Analyse text content and return the "best" lexer.. +func (l *LexerRegistry) Analyse(text string) Lexer { + var picked Lexer + highest := float32(0.0) + for _, lexer := range l.Lexers { + if analyser, ok := lexer.(Analyser); ok { + weight := analyser.AnalyseText(text) + if weight > highest { + picked = lexer + highest = weight + } + } + } + return picked +} + +// Register a Lexer with the LexerRegistry. If the lexer is already registered +// it will be replaced. 
+func (l *LexerRegistry) Register(lexer Lexer) Lexer { + lexer.SetRegistry(l) + config := lexer.Config() + + l.byName[config.Name] = lexer + l.byName[strings.ToLower(config.Name)] = lexer + + for _, alias := range config.Aliases { + l.byAlias[alias] = lexer + l.byAlias[strings.ToLower(alias)] = lexer + } + + l.Lexers = add(l.Lexers, lexer) + + return lexer +} + +// add adds a lexer to a slice of lexers if it doesn't already exist, or if found will replace it. +func add(lexers Lexers, lexer Lexer) Lexers { + for i, val := range lexers { + if val == nil { + continue + } + + if val.Config().Name == lexer.Config().Name { + lexers[i] = lexer + return lexers + } + } + + return append(lexers, lexer) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/remap.go b/vendor/github.com/alecthomas/chroma/v2/remap.go new file mode 100644 index 0000000..bcf5e66 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/remap.go @@ -0,0 +1,94 @@ +package chroma + +type remappingLexer struct { + lexer Lexer + mapper func(Token) []Token +} + +// RemappingLexer remaps a token to a set of, potentially empty, tokens. 
+func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer { + return &remappingLexer{lexer, mapper} +} + +func (r *remappingLexer) AnalyseText(text string) float32 { + return r.lexer.AnalyseText(text) +} + +func (r *remappingLexer) SetAnalyser(analyser func(text string) float32) Lexer { + r.lexer.SetAnalyser(analyser) + return r +} + +func (r *remappingLexer) SetRegistry(registry *LexerRegistry) Lexer { + r.lexer.SetRegistry(registry) + return r +} + +func (r *remappingLexer) Config() *Config { + return r.lexer.Config() +} + +func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + it, err := r.lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + var buffer []Token + return func() Token { + for { + if len(buffer) > 0 { + t := buffer[0] + buffer = buffer[1:] + return t + } + t := it() + if t == EOF { + return t + } + buffer = r.mapper(t) + } + }, nil +} + +// TypeMapping defines type maps for the TypeRemappingLexer. +type TypeMapping []struct { + From, To TokenType + Words []string +} + +// TypeRemappingLexer remaps types of tokens coming from a parent Lexer. +// +// eg. Map "defvaralias" tokens of type NameVariable to NameFunction: +// +// mapping := TypeMapping{ +// {NameVariable, NameFunction, []string{"defvaralias"}, +// } +// lexer = TypeRemappingLexer(lexer, mapping) +func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer { + // Lookup table for fast remapping. 
+ lut := map[TokenType]map[string]TokenType{} + for _, rt := range mapping { + km, ok := lut[rt.From] + if !ok { + km = map[string]TokenType{} + lut[rt.From] = km + } + if len(rt.Words) == 0 { + km[""] = rt.To + } else { + for _, k := range rt.Words { + km[k] = rt.To + } + } + } + return RemappingLexer(lexer, func(t Token) []Token { + if k, ok := lut[t.Type]; ok { + if tt, ok := k[t.Value]; ok { + t.Type = tt + } else if tt, ok := k[""]; ok { + t.Type = tt + } + } + return []Token{t} + }) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/renovate.json5 b/vendor/github.com/alecthomas/chroma/v2/renovate.json5 new file mode 100644 index 0000000..77c7b01 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/renovate.json5 @@ -0,0 +1,18 @@ +{ + $schema: "https://docs.renovatebot.com/renovate-schema.json", + extends: [ + "config:recommended", + ":semanticCommits", + ":semanticCommitTypeAll(chore)", + ":semanticCommitScope(deps)", + "group:allNonMajor", + "schedule:earlyMondays", // Run once a week. + ], + packageRules: [ + { + matchPackageNames: ["golangci-lint"], + matchManagers: ["hermit"], + enabled: false, + }, + ], +} diff --git a/vendor/github.com/alecthomas/chroma/v2/serialise.go b/vendor/github.com/alecthomas/chroma/v2/serialise.go new file mode 100644 index 0000000..645a5fa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/serialise.go @@ -0,0 +1,479 @@ +package chroma + +import ( + "compress/gzip" + "encoding/xml" + "errors" + "fmt" + "io" + "io/fs" + "math" + "path/filepath" + "reflect" + "regexp" + "strings" + + "github.com/dlclark/regexp2" +) + +// Serialisation of Chroma rules to XML. The format is: +// +// +// +// +// [<$EMITTER ...>] +// [<$MUTATOR ...>] +// +// +// +// +// eg. Include("String") would become: +// +// +// +// +// +// [null, null, {"kind": "include", "state": "String"}] +// +// eg. Rule{`\d+`, Text, nil} would become: +// +// +// +// +// +// eg. Rule{`"`, String, Push("String")} +// +// +// +// +// +// +// eg. 
Rule{`(\w+)(\n)`, ByGroups(Keyword, Whitespace), nil}, +// +// +// +// +// +var ( + // ErrNotSerialisable is returned if a lexer contains Rules that cannot be serialised. + ErrNotSerialisable = fmt.Errorf("not serialisable") + emitterTemplates = func() map[string]SerialisableEmitter { + out := map[string]SerialisableEmitter{} + for _, emitter := range []SerialisableEmitter{ + &byGroupsEmitter{}, + &usingSelfEmitter{}, + TokenType(0), + &usingEmitter{}, + &usingByGroup{}, + } { + out[emitter.EmitterKind()] = emitter + } + return out + }() + mutatorTemplates = func() map[string]SerialisableMutator { + out := map[string]SerialisableMutator{} + for _, mutator := range []SerialisableMutator{ + &includeMutator{}, + &combinedMutator{}, + &multiMutator{}, + &pushMutator{}, + &popMutator{}, + } { + out[mutator.MutatorKind()] = mutator + } + return out + }() +) + +// fastUnmarshalConfig unmarshals only the Config from a serialised lexer. +func fastUnmarshalConfig(from fs.FS, path string) (*Config, error) { + r, err := from.Open(path) + if err != nil { + return nil, err + } + defer r.Close() + dec := xml.NewDecoder(r) + for { + token, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + return nil, fmt.Errorf("could not find element") + } + return nil, err + } + switch se := token.(type) { + case xml.StartElement: + if se.Name.Local != "config" { + break + } + + var config Config + err = dec.DecodeElement(&config, &se) + if err != nil { + return nil, fmt.Errorf("%s: %w", path, err) + } + return &config, nil + } + } +} + +// MustNewXMLLexer constructs a new RegexLexer from an XML file or panics. +func MustNewXMLLexer(from fs.FS, path string) *RegexLexer { + lex, err := NewXMLLexer(from, path) + if err != nil { + panic(err) + } + return lex +} + +// NewXMLLexer creates a new RegexLexer from a serialised RegexLexer. 
+func NewXMLLexer(from fs.FS, path string) (*RegexLexer, error) { + config, err := fastUnmarshalConfig(from, path) + if err != nil { + return nil, err + } + + for _, glob := range append(config.Filenames, config.AliasFilenames...) { + _, err := filepath.Match(glob, "") + if err != nil { + return nil, fmt.Errorf("%s: %q is not a valid glob: %w", config.Name, glob, err) + } + } + + var analyserFn func(string) float32 + + if config.Analyse != nil { + type regexAnalyse struct { + re *regexp2.Regexp + score float32 + } + + regexAnalysers := make([]regexAnalyse, 0, len(config.Analyse.Regexes)) + + for _, ra := range config.Analyse.Regexes { + re, err := regexp2.Compile(ra.Pattern, regexp2.None) + if err != nil { + return nil, fmt.Errorf("%s: %q is not a valid analyser regex: %w", config.Name, ra.Pattern, err) + } + + regexAnalysers = append(regexAnalysers, regexAnalyse{re, ra.Score}) + } + + analyserFn = func(text string) float32 { + var score float32 + + for _, ra := range regexAnalysers { + ok, err := ra.re.MatchString(text) + if err != nil { + return 0 + } + + if ok && config.Analyse.First { + return float32(math.Min(float64(ra.score), 1.0)) + } + + if ok { + score += ra.score + } + } + + return float32(math.Min(float64(score), 1.0)) + } + } + + return &RegexLexer{ + config: config, + analyser: analyserFn, + fetchRulesFunc: func() (Rules, error) { + var lexer struct { + Config + Rules Rules `xml:"rules"` + } + // Try to open .xml fallback to .xml.gz + fr, err := from.Open(path) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + path += ".gz" + fr, err = from.Open(path) + if err != nil { + return nil, err + } + } else { + return nil, err + } + } + defer fr.Close() + var r io.Reader = fr + if strings.HasSuffix(path, ".gz") { + r, err = gzip.NewReader(r) + if err != nil { + return nil, fmt.Errorf("%s: %w", path, err) + } + } + err = xml.NewDecoder(r).Decode(&lexer) + if err != nil { + return nil, fmt.Errorf("%s: %w", path, err) + } + return lexer.Rules, nil + }, + 
}, nil +} + +// Marshal a RegexLexer to XML. +func Marshal(l *RegexLexer) ([]byte, error) { + type lexer struct { + Config Config `xml:"config"` + Rules Rules `xml:"rules"` + } + + rules, err := l.Rules() + if err != nil { + return nil, err + } + root := &lexer{ + Config: *l.Config(), + Rules: rules, + } + data, err := xml.MarshalIndent(root, "", " ") + if err != nil { + return nil, err + } + re := regexp.MustCompile(`>`) + data = re.ReplaceAll(data, []byte(`/>`)) + return data, nil +} + +// Unmarshal a RegexLexer from XML. +func Unmarshal(data []byte) (*RegexLexer, error) { + type lexer struct { + Config Config `xml:"config"` + Rules Rules `xml:"rules"` + } + root := &lexer{} + err := xml.Unmarshal(data, root) + if err != nil { + return nil, fmt.Errorf("invalid Lexer XML: %w", err) + } + lex, err := NewLexer(&root.Config, func() Rules { return root.Rules }) + if err != nil { + return nil, err + } + return lex, nil +} + +func marshalMutator(e *xml.Encoder, mutator Mutator) error { + if mutator == nil { + return nil + } + smutator, ok := mutator.(SerialisableMutator) + if !ok { + return fmt.Errorf("unsupported mutator: %w", ErrNotSerialisable) + } + return e.EncodeElement(mutator, xml.StartElement{Name: xml.Name{Local: smutator.MutatorKind()}}) +} + +func unmarshalMutator(d *xml.Decoder, start xml.StartElement) (Mutator, error) { + kind := start.Name.Local + mutator, ok := mutatorTemplates[kind] + if !ok { + return nil, fmt.Errorf("unknown mutator %q: %w", kind, ErrNotSerialisable) + } + value, target := newFromTemplate(mutator) + if err := d.DecodeElement(target, &start); err != nil { + return nil, err + } + return value().(SerialisableMutator), nil +} + +func marshalEmitter(e *xml.Encoder, emitter Emitter) error { + if emitter == nil { + return nil + } + semitter, ok := emitter.(SerialisableEmitter) + if !ok { + return fmt.Errorf("unsupported emitter %T: %w", emitter, ErrNotSerialisable) + } + return e.EncodeElement(emitter, xml.StartElement{ + Name: 
xml.Name{Local: semitter.EmitterKind()}, + }) +} + +func unmarshalEmitter(d *xml.Decoder, start xml.StartElement) (Emitter, error) { + kind := start.Name.Local + mutator, ok := emitterTemplates[kind] + if !ok { + return nil, fmt.Errorf("unknown emitter %q: %w", kind, ErrNotSerialisable) + } + value, target := newFromTemplate(mutator) + if err := d.DecodeElement(target, &start); err != nil { + return nil, err + } + return value().(SerialisableEmitter), nil +} + +func (r Rule) MarshalXML(e *xml.Encoder, _ xml.StartElement) error { + start := xml.StartElement{ + Name: xml.Name{Local: "rule"}, + } + if r.Pattern != "" { + start.Attr = append(start.Attr, xml.Attr{ + Name: xml.Name{Local: "pattern"}, + Value: r.Pattern, + }) + } + if err := e.EncodeToken(start); err != nil { + return err + } + if err := marshalEmitter(e, r.Type); err != nil { + return err + } + if err := marshalMutator(e, r.Mutator); err != nil { + return err + } + return e.EncodeToken(xml.EndElement{Name: start.Name}) +} + +func (r *Rule) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + for _, attr := range start.Attr { + if attr.Name.Local == "pattern" { + r.Pattern = attr.Value + break + } + } + for { + token, err := d.Token() + if err != nil { + return err + } + switch token := token.(type) { + case xml.StartElement: + mutator, err := unmarshalMutator(d, token) + if err != nil && !errors.Is(err, ErrNotSerialisable) { + return err + } else if err == nil { + if r.Mutator != nil { + return fmt.Errorf("duplicate mutator") + } + r.Mutator = mutator + continue + } + emitter, err := unmarshalEmitter(d, token) + if err != nil && !errors.Is(err, ErrNotSerialisable) { // nolint: gocritic + return err + } else if err == nil { + if r.Type != nil { + return fmt.Errorf("duplicate emitter") + } + r.Type = emitter + continue + } else { + return err + } + + case xml.EndElement: + return nil + } + } +} + +type xmlRuleState struct { + Name string `xml:"name,attr"` + Rules []Rule `xml:"rule"` +} + +type 
xmlRules struct { + States []xmlRuleState `xml:"state"` +} + +func (r Rules) MarshalXML(e *xml.Encoder, _ xml.StartElement) error { + xr := xmlRules{} + for state, rules := range r { + xr.States = append(xr.States, xmlRuleState{ + Name: state, + Rules: rules, + }) + } + return e.EncodeElement(xr, xml.StartElement{Name: xml.Name{Local: "rules"}}) +} + +func (r *Rules) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + xr := xmlRules{} + if err := d.DecodeElement(&xr, &start); err != nil { + return err + } + if *r == nil { + *r = Rules{} + } + for _, state := range xr.States { + (*r)[state.Name] = state.Rules + } + return nil +} + +type xmlTokenType struct { + Type string `xml:"type,attr"` +} + +func (t *TokenType) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + el := xmlTokenType{} + if err := d.DecodeElement(&el, &start); err != nil { + return err + } + tt, err := TokenTypeString(el.Type) + if err != nil { + return err + } + *t = tt + return nil +} + +func (t TokenType) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + start.Attr = append(start.Attr, xml.Attr{Name: xml.Name{Local: "type"}, Value: t.String()}) + if err := e.EncodeToken(start); err != nil { + return err + } + return e.EncodeToken(xml.EndElement{Name: start.Name}) +} + +// This hijinks is a bit unfortunate but without it we can't deserialise into TokenType. 
+func newFromTemplate(template interface{}) (value func() interface{}, target interface{}) { + t := reflect.TypeOf(template) + if t.Kind() == reflect.Ptr { + v := reflect.New(t.Elem()) + return v.Interface, v.Interface() + } + v := reflect.New(t) + return func() interface{} { return v.Elem().Interface() }, v.Interface() +} + +func (b *Emitters) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + for { + token, err := d.Token() + if err != nil { + return err + } + switch token := token.(type) { + case xml.StartElement: + emitter, err := unmarshalEmitter(d, token) + if err != nil { + return err + } + *b = append(*b, emitter) + + case xml.EndElement: + return nil + } + } +} + +func (b Emitters) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + if err := e.EncodeToken(start); err != nil { + return err + } + for _, m := range b { + if err := marshalEmitter(e, m); err != nil { + return err + } + } + return e.EncodeToken(xml.EndElement{Name: start.Name}) +} diff --git a/vendor/github.com/alecthomas/chroma/v2/style.go b/vendor/github.com/alecthomas/chroma/v2/style.go new file mode 100644 index 0000000..cc8d9a6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/style.go @@ -0,0 +1,481 @@ +package chroma + +import ( + "encoding/xml" + "fmt" + "io" + "sort" + "strings" +) + +// Trilean value for StyleEntry value inheritance. +type Trilean uint8 + +// Trilean states. +const ( + Pass Trilean = iota + Yes + No +) + +func (t Trilean) String() string { + switch t { + case Yes: + return "Yes" + case No: + return "No" + default: + return "Pass" + } +} + +// Prefix returns s with "no" as a prefix if Trilean is no. +func (t Trilean) Prefix(s string) string { + if t == Yes { + return s + } else if t == No { + return "no" + s + } + return "" +} + +// A StyleEntry in the Style map. +type StyleEntry struct { + // Hex colours. 
+ Colour Colour + Background Colour + Border Colour + + Bold Trilean + Italic Trilean + Underline Trilean + NoInherit bool +} + +func (s StyleEntry) MarshalText() ([]byte, error) { + return []byte(s.String()), nil +} + +func (s StyleEntry) String() string { + out := []string{} + if s.Bold != Pass { + out = append(out, s.Bold.Prefix("bold")) + } + if s.Italic != Pass { + out = append(out, s.Italic.Prefix("italic")) + } + if s.Underline != Pass { + out = append(out, s.Underline.Prefix("underline")) + } + if s.NoInherit { + out = append(out, "noinherit") + } + if s.Colour.IsSet() { + out = append(out, s.Colour.String()) + } + if s.Background.IsSet() { + out = append(out, "bg:"+s.Background.String()) + } + if s.Border.IsSet() { + out = append(out, "border:"+s.Border.String()) + } + return strings.Join(out, " ") +} + +// Sub subtracts e from s where elements match. +func (s StyleEntry) Sub(e StyleEntry) StyleEntry { + out := StyleEntry{} + if e.Colour != s.Colour { + out.Colour = s.Colour + } + if e.Background != s.Background { + out.Background = s.Background + } + if e.Bold != s.Bold { + out.Bold = s.Bold + } + if e.Italic != s.Italic { + out.Italic = s.Italic + } + if e.Underline != s.Underline { + out.Underline = s.Underline + } + if e.Border != s.Border { + out.Border = s.Border + } + return out +} + +// Inherit styles from ancestors. +// +// Ancestors should be provided from oldest to newest. 
+func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry { + out := s + for i := len(ancestors) - 1; i >= 0; i-- { + if out.NoInherit { + return out + } + ancestor := ancestors[i] + if !out.Colour.IsSet() { + out.Colour = ancestor.Colour + } + if !out.Background.IsSet() { + out.Background = ancestor.Background + } + if !out.Border.IsSet() { + out.Border = ancestor.Border + } + if out.Bold == Pass { + out.Bold = ancestor.Bold + } + if out.Italic == Pass { + out.Italic = ancestor.Italic + } + if out.Underline == Pass { + out.Underline = ancestor.Underline + } + } + return out +} + +func (s StyleEntry) IsZero() bool { + return s.Colour == 0 && s.Background == 0 && s.Border == 0 && s.Bold == Pass && s.Italic == Pass && + s.Underline == Pass && !s.NoInherit +} + +// A StyleBuilder is a mutable structure for building styles. +// +// Once built, a Style is immutable. +type StyleBuilder struct { + entries map[TokenType]string + name string + parent *Style +} + +func NewStyleBuilder(name string) *StyleBuilder { + return &StyleBuilder{name: name, entries: map[TokenType]string{}} +} + +func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder { + for ttype, entry := range entries { + s.entries[ttype] = entry + } + return s +} + +func (s *StyleBuilder) Get(ttype TokenType) StyleEntry { + // This is less than ideal, but it's the price for not having to check errors on each Add(). + entry, _ := ParseStyleEntry(s.entries[ttype]) + if s.parent != nil { + entry = entry.Inherit(s.parent.Get(ttype)) + } + return entry +} + +// Add an entry to the Style map. +// +// See http://pygments.org/docs/styles/#style-rules for details. 
+func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder { // nolint: gocyclo + s.entries[ttype] = entry + return s +} + +func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder { + s.entries[ttype] = entry.String() + return s +} + +// Transform passes each style entry currently defined in the builder to the supplied +// function and saves the returned value. This can be used to adjust a style's colours; +// see Colour's ClampBrightness function, for example. +func (s *StyleBuilder) Transform(transform func(StyleEntry) StyleEntry) *StyleBuilder { + types := make(map[TokenType]struct{}) + for tt := range s.entries { + types[tt] = struct{}{} + } + if s.parent != nil { + for _, tt := range s.parent.Types() { + types[tt] = struct{}{} + } + } + for tt := range types { + s.AddEntry(tt, transform(s.Get(tt))) + } + return s +} + +func (s *StyleBuilder) Build() (*Style, error) { + style := &Style{ + Name: s.name, + entries: map[TokenType]StyleEntry{}, + parent: s.parent, + } + for ttype, descriptor := range s.entries { + entry, err := ParseStyleEntry(descriptor) + if err != nil { + return nil, fmt.Errorf("invalid entry for %s: %s", ttype, err) + } + style.entries[ttype] = entry + } + return style, nil +} + +// StyleEntries mapping TokenType to colour definition. +type StyleEntries map[TokenType]string + +// NewXMLStyle parses an XML style definition. +func NewXMLStyle(r io.Reader) (*Style, error) { + dec := xml.NewDecoder(r) + style := &Style{} + return style, dec.Decode(style) +} + +// MustNewXMLStyle is like NewXMLStyle but panics on error. +func MustNewXMLStyle(r io.Reader) *Style { + style, err := NewXMLStyle(r) + if err != nil { + panic(err) + } + return style +} + +// NewStyle creates a new style definition. +func NewStyle(name string, entries StyleEntries) (*Style, error) { + return NewStyleBuilder(name).AddAll(entries).Build() +} + +// MustNewStyle creates a new style or panics. 
+func MustNewStyle(name string, entries StyleEntries) *Style { + style, err := NewStyle(name, entries) + if err != nil { + panic(err) + } + return style +} + +// A Style definition. +// +// See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical. +type Style struct { + Name string + entries map[TokenType]StyleEntry + parent *Style +} + +func (s *Style) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + if s.parent != nil { + return fmt.Errorf("cannot marshal style with parent") + } + start.Name = xml.Name{Local: "style"} + start.Attr = []xml.Attr{{Name: xml.Name{Local: "name"}, Value: s.Name}} + if err := e.EncodeToken(start); err != nil { + return err + } + sorted := make([]TokenType, 0, len(s.entries)) + for ttype := range s.entries { + sorted = append(sorted, ttype) + } + sort.Slice(sorted, func(i, j int) bool { return sorted[i] < sorted[j] }) + for _, ttype := range sorted { + entry := s.entries[ttype] + el := xml.StartElement{Name: xml.Name{Local: "entry"}} + el.Attr = []xml.Attr{ + {Name: xml.Name{Local: "type"}, Value: ttype.String()}, + {Name: xml.Name{Local: "style"}, Value: entry.String()}, + } + if err := e.EncodeToken(el); err != nil { + return err + } + if err := e.EncodeToken(xml.EndElement{Name: el.Name}); err != nil { + return err + } + } + return e.EncodeToken(xml.EndElement{Name: start.Name}) +} + +func (s *Style) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + for _, attr := range start.Attr { + if attr.Name.Local == "name" { + s.Name = attr.Value + } else { + return fmt.Errorf("unexpected attribute %s", attr.Name.Local) + } + } + if s.Name == "" { + return fmt.Errorf("missing style name attribute") + } + s.entries = map[TokenType]StyleEntry{} + for { + tok, err := d.Token() + if err != nil { + return err + } + switch el := tok.(type) { + case xml.StartElement: + if el.Name.Local != "entry" { + return fmt.Errorf("unexpected element %s", el.Name.Local) + } + var ttype TokenType + var entry 
StyleEntry + for _, attr := range el.Attr { + switch attr.Name.Local { + case "type": + ttype, err = TokenTypeString(attr.Value) + if err != nil { + return err + } + + case "style": + entry, err = ParseStyleEntry(attr.Value) + if err != nil { + return err + } + + default: + return fmt.Errorf("unexpected attribute %s", attr.Name.Local) + } + } + s.entries[ttype] = entry + + case xml.EndElement: + if el.Name.Local == start.Name.Local { + return nil + } + } + } +} + +// Types that are styled. +func (s *Style) Types() []TokenType { + dedupe := map[TokenType]bool{} + for tt := range s.entries { + dedupe[tt] = true + } + if s.parent != nil { + for _, tt := range s.parent.Types() { + dedupe[tt] = true + } + } + out := make([]TokenType, 0, len(dedupe)) + for tt := range dedupe { + out = append(out, tt) + } + return out +} + +// Builder creates a mutable builder from this Style. +// +// The builder can then be safely modified. This is a cheap operation. +func (s *Style) Builder() *StyleBuilder { + return &StyleBuilder{ + name: s.Name, + entries: map[TokenType]string{}, + parent: s, + } +} + +// Has checks if an exact style entry match exists for a token type. +// +// This is distinct from Get() which will merge parent tokens. +func (s *Style) Has(ttype TokenType) bool { + return !s.get(ttype).IsZero() || s.synthesisable(ttype) +} + +// Get a style entry. Will try sub-category or category if an exact match is not found, and +// finally return the Background. 
+func (s *Style) Get(ttype TokenType) StyleEntry { + return s.get(ttype).Inherit( + s.get(Background), + s.get(Text), + s.get(ttype.Category()), + s.get(ttype.SubCategory())) +} + +func (s *Style) get(ttype TokenType) StyleEntry { + out := s.entries[ttype] + if out.IsZero() && s.parent != nil { + return s.parent.get(ttype) + } + if out.IsZero() && s.synthesisable(ttype) { + out = s.synthesise(ttype) + } + return out +} + +func (s *Style) synthesise(ttype TokenType) StyleEntry { + bg := s.get(Background) + text := StyleEntry{Colour: bg.Colour} + text.Colour = text.Colour.BrightenOrDarken(0.5) + + switch ttype { + // If we don't have a line highlight colour, make one that is 10% brighter/darker than the background. + case LineHighlight: + return StyleEntry{Background: bg.Background.BrightenOrDarken(0.1)} + + // If we don't have line numbers, use the text colour but 20% brighter/darker + case LineNumbers, LineNumbersTable: + return text + + default: + return StyleEntry{} + } +} + +func (s *Style) synthesisable(ttype TokenType) bool { + return ttype == LineHighlight || ttype == LineNumbers || ttype == LineNumbersTable +} + +// MustParseStyleEntry parses a Pygments style entry or panics. +func MustParseStyleEntry(entry string) StyleEntry { + out, err := ParseStyleEntry(entry) + if err != nil { + panic(err) + } + return out +} + +// ParseStyleEntry parses a Pygments style entry. 
+func ParseStyleEntry(entry string) (StyleEntry, error) { // nolint: gocyclo + out := StyleEntry{} + parts := strings.Fields(entry) + for _, part := range parts { + switch { + case part == "italic": + out.Italic = Yes + case part == "noitalic": + out.Italic = No + case part == "bold": + out.Bold = Yes + case part == "nobold": + out.Bold = No + case part == "underline": + out.Underline = Yes + case part == "nounderline": + out.Underline = No + case part == "inherit": + out.NoInherit = false + case part == "noinherit": + out.NoInherit = true + case part == "bg:": + out.Background = 0 + case strings.HasPrefix(part, "bg:#"): + out.Background = ParseColour(part[3:]) + if !out.Background.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid background colour %q", part) + } + case strings.HasPrefix(part, "border:#"): + out.Border = ParseColour(part[7:]) + if !out.Border.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid border colour %q", part) + } + case strings.HasPrefix(part, "#"): + out.Colour = ParseColour(part) + if !out.Colour.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid colour %q", part) + } + default: + return StyleEntry{}, fmt.Errorf("unknown style element %q", part) + } + } + return out, nil +} diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml b/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml new file mode 100644 index 0000000..36ea2f1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/abap.xml @@ -0,0 +1,11 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml b/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml new file mode 100644 index 0000000..e8a6dc1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/algol.xml @@ -0,0 +1,18 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml b/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml new file mode 100644 index 0000000..7fa340f --- 
/dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/algol_nu.xml @@ -0,0 +1,18 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/api.go b/vendor/github.com/alecthomas/chroma/v2/styles/api.go new file mode 100644 index 0000000..e26d6f0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/api.go @@ -0,0 +1,65 @@ +package styles + +import ( + "embed" + "io/fs" + "sort" + + "github.com/alecthomas/chroma/v2" +) + +//go:embed *.xml +var embedded embed.FS + +// Registry of Styles. +var Registry = func() map[string]*chroma.Style { + registry := map[string]*chroma.Style{} + // Register all embedded styles. + files, err := fs.ReadDir(embedded, ".") + if err != nil { + panic(err) + } + for _, file := range files { + if file.IsDir() { + continue + } + r, err := embedded.Open(file.Name()) + if err != nil { + panic(err) + } + style, err := chroma.NewXMLStyle(r) + if err != nil { + panic(err) + } + registry[style.Name] = style + _ = r.Close() + } + return registry +}() + +// Fallback style. Reassign to change the default fallback style. +var Fallback = Registry["swapoff"] + +// Register a chroma.Style. +func Register(style *chroma.Style) *chroma.Style { + Registry[style.Name] = style + return style +} + +// Names of all available styles. +func Names() []string { + out := []string{} + for name := range Registry { + out = append(out, name) + } + sort.Strings(out) + return out +} + +// Get named style, or Fallback. 
+func Get(name string) *chroma.Style { + if style, ok := Registry[name]; ok { + return style + } + return Fallback +} diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml b/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml new file mode 100644 index 0000000..d9891dc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/arduino.xml @@ -0,0 +1,18 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml b/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml new file mode 100644 index 0000000..74d2eae --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/autumn.xml @@ -0,0 +1,36 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/average.xml b/vendor/github.com/alecthomas/chroma/v2/styles/average.xml new file mode 100644 index 0000000..79bdb95 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/average.xml @@ -0,0 +1,74 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml b/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml new file mode 100644 index 0000000..a05ba24 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/base16-snazzy.xml @@ -0,0 +1,74 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml b/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml new file mode 100644 index 0000000..0d8f574 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/borland.xml @@ -0,0 +1,26 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml b/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml new file mode 100644 index 0000000..fb0e868 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/bw.xml @@ -0,0 +1,23 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml 
b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml new file mode 100644 index 0000000..0adf1ba --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-frappe.xml @@ -0,0 +1,83 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml new file mode 100644 index 0000000..3ea767f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-latte.xml @@ -0,0 +1,83 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml new file mode 100644 index 0000000..6b50028 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-macchiato.xml @@ -0,0 +1,83 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml new file mode 100644 index 0000000..9a40191 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/catppuccin-mocha.xml @@ -0,0 +1,83 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml b/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml new file mode 100644 index 0000000..32442d7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/colorful.xml @@ -0,0 +1,52 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/compat.go b/vendor/github.com/alecthomas/chroma/v2/styles/compat.go new file mode 100644 index 0000000..4a6aaa6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/compat.go @@ -0,0 +1,66 @@ +package styles + +// Present for backwards compatibility. +// +// Deprecated: use styles.Get(name) instead. 
+var ( + Abap = Registry["abap"] + Algol = Registry["algol"] + AlgolNu = Registry["algol_nu"] + Arduino = Registry["arduino"] + Autumn = Registry["autumn"] + Average = Registry["average"] + Base16Snazzy = Registry["base16-snazzy"] + Borland = Registry["borland"] + BlackWhite = Registry["bw"] + CatppuccinFrappe = Registry["catppuccin-frappe"] + CatppuccinLatte = Registry["catppuccin-latte"] + CatppuccinMacchiato = Registry["catppuccin-macchiato"] + CatppuccinMocha = Registry["catppuccin-mocha"] + Colorful = Registry["colorful"] + DoomOne = Registry["doom-one"] + DoomOne2 = Registry["doom-one2"] + Dracula = Registry["dracula"] + Emacs = Registry["emacs"] + Friendly = Registry["friendly"] + Fruity = Registry["fruity"] + GitHubDark = Registry["github-dark"] + GitHub = Registry["github"] + GruvboxLight = Registry["gruvbox-light"] + Gruvbox = Registry["gruvbox"] + HrDark = Registry["hrdark"] + HrHighContrast = Registry["hr_high_contrast"] + Igor = Registry["igor"] + Lovelace = Registry["lovelace"] + Manni = Registry["manni"] + ModusOperandi = Registry["modus-operandi"] + ModusVivendi = Registry["modus-vivendi"] + Monokai = Registry["monokai"] + MonokaiLight = Registry["monokailight"] + Murphy = Registry["murphy"] + Native = Registry["native"] + Nord = Registry["nord"] + OnesEnterprise = Registry["onesenterprise"] + ParaisoDark = Registry["paraiso-dark"] + ParaisoLight = Registry["paraiso-light"] + Pastie = Registry["pastie"] + Perldoc = Registry["perldoc"] + Pygments = Registry["pygments"] + RainbowDash = Registry["rainbow_dash"] + RosePineDawn = Registry["rose-pine-dawn"] + RosePineMoon = Registry["rose-pine-moon"] + RosePine = Registry["rose-pine"] + Rrt = Registry["rrt"] + SolarizedDark = Registry["solarized-dark"] + SolarizedDark256 = Registry["solarized-dark256"] + SolarizedLight = Registry["solarized-light"] + SwapOff = Registry["swapoff"] + Tango = Registry["tango"] + Trac = Registry["trac"] + Vim = Registry["vim"] + VisualStudio = Registry["vs"] + Vulcan = 
Registry["vulcan"] + WitchHazel = Registry["witchhazel"] + XcodeDark = Registry["xcode-dark"] + Xcode = Registry["xcode"] +) diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml new file mode 100644 index 0000000..1f5127e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one.xml @@ -0,0 +1,51 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml new file mode 100644 index 0000000..f47deba --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/doom-one2.xml @@ -0,0 +1,64 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml b/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml new file mode 100644 index 0000000..9df7da1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/dracula.xml @@ -0,0 +1,74 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml b/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml new file mode 100644 index 0000000..981ce8e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/emacs.xml @@ -0,0 +1,44 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml b/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml new file mode 100644 index 0000000..f498010 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/friendly.xml @@ -0,0 +1,44 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml b/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml new file mode 100644 index 0000000..bcc06aa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/fruity.xml @@ -0,0 +1,19 @@ + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml new file mode 100644 index 0000000..711aeaf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/github-dark.xml @@ -0,0 +1,45 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/github.xml b/vendor/github.com/alecthomas/chroma/v2/styles/github.xml new file mode 100644 index 0000000..e7caee7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/github.xml @@ -0,0 +1,44 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml new file mode 100644 index 0000000..8c4f064 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox-light.xml @@ -0,0 +1,33 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml new file mode 100644 index 0000000..2f6a0a2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/gruvbox.xml @@ -0,0 +1,33 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml b/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml new file mode 100644 index 0000000..61cde20 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/hr_high_contrast.xml @@ -0,0 +1,12 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml new file mode 100644 index 0000000..bc7a6f3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/hrdark.xml @@ -0,0 +1,10 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml b/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml new file mode 100644 index 
0000000..773c83b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/igor.xml @@ -0,0 +1,9 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml b/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml new file mode 100644 index 0000000..e336c93 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/lovelace.xml @@ -0,0 +1,53 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml b/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml new file mode 100644 index 0000000..99324bd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/manni.xml @@ -0,0 +1,44 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml b/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml new file mode 100644 index 0000000..023137a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/modus-operandi.xml @@ -0,0 +1,13 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml b/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml new file mode 100644 index 0000000..8da663d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/modus-vivendi.xml @@ -0,0 +1,13 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml b/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml new file mode 100644 index 0000000..1a789dd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/monokai.xml @@ -0,0 +1,29 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml b/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml new file mode 100644 index 0000000..85cd23e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/monokailight.xml @@ -0,0 +1,26 @@ + \ No newline at end of file diff --git 
a/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml b/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml new file mode 100644 index 0000000..112d620 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/murphy.xml @@ -0,0 +1,52 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/native.xml b/vendor/github.com/alecthomas/chroma/v2/styles/native.xml new file mode 100644 index 0000000..43eea7f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/native.xml @@ -0,0 +1,35 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml b/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml new file mode 100644 index 0000000..1c1d1ff --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/nord.xml @@ -0,0 +1,46 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml new file mode 100644 index 0000000..6921eb5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/onedark.xml @@ -0,0 +1,25 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml b/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml new file mode 100644 index 0000000..ce86db3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/onesenterprise.xml @@ -0,0 +1,10 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml new file mode 100644 index 0000000..788db3f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-dark.xml @@ -0,0 +1,37 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml new file mode 100644 index 0000000..06a63ba --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/styles/paraiso-light.xml @@ -0,0 +1,37 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml b/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml new file mode 100644 index 0000000..a3b0abd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/pastie.xml @@ -0,0 +1,45 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml b/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml new file mode 100644 index 0000000..9e5564c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/perldoc.xml @@ -0,0 +1,37 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml b/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml new file mode 100644 index 0000000..a3d0d8b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/pygments.xml @@ -0,0 +1,42 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml new file mode 100644 index 0000000..5b0fe49 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/rainbow_dash.xml @@ -0,0 +1,40 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml new file mode 100644 index 0000000..788bd6f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-dawn.xml @@ -0,0 +1,29 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml new file mode 100644 index 0000000..f67b804 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine-moon.xml @@ -0,0 +1,29 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml 
b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml new file mode 100644 index 0000000..3fb70a5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/rose-pine.xml @@ -0,0 +1,29 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml b/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml new file mode 100644 index 0000000..5f1daaa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/rrt.xml @@ -0,0 +1,13 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml new file mode 100644 index 0000000..a3cf46f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark.xml @@ -0,0 +1,39 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml new file mode 100644 index 0000000..977cfbe --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-dark256.xml @@ -0,0 +1,41 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml new file mode 100644 index 0000000..4fbc1d4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/solarized-light.xml @@ -0,0 +1,17 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml b/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml new file mode 100644 index 0000000..8a398df --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/swapoff.xml @@ -0,0 +1,18 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml b/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml new file mode 100644 index 0000000..5ca46bb --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/v2/styles/tango.xml @@ -0,0 +1,72 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml b/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml new file mode 100644 index 0000000..9f1d266 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/trac.xml @@ -0,0 +1,35 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml new file mode 100644 index 0000000..fec6934 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/vim.xml @@ -0,0 +1,29 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml new file mode 100644 index 0000000..5643501 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/vs.xml @@ -0,0 +1,16 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml b/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml new file mode 100644 index 0000000..4e69094 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/vulcan.xml @@ -0,0 +1,74 @@ + diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml b/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml new file mode 100644 index 0000000..52f2299 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/witchhazel.xml @@ -0,0 +1,31 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml b/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml new file mode 100644 index 0000000..9343979 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/xcode-dark.xml @@ -0,0 +1,31 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml b/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml new file mode 
100644 index 0000000..523d746 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/styles/xcode.xml @@ -0,0 +1,22 @@ + \ No newline at end of file diff --git a/vendor/github.com/alecthomas/chroma/v2/table.py b/vendor/github.com/alecthomas/chroma/v2/table.py new file mode 100644 index 0000000..ea4b755 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/table.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python3 +import re +from collections import defaultdict +from subprocess import check_output + +README_FILE = "README.md" + +lines = check_output(["chroma", "--list"]).decode("utf-8").splitlines() +lines = [line.strip() for line in lines if line.startswith(" ") and not line.startswith(" ")] +lines = sorted(lines, key=lambda l: l.lower()) + +table = defaultdict(list) + +for line in lines: + table[line[0].upper()].append(line) + +rows = [] +for key, value in table.items(): + rows.append("{} | {}".format(key, ", ".join(value))) +tbody = "\n".join(rows) + +with open(README_FILE, "r") as f: + content = f.read() + +with open(README_FILE, "w") as f: + marker = re.compile(r"(?P:----: \\| --------\n).*?(?P\n\n)", re.DOTALL) + replacement = r"\g%s\g" % tbody + updated_content = marker.sub(replacement, content) + f.write(updated_content) + +print(tbody) diff --git a/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go b/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go new file mode 100644 index 0000000..696e9ce --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/tokentype_enumer.go @@ -0,0 +1,573 @@ +// Code generated by "enumer -text -type TokenType"; DO NOT EDIT. 
+ +package chroma + +import ( + "fmt" + "strings" +) + +const _TokenTypeName = "NoneOtherErrorCodeLineLineLinkLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersLinePreWrapperBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation" +const _TokenTypeLowerName = 
"noneothererrorcodelinelinelinklinetabletdlinetablelinehighlightlinenumberstablelinenumberslineprewrapperbackgroundeoftypekeywordkeywordconstantkeyworddeclarationkeywordnamespacekeywordpseudokeywordreservedkeywordtypenamenameattributenamebuiltinnamebuiltinpseudonameclassnameconstantnamedecoratornameentitynameexceptionnamefunctionnamefunctionmagicnamekeywordnamelabelnamenamespacenameoperatornameothernamepseudonamepropertynametagnamevariablenamevariableanonymousnamevariableclassnamevariableglobalnamevariableinstancenamevariablemagicliteralliteraldateliteralotherliteralstringliteralstringaffixliteralstringatomliteralstringbacktickliteralstringbooleanliteralstringcharliteralstringdelimiterliteralstringdocliteralstringdoubleliteralstringescapeliteralstringheredocliteralstringinterpolliteralstringnameliteralstringotherliteralstringregexliteralstringsingleliteralstringsymbolliteralnumberliteralnumberbinliteralnumberfloatliteralnumberhexliteralnumberintegerliteralnumberintegerlongliteralnumberoctoperatoroperatorwordpunctuationcommentcommenthashbangcommentmultilinecommentsinglecommentspecialcommentpreproccommentpreprocfilegenericgenericdeletedgenericemphgenericerrorgenericheadinggenericinsertedgenericoutputgenericpromptgenericstronggenericsubheadinggenerictracebackgenericunderlinetexttextwhitespacetextsymboltextpunctuation" + +var _TokenTypeMap = map[TokenType]string{ + -13: _TokenTypeName[0:4], + -12: _TokenTypeName[4:9], + -11: _TokenTypeName[9:14], + -10: _TokenTypeName[14:22], + -9: _TokenTypeName[22:30], + -8: _TokenTypeName[30:41], + -7: _TokenTypeName[41:50], + -6: _TokenTypeName[50:63], + -5: _TokenTypeName[63:79], + -4: _TokenTypeName[79:90], + -3: _TokenTypeName[90:94], + -2: _TokenTypeName[94:104], + -1: _TokenTypeName[104:114], + 0: _TokenTypeName[114:121], + 1000: _TokenTypeName[121:128], + 1001: _TokenTypeName[128:143], + 1002: _TokenTypeName[143:161], + 1003: _TokenTypeName[161:177], + 1004: _TokenTypeName[177:190], + 1005: _TokenTypeName[190:205], + 1006: 
_TokenTypeName[205:216], + 2000: _TokenTypeName[216:220], + 2001: _TokenTypeName[220:233], + 2002: _TokenTypeName[233:244], + 2003: _TokenTypeName[244:261], + 2004: _TokenTypeName[261:270], + 2005: _TokenTypeName[270:282], + 2006: _TokenTypeName[282:295], + 2007: _TokenTypeName[295:305], + 2008: _TokenTypeName[305:318], + 2009: _TokenTypeName[318:330], + 2010: _TokenTypeName[330:347], + 2011: _TokenTypeName[347:358], + 2012: _TokenTypeName[358:367], + 2013: _TokenTypeName[367:380], + 2014: _TokenTypeName[380:392], + 2015: _TokenTypeName[392:401], + 2016: _TokenTypeName[401:411], + 2017: _TokenTypeName[411:423], + 2018: _TokenTypeName[423:430], + 2019: _TokenTypeName[430:442], + 2020: _TokenTypeName[442:463], + 2021: _TokenTypeName[463:480], + 2022: _TokenTypeName[480:498], + 2023: _TokenTypeName[498:518], + 2024: _TokenTypeName[518:535], + 3000: _TokenTypeName[535:542], + 3001: _TokenTypeName[542:553], + 3002: _TokenTypeName[553:565], + 3100: _TokenTypeName[565:578], + 3101: _TokenTypeName[578:596], + 3102: _TokenTypeName[596:613], + 3103: _TokenTypeName[613:634], + 3104: _TokenTypeName[634:654], + 3105: _TokenTypeName[654:671], + 3106: _TokenTypeName[671:693], + 3107: _TokenTypeName[693:709], + 3108: _TokenTypeName[709:728], + 3109: _TokenTypeName[728:747], + 3110: _TokenTypeName[747:767], + 3111: _TokenTypeName[767:788], + 3112: _TokenTypeName[788:805], + 3113: _TokenTypeName[805:823], + 3114: _TokenTypeName[823:841], + 3115: _TokenTypeName[841:860], + 3116: _TokenTypeName[860:879], + 3200: _TokenTypeName[879:892], + 3201: _TokenTypeName[892:908], + 3202: _TokenTypeName[908:926], + 3203: _TokenTypeName[926:942], + 3204: _TokenTypeName[942:962], + 3205: _TokenTypeName[962:986], + 3206: _TokenTypeName[986:1002], + 4000: _TokenTypeName[1002:1010], + 4001: _TokenTypeName[1010:1022], + 5000: _TokenTypeName[1022:1033], + 6000: _TokenTypeName[1033:1040], + 6001: _TokenTypeName[1040:1055], + 6002: _TokenTypeName[1055:1071], + 6003: _TokenTypeName[1071:1084], + 6004: 
_TokenTypeName[1084:1098], + 6100: _TokenTypeName[1098:1112], + 6101: _TokenTypeName[1112:1130], + 7000: _TokenTypeName[1130:1137], + 7001: _TokenTypeName[1137:1151], + 7002: _TokenTypeName[1151:1162], + 7003: _TokenTypeName[1162:1174], + 7004: _TokenTypeName[1174:1188], + 7005: _TokenTypeName[1188:1203], + 7006: _TokenTypeName[1203:1216], + 7007: _TokenTypeName[1216:1229], + 7008: _TokenTypeName[1229:1242], + 7009: _TokenTypeName[1242:1259], + 7010: _TokenTypeName[1259:1275], + 7011: _TokenTypeName[1275:1291], + 8000: _TokenTypeName[1291:1295], + 8001: _TokenTypeName[1295:1309], + 8002: _TokenTypeName[1309:1319], + 8003: _TokenTypeName[1319:1334], +} + +func (i TokenType) String() string { + if str, ok := _TokenTypeMap[i]; ok { + return str + } + return fmt.Sprintf("TokenType(%d)", i) +} + +// An "invalid array index" compiler error signifies that the constant values have changed. +// Re-run the stringer command to generate them again. +func _TokenTypeNoOp() { + var x [1]struct{} + _ = x[None-(-13)] + _ = x[Other-(-12)] + _ = x[Error-(-11)] + _ = x[CodeLine-(-10)] + _ = x[LineLink-(-9)] + _ = x[LineTableTD-(-8)] + _ = x[LineTable-(-7)] + _ = x[LineHighlight-(-6)] + _ = x[LineNumbersTable-(-5)] + _ = x[LineNumbers-(-4)] + _ = x[Line-(-3)] + _ = x[PreWrapper-(-2)] + _ = x[Background-(-1)] + _ = x[EOFType-(0)] + _ = x[Keyword-(1000)] + _ = x[KeywordConstant-(1001)] + _ = x[KeywordDeclaration-(1002)] + _ = x[KeywordNamespace-(1003)] + _ = x[KeywordPseudo-(1004)] + _ = x[KeywordReserved-(1005)] + _ = x[KeywordType-(1006)] + _ = x[Name-(2000)] + _ = x[NameAttribute-(2001)] + _ = x[NameBuiltin-(2002)] + _ = x[NameBuiltinPseudo-(2003)] + _ = x[NameClass-(2004)] + _ = x[NameConstant-(2005)] + _ = x[NameDecorator-(2006)] + _ = x[NameEntity-(2007)] + _ = x[NameException-(2008)] + _ = x[NameFunction-(2009)] + _ = x[NameFunctionMagic-(2010)] + _ = x[NameKeyword-(2011)] + _ = x[NameLabel-(2012)] + _ = x[NameNamespace-(2013)] + _ = x[NameOperator-(2014)] + _ = 
x[NameOther-(2015)] + _ = x[NamePseudo-(2016)] + _ = x[NameProperty-(2017)] + _ = x[NameTag-(2018)] + _ = x[NameVariable-(2019)] + _ = x[NameVariableAnonymous-(2020)] + _ = x[NameVariableClass-(2021)] + _ = x[NameVariableGlobal-(2022)] + _ = x[NameVariableInstance-(2023)] + _ = x[NameVariableMagic-(2024)] + _ = x[Literal-(3000)] + _ = x[LiteralDate-(3001)] + _ = x[LiteralOther-(3002)] + _ = x[LiteralString-(3100)] + _ = x[LiteralStringAffix-(3101)] + _ = x[LiteralStringAtom-(3102)] + _ = x[LiteralStringBacktick-(3103)] + _ = x[LiteralStringBoolean-(3104)] + _ = x[LiteralStringChar-(3105)] + _ = x[LiteralStringDelimiter-(3106)] + _ = x[LiteralStringDoc-(3107)] + _ = x[LiteralStringDouble-(3108)] + _ = x[LiteralStringEscape-(3109)] + _ = x[LiteralStringHeredoc-(3110)] + _ = x[LiteralStringInterpol-(3111)] + _ = x[LiteralStringName-(3112)] + _ = x[LiteralStringOther-(3113)] + _ = x[LiteralStringRegex-(3114)] + _ = x[LiteralStringSingle-(3115)] + _ = x[LiteralStringSymbol-(3116)] + _ = x[LiteralNumber-(3200)] + _ = x[LiteralNumberBin-(3201)] + _ = x[LiteralNumberFloat-(3202)] + _ = x[LiteralNumberHex-(3203)] + _ = x[LiteralNumberInteger-(3204)] + _ = x[LiteralNumberIntegerLong-(3205)] + _ = x[LiteralNumberOct-(3206)] + _ = x[Operator-(4000)] + _ = x[OperatorWord-(4001)] + _ = x[Punctuation-(5000)] + _ = x[Comment-(6000)] + _ = x[CommentHashbang-(6001)] + _ = x[CommentMultiline-(6002)] + _ = x[CommentSingle-(6003)] + _ = x[CommentSpecial-(6004)] + _ = x[CommentPreproc-(6100)] + _ = x[CommentPreprocFile-(6101)] + _ = x[Generic-(7000)] + _ = x[GenericDeleted-(7001)] + _ = x[GenericEmph-(7002)] + _ = x[GenericError-(7003)] + _ = x[GenericHeading-(7004)] + _ = x[GenericInserted-(7005)] + _ = x[GenericOutput-(7006)] + _ = x[GenericPrompt-(7007)] + _ = x[GenericStrong-(7008)] + _ = x[GenericSubheading-(7009)] + _ = x[GenericTraceback-(7010)] + _ = x[GenericUnderline-(7011)] + _ = x[Text-(8000)] + _ = x[TextWhitespace-(8001)] + _ = x[TextSymbol-(8002)] + _ = 
x[TextPunctuation-(8003)] +} + +var _TokenTypeValues = []TokenType{None, Other, Error, CodeLine, LineLink, LineTableTD, LineTable, LineHighlight, LineNumbersTable, LineNumbers, Line, PreWrapper, Background, EOFType, Keyword, KeywordConstant, KeywordDeclaration, KeywordNamespace, KeywordPseudo, KeywordReserved, KeywordType, Name, NameAttribute, NameBuiltin, NameBuiltinPseudo, NameClass, NameConstant, NameDecorator, NameEntity, NameException, NameFunction, NameFunctionMagic, NameKeyword, NameLabel, NameNamespace, NameOperator, NameOther, NamePseudo, NameProperty, NameTag, NameVariable, NameVariableAnonymous, NameVariableClass, NameVariableGlobal, NameVariableInstance, NameVariableMagic, Literal, LiteralDate, LiteralOther, LiteralString, LiteralStringAffix, LiteralStringAtom, LiteralStringBacktick, LiteralStringBoolean, LiteralStringChar, LiteralStringDelimiter, LiteralStringDoc, LiteralStringDouble, LiteralStringEscape, LiteralStringHeredoc, LiteralStringInterpol, LiteralStringName, LiteralStringOther, LiteralStringRegex, LiteralStringSingle, LiteralStringSymbol, LiteralNumber, LiteralNumberBin, LiteralNumberFloat, LiteralNumberHex, LiteralNumberInteger, LiteralNumberIntegerLong, LiteralNumberOct, Operator, OperatorWord, Punctuation, Comment, CommentHashbang, CommentMultiline, CommentSingle, CommentSpecial, CommentPreproc, CommentPreprocFile, Generic, GenericDeleted, GenericEmph, GenericError, GenericHeading, GenericInserted, GenericOutput, GenericPrompt, GenericStrong, GenericSubheading, GenericTraceback, GenericUnderline, Text, TextWhitespace, TextSymbol, TextPunctuation} + +var _TokenTypeNameToValueMap = map[string]TokenType{ + _TokenTypeName[0:4]: None, + _TokenTypeLowerName[0:4]: None, + _TokenTypeName[4:9]: Other, + _TokenTypeLowerName[4:9]: Other, + _TokenTypeName[9:14]: Error, + _TokenTypeLowerName[9:14]: Error, + _TokenTypeName[14:22]: CodeLine, + _TokenTypeLowerName[14:22]: CodeLine, + _TokenTypeName[22:30]: LineLink, + _TokenTypeLowerName[22:30]: LineLink, 
+ _TokenTypeName[30:41]: LineTableTD, + _TokenTypeLowerName[30:41]: LineTableTD, + _TokenTypeName[41:50]: LineTable, + _TokenTypeLowerName[41:50]: LineTable, + _TokenTypeName[50:63]: LineHighlight, + _TokenTypeLowerName[50:63]: LineHighlight, + _TokenTypeName[63:79]: LineNumbersTable, + _TokenTypeLowerName[63:79]: LineNumbersTable, + _TokenTypeName[79:90]: LineNumbers, + _TokenTypeLowerName[79:90]: LineNumbers, + _TokenTypeName[90:94]: Line, + _TokenTypeLowerName[90:94]: Line, + _TokenTypeName[94:104]: PreWrapper, + _TokenTypeLowerName[94:104]: PreWrapper, + _TokenTypeName[104:114]: Background, + _TokenTypeLowerName[104:114]: Background, + _TokenTypeName[114:121]: EOFType, + _TokenTypeLowerName[114:121]: EOFType, + _TokenTypeName[121:128]: Keyword, + _TokenTypeLowerName[121:128]: Keyword, + _TokenTypeName[128:143]: KeywordConstant, + _TokenTypeLowerName[128:143]: KeywordConstant, + _TokenTypeName[143:161]: KeywordDeclaration, + _TokenTypeLowerName[143:161]: KeywordDeclaration, + _TokenTypeName[161:177]: KeywordNamespace, + _TokenTypeLowerName[161:177]: KeywordNamespace, + _TokenTypeName[177:190]: KeywordPseudo, + _TokenTypeLowerName[177:190]: KeywordPseudo, + _TokenTypeName[190:205]: KeywordReserved, + _TokenTypeLowerName[190:205]: KeywordReserved, + _TokenTypeName[205:216]: KeywordType, + _TokenTypeLowerName[205:216]: KeywordType, + _TokenTypeName[216:220]: Name, + _TokenTypeLowerName[216:220]: Name, + _TokenTypeName[220:233]: NameAttribute, + _TokenTypeLowerName[220:233]: NameAttribute, + _TokenTypeName[233:244]: NameBuiltin, + _TokenTypeLowerName[233:244]: NameBuiltin, + _TokenTypeName[244:261]: NameBuiltinPseudo, + _TokenTypeLowerName[244:261]: NameBuiltinPseudo, + _TokenTypeName[261:270]: NameClass, + _TokenTypeLowerName[261:270]: NameClass, + _TokenTypeName[270:282]: NameConstant, + _TokenTypeLowerName[270:282]: NameConstant, + _TokenTypeName[282:295]: NameDecorator, + _TokenTypeLowerName[282:295]: NameDecorator, + _TokenTypeName[295:305]: NameEntity, + 
_TokenTypeLowerName[295:305]: NameEntity, + _TokenTypeName[305:318]: NameException, + _TokenTypeLowerName[305:318]: NameException, + _TokenTypeName[318:330]: NameFunction, + _TokenTypeLowerName[318:330]: NameFunction, + _TokenTypeName[330:347]: NameFunctionMagic, + _TokenTypeLowerName[330:347]: NameFunctionMagic, + _TokenTypeName[347:358]: NameKeyword, + _TokenTypeLowerName[347:358]: NameKeyword, + _TokenTypeName[358:367]: NameLabel, + _TokenTypeLowerName[358:367]: NameLabel, + _TokenTypeName[367:380]: NameNamespace, + _TokenTypeLowerName[367:380]: NameNamespace, + _TokenTypeName[380:392]: NameOperator, + _TokenTypeLowerName[380:392]: NameOperator, + _TokenTypeName[392:401]: NameOther, + _TokenTypeLowerName[392:401]: NameOther, + _TokenTypeName[401:411]: NamePseudo, + _TokenTypeLowerName[401:411]: NamePseudo, + _TokenTypeName[411:423]: NameProperty, + _TokenTypeLowerName[411:423]: NameProperty, + _TokenTypeName[423:430]: NameTag, + _TokenTypeLowerName[423:430]: NameTag, + _TokenTypeName[430:442]: NameVariable, + _TokenTypeLowerName[430:442]: NameVariable, + _TokenTypeName[442:463]: NameVariableAnonymous, + _TokenTypeLowerName[442:463]: NameVariableAnonymous, + _TokenTypeName[463:480]: NameVariableClass, + _TokenTypeLowerName[463:480]: NameVariableClass, + _TokenTypeName[480:498]: NameVariableGlobal, + _TokenTypeLowerName[480:498]: NameVariableGlobal, + _TokenTypeName[498:518]: NameVariableInstance, + _TokenTypeLowerName[498:518]: NameVariableInstance, + _TokenTypeName[518:535]: NameVariableMagic, + _TokenTypeLowerName[518:535]: NameVariableMagic, + _TokenTypeName[535:542]: Literal, + _TokenTypeLowerName[535:542]: Literal, + _TokenTypeName[542:553]: LiteralDate, + _TokenTypeLowerName[542:553]: LiteralDate, + _TokenTypeName[553:565]: LiteralOther, + _TokenTypeLowerName[553:565]: LiteralOther, + _TokenTypeName[565:578]: LiteralString, + _TokenTypeLowerName[565:578]: LiteralString, + _TokenTypeName[578:596]: LiteralStringAffix, + _TokenTypeLowerName[578:596]: 
LiteralStringAffix, + _TokenTypeName[596:613]: LiteralStringAtom, + _TokenTypeLowerName[596:613]: LiteralStringAtom, + _TokenTypeName[613:634]: LiteralStringBacktick, + _TokenTypeLowerName[613:634]: LiteralStringBacktick, + _TokenTypeName[634:654]: LiteralStringBoolean, + _TokenTypeLowerName[634:654]: LiteralStringBoolean, + _TokenTypeName[654:671]: LiteralStringChar, + _TokenTypeLowerName[654:671]: LiteralStringChar, + _TokenTypeName[671:693]: LiteralStringDelimiter, + _TokenTypeLowerName[671:693]: LiteralStringDelimiter, + _TokenTypeName[693:709]: LiteralStringDoc, + _TokenTypeLowerName[693:709]: LiteralStringDoc, + _TokenTypeName[709:728]: LiteralStringDouble, + _TokenTypeLowerName[709:728]: LiteralStringDouble, + _TokenTypeName[728:747]: LiteralStringEscape, + _TokenTypeLowerName[728:747]: LiteralStringEscape, + _TokenTypeName[747:767]: LiteralStringHeredoc, + _TokenTypeLowerName[747:767]: LiteralStringHeredoc, + _TokenTypeName[767:788]: LiteralStringInterpol, + _TokenTypeLowerName[767:788]: LiteralStringInterpol, + _TokenTypeName[788:805]: LiteralStringName, + _TokenTypeLowerName[788:805]: LiteralStringName, + _TokenTypeName[805:823]: LiteralStringOther, + _TokenTypeLowerName[805:823]: LiteralStringOther, + _TokenTypeName[823:841]: LiteralStringRegex, + _TokenTypeLowerName[823:841]: LiteralStringRegex, + _TokenTypeName[841:860]: LiteralStringSingle, + _TokenTypeLowerName[841:860]: LiteralStringSingle, + _TokenTypeName[860:879]: LiteralStringSymbol, + _TokenTypeLowerName[860:879]: LiteralStringSymbol, + _TokenTypeName[879:892]: LiteralNumber, + _TokenTypeLowerName[879:892]: LiteralNumber, + _TokenTypeName[892:908]: LiteralNumberBin, + _TokenTypeLowerName[892:908]: LiteralNumberBin, + _TokenTypeName[908:926]: LiteralNumberFloat, + _TokenTypeLowerName[908:926]: LiteralNumberFloat, + _TokenTypeName[926:942]: LiteralNumberHex, + _TokenTypeLowerName[926:942]: LiteralNumberHex, + _TokenTypeName[942:962]: LiteralNumberInteger, + _TokenTypeLowerName[942:962]: 
LiteralNumberInteger, + _TokenTypeName[962:986]: LiteralNumberIntegerLong, + _TokenTypeLowerName[962:986]: LiteralNumberIntegerLong, + _TokenTypeName[986:1002]: LiteralNumberOct, + _TokenTypeLowerName[986:1002]: LiteralNumberOct, + _TokenTypeName[1002:1010]: Operator, + _TokenTypeLowerName[1002:1010]: Operator, + _TokenTypeName[1010:1022]: OperatorWord, + _TokenTypeLowerName[1010:1022]: OperatorWord, + _TokenTypeName[1022:1033]: Punctuation, + _TokenTypeLowerName[1022:1033]: Punctuation, + _TokenTypeName[1033:1040]: Comment, + _TokenTypeLowerName[1033:1040]: Comment, + _TokenTypeName[1040:1055]: CommentHashbang, + _TokenTypeLowerName[1040:1055]: CommentHashbang, + _TokenTypeName[1055:1071]: CommentMultiline, + _TokenTypeLowerName[1055:1071]: CommentMultiline, + _TokenTypeName[1071:1084]: CommentSingle, + _TokenTypeLowerName[1071:1084]: CommentSingle, + _TokenTypeName[1084:1098]: CommentSpecial, + _TokenTypeLowerName[1084:1098]: CommentSpecial, + _TokenTypeName[1098:1112]: CommentPreproc, + _TokenTypeLowerName[1098:1112]: CommentPreproc, + _TokenTypeName[1112:1130]: CommentPreprocFile, + _TokenTypeLowerName[1112:1130]: CommentPreprocFile, + _TokenTypeName[1130:1137]: Generic, + _TokenTypeLowerName[1130:1137]: Generic, + _TokenTypeName[1137:1151]: GenericDeleted, + _TokenTypeLowerName[1137:1151]: GenericDeleted, + _TokenTypeName[1151:1162]: GenericEmph, + _TokenTypeLowerName[1151:1162]: GenericEmph, + _TokenTypeName[1162:1174]: GenericError, + _TokenTypeLowerName[1162:1174]: GenericError, + _TokenTypeName[1174:1188]: GenericHeading, + _TokenTypeLowerName[1174:1188]: GenericHeading, + _TokenTypeName[1188:1203]: GenericInserted, + _TokenTypeLowerName[1188:1203]: GenericInserted, + _TokenTypeName[1203:1216]: GenericOutput, + _TokenTypeLowerName[1203:1216]: GenericOutput, + _TokenTypeName[1216:1229]: GenericPrompt, + _TokenTypeLowerName[1216:1229]: GenericPrompt, + _TokenTypeName[1229:1242]: GenericStrong, + _TokenTypeLowerName[1229:1242]: GenericStrong, + 
_TokenTypeName[1242:1259]: GenericSubheading, + _TokenTypeLowerName[1242:1259]: GenericSubheading, + _TokenTypeName[1259:1275]: GenericTraceback, + _TokenTypeLowerName[1259:1275]: GenericTraceback, + _TokenTypeName[1275:1291]: GenericUnderline, + _TokenTypeLowerName[1275:1291]: GenericUnderline, + _TokenTypeName[1291:1295]: Text, + _TokenTypeLowerName[1291:1295]: Text, + _TokenTypeName[1295:1309]: TextWhitespace, + _TokenTypeLowerName[1295:1309]: TextWhitespace, + _TokenTypeName[1309:1319]: TextSymbol, + _TokenTypeLowerName[1309:1319]: TextSymbol, + _TokenTypeName[1319:1334]: TextPunctuation, + _TokenTypeLowerName[1319:1334]: TextPunctuation, +} + +var _TokenTypeNames = []string{ + _TokenTypeName[0:4], + _TokenTypeName[4:9], + _TokenTypeName[9:14], + _TokenTypeName[14:22], + _TokenTypeName[22:30], + _TokenTypeName[30:41], + _TokenTypeName[41:50], + _TokenTypeName[50:63], + _TokenTypeName[63:79], + _TokenTypeName[79:90], + _TokenTypeName[90:94], + _TokenTypeName[94:104], + _TokenTypeName[104:114], + _TokenTypeName[114:121], + _TokenTypeName[121:128], + _TokenTypeName[128:143], + _TokenTypeName[143:161], + _TokenTypeName[161:177], + _TokenTypeName[177:190], + _TokenTypeName[190:205], + _TokenTypeName[205:216], + _TokenTypeName[216:220], + _TokenTypeName[220:233], + _TokenTypeName[233:244], + _TokenTypeName[244:261], + _TokenTypeName[261:270], + _TokenTypeName[270:282], + _TokenTypeName[282:295], + _TokenTypeName[295:305], + _TokenTypeName[305:318], + _TokenTypeName[318:330], + _TokenTypeName[330:347], + _TokenTypeName[347:358], + _TokenTypeName[358:367], + _TokenTypeName[367:380], + _TokenTypeName[380:392], + _TokenTypeName[392:401], + _TokenTypeName[401:411], + _TokenTypeName[411:423], + _TokenTypeName[423:430], + _TokenTypeName[430:442], + _TokenTypeName[442:463], + _TokenTypeName[463:480], + _TokenTypeName[480:498], + _TokenTypeName[498:518], + _TokenTypeName[518:535], + _TokenTypeName[535:542], + _TokenTypeName[542:553], + _TokenTypeName[553:565], + 
_TokenTypeName[565:578], + _TokenTypeName[578:596], + _TokenTypeName[596:613], + _TokenTypeName[613:634], + _TokenTypeName[634:654], + _TokenTypeName[654:671], + _TokenTypeName[671:693], + _TokenTypeName[693:709], + _TokenTypeName[709:728], + _TokenTypeName[728:747], + _TokenTypeName[747:767], + _TokenTypeName[767:788], + _TokenTypeName[788:805], + _TokenTypeName[805:823], + _TokenTypeName[823:841], + _TokenTypeName[841:860], + _TokenTypeName[860:879], + _TokenTypeName[879:892], + _TokenTypeName[892:908], + _TokenTypeName[908:926], + _TokenTypeName[926:942], + _TokenTypeName[942:962], + _TokenTypeName[962:986], + _TokenTypeName[986:1002], + _TokenTypeName[1002:1010], + _TokenTypeName[1010:1022], + _TokenTypeName[1022:1033], + _TokenTypeName[1033:1040], + _TokenTypeName[1040:1055], + _TokenTypeName[1055:1071], + _TokenTypeName[1071:1084], + _TokenTypeName[1084:1098], + _TokenTypeName[1098:1112], + _TokenTypeName[1112:1130], + _TokenTypeName[1130:1137], + _TokenTypeName[1137:1151], + _TokenTypeName[1151:1162], + _TokenTypeName[1162:1174], + _TokenTypeName[1174:1188], + _TokenTypeName[1188:1203], + _TokenTypeName[1203:1216], + _TokenTypeName[1216:1229], + _TokenTypeName[1229:1242], + _TokenTypeName[1242:1259], + _TokenTypeName[1259:1275], + _TokenTypeName[1275:1291], + _TokenTypeName[1291:1295], + _TokenTypeName[1295:1309], + _TokenTypeName[1309:1319], + _TokenTypeName[1319:1334], +} + +// TokenTypeString retrieves an enum value from the enum constants string name. +// Throws an error if the param is not part of the enum. 
+func TokenTypeString(s string) (TokenType, error) { + if val, ok := _TokenTypeNameToValueMap[s]; ok { + return val, nil + } + + if val, ok := _TokenTypeNameToValueMap[strings.ToLower(s)]; ok { + return val, nil + } + return 0, fmt.Errorf("%s does not belong to TokenType values", s) +} + +// TokenTypeValues returns all values of the enum +func TokenTypeValues() []TokenType { + return _TokenTypeValues +} + +// TokenTypeStrings returns a slice of all String values of the enum +func TokenTypeStrings() []string { + strs := make([]string, len(_TokenTypeNames)) + copy(strs, _TokenTypeNames) + return strs +} + +// IsATokenType returns "true" if the value is listed in the enum definition. "false" otherwise +func (i TokenType) IsATokenType() bool { + _, ok := _TokenTypeMap[i] + return ok +} + +// MarshalText implements the encoding.TextMarshaler interface for TokenType +func (i TokenType) MarshalText() ([]byte, error) { + return []byte(i.String()), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface for TokenType +func (i *TokenType) UnmarshalText(text []byte) error { + var err error + *i, err = TokenTypeString(string(text)) + return err +} diff --git a/vendor/github.com/alecthomas/chroma/v2/types.go b/vendor/github.com/alecthomas/chroma/v2/types.go new file mode 100644 index 0000000..3d12310 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/v2/types.go @@ -0,0 +1,340 @@ +package chroma + +//go:generate enumer -text -type TokenType + +// TokenType is the type of token to highlight. +// +// It is also an Emitter, emitting a single token of itself +type TokenType int + +// Set of TokenTypes. +// +// Categories of types are grouped in ranges of 1000, while sub-categories are in ranges of 100. For +// example, the literal category is in the range 3000-3999. The sub-category for literal strings is +// in the range 3100-3199. + +// Meta token types. +const ( + // Default background style. + Background TokenType = -1 - iota + // PreWrapper style. 
+ PreWrapper + // Line style. + Line + // Line numbers in output. + LineNumbers + // Line numbers in output when in table. + LineNumbersTable + // Line higlight style. + LineHighlight + // Line numbers table wrapper style. + LineTable + // Line numbers table TD wrapper style. + LineTableTD + // Line number links. + LineLink + // Code line wrapper style. + CodeLine + // Input that could not be tokenised. + Error + // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate. + Other + // No highlighting. + None + // Used as an EOF marker / nil token + EOFType TokenType = 0 +) + +// Keywords. +const ( + Keyword TokenType = 1000 + iota + KeywordConstant + KeywordDeclaration + KeywordNamespace + KeywordPseudo + KeywordReserved + KeywordType +) + +// Names. +const ( + Name TokenType = 2000 + iota + NameAttribute + NameBuiltin + NameBuiltinPseudo + NameClass + NameConstant + NameDecorator + NameEntity + NameException + NameFunction + NameFunctionMagic + NameKeyword + NameLabel + NameNamespace + NameOperator + NameOther + NamePseudo + NameProperty + NameTag + NameVariable + NameVariableAnonymous + NameVariableClass + NameVariableGlobal + NameVariableInstance + NameVariableMagic +) + +// Literals. +const ( + Literal TokenType = 3000 + iota + LiteralDate + LiteralOther +) + +// Strings. +const ( + LiteralString TokenType = 3100 + iota + LiteralStringAffix + LiteralStringAtom + LiteralStringBacktick + LiteralStringBoolean + LiteralStringChar + LiteralStringDelimiter + LiteralStringDoc + LiteralStringDouble + LiteralStringEscape + LiteralStringHeredoc + LiteralStringInterpol + LiteralStringName + LiteralStringOther + LiteralStringRegex + LiteralStringSingle + LiteralStringSymbol +) + +// Literals. +const ( + LiteralNumber TokenType = 3200 + iota + LiteralNumberBin + LiteralNumberFloat + LiteralNumberHex + LiteralNumberInteger + LiteralNumberIntegerLong + LiteralNumberOct +) + +// Operators. 
+const ( + Operator TokenType = 4000 + iota + OperatorWord +) + +// Punctuation. +const ( + Punctuation TokenType = 5000 + iota +) + +// Comments. +const ( + Comment TokenType = 6000 + iota + CommentHashbang + CommentMultiline + CommentSingle + CommentSpecial +) + +// Preprocessor "comments". +const ( + CommentPreproc TokenType = 6100 + iota + CommentPreprocFile +) + +// Generic tokens. +const ( + Generic TokenType = 7000 + iota + GenericDeleted + GenericEmph + GenericError + GenericHeading + GenericInserted + GenericOutput + GenericPrompt + GenericStrong + GenericSubheading + GenericTraceback + GenericUnderline +) + +// Text. +const ( + Text TokenType = 8000 + iota + TextWhitespace + TextSymbol + TextPunctuation +) + +// Aliases. +const ( + Whitespace = TextWhitespace + + Date = LiteralDate + + String = LiteralString + StringAffix = LiteralStringAffix + StringBacktick = LiteralStringBacktick + StringChar = LiteralStringChar + StringDelimiter = LiteralStringDelimiter + StringDoc = LiteralStringDoc + StringDouble = LiteralStringDouble + StringEscape = LiteralStringEscape + StringHeredoc = LiteralStringHeredoc + StringInterpol = LiteralStringInterpol + StringOther = LiteralStringOther + StringRegex = LiteralStringRegex + StringSingle = LiteralStringSingle + StringSymbol = LiteralStringSymbol + + Number = LiteralNumber + NumberBin = LiteralNumberBin + NumberFloat = LiteralNumberFloat + NumberHex = LiteralNumberHex + NumberInteger = LiteralNumberInteger + NumberIntegerLong = LiteralNumberIntegerLong + NumberOct = LiteralNumberOct +) + +var ( + StandardTypes = map[TokenType]string{ + Background: "bg", + PreWrapper: "chroma", + Line: "line", + LineNumbers: "ln", + LineNumbersTable: "lnt", + LineHighlight: "hl", + LineTable: "lntable", + LineTableTD: "lntd", + LineLink: "lnlinks", + CodeLine: "cl", + Text: "", + Whitespace: "w", + Error: "err", + Other: "x", + // I have no idea what this is used for... 
+ // Escape: "esc", + + Keyword: "k", + KeywordConstant: "kc", + KeywordDeclaration: "kd", + KeywordNamespace: "kn", + KeywordPseudo: "kp", + KeywordReserved: "kr", + KeywordType: "kt", + + Name: "n", + NameAttribute: "na", + NameBuiltin: "nb", + NameBuiltinPseudo: "bp", + NameClass: "nc", + NameConstant: "no", + NameDecorator: "nd", + NameEntity: "ni", + NameException: "ne", + NameFunction: "nf", + NameFunctionMagic: "fm", + NameProperty: "py", + NameLabel: "nl", + NameNamespace: "nn", + NameOther: "nx", + NameTag: "nt", + NameVariable: "nv", + NameVariableClass: "vc", + NameVariableGlobal: "vg", + NameVariableInstance: "vi", + NameVariableMagic: "vm", + + Literal: "l", + LiteralDate: "ld", + + String: "s", + StringAffix: "sa", + StringBacktick: "sb", + StringChar: "sc", + StringDelimiter: "dl", + StringDoc: "sd", + StringDouble: "s2", + StringEscape: "se", + StringHeredoc: "sh", + StringInterpol: "si", + StringOther: "sx", + StringRegex: "sr", + StringSingle: "s1", + StringSymbol: "ss", + + Number: "m", + NumberBin: "mb", + NumberFloat: "mf", + NumberHex: "mh", + NumberInteger: "mi", + NumberIntegerLong: "il", + NumberOct: "mo", + + Operator: "o", + OperatorWord: "ow", + + Punctuation: "p", + + Comment: "c", + CommentHashbang: "ch", + CommentMultiline: "cm", + CommentPreproc: "cp", + CommentPreprocFile: "cpf", + CommentSingle: "c1", + CommentSpecial: "cs", + + Generic: "g", + GenericDeleted: "gd", + GenericEmph: "ge", + GenericError: "gr", + GenericHeading: "gh", + GenericInserted: "gi", + GenericOutput: "go", + GenericPrompt: "gp", + GenericStrong: "gs", + GenericSubheading: "gu", + GenericTraceback: "gt", + GenericUnderline: "gl", + } +) + +func (t TokenType) Parent() TokenType { + if t%100 != 0 { + return t / 100 * 100 + } + if t%1000 != 0 { + return t / 1000 * 1000 + } + return 0 +} + +func (t TokenType) Category() TokenType { + return t / 1000 * 1000 +} + +func (t TokenType) SubCategory() TokenType { + return t / 100 * 100 +} + +func (t TokenType) 
InCategory(other TokenType) bool { + return t/1000 == other/1000 +} + +func (t TokenType) InSubCategory(other TokenType) bool { + return t/100 == other/100 +} + +func (t TokenType) Emit(groups []string, _ *LexerState) Iterator { + return Literator(Token{Type: t, Value: groups[0]}) +} + +func (t TokenType) EmitterKind() string { return "token" } diff --git a/vendor/github.com/armon/go-radix/.gitignore b/vendor/github.com/armon/go-radix/.gitignore new file mode 100644 index 0000000..0026861 --- /dev/null +++ b/vendor/github.com/armon/go-radix/.gitignore @@ -0,0 +1,22 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe diff --git a/vendor/github.com/armon/go-radix/.travis.yml b/vendor/github.com/armon/go-radix/.travis.yml new file mode 100644 index 0000000..1a0bbea --- /dev/null +++ b/vendor/github.com/armon/go-radix/.travis.yml @@ -0,0 +1,3 @@ +language: go +go: + - tip diff --git a/vendor/github.com/armon/go-radix/LICENSE b/vendor/github.com/armon/go-radix/LICENSE new file mode 100644 index 0000000..a5df10e --- /dev/null +++ b/vendor/github.com/armon/go-radix/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Armon Dadgar + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/armon/go-radix/README.md b/vendor/github.com/armon/go-radix/README.md new file mode 100644 index 0000000..26f42a2 --- /dev/null +++ b/vendor/github.com/armon/go-radix/README.md @@ -0,0 +1,38 @@ +go-radix [![Build Status](https://travis-ci.org/armon/go-radix.png)](https://travis-ci.org/armon/go-radix) +========= + +Provides the `radix` package that implements a [radix tree](http://en.wikipedia.org/wiki/Radix_tree). +The package only provides a single `Tree` implementation, optimized for sparse nodes. + +As a radix tree, it provides the following: + * O(k) operations. In many cases, this can be faster than a hash table since + the hash function is an O(k) operation, and hash tables have very poor cache locality. + * Minimum / Maximum value lookups + * Ordered iteration + +For an immutable variant, see [go-immutable-radix](https://github.com/hashicorp/go-immutable-radix). + +Documentation +============= + +The full documentation is available on [Godoc](http://godoc.org/github.com/armon/go-radix). 
+ +Example +======= + +Below is a simple example of usage + +```go +// Create a tree +r := radix.New() +r.Insert("foo", 1) +r.Insert("bar", 2) +r.Insert("foobar", 2) + +// Find the longest prefix match +m, _, _ := r.LongestPrefix("foozip") +if m != "foo" { + panic("should be foo") +} +``` + diff --git a/vendor/github.com/armon/go-radix/radix.go b/vendor/github.com/armon/go-radix/radix.go new file mode 100644 index 0000000..e2bb22e --- /dev/null +++ b/vendor/github.com/armon/go-radix/radix.go @@ -0,0 +1,540 @@ +package radix + +import ( + "sort" + "strings" +) + +// WalkFn is used when walking the tree. Takes a +// key and value, returning if iteration should +// be terminated. +type WalkFn func(s string, v interface{}) bool + +// leafNode is used to represent a value +type leafNode struct { + key string + val interface{} +} + +// edge is used to represent an edge node +type edge struct { + label byte + node *node +} + +type node struct { + // leaf is used to store possible leaf + leaf *leafNode + + // prefix is the common prefix we ignore + prefix string + + // Edges should be stored in-order for iteration. 
+ // We avoid a fully materialized slice to save memory, + // since in most cases we expect to be sparse + edges edges +} + +func (n *node) isLeaf() bool { + return n.leaf != nil +} + +func (n *node) addEdge(e edge) { + n.edges = append(n.edges, e) + n.edges.Sort() +} + +func (n *node) updateEdge(label byte, node *node) { + num := len(n.edges) + idx := sort.Search(num, func(i int) bool { + return n.edges[i].label >= label + }) + if idx < num && n.edges[idx].label == label { + n.edges[idx].node = node + return + } + panic("replacing missing edge") +} + +func (n *node) getEdge(label byte) *node { + num := len(n.edges) + idx := sort.Search(num, func(i int) bool { + return n.edges[i].label >= label + }) + if idx < num && n.edges[idx].label == label { + return n.edges[idx].node + } + return nil +} + +func (n *node) delEdge(label byte) { + num := len(n.edges) + idx := sort.Search(num, func(i int) bool { + return n.edges[i].label >= label + }) + if idx < num && n.edges[idx].label == label { + copy(n.edges[idx:], n.edges[idx+1:]) + n.edges[len(n.edges)-1] = edge{} + n.edges = n.edges[:len(n.edges)-1] + } +} + +type edges []edge + +func (e edges) Len() int { + return len(e) +} + +func (e edges) Less(i, j int) bool { + return e[i].label < e[j].label +} + +func (e edges) Swap(i, j int) { + e[i], e[j] = e[j], e[i] +} + +func (e edges) Sort() { + sort.Sort(e) +} + +// Tree implements a radix tree. This can be treated as a +// Dictionary abstract data type. 
The main advantage over +// a standard hash map is prefix-based lookups and +// ordered iteration, +type Tree struct { + root *node + size int +} + +// New returns an empty Tree +func New() *Tree { + return NewFromMap(nil) +} + +// NewFromMap returns a new tree containing the keys +// from an existing map +func NewFromMap(m map[string]interface{}) *Tree { + t := &Tree{root: &node{}} + for k, v := range m { + t.Insert(k, v) + } + return t +} + +// Len is used to return the number of elements in the tree +func (t *Tree) Len() int { + return t.size +} + +// longestPrefix finds the length of the shared prefix +// of two strings +func longestPrefix(k1, k2 string) int { + max := len(k1) + if l := len(k2); l < max { + max = l + } + var i int + for i = 0; i < max; i++ { + if k1[i] != k2[i] { + break + } + } + return i +} + +// Insert is used to add a newentry or update +// an existing entry. Returns if updated. +func (t *Tree) Insert(s string, v interface{}) (interface{}, bool) { + var parent *node + n := t.root + search := s + for { + // Handle key exhaution + if len(search) == 0 { + if n.isLeaf() { + old := n.leaf.val + n.leaf.val = v + return old, true + } + + n.leaf = &leafNode{ + key: s, + val: v, + } + t.size++ + return nil, false + } + + // Look for the edge + parent = n + n = n.getEdge(search[0]) + + // No edge, create one + if n == nil { + e := edge{ + label: search[0], + node: &node{ + leaf: &leafNode{ + key: s, + val: v, + }, + prefix: search, + }, + } + parent.addEdge(e) + t.size++ + return nil, false + } + + // Determine longest prefix of the search key on match + commonPrefix := longestPrefix(search, n.prefix) + if commonPrefix == len(n.prefix) { + search = search[commonPrefix:] + continue + } + + // Split the node + t.size++ + child := &node{ + prefix: search[:commonPrefix], + } + parent.updateEdge(search[0], child) + + // Restore the existing node + child.addEdge(edge{ + label: n.prefix[commonPrefix], + node: n, + }) + n.prefix = n.prefix[commonPrefix:] + + 
// Create a new leaf node + leaf := &leafNode{ + key: s, + val: v, + } + + // If the new key is a subset, add to to this node + search = search[commonPrefix:] + if len(search) == 0 { + child.leaf = leaf + return nil, false + } + + // Create a new edge for the node + child.addEdge(edge{ + label: search[0], + node: &node{ + leaf: leaf, + prefix: search, + }, + }) + return nil, false + } +} + +// Delete is used to delete a key, returning the previous +// value and if it was deleted +func (t *Tree) Delete(s string) (interface{}, bool) { + var parent *node + var label byte + n := t.root + search := s + for { + // Check for key exhaution + if len(search) == 0 { + if !n.isLeaf() { + break + } + goto DELETE + } + + // Look for an edge + parent = n + label = search[0] + n = n.getEdge(label) + if n == nil { + break + } + + // Consume the search prefix + if strings.HasPrefix(search, n.prefix) { + search = search[len(n.prefix):] + } else { + break + } + } + return nil, false + +DELETE: + // Delete the leaf + leaf := n.leaf + n.leaf = nil + t.size-- + + // Check if we should delete this node from the parent + if parent != nil && len(n.edges) == 0 { + parent.delEdge(label) + } + + // Check if we should merge this node + if n != t.root && len(n.edges) == 1 { + n.mergeChild() + } + + // Check if we should merge the parent's other child + if parent != nil && parent != t.root && len(parent.edges) == 1 && !parent.isLeaf() { + parent.mergeChild() + } + + return leaf.val, true +} + +// DeletePrefix is used to delete the subtree under a prefix +// Returns how many nodes were deleted +// Use this to delete large subtrees efficiently +func (t *Tree) DeletePrefix(s string) int { + return t.deletePrefix(nil, t.root, s) +} + +// delete does a recursive deletion +func (t *Tree) deletePrefix(parent, n *node, prefix string) int { + // Check for key exhaustion + if len(prefix) == 0 { + // Remove the leaf node + subTreeSize := 0 + //recursively walk from all edges of the node to be deleted + 
recursiveWalk(n, func(s string, v interface{}) bool { + subTreeSize++ + return false + }) + if n.isLeaf() { + n.leaf = nil + } + n.edges = nil // deletes the entire subtree + + // Check if we should merge the parent's other child + if parent != nil && parent != t.root && len(parent.edges) == 1 && !parent.isLeaf() { + parent.mergeChild() + } + t.size -= subTreeSize + return subTreeSize + } + + // Look for an edge + label := prefix[0] + child := n.getEdge(label) + if child == nil || (!strings.HasPrefix(child.prefix, prefix) && !strings.HasPrefix(prefix, child.prefix)) { + return 0 + } + + // Consume the search prefix + if len(child.prefix) > len(prefix) { + prefix = prefix[len(prefix):] + } else { + prefix = prefix[len(child.prefix):] + } + return t.deletePrefix(n, child, prefix) +} + +func (n *node) mergeChild() { + e := n.edges[0] + child := e.node + n.prefix = n.prefix + child.prefix + n.leaf = child.leaf + n.edges = child.edges +} + +// Get is used to lookup a specific key, returning +// the value and if it was found +func (t *Tree) Get(s string) (interface{}, bool) { + n := t.root + search := s + for { + // Check for key exhaution + if len(search) == 0 { + if n.isLeaf() { + return n.leaf.val, true + } + break + } + + // Look for an edge + n = n.getEdge(search[0]) + if n == nil { + break + } + + // Consume the search prefix + if strings.HasPrefix(search, n.prefix) { + search = search[len(n.prefix):] + } else { + break + } + } + return nil, false +} + +// LongestPrefix is like Get, but instead of an +// exact match, it will return the longest prefix match. 
+func (t *Tree) LongestPrefix(s string) (string, interface{}, bool) { + var last *leafNode + n := t.root + search := s + for { + // Look for a leaf node + if n.isLeaf() { + last = n.leaf + } + + // Check for key exhaution + if len(search) == 0 { + break + } + + // Look for an edge + n = n.getEdge(search[0]) + if n == nil { + break + } + + // Consume the search prefix + if strings.HasPrefix(search, n.prefix) { + search = search[len(n.prefix):] + } else { + break + } + } + if last != nil { + return last.key, last.val, true + } + return "", nil, false +} + +// Minimum is used to return the minimum value in the tree +func (t *Tree) Minimum() (string, interface{}, bool) { + n := t.root + for { + if n.isLeaf() { + return n.leaf.key, n.leaf.val, true + } + if len(n.edges) > 0 { + n = n.edges[0].node + } else { + break + } + } + return "", nil, false +} + +// Maximum is used to return the maximum value in the tree +func (t *Tree) Maximum() (string, interface{}, bool) { + n := t.root + for { + if num := len(n.edges); num > 0 { + n = n.edges[num-1].node + continue + } + if n.isLeaf() { + return n.leaf.key, n.leaf.val, true + } + break + } + return "", nil, false +} + +// Walk is used to walk the tree +func (t *Tree) Walk(fn WalkFn) { + recursiveWalk(t.root, fn) +} + +// WalkPrefix is used to walk the tree under a prefix +func (t *Tree) WalkPrefix(prefix string, fn WalkFn) { + n := t.root + search := prefix + for { + // Check for key exhaution + if len(search) == 0 { + recursiveWalk(n, fn) + return + } + + // Look for an edge + n = n.getEdge(search[0]) + if n == nil { + break + } + + // Consume the search prefix + if strings.HasPrefix(search, n.prefix) { + search = search[len(n.prefix):] + + } else if strings.HasPrefix(n.prefix, search) { + // Child may be under our search prefix + recursiveWalk(n, fn) + return + } else { + break + } + } + +} + +// WalkPath is used to walk the tree, but only visiting nodes +// from the root down to a given leaf. 
Where WalkPrefix walks +// all the entries *under* the given prefix, this walks the +// entries *above* the given prefix. +func (t *Tree) WalkPath(path string, fn WalkFn) { + n := t.root + search := path + for { + // Visit the leaf values if any + if n.leaf != nil && fn(n.leaf.key, n.leaf.val) { + return + } + + // Check for key exhaution + if len(search) == 0 { + return + } + + // Look for an edge + n = n.getEdge(search[0]) + if n == nil { + return + } + + // Consume the search prefix + if strings.HasPrefix(search, n.prefix) { + search = search[len(n.prefix):] + } else { + break + } + } +} + +// recursiveWalk is used to do a pre-order walk of a node +// recursively. Returns true if the walk should be aborted +func recursiveWalk(n *node, fn WalkFn) bool { + // Visit the leaf values if any + if n.leaf != nil && fn(n.leaf.key, n.leaf.val) { + return true + } + + // Recurse on the children + for _, e := range n.edges { + if recursiveWalk(e.node, fn) { + return true + } + } + return false +} + +// ToMap is used to walk the tree and convert it into a map +func (t *Tree) ToMap() map[string]interface{} { + out := make(map[string]interface{}, t.size) + t.Walk(func(k string, v interface{}) bool { + out[k] = v + return false + }) + return out +} diff --git a/vendor/github.com/asaskevich/govalidator/.gitignore b/vendor/github.com/asaskevich/govalidator/.gitignore new file mode 100644 index 0000000..8d69a94 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/.gitignore @@ -0,0 +1,15 @@ +bin/ +.idea/ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + diff --git a/vendor/github.com/asaskevich/govalidator/.travis.yml b/vendor/github.com/asaskevich/govalidator/.travis.yml new file mode 100644 index 0000000..bb83c66 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/.travis.yml @@ -0,0 +1,12 @@ +language: go 
+dist: xenial +go: + - '1.10' + - '1.11' + - '1.12' + - '1.13' + - 'tip' + +script: + - go test -coverpkg=./... -coverprofile=coverage.info -timeout=5s + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md b/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..4b462b0 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md @@ -0,0 +1,43 @@ +# Contributor Code of Conduct + +This project adheres to [The Code Manifesto](http://codemanifesto.com) +as its guidelines for contributor interactions. + +## The Code Manifesto + +We want to work in an ecosystem that empowers developers to reach their +potential — one that encourages growth and effective collaboration. A space +that is safe for all. + +A space such as this benefits everyone that participates in it. It encourages +new developers to enter our field. It is through discussion and collaboration +that we grow, and through growth that we improve. + +In the effort to create such a place, we hold to these values: + +1. **Discrimination limits us.** This includes discrimination on the basis of + race, gender, sexual orientation, gender identity, age, nationality, + technology and any other arbitrary exclusion of a group of people. +2. **Boundaries honor us.** Your comfort levels are not everyone’s comfort + levels. Remember that, and if brought to your attention, heed it. +3. **We are our biggest assets.** None of us were born masters of our trade. + Each of us has been helped along the way. Return that favor, when and where + you can. +4. **We are resources for the future.** As an extension of #3, share what you + know. Make yourself a resource to help those that come after you. +5. **Respect defines us.** Treat others as you wish to be treated. Make your + discussions, criticisms and debates from a position of respectfulness. Ask + yourself, is it true? Is it necessary? Is it constructive? 
Anything less is + unacceptable. +6. **Reactions require grace.** Angry responses are valid, but abusive language + and vindictive actions are toxic. When something happens that offends you, + handle it assertively, but be respectful. Escalate reasonably, and try to + allow the offender an opportunity to explain themselves, and possibly + correct the issue. +7. **Opinions are just that: opinions.** Each and every one of us, due to our + background and upbringing, have varying opinions. That is perfectly + acceptable. Remember this: if you respect your own opinions, you should + respect the opinions of others. +8. **To err is human.** You might not intend it, but mistakes do happen and + contribute to build experience. Tolerate honest mistakes, and don't + hesitate to apologize if you make one yourself. diff --git a/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md new file mode 100644 index 0000000..7ed268a --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md @@ -0,0 +1,63 @@ +#### Support +If you do have a contribution to the package, feel free to create a Pull Request or an Issue. 
+ +#### What to contribute +If you don't know what to do, there are some features and functions that need to be done + +- [ ] Refactor code +- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check +- [ ] Create actual list of contributors and projects that currently using this package +- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues) +- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) +- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new +- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc +- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224) +- [ ] Implement fuzzing testing +- [ ] Implement some struct/map/array utilities +- [ ] Implement map/array validation +- [ ] Implement benchmarking +- [ ] Implement batch of examples +- [ ] Look at forks for new features and fixes + +#### Advice +Feel free to create what you want, but keep in mind when you implement new features: +- Code must be clear and readable, names of variables/constants clearly describes what they are doing +- Public functions must be documented and described in source file and added to README.md to the list of available functions +- There are must be unit-tests for any new functions and improvements + +## Financial contributions + +We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator). +Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed. 
+ + +## Credits + + +### Contributors + +Thank you to all the people who have already contributed to govalidator! + + + +### Backers + +Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)] + + + + +### Sponsors + +Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor)) + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/asaskevich/govalidator/LICENSE b/vendor/github.com/asaskevich/govalidator/LICENSE new file mode 100644 index 0000000..cacba91 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2020 Alex Saskevich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/asaskevich/govalidator/README.md b/vendor/github.com/asaskevich/govalidator/README.md new file mode 100644 index 0000000..2c3fc35 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/README.md @@ -0,0 +1,622 @@ +govalidator +=========== +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator) +[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) +[![Coverage](https://codecov.io/gh/asaskevich/govalidator/branch/master/graph/badge.svg)](https://codecov.io/gh/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [![Backers on Open Collective](https://opencollective.com/govalidator/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/govalidator/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield) + +A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js). + +#### Installation +Make sure that Go is installed on your computer. 
+Type the following command in your terminal: + + go get github.com/asaskevich/govalidator + +or you can get specified release of the package with `gopkg.in`: + + go get gopkg.in/asaskevich/govalidator.v10 + +After it the package is ready to use. + + +#### Import package in your project +Add following line in your `*.go` file: +```go +import "github.com/asaskevich/govalidator" +``` +If you are unhappy to use long `govalidator`, you can do something like this: +```go +import ( + valid "github.com/asaskevich/govalidator" +) +``` + +#### Activate behavior to require all fields have a validation tag by default +`SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function. + +`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to determine between `nil` and `zero value` state can use this. If disabled, both `nil` and `zero` values cause validation errors. 
+ +```go +import "github.com/asaskevich/govalidator" + +func init() { + govalidator.SetFieldsRequiredByDefault(true) +} +``` + +Here's some code to explain it: +```go +// this struct definition will fail govalidator.ValidateStruct() (and the field values do not matter): +type exampleStruct struct { + Name string `` + Email string `valid:"email"` +} + +// this, however, will only fail when Email is empty or an invalid email address: +type exampleStruct2 struct { + Name string `valid:"-"` + Email string `valid:"email"` +} + +// lastly, this will only fail when Email is an invalid email address but not when it's empty: +type exampleStruct2 struct { + Name string `valid:"-"` + Email string `valid:"email,optional"` +} +``` + +#### Recent breaking changes (see [#123](https://github.com/asaskevich/govalidator/pull/123)) +##### Custom validator function signature +A context was added as the second parameter, for structs this is the object being validated – this makes dependent validation possible. +```go +import "github.com/asaskevich/govalidator" + +// old signature +func(i interface{}) bool + +// new signature +func(i interface{}, o interface{}) bool +``` + +##### Adding a custom validator +This was changed to prevent data races when accessing custom validators. +```go +import "github.com/asaskevich/govalidator" + +// before +govalidator.CustomTypeTagMap["customByteArrayValidator"] = func(i interface{}, o interface{}) bool { + // ... +} + +// after +govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, o interface{}) bool { + // ... 
+}) +``` + +#### List of functions: +```go +func Abs(value float64) float64 +func BlackList(str, chars string) string +func ByteLength(str string, params ...string) bool +func CamelCaseToUnderscore(str string) string +func Contains(str, substring string) bool +func Count(array []interface{}, iterator ConditionIterator) int +func Each(array []interface{}, iterator Iterator) +func ErrorByField(e error, field string) string +func ErrorsByField(e error) map[string]string +func Filter(array []interface{}, iterator ConditionIterator) []interface{} +func Find(array []interface{}, iterator ConditionIterator) interface{} +func GetLine(s string, index int) (string, error) +func GetLines(s string) []string +func HasLowerCase(str string) bool +func HasUpperCase(str string) bool +func HasWhitespace(str string) bool +func HasWhitespaceOnly(str string) bool +func InRange(value interface{}, left interface{}, right interface{}) bool +func InRangeFloat32(value, left, right float32) bool +func InRangeFloat64(value, left, right float64) bool +func InRangeInt(value, left, right interface{}) bool +func IsASCII(str string) bool +func IsAlpha(str string) bool +func IsAlphanumeric(str string) bool +func IsBase64(str string) bool +func IsByteLength(str string, min, max int) bool +func IsCIDR(str string) bool +func IsCRC32(str string) bool +func IsCRC32b(str string) bool +func IsCreditCard(str string) bool +func IsDNSName(str string) bool +func IsDataURI(str string) bool +func IsDialString(str string) bool +func IsDivisibleBy(str, num string) bool +func IsEmail(str string) bool +func IsExistingEmail(email string) bool +func IsFilePath(str string) (bool, int) +func IsFloat(str string) bool +func IsFullWidth(str string) bool +func IsHalfWidth(str string) bool +func IsHash(str string, algorithm string) bool +func IsHexadecimal(str string) bool +func IsHexcolor(str string) bool +func IsHost(str string) bool +func IsIP(str string) bool +func IsIPv4(str string) bool +func IsIPv6(str string) bool 
+func IsISBN(str string, version int) bool +func IsISBN10(str string) bool +func IsISBN13(str string) bool +func IsISO3166Alpha2(str string) bool +func IsISO3166Alpha3(str string) bool +func IsISO4217(str string) bool +func IsISO693Alpha2(str string) bool +func IsISO693Alpha3b(str string) bool +func IsIn(str string, params ...string) bool +func IsInRaw(str string, params ...string) bool +func IsInt(str string) bool +func IsJSON(str string) bool +func IsLatitude(str string) bool +func IsLongitude(str string) bool +func IsLowerCase(str string) bool +func IsMAC(str string) bool +func IsMD4(str string) bool +func IsMD5(str string) bool +func IsMagnetURI(str string) bool +func IsMongoID(str string) bool +func IsMultibyte(str string) bool +func IsNatural(value float64) bool +func IsNegative(value float64) bool +func IsNonNegative(value float64) bool +func IsNonPositive(value float64) bool +func IsNotNull(str string) bool +func IsNull(str string) bool +func IsNumeric(str string) bool +func IsPort(str string) bool +func IsPositive(value float64) bool +func IsPrintableASCII(str string) bool +func IsRFC3339(str string) bool +func IsRFC3339WithoutZone(str string) bool +func IsRGBcolor(str string) bool +func IsRegex(str string) bool +func IsRequestURI(rawurl string) bool +func IsRequestURL(rawurl string) bool +func IsRipeMD128(str string) bool +func IsRipeMD160(str string) bool +func IsRsaPub(str string, params ...string) bool +func IsRsaPublicKey(str string, keylen int) bool +func IsSHA1(str string) bool +func IsSHA256(str string) bool +func IsSHA384(str string) bool +func IsSHA512(str string) bool +func IsSSN(str string) bool +func IsSemver(str string) bool +func IsTiger128(str string) bool +func IsTiger160(str string) bool +func IsTiger192(str string) bool +func IsTime(str string, format string) bool +func IsType(v interface{}, params ...string) bool +func IsURL(str string) bool +func IsUTFDigit(str string) bool +func IsUTFLetter(str string) bool +func 
IsUTFLetterNumeric(str string) bool +func IsUTFNumeric(str string) bool +func IsUUID(str string) bool +func IsUUIDv3(str string) bool +func IsUUIDv4(str string) bool +func IsUUIDv5(str string) bool +func IsULID(str string) bool +func IsUnixTime(str string) bool +func IsUpperCase(str string) bool +func IsVariableWidth(str string) bool +func IsWhole(value float64) bool +func LeftTrim(str, chars string) string +func Map(array []interface{}, iterator ResultIterator) []interface{} +func Matches(str, pattern string) bool +func MaxStringLength(str string, params ...string) bool +func MinStringLength(str string, params ...string) bool +func NormalizeEmail(str string) (string, error) +func PadBoth(str string, padStr string, padLen int) string +func PadLeft(str string, padStr string, padLen int) string +func PadRight(str string, padStr string, padLen int) string +func PrependPathToErrors(err error, path string) error +func Range(str string, params ...string) bool +func RemoveTags(s string) string +func ReplacePattern(str, pattern, replace string) string +func Reverse(s string) string +func RightTrim(str, chars string) string +func RuneLength(str string, params ...string) bool +func SafeFileName(str string) string +func SetFieldsRequiredByDefault(value bool) +func SetNilPtrAllowedByRequired(value bool) +func Sign(value float64) float64 +func StringLength(str string, params ...string) bool +func StringMatches(s string, params ...string) bool +func StripLow(str string, keepNewLines bool) string +func ToBoolean(str string) (bool, error) +func ToFloat(str string) (float64, error) +func ToInt(value interface{}) (res int64, err error) +func ToJSON(obj interface{}) (string, error) +func ToString(obj interface{}) string +func Trim(str, chars string) string +func Truncate(str string, length int, ending string) string +func TruncatingErrorf(str string, args ...interface{}) error +func UnderscoreToCamelCase(s string) string +func ValidateMap(inputMap map[string]interface{}, 
validationMap map[string]interface{}) (bool, error) +func ValidateStruct(s interface{}) (bool, error) +func WhiteList(str, chars string) string +type ConditionIterator +type CustomTypeValidator +type Error +func (e Error) Error() string +type Errors +func (es Errors) Error() string +func (es Errors) Errors() []error +type ISO3166Entry +type ISO693Entry +type InterfaceParamValidator +type Iterator +type ParamValidator +type ResultIterator +type UnsupportedTypeError +func (e *UnsupportedTypeError) Error() string +type Validator +``` + +#### Examples +###### IsURL +```go +println(govalidator.IsURL(`http://user@pass:domain.com/path/page`)) +``` +###### IsType +```go +println(govalidator.IsType("Bob", "string")) +println(govalidator.IsType(1, "int")) +i := 1 +println(govalidator.IsType(&i, "*int")) +``` + +IsType can be used through the tag `type` which is essential for map validation: +```go +type User struct { + Name string `valid:"type(string)"` + Age int `valid:"type(int)"` + Meta interface{} `valid:"type(string)"` +} +result, err := govalidator.ValidateStruct(User{"Bob", 20, "meta"}) +if err != nil { + println("error: " + err.Error()) +} +println(result) +``` +###### ToString +```go +type User struct { + FirstName string + LastName string +} + +str := govalidator.ToString(&User{"John", "Juan"}) +println(str) +``` +###### Each, Map, Filter, Count for slices +Each iterates over the slice/array and calls Iterator for every item +```go +data := []interface{}{1, 2, 3, 4, 5} +var fn govalidator.Iterator = func(value interface{}, index int) { + println(value.(int)) +} +govalidator.Each(data, fn) +``` +```go +data := []interface{}{1, 2, 3, 4, 5} +var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} { + return value.(int) * 3 +} +_ = govalidator.Map(data, fn) // result = []interface{}{1, 6, 9, 12, 15} +``` +```go +data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} +var fn govalidator.ConditionIterator = func(value interface{}, index int) bool 
{ + return value.(int)%2 == 0 +} +_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10} +_ = govalidator.Count(data, fn) // result = 5 +``` +###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2) +If you want to validate structs, you can use tag `valid` for any field in your structure. All validators used with this field in one tag are separated by comma. If you want to skip validation, place `-` in your tag. If you need a validator that is not on the list below, you can add it like this: +```go +govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool { + return str == "duck" +}) +``` +For completely custom validators (interface-based), see below. + +Here is a list of available validators for struct fields (validator - used function): +```go +"email": IsEmail, +"url": IsURL, +"dialstring": IsDialString, +"requrl": IsRequestURL, +"requri": IsRequestURI, +"alpha": IsAlpha, +"utfletter": IsUTFLetter, +"alphanum": IsAlphanumeric, +"utfletternum": IsUTFLetterNumeric, +"numeric": IsNumeric, +"utfnumeric": IsUTFNumeric, +"utfdigit": IsUTFDigit, +"hexadecimal": IsHexadecimal, +"hexcolor": IsHexcolor, +"rgbcolor": IsRGBcolor, +"lowercase": IsLowerCase, +"uppercase": IsUpperCase, +"int": IsInt, +"float": IsFloat, +"null": IsNull, +"uuid": IsUUID, +"uuidv3": IsUUIDv3, +"uuidv4": IsUUIDv4, +"uuidv5": IsUUIDv5, +"creditcard": IsCreditCard, +"isbn10": IsISBN10, +"isbn13": IsISBN13, +"json": IsJSON, +"multibyte": IsMultibyte, +"ascii": IsASCII, +"printableascii": IsPrintableASCII, +"fullwidth": IsFullWidth, +"halfwidth": IsHalfWidth, +"variablewidth": IsVariableWidth, +"base64": IsBase64, +"datauri": IsDataURI, +"ip": IsIP, +"port": IsPort, +"ipv4": IsIPv4, +"ipv6": IsIPv6, +"dns": IsDNSName, +"host": IsHost, +"mac": IsMAC, +"latitude": IsLatitude, +"longitude": IsLongitude, +"ssn": IsSSN, +"semver": IsSemver, +"rfc3339": IsRFC3339, +"rfc3339WithoutZone": IsRFC3339WithoutZone, +"ISO3166Alpha2": IsISO3166Alpha2, 
+"ISO3166Alpha3": IsISO3166Alpha3, +"ulid": IsULID, +``` +Validators with parameters + +```go +"range(min|max)": Range, +"length(min|max)": ByteLength, +"runelength(min|max)": RuneLength, +"stringlength(min|max)": StringLength, +"matches(pattern)": StringMatches, +"in(string1|string2|...|stringN)": IsIn, +"rsapub(keylength)" : IsRsaPub, +"minstringlength(int): MinStringLength, +"maxstringlength(int): MaxStringLength, +``` +Validators with parameters for any type + +```go +"type(type)": IsType, +``` + +And here is small example of usage: +```go +type Post struct { + Title string `valid:"alphanum,required"` + Message string `valid:"duck,ascii"` + Message2 string `valid:"animal(dog)"` + AuthorIP string `valid:"ipv4"` + Date string `valid:"-"` +} +post := &Post{ + Title: "My Example Post", + Message: "duck", + Message2: "dog", + AuthorIP: "123.234.54.3", +} + +// Add your own struct validation tags +govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool { + return str == "duck" +}) + +// Add your own struct validation tags with parameter +govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool { + species := params[0] + return str == species +}) +govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$") + +result, err := govalidator.ValidateStruct(post) +if err != nil { + println("error: " + err.Error()) +} +println(result) +``` +###### ValidateMap [#2](https://github.com/asaskevich/govalidator/pull/338) +If you want to validate maps, you can use the map to be validated and a validation map that contain the same tags used in ValidateStruct, both maps have to be in the form `map[string]interface{}` + +So here is small example of usage: +```go +var mapTemplate = map[string]interface{}{ + "name":"required,alpha", + "family":"required,alpha", + "email":"required,email", + "cell-phone":"numeric", + "address":map[string]interface{}{ + "line1":"required,alphanum", + "line2":"alphanum", 
+ "postal-code":"numeric", + }, +} + +var inputMap = map[string]interface{}{ + "name":"Bob", + "family":"Smith", + "email":"foo@bar.baz", + "address":map[string]interface{}{ + "line1":"", + "line2":"", + "postal-code":"", + }, +} + +result, err := govalidator.ValidateMap(inputMap, mapTemplate) +if err != nil { + println("error: " + err.Error()) +} +println(result) +``` + +###### WhiteList +```go +// Remove all characters from string ignoring characters between "a" and "z" +println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa") +``` + +###### Custom validation functions +Custom validation using your own domain specific validators is also available - here's an example of how to use it: +```go +import "github.com/asaskevich/govalidator" + +type CustomByteArray [6]byte // custom types are supported and can be validated + +type StructWithCustomByteArray struct { + ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence + Email string `valid:"email"` + CustomMinLength int `valid:"-"` +} + +govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, context interface{}) bool { + switch v := context.(type) { // you can type switch on the context interface being validated + case StructWithCustomByteArray: + // you can check and validate against some other field in the context, + // return early or not validate against the context at all – your choice + case SomeOtherType: + // ... + default: + // expecting some other type? Throw/panic here or continue + } + + switch v := i.(type) { // type switch on the struct field being validated + case CustomByteArray: + for _, e := range v { // this validator checks that the byte array is not empty, i.e. 
not all zeroes + if e != 0 { + return true + } + } + } + return false +}) +govalidator.CustomTypeTagMap.Set("customMinLengthValidator", func(i interface{}, context interface{}) bool { + switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation + case StructWithCustomByteArray: + return len(v.ID) >= v.CustomMinLength + } + return false +}) +``` + +###### Loop over Error() +By default .Error() returns all errors in a single String. To access each error you can do this: +```go + if err != nil { + errs := err.(govalidator.Errors).Errors() + for _, e := range errs { + fmt.Println(e.Error()) + } + } +``` + +###### Custom error messages +Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it: +```go +type Ticket struct { + Id int64 `json:"id"` + FirstName string `json:"firstname" valid:"required~First name is blank"` +} +``` + +#### Notes +Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator). +Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator). + +#### Support +If you do have a contribution to the package, feel free to create a Pull Request or an Issue. 
+ +#### What to contribute +If you don't know what to do, there are some features and functions that need to be done + +- [ ] Refactor code +- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check +- [ ] Create actual list of contributors and projects that currently using this package +- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues) +- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) +- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new +- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc +- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224) +- [ ] Implement fuzzing testing +- [ ] Implement some struct/map/array utilities +- [ ] Implement map/array validation +- [ ] Implement benchmarking +- [ ] Implement batch of examples +- [ ] Look at forks for new features and fixes + +#### Advice +Feel free to create what you want, but keep in mind when you implement new features: +- Code must be clear and readable, names of variables/constants clearly describes what they are doing +- Public functions must be documented and described in source file and added to README.md to the list of available functions +- There are must be unit-tests for any new functions and improvements + +## Credits +### Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. 
+ +#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors) +* [Daniel Lohse](https://github.com/annismckenzie) +* [Attila Oláh](https://github.com/attilaolah) +* [Daniel Korner](https://github.com/Dadie) +* [Steven Wilkin](https://github.com/stevenwilkin) +* [Deiwin Sarjas](https://github.com/deiwin) +* [Noah Shibley](https://github.com/slugmobile) +* [Nathan Davies](https://github.com/nathj07) +* [Matt Sanford](https://github.com/mzsanford) +* [Simon ccl1115](https://github.com/ccl1115) + + + + +### Backers + +Thank you to all our backers! 🙠[[Become a backer](https://opencollective.com/govalidator#backer)] + + + + +### Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)] + + + + + + + + + + + + + + + +## License +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large) diff --git a/vendor/github.com/asaskevich/govalidator/arrays.go b/vendor/github.com/asaskevich/govalidator/arrays.go new file mode 100644 index 0000000..3e1da7c --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/arrays.go @@ -0,0 +1,87 @@ +package govalidator + +// Iterator is the function that accepts element of slice/array and its index +type Iterator func(interface{}, int) + +// ResultIterator is the function that accepts element of slice/array and its index and returns any result +type ResultIterator func(interface{}, int) interface{} + +// ConditionIterator is the function that accepts element of slice/array and its index and returns boolean +type ConditionIterator func(interface{}, int) bool + +// ReduceIterator is the function that accepts two element of slice/array and returns result of merging those values +type ReduceIterator func(interface{}, interface{}) 
interface{} + +// Some validates that any item of array corresponds to ConditionIterator. Returns boolean. +func Some(array []interface{}, iterator ConditionIterator) bool { + res := false + for index, data := range array { + res = res || iterator(data, index) + } + return res +} + +// Every validates that every item of array corresponds to ConditionIterator. Returns boolean. +func Every(array []interface{}, iterator ConditionIterator) bool { + res := true + for index, data := range array { + res = res && iterator(data, index) + } + return res +} + +// Reduce boils down a list of values into a single value by ReduceIterator +func Reduce(array []interface{}, iterator ReduceIterator, initialValue interface{}) interface{} { + for _, data := range array { + initialValue = iterator(initialValue, data) + } + return initialValue +} + +// Each iterates over the slice and apply Iterator to every item +func Each(array []interface{}, iterator Iterator) { + for index, data := range array { + iterator(data, index) + } +} + +// Map iterates over the slice and apply ResultIterator to every item. Returns new slice as a result. +func Map(array []interface{}, iterator ResultIterator) []interface{} { + var result = make([]interface{}, len(array)) + for index, data := range array { + result[index] = iterator(data, index) + } + return result +} + +// Find iterates over the slice and apply ConditionIterator to every item. Returns first item that meet ConditionIterator or nil otherwise. +func Find(array []interface{}, iterator ConditionIterator) interface{} { + for index, data := range array { + if iterator(data, index) { + return data + } + } + return nil +} + +// Filter iterates over the slice and apply ConditionIterator to every item. Returns new slice. 
+func Filter(array []interface{}, iterator ConditionIterator) []interface{} { + var result = make([]interface{}, 0) + for index, data := range array { + if iterator(data, index) { + result = append(result, data) + } + } + return result +} + +// Count iterates over the slice and apply ConditionIterator to every item. Returns count of items that meets ConditionIterator. +func Count(array []interface{}, iterator ConditionIterator) int { + count := 0 + for index, data := range array { + if iterator(data, index) { + count = count + 1 + } + } + return count +} diff --git a/vendor/github.com/asaskevich/govalidator/converter.go b/vendor/github.com/asaskevich/govalidator/converter.go new file mode 100644 index 0000000..d68e990 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/converter.go @@ -0,0 +1,81 @@ +package govalidator + +import ( + "encoding/json" + "fmt" + "reflect" + "strconv" +) + +// ToString convert the input to a string. +func ToString(obj interface{}) string { + res := fmt.Sprintf("%v", obj) + return res +} + +// ToJSON convert the input to a valid JSON string +func ToJSON(obj interface{}) (string, error) { + res, err := json.Marshal(obj) + if err != nil { + res = []byte("") + } + return string(res), err +} + +// ToFloat convert the input string to a float, or 0.0 if the input is not a float. +func ToFloat(value interface{}) (res float64, err error) { + val := reflect.ValueOf(value) + + switch value.(type) { + case int, int8, int16, int32, int64: + res = float64(val.Int()) + case uint, uint8, uint16, uint32, uint64: + res = float64(val.Uint()) + case float32, float64: + res = val.Float() + case string: + res, err = strconv.ParseFloat(val.String(), 64) + if err != nil { + res = 0 + } + default: + err = fmt.Errorf("ToInt: unknown interface type %T", value) + res = 0 + } + + return +} + +// ToInt convert the input string or any int type to an integer type 64, or 0 if the input is not an integer. 
+func ToInt(value interface{}) (res int64, err error) { + val := reflect.ValueOf(value) + + switch value.(type) { + case int, int8, int16, int32, int64: + res = val.Int() + case uint, uint8, uint16, uint32, uint64: + res = int64(val.Uint()) + case float32, float64: + res = int64(val.Float()) + case string: + if IsInt(val.String()) { + res, err = strconv.ParseInt(val.String(), 0, 64) + if err != nil { + res = 0 + } + } else { + err = fmt.Errorf("ToInt: invalid numeric format %g", value) + res = 0 + } + default: + err = fmt.Errorf("ToInt: unknown interface type %T", value) + res = 0 + } + + return +} + +// ToBoolean convert the input string to a boolean. +func ToBoolean(str string) (bool, error) { + return strconv.ParseBool(str) +} diff --git a/vendor/github.com/asaskevich/govalidator/doc.go b/vendor/github.com/asaskevich/govalidator/doc.go new file mode 100644 index 0000000..55dce62 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/doc.go @@ -0,0 +1,3 @@ +package govalidator + +// A package of validators and sanitizers for strings, structures and collections. diff --git a/vendor/github.com/asaskevich/govalidator/error.go b/vendor/github.com/asaskevich/govalidator/error.go new file mode 100644 index 0000000..1da2336 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/error.go @@ -0,0 +1,47 @@ +package govalidator + +import ( + "sort" + "strings" +) + +// Errors is an array of multiple errors and conforms to the error interface. +type Errors []error + +// Errors returns itself. +func (es Errors) Errors() []error { + return es +} + +func (es Errors) Error() string { + var errs []string + for _, e := range es { + errs = append(errs, e.Error()) + } + sort.Strings(errs) + return strings.Join(errs, ";") +} + +// Error encapsulates a name, an error and whether there's a custom error message or not. 
+type Error struct { + Name string + Err error + CustomErrorMessageExists bool + + // Validator indicates the name of the validator that failed + Validator string + Path []string +} + +func (e Error) Error() string { + if e.CustomErrorMessageExists { + return e.Err.Error() + } + + errName := e.Name + if len(e.Path) > 0 { + errName = strings.Join(append(e.Path, e.Name), ".") + } + + return errName + ": " + e.Err.Error() +} diff --git a/vendor/github.com/asaskevich/govalidator/numerics.go b/vendor/github.com/asaskevich/govalidator/numerics.go new file mode 100644 index 0000000..5041d9e --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/numerics.go @@ -0,0 +1,100 @@ +package govalidator + +import ( + "math" +) + +// Abs returns absolute value of number +func Abs(value float64) float64 { + return math.Abs(value) +} + +// Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise +func Sign(value float64) float64 { + if value > 0 { + return 1 + } else if value < 0 { + return -1 + } else { + return 0 + } +} + +// IsNegative returns true if value < 0 +func IsNegative(value float64) bool { + return value < 0 +} + +// IsPositive returns true if value > 0 +func IsPositive(value float64) bool { + return value > 0 +} + +// IsNonNegative returns true if value >= 0 +func IsNonNegative(value float64) bool { + return value >= 0 +} + +// IsNonPositive returns true if value <= 0 +func IsNonPositive(value float64) bool { + return value <= 0 +} + +// InRangeInt returns true if value lies between left and right border +func InRangeInt(value, left, right interface{}) bool { + value64, _ := ToInt(value) + left64, _ := ToInt(left) + right64, _ := ToInt(right) + if left64 > right64 { + left64, right64 = right64, left64 + } + return value64 >= left64 && value64 <= right64 +} + +// InRangeFloat32 returns true if value lies between left and right border +func InRangeFloat32(value, left, right float32) bool { + if left > right { + left, right = right, 
left + } + return value >= left && value <= right +} + +// InRangeFloat64 returns true if value lies between left and right border +func InRangeFloat64(value, left, right float64) bool { + if left > right { + left, right = right, left + } + return value >= left && value <= right +} + +// InRange returns true if value lies between left and right border, generic type to handle int, float32, float64 and string. +// All types must the same type. +// False if value doesn't lie in range or if it incompatible or not comparable +func InRange(value interface{}, left interface{}, right interface{}) bool { + switch value.(type) { + case int: + intValue, _ := ToInt(value) + intLeft, _ := ToInt(left) + intRight, _ := ToInt(right) + return InRangeInt(intValue, intLeft, intRight) + case float32, float64: + intValue, _ := ToFloat(value) + intLeft, _ := ToFloat(left) + intRight, _ := ToFloat(right) + return InRangeFloat64(intValue, intLeft, intRight) + case string: + return value.(string) >= left.(string) && value.(string) <= right.(string) + default: + return false + } +} + +// IsWhole returns true if value is whole number +func IsWhole(value float64) bool { + return math.Remainder(value, 1) == 0 +} + +// IsNatural returns true if value is natural number (positive and whole) +func IsNatural(value float64) bool { + return IsWhole(value) && IsPositive(value) +} diff --git a/vendor/github.com/asaskevich/govalidator/patterns.go b/vendor/github.com/asaskevich/govalidator/patterns.go new file mode 100644 index 0000000..bafc376 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/patterns.go @@ -0,0 +1,113 @@ +package govalidator + +import "regexp" + +// Basic regular expressions for validating strings +const ( + Email string = 
"^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" + CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11}|6[27][0-9]{14})$" + ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$" + ISBN13 string = "^(?:[0-9]{13})$" + UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$" + UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" + UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" + UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" + Alpha string = "^[a-zA-Z]+$" + Alphanumeric string = "^[a-zA-Z0-9]+$" + Numeric string = "^[0-9]+$" + Int 
string = "^(?:[-+]?(?:0|[1-9][0-9]*))$" + Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$" + Hexadecimal string = "^[0-9a-fA-F]+$" + Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$" + RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$" + ASCII string = "^[\x00-\x7F]+$" + Multibyte string = "[^\x00-\x7F]" + FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" + HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" + Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$" + PrintableASCII string = "^[\x20-\x7E]+$" + DataURI string = "^data:.+\\/(.+);base64$" + MagnetURI string = "^magnet:\\?xt=urn:[a-zA-Z0-9]+:[a-zA-Z0-9]{32,40}&dn=.+&tr=.+$" + Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$" + Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$" + DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$` + IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))` + URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)` + URLUsername string = `(\S+(:\S*)?@)` + URLPath string = `((\/|\?|#)[^\s]*)` + URLPort 
string = `(:(\d{1,5}))` + URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3]|24\d|25[0-5])(\.(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-5]))` + URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))` + URL = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$` + SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$` + WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$` + UnixPath string = `^(/[^/\x00]*)+/?$` + WinARPath string = `^(?:(?:[a-zA-Z]:|\\\\[a-z0-9_.$â—-]+\\[a-z0-9_.$â—-]+)\\|\\?[^\\/:*?"<>|\r\n]+\\?)(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$` + UnixARPath string = `^((\.{0,2}/)?([^/\x00]*))+/?$` + Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$" + tagName string = "valid" + hasLowerCase string = ".*[[:lower:]]" + hasUpperCase string = ".*[[:upper:]]" + hasWhitespace string = ".*[[:space:]]" + hasWhitespaceOnly string = "^[[:space:]]+$" + IMEI string = "^[0-9a-f]{14}$|^\\d{15}$|^\\d{18}$" + IMSI string = "^\\d{14,15}$" + E164 string = `^\+?[1-9]\d{1,14}$` +) + +// Used by IsFilePath func +const ( + // Unknown is unresolved OS type + Unknown = iota + // Win is Windows type + Win + // Unix is *nix OS types + Unix +) + +var ( + userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$") + hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$") + userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})") + rxEmail = regexp.MustCompile(Email) + rxCreditCard = regexp.MustCompile(CreditCard) + rxISBN10 = regexp.MustCompile(ISBN10) + rxISBN13 = 
regexp.MustCompile(ISBN13) + rxUUID3 = regexp.MustCompile(UUID3) + rxUUID4 = regexp.MustCompile(UUID4) + rxUUID5 = regexp.MustCompile(UUID5) + rxUUID = regexp.MustCompile(UUID) + rxAlpha = regexp.MustCompile(Alpha) + rxAlphanumeric = regexp.MustCompile(Alphanumeric) + rxNumeric = regexp.MustCompile(Numeric) + rxInt = regexp.MustCompile(Int) + rxFloat = regexp.MustCompile(Float) + rxHexadecimal = regexp.MustCompile(Hexadecimal) + rxHexcolor = regexp.MustCompile(Hexcolor) + rxRGBcolor = regexp.MustCompile(RGBcolor) + rxASCII = regexp.MustCompile(ASCII) + rxPrintableASCII = regexp.MustCompile(PrintableASCII) + rxMultibyte = regexp.MustCompile(Multibyte) + rxFullWidth = regexp.MustCompile(FullWidth) + rxHalfWidth = regexp.MustCompile(HalfWidth) + rxBase64 = regexp.MustCompile(Base64) + rxDataURI = regexp.MustCompile(DataURI) + rxMagnetURI = regexp.MustCompile(MagnetURI) + rxLatitude = regexp.MustCompile(Latitude) + rxLongitude = regexp.MustCompile(Longitude) + rxDNSName = regexp.MustCompile(DNSName) + rxURL = regexp.MustCompile(URL) + rxSSN = regexp.MustCompile(SSN) + rxWinPath = regexp.MustCompile(WinPath) + rxUnixPath = regexp.MustCompile(UnixPath) + rxARWinPath = regexp.MustCompile(WinARPath) + rxARUnixPath = regexp.MustCompile(UnixARPath) + rxSemver = regexp.MustCompile(Semver) + rxHasLowerCase = regexp.MustCompile(hasLowerCase) + rxHasUpperCase = regexp.MustCompile(hasUpperCase) + rxHasWhitespace = regexp.MustCompile(hasWhitespace) + rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly) + rxIMEI = regexp.MustCompile(IMEI) + rxIMSI = regexp.MustCompile(IMSI) + rxE164 = regexp.MustCompile(E164) +) diff --git a/vendor/github.com/asaskevich/govalidator/types.go b/vendor/github.com/asaskevich/govalidator/types.go new file mode 100644 index 0000000..c573abb --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/types.go @@ -0,0 +1,656 @@ +package govalidator + +import ( + "reflect" + "regexp" + "sort" + "sync" +) + +// Validator is a wrapper for a validator 
function that returns bool and accepts string. +type Validator func(str string) bool + +// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type. +// The second parameter should be the context (in the case of validating a struct: the whole object being validated). +type CustomTypeValidator func(i interface{}, o interface{}) bool + +// ParamValidator is a wrapper for validator functions that accept additional parameters. +type ParamValidator func(str string, params ...string) bool + +// InterfaceParamValidator is a wrapper for functions that accept variants parameters for an interface value +type InterfaceParamValidator func(in interface{}, params ...string) bool +type tagOptionsMap map[string]tagOption + +func (t tagOptionsMap) orderedKeys() []string { + var keys []string + for k := range t { + keys = append(keys, k) + } + + sort.Slice(keys, func(a, b int) bool { + return t[keys[a]].order < t[keys[b]].order + }) + + return keys +} + +type tagOption struct { + name string + customErrorMessage string + order int +} + +// UnsupportedTypeError is a wrapper for reflect.Type +type UnsupportedTypeError struct { + Type reflect.Type +} + +// stringValues is a slice of reflect.Value holding *reflect.StringValue. +// It implements the methods to sort by string. +type stringValues []reflect.Value + +// InterfaceParamTagMap is a map of functions accept variants parameters for an interface value +var InterfaceParamTagMap = map[string]InterfaceParamValidator{ + "type": IsType, +} + +// InterfaceParamTagRegexMap maps interface param tags to their respective regexes. 
+var InterfaceParamTagRegexMap = map[string]*regexp.Regexp{ + "type": regexp.MustCompile(`^type\((.*)\)$`), +} + +// ParamTagMap is a map of functions accept variants parameters +var ParamTagMap = map[string]ParamValidator{ + "length": ByteLength, + "range": Range, + "runelength": RuneLength, + "stringlength": StringLength, + "matches": StringMatches, + "in": IsInRaw, + "rsapub": IsRsaPub, + "minstringlength": MinStringLength, + "maxstringlength": MaxStringLength, +} + +// ParamTagRegexMap maps param tags to their respective regexes. +var ParamTagRegexMap = map[string]*regexp.Regexp{ + "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"), + "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"), + "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"), + "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"), + "in": regexp.MustCompile(`^in\((.*)\)`), + "matches": regexp.MustCompile(`^matches\((.+)\)$`), + "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"), + "minstringlength": regexp.MustCompile("^minstringlength\\((\\d+)\\)$"), + "maxstringlength": regexp.MustCompile("^maxstringlength\\((\\d+)\\)$"), +} + +type customTypeTagMap struct { + validators map[string]CustomTypeValidator + + sync.RWMutex +} + +func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) { + tm.RLock() + defer tm.RUnlock() + v, ok := tm.validators[name] + return v, ok +} + +func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) { + tm.Lock() + defer tm.Unlock() + tm.validators[name] = ctv +} + +// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function. +// Use this to validate compound or custom types that need to be handled as a whole, e.g. +// `type UUID [16]byte` (this would be handled as an array of bytes). 
+var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)} + +// TagMap is a map of functions, that can be used as tags for ValidateStruct function. +var TagMap = map[string]Validator{ + "email": IsEmail, + "url": IsURL, + "dialstring": IsDialString, + "requrl": IsRequestURL, + "requri": IsRequestURI, + "alpha": IsAlpha, + "utfletter": IsUTFLetter, + "alphanum": IsAlphanumeric, + "utfletternum": IsUTFLetterNumeric, + "numeric": IsNumeric, + "utfnumeric": IsUTFNumeric, + "utfdigit": IsUTFDigit, + "hexadecimal": IsHexadecimal, + "hexcolor": IsHexcolor, + "rgbcolor": IsRGBcolor, + "lowercase": IsLowerCase, + "uppercase": IsUpperCase, + "int": IsInt, + "float": IsFloat, + "null": IsNull, + "notnull": IsNotNull, + "uuid": IsUUID, + "uuidv3": IsUUIDv3, + "uuidv4": IsUUIDv4, + "uuidv5": IsUUIDv5, + "creditcard": IsCreditCard, + "isbn10": IsISBN10, + "isbn13": IsISBN13, + "json": IsJSON, + "multibyte": IsMultibyte, + "ascii": IsASCII, + "printableascii": IsPrintableASCII, + "fullwidth": IsFullWidth, + "halfwidth": IsHalfWidth, + "variablewidth": IsVariableWidth, + "base64": IsBase64, + "datauri": IsDataURI, + "ip": IsIP, + "port": IsPort, + "ipv4": IsIPv4, + "ipv6": IsIPv6, + "dns": IsDNSName, + "host": IsHost, + "mac": IsMAC, + "latitude": IsLatitude, + "longitude": IsLongitude, + "ssn": IsSSN, + "semver": IsSemver, + "rfc3339": IsRFC3339, + "rfc3339WithoutZone": IsRFC3339WithoutZone, + "ISO3166Alpha2": IsISO3166Alpha2, + "ISO3166Alpha3": IsISO3166Alpha3, + "ISO4217": IsISO4217, + "IMEI": IsIMEI, + "ulid": IsULID, +} + +// ISO3166Entry stores country codes +type ISO3166Entry struct { + EnglishShortName string + FrenchShortName string + Alpha2Code string + Alpha3Code string + Numeric string +} + +//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes" +var ISO3166List = []ISO3166Entry{ + {"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"}, + {"Albania", "Albanie (l')", "AL", "ALB", "008"}, + 
{"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"}, + {"Algeria", "Algérie (l')", "DZ", "DZA", "012"}, + {"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"}, + {"Andorra", "Andorre (l')", "AD", "AND", "020"}, + {"Angola", "Angola (l')", "AO", "AGO", "024"}, + {"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"}, + {"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"}, + {"Argentina", "Argentine (l')", "AR", "ARG", "032"}, + {"Australia", "Australie (l')", "AU", "AUS", "036"}, + {"Austria", "Autriche (l')", "AT", "AUT", "040"}, + {"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"}, + {"Bahrain", "Bahreïn", "BH", "BHR", "048"}, + {"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"}, + {"Armenia", "Arménie (l')", "AM", "ARM", "051"}, + {"Barbados", "Barbade (la)", "BB", "BRB", "052"}, + {"Belgium", "Belgique (la)", "BE", "BEL", "056"}, + {"Bermuda", "Bermudes (les)", "BM", "BMU", "060"}, + {"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"}, + {"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"}, + {"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"}, + {"Botswana", "Botswana (le)", "BW", "BWA", "072"}, + {"Bouvet Island", "Bouvet (l'ÃŽle)", "BV", "BVT", "074"}, + {"Brazil", "Brésil (le)", "BR", "BRA", "076"}, + {"Belize", "Belize (le)", "BZ", "BLZ", "084"}, + {"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"}, + {"Solomon Islands", "Salomon (ÃŽles)", "SB", "SLB", "090"}, + {"Virgin Islands (British)", "Vierges britanniques (les ÃŽles)", "VG", "VGB", "092"}, + {"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"}, + {"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"}, + {"Myanmar", "Myanmar (le)", "MM", "MMR", "104"}, + {"Burundi", "Burundi (le)", "BI", "BDI", "108"}, + {"Belarus", "Bélarus (le)", "BY", "BLR", "112"}, + {"Cambodia", "Cambodge (le)", "KH", "KHM", "116"}, + {"Cameroon", "Cameroun (le)", "CM", 
"CMR", "120"}, + {"Canada", "Canada (le)", "CA", "CAN", "124"}, + {"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"}, + {"Cayman Islands (the)", "Caïmans (les ÃŽles)", "KY", "CYM", "136"}, + {"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"}, + {"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"}, + {"Chad", "Tchad (le)", "TD", "TCD", "148"}, + {"Chile", "Chili (le)", "CL", "CHL", "152"}, + {"China", "Chine (la)", "CN", "CHN", "156"}, + {"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"}, + {"Christmas Island", "Christmas (l'ÃŽle)", "CX", "CXR", "162"}, + {"Cocos (Keeling) Islands (the)", "Cocos (les ÃŽles)/ Keeling (les ÃŽles)", "CC", "CCK", "166"}, + {"Colombia", "Colombie (la)", "CO", "COL", "170"}, + {"Comoros (the)", "Comores (les)", "KM", "COM", "174"}, + {"Mayotte", "Mayotte", "YT", "MYT", "175"}, + {"Congo (the)", "Congo (le)", "CG", "COG", "178"}, + {"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"}, + {"Cook Islands (the)", "Cook (les ÃŽles)", "CK", "COK", "184"}, + {"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"}, + {"Croatia", "Croatie (la)", "HR", "HRV", "191"}, + {"Cuba", "Cuba", "CU", "CUB", "192"}, + {"Cyprus", "Chypre", "CY", "CYP", "196"}, + {"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"}, + {"Benin", "Bénin (le)", "BJ", "BEN", "204"}, + {"Denmark", "Danemark (le)", "DK", "DNK", "208"}, + {"Dominica", "Dominique (la)", "DM", "DMA", "212"}, + {"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"}, + {"Ecuador", "Équateur (l')", "EC", "ECU", "218"}, + {"El Salvador", "El Salvador", "SV", "SLV", "222"}, + {"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"}, + {"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"}, + {"Eritrea", "Érythrée (l')", "ER", "ERI", "232"}, + {"Estonia", "Estonie (l')", "EE", "EST", "233"}, + {"Faroe Islands (the)", "Féroé (les ÃŽles)", "FO", "FRO", 
"234"}, + {"Falkland Islands (the) [Malvinas]", "Falkland (les ÃŽles)/Malouines (les ÃŽles)", "FK", "FLK", "238"}, + {"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les ÃŽles Sandwich du Sud (la)", "GS", "SGS", "239"}, + {"Fiji", "Fidji (les)", "FJ", "FJI", "242"}, + {"Finland", "Finlande (la)", "FI", "FIN", "246"}, + {"Ã…land Islands", "Ã…land(les ÃŽles)", "AX", "ALA", "248"}, + {"France", "France (la)", "FR", "FRA", "250"}, + {"French Guiana", "Guyane française (la )", "GF", "GUF", "254"}, + {"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"}, + {"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"}, + {"Djibouti", "Djibouti", "DJ", "DJI", "262"}, + {"Gabon", "Gabon (le)", "GA", "GAB", "266"}, + {"Georgia", "Géorgie (la)", "GE", "GEO", "268"}, + {"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"}, + {"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"}, + {"Germany", "Allemagne (l')", "DE", "DEU", "276"}, + {"Ghana", "Ghana (le)", "GH", "GHA", "288"}, + {"Gibraltar", "Gibraltar", "GI", "GIB", "292"}, + {"Kiribati", "Kiribati", "KI", "KIR", "296"}, + {"Greece", "Grèce (la)", "GR", "GRC", "300"}, + {"Greenland", "Groenland (le)", "GL", "GRL", "304"}, + {"Grenada", "Grenade (la)", "GD", "GRD", "308"}, + {"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"}, + {"Guam", "Guam", "GU", "GUM", "316"}, + {"Guatemala", "Guatemala (le)", "GT", "GTM", "320"}, + {"Guinea", "Guinée (la)", "GN", "GIN", "324"}, + {"Guyana", "Guyana (le)", "GY", "GUY", "328"}, + {"Haiti", "Haïti", "HT", "HTI", "332"}, + {"Heard Island and McDonald Islands", "Heard-et-ÃŽles MacDonald (l'ÃŽle)", "HM", "HMD", "334"}, + {"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"}, + {"Honduras", "Honduras (le)", "HN", "HND", "340"}, + {"Hong Kong", "Hong Kong", "HK", "HKG", "344"}, + {"Hungary", "Hongrie (la)", "HU", "HUN", "348"}, + {"Iceland", "Islande (l')", "IS", "ISL", "352"}, + {"India", "Inde (l')", "IN", 
"IND", "356"}, + {"Indonesia", "Indonésie (l')", "ID", "IDN", "360"}, + {"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"}, + {"Iraq", "Iraq (l')", "IQ", "IRQ", "368"}, + {"Ireland", "Irlande (l')", "IE", "IRL", "372"}, + {"Israel", "Israël", "IL", "ISR", "376"}, + {"Italy", "Italie (l')", "IT", "ITA", "380"}, + {"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"}, + {"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"}, + {"Japan", "Japon (le)", "JP", "JPN", "392"}, + {"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"}, + {"Jordan", "Jordanie (la)", "JO", "JOR", "400"}, + {"Kenya", "Kenya (le)", "KE", "KEN", "404"}, + {"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"}, + {"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"}, + {"Kuwait", "Koweït (le)", "KW", "KWT", "414"}, + {"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"}, + {"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"}, + {"Lebanon", "Liban (le)", "LB", "LBN", "422"}, + {"Lesotho", "Lesotho (le)", "LS", "LSO", "426"}, + {"Latvia", "Lettonie (la)", "LV", "LVA", "428"}, + {"Liberia", "Libéria (le)", "LR", "LBR", "430"}, + {"Libya", "Libye (la)", "LY", "LBY", "434"}, + {"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"}, + {"Lithuania", "Lituanie (la)", "LT", "LTU", "440"}, + {"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"}, + {"Macao", "Macao", "MO", "MAC", "446"}, + {"Madagascar", "Madagascar", "MG", "MDG", "450"}, + {"Malawi", "Malawi (le)", "MW", "MWI", "454"}, + {"Malaysia", "Malaisie (la)", "MY", "MYS", "458"}, + {"Maldives", "Maldives (les)", "MV", "MDV", "462"}, + {"Mali", "Mali (le)", "ML", "MLI", "466"}, + {"Malta", "Malte", "MT", "MLT", "470"}, + {"Martinique", "Martinique (la)", "MQ", "MTQ", "474"}, + {"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"}, + {"Mauritius", "Maurice", "MU", "MUS", 
"480"}, + {"Mexico", "Mexique (le)", "MX", "MEX", "484"}, + {"Monaco", "Monaco", "MC", "MCO", "492"}, + {"Mongolia", "Mongolie (la)", "MN", "MNG", "496"}, + {"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"}, + {"Montenegro", "Monténégro (le)", "ME", "MNE", "499"}, + {"Montserrat", "Montserrat", "MS", "MSR", "500"}, + {"Morocco", "Maroc (le)", "MA", "MAR", "504"}, + {"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"}, + {"Oman", "Oman", "OM", "OMN", "512"}, + {"Namibia", "Namibie (la)", "NA", "NAM", "516"}, + {"Nauru", "Nauru", "NR", "NRU", "520"}, + {"Nepal", "Népal (le)", "NP", "NPL", "524"}, + {"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"}, + {"Curaçao", "Curaçao", "CW", "CUW", "531"}, + {"Aruba", "Aruba", "AW", "ABW", "533"}, + {"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"}, + {"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"}, + {"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"}, + {"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"}, + {"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"}, + {"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"}, + {"Niger (the)", "Niger (le)", "NE", "NER", "562"}, + {"Nigeria", "Nigéria (le)", "NG", "NGA", "566"}, + {"Niue", "Niue", "NU", "NIU", "570"}, + {"Norfolk Island", "Norfolk (l'ÃŽle)", "NF", "NFK", "574"}, + {"Norway", "Norvège (la)", "NO", "NOR", "578"}, + {"Northern Mariana Islands (the)", "Mariannes du Nord (les ÃŽles)", "MP", "MNP", "580"}, + {"United States Minor Outlying Islands (the)", "ÃŽles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"}, + {"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"}, + {"Marshall Islands (the)", "Marshall (ÃŽles)", "MH", "MHL", "584"}, + {"Palau", "Palaos (les)", "PW", "PLW", "585"}, + {"Pakistan", "Pakistan (le)", "PK", "PAK", "586"}, + {"Panama", "Panama (le)", "PA", "PAN", "591"}, + {"Papua New 
Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"}, + {"Paraguay", "Paraguay (le)", "PY", "PRY", "600"}, + {"Peru", "Pérou (le)", "PE", "PER", "604"}, + {"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"}, + {"Pitcairn", "Pitcairn", "PN", "PCN", "612"}, + {"Poland", "Pologne (la)", "PL", "POL", "616"}, + {"Portugal", "Portugal (le)", "PT", "PRT", "620"}, + {"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"}, + {"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"}, + {"Puerto Rico", "Porto Rico", "PR", "PRI", "630"}, + {"Qatar", "Qatar (le)", "QA", "QAT", "634"}, + {"Réunion", "Réunion (La)", "RE", "REU", "638"}, + {"Romania", "Roumanie (la)", "RO", "ROU", "642"}, + {"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"}, + {"Rwanda", "Rwanda (le)", "RW", "RWA", "646"}, + {"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"}, + {"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"}, + {"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"}, + {"Anguilla", "Anguilla", "AI", "AIA", "660"}, + {"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"}, + {"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"}, + {"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"}, + {"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"}, + {"San Marino", "Saint-Marin", "SM", "SMR", "674"}, + {"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"}, + {"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"}, + {"Senegal", "Sénégal (le)", "SN", "SEN", "686"}, + {"Serbia", "Serbie (la)", "RS", "SRB", "688"}, + {"Seychelles", "Seychelles (les)", "SC", "SYC", "690"}, + {"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"}, + {"Singapore", "Singapour", "SG", "SGP", "702"}, + {"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"}, + {"Viet Nam", "Viet Nam 
(le)", "VN", "VNM", "704"}, + {"Slovenia", "Slovénie (la)", "SI", "SVN", "705"}, + {"Somalia", "Somalie (la)", "SO", "SOM", "706"}, + {"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"}, + {"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"}, + {"Spain", "Espagne (l')", "ES", "ESP", "724"}, + {"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"}, + {"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"}, + {"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"}, + {"Suriname", "Suriname (le)", "SR", "SUR", "740"}, + {"Svalbard and Jan Mayen", "Svalbard et l'ÃŽle Jan Mayen (le)", "SJ", "SJM", "744"}, + {"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"}, + {"Sweden", "Suède (la)", "SE", "SWE", "752"}, + {"Switzerland", "Suisse (la)", "CH", "CHE", "756"}, + {"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"}, + {"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"}, + {"Thailand", "Thaïlande (la)", "TH", "THA", "764"}, + {"Togo", "Togo (le)", "TG", "TGO", "768"}, + {"Tokelau", "Tokelau (les)", "TK", "TKL", "772"}, + {"Tonga", "Tonga (les)", "TO", "TON", "776"}, + {"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"}, + {"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"}, + {"Tunisia", "Tunisie (la)", "TN", "TUN", "788"}, + {"Turkey", "Turquie (la)", "TR", "TUR", "792"}, + {"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"}, + {"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les ÃŽles)", "TC", "TCA", "796"}, + {"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"}, + {"Uganda", "Ouganda (l')", "UG", "UGA", "800"}, + {"Ukraine", "Ukraine (l')", "UA", "UKR", "804"}, + {"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"}, + {"Egypt", "Égypte (l')", "EG", "EGY", "818"}, + {"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"}, + 
{"Guernsey", "Guernesey", "GG", "GGY", "831"}, + {"Jersey", "Jersey", "JE", "JEY", "832"}, + {"Isle of Man", "ÃŽle de Man", "IM", "IMN", "833"}, + {"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"}, + {"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"}, + {"Virgin Islands (U.S.)", "Vierges des États-Unis (les ÃŽles)", "VI", "VIR", "850"}, + {"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"}, + {"Uruguay", "Uruguay (l')", "UY", "URY", "858"}, + {"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"}, + {"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"}, + {"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"}, + {"Samoa", "Samoa (le)", "WS", "WSM", "882"}, + {"Yemen", "Yémen (le)", "YE", "YEM", "887"}, + {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"}, +} + +// ISO4217List is the list of ISO currency codes +var ISO4217List = []string{ + "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", + "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD", + "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK", + "DJF", "DKK", "DOP", "DZD", + "EGP", "ERN", "ETB", "EUR", + "FJD", "FKP", + "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", + "HKD", "HNL", "HRK", "HTG", "HUF", + "IDR", "ILS", "INR", "IQD", "IRR", "ISK", + "JMD", "JOD", "JPY", + "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", + "LAK", "LBP", "LKR", "LRD", "LSL", "LYD", + "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN", + "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", + "OMR", + "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG", + "QAR", + "RON", "RSD", "RUB", "RWF", + "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "STN", "SVC", "SYP", "SZL", + "THB", "TJS", 
"TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS", + "UAH", "UGX", "USD", "USN", "UYI", "UYU", "UYW", "UZS", + "VEF", "VES", "VND", "VUV", + "WST", + "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX", + "YER", + "ZAR", "ZMW", "ZWL", +} + +// ISO693Entry stores ISO language codes +type ISO693Entry struct { + Alpha3bCode string + Alpha2Code string + English string +} + +//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json +var ISO693List = []ISO693Entry{ + {Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"}, + {Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"}, + {Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"}, + {Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"}, + {Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"}, + {Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"}, + {Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"}, + {Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"}, + {Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"}, + {Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"}, + {Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"}, + {Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"}, + {Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"}, + {Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"}, + {Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"}, + {Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"}, + {Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"}, + {Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"}, + {Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"}, + {Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"}, + {Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"}, + {Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"}, + {Alpha3bCode: "bre", Alpha2Code: "br", English: 
"Breton"}, + {Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"}, + {Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"}, + {Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"}, + {Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"}, + {Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"}, + {Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"}, + {Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"}, + {Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"}, + {Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"}, + {Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"}, + {Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"}, + {Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"}, + {Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"}, + {Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"}, + {Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"}, + {Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"}, + {Alpha3bCode: "eng", Alpha2Code: "en", English: "English"}, + {Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"}, + {Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"}, + {Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"}, + {Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"}, + {Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"}, + {Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"}, + {Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"}, + {Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"}, + {Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"}, + {Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"}, + {Alpha3bCode: "ger", Alpha2Code: "de", English: "German"}, + {Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"}, + {Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"}, + 
{Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"}, + {Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"}, + {Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"}, + {Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"}, + {Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"}, + {Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"}, + {Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"}, + {Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"}, + {Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"}, + {Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"}, + {Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"}, + {Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"}, + {Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"}, + {Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"}, + {Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"}, + {Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"}, + {Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"}, + {Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"}, + {Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"}, + {Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"}, + {Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"}, + {Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"}, + {Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"}, + {Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"}, + {Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"}, + {Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"}, + {Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"}, + {Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"}, + {Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"}, + {Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"}, + {Alpha3bCode: 
"khm", Alpha2Code: "km", English: "Central Khmer"}, + {Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"}, + {Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"}, + {Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"}, + {Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"}, + {Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"}, + {Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"}, + {Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"}, + {Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"}, + {Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"}, + {Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"}, + {Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"}, + {Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"}, + {Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"}, + {Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"}, + {Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"}, + {Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"}, + {Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"}, + {Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"}, + {Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"}, + {Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"}, + {Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"}, + {Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"}, + {Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"}, + {Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"}, + {Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"}, + {Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"}, + {Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"}, + {Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"}, + {Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"}, + {Alpha3bCode: "nde", Alpha2Code: "nd", 
English: "Ndebele, North; North Ndebele"}, + {Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"}, + {Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"}, + {Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"}, + {Alpha3bCode: "nob", Alpha2Code: "nb", English: "BokmÃ¥l, Norwegian; Norwegian BokmÃ¥l"}, + {Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"}, + {Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"}, + {Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"}, + {Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"}, + {Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"}, + {Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"}, + {Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"}, + {Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"}, + {Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"}, + {Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"}, + {Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"}, + {Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"}, + {Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"}, + {Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"}, + {Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"}, + {Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"}, + {Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"}, + {Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"}, + {Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"}, + {Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"}, + {Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"}, + {Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"}, + {Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"}, + {Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"}, + {Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"}, + 
{Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"}, + {Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"}, + {Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"}, + {Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"}, + {Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"}, + {Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"}, + {Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"}, + {Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"}, + {Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"}, + {Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"}, + {Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"}, + {Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"}, + {Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"}, + {Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"}, + {Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"}, + {Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"}, + {Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"}, + {Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"}, + {Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"}, + {Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"}, + {Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"}, + {Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"}, + {Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"}, + {Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"}, + {Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"}, + {Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"}, + {Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"}, + {Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"}, + {Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"}, + {Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"}, + {Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"}, + {Alpha3bCode: "vie", Alpha2Code: "vi", English: 
"Vietnamese"}, + {Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"}, + {Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"}, + {Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"}, + {Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"}, + {Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"}, + {Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"}, + {Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"}, + {Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"}, + {Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"}, +} diff --git a/vendor/github.com/asaskevich/govalidator/utils.go b/vendor/github.com/asaskevich/govalidator/utils.go new file mode 100644 index 0000000..f4c30f8 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/utils.go @@ -0,0 +1,270 @@ +package govalidator + +import ( + "errors" + "fmt" + "html" + "math" + "path" + "regexp" + "strings" + "unicode" + "unicode/utf8" +) + +// Contains checks if the string contains the substring. +func Contains(str, substring string) bool { + return strings.Contains(str, substring) +} + +// Matches checks if string matches the pattern (pattern is regular expression) +// In case of error return false +func Matches(str, pattern string) bool { + match, _ := regexp.MatchString(pattern, str) + return match +} + +// LeftTrim trims characters from the left side of the input. +// If second argument is empty, it will remove leading spaces. +func LeftTrim(str, chars string) string { + if chars == "" { + return strings.TrimLeftFunc(str, unicode.IsSpace) + } + r, _ := regexp.Compile("^[" + chars + "]+") + return r.ReplaceAllString(str, "") +} + +// RightTrim trims characters from the right side of the input. +// If second argument is empty, it will remove trailing spaces. 
+func RightTrim(str, chars string) string { + if chars == "" { + return strings.TrimRightFunc(str, unicode.IsSpace) + } + r, _ := regexp.Compile("[" + chars + "]+$") + return r.ReplaceAllString(str, "") +} + +// Trim trims characters from both sides of the input. +// If second argument is empty, it will remove spaces. +func Trim(str, chars string) string { + return LeftTrim(RightTrim(str, chars), chars) +} + +// WhiteList removes characters that do not appear in the whitelist. +func WhiteList(str, chars string) string { + pattern := "[^" + chars + "]+" + r, _ := regexp.Compile(pattern) + return r.ReplaceAllString(str, "") +} + +// BlackList removes characters that appear in the blacklist. +func BlackList(str, chars string) string { + pattern := "[" + chars + "]+" + r, _ := regexp.Compile(pattern) + return r.ReplaceAllString(str, "") +} + +// StripLow removes characters with a numerical value < 32 and 127, mostly control characters. +// If keep_new_lines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD). +func StripLow(str string, keepNewLines bool) string { + chars := "" + if keepNewLines { + chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F" + } else { + chars = "\x00-\x1F\x7F" + } + return BlackList(str, chars) +} + +// ReplacePattern replaces regular expression pattern in string +func ReplacePattern(str, pattern, replace string) string { + r, _ := regexp.Compile(pattern) + return r.ReplaceAllString(str, replace) +} + +// Escape replaces <, >, & and " with HTML entities. +var Escape = html.EscapeString + +func addSegment(inrune, segment []rune) []rune { + if len(segment) == 0 { + return inrune + } + if len(inrune) != 0 { + inrune = append(inrune, '_') + } + inrune = append(inrune, segment...) + return inrune +} + +// UnderscoreToCamelCase converts from underscore separated form to camel case form. 
+// Ex.: my_func => MyFunc +func UnderscoreToCamelCase(s string) string { + return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1) +} + +// CamelCaseToUnderscore converts from camel case form to underscore separated form. +// Ex.: MyFunc => my_func +func CamelCaseToUnderscore(str string) string { + var output []rune + var segment []rune + for _, r := range str { + + // not treat number as separate segment + if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) { + output = addSegment(output, segment) + segment = nil + } + segment = append(segment, unicode.ToLower(r)) + } + output = addSegment(output, segment) + return string(output) +} + +// Reverse returns reversed string +func Reverse(s string) string { + r := []rune(s) + for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 { + r[i], r[j] = r[j], r[i] + } + return string(r) +} + +// GetLines splits string by "\n" and return array of lines +func GetLines(s string) []string { + return strings.Split(s, "\n") +} + +// GetLine returns specified line of multiline string +func GetLine(s string, index int) (string, error) { + lines := GetLines(s) + if index < 0 || index >= len(lines) { + return "", errors.New("line index out of bounds") + } + return lines[index], nil +} + +// RemoveTags removes all tags from HTML string +func RemoveTags(s string) string { + return ReplacePattern(s, "<[^>]*>", "") +} + +// SafeFileName returns safe string that can be used in file names +func SafeFileName(str string) string { + name := strings.ToLower(str) + name = path.Clean(path.Base(name)) + name = strings.Trim(name, " ") + separators, err := regexp.Compile(`[ &_=+:]`) + if err == nil { + name = separators.ReplaceAllString(name, "-") + } + legal, err := regexp.Compile(`[^[:alnum:]-.]`) + if err == nil { + name = legal.ReplaceAllString(name, "") + } + for strings.Contains(name, "--") { + name = strings.Replace(name, "--", "-", -1) + } + return name +} + +// NormalizeEmail 
canonicalize an email address. +// The local part of the email address is lowercased for all domains; the hostname is always lowercased and +// the local part of the email address is always lowercased for hosts that are known to be case-insensitive (currently only GMail). +// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part and +// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com) and all @googlemail.com addresses are +// normalized to @gmail.com. +func NormalizeEmail(str string) (string, error) { + if !IsEmail(str) { + return "", fmt.Errorf("%s is not an email", str) + } + parts := strings.Split(str, "@") + parts[0] = strings.ToLower(parts[0]) + parts[1] = strings.ToLower(parts[1]) + if parts[1] == "gmail.com" || parts[1] == "googlemail.com" { + parts[1] = "gmail.com" + parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0] + } + return strings.Join(parts, "@"), nil +} + +// Truncate a string to the closest length without breaking words. 
+func Truncate(str string, length int, ending string) string { + var aftstr, befstr string + if len(str) > length { + words := strings.Fields(str) + before, present := 0, 0 + for i := range words { + befstr = aftstr + before = present + aftstr = aftstr + words[i] + " " + present = len(aftstr) + if present > length && i != 0 { + if (length - before) < (present - length) { + return Trim(befstr, " /\\.,\"'#!?&@+-") + ending + } + return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending + } + } + } + + return str +} + +// PadLeft pads left side of a string if size of string is less then indicated pad length +func PadLeft(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, true, false) +} + +// PadRight pads right side of a string if size of string is less then indicated pad length +func PadRight(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, false, true) +} + +// PadBoth pads both sides of a string if size of string is less then indicated pad length +func PadBoth(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, true, true) +} + +// PadString either left, right or both sides. 
+// Note that padding string can be unicode and more then one character +func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string { + + // When padded length is less then the current string size + if padLen < utf8.RuneCountInString(str) { + return str + } + + padLen -= utf8.RuneCountInString(str) + + targetLen := padLen + + targetLenLeft := targetLen + targetLenRight := targetLen + if padLeft && padRight { + targetLenLeft = padLen / 2 + targetLenRight = padLen - targetLenLeft + } + + strToRepeatLen := utf8.RuneCountInString(padStr) + + repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen))) + repeatedString := strings.Repeat(padStr, repeatTimes) + + leftSide := "" + if padLeft { + leftSide = repeatedString[0:targetLenLeft] + } + + rightSide := "" + if padRight { + rightSide = repeatedString[0:targetLenRight] + } + + return leftSide + str + rightSide +} + +// TruncatingErrorf removes extra args from fmt.Errorf if not formatted in the str object +func TruncatingErrorf(str string, args ...interface{}) error { + n := strings.Count(str, "%s") + return fmt.Errorf(str, args[:n]...) +} diff --git a/vendor/github.com/asaskevich/govalidator/validator.go b/vendor/github.com/asaskevich/govalidator/validator.go new file mode 100644 index 0000000..c9c4fac --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/validator.go @@ -0,0 +1,1768 @@ +// Package govalidator is package of validators and sanitizers for strings, structs and collections. 
+package govalidator + +import ( + "bytes" + "crypto/rsa" + "crypto/x509" + "encoding/base64" + "encoding/json" + "encoding/pem" + "fmt" + "io/ioutil" + "net" + "net/url" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "time" + "unicode" + "unicode/utf8" +) + +var ( + fieldsRequiredByDefault bool + nilPtrAllowedByRequired = false + notNumberRegexp = regexp.MustCompile("[^0-9]+") + whiteSpacesAndMinus = regexp.MustCompile(`[\s-]+`) + paramsRegexp = regexp.MustCompile(`\(.*\)$`) +) + +const maxURLRuneCount = 2083 +const minURLRuneCount = 3 +const rfc3339WithoutZone = "2006-01-02T15:04:05" + +// SetFieldsRequiredByDefault causes validation to fail when struct fields +// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). +// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter): +// type exampleStruct struct { +// Name string `` +// Email string `valid:"email"` +// This, however, will only fail when Email is empty or an invalid email address: +// type exampleStruct2 struct { +// Name string `valid:"-"` +// Email string `valid:"email"` +// Lastly, this will only fail when Email is an invalid email address but not when it's empty: +// type exampleStruct2 struct { +// Name string `valid:"-"` +// Email string `valid:"email,optional"` +func SetFieldsRequiredByDefault(value bool) { + fieldsRequiredByDefault = value +} + +// SetNilPtrAllowedByRequired causes validation to pass for nil ptrs when a field is set to required. +// The validation will still reject ptr fields in their zero value state. Example with this enabled: +// type exampleStruct struct { +// Name *string `valid:"required"` +// With `Name` set to "", this will be considered invalid input and will cause a validation error. +// With `Name` set to nil, this will be considered valid by validation. +// By default this is disabled. 
+func SetNilPtrAllowedByRequired(value bool) { + nilPtrAllowedByRequired = value +} + +// IsEmail checks if the string is an email. +func IsEmail(str string) bool { + // TODO uppercase letters are not supported + return rxEmail.MatchString(str) +} + +// IsExistingEmail checks if the string is an email of existing domain +func IsExistingEmail(email string) bool { + + if len(email) < 6 || len(email) > 254 { + return false + } + at := strings.LastIndex(email, "@") + if at <= 0 || at > len(email)-3 { + return false + } + user := email[:at] + host := email[at+1:] + if len(user) > 64 { + return false + } + switch host { + case "localhost", "example.com": + return true + } + if userDotRegexp.MatchString(user) || !userRegexp.MatchString(user) || !hostRegexp.MatchString(host) { + return false + } + if _, err := net.LookupMX(host); err != nil { + if _, err := net.LookupIP(host); err != nil { + return false + } + } + + return true +} + +// IsURL checks if the string is an URL. +func IsURL(str string) bool { + if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") { + return false + } + strTemp := str + if strings.Contains(str, ":") && !strings.Contains(str, "://") { + // support no indicated urlscheme but with colon for port number + // http:// is appended so url.Parse will succeed, strTemp used so it does not impact rxURL.MatchString + strTemp = "http://" + str + } + u, err := url.Parse(strTemp) + if err != nil { + return false + } + if strings.HasPrefix(u.Host, ".") { + return false + } + if u.Host == "" && (u.Path != "" && !strings.Contains(u.Path, ".")) { + return false + } + return rxURL.MatchString(str) +} + +// IsRequestURL checks if the string rawurl, assuming +// it was received in an HTTP request, is a valid +// URL confirm to RFC 3986 +func IsRequestURL(rawurl string) bool { + url, err := url.ParseRequestURI(rawurl) + if err != nil { + return false //Couldn't even parse the rawurl + } + if 
len(url.Scheme) == 0 { + return false //No Scheme found + } + return true +} + +// IsRequestURI checks if the string rawurl, assuming +// it was received in an HTTP request, is an +// absolute URI or an absolute path. +func IsRequestURI(rawurl string) bool { + _, err := url.ParseRequestURI(rawurl) + return err == nil +} + +// IsAlpha checks if the string contains only letters (a-zA-Z). Empty string is valid. +func IsAlpha(str string) bool { + if IsNull(str) { + return true + } + return rxAlpha.MatchString(str) +} + +//IsUTFLetter checks if the string contains only unicode letter characters. +//Similar to IsAlpha but for all languages. Empty string is valid. +func IsUTFLetter(str string) bool { + if IsNull(str) { + return true + } + + for _, c := range str { + if !unicode.IsLetter(c) { + return false + } + } + return true + +} + +// IsAlphanumeric checks if the string contains only letters and numbers. Empty string is valid. +func IsAlphanumeric(str string) bool { + if IsNull(str) { + return true + } + return rxAlphanumeric.MatchString(str) +} + +// IsUTFLetterNumeric checks if the string contains only unicode letters and numbers. Empty string is valid. +func IsUTFLetterNumeric(str string) bool { + if IsNull(str) { + return true + } + for _, c := range str { + if !unicode.IsLetter(c) && !unicode.IsNumber(c) { //letters && numbers are ok + return false + } + } + return true + +} + +// IsNumeric checks if the string contains only numbers. Empty string is valid. +func IsNumeric(str string) bool { + if IsNull(str) { + return true + } + return rxNumeric.MatchString(str) +} + +// IsUTFNumeric checks if the string contains only unicode numbers of any kind. +// Numbers can be 0-9 but also Fractions ¾,Roman Ⅸ and Hangzhou 〩. Empty string is valid. 
+func IsUTFNumeric(str string) bool { + if IsNull(str) { + return true + } + if strings.IndexAny(str, "+-") > 0 { + return false + } + if len(str) > 1 { + str = strings.TrimPrefix(str, "-") + str = strings.TrimPrefix(str, "+") + } + for _, c := range str { + if !unicode.IsNumber(c) { //numbers && minus sign are ok + return false + } + } + return true + +} + +// IsUTFDigit checks if the string contains only unicode radix-10 decimal digits. Empty string is valid. +func IsUTFDigit(str string) bool { + if IsNull(str) { + return true + } + if strings.IndexAny(str, "+-") > 0 { + return false + } + if len(str) > 1 { + str = strings.TrimPrefix(str, "-") + str = strings.TrimPrefix(str, "+") + } + for _, c := range str { + if !unicode.IsDigit(c) { //digits && minus sign are ok + return false + } + } + return true + +} + +// IsHexadecimal checks if the string is a hexadecimal number. +func IsHexadecimal(str string) bool { + return rxHexadecimal.MatchString(str) +} + +// IsHexcolor checks if the string is a hexadecimal color. +func IsHexcolor(str string) bool { + return rxHexcolor.MatchString(str) +} + +// IsRGBcolor checks if the string is a valid RGB color in form rgb(RRR, GGG, BBB). +func IsRGBcolor(str string) bool { + return rxRGBcolor.MatchString(str) +} + +// IsLowerCase checks if the string is lowercase. Empty string is valid. +func IsLowerCase(str string) bool { + if IsNull(str) { + return true + } + return str == strings.ToLower(str) +} + +// IsUpperCase checks if the string is uppercase. Empty string is valid. +func IsUpperCase(str string) bool { + if IsNull(str) { + return true + } + return str == strings.ToUpper(str) +} + +// HasLowerCase checks if the string contains at least 1 lowercase. Empty string is valid. +func HasLowerCase(str string) bool { + if IsNull(str) { + return true + } + return rxHasLowerCase.MatchString(str) +} + +// HasUpperCase checks if the string contains as least 1 uppercase. Empty string is valid. 
+func HasUpperCase(str string) bool { + if IsNull(str) { + return true + } + return rxHasUpperCase.MatchString(str) +} + +// IsInt checks if the string is an integer. Empty string is valid. +func IsInt(str string) bool { + if IsNull(str) { + return true + } + return rxInt.MatchString(str) +} + +// IsFloat checks if the string is a float. +func IsFloat(str string) bool { + return str != "" && rxFloat.MatchString(str) +} + +// IsDivisibleBy checks if the string is a number that's divisible by another. +// If second argument is not valid integer or zero, it's return false. +// Otherwise, if first argument is not valid integer or zero, it's return true (Invalid string converts to zero). +func IsDivisibleBy(str, num string) bool { + f, _ := ToFloat(str) + p := int64(f) + q, _ := ToInt(num) + if q == 0 { + return false + } + return (p == 0) || (p%q == 0) +} + +// IsNull checks if the string is null. +func IsNull(str string) bool { + return len(str) == 0 +} + +// IsNotNull checks if the string is not null. +func IsNotNull(str string) bool { + return !IsNull(str) +} + +// HasWhitespaceOnly checks the string only contains whitespace +func HasWhitespaceOnly(str string) bool { + return len(str) > 0 && rxHasWhitespaceOnly.MatchString(str) +} + +// HasWhitespace checks if the string contains any whitespace +func HasWhitespace(str string) bool { + return len(str) > 0 && rxHasWhitespace.MatchString(str) +} + +// IsByteLength checks if the string's length (in bytes) falls in a range. +func IsByteLength(str string, min, max int) bool { + return len(str) >= min && len(str) <= max +} + +// IsUUIDv3 checks if the string is a UUID version 3. +func IsUUIDv3(str string) bool { + return rxUUID3.MatchString(str) +} + +// IsUUIDv4 checks if the string is a UUID version 4. +func IsUUIDv4(str string) bool { + return rxUUID4.MatchString(str) +} + +// IsUUIDv5 checks if the string is a UUID version 5. 
+func IsUUIDv5(str string) bool { + return rxUUID5.MatchString(str) +} + +// IsUUID checks if the string is a UUID (version 3, 4 or 5). +func IsUUID(str string) bool { + return rxUUID.MatchString(str) +} + +// Byte to index table for O(1) lookups when unmarshaling. +// We use 0xFF as sentinel value for invalid indexes. +var ulidDec = [...]byte{ + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x01, + 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, + 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14, 0x15, 0xFF, + 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C, 0x1D, 0x1E, + 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C, + 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14, + 0x15, 0xFF, 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C, + 0x1D, 0x1E, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, +} + +// EncodedSize is the length of a text encoded ULID. 
+const ulidEncodedSize = 26 + +// IsULID checks if the string is a ULID. +// +// Implementation got from: +// https://github.com/oklog/ulid (Apache-2.0 License) +// +func IsULID(str string) bool { + // Check if a base32 encoded ULID is the right length. + if len(str) != ulidEncodedSize { + return false + } + + // Check if all the characters in a base32 encoded ULID are part of the + // expected base32 character set. + if ulidDec[str[0]] == 0xFF || + ulidDec[str[1]] == 0xFF || + ulidDec[str[2]] == 0xFF || + ulidDec[str[3]] == 0xFF || + ulidDec[str[4]] == 0xFF || + ulidDec[str[5]] == 0xFF || + ulidDec[str[6]] == 0xFF || + ulidDec[str[7]] == 0xFF || + ulidDec[str[8]] == 0xFF || + ulidDec[str[9]] == 0xFF || + ulidDec[str[10]] == 0xFF || + ulidDec[str[11]] == 0xFF || + ulidDec[str[12]] == 0xFF || + ulidDec[str[13]] == 0xFF || + ulidDec[str[14]] == 0xFF || + ulidDec[str[15]] == 0xFF || + ulidDec[str[16]] == 0xFF || + ulidDec[str[17]] == 0xFF || + ulidDec[str[18]] == 0xFF || + ulidDec[str[19]] == 0xFF || + ulidDec[str[20]] == 0xFF || + ulidDec[str[21]] == 0xFF || + ulidDec[str[22]] == 0xFF || + ulidDec[str[23]] == 0xFF || + ulidDec[str[24]] == 0xFF || + ulidDec[str[25]] == 0xFF { + return false + } + + // Check if the first character in a base32 encoded ULID will overflow. This + // happens because the base32 representation encodes 130 bits, while the + // ULID is only 128 bits. + // + // See https://github.com/oklog/ulid/issues/9 for details. + if str[0] > '7' { + return false + } + return true +} + +// IsCreditCard checks if the string is a credit card. 
+func IsCreditCard(str string) bool { + sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "") + if !rxCreditCard.MatchString(sanitized) { + return false + } + + number, _ := ToInt(sanitized) + number, lastDigit := number / 10, number % 10 + + var sum int64 + for i:=0; number > 0; i++ { + digit := number % 10 + + if i % 2 == 0 { + digit *= 2 + if digit > 9 { + digit -= 9 + } + } + + sum += digit + number = number / 10 + } + + return (sum + lastDigit) % 10 == 0 +} + +// IsISBN10 checks if the string is an ISBN version 10. +func IsISBN10(str string) bool { + return IsISBN(str, 10) +} + +// IsISBN13 checks if the string is an ISBN version 13. +func IsISBN13(str string) bool { + return IsISBN(str, 13) +} + +// IsISBN checks if the string is an ISBN (version 10 or 13). +// If version value is not equal to 10 or 13, it will be checks both variants. +func IsISBN(str string, version int) bool { + sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "") + var checksum int32 + var i int32 + if version == 10 { + if !rxISBN10.MatchString(sanitized) { + return false + } + for i = 0; i < 9; i++ { + checksum += (i + 1) * int32(sanitized[i]-'0') + } + if sanitized[9] == 'X' { + checksum += 10 * 10 + } else { + checksum += 10 * int32(sanitized[9]-'0') + } + if checksum%11 == 0 { + return true + } + return false + } else if version == 13 { + if !rxISBN13.MatchString(sanitized) { + return false + } + factor := []int32{1, 3} + for i = 0; i < 12; i++ { + checksum += factor[i%2] * int32(sanitized[i]-'0') + } + return (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0 + } + return IsISBN(str, 10) || IsISBN(str, 13) +} + +// IsJSON checks if the string is valid JSON (note: uses json.Unmarshal). +func IsJSON(str string) bool { + var js json.RawMessage + return json.Unmarshal([]byte(str), &js) == nil +} + +// IsMultibyte checks if the string contains one or more multibyte chars. Empty string is valid. 
+func IsMultibyte(str string) bool { + if IsNull(str) { + return true + } + return rxMultibyte.MatchString(str) +} + +// IsASCII checks if the string contains ASCII chars only. Empty string is valid. +func IsASCII(str string) bool { + if IsNull(str) { + return true + } + return rxASCII.MatchString(str) +} + +// IsPrintableASCII checks if the string contains printable ASCII chars only. Empty string is valid. +func IsPrintableASCII(str string) bool { + if IsNull(str) { + return true + } + return rxPrintableASCII.MatchString(str) +} + +// IsFullWidth checks if the string contains any full-width chars. Empty string is valid. +func IsFullWidth(str string) bool { + if IsNull(str) { + return true + } + return rxFullWidth.MatchString(str) +} + +// IsHalfWidth checks if the string contains any half-width chars. Empty string is valid. +func IsHalfWidth(str string) bool { + if IsNull(str) { + return true + } + return rxHalfWidth.MatchString(str) +} + +// IsVariableWidth checks if the string contains a mixture of full and half-width chars. Empty string is valid. +func IsVariableWidth(str string) bool { + if IsNull(str) { + return true + } + return rxHalfWidth.MatchString(str) && rxFullWidth.MatchString(str) +} + +// IsBase64 checks if a string is base64 encoded. +func IsBase64(str string) bool { + return rxBase64.MatchString(str) +} + +// IsFilePath checks is a string is Win or Unix file path and returns it's type. 
+func IsFilePath(str string) (bool, int) { + if rxWinPath.MatchString(str) { + //check windows path limit see: + // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath + if len(str[3:]) > 32767 { + return false, Win + } + return true, Win + } else if rxUnixPath.MatchString(str) { + return true, Unix + } + return false, Unknown +} + +//IsWinFilePath checks both relative & absolute paths in Windows +func IsWinFilePath(str string) bool { + if rxARWinPath.MatchString(str) { + //check windows path limit see: + // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath + if len(str[3:]) > 32767 { + return false + } + return true + } + return false +} + +//IsUnixFilePath checks both relative & absolute paths in Unix +func IsUnixFilePath(str string) bool { + if rxARUnixPath.MatchString(str) { + return true + } + return false +} + +// IsDataURI checks if a string is base64 encoded data URI such as an image +func IsDataURI(str string) bool { + dataURI := strings.Split(str, ",") + if !rxDataURI.MatchString(dataURI[0]) { + return false + } + return IsBase64(dataURI[1]) +} + +// IsMagnetURI checks if a string is valid magnet URI +func IsMagnetURI(str string) bool { + return rxMagnetURI.MatchString(str) +} + +// IsISO3166Alpha2 checks if a string is valid two-letter country code +func IsISO3166Alpha2(str string) bool { + for _, entry := range ISO3166List { + if str == entry.Alpha2Code { + return true + } + } + return false +} + +// IsISO3166Alpha3 checks if a string is valid three-letter country code +func IsISO3166Alpha3(str string) bool { + for _, entry := range ISO3166List { + if str == entry.Alpha3Code { + return true + } + } + return false +} + +// IsISO693Alpha2 checks if a string is valid two-letter language code +func IsISO693Alpha2(str string) bool { + for _, entry := range ISO693List { + if str == entry.Alpha2Code { + return true + } + } + return false +} + +// IsISO693Alpha3b checks if a string is valid three-letter language code +func 
IsISO693Alpha3b(str string) bool { + for _, entry := range ISO693List { + if str == entry.Alpha3bCode { + return true + } + } + return false +} + +// IsDNSName will validate the given string as a DNS name +func IsDNSName(str string) bool { + if str == "" || len(strings.Replace(str, ".", "", -1)) > 255 { + // constraints already violated + return false + } + return !IsIP(str) && rxDNSName.MatchString(str) +} + +// IsHash checks if a string is a hash of type algorithm. +// Algorithm is one of ['md4', 'md5', 'sha1', 'sha256', 'sha384', 'sha512', 'ripemd128', 'ripemd160', 'tiger128', 'tiger160', 'tiger192', 'crc32', 'crc32b'] +func IsHash(str string, algorithm string) bool { + var len string + algo := strings.ToLower(algorithm) + + if algo == "crc32" || algo == "crc32b" { + len = "8" + } else if algo == "md5" || algo == "md4" || algo == "ripemd128" || algo == "tiger128" { + len = "32" + } else if algo == "sha1" || algo == "ripemd160" || algo == "tiger160" { + len = "40" + } else if algo == "tiger192" { + len = "48" + } else if algo == "sha3-224" { + len = "56" + } else if algo == "sha256" || algo == "sha3-256" { + len = "64" + } else if algo == "sha384" || algo == "sha3-384" { + len = "96" + } else if algo == "sha512" || algo == "sha3-512" { + len = "128" + } else { + return false + } + + return Matches(str, "^[a-f0-9]{"+len+"}$") +} + +// IsSHA3224 checks is a string is a SHA3-224 hash. Alias for `IsHash(str, "sha3-224")` +func IsSHA3224(str string) bool { + return IsHash(str, "sha3-224") +} + +// IsSHA3256 checks is a string is a SHA3-256 hash. Alias for `IsHash(str, "sha3-256")` +func IsSHA3256(str string) bool { + return IsHash(str, "sha3-256") +} + +// IsSHA3384 checks is a string is a SHA3-384 hash. Alias for `IsHash(str, "sha3-384")` +func IsSHA3384(str string) bool { + return IsHash(str, "sha3-384") +} + +// IsSHA3512 checks is a string is a SHA3-512 hash. 
Alias for `IsHash(str, "sha3-512")` +func IsSHA3512(str string) bool { + return IsHash(str, "sha3-512") +} + +// IsSHA512 checks is a string is a SHA512 hash. Alias for `IsHash(str, "sha512")` +func IsSHA512(str string) bool { + return IsHash(str, "sha512") +} + +// IsSHA384 checks is a string is a SHA384 hash. Alias for `IsHash(str, "sha384")` +func IsSHA384(str string) bool { + return IsHash(str, "sha384") +} + +// IsSHA256 checks is a string is a SHA256 hash. Alias for `IsHash(str, "sha256")` +func IsSHA256(str string) bool { + return IsHash(str, "sha256") +} + +// IsTiger192 checks is a string is a Tiger192 hash. Alias for `IsHash(str, "tiger192")` +func IsTiger192(str string) bool { + return IsHash(str, "tiger192") +} + +// IsTiger160 checks is a string is a Tiger160 hash. Alias for `IsHash(str, "tiger160")` +func IsTiger160(str string) bool { + return IsHash(str, "tiger160") +} + +// IsRipeMD160 checks is a string is a RipeMD160 hash. Alias for `IsHash(str, "ripemd160")` +func IsRipeMD160(str string) bool { + return IsHash(str, "ripemd160") +} + +// IsSHA1 checks is a string is a SHA-1 hash. Alias for `IsHash(str, "sha1")` +func IsSHA1(str string) bool { + return IsHash(str, "sha1") +} + +// IsTiger128 checks is a string is a Tiger128 hash. Alias for `IsHash(str, "tiger128")` +func IsTiger128(str string) bool { + return IsHash(str, "tiger128") +} + +// IsRipeMD128 checks is a string is a RipeMD128 hash. Alias for `IsHash(str, "ripemd128")` +func IsRipeMD128(str string) bool { + return IsHash(str, "ripemd128") +} + +// IsCRC32 checks is a string is a CRC32 hash. Alias for `IsHash(str, "crc32")` +func IsCRC32(str string) bool { + return IsHash(str, "crc32") +} + +// IsCRC32b checks is a string is a CRC32b hash. Alias for `IsHash(str, "crc32b")` +func IsCRC32b(str string) bool { + return IsHash(str, "crc32b") +} + +// IsMD5 checks is a string is a MD5 hash. 
Alias for `IsHash(str, "md5")` +func IsMD5(str string) bool { + return IsHash(str, "md5") +} + +// IsMD4 checks is a string is a MD4 hash. Alias for `IsHash(str, "md4")` +func IsMD4(str string) bool { + return IsHash(str, "md4") +} + +// IsDialString validates the given string for usage with the various Dial() functions +func IsDialString(str string) bool { + if h, p, err := net.SplitHostPort(str); err == nil && h != "" && p != "" && (IsDNSName(h) || IsIP(h)) && IsPort(p) { + return true + } + + return false +} + +// IsIP checks if a string is either IP version 4 or 6. Alias for `net.ParseIP` +func IsIP(str string) bool { + return net.ParseIP(str) != nil +} + +// IsPort checks if a string represents a valid port +func IsPort(str string) bool { + if i, err := strconv.Atoi(str); err == nil && i > 0 && i < 65536 { + return true + } + return false +} + +// IsIPv4 checks if the string is an IP version 4. +func IsIPv4(str string) bool { + ip := net.ParseIP(str) + return ip != nil && strings.Contains(str, ".") +} + +// IsIPv6 checks if the string is an IP version 6. +func IsIPv6(str string) bool { + ip := net.ParseIP(str) + return ip != nil && strings.Contains(str, ":") +} + +// IsCIDR checks if the string is an valid CIDR notiation (IPV4 & IPV6) +func IsCIDR(str string) bool { + _, _, err := net.ParseCIDR(str) + return err == nil +} + +// IsMAC checks if a string is valid MAC address. +// Possible MAC formats: +// 01:23:45:67:89:ab +// 01:23:45:67:89:ab:cd:ef +// 01-23-45-67-89-ab +// 01-23-45-67-89-ab-cd-ef +// 0123.4567.89ab +// 0123.4567.89ab.cdef +func IsMAC(str string) bool { + _, err := net.ParseMAC(str) + return err == nil +} + +// IsHost checks if the string is a valid IP (both v4 and v6) or a valid DNS name +func IsHost(str string) bool { + return IsIP(str) || IsDNSName(str) +} + +// IsMongoID checks if the string is a valid hex-encoded representation of a MongoDB ObjectId. 
+func IsMongoID(str string) bool { + return rxHexadecimal.MatchString(str) && (len(str) == 24) +} + +// IsLatitude checks if a string is valid latitude. +func IsLatitude(str string) bool { + return rxLatitude.MatchString(str) +} + +// IsLongitude checks if a string is valid longitude. +func IsLongitude(str string) bool { + return rxLongitude.MatchString(str) +} + +// IsIMEI checks if a string is valid IMEI +func IsIMEI(str string) bool { + return rxIMEI.MatchString(str) +} + +// IsIMSI checks if a string is valid IMSI +func IsIMSI(str string) bool { + if !rxIMSI.MatchString(str) { + return false + } + + mcc, err := strconv.ParseInt(str[0:3], 10, 32) + if err != nil { + return false + } + + switch mcc { + case 202, 204, 206, 208, 212, 213, 214, 216, 218, 219: + case 220, 221, 222, 226, 228, 230, 231, 232, 234, 235: + case 238, 240, 242, 244, 246, 247, 248, 250, 255, 257: + case 259, 260, 262, 266, 268, 270, 272, 274, 276, 278: + case 280, 282, 283, 284, 286, 288, 289, 290, 292, 293: + case 294, 295, 297, 302, 308, 310, 311, 312, 313, 314: + case 315, 316, 330, 332, 334, 338, 340, 342, 344, 346: + case 348, 350, 352, 354, 356, 358, 360, 362, 363, 364: + case 365, 366, 368, 370, 372, 374, 376, 400, 401, 402: + case 404, 405, 406, 410, 412, 413, 414, 415, 416, 417: + case 418, 419, 420, 421, 422, 424, 425, 426, 427, 428: + case 429, 430, 431, 432, 434, 436, 437, 438, 440, 441: + case 450, 452, 454, 455, 456, 457, 460, 461, 466, 467: + case 470, 472, 502, 505, 510, 514, 515, 520, 525, 528: + case 530, 536, 537, 539, 540, 541, 542, 543, 544, 545: + case 546, 547, 548, 549, 550, 551, 552, 553, 554, 555: + case 602, 603, 604, 605, 606, 607, 608, 609, 610, 611: + case 612, 613, 614, 615, 616, 617, 618, 619, 620, 621: + case 622, 623, 624, 625, 626, 627, 628, 629, 630, 631: + case 632, 633, 634, 635, 636, 637, 638, 639, 640, 641: + case 642, 643, 645, 646, 647, 648, 649, 650, 651, 652: + case 653, 654, 655, 657, 658, 659, 702, 704, 706, 708: + case 710, 712, 714, 716, 722, 
724, 730, 732, 734, 736: + case 738, 740, 742, 744, 746, 748, 750, 995: + return true + default: + return false + } + return true +} + +// IsRsaPublicKey checks if a string is valid public key with provided length +func IsRsaPublicKey(str string, keylen int) bool { + bb := bytes.NewBufferString(str) + pemBytes, err := ioutil.ReadAll(bb) + if err != nil { + return false + } + block, _ := pem.Decode(pemBytes) + if block != nil && block.Type != "PUBLIC KEY" { + return false + } + var der []byte + + if block != nil { + der = block.Bytes + } else { + der, err = base64.StdEncoding.DecodeString(str) + if err != nil { + return false + } + } + + key, err := x509.ParsePKIXPublicKey(der) + if err != nil { + return false + } + pubkey, ok := key.(*rsa.PublicKey) + if !ok { + return false + } + bitlen := len(pubkey.N.Bytes()) * 8 + return bitlen == int(keylen) +} + +// IsRegex checks if a give string is a valid regex with RE2 syntax or not +func IsRegex(str string) bool { + if _, err := regexp.Compile(str); err == nil { + return true + } + return false +} + +func toJSONName(tag string) string { + if tag == "" { + return "" + } + + // JSON name always comes first. If there's no options then split[0] is + // JSON name, if JSON name is not set, then split[0] is an empty string. + split := strings.SplitN(tag, ",", 2) + + name := split[0] + + // However it is possible that the field is skipped when + // (de-)serializing from/to JSON, in which case assume that there is no + // tag name to use + if name == "-" { + return "" + } + return name +} + +func prependPathToErrors(err error, path string) error { + switch err2 := err.(type) { + case Error: + err2.Path = append([]string{path}, err2.Path...) 
+ return err2 + case Errors: + errors := err2.Errors() + for i, err3 := range errors { + errors[i] = prependPathToErrors(err3, path) + } + return err2 + } + return err +} + +// ValidateArray performs validation according to condition iterator that validates every element of the array +func ValidateArray(array []interface{}, iterator ConditionIterator) bool { + return Every(array, iterator) +} + +// ValidateMap use validation map for fields. +// result will be equal to `false` if there are any errors. +// s is the map containing the data to be validated. +// m is the validation map in the form: +// map[string]interface{}{"name":"required,alpha","address":map[string]interface{}{"line1":"required,alphanum"}} +func ValidateMap(s map[string]interface{}, m map[string]interface{}) (bool, error) { + if s == nil { + return true, nil + } + result := true + var err error + var errs Errors + var index int + val := reflect.ValueOf(s) + for key, value := range s { + presentResult := true + validator, ok := m[key] + if !ok { + presentResult = false + var err error + err = fmt.Errorf("all map keys has to be present in the validation map; got %s", key) + err = prependPathToErrors(err, key) + errs = append(errs, err) + } + valueField := reflect.ValueOf(value) + mapResult := true + typeResult := true + structResult := true + resultField := true + switch subValidator := validator.(type) { + case map[string]interface{}: + var err error + if v, ok := value.(map[string]interface{}); !ok { + mapResult = false + err = fmt.Errorf("map validator has to be for the map type only; got %s", valueField.Type().String()) + err = prependPathToErrors(err, key) + errs = append(errs, err) + } else { + mapResult, err = ValidateMap(v, subValidator) + if err != nil { + mapResult = false + err = prependPathToErrors(err, key) + errs = append(errs, err) + } + } + case string: + if (valueField.Kind() == reflect.Struct || + (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) && + 
subValidator != "-" { + var err error + structResult, err = ValidateStruct(valueField.Interface()) + if err != nil { + err = prependPathToErrors(err, key) + errs = append(errs, err) + } + } + resultField, err = typeCheck(valueField, reflect.StructField{ + Name: key, + PkgPath: "", + Type: val.Type(), + Tag: reflect.StructTag(fmt.Sprintf("%s:%q", tagName, subValidator)), + Offset: 0, + Index: []int{index}, + Anonymous: false, + }, val, nil) + if err != nil { + errs = append(errs, err) + } + case nil: + // already handlerd when checked before + default: + typeResult = false + err = fmt.Errorf("map validator has to be either map[string]interface{} or string; got %s", valueField.Type().String()) + err = prependPathToErrors(err, key) + errs = append(errs, err) + } + result = result && presentResult && typeResult && resultField && structResult && mapResult + index++ + } + // checks required keys + requiredResult := true + for key, value := range m { + if schema, ok := value.(string); ok { + tags := parseTagIntoMap(schema) + if required, ok := tags["required"]; ok { + if _, ok := s[key]; !ok { + requiredResult = false + if required.customErrorMessage != "" { + err = Error{key, fmt.Errorf(required.customErrorMessage), true, "required", []string{}} + } else { + err = Error{key, fmt.Errorf("required field missing"), false, "required", []string{}} + } + errs = append(errs, err) + } + } + } + } + + if len(errs) > 0 { + err = errs + } + return result && requiredResult, err +} + +// ValidateStruct use tags for fields. +// result will be equal to `false` if there are any errors. 
+// todo currently there is no guarantee that errors will be returned in predictable order (tests may to fail) +func ValidateStruct(s interface{}) (bool, error) { + if s == nil { + return true, nil + } + result := true + var err error + val := reflect.ValueOf(s) + if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr { + val = val.Elem() + } + // we only accept structs + if val.Kind() != reflect.Struct { + return false, fmt.Errorf("function only accepts structs; got %s", val.Kind()) + } + var errs Errors + for i := 0; i < val.NumField(); i++ { + valueField := val.Field(i) + typeField := val.Type().Field(i) + if typeField.PkgPath != "" { + continue // Private field + } + structResult := true + if valueField.Kind() == reflect.Interface { + valueField = valueField.Elem() + } + if (valueField.Kind() == reflect.Struct || + (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) && + typeField.Tag.Get(tagName) != "-" { + var err error + structResult, err = ValidateStruct(valueField.Interface()) + if err != nil { + err = prependPathToErrors(err, typeField.Name) + errs = append(errs, err) + } + } + resultField, err2 := typeCheck(valueField, typeField, val, nil) + if err2 != nil { + + // Replace structure name with JSON name if there is a tag on the variable + jsonTag := toJSONName(typeField.Tag.Get("json")) + if jsonTag != "" { + switch jsonError := err2.(type) { + case Error: + jsonError.Name = jsonTag + err2 = jsonError + case Errors: + for i2, err3 := range jsonError { + switch customErr := err3.(type) { + case Error: + customErr.Name = jsonTag + jsonError[i2] = customErr + } + } + + err2 = jsonError + } + } + + errs = append(errs, err2) + } + result = result && resultField && structResult + } + if len(errs) > 0 { + err = errs + } + return result, err +} + +// ValidateStructAsync performs async validation of the struct and returns results through the channels +func ValidateStructAsync(s interface{}) (<-chan bool, <-chan error) { + res 
:= make(chan bool) + errors := make(chan error) + + go func() { + defer close(res) + defer close(errors) + + isValid, isFailed := ValidateStruct(s) + + res <- isValid + errors <- isFailed + }() + + return res, errors +} + +// ValidateMapAsync performs async validation of the map and returns results through the channels +func ValidateMapAsync(s map[string]interface{}, m map[string]interface{}) (<-chan bool, <-chan error) { + res := make(chan bool) + errors := make(chan error) + + go func() { + defer close(res) + defer close(errors) + + isValid, isFailed := ValidateMap(s, m) + + res <- isValid + errors <- isFailed + }() + + return res, errors +} + +// parseTagIntoMap parses a struct tag `valid:required~Some error message,length(2|3)` into map[string]string{"required": "Some error message", "length(2|3)": ""} +func parseTagIntoMap(tag string) tagOptionsMap { + optionsMap := make(tagOptionsMap) + options := strings.Split(tag, ",") + + for i, option := range options { + option = strings.TrimSpace(option) + + validationOptions := strings.Split(option, "~") + if !isValidTag(validationOptions[0]) { + continue + } + if len(validationOptions) == 2 { + optionsMap[validationOptions[0]] = tagOption{validationOptions[0], validationOptions[1], i} + } else { + optionsMap[validationOptions[0]] = tagOption{validationOptions[0], "", i} + } + } + return optionsMap +} + +func isValidTag(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + default: + if !unicode.IsLetter(c) && !unicode.IsDigit(c) { + return false + } + } + } + return true +} + +// IsSSN will validate the given string as a U.S. 
Social Security Number +func IsSSN(str string) bool { + if str == "" || len(str) != 11 { + return false + } + return rxSSN.MatchString(str) +} + +// IsSemver checks if string is valid semantic version +func IsSemver(str string) bool { + return rxSemver.MatchString(str) +} + +// IsType checks if interface is of some type +func IsType(v interface{}, params ...string) bool { + if len(params) == 1 { + typ := params[0] + return strings.Replace(reflect.TypeOf(v).String(), " ", "", -1) == strings.Replace(typ, " ", "", -1) + } + return false +} + +// IsTime checks if string is valid according to given format +func IsTime(str string, format string) bool { + _, err := time.Parse(format, str) + return err == nil +} + +// IsUnixTime checks if string is valid unix timestamp value +func IsUnixTime(str string) bool { + if _, err := strconv.Atoi(str); err == nil { + return true + } + return false +} + +// IsRFC3339 checks if string is valid timestamp value according to RFC3339 +func IsRFC3339(str string) bool { + return IsTime(str, time.RFC3339) +} + +// IsRFC3339WithoutZone checks if string is valid timestamp value according to RFC3339 which excludes the timezone. +func IsRFC3339WithoutZone(str string) bool { + return IsTime(str, rfc3339WithoutZone) +} + +// IsISO4217 checks if string is valid ISO currency code +func IsISO4217(str string) bool { + for _, currency := range ISO4217List { + if str == currency { + return true + } + } + + return false +} + +// ByteLength checks string's length +func ByteLength(str string, params ...string) bool { + if len(params) == 2 { + min, _ := ToInt(params[0]) + max, _ := ToInt(params[1]) + return len(str) >= int(min) && len(str) <= int(max) + } + + return false +} + +// RuneLength checks string's length +// Alias for StringLength +func RuneLength(str string, params ...string) bool { + return StringLength(str, params...) 
+} + +// IsRsaPub checks whether string is valid RSA key +// Alias for IsRsaPublicKey +func IsRsaPub(str string, params ...string) bool { + if len(params) == 1 { + len, _ := ToInt(params[0]) + return IsRsaPublicKey(str, int(len)) + } + + return false +} + +// StringMatches checks if a string matches a given pattern. +func StringMatches(s string, params ...string) bool { + if len(params) == 1 { + pattern := params[0] + return Matches(s, pattern) + } + return false +} + +// StringLength checks string's length (including multi byte strings) +func StringLength(str string, params ...string) bool { + + if len(params) == 2 { + strLength := utf8.RuneCountInString(str) + min, _ := ToInt(params[0]) + max, _ := ToInt(params[1]) + return strLength >= int(min) && strLength <= int(max) + } + + return false +} + +// MinStringLength checks string's minimum length (including multi byte strings) +func MinStringLength(str string, params ...string) bool { + + if len(params) == 1 { + strLength := utf8.RuneCountInString(str) + min, _ := ToInt(params[0]) + return strLength >= int(min) + } + + return false +} + +// MaxStringLength checks string's maximum length (including multi byte strings) +func MaxStringLength(str string, params ...string) bool { + + if len(params) == 1 { + strLength := utf8.RuneCountInString(str) + max, _ := ToInt(params[0]) + return strLength <= int(max) + } + + return false +} + +// Range checks string's length +func Range(str string, params ...string) bool { + if len(params) == 2 { + value, _ := ToFloat(str) + min, _ := ToFloat(params[0]) + max, _ := ToFloat(params[1]) + return InRange(value, min, max) + } + + return false +} + +// IsInRaw checks if string is in list of allowed values +func IsInRaw(str string, params ...string) bool { + if len(params) == 1 { + rawParams := params[0] + + parsedParams := strings.Split(rawParams, "|") + + return IsIn(str, parsedParams...) 
+ } + + return false +} + +// IsIn checks if string str is a member of the set of strings params +func IsIn(str string, params ...string) bool { + for _, param := range params { + if str == param { + return true + } + } + + return false +} + +func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) { + if nilPtrAllowedByRequired { + k := v.Kind() + if (k == reflect.Ptr || k == reflect.Interface) && v.IsNil() { + return true, nil + } + } + + if requiredOption, isRequired := options["required"]; isRequired { + if len(requiredOption.customErrorMessage) > 0 { + return false, Error{t.Name, fmt.Errorf(requiredOption.customErrorMessage), true, "required", []string{}} + } + return false, Error{t.Name, fmt.Errorf("non zero value required"), false, "required", []string{}} + } else if _, isOptional := options["optional"]; fieldsRequiredByDefault && !isOptional { + return false, Error{t.Name, fmt.Errorf("Missing required field"), false, "required", []string{}} + } + // not required and empty is valid + return true, nil +} + +func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) { + if !v.IsValid() { + return false, nil + } + + tag := t.Tag.Get(tagName) + + // checks if the field should be ignored + switch tag { + case "": + if v.Kind() != reflect.Slice && v.Kind() != reflect.Map { + if !fieldsRequiredByDefault { + return true, nil + } + return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false, "required", []string{}} + } + case "-": + return true, nil + } + + isRootType := false + if options == nil { + isRootType = true + options = parseTagIntoMap(tag) + } + + if isEmptyValue(v) { + // an empty value is not validated, checks only required + isValid, resultErr = checkRequired(v, t, options) + for key := range options { + delete(options, key) + } + return isValid, resultErr + } + + var customTypeErrors Errors + 
optionsOrder := options.orderedKeys() + for _, validatorName := range optionsOrder { + validatorStruct := options[validatorName] + if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok { + delete(options, validatorName) + + if result := validatefunc(v.Interface(), o.Interface()); !result { + if len(validatorStruct.customErrorMessage) > 0 { + customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: TruncatingErrorf(validatorStruct.customErrorMessage, fmt.Sprint(v), validatorName), CustomErrorMessageExists: true, Validator: stripParams(validatorName)}) + continue + } + customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), validatorName), CustomErrorMessageExists: false, Validator: stripParams(validatorName)}) + } + } + } + + if len(customTypeErrors.Errors()) > 0 { + return false, customTypeErrors + } + + if isRootType { + // Ensure that we've checked the value by all specified validators before report that the value is valid + defer func() { + delete(options, "optional") + delete(options, "required") + + if isValid && resultErr == nil && len(options) != 0 { + optionsOrder := options.orderedKeys() + for _, validator := range optionsOrder { + isValid = false + resultErr = Error{t.Name, fmt.Errorf( + "The following validator is invalid or can't be applied to the field: %q", validator), false, stripParams(validator), []string{}} + return + } + } + }() + } + + for _, validatorSpec := range optionsOrder { + validatorStruct := options[validatorSpec] + var negate bool + validator := validatorSpec + customMsgExists := len(validatorStruct.customErrorMessage) > 0 + + // checks whether the tag looks like '!something' or 'something' + if validator[0] == '!' 
{ + validator = validator[1:] + negate = true + } + + // checks for interface param validators + for key, value := range InterfaceParamTagRegexMap { + ps := value.FindStringSubmatch(validator) + if len(ps) == 0 { + continue + } + + validatefunc, ok := InterfaceParamTagMap[key] + if !ok { + continue + } + + delete(options, validatorSpec) + + field := fmt.Sprint(v) + if result := validatefunc(v.Interface(), ps[1:]...); (!result && !negate) || (result && negate) { + if customMsgExists { + return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + if negate { + return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + } + } + + switch v.Kind() { + case reflect.Bool, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Float32, reflect.Float64, + reflect.String: + // for each tag option checks the map of validator functions + for _, validatorSpec := range optionsOrder { + validatorStruct := options[validatorSpec] + var negate bool + validator := validatorSpec + customMsgExists := len(validatorStruct.customErrorMessage) > 0 + + // checks whether the tag looks like '!something' or 'something' + if validator[0] == '!' 
{ + validator = validator[1:] + negate = true + } + + // checks for param validators + for key, value := range ParamTagRegexMap { + ps := value.FindStringSubmatch(validator) + if len(ps) == 0 { + continue + } + + validatefunc, ok := ParamTagMap[key] + if !ok { + continue + } + + delete(options, validatorSpec) + + switch v.Kind() { + case reflect.String, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64: + + field := fmt.Sprint(v) // make value into string, then validate with regex + if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) { + if customMsgExists { + return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + if negate { + return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + default: + // type not yet supported, fail + return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false, stripParams(validatorSpec), []string{}} + } + } + + if validatefunc, ok := TagMap[validator]; ok { + delete(options, validatorSpec) + + switch v.Kind() { + case reflect.String, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64: + field := fmt.Sprint(v) // make value into string, then validate with regex + if result := validatefunc(field); !result && !negate || result && negate { + if customMsgExists { + return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, 
field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + if negate { + return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + default: + //Not Yet Supported Types (Fail here!) + err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", validator, v.Kind(), v) + return false, Error{t.Name, err, false, stripParams(validatorSpec), []string{}} + } + } + } + return true, nil + case reflect.Map: + if v.Type().Key().Kind() != reflect.String { + return false, &UnsupportedTypeError{v.Type()} + } + var sv stringValues + sv = v.MapKeys() + sort.Sort(sv) + result := true + for i, k := range sv { + var resultItem bool + var err error + if v.MapIndex(k).Kind() != reflect.Struct { + resultItem, err = typeCheck(v.MapIndex(k), t, o, options) + if err != nil { + return false, err + } + } else { + resultItem, err = ValidateStruct(v.MapIndex(k).Interface()) + if err != nil { + err = prependPathToErrors(err, t.Name+"."+sv[i].Interface().(string)) + return false, err + } + } + result = result && resultItem + } + return result, nil + case reflect.Slice, reflect.Array: + result := true + for i := 0; i < v.Len(); i++ { + var resultItem bool + var err error + if v.Index(i).Kind() != reflect.Struct { + resultItem, err = typeCheck(v.Index(i), t, o, options) + if err != nil { + return false, err + } + } else { + resultItem, err = ValidateStruct(v.Index(i).Interface()) + if err != nil { + err = prependPathToErrors(err, t.Name+"."+strconv.Itoa(i)) + return false, err + } + } + result = result && resultItem + } + return result, nil + case reflect.Interface: + // If the value is an interface then encode its element + if v.IsNil() { + return true, nil + } + return ValidateStruct(v.Interface()) + case reflect.Ptr: + // If 
the value is a pointer then checks its element + if v.IsNil() { + return true, nil + } + return typeCheck(v.Elem(), t, o, options) + case reflect.Struct: + return true, nil + default: + return false, &UnsupportedTypeError{v.Type()} + } +} + +func stripParams(validatorString string) string { + return paramsRegexp.ReplaceAllString(validatorString, "") +} + +// isEmptyValue checks whether value empty or not +func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.String, reflect.Array: + return v.Len() == 0 + case reflect.Map, reflect.Slice: + return v.Len() == 0 || v.IsNil() + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + + return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface()) +} + +// ErrorByField returns error for specified field of the struct +// validated by ValidateStruct or empty string if there are no errors +// or this field doesn't exists or doesn't have any errors. +func ErrorByField(e error, field string) string { + if e == nil { + return "" + } + return ErrorsByField(e)[field] +} + +// ErrorsByField returns map of errors of the struct validated +// by ValidateStruct or empty map if there are no errors. 
+func ErrorsByField(e error) map[string]string { + m := make(map[string]string) + if e == nil { + return m + } + // prototype for ValidateStruct + + switch e := e.(type) { + case Error: + m[e.Name] = e.Err.Error() + case Errors: + for _, item := range e.Errors() { + n := ErrorsByField(item) + for k, v := range n { + m[k] = v + } + } + } + + return m +} + +// Error returns string equivalent for reflect.Type +func (e *UnsupportedTypeError) Error() string { + return "validator: unsupported type: " + e.Type.String() +} + +func (sv stringValues) Len() int { return len(sv) } +func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) } +func (sv stringValues) get(i int) string { return sv[i].String() } + +func IsE164(str string) bool { + return rxE164.MatchString(str) +} diff --git a/vendor/github.com/asaskevich/govalidator/wercker.yml b/vendor/github.com/asaskevich/govalidator/wercker.yml new file mode 100644 index 0000000..bc5f7b0 --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/wercker.yml @@ -0,0 +1,15 @@ +box: golang +build: + steps: + - setup-go-workspace + + - script: + name: go get + code: | + go version + go get -t ./... + + - script: + name: go test + code: | + go test -race -v ./... 
diff --git a/vendor/github.com/bep/clocks/.gitignore b/vendor/github.com/bep/clocks/.gitignore new file mode 100644 index 0000000..66fd13c --- /dev/null +++ b/vendor/github.com/bep/clocks/.gitignore @@ -0,0 +1,15 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/bep/clocks/LICENSE b/vendor/github.com/bep/clocks/LICENSE new file mode 100644 index 0000000..0f527e1 --- /dev/null +++ b/vendor/github.com/bep/clocks/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/bep/clocks/README.md b/vendor/github.com/bep/clocks/README.md new file mode 100644 index 0000000..6071631 --- /dev/null +++ b/vendor/github.com/bep/clocks/README.md @@ -0,0 +1,19 @@ +[![Tests on Linux, MacOS and Windows](https://github.com/bep/clocks/workflows/Test/badge.svg)](https://github.com/bep/clocks/actions?query=workflow:Test) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/clocks)](https://goreportcard.com/report/github.com/bep/clocks) +[![GoDoc](https://godoc.org/github.com/bep/clocks?status.svg)](https://godoc.org/github.com/bep/clocks) + +This package provides a _ticking clock_ that allows you to set the start time. It also provides a system clock, both implementing this interface: + +```go +// Clock provides the sub set of methods in time.Time that this package provides. +type Clock interface { + Now() time.Time + Since(t time.Time) time.Duration + Until(t time.Time) time.Duration + + // Offset returns the offset of this clock relative to the system clock. + Offset() time.Duration +} +``` + +Note that this only support a subset of all the methods in `time.Time` (see above) and is by design very simple. If you're looking for a more advanced time mocking library, have a look at https://github.com/benbjohnson/clock. diff --git a/vendor/github.com/bep/clocks/clock.go b/vendor/github.com/bep/clocks/clock.go new file mode 100644 index 0000000..bb188ca --- /dev/null +++ b/vendor/github.com/bep/clocks/clock.go @@ -0,0 +1,103 @@ +package clocks + +import ( + "time" +) + +// TimeCupFinalNorway1976 is the start time in UTC for the final match of the 1976 Norwegian Football Cup. +// This is typically used in tests where you need a historic time with a special meaning. +var TimeCupFinalNorway1976 = time.Date(1976, time.October, 24, 12, 15, 2, 127686412, time.UTC) + +// Clock provides the sub set of methods in time.Time that this package provides. 
+type Clock interface { + Now() time.Time + Since(t time.Time) time.Duration + Until(t time.Time) time.Duration + + // Offset returns the offset of this clock relative to the system clock. + Offset() time.Duration +} + +// Start creates a new Clock starting at t. +func Start(t time.Time) Clock { + return &clock{ + offset: t.Sub(time.Now()), + } +} + +type clock struct { + offset time.Duration +} + +// Now returns the current time relative to the configured start time. +func (c *clock) Now() time.Time { + return time.Now().Add(c.offset) +} + +// Since returns the time elapsed since t. +func (c *clock) Since(t time.Time) time.Duration { + return c.Now().Sub(t) +} + +// Until returns the duration until t. +func (c *clock) Until(t time.Time) time.Duration { + return t.Sub(c.Now()) +} + +// Offset returns the offset of this clock relative to the system clock. +// This can be used to convert to/from system time. +func (c *clock) Offset() time.Duration { + return c.offset +} + +var goClock = &systemClock{} + +// System is a Clock that uses the system clock, meaning it just delegates to time.Now() etc. +func System() Clock { + return goClock +} + +type systemClock struct{} + +func (c *systemClock) Now() time.Time { + return time.Now() +} + +func (c *systemClock) Since(t time.Time) time.Duration { + return time.Since(t) +} + +func (c *systemClock) Until(t time.Time) time.Duration { + return time.Until(t) +} + +func (c *systemClock) Offset() time.Duration { + return 0 +} + +// Fixed returns a Clock that always returns the given time. +func Fixed(t time.Time) Clock { + return &fixedClock{t: t} +} + +// fixedClock is a Clock that always returns the same time. 
+type fixedClock struct { + t time.Time +} + +func (c *fixedClock) Now() time.Time { + return c.t +} + +func (c *fixedClock) Since(t time.Time) time.Duration { + return c.Now().Sub(t) +} + +func (c *fixedClock) Until(t time.Time) time.Duration { + return t.Sub(c.Now()) +} + +// Offset returns the offset of this clock relative to the system clock. +func (c *fixedClock) Offset() time.Duration { + return time.Since(c.t) +} diff --git a/vendor/github.com/bep/debounce/.gitignore b/vendor/github.com/bep/debounce/.gitignore new file mode 100644 index 0000000..2eb4a46 --- /dev/null +++ b/vendor/github.com/bep/debounce/.gitignore @@ -0,0 +1,27 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +cover.out +nohup.out diff --git a/vendor/github.com/bep/debounce/LICENSE b/vendor/github.com/bep/debounce/LICENSE new file mode 100644 index 0000000..3a120e9 --- /dev/null +++ b/vendor/github.com/bep/debounce/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/debounce/README.md b/vendor/github.com/bep/debounce/README.md new file mode 100644 index 0000000..b26a64b --- /dev/null +++ b/vendor/github.com/bep/debounce/README.md @@ -0,0 +1,35 @@ +# Go Debounce + +[![Tests on Linux, MacOS and Windows](https://github.com/bep/debounce/workflows/Test/badge.svg)](https://github.com/bep/debounce/actions?query=workflow:Test) +[![GoDoc](https://godoc.org/github.com/bep/debounce?status.svg)](https://godoc.org/github.com/bep/debounce) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/debounce)](https://goreportcard.com/report/github.com/bep/debounce) +[![codecov](https://codecov.io/gh/bep/debounce/branch/master/graph/badge.svg)](https://codecov.io/gh/bep/debounce) +[![Release](https://img.shields.io/github/release/bep/debounce.svg?style=flat-square)](https://github.com/bep/debounce/releases/latest) + +## Example + +```go +func ExampleNew() { + var counter uint64 + + f := func() { + atomic.AddUint64(&counter, 1) + } + + debounced := debounce.New(100 * time.Millisecond) + + for i := 0; i < 3; i++ { + for j := 0; j < 10; j++ { + debounced(f) + } + + time.Sleep(200 * time.Millisecond) + } + + c := int(atomic.LoadUint64(&counter)) + + fmt.Println("Counter is", c) + // Output: Counter is 3 +} +``` + diff --git a/vendor/github.com/bep/debounce/debounce.go b/vendor/github.com/bep/debounce/debounce.go new file mode 100644 index 0000000..793d5ed --- /dev/null +++ b/vendor/github.com/bep/debounce/debounce.go @@ -0,0 +1,43 @@ +// 
Copyright © 2019 Bjørn Erik Pedersen . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +// Package debounce provides a debouncer func. The most typical use case would be +// the user typing a text into a form; the UI needs an update, but let's wait for +// a break. +package debounce + +import ( + "sync" + "time" +) + +// New returns a debounced function that takes another functions as its argument. +// This function will be called when the debounced function stops being called +// for the given duration. +// The debounced function can be invoked with different functions, if needed, +// the last one will win. +func New(after time.Duration) func(f func()) { + d := &debouncer{after: after} + + return func(f func()) { + d.add(f) + } +} + +type debouncer struct { + mu sync.Mutex + after time.Duration + timer *time.Timer +} + +func (d *debouncer) add(f func()) { + d.mu.Lock() + defer d.mu.Unlock() + + if d.timer != nil { + d.timer.Stop() + } + d.timer = time.AfterFunc(d.after, f) +} diff --git a/vendor/github.com/bep/gitmap/.gitignore b/vendor/github.com/bep/gitmap/.gitignore new file mode 100644 index 0000000..c305c2e --- /dev/null +++ b/vendor/github.com/bep/gitmap/.gitignore @@ -0,0 +1,27 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof +cover.out +bench.txt +bench2.txt diff --git a/vendor/github.com/bep/gitmap/LICENSE b/vendor/github.com/bep/gitmap/LICENSE new file mode 100644 index 0000000..3a120e9 --- /dev/null +++ b/vendor/github.com/bep/gitmap/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation 
files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/gitmap/README.md b/vendor/github.com/bep/gitmap/README.md new file mode 100644 index 0000000..cb2fa23 --- /dev/null +++ b/vendor/github.com/bep/gitmap/README.md @@ -0,0 +1,11 @@ +# GitMap + +[![GoDoc](https://godoc.org/github.com/bep/gitmap?status.svg)](https://godoc.org/github.com/bep/gitmap) +[![Build Status](https://travis-ci.org/bep/gitmap.svg)](https://travis-ci.org/bep/gitmap) [![Build status](https://ci.appveyor.com/api/projects/status/c8tu1wdoa4j7q81g?svg=true)](https://ci.appveyor.com/project/bjornerik/gitmap) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/gitmap)](https://goreportcard.com/report/github.com/bep/gitmap) + +A fairly fast way to create a map from all the filenames to info objects for a given revision of a Git repo. + +This library uses `os/exec` to talk to Git. There are faster ways to do this by using some Go Git-lib or C bindings, but that adds dependencies I really don't want or need. 
+ +If some `git log kung fu master` out there have suggestions for improvements, please open an issue or a PR. diff --git a/vendor/github.com/bep/gitmap/gitmap.go b/vendor/github.com/bep/gitmap/gitmap.go new file mode 100644 index 0000000..72ffada --- /dev/null +++ b/vendor/github.com/bep/gitmap/gitmap.go @@ -0,0 +1,179 @@ +// Copyright 2024 Bjørn Erik Pedersen . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package gitmap + +import ( + "bytes" + "errors" + "fmt" + "io" + "os/exec" + "path/filepath" + "strings" + "time" +) + +var ( + // will be modified during tests + gitExec string + + ErrGitNotFound = errors.New("git executable not found in $PATH") +) + +type GitRepo struct { + // TopLevelAbsPath contains the absolute path of the top-level directory. + // This is similar to the answer from "git rev-parse --show-toplevel" + // except symbolic link is not followed on non-Windows platforms. + // Note that this follows Git's way of handling paths, so expect to get forward slashes, + // even on Windows. + TopLevelAbsPath string + + // The files in this Git repository. + Files GitMap +} + +// GitMap maps filenames to Git revision information. +type GitMap map[string]*GitInfo + +// GitInfo holds information about a Git commit. +type GitInfo struct { + Hash string `json:"hash"` // Commit hash + AbbreviatedHash string `json:"abbreviatedHash"` // Abbreviated commit hash + Subject string `json:"subject"` // The commit message's subject/title line + AuthorName string `json:"authorName"` // The author name, respecting .mailmap + AuthorEmail string `json:"authorEmail"` // The author email address, respecting .mailmap + AuthorDate time.Time `json:"authorDate"` // The author date + CommitDate time.Time `json:"commitDate"` // The commit date + Body string `json:"body"` // The commit message body +} + +// Runner is an interface for running Git commands, +// as implemented buy *exec.Cmd. 
+type Runner interface { + Run() error +} + +// Options for the Map function +type Options struct { + Repository string // Path to the repository to map + Revision string // Use blank or HEAD for the currently active revision + GetGitCommandFunc func(stdout, stderr io.Writer, args ...string) (Runner, error) +} + +// Map creates a GitRepo with a file map from the given options. +func Map(opts Options) (*GitRepo, error) { + if opts.GetGitCommandFunc == nil { + opts.GetGitCommandFunc = func(stdout, stderr io.Writer, args ...string) (Runner, error) { + cmd := exec.Command(gitExec, args...) + cmd.Stdout = stdout + cmd.Stderr = stderr + return cmd, nil + } + } + + m := make(GitMap) + + // First get the top level repo path + absRepoPath, err := filepath.Abs(opts.Repository) + if err != nil { + return nil, err + } + + out, err := git(opts, "-C", opts.Repository, "rev-parse", "--show-cdup") + if err != nil { + return nil, err + } + + cdUp := strings.TrimSpace(string(out)) + topLevelPath := filepath.ToSlash(filepath.Join(absRepoPath, cdUp)) + + gitLogArgs := strings.Fields(fmt.Sprintf( + `--name-only --no-merges --format=format:%%x1e%%H%%x1f%%h%%x1f%%s%%x1f%%aN%%x1f%%aE%%x1f%%ai%%x1f%%ci%%x1f%%b%%x1d %s`, + opts.Revision, + )) + + gitLogArgs = append([]string{"-c", "diff.renames=0", "-c", "log.showSignature=0", "-C", opts.Repository, "log"}, gitLogArgs...) + out, err = git(opts, gitLogArgs...) 
+ if err != nil { + return nil, err + } + + entriesStr := strings.Trim(out, "\n\x1e'") + entries := strings.Split(entriesStr, "\x1e") + + for _, e := range entries { + lines := strings.Split(e, "\x1d") + gitInfo, err := toGitInfo(lines[0]) + if err != nil { + return nil, err + } + filenames := strings.Split(lines[1], "\n") + for _, filename := range filenames { + filename := strings.TrimSpace(filename) + if filename == "" { + continue + } + if _, ok := m[filename]; !ok { + m[filename] = gitInfo + } + } + } + + return &GitRepo{Files: m, TopLevelAbsPath: topLevelPath}, nil +} + +func git(opts Options, args ...string) (string, error) { + var outBuff bytes.Buffer + var errBuff bytes.Buffer + cmd, err := opts.GetGitCommandFunc(&outBuff, &errBuff, args...) + if err != nil { + return "", err + } + err = cmd.Run() + if err != nil { + if ee, ok := err.(*exec.Error); ok { + if ee.Err == exec.ErrNotFound { + return "", ErrGitNotFound + } + } + return "", errors.New(strings.TrimSpace(errBuff.String())) + } + return outBuff.String(), nil +} + +func toGitInfo(entry string) (*GitInfo, error) { + items := strings.Split(entry, "\x1f") + if len(items) == 7 { + items = append(items, "") + } + authorDate, err := time.Parse("2006-01-02 15:04:05 -0700", items[5]) + if err != nil { + return nil, err + } + commitDate, err := time.Parse("2006-01-02 15:04:05 -0700", items[6]) + if err != nil { + return nil, err + } + + return &GitInfo{ + Hash: items[0], + AbbreviatedHash: items[1], + Subject: items[2], + AuthorName: items[3], + AuthorEmail: items[4], + AuthorDate: authorDate, + CommitDate: commitDate, + Body: strings.TrimSpace(items[7]), + }, nil +} + +func init() { + initDefaults() +} + +func initDefaults() { + gitExec = "git" +} diff --git a/vendor/github.com/bep/goat/.gitignore b/vendor/github.com/bep/goat/.gitignore new file mode 100644 index 0000000..5040fb2 --- /dev/null +++ b/vendor/github.com/bep/goat/.gitignore @@ -0,0 +1,8 @@ +*.swp +.DS_Store + +vendor +examples/*.svg + +goat 
+goat.test diff --git a/vendor/github.com/bep/goat/LICENSE b/vendor/github.com/bep/goat/LICENSE new file mode 100644 index 0000000..07e3696 --- /dev/null +++ b/vendor/github.com/bep/goat/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Bryce Lampe + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/goat/README.md b/vendor/github.com/bep/goat/README.md new file mode 100644 index 0000000..bc77e7e --- /dev/null +++ b/vendor/github.com/bep/goat/README.md @@ -0,0 +1,173 @@ +# GoAT: Go ASCII Tool + +This is a Go implementation of [markdeep.mini.js]'s ASCII diagram +generation. + +## Usage + +```bash +$ go get github.com/bep/goat +$ goat my-cool-diagram.txt > my-cool-diagram.svg +``` + +## TODO + +- Dashed lines signaled by `:` or `=`. +- Bold lines signaled by ???. 
+ +## Examples + +Here are some SVGs and the ASCII input they were generated from: + +### Trees + +![Trees Example](https://cdn.rawgit.com/blampe/goat/master/examples/trees.svg) + +``` + . . . .--- 1 .-- 1 / 1 + / \ | | .---+ .-+ + + / \ .---+---. .--+--. | '--- 2 | '-- 2 / \ 2 + + + | | | | ---+ ---+ + + / \ / \ .-+-. .-+-. .+. .+. | .--- 3 | .-- 3 \ / 3 + / \ / \ | | | | | | | | '---+ '-+ + + 1 2 3 4 1 2 3 4 1 2 3 4 '--- 4 '-- 4 \ 4 +``` + +### Overlaps + +![Overlaps Example](https://cdn.rawgit.com/blampe/goat/master/examples/overlaps.svg) + +``` + .-. .-. .-. .-. .-. .-. + | | | | | | | | | | | | + .---------. .--+---+--. .--+---+--. .--| |--. .--+ +--. .------|--. + | | | | | | | | | | | | | | | | | | + '---------' '--+---+--' '--+---+--' '--| |--' '--+ +--' '--|------' + | | | | | | | | | | | | + '-' '-' '-' '-' '-' '-' +``` + +### Line Decorations + +![Line Decorations Example](https://cdn.rawgit.com/blampe/goat/master/examples/line-decorations.svg) + +``` + ________ o * * .--------------. + *---+--. | | o o | ^ \ / | .----------. | + | | '--* -+- | | v / \ / | | <------. | | + | '-----> .---(---' --->*<--- / .+->*<--o----' | | | | | + <--' ^ ^ | | | | | ^ \ | '--------' | | + \/ *-----' o |<----->| '-----' |__| v '------------' | + /\ *---------------' +``` + +### Line Ends + +![Line Ends Example](https://cdn.rawgit.com/blampe/goat/master/examples/line-ends.svg) + +``` + o--o *--o / / * o o o o o * * * * o o o o * * * * o o o o * * * * + o--* *--* v v ^ ^ | | | | | | | | \ \ \ \ \ \ \ \ / / / / / / / / + o--> *--> * o / / o * v ' o * v ' o * v \ o * v \ o * v / o * v / + o--- *--- + ^ ^ ^ ^ . . . . 
^ ^ ^ ^ \ \ \ \ ^ ^ ^ ^ / / / / + | | * o \ \ * o | | | | | | | | \ \ \ \ \ \ \ \ / / / / / / / / + v v ^ ^ v v ^ ^ o * v ' o * v ' o * v \ o * v \ o * v / o * v / + * o | | * o \ \ + + <--o <--* <--> <--- ---o ---* ---> ---- *<-- o<-- -->o -->* +``` + +### Dot Grids + +![Dot Grids Example](https://cdn.rawgit.com/blampe/goat/master/examples/dot-grids.svg) + +``` + o o o o o * * * * * * * o o * o o o * * * o o o · * · · · · · · + o o o o o * * * * * o o o o * o o o o * * * * * o * * · * * · · · · · · + o o o o o * * * * * o * o o o o o o o o * * * * * o o o o o · o · · o · · * * · + o o o o o * * * * * o * o o o o o o o * * * * o * o o · · · · o · · * · + o o o o o * * * * * * * * * o o o o * * * o * o · · · · · · · * +``` + +### Large Nodes + +![Large Node Example](https://cdn.rawgit.com/blampe/goat/master/examples/large-nodes.svg) + +``` + .---. .-. .-. .-. .-. + | A +----->| 1 +<---->| 2 |<----+ 4 +------------------. | 8 | + '---' '-' '+' '-' | '-' + | ^ | ^ + v | v | + .-. .-+-. .-. .-+-. .-. .+. .---. + | 3 +---->| B |<----->| 5 +---->| C +---->| 6 +---->| 7 |<---->| D | + '-' '---' '-' '---' '-' '-' '---' +``` + +### Small Grids + +![Small Grids Example](https://cdn.rawgit.com/blampe/goat/master/examples/small-grids.svg) + +``` + ___ ___ .---+---+---+---+---. .---+---+---+---. .---. .---. + ___/ \___/ \ | | | | | | / \ / \ / \ / \ / | +---+ | + / \___/ \___/ +---+---+---+---+---+ +---+---+---+---+ +---+ +---+ + \___/ b \___/ \ | | | b | | | \ / \a/ \b/ \ / \ | +---+ | + / a \___/ \___/ +---+---+---+---+---+ +---+---+---+---+ +---+ b +---+ + \___/ \___/ \ | | a | | | | / \ / \ / \ / \ / | a +---+ | + \___/ \___/ '---+---+---+---+---' '---+---+---+---' '---' '---' +``` + +### Big Grids + +![Big Grids Example](https://cdn.rawgit.com/blampe/goat/master/examples/big-grids.svg) + +``` + .----. .----. + / \ / \ .-----+-----+-----. + + +----+ +----. 
| | | | .-----+-----+-----+-----+ + \ / \ / \ | | | | / / / / / + +----+ B +----+ + +-----+-----+-----+ +-----+-----+-----+-----+ + / \ / \ / | | | | / / / / / + + A +----+ +----+ | | B | | +-----+-----+-----+-----+ + \ / \ / \ +-----+-----+-----+ / / A / B / / + '----+ +----+ + | | | | +-----+-----+-----+-----+ + \ / \ / | A | | | / / / / / + '----' '----' '-----+-----+-----' '-----+-----+-----+-----+ +``` + +### Complicated + +![Complicated Example](https://cdn.rawgit.com/blampe/goat/master/examples/complicated.svg) + +``` ++-------------------+ ^ .---. +| A Box |__.--.__ __.--> | .-. | | +| | '--' v | * |<--- | | ++-------------------+ '-' | | + Round *---(-. | + .-----------------. .-------. .----------. .-------. | | | + | Mixed Rounded | | | / Diagonals \ | | | | | | + | & Square Corners | '--. .--' / \ |---+---| '-)-' .--------. + '--+------------+-' .--. | '-------+--------' | | | | / Search / + | | | | '---. | '-------' | '-+------' + |<---------->| | | | v Interior | ^ + ' <---' '----' .-----------. ---. .--- v | + .------------------. Diag line | .-------. +---. \ / . | + | if (a > b) +---. .--->| | | | | Curved line \ / / \ | + | obj->fcn() | \ / | '-------' |<--' + / \ | + '------------------' '--' '--+--------' .--. .--. | .-. +Done?+-' + .---+-----. | ^ |\ | | /| .--+ | | \ / + | | | Join \|/ | | Curved | \| |/ | | \ | \ / + | | +----> o --o-- '-' Vertical '--' '--' '-- '--' + .---. + <--+---+-----' | /|\ | | 3 | + v not:line 'quotes' .-' '---' + .-. .---+--------. / A || B *bold* | ^ + | | | Not a dot | <---+---<-- A dash--is not a line v | + '-' '---------+--' / Nor/is this. --- +``` + +More examples are available [here](examples). 
+ +[markdeep.mini.js]: http://casual-effects.com/markdeep/ diff --git a/vendor/github.com/bep/goat/canvas.go b/vendor/github.com/bep/goat/canvas.go new file mode 100644 index 0000000..68ff69c --- /dev/null +++ b/vendor/github.com/bep/goat/canvas.go @@ -0,0 +1,1160 @@ +package goat + +import ( + "bufio" + "bytes" + "io" + "sort" +) + +// Characters where more than one line segment can come together. +var jointRunes = []rune{'.', '\'', '+', '*', 'o'} + +var reservedRunes = map[rune]bool{ + '-': true, + '_': true, + '|': true, + 'v': true, + '^': true, + '>': true, + '<': true, + 'o': true, + '*': true, + '+': true, + '.': true, + '\'': true, + '/': true, + '\\': true, + ')': true, + '(': true, + ' ': true, +} + +func contains(in []rune, r rune) bool { + for _, v := range in { + if r == v { + return true + } + } + return false +} + +func isJoint(r rune) bool { + return contains(jointRunes, r) +} + +func isDot(r rune) bool { + return r == 'o' || r == '*' +} + +func isTriangle(r rune) bool { + return r == '^' || r == 'v' || r == '<' || r == '>' +} + +// Canvas represents a 2D ASCII rectangle. +type Canvas struct { + Width int + Height int + data map[Index]rune + text map[Index]rune +} + +func (c *Canvas) String() string { + var buffer bytes.Buffer + + for h := 0; h < c.Height; h++ { + for w := 0; w < c.Width; w++ { + idx := Index{w, h} + + // Grab from our text buffer and if nothing's there try the data + // buffer. + r := c.text[idx] + if r == 0 { + r = c.runeAt(idx) + } + + _, err := buffer.WriteRune(r) + if err != nil { + continue + } + } + + err := buffer.WriteByte('\n') + if err != nil { + continue + } + } + + return buffer.String() +} + +func (c *Canvas) heightScreen() int { + return c.Height*16 + 8 + 1 +} + +func (c *Canvas) widthScreen() int { + return (c.Width + 1) * 8 +} + +func (c *Canvas) runeAt(i Index) rune { + if val, ok := c.data[i]; ok { + return val + } + + return ' ' +} + +// NewCanvas creates a new canvas with contents read from the given io.Reader. 
+// Content should be newline delimited. +func NewCanvas(in io.Reader) Canvas { + width := 0 + height := 0 + + scanner := bufio.NewScanner(in) + + data := make(map[Index]rune) + + for scanner.Scan() { + line := scanner.Text() + + w := 0 + // Can't use index here because it corresponds to unicode offsets + // instead of logical characters. + for _, c := range line { + idx := Index{x: w, y: height} + data[idx] = rune(c) + w++ + } + + if w > width { + width = w + } + height++ + } + + text := make(map[Index]rune) + + c := Canvas{ + Width: width, + Height: height, + data: data, + text: text, + } + + // Extract everything we detect as text to make diagram parsing easier. + for idx := range leftRight(width, height) { + if c.isText(idx) { + c.text[idx] = c.runeAt(idx) + } + } + for idx := range c.text { + delete(c.data, idx) + } + + return c +} + +// Drawable represents anything that can Draw itself. +type Drawable interface { + Draw(out io.Writer) +} + +// Line represents a straight segment between two points. +type Line struct { + start Index + stop Index + // dashed bool + needsNudgingDown bool + needsNudgingLeft bool + needsNudgingRight bool + needsTinyNudgingLeft bool + needsTinyNudgingRight bool + + // This is a line segment all by itself. This centers the segment around + // the midline. + lonely bool + // N or S. Only useful for half steps - chops of this half of the line. 
+ chop Orientation + + orientation Orientation + + state lineState +} + +type lineState int + +const ( + _Unstarted lineState = iota + _Started +) + +func (l *Line) started() bool { + return l.state == _Started +} + +func (l *Line) setStart(i Index) { + if l.state == _Unstarted { + l.start = i + l.stop = i + l.state = _Started + } +} + +func (l *Line) setStop(i Index) { + if l.state == _Started { + l.stop = i + } +} + +func (l *Line) goesSomewhere() bool { + return l.start != l.stop +} + +func (l *Line) horizontal() bool { + return l.orientation == E || l.orientation == W +} + +func (l *Line) vertical() bool { + return l.orientation == N || l.orientation == S +} + +func (l *Line) diagonal() bool { + return l.orientation == NE || l.orientation == SE || l.orientation == SW || l.orientation == NW +} + +// Triangle corresponds to "^", "v", "<" and ">" runes in the absence of +// surrounding alphanumerics. +type Triangle struct { + start Index + orientation Orientation + needsNudging bool +} + +// Circle corresponds to "o" or "*" runes in the absence of surrounding +// alphanumerics. +type Circle struct { + start Index + bold bool +} + +// RoundedCorner corresponds to combinations of "-." or "-'". +type RoundedCorner struct { + start Index + orientation Orientation +} + +// Text corresponds to any runes not reserved for diagrams, or reserved runes +// surrounded by alphanumerics. +type Text struct { + start Index + contents string +} + +// Bridge correspondes to combinations of "-)-" or "-(-" and is displayed as +// the vertical line "hopping over" the horizontal. +type Bridge struct { + start Index + orientation Orientation +} + +// Orientation represents the primary direction that a Drawable is facing. +type Orientation int + +const ( + NONE Orientation = iota // No orientation; no structure present. 
+ N // North + NE // Northeast + NW // Northwest + S // South + SE // Southeast + SW // Southwest + E // East + W // West +) + +func (c *Canvas) WriteSVGBody(dst io.Writer) { + writeBytes(dst, "\n") + + for _, l := range c.Lines() { + l.Draw(dst) + } + + for _, t := range c.Triangles() { + t.Draw(dst) + } + + for _, c := range c.RoundedCorners() { + c.Draw(dst) + } + + for _, c := range c.Circles() { + c.Draw(dst) + } + + for _, b := range c.Bridges() { + b.Draw(dst) + } + + for _, t := range c.Text() { + t.Draw(dst) + } + + writeBytes(dst, "\n") +} + +// Lines returns a slice of all Line drawables that we can detect -- in all +// possible orientations. +func (c *Canvas) Lines() []Line { + horizontalMidlines := c.getLinesForSegment('-') + + diagUpLines := c.getLinesForSegment('/') + for i, l := range diagUpLines { + // /_ + if c.runeAt(l.start.east()) == '_' { + diagUpLines[i].needsTinyNudgingLeft = true + } + + // _ + // / + if c.runeAt(l.stop.north()) == '_' { + diagUpLines[i].needsTinyNudgingRight = true + } + + // _ + // / + if !l.lonely && c.runeAt(l.stop.nEast()) == '_' { + diagUpLines[i].needsTinyNudgingRight = true + } + + // _/ + if !l.lonely && c.runeAt(l.start.west()) == '_' { + diagUpLines[i].needsTinyNudgingLeft = true + } + + // \ + // / + if !l.lonely && c.runeAt(l.stop.north()) == '\\' { + diagUpLines[i].needsTinyNudgingRight = true + } + + // / + // \ + if !l.lonely && c.runeAt(l.start.south()) == '\\' { + diagUpLines[i].needsTinyNudgingLeft = true + } + } + + diagDownLines := c.getLinesForSegment('\\') + for i, l := range diagDownLines { + // _\ + if c.runeAt(l.stop.west()) == '_' { + diagDownLines[i].needsTinyNudgingRight = true + } + + // _ + // \ + if c.runeAt(l.start.north()) == '_' { + diagDownLines[i].needsTinyNudgingLeft = true + } + + // _ + // \ + if !l.lonely && c.runeAt(l.start.nWest()) == '_' { + diagDownLines[i].needsTinyNudgingLeft = true + } + + // \_ + if !l.lonely && c.runeAt(l.stop.east()) == '_' { + 
diagDownLines[i].needsTinyNudgingRight = true + } + + // \ + // / + if !l.lonely && c.runeAt(l.stop.south()) == '/' { + diagDownLines[i].needsTinyNudgingRight = true + } + + // / + // \ + if !l.lonely && c.runeAt(l.start.north()) == '/' { + diagDownLines[i].needsTinyNudgingLeft = true + } + } + + horizontalBaselines := c.getLinesForSegment('_') + for i, l := range horizontalBaselines { + // TODO: make this nudge an orientation + horizontalBaselines[i].needsNudgingDown = true + + // _ + // _| | + if c.runeAt(l.stop.sEast()) == '|' || c.runeAt(l.stop.nEast()) == '|' { + horizontalBaselines[i].needsNudgingRight = true + } + + // _ + // | _| + if c.runeAt(l.start.sWest()) == '|' || c.runeAt(l.start.nWest()) == '|' { + horizontalBaselines[i].needsNudgingLeft = true + } + + // _ + // _/ \ + if c.runeAt(l.stop.east()) == '/' || c.runeAt(l.stop.sEast()) == '\\' { + horizontalBaselines[i].needsTinyNudgingRight = true + } + + // _ + // \_ / + if c.runeAt(l.start.west()) == '\\' || c.runeAt(l.start.sWest()) == '/' { + horizontalBaselines[i].needsTinyNudgingLeft = true + } + + // _\ + if c.runeAt(l.stop.east()) == '\\' { + horizontalBaselines[i].needsNudgingRight = true + horizontalBaselines[i].needsTinyNudgingRight = true + } + + // + // /_ + if c.runeAt(l.start.west()) == '/' { + horizontalBaselines[i].needsNudgingLeft = true + horizontalBaselines[i].needsTinyNudgingLeft = true + } + // _ + // / + if c.runeAt(l.stop.south()) == '/' { + horizontalBaselines[i].needsTinyNudgingRight = true + } + + // _ + // \ + if c.runeAt(l.start.south()) == '\\' { + horizontalBaselines[i].needsTinyNudgingLeft = true + } + + // _ + // ' + if c.runeAt(l.start.sWest()) == '\'' { + horizontalBaselines[i].needsNudgingLeft = true + } + + // _ + // ' + if c.runeAt(l.stop.sEast()) == '\'' { + horizontalBaselines[i].needsNudgingRight = true + } + } + + verticalLines := c.getLinesForSegment('|') + + var lines []Line + + lines = append(lines, horizontalMidlines...) 
+ lines = append(lines, horizontalBaselines...) + lines = append(lines, verticalLines...) + lines = append(lines, diagUpLines...) + lines = append(lines, diagDownLines...) + lines = append(lines, c.HalfSteps()...) + + return lines +} + +func newHalfStep(i Index, chop Orientation) Line { + return Line{ + start: i, + stop: i.south(), + lonely: true, + chop: chop, + orientation: S, + } +} + +func (c *Canvas) HalfSteps() []Line { + var lines []Line + + for idx := range upDown(c.Width, c.Height) { + if o := c.partOfHalfStep(idx); o != NONE { + lines = append( + lines, + newHalfStep(idx, o), + ) + } + } + + return lines +} + +func (c *Canvas) getLinesForSegment(segment rune) []Line { + var iter canvasIterator + var orientation Orientation + var passThroughs []rune + + switch segment { + case '-': + iter = leftRight + orientation = E + passThroughs = append(jointRunes, '<', '>', '(', ')') + case '_': + iter = leftRight + orientation = E + passThroughs = append(jointRunes, '|') + case '|': + iter = upDown + orientation = S + passThroughs = append(jointRunes, '^', 'v') + case '/': + iter = diagUp + orientation = NE + passThroughs = append(jointRunes, 'o', '*', '<', '>', '^', 'v', '|') + case '\\': + iter = diagDown + orientation = SE + passThroughs = append(jointRunes, 'o', '*', '<', '>', '^', 'v', '|') + default: + return nil + } + + return c.getLines(iter, segment, passThroughs, orientation) +} + +// ci: the order that we traverse locations on the canvas. +// segment: the primary character we're tracking for this line. +// passThroughs: characters the line segment is allowed to be drawn underneath +// (without terminating the line). +// orientation: the orientation for this line. +func (c *Canvas) getLines( + ci canvasIterator, + segment rune, + passThroughs []rune, + o Orientation, +) []Line { + + var lines []Line + + // Helper to throw the current line we're tracking on to the slice and + // start a new one. 
+ snip := func(l Line) Line { + // Only collect lines that actually go somewhere or are isolated + // segments. + if l.goesSomewhere() { + lines = append(lines, l) + } + + return Line{orientation: o} + } + + currentLine := Line{orientation: o} + lastSeenRune := ' ' + + for idx := range ci(c.Width+1, c.Height+1) { + r := c.runeAt(idx) + + isSegment := r == segment + isPassThrough := contains(passThroughs, r) + isRoundedCorner := c.isRoundedCorner(idx) + isDot := isDot(r) + isTriangle := isTriangle(r) + + justPassedThrough := contains(passThroughs, lastSeenRune) + + shouldKeep := (isSegment || isPassThrough) && isRoundedCorner == NONE + + // This is an edge case where we have a rounded corner... that's also a + // joint... attached to orthogonal line, e.g.: + // + // '+-- + // | + // + // TODO: This also depends on the orientation of the corner and our + // line. + // NW / NE line can't go with EW/NS lines, vertical is OK though. + if isRoundedCorner != NONE && o != E && (c.partOfVerticalLine(idx) || c.partOfDiagonalLine(idx)) { + shouldKeep = true + } + + // Don't connect | to > for diagonal lines or )) for horizontal lines. + if isPassThrough && justPassedThrough && o != S { + currentLine = snip(currentLine) + } + + // Don't connect o to o, + to o, etc. This character is a new pass-through + // so we still want to respect shouldKeep; we just don't want to draw + // the existing line through this cell. + if justPassedThrough && (isDot || isTriangle) { + currentLine = snip(currentLine) + } + + switch currentLine.state { + case _Unstarted: + if shouldKeep { + currentLine.setStart(idx) + } + case _Started: + if !shouldKeep { + // Snip the existing line, don't add the current cell to it + // *unless* its a line segment all by itself. If it is, keep a + // record that it's an individual segment because we need to + // adjust later in the / and \ cases. 
+ if !currentLine.goesSomewhere() && lastSeenRune == segment { + if !c.partOfRoundedCorner(currentLine.start) { + currentLine.setStop(idx) + currentLine.lonely = true + } + } + currentLine = snip(currentLine) + } else if isPassThrough { + // Snip the existing line but include the current pass-through + // character because we may be continuing the line. + currentLine.setStop(idx) + currentLine = snip(currentLine) + currentLine.setStart(idx) + } else if shouldKeep { + // Keep the line going and extend it by this character. + currentLine.setStop(idx) + } + } + + lastSeenRune = r + } + + return lines +} + +// Triangles returns a slice of all detectable Triangles. +func (c *Canvas) Triangles() []Drawable { + var triangles []Drawable + + o := NONE + + for idx := range upDown(c.Width, c.Height) { + needsNudging := false + start := idx + + r := c.runeAt(idx) + + if !isTriangle(r) { + continue + } + + // Identify our orientation and nudge the triangle to touch any + // adjacent walls. + switch r { + case '^': + o = N + // ^ and ^ + // / \ + if c.runeAt(start.sWest()) == '/' { + o = NE + } else if c.runeAt(start.sEast()) == '\\' { + o = NW + } + case 'v': + o = S + // / and \ + // v v + if c.runeAt(start.nEast()) == '/' { + o = SW + } else if c.runeAt(start.nWest()) == '\\' { + o = SE + } + case '<': + o = W + case '>': + o = E + } + + // Determine if we need to snap the triangle to something and, if so, + // draw a tail if we need to. + switch o { + case N: + r := c.runeAt(start.north()) + if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append(triangles, newHalfStep(start, N)) + } + case NW: + r := c.runeAt(start.nWest()) + // Need to draw a tail. 
+ if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append( + triangles, + Line{ + start: start.nWest(), + stop: start, + orientation: SE, + }, + ) + } + case NE: + r := c.runeAt(start.nEast()) + if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append( + triangles, + Line{ + start: start, + stop: start.nEast(), + orientation: NE, + }, + ) + } + case S: + r := c.runeAt(start.south()) + if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append(triangles, newHalfStep(start, S)) + } + case SE: + r := c.runeAt(start.sEast()) + if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append( + triangles, + Line{ + start: start, + stop: start.sEast(), + orientation: SE, + }, + ) + } + case SW: + r := c.runeAt(start.sWest()) + if r == '-' || isJoint(r) && !isDot(r) { + needsNudging = true + triangles = append( + triangles, + Line{ + start: start.sWest(), + stop: start, + orientation: NE, + }, + ) + } + case W: + r := c.runeAt(start.west()) + if isDot(r) { + needsNudging = true + } + case E: + r := c.runeAt(start.east()) + if isDot(r) { + needsNudging = true + } + } + + triangles = append( + triangles, + Triangle{ + start: start, + orientation: o, + needsNudging: needsNudging, + }, + ) + } + + return triangles +} + +// Circles returns a slice of all 'o' and '*' characters not considered text. +func (c *Canvas) Circles() []Circle { + var circles []Circle + + for idx := range upDown(c.Width, c.Height) { + // TODO INCOMING + if c.runeAt(idx) == 'o' { + circles = append(circles, Circle{start: idx}) + } else if c.runeAt(idx) == '*' { + circles = append(circles, Circle{start: idx, bold: true}) + } + } + + return circles +} + +// RoundedCorners returns a slice of all curvy corners in the diagram. 
+func (c *Canvas) RoundedCorners() []RoundedCorner { + var corners []RoundedCorner + + for idx := range leftRight(c.Width, c.Height) { + if o := c.isRoundedCorner(idx); o != NONE { + corners = append( + corners, + RoundedCorner{start: idx, orientation: o}, + ) + } + } + + return corners +} + +// For . and ' characters this will return a non-NONE orientation if the +// character falls on a rounded corner. +func (c *Canvas) isRoundedCorner(i Index) Orientation { + r := c.runeAt(i) + + if !isJoint(r) { + return NONE + } + + left := i.west() + right := i.east() + lowerLeft := i.sWest() + lowerRight := i.sEast() + upperLeft := i.nWest() + upperRight := i.nEast() + + opensUp := r == '\'' || r == '+' + opensDown := r == '.' || r == '+' + + dashRight := c.runeAt(right) == '-' || c.runeAt(right) == '+' || c.runeAt(right) == '_' || c.runeAt(upperRight) == '_' + dashLeft := c.runeAt(left) == '-' || c.runeAt(left) == '+' || c.runeAt(left) == '_' || c.runeAt(upperLeft) == '_' + + isVerticalSegment := func(i Index) bool { + r := c.runeAt(i) + return r == '|' || r == '+' || r == ')' || r == '(' || isDot(r) + } + + // .- or .- + // | + + if opensDown && dashRight && isVerticalSegment(lowerLeft) { + return NW + } + + // -. or -. or -. or _. or -. + // | + ) ) o + if opensDown && dashLeft && isVerticalSegment(lowerRight) { + return NE + } + + // | or + or | or + or + or_ ) + // -' -' +' +' ++ ' + if opensUp && dashLeft && isVerticalSegment(upperRight) { + return SE + } + + // | or + + // '- '- + if opensUp && dashRight && isVerticalSegment(upperLeft) { + return SW + } + + return NONE +} + +// A wrapper to enable sorting. +type indexRuneDrawable struct { + i Index + r rune + Drawable +} + +// Text returns a slice of all text characters not belonging to part of the diagram. +// How these characters are identified is rather complicated. 
+func (c *Canvas) Text() []Drawable { + newLine := func(i Index, r rune, o Orientation) Drawable { + stop := i + + switch o { + case NE: + stop = i.nEast() + case SE: + stop = i.sEast() + } + + l := Line{ + start: i, + stop: stop, + lonely: true, + orientation: o, + } + + return indexRuneDrawable{ + Drawable: l, + i: i, + r: r, + } + } + + text := make([]Drawable, len(c.text)) + var j int + + for i, r := range c.text { + switch r { + // Weird unicode edge cases that markdeep handles. These get + // substituted with lines. + case '╱': + text[j] = newLine(i, r, NE) + case '╲': + text[j] = newLine(i, r, SE) + case '╳': + text[j] = newLine(i, r, NE) + default: + text[j] = indexRuneDrawable{Drawable: Text{start: i, contents: string(r)}, i: i, r: r} + } + j++ + } + + sort.Slice(text, func(i, j int) bool { + ti, tj := text[i].(indexRuneDrawable), text[j].(indexRuneDrawable) + + if ti.i.x == tj.i.x { + return ti.i.y < tj.i.y || (ti.i.y == tj.i.y && ti.r < tj.r) + } + + return ti.i.x < tj.i.x + }) + + return text +} + +// Bridges returns a slice of all bridges, "-)-" or "-(-". +func (c *Canvas) Bridges() []Drawable { + var bridges []Drawable + + for idx := range leftRight(c.Width, c.Height) { + if o := c.isBridge(idx); o != NONE { + bridges = append( + bridges, + newHalfStep(idx.north(), S), + newHalfStep(idx.south(), N), + Bridge{ + start: idx, + orientation: o, + }, + ) + } + } + + return bridges +} + +// -)- or -(- or +func (c *Canvas) isBridge(i Index) Orientation { + r := c.runeAt(i) + + left := c.runeAt(i.west()) + right := c.runeAt(i.east()) + + if left != '-' || right != '-' { + return NONE + } + + if r == '(' { + return W + } + + if r == ')' { + return E + } + + return NONE +} + +func (c *Canvas) isText(i Index) bool { + // Short circuit, we already saw this index and called it text. 
+ if _, isText := c.text[i]; isText { + return true + } + + if c.runeAt(i) == ' ' { + return false + } + + if !c.isReserved(i) { + return true + } + + // This is a reserved character with an incoming line (e.g., "|") above it, + // so call it non-text. + if c.hasLineAboveOrBelow(i) { + return false + } + + // Reserved characters like "o" or "*" with letters sitting next to them + // are probably text. + // TODO: Fix this to count contiguous blocks of text. If we had a bunch of + // reserved characters previously that were counted as text then this + // should be as well, e.g., "A----B". + + // We're reserved but surrounded by text and probably part of an existing + // word. Use a hash lookup on the left to preserve chains of + // reserved-but-text characters like "foo----bar". + if _, textLeft := c.text[i.west()]; textLeft || !c.isReserved(i.east()) { + return true + } + + w := i.west() + e := i.east() + + if !(c.runeAt(w) == ' ' && c.runeAt(e) == ' ') { + return false + } + + // Circles surrounded by whitespace shouldn't be shown as text. + if c.runeAt(i) == 'o' || c.runeAt(i) == '*' { + return false + } + + // We're surrounded by whitespace + text on either side. + if !c.isReserved(w.west()) || !c.isReserved(e.east()) { + return true + } + + return false +} + +// Returns true if the character at this index is not reserved for diagrams. +// Characters like "o" need more context (e.g., are other text characters +// nearby) to determine whether they're part of a diagram. +func (c *Canvas) isReserved(i Index) bool { + r := c.runeAt(i) + _, isReserved := reservedRunes[r] + return isReserved +} + +// Returns true if it looks like this character belongs to anything besides a +// horizontal line. This is the context we use to determine if a reserved +// character is text or not. 
+func (c *Canvas) hasLineAboveOrBelow(i Index) bool { + r := c.runeAt(i) + + switch r { + case '*', 'o', '+', 'v', '^': + return c.partOfDiagonalLine(i) || c.partOfVerticalLine(i) + case '|': + return c.partOfVerticalLine(i) || c.partOfRoundedCorner(i) + case '/', '\\': + return c.partOfDiagonalLine(i) + case '-': + return c.partOfRoundedCorner(i) + case '(', ')': + return c.partOfVerticalLine(i) + } + + return false +} + +// Returns true if a "|" segment passes through this index. +func (c *Canvas) partOfVerticalLine(i Index) bool { + this := c.runeAt(i) + north := c.runeAt(i.north()) + south := c.runeAt(i.south()) + + jointAboveMe := this == '|' && isJoint(north) + + if north == '|' || jointAboveMe { + return true + } + + jointBelowMe := this == '|' && isJoint(south) + + if south == '|' || jointBelowMe { + return true + } + + return false +} + +// Return true if a "--" segment passes through this index. +func (c *Canvas) partOfHorizontalLine(i Index) bool { + return c.runeAt(i.east()) == '-' || c.runeAt(i.west()) == '-' +} + +func (c *Canvas) partOfDiagonalLine(i Index) bool { + r := c.runeAt(i) + + n := c.runeAt(i.north()) + s := c.runeAt(i.south()) + nw := c.runeAt(i.nWest()) + se := c.runeAt(i.sEast()) + ne := c.runeAt(i.nEast()) + sw := c.runeAt(i.sWest()) + + switch r { + // Diagonal segments can be connected to joint or other segments. + case '/': + return ne == r || sw == r || isJoint(ne) || isJoint(sw) || n == '\\' || s == '\\' + case '\\': + return nw == r || se == r || isJoint(nw) || isJoint(se) || n == '/' || s == '/' + + // For everything else just check if we have segments next to us. + default: + return nw == '\\' || ne == '/' || sw == '/' || se == '\\' + } +} + +// For "-" and "|" characters returns true if they could be part of a rounded +// corner. +func (c *Canvas) partOfRoundedCorner(i Index) bool { + r := c.runeAt(i) + + switch r { + case '-': + dotNext := c.runeAt(i.west()) == '.' || c.runeAt(i.east()) == '.' 
+ hyphenNext := c.runeAt(i.west()) == '\'' || c.runeAt(i.east()) == '\'' + return dotNext || hyphenNext + + case '|': + dotAbove := c.runeAt(i.nWest()) == '.' || c.runeAt(i.nEast()) == '.' + hyphenBelow := c.runeAt(i.sWest()) == '\'' || c.runeAt(i.sEast()) == '\'' + return dotAbove || hyphenBelow + } + + return false +} + +// TODO: Have this take care of all the vertical line nudging. +func (c *Canvas) partOfHalfStep(i Index) Orientation { + r := c.runeAt(i) + if r != '\'' && r != '.' && r != '|' { + return NONE + } + + if c.isRoundedCorner(i) != NONE { + return NONE + } + + w := c.runeAt(i.west()) + e := c.runeAt(i.east()) + n := c.runeAt(i.north()) + s := c.runeAt(i.south()) + nw := c.runeAt(i.nWest()) + ne := c.runeAt(i.nEast()) + + switch r { + case '\'': + // _ _ + // '- -' + if (nw == '_' && e == '-') || (w == '-' && ne == '_') { + return N + } + case '.': + // _.- -._ + if (w == '-' && e == '_') || (w == '_' && e == '-') { + return S + } + case '|': + //// _ _ + //// | | + if n != '|' && (ne == '_' || nw == '_') { + return N + } + + if n == '-' { + return N + } + + //// _| |_ + if s != '|' && (w == '_' || e == '_') { + return S + } + + if s == '-' { + return S + } + } + return NONE +} diff --git a/vendor/github.com/bep/goat/index.go b/vendor/github.com/bep/goat/index.go new file mode 100644 index 0000000..6061857 --- /dev/null +++ b/vendor/github.com/bep/goat/index.go @@ -0,0 +1,51 @@ +package goat + +// Index represents a position within an ASCII diagram. +type Index struct { + x int + y int +} + +// Pixel represents the on-screen coordinates for an Index. 
+type Pixel Index + +func (i *Index) asPixel() Pixel { + return Pixel{x: i.x * 8, y: i.y * 16} +} + +func (i *Index) asPixelXY() (int, int) { + p := i.asPixel() + return p.x, p.y +} + +func (i *Index) east() Index { + return Index{i.x + 1, i.y} +} + +func (i *Index) west() Index { + return Index{i.x - 1, i.y} +} + +func (i *Index) north() Index { + return Index{i.x, i.y - 1} +} + +func (i *Index) south() Index { + return Index{i.x, i.y + 1} +} + +func (i *Index) nWest() Index { + return Index{i.x - 1, i.y - 1} +} + +func (i *Index) nEast() Index { + return Index{i.x + 1, i.y - 1} +} + +func (i *Index) sWest() Index { + return Index{i.x - 1, i.y + 1} +} + +func (i *Index) sEast() Index { + return Index{i.x + 1, i.y + 1} +} diff --git a/vendor/github.com/bep/goat/iter.go b/vendor/github.com/bep/goat/iter.go new file mode 100644 index 0000000..e67597d --- /dev/null +++ b/vendor/github.com/bep/goat/iter.go @@ -0,0 +1,76 @@ +package goat + +type canvasIterator func(width int, height int) chan Index + +func upDown(width int, height int) chan Index { + c := make(chan Index, width*height) + + go func() { + for w := 0; w < width; w++ { + for h := 0; h < height; h++ { + c <- Index{w, h} + } + } + close(c) + }() + + return c +} + +func leftRight(width int, height int) chan Index { + c := make(chan Index, width*height) + + // Transpose an upDown order. 
+ go func() { + for i := range upDown(height, width) { + c <- Index{i.y, i.x} + } + + close(c) + }() + + return c +} + +func diagDown(width int, height int) chan Index { + c := make(chan Index, width*height) + + go func() { + minSum := -height + 1 + maxSum := width + + for sum := minSum; sum <= maxSum; sum++ { + for w := 0; w < width; w++ { + for h := 0; h < height; h++ { + if w-h == sum { + c <- Index{w, h} + } + } + } + } + close(c) + }() + + return c +} + +func diagUp(width int, height int) chan Index { + c := make(chan Index, width*height) + + go func() { + maxSum := width + height - 2 + + for sum := 0; sum <= maxSum; sum++ { + for w := 0; w < width; w++ { + for h := 0; h < height; h++ { + if h+w == sum { + c <- Index{w, h} + } + } + } + } + close(c) + }() + + return c +} diff --git a/vendor/github.com/bep/goat/svg.go b/vendor/github.com/bep/goat/svg.go new file mode 100644 index 0000000..59ad1e6 --- /dev/null +++ b/vendor/github.com/bep/goat/svg.go @@ -0,0 +1,380 @@ +package goat + +import ( + "bytes" + "fmt" + "io" +) + +type SVG struct { + Body string + Width int + Height int +} + +func (s SVG) String() string { + return fmt.Sprintf("\n%s\n", + "diagram", + "http://www.w3.org/2000/svg", + "1.1", s.Height, s.Width, s.Body) +} + +// BuildSVG reads in a newline-delimited ASCII diagram from src and returns a SVG. +func BuildSVG(src io.Reader) SVG { + var buff bytes.Buffer + canvas := NewCanvas(src) + canvas.WriteSVGBody(&buff) + return SVG{ + Body: buff.String(), + Width: canvas.widthScreen(), + Height: canvas.heightScreen(), + } +} + +// BuildAndWriteSVG reads in a newline-delimited ASCII diagram from src and writes a +// corresponding SVG diagram to dst. 
+func BuildAndWriteSVG(src io.Reader, dst io.Writer) { + canvas := NewCanvas(src) + + // Preamble + writeBytes(dst, + "\n", + "diagram", + "http://www.w3.org/2000/svg", + "1.1", + canvas.heightScreen(), canvas.widthScreen(), + ) + + canvas.WriteSVGBody(dst) + + writeBytes(dst, "\n") +} + +func writeBytes(out io.Writer, format string, args ...interface{}) { + bytesOut := fmt.Sprintf(format, args...) + + _, err := out.Write([]byte(bytesOut)) + if err != nil { + panic(nil) + } +} + +// Draw a straight line as an SVG path. +func (l Line) Draw(out io.Writer) { + start := l.start.asPixel() + stop := l.stop.asPixel() + + // For cases when a vertical line hits a perpendicular like this: + // + // | | + // | or v + // --- --- + // + // We need to nudge the vertical line half a vertical cell in the + // appropriate direction in order to meet up cleanly with the midline of + // the cell next to it. + + // A diagonal segment all by itself needs to be shifted slightly to line + // up with _ baselines: + // _ + // \_ + // + // TODO make this a method on Line to return accurate pixel + if l.lonely { + switch l.orientation { + case NE: + start.x -= 4 + stop.x -= 4 + start.y += 8 + stop.y += 8 + case SE: + start.x -= 4 + stop.x -= 4 + start.y -= 8 + stop.y -= 8 + case S: + start.y -= 8 + stop.y -= 8 + } + + // Half steps + switch l.chop { + case N: + stop.y -= 8 + case S: + start.y += 8 + } + } + + if l.needsNudgingDown { + stop.y += 8 + if l.horizontal() { + start.y += 8 + } + } + + if l.needsNudgingLeft { + start.x -= 8 + } + + if l.needsNudgingRight { + stop.x += 8 + } + + if l.needsTinyNudgingLeft { + start.x -= 4 + if l.orientation == NE { + start.y += 8 + } else if l.orientation == SE { + start.y -= 8 + } + } + + if l.needsTinyNudgingRight { + stop.x += 4 + if l.orientation == NE { + stop.y -= 8 + } else if l.orientation == SE { + stop.y += 8 + } + } + + writeBytes(out, + "\n", + start.x, start.y, + stop.x, stop.y, + ) +} + +// Draw a solid triable as an SVG polygon element. 
+func (t Triangle) Draw(out io.Writer) { + // https://www.w3.org/TR/SVG/shapes.html#PolygonElement + + /* + +-----+-----+ + | /|\ | + | / | \ | + x +- / -+- \ -+ + | / | \ | + |/ | \| + +-----+-----+ + y + */ + + x, y := float32(t.start.asPixel().x), float32(t.start.asPixel().y) + r := 0.0 + + x0 := x + 8 + y0 := y + x1 := x - 4 + y1 := y - 0.35*16 + x2 := x - 4 + y2 := y + 0.35*16 + + switch t.orientation { + case N: + r = 270 + if t.needsNudging { + x0 += 8 + x1 += 8 + x2 += 8 + } + case NE: + r = 300 + x0 += 4 + x1 += 4 + x2 += 4 + if t.needsNudging { + x0 += 6 + x1 += 6 + x2 += 6 + } + case NW: + r = 240 + x0 += 4 + x1 += 4 + x2 += 4 + if t.needsNudging { + x0 += 6 + x1 += 6 + x2 += 6 + } + case W: + r = 180 + if t.needsNudging { + x0 -= 8 + x1 -= 8 + x2 -= 8 + } + case E: + r = 0 + if t.needsNudging { + x0 -= 8 + x1 -= 8 + x2 -= 8 + } + case S: + r = 90 + if t.needsNudging { + x0 += 8 + x1 += 8 + x2 += 8 + } + case SW: + r = 120 + x0 += 4 + x1 += 4 + x2 += 4 + if t.needsNudging { + x0 += 6 + x1 += 6 + x2 += 6 + } + case SE: + r = 60 + x0 += 4 + x1 += 4 + x2 += 4 + if t.needsNudging { + x0 += 6 + x1 += 6 + x2 += 6 + } + } + + writeBytes(out, + "\n", + x0, y0, + x1, y1, + x2, y2, + r, + x, y, + ) +} + +// Draw a solid circle as an SVG circle element. +func (c *Circle) Draw(out io.Writer) { + fill := "#fff" + + if c.bold { + fill = "currentColor" + } + + pixel := c.start.asPixel() + + writeBytes(out, + "\n", + pixel.x, + pixel.y, + fill, + ) +} + +// Draw a single text character as an SVG text element. +func (t Text) Draw(out io.Writer) { + p := t.start.asPixel() + c := t.contents + + opacity := 0 + + // Markdeep special-cases these character and treats them like a + // checkerboard. 
+ switch c { + case "â–‰": + opacity = -64 + case "â–“": + opacity = 64 + case "â–’": + opacity = 128 + case "â–‘": + opacity = 191 + } + + fill := "currentColor" + if opacity > 0 { + fill = fmt.Sprintf("rgb(%d,%d,%d)", opacity, opacity, opacity) + } + + if opacity != 0 { + writeBytes(out, + "", + p.x-4, p.y-8, + fill, + ) + return + } + + // Escape for XML + switch c { + case "&": + c = "&" + case ">": + c = ">" + case "<": + c = "<" + } + + writeBytes(out, + "%s\n", + p.x, p.y+4, c, + ) +} + +// Draw a rounded corner as an SVG elliptical arc element. +func (c *RoundedCorner) Draw(out io.Writer) { + // https://www.w3.org/TR/SVG/paths.html#PathDataEllipticalArcCommands + + x, y := c.start.asPixelXY() + startX, startY, endX, endY, sweepFlag := 0, 0, 0, 0, 0 + + switch c.orientation { + case NW: + startX = x + 8 + startY = y + endX = x - 8 + endY = y + 16 + case NE: + sweepFlag = 1 + startX = x - 8 + startY = y + endX = x + 8 + endY = y + 16 + case SE: + sweepFlag = 1 + startX = x + 8 + startY = y - 16 + endX = x - 8 + endY = y + case SW: + startX = x - 8 + startY = y - 16 + endX = x + 8 + endY = y + } + + writeBytes(out, + "\n", + startX, + startY, + sweepFlag, + endX, + endY, + ) +} + +// Draw a bridge as an SVG elliptical arc element. 
+func (b Bridge) Draw(out io.Writer) { + x, y := b.start.asPixelXY() + sweepFlag := 1 + + if b.orientation == W { + sweepFlag = 0 + } + + writeBytes(out, + "\n", + x, y-8, + sweepFlag, + x, y+8, + ) +} diff --git a/vendor/github.com/bep/godartsass/.gitignore b/vendor/github.com/bep/godartsass/.gitignore new file mode 100644 index 0000000..dff079f --- /dev/null +++ b/vendor/github.com/bep/godartsass/.gitignore @@ -0,0 +1,17 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +lib/ + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/bep/godartsass/LICENSE b/vendor/github.com/bep/godartsass/LICENSE new file mode 100644 index 0000000..7e406ef --- /dev/null +++ b/vendor/github.com/bep/godartsass/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/godartsass/README.md b/vendor/github.com/bep/godartsass/README.md new file mode 100644 index 0000000..50f78f2 --- /dev/null +++ b/vendor/github.com/bep/godartsass/README.md @@ -0,0 +1,26 @@ +[![Tests on Linux, MacOS and Windows](https://github.com/bep/godartsass/workflows/Test/badge.svg)](https://github.com/bep/godartsass/actions?query=workflow%3ATest) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/godartsass)](https://goreportcard.com/report/github.com/bep/godartsass) +[![codecov](https://codecov.io/gh/bep/godartsass/branch/main/graph/badge.svg?token=OWZ9RCAYWO)](https://codecov.io/gh/bep/godartsass) +[![GoDoc](https://godoc.org/github.com/bep/godartsass?status.svg)](https://godoc.org/github.com/bep/godartsass) + +This is a Go API backed by the native [Dart Sass Embedded](https://github.com/sass/dart-sass-embedded) executable. + +The primary motivation for this project is to provide `SCSS` support to [Hugo](https://gohugo.io/). I welcome PRs with bug fixes. I will also consider adding functionality, but please raise an issue discussing it first. + +For LibSass bindings in Go, see [GoLibSass](https://github.com/bep/golibsass). + +The benchmark below compares [GoLibSass](https://github.com/bep/golibsass) with this library. This is almost twice as fast when running single-threaded, but slower when running with multiple Goroutines. We're communicating with the compiler process via stdin/stdout, which becomes the serialized bottle neck here. That may be possible to improve, but for most practical applications (including Hugo), this should not matter. 
+ +```bash +Transpile/SCSS-16 770µs ± 0% 467µs ± 1% -39.36% (p=0.029 n=4+4) +Transpile/SCSS_Parallel-16 92.2µs ± 2% 362.5µs ± 1% +293.39% (p=0.029 n=4+4) + +name old alloc/op new alloc/op delta +Transpile/SCSS-16 192B ± 0% 1268B ± 0% +560.42% (p=0.029 n=4+4) +Transpile/SCSS_Parallel-16 192B ± 0% 1272B ± 0% +562.37% (p=0.029 n=4+4) + +name old allocs/op new allocs/op delta +Transpile/SCSS-16 2.00 ± 0% 19.00 ± 0% +850.00% (p=0.029 n=4+4) +Transpile/SCSS_Parallel-16 2.00 ± 0% 19.00 ± 0% +850.00% (p=0.029 n=4+4) +``` + diff --git a/vendor/github.com/bep/godartsass/codecov.yml b/vendor/github.com/bep/godartsass/codecov.yml new file mode 100644 index 0000000..2e3090a --- /dev/null +++ b/vendor/github.com/bep/godartsass/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + target: auto + threshold: 0.5% + patch: off + +comment: + require_changes: true diff --git a/vendor/github.com/bep/godartsass/conn.go b/vendor/github.com/bep/godartsass/conn.go new file mode 100644 index 0000000..97b40f0 --- /dev/null +++ b/vendor/github.com/bep/godartsass/conn.go @@ -0,0 +1,104 @@ +package godartsass + +import ( + "bufio" + "bytes" + "errors" + "io" + "os/exec" + "regexp" + "time" +) + +func newConn(cmd *exec.Cmd) (_ conn, err error) { + in, err := cmd.StdinPipe() + if err != nil { + return conn{}, err + } + defer func() { + if err != nil { + in.Close() + } + }() + + out, err := cmd.StdoutPipe() + stdErr := &tailBuffer{limit: 1024} + buff := bufio.NewReader(out) + c := conn{buff, buff, out, in, stdErr, cmd} + cmd.Stderr = c.stdErr + + return c, err +} + +type byteReadWriteCloser interface { + io.ReadWriteCloser + io.ByteReader +} + +type conn struct { + io.ByteReader + io.Reader + readerCloser io.Closer + io.WriteCloser + stdErr *tailBuffer + cmd *exec.Cmd +} + +// Start starts conn's Cmd. 
+func (c conn) Start() error { + err := c.cmd.Start() + if err != nil { + return c.Close() + } + return err +} + +// Close closes conn's WriteCloser, ReadClosers, and waits for the command to finish. +func (c conn) Close() error { + writeErr := c.WriteCloser.Close() + readErr := c.readerCloser.Close() + cmdErr := c.waitWithTimeout() + + if writeErr != nil { + return writeErr + } + + if readErr != nil { + return readErr + } + + return cmdErr +} + +var brokenPipeRe = regexp.MustCompile("Broken pipe|pipe is being closed") + +// dart-sass-embedded ends on itself on EOF, this is just to give it some +// time to do so. +func (c conn) waitWithTimeout() error { + result := make(chan error, 1) + go func() { result <- c.cmd.Wait() }() + select { + case err := <-result: + if _, ok := err.(*exec.ExitError); ok { + if brokenPipeRe.MatchString(c.stdErr.String()) { + return nil + } + } + return err + case <-time.After(5 * time.Second): + return errors.New("timed out waiting for dart-sass-embedded to finish") + } +} + +type tailBuffer struct { + limit int + bytes.Buffer +} + +func (b *tailBuffer) Write(p []byte) (n int, err error) { + if len(p)+b.Buffer.Len() > b.limit { + b.Reset() + } + n, err = b.Buffer.Write(p) + return +} diff --git a/vendor/github.com/bep/godartsass/internal/embeddedsassv1/README.md b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/README.md new file mode 100644 index 0000000..608581a --- /dev/null +++ b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/README.md @@ -0,0 +1,4 @@ + +* Install protobuf: https://github.com/protocolbuffers/protobuf +* Install the Go plugin: go get -u google.golang.org/protobuf/cmd/protoc-gen-go +* protoc --go_opt=Membedded_sass_v1.proto=github.com/bep/godartsass/internal/embeddedsassv1 --go_opt=paths=source_relative --go_out=. 
embedded_sass_v1.proto diff --git a/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.pb.go b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.pb.go new file mode 100644 index 0000000..f3a5482 --- /dev/null +++ b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.pb.go @@ -0,0 +1,5350 @@ +// Copyright 2019 Google Inc. Use of this source code is governed by an +// MIT-style license that can be found in the LICENSE file or at +// https://opensource.org/licenses/MIT. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.30.0 +// protoc v4.23.2 +// source: embedded_sass_v1.proto + +package embeddedsassv1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Possible ways to format the CSS output. The compiler is not required to +// support all possible options; if the host requests an unsupported style, the +// compiler should choose the closest supported style. +type OutputStyle int32 + +const ( + // Each selector and declaration is written on its own line. + OutputStyle_EXPANDED OutputStyle = 0 + // The entire stylesheet is written on a single line, with as few characters + // as possible. + OutputStyle_COMPRESSED OutputStyle = 1 +) + +// Enum value maps for OutputStyle. 
+var ( + OutputStyle_name = map[int32]string{ + 0: "EXPANDED", + 1: "COMPRESSED", + } + OutputStyle_value = map[string]int32{ + "EXPANDED": 0, + "COMPRESSED": 1, + } +) + +func (x OutputStyle) Enum() *OutputStyle { + p := new(OutputStyle) + *p = x + return p +} + +func (x OutputStyle) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (OutputStyle) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[0].Descriptor() +} + +func (OutputStyle) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[0] +} + +func (x OutputStyle) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use OutputStyle.Descriptor instead. +func (OutputStyle) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0} +} + +// Possible syntaxes for a Sass stylesheet. +type Syntax int32 + +const ( + // The CSS-superset `.scss` syntax. + Syntax_SCSS Syntax = 0 + // The indented `.sass` syntax. + Syntax_INDENTED Syntax = 1 + // Plain CSS syntax that doesn't support any special Sass features. + Syntax_CSS Syntax = 2 +) + +// Enum value maps for Syntax. +var ( + Syntax_name = map[int32]string{ + 0: "SCSS", + 1: "INDENTED", + 2: "CSS", + } + Syntax_value = map[string]int32{ + "SCSS": 0, + "INDENTED": 1, + "CSS": 2, + } +) + +func (x Syntax) Enum() *Syntax { + p := new(Syntax) + *p = x + return p +} + +func (x Syntax) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Syntax) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[1].Descriptor() +} + +func (Syntax) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[1] +} + +func (x Syntax) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Syntax.Descriptor instead. 
+func (Syntax) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1} +} + +// The possible types of [LogEvent]. +type LogEventType int32 + +const ( + // A warning for something other than a deprecated Sass feature. Often emitted + // due to a stylesheet using the `@warn` rule. + LogEventType_WARNING LogEventType = 0 + // A warning indicating that the stylesheet is using a deprecated Sass + // feature. Compilers should not add text like "deprecation warning" to + // deprecation warnings; it's up to the host to determine how to signal that + // to the user. + LogEventType_DEPRECATION_WARNING LogEventType = 1 + // A message generated by the user for their own debugging purposes. + LogEventType_DEBUG LogEventType = 2 +) + +// Enum value maps for LogEventType. +var ( + LogEventType_name = map[int32]string{ + 0: "WARNING", + 1: "DEPRECATION_WARNING", + 2: "DEBUG", + } + LogEventType_value = map[string]int32{ + "WARNING": 0, + "DEPRECATION_WARNING": 1, + "DEBUG": 2, + } +) + +func (x LogEventType) Enum() *LogEventType { + p := new(LogEventType) + *p = x + return p +} + +func (x LogEventType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (LogEventType) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[2].Descriptor() +} + +func (LogEventType) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[2] +} + +func (x LogEventType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use LogEventType.Descriptor instead. +func (LogEventType) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{2} +} + +// Potential types of protocol errors. +type ProtocolErrorType int32 + +const ( + // A message was received that couldn't be decoded as an `InboundMessage` (for + // the compiler) or `OutboundMessage` (for the host). 
+ ProtocolErrorType_PARSE ProtocolErrorType = 0 + // A message was received that violated a documented restriction, such as not + // providing a mandatory field. + ProtocolErrorType_PARAMS ProtocolErrorType = 1 + // Something unexpected went wrong within the endpoint. + ProtocolErrorType_INTERNAL ProtocolErrorType = 2 +) + +// Enum value maps for ProtocolErrorType. +var ( + ProtocolErrorType_name = map[int32]string{ + 0: "PARSE", + 1: "PARAMS", + 2: "INTERNAL", + } + ProtocolErrorType_value = map[string]int32{ + "PARSE": 0, + "PARAMS": 1, + "INTERNAL": 2, + } +) + +func (x ProtocolErrorType) Enum() *ProtocolErrorType { + p := new(ProtocolErrorType) + *p = x + return p +} + +func (x ProtocolErrorType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ProtocolErrorType) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[3].Descriptor() +} + +func (ProtocolErrorType) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[3] +} + +func (x ProtocolErrorType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ProtocolErrorType.Descriptor instead. +func (ProtocolErrorType) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{3} +} + +// Different types of separators a list can have. +type ListSeparator int32 + +const ( + // List elements are separated by a comma. + ListSeparator_COMMA ListSeparator = 0 + // List elements are separated by whitespace. + ListSeparator_SPACE ListSeparator = 1 + // List elements are separated by a forward slash. + ListSeparator_SLASH ListSeparator = 2 + // The list's separator hasn't yet been determined. This is only allowed for + // singleton and empty lists. + // + // Singleton lists and empty lists don't have separators defined. This means + // that list functions will prefer other lists' separators if possible. 
+ ListSeparator_UNDECIDED ListSeparator = 3 +) + +// Enum value maps for ListSeparator. +var ( + ListSeparator_name = map[int32]string{ + 0: "COMMA", + 1: "SPACE", + 2: "SLASH", + 3: "UNDECIDED", + } + ListSeparator_value = map[string]int32{ + "COMMA": 0, + "SPACE": 1, + "SLASH": 2, + "UNDECIDED": 3, + } +) + +func (x ListSeparator) Enum() *ListSeparator { + p := new(ListSeparator) + *p = x + return p +} + +func (x ListSeparator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ListSeparator) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[4].Descriptor() +} + +func (ListSeparator) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[4] +} + +func (x ListSeparator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ListSeparator.Descriptor instead. +func (ListSeparator) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4} +} + +// Singleton SassScript values that have no internal state. +type SingletonValue int32 + +const ( + // The SassScript boolean true value. + SingletonValue_TRUE SingletonValue = 0 + // The SassScript boolean false value. + SingletonValue_FALSE SingletonValue = 1 + // The SassScript null value. + SingletonValue_NULL SingletonValue = 2 +) + +// Enum value maps for SingletonValue. 
+var ( + SingletonValue_name = map[int32]string{ + 0: "TRUE", + 1: "FALSE", + 2: "NULL", + } + SingletonValue_value = map[string]int32{ + "TRUE": 0, + "FALSE": 1, + "NULL": 2, + } +) + +func (x SingletonValue) Enum() *SingletonValue { + p := new(SingletonValue) + *p = x + return p +} + +func (x SingletonValue) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (SingletonValue) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[5].Descriptor() +} + +func (SingletonValue) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[5] +} + +func (x SingletonValue) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use SingletonValue.Descriptor instead. +func (SingletonValue) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{5} +} + +// An operator used in a calculation value's operation. +type CalculationOperator int32 + +const ( + // The addition operator. + CalculationOperator_PLUS CalculationOperator = 0 + // The subtraction operator. + CalculationOperator_MINUS CalculationOperator = 1 + // The multiplication operator. + CalculationOperator_TIMES CalculationOperator = 2 + // The division operator. + CalculationOperator_DIVIDE CalculationOperator = 3 +) + +// Enum value maps for CalculationOperator. 
+var ( + CalculationOperator_name = map[int32]string{ + 0: "PLUS", + 1: "MINUS", + 2: "TIMES", + 3: "DIVIDE", + } + CalculationOperator_value = map[string]int32{ + "PLUS": 0, + "MINUS": 1, + "TIMES": 2, + "DIVIDE": 3, + } +) + +func (x CalculationOperator) Enum() *CalculationOperator { + p := new(CalculationOperator) + *p = x + return p +} + +func (x CalculationOperator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CalculationOperator) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_v1_proto_enumTypes[6].Descriptor() +} + +func (CalculationOperator) Type() protoreflect.EnumType { + return &file_embedded_sass_v1_proto_enumTypes[6] +} + +func (x CalculationOperator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CalculationOperator.Descriptor instead. +func (CalculationOperator) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{6} +} + +// The wrapper type for all messages sent from the host to the compiler. This +// provides a `oneof` that makes it possible to determine the type of each +// inbound message. +type InboundMessage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The wrapped message. Mandatory. 
+ // + // Types that are assignable to Message: + // + // *InboundMessage_CompileRequest_ + // *InboundMessage_CanonicalizeResponse_ + // *InboundMessage_ImportResponse_ + // *InboundMessage_FileImportResponse_ + // *InboundMessage_FunctionCallResponse_ + // *InboundMessage_VersionRequest_ + Message isInboundMessage_Message `protobuf_oneof:"message"` +} + +func (x *InboundMessage) Reset() { + *x = InboundMessage{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage) ProtoMessage() {} + +func (x *InboundMessage) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage.ProtoReflect.Descriptor instead. 
+func (*InboundMessage) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0} +} + +func (m *InboundMessage) GetMessage() isInboundMessage_Message { + if m != nil { + return m.Message + } + return nil +} + +func (x *InboundMessage) GetCompileRequest() *InboundMessage_CompileRequest { + if x, ok := x.GetMessage().(*InboundMessage_CompileRequest_); ok { + return x.CompileRequest + } + return nil +} + +func (x *InboundMessage) GetCanonicalizeResponse() *InboundMessage_CanonicalizeResponse { + if x, ok := x.GetMessage().(*InboundMessage_CanonicalizeResponse_); ok { + return x.CanonicalizeResponse + } + return nil +} + +func (x *InboundMessage) GetImportResponse() *InboundMessage_ImportResponse { + if x, ok := x.GetMessage().(*InboundMessage_ImportResponse_); ok { + return x.ImportResponse + } + return nil +} + +func (x *InboundMessage) GetFileImportResponse() *InboundMessage_FileImportResponse { + if x, ok := x.GetMessage().(*InboundMessage_FileImportResponse_); ok { + return x.FileImportResponse + } + return nil +} + +func (x *InboundMessage) GetFunctionCallResponse() *InboundMessage_FunctionCallResponse { + if x, ok := x.GetMessage().(*InboundMessage_FunctionCallResponse_); ok { + return x.FunctionCallResponse + } + return nil +} + +func (x *InboundMessage) GetVersionRequest() *InboundMessage_VersionRequest { + if x, ok := x.GetMessage().(*InboundMessage_VersionRequest_); ok { + return x.VersionRequest + } + return nil +} + +type isInboundMessage_Message interface { + isInboundMessage_Message() +} + +type InboundMessage_CompileRequest_ struct { + CompileRequest *InboundMessage_CompileRequest `protobuf:"bytes,2,opt,name=compile_request,json=compileRequest,proto3,oneof"` +} + +type InboundMessage_CanonicalizeResponse_ struct { + CanonicalizeResponse *InboundMessage_CanonicalizeResponse `protobuf:"bytes,3,opt,name=canonicalize_response,json=canonicalizeResponse,proto3,oneof"` +} + +type InboundMessage_ImportResponse_ struct { + 
ImportResponse *InboundMessage_ImportResponse `protobuf:"bytes,4,opt,name=import_response,json=importResponse,proto3,oneof"` +} + +type InboundMessage_FileImportResponse_ struct { + FileImportResponse *InboundMessage_FileImportResponse `protobuf:"bytes,5,opt,name=file_import_response,json=fileImportResponse,proto3,oneof"` +} + +type InboundMessage_FunctionCallResponse_ struct { + FunctionCallResponse *InboundMessage_FunctionCallResponse `protobuf:"bytes,6,opt,name=function_call_response,json=functionCallResponse,proto3,oneof"` +} + +type InboundMessage_VersionRequest_ struct { + VersionRequest *InboundMessage_VersionRequest `protobuf:"bytes,7,opt,name=version_request,json=versionRequest,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_) isInboundMessage_Message() {} + +func (*InboundMessage_CanonicalizeResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_ImportResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_FileImportResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_FunctionCallResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_VersionRequest_) isInboundMessage_Message() {} + +// The wrapper type for all messages sent from the compiler to the host. This +// provides a `oneof` that makes it possible to determine the type of each +// outbound message. +type OutboundMessage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The wrapped message. Mandatory. 
+ // + // Types that are assignable to Message: + // + // *OutboundMessage_Error + // *OutboundMessage_CompileResponse_ + // *OutboundMessage_LogEvent_ + // *OutboundMessage_CanonicalizeRequest_ + // *OutboundMessage_ImportRequest_ + // *OutboundMessage_FileImportRequest_ + // *OutboundMessage_FunctionCallRequest_ + // *OutboundMessage_VersionResponse_ + Message isOutboundMessage_Message `protobuf_oneof:"message"` +} + +func (x *OutboundMessage) Reset() { + *x = OutboundMessage{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage) ProtoMessage() {} + +func (x *OutboundMessage) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1} +} + +func (m *OutboundMessage) GetMessage() isOutboundMessage_Message { + if m != nil { + return m.Message + } + return nil +} + +func (x *OutboundMessage) GetError() *ProtocolError { + if x, ok := x.GetMessage().(*OutboundMessage_Error); ok { + return x.Error + } + return nil +} + +func (x *OutboundMessage) GetCompileResponse() *OutboundMessage_CompileResponse { + if x, ok := x.GetMessage().(*OutboundMessage_CompileResponse_); ok { + return x.CompileResponse + } + return nil +} + +func (x *OutboundMessage) GetLogEvent() *OutboundMessage_LogEvent { + if x, ok := x.GetMessage().(*OutboundMessage_LogEvent_); ok { + return x.LogEvent + } + return nil +} + +func (x *OutboundMessage) GetCanonicalizeRequest() *OutboundMessage_CanonicalizeRequest { + if x, ok := x.GetMessage().(*OutboundMessage_CanonicalizeRequest_); ok { + return x.CanonicalizeRequest + } + return nil +} + +func (x *OutboundMessage) GetImportRequest() *OutboundMessage_ImportRequest { + if x, ok := x.GetMessage().(*OutboundMessage_ImportRequest_); ok { + return x.ImportRequest + } + return nil +} + +func (x *OutboundMessage) GetFileImportRequest() *OutboundMessage_FileImportRequest { + if x, ok := x.GetMessage().(*OutboundMessage_FileImportRequest_); ok { + return x.FileImportRequest + } + return nil +} + +func (x *OutboundMessage) GetFunctionCallRequest() *OutboundMessage_FunctionCallRequest { + if x, ok := x.GetMessage().(*OutboundMessage_FunctionCallRequest_); ok { + return x.FunctionCallRequest + } + return nil +} + +func (x *OutboundMessage) GetVersionResponse() *OutboundMessage_VersionResponse { + if x, ok := x.GetMessage().(*OutboundMessage_VersionResponse_); ok { + return x.VersionResponse + } + return nil +} + +type isOutboundMessage_Message interface { + isOutboundMessage_Message() +} + +type OutboundMessage_Error struct { + Error *ProtocolError 
`protobuf:"bytes,1,opt,name=error,proto3,oneof"` +} + +type OutboundMessage_CompileResponse_ struct { + CompileResponse *OutboundMessage_CompileResponse `protobuf:"bytes,2,opt,name=compile_response,json=compileResponse,proto3,oneof"` +} + +type OutboundMessage_LogEvent_ struct { + LogEvent *OutboundMessage_LogEvent `protobuf:"bytes,3,opt,name=log_event,json=logEvent,proto3,oneof"` +} + +type OutboundMessage_CanonicalizeRequest_ struct { + CanonicalizeRequest *OutboundMessage_CanonicalizeRequest `protobuf:"bytes,4,opt,name=canonicalize_request,json=canonicalizeRequest,proto3,oneof"` +} + +type OutboundMessage_ImportRequest_ struct { + ImportRequest *OutboundMessage_ImportRequest `protobuf:"bytes,5,opt,name=import_request,json=importRequest,proto3,oneof"` +} + +type OutboundMessage_FileImportRequest_ struct { + FileImportRequest *OutboundMessage_FileImportRequest `protobuf:"bytes,6,opt,name=file_import_request,json=fileImportRequest,proto3,oneof"` +} + +type OutboundMessage_FunctionCallRequest_ struct { + FunctionCallRequest *OutboundMessage_FunctionCallRequest `protobuf:"bytes,7,opt,name=function_call_request,json=functionCallRequest,proto3,oneof"` +} + +type OutboundMessage_VersionResponse_ struct { + VersionResponse *OutboundMessage_VersionResponse `protobuf:"bytes,8,opt,name=version_response,json=versionResponse,proto3,oneof"` +} + +func (*OutboundMessage_Error) isOutboundMessage_Message() {} + +func (*OutboundMessage_CompileResponse_) isOutboundMessage_Message() {} + +func (*OutboundMessage_LogEvent_) isOutboundMessage_Message() {} + +func (*OutboundMessage_CanonicalizeRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_ImportRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_FileImportRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_FunctionCallRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_VersionResponse_) isOutboundMessage_Message() {} + +// An error reported when an endpoint violates the 
embedded Sass protocol. +type ProtocolError struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type ProtocolErrorType `protobuf:"varint,1,opt,name=type,proto3,enum=sass.embedded_protocolv1.ProtocolErrorType" json:"type,omitempty"` + // The ID of the request that had an error. This MUST be `4294967295` if the + // request ID couldn't be determined, or if the error is being reported for a + // response or an event. + Id uint32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // A human-readable message providing more detail about the error. + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *ProtocolError) Reset() { + *x = ProtocolError{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ProtocolError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProtocolError) ProtoMessage() {} + +func (x *ProtocolError) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProtocolError.ProtoReflect.Descriptor instead. +func (*ProtocolError) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{2} +} + +func (x *ProtocolError) GetType() ProtocolErrorType { + if x != nil { + return x.Type + } + return ProtocolErrorType_PARSE +} + +func (x *ProtocolError) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *ProtocolError) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +// A chunk of a source file. 
+type SourceSpan struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The text covered by the source span. Compilers must guarantee that this is + // the text between `start.offset` and `end.offset` in the source file + // referred to by `url`. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The location of the first character in this span. Mandatory. + Start *SourceSpan_SourceLocation `protobuf:"bytes,2,opt,name=start,proto3" json:"start,omitempty"` + // The location of the first character after this span. Optional. + // + // If this is omitted, it indicates that the span is empty and points + // immediately before `start`. In that case, `text` must be empty. + // + // This must not point to a location before `start`. + End *SourceSpan_SourceLocation `protobuf:"bytes,3,opt,name=end,proto3" json:"end,omitempty"` + // The URL of the file to which this span refers. + // + // This may be empty, indicating that the span refers to a + // `CompileRequest.StringInput` file that doesn't specify a URL. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // Additional source text surrounding this span. + // + // If this isn't empty, it must contain `text`. Furthermore, `text` must begin + // at column `start.column` of a line in `context`. + // + // This usually contains the full lines the span begins and ends on if the + // span itself doesn't cover the full lines. 
+ Context string `protobuf:"bytes,5,opt,name=context,proto3" json:"context,omitempty"` +} + +func (x *SourceSpan) Reset() { + *x = SourceSpan{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceSpan) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceSpan) ProtoMessage() {} + +func (x *SourceSpan) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceSpan.ProtoReflect.Descriptor instead. +func (*SourceSpan) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{3} +} + +func (x *SourceSpan) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +func (x *SourceSpan) GetStart() *SourceSpan_SourceLocation { + if x != nil { + return x.Start + } + return nil +} + +func (x *SourceSpan) GetEnd() *SourceSpan_SourceLocation { + if x != nil { + return x.End + } + return nil +} + +func (x *SourceSpan) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *SourceSpan) GetContext() string { + if x != nil { + return x.Context + } + return "" +} + +// A SassScript value, passed to and returned by functions. +type Value struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The value itself. Mandatory. + // + // This is wrapped in a message type rather than used directly to reduce + // repetition, and because oneofs can't be repeated. 
+ // + // Types that are assignable to Value: + // + // *Value_String_ + // *Value_Number_ + // *Value_RgbColor_ + // *Value_HslColor_ + // *Value_List_ + // *Value_Map_ + // *Value_Singleton + // *Value_CompilerFunction_ + // *Value_HostFunction_ + // *Value_ArgumentList_ + // *Value_HwbColor_ + // *Value_Calculation_ + Value isValue_Value `protobuf_oneof:"value"` +} + +func (x *Value) Reset() { + *x = Value{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value) ProtoMessage() {} + +func (x *Value) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value.ProtoReflect.Descriptor instead. 
+func (*Value) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4} +} + +func (m *Value) GetValue() isValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Value) GetString_() *Value_String { + if x, ok := x.GetValue().(*Value_String_); ok { + return x.String_ + } + return nil +} + +func (x *Value) GetNumber() *Value_Number { + if x, ok := x.GetValue().(*Value_Number_); ok { + return x.Number + } + return nil +} + +func (x *Value) GetRgbColor() *Value_RgbColor { + if x, ok := x.GetValue().(*Value_RgbColor_); ok { + return x.RgbColor + } + return nil +} + +func (x *Value) GetHslColor() *Value_HslColor { + if x, ok := x.GetValue().(*Value_HslColor_); ok { + return x.HslColor + } + return nil +} + +func (x *Value) GetList() *Value_List { + if x, ok := x.GetValue().(*Value_List_); ok { + return x.List + } + return nil +} + +func (x *Value) GetMap() *Value_Map { + if x, ok := x.GetValue().(*Value_Map_); ok { + return x.Map + } + return nil +} + +func (x *Value) GetSingleton() SingletonValue { + if x, ok := x.GetValue().(*Value_Singleton); ok { + return x.Singleton + } + return SingletonValue_TRUE +} + +func (x *Value) GetCompilerFunction() *Value_CompilerFunction { + if x, ok := x.GetValue().(*Value_CompilerFunction_); ok { + return x.CompilerFunction + } + return nil +} + +func (x *Value) GetHostFunction() *Value_HostFunction { + if x, ok := x.GetValue().(*Value_HostFunction_); ok { + return x.HostFunction + } + return nil +} + +func (x *Value) GetArgumentList() *Value_ArgumentList { + if x, ok := x.GetValue().(*Value_ArgumentList_); ok { + return x.ArgumentList + } + return nil +} + +func (x *Value) GetHwbColor() *Value_HwbColor { + if x, ok := x.GetValue().(*Value_HwbColor_); ok { + return x.HwbColor + } + return nil +} + +func (x *Value) GetCalculation() *Value_Calculation { + if x, ok := x.GetValue().(*Value_Calculation_); ok { + return x.Calculation + } + return nil +} + +type isValue_Value interface 
{ + isValue_Value() +} + +type Value_String_ struct { + String_ *Value_String `protobuf:"bytes,1,opt,name=string,proto3,oneof"` +} + +type Value_Number_ struct { + Number *Value_Number `protobuf:"bytes,2,opt,name=number,proto3,oneof"` +} + +type Value_RgbColor_ struct { + RgbColor *Value_RgbColor `protobuf:"bytes,3,opt,name=rgb_color,json=rgbColor,proto3,oneof"` +} + +type Value_HslColor_ struct { + HslColor *Value_HslColor `protobuf:"bytes,4,opt,name=hsl_color,json=hslColor,proto3,oneof"` +} + +type Value_List_ struct { + List *Value_List `protobuf:"bytes,5,opt,name=list,proto3,oneof"` +} + +type Value_Map_ struct { + Map *Value_Map `protobuf:"bytes,6,opt,name=map,proto3,oneof"` +} + +type Value_Singleton struct { + Singleton SingletonValue `protobuf:"varint,7,opt,name=singleton,proto3,enum=sass.embedded_protocolv1.SingletonValue,oneof"` +} + +type Value_CompilerFunction_ struct { + CompilerFunction *Value_CompilerFunction `protobuf:"bytes,8,opt,name=compiler_function,json=compilerFunction,proto3,oneof"` +} + +type Value_HostFunction_ struct { + HostFunction *Value_HostFunction `protobuf:"bytes,9,opt,name=host_function,json=hostFunction,proto3,oneof"` +} + +type Value_ArgumentList_ struct { + ArgumentList *Value_ArgumentList `protobuf:"bytes,10,opt,name=argument_list,json=argumentList,proto3,oneof"` +} + +type Value_HwbColor_ struct { + HwbColor *Value_HwbColor `protobuf:"bytes,11,opt,name=hwb_color,json=hwbColor,proto3,oneof"` +} + +type Value_Calculation_ struct { + Calculation *Value_Calculation `protobuf:"bytes,12,opt,name=calculation,proto3,oneof"` +} + +func (*Value_String_) isValue_Value() {} + +func (*Value_Number_) isValue_Value() {} + +func (*Value_RgbColor_) isValue_Value() {} + +func (*Value_HslColor_) isValue_Value() {} + +func (*Value_List_) isValue_Value() {} + +func (*Value_Map_) isValue_Value() {} + +func (*Value_Singleton) isValue_Value() {} + +func (*Value_CompilerFunction_) isValue_Value() {} + +func (*Value_HostFunction_) isValue_Value() {} + 
+func (*Value_ArgumentList_) isValue_Value() {} + +func (*Value_HwbColor_) isValue_Value() {} + +func (*Value_Calculation_) isValue_Value() {} + +// A request for information about the version of the embedded compiler. The +// host can use this to provide diagnostic information to the user, to check +// which features the compiler supports, or to ensure that it's compatible +// with the same protocol version the compiler supports. +type InboundMessage_VersionRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This version request's id. Mandatory. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *InboundMessage_VersionRequest) Reset() { + *x = InboundMessage_VersionRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_VersionRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_VersionRequest) ProtoMessage() {} + +func (x *InboundMessage_VersionRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_VersionRequest.ProtoReflect.Descriptor instead. +func (*InboundMessage_VersionRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *InboundMessage_VersionRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +// A request that compiles an entrypoint to CSS. 
+type InboundMessage_CompileRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This compilation's request id. This is included in messages sent from the + // compiler to the host. Mandatory. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The input stylesheet to parse. Mandatory. + // + // Types that are assignable to Input: + // + // *InboundMessage_CompileRequest_String_ + // *InboundMessage_CompileRequest_Path + Input isInboundMessage_CompileRequest_Input `protobuf_oneof:"input"` + // How to format the CSS output. + Style OutputStyle `protobuf:"varint,4,opt,name=style,proto3,enum=sass.embedded_protocolv1.OutputStyle" json:"style,omitempty"` + // Whether to generate a source map. Note that this will *not* add a source + // map comment to the stylesheet; that's up to the host or its users. + SourceMap bool `protobuf:"varint,5,opt,name=source_map,json=sourceMap,proto3" json:"source_map,omitempty"` + // Importers (including load paths on the filesystem) to use when resolving + // imports that can't be resolved relative to the file that contains it. Each + // importer is checked in order until one recognizes the imported URL. + Importers []*InboundMessage_CompileRequest_Importer `protobuf:"bytes,6,rep,name=importers,proto3" json:"importers,omitempty"` + // Signatures for custom global functions whose behavior is defined by the + // host. These must be valid Sass function signatures that could appear in + // after `@function` in a Sass stylesheet, such as + // `mix($color1, $color2, $weight: 50%)`. + // + // Compilers must ensure that pure-Sass functions take precedence over + // custom global functions. They must also reject any custom function names + // that conflict with function names built into the Sass language. 
+ GlobalFunctions []string `protobuf:"bytes,7,rep,name=global_functions,json=globalFunctions,proto3" json:"global_functions,omitempty"` + // Whether to use terminal colors in the formatted message of errors and + // logs. + AlertColor bool `protobuf:"varint,8,opt,name=alert_color,json=alertColor,proto3" json:"alert_color,omitempty"` + // Whether to encode the formatted message of errors and logs in ASCII. + AlertAscii bool `protobuf:"varint,9,opt,name=alert_ascii,json=alertAscii,proto3" json:"alert_ascii,omitempty"` + // Whether to report all deprecation warnings or only the first few ones. + // If this is `false`, the compiler may choose not to send events for + // repeated deprecation warnings. If this is `true`, the compiler must emit + // an event for every deprecation warning it encounters. + Verbose bool `protobuf:"varint,10,opt,name=verbose,proto3" json:"verbose,omitempty"` + // Whether to omit events for deprecation warnings coming from dependencies + // (files loaded from a different importer than the input). + QuietDeps bool `protobuf:"varint,11,opt,name=quiet_deps,json=quietDeps,proto3" json:"quiet_deps,omitempty"` + // Whether to include sources in the generated sourcemap + SourceMapIncludeSources bool `protobuf:"varint,12,opt,name=source_map_include_sources,json=sourceMapIncludeSources,proto3" json:"source_map_include_sources,omitempty"` + // Whether to emit a `@charset`/BOM for non-ASCII stylesheets. 
+ Charset bool `protobuf:"varint,13,opt,name=charset,proto3" json:"charset,omitempty"` +} + +func (x *InboundMessage_CompileRequest) Reset() { + *x = InboundMessage_CompileRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest.ProtoReflect.Descriptor instead. +func (*InboundMessage_CompileRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 1} +} + +func (x *InboundMessage_CompileRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_CompileRequest) GetInput() isInboundMessage_CompileRequest_Input { + if m != nil { + return m.Input + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetString_() *InboundMessage_CompileRequest_StringInput { + if x, ok := x.GetInput().(*InboundMessage_CompileRequest_String_); ok { + return x.String_ + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetPath() string { + if x, ok := x.GetInput().(*InboundMessage_CompileRequest_Path); ok { + return x.Path + } + return "" +} + +func (x *InboundMessage_CompileRequest) GetStyle() OutputStyle { + if x != nil { + return x.Style + } + return OutputStyle_EXPANDED +} + +func (x *InboundMessage_CompileRequest) GetSourceMap() bool { + if x != nil { + return x.SourceMap + } + return false +} + +func (x 
*InboundMessage_CompileRequest) GetImporters() []*InboundMessage_CompileRequest_Importer { + if x != nil { + return x.Importers + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetGlobalFunctions() []string { + if x != nil { + return x.GlobalFunctions + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetAlertColor() bool { + if x != nil { + return x.AlertColor + } + return false +} + +func (x *InboundMessage_CompileRequest) GetAlertAscii() bool { + if x != nil { + return x.AlertAscii + } + return false +} + +func (x *InboundMessage_CompileRequest) GetVerbose() bool { + if x != nil { + return x.Verbose + } + return false +} + +func (x *InboundMessage_CompileRequest) GetQuietDeps() bool { + if x != nil { + return x.QuietDeps + } + return false +} + +func (x *InboundMessage_CompileRequest) GetSourceMapIncludeSources() bool { + if x != nil { + return x.SourceMapIncludeSources + } + return false +} + +func (x *InboundMessage_CompileRequest) GetCharset() bool { + if x != nil { + return x.Charset + } + return false +} + +type isInboundMessage_CompileRequest_Input interface { + isInboundMessage_CompileRequest_Input() +} + +type InboundMessage_CompileRequest_String_ struct { + // A stylesheet loaded from its contents. + String_ *InboundMessage_CompileRequest_StringInput `protobuf:"bytes,2,opt,name=string,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Path struct { + // A stylesheet loaded from the given path on the filesystem. + Path string `protobuf:"bytes,3,opt,name=path,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_String_) isInboundMessage_CompileRequest_Input() {} + +func (*InboundMessage_CompileRequest_Path) isInboundMessage_CompileRequest_Input() {} + +// A response indicating the result of canonicalizing an imported URL. 
+type InboundMessage_CanonicalizeResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of canonicalization. Optional. If this is `null`, it indicates + // that the importer either did not recognize the URL, or could not find a + // stylesheet at the location it referred to. + // + // Types that are assignable to Result: + // + // *InboundMessage_CanonicalizeResponse_Url + // *InboundMessage_CanonicalizeResponse_Error + Result isInboundMessage_CanonicalizeResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_CanonicalizeResponse) Reset() { + *x = InboundMessage_CanonicalizeResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CanonicalizeResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CanonicalizeResponse) ProtoMessage() {} + +func (x *InboundMessage_CanonicalizeResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CanonicalizeResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CanonicalizeResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 2} +} + +func (x *InboundMessage_CanonicalizeResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_CanonicalizeResponse) GetResult() isInboundMessage_CanonicalizeResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_CanonicalizeResponse) GetUrl() string { + if x, ok := x.GetResult().(*InboundMessage_CanonicalizeResponse_Url); ok { + return x.Url + } + return "" +} + +func (x *InboundMessage_CanonicalizeResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_CanonicalizeResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_CanonicalizeResponse_Result interface { + isInboundMessage_CanonicalizeResponse_Result() +} + +type InboundMessage_CanonicalizeResponse_Url struct { + // The successfully canonicalized URL. This must be an absolute URL, + // including scheme. + Url string `protobuf:"bytes,2,opt,name=url,proto3,oneof"` +} + +type InboundMessage_CanonicalizeResponse_Error struct { + // An error message explaining why canonicalization failed. + // + // This indicates that a stylesheet was found, but a canonical URL for it + // could not be determined. If no stylesheet was found, `result` should be + // `null` instead. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_CanonicalizeResponse_Url) isInboundMessage_CanonicalizeResponse_Result() {} + +func (*InboundMessage_CanonicalizeResponse_Error) isInboundMessage_CanonicalizeResponse_Result() {} + +// A response indicating the result of importing a canonical URL. 
+type InboundMessage_ImportResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of loading the URL. Optional. If this is `null`, it indicates + // that the importer either did not recognize the URL, or could not find a + // stylesheet at the location it referred to. + // + // Types that are assignable to Result: + // + // *InboundMessage_ImportResponse_Success + // *InboundMessage_ImportResponse_Error + Result isInboundMessage_ImportResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_ImportResponse) Reset() { + *x = InboundMessage_ImportResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_ImportResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_ImportResponse) ProtoMessage() {} + +func (x *InboundMessage_ImportResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_ImportResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_ImportResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 3} +} + +func (x *InboundMessage_ImportResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_ImportResponse) GetResult() isInboundMessage_ImportResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_ImportResponse) GetSuccess() *InboundMessage_ImportResponse_ImportSuccess { + if x, ok := x.GetResult().(*InboundMessage_ImportResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *InboundMessage_ImportResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_ImportResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_ImportResponse_Result interface { + isInboundMessage_ImportResponse_Result() +} + +type InboundMessage_ImportResponse_Success struct { + // The contents of the loaded stylesheet. + Success *InboundMessage_ImportResponse_ImportSuccess `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type InboundMessage_ImportResponse_Error struct { + // An error message explaining why the URL could not be loaded. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_ImportResponse_Success) isInboundMessage_ImportResponse_Result() {} + +func (*InboundMessage_ImportResponse_Error) isInboundMessage_ImportResponse_Result() {} + +// A response indicating the result of redirecting a URL to the filesystem. +type InboundMessage_FileImportResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of loading the URL. Optional. A null result indicates that the + // importer did not recognize the URL and other importers or load paths + // should be tried. 
+ // + // Types that are assignable to Result: + // + // *InboundMessage_FileImportResponse_FileUrl + // *InboundMessage_FileImportResponse_Error + Result isInboundMessage_FileImportResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_FileImportResponse) Reset() { + *x = InboundMessage_FileImportResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_FileImportResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_FileImportResponse) ProtoMessage() {} + +func (x *InboundMessage_FileImportResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_FileImportResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_FileImportResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 4} +} + +func (x *InboundMessage_FileImportResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_FileImportResponse) GetResult() isInboundMessage_FileImportResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_FileImportResponse) GetFileUrl() string { + if x, ok := x.GetResult().(*InboundMessage_FileImportResponse_FileUrl); ok { + return x.FileUrl + } + return "" +} + +func (x *InboundMessage_FileImportResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_FileImportResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_FileImportResponse_Result interface { + isInboundMessage_FileImportResponse_Result() +} + +type InboundMessage_FileImportResponse_FileUrl struct { + // The absolute `file:` URL to look for the file on the physical + // filesystem. + // + // The host must ensure that this URL follows the format for an absolute + // `file:` URL on the current operating system without a hostname, and the + // compiler must verify this to the best of its ability. See + // https://en.wikipedia.org/wiki/File_URI_scheme for details on the + // format. + // + // The compiler must handle turning this into a canonical URL by resolving + // it for partials, file extensions, and index files. The compiler must + // then loading the contents of the resulting canonical URL from the + // filesystem. + FileUrl string `protobuf:"bytes,2,opt,name=file_url,json=fileUrl,proto3,oneof"` +} + +type InboundMessage_FileImportResponse_Error struct { + // An error message explaining why the URL could not be loaded. 
+ Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_FileImportResponse_FileUrl) isInboundMessage_FileImportResponse_Result() {} + +func (*InboundMessage_FileImportResponse_Error) isInboundMessage_FileImportResponse_Result() {} + +// A response indicating the result of calling a custom Sass function defined +// in the host. +type InboundMessage_FunctionCallResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of calling the function. Mandatory. + // + // Types that are assignable to Result: + // + // *InboundMessage_FunctionCallResponse_Success + // *InboundMessage_FunctionCallResponse_Error + Result isInboundMessage_FunctionCallResponse_Result `protobuf_oneof:"result"` + // The IDs of all `Value.ArgumentList`s in `FunctionCallRequest.arguments` + // whose keywords were accessed. See `Value.ArgumentList` for details. + // Mandatory if `result.success` is set. This may not include the special + // value `0` and it may not include multiple instances of the same ID. 
+ AccessedArgumentLists []uint32 `protobuf:"varint,4,rep,packed,name=accessed_argument_lists,json=accessedArgumentLists,proto3" json:"accessed_argument_lists,omitempty"` +} + +func (x *InboundMessage_FunctionCallResponse) Reset() { + *x = InboundMessage_FunctionCallResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_FunctionCallResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_FunctionCallResponse) ProtoMessage() {} + +func (x *InboundMessage_FunctionCallResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_FunctionCallResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_FunctionCallResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 5} +} + +func (x *InboundMessage_FunctionCallResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_FunctionCallResponse) GetResult() isInboundMessage_FunctionCallResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_FunctionCallResponse) GetSuccess() *Value { + if x, ok := x.GetResult().(*InboundMessage_FunctionCallResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *InboundMessage_FunctionCallResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_FunctionCallResponse_Error); ok { + return x.Error + } + return "" +} + +func (x *InboundMessage_FunctionCallResponse) GetAccessedArgumentLists() []uint32 { + if x != nil { + return x.AccessedArgumentLists + } + return nil +} + +type isInboundMessage_FunctionCallResponse_Result interface { + isInboundMessage_FunctionCallResponse_Result() +} + +type InboundMessage_FunctionCallResponse_Success struct { + // The return value of a successful function call. + Success *Value `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type InboundMessage_FunctionCallResponse_Error struct { + // An error message explaining why the function call failed. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_FunctionCallResponse_Success) isInboundMessage_FunctionCallResponse_Result() {} + +func (*InboundMessage_FunctionCallResponse_Error) isInboundMessage_FunctionCallResponse_Result() {} + +// An input stylesheet provided as plain text, rather than loaded from the +// filesystem. +type InboundMessage_CompileRequest_StringInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The contents of the stylesheet. 
+ Source string `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // The location from which `source` was loaded. If this is empty, it + // indicates that the URL is unknown. + // + // This must be a canonical URL recognized by `importer`, if it's passed. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // The syntax to use to parse `source`. + Syntax Syntax `protobuf:"varint,3,opt,name=syntax,proto3,enum=sass.embedded_protocolv1.Syntax" json:"syntax,omitempty"` + // The importer to use to resolve imports relative to `url`. + Importer *InboundMessage_CompileRequest_Importer `protobuf:"bytes,4,opt,name=importer,proto3" json:"importer,omitempty"` +} + +func (x *InboundMessage_CompileRequest_StringInput) Reset() { + *x = InboundMessage_CompileRequest_StringInput{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest_StringInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest_StringInput) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest_StringInput) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest_StringInput.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CompileRequest_StringInput) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 1, 0} +} + +func (x *InboundMessage_CompileRequest_StringInput) GetSource() string { + if x != nil { + return x.Source + } + return "" +} + +func (x *InboundMessage_CompileRequest_StringInput) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *InboundMessage_CompileRequest_StringInput) GetSyntax() Syntax { + if x != nil { + return x.Syntax + } + return Syntax_SCSS +} + +func (x *InboundMessage_CompileRequest_StringInput) GetImporter() *InboundMessage_CompileRequest_Importer { + if x != nil { + return x.Importer + } + return nil +} + +// A wrapper message that represents either a user-defined importer or a +// load path on disk. This must be a wrapper because `oneof` types can't be +// `repeated`. +type InboundMessage_CompileRequest_Importer struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The possible types of importer. Mandatory. 
+ // + // Types that are assignable to Importer: + // + // *InboundMessage_CompileRequest_Importer_Path + // *InboundMessage_CompileRequest_Importer_ImporterId + // *InboundMessage_CompileRequest_Importer_FileImporterId + Importer isInboundMessage_CompileRequest_Importer_Importer `protobuf_oneof:"importer"` +} + +func (x *InboundMessage_CompileRequest_Importer) Reset() { + *x = InboundMessage_CompileRequest_Importer{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest_Importer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest_Importer) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest_Importer) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest_Importer.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CompileRequest_Importer) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 1, 1} +} + +func (m *InboundMessage_CompileRequest_Importer) GetImporter() isInboundMessage_CompileRequest_Importer_Importer { + if m != nil { + return m.Importer + } + return nil +} + +func (x *InboundMessage_CompileRequest_Importer) GetPath() string { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_Path); ok { + return x.Path + } + return "" +} + +func (x *InboundMessage_CompileRequest_Importer) GetImporterId() uint32 { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_ImporterId); ok { + return x.ImporterId + } + return 0 +} + +func (x *InboundMessage_CompileRequest_Importer) GetFileImporterId() uint32 { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_FileImporterId); ok { + return x.FileImporterId + } + return 0 +} + +type isInboundMessage_CompileRequest_Importer_Importer interface { + isInboundMessage_CompileRequest_Importer_Importer() +} + +type InboundMessage_CompileRequest_Importer_Path struct { + // A built-in importer that loads Sass files within the given directory + // on disk. + Path string `protobuf:"bytes,1,opt,name=path,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Importer_ImporterId struct { + // A unique ID for a user-defined importer. This ID will be included in + // outbound `CanonicalizeRequest` and `ImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all + // importers registered for this compilation. 
+ ImporterId uint32 `protobuf:"varint,2,opt,name=importer_id,json=importerId,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Importer_FileImporterId struct { + // A unique ID for a special kind of user-defined importer that tells + // the compiler where to look for files on the physical filesystem, but + // leaves the details of resolving partials and extensions and loading + // the file from disk up to the compiler itself. + // + // This ID will be included in outbound `FileImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all importers + // registered for this compilation. + FileImporterId uint32 `protobuf:"varint,3,opt,name=file_importer_id,json=fileImporterId,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_Importer_Path) isInboundMessage_CompileRequest_Importer_Importer() { +} + +func (*InboundMessage_CompileRequest_Importer_ImporterId) isInboundMessage_CompileRequest_Importer_Importer() { +} + +func (*InboundMessage_CompileRequest_Importer_FileImporterId) isInboundMessage_CompileRequest_Importer_Importer() { +} + +// The stylesheet's contents were loaded successfully. +type InboundMessage_ImportResponse_ImportSuccess struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The text of the stylesheet. Mandatory. + Contents string `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + // The syntax of `contents`. Mandatory. + Syntax Syntax `protobuf:"varint,2,opt,name=syntax,proto3,enum=sass.embedded_protocolv1.Syntax" json:"syntax,omitempty"` + // An absolute, browser-accessible URL indicating the resolved location of + // the imported stylesheet. Optional. + // + // This should be a `file:` URL if one is available, but an `http:` URL is + // acceptable as well. If no URL is supplied, a `data:` URL is generated + // automatically from `contents`. 
+ // + // If this is provided, it must be an absolute URL, including scheme. + SourceMapUrl string `protobuf:"bytes,3,opt,name=source_map_url,json=sourceMapUrl,proto3" json:"source_map_url,omitempty"` +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) Reset() { + *x = InboundMessage_ImportResponse_ImportSuccess{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_ImportResponse_ImportSuccess) ProtoMessage() {} + +func (x *InboundMessage_ImportResponse_ImportSuccess) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_ImportResponse_ImportSuccess.ProtoReflect.Descriptor instead. +func (*InboundMessage_ImportResponse_ImportSuccess) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{0, 3, 0} +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetContents() string { + if x != nil { + return x.Contents + } + return "" +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetSyntax() Syntax { + if x != nil { + return x.Syntax + } + return Syntax_SCSS +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetSourceMapUrl() string { + if x != nil { + return x.SourceMapUrl + } + return "" +} + +// A response that contains the version of the embedded compiler. +type OutboundMessage_VersionResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This version request's id. Mandatory. 
+ Id uint32 `protobuf:"varint,5,opt,name=id,proto3" json:"id,omitempty"` + // The version of the embedded protocol, in semver format. + ProtocolVersion string `protobuf:"bytes,1,opt,name=protocol_version,json=protocolVersion,proto3" json:"protocol_version,omitempty"` + // The version of the embedded compiler package. This has no guaranteed + // format, although compilers are encouraged to use semver. + CompilerVersion string `protobuf:"bytes,2,opt,name=compiler_version,json=compilerVersion,proto3" json:"compiler_version,omitempty"` + // The version of the Sass implementation that the embedded compiler wraps. + // This has no guaranteed format, although Sass implementations are + // encouraged to use semver. + ImplementationVersion string `protobuf:"bytes,3,opt,name=implementation_version,json=implementationVersion,proto3" json:"implementation_version,omitempty"` + // The name of the Sass implementation that the embedded compiler wraps. + ImplementationName string `protobuf:"bytes,4,opt,name=implementation_name,json=implementationName,proto3" json:"implementation_name,omitempty"` +} + +func (x *OutboundMessage_VersionResponse) Reset() { + *x = OutboundMessage_VersionResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_VersionResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_VersionResponse) ProtoMessage() {} + +func (x *OutboundMessage_VersionResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_VersionResponse.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_VersionResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *OutboundMessage_VersionResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_VersionResponse) GetProtocolVersion() string { + if x != nil { + return x.ProtocolVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetCompilerVersion() string { + if x != nil { + return x.CompilerVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetImplementationVersion() string { + if x != nil { + return x.ImplementationVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetImplementationName() string { + if x != nil { + return x.ImplementationName + } + return "" +} + +// A response that contains the result of a compilation. +type OutboundMessage_CompileResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The compilation's request id. Mandatory. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The success or failure result of the compilation. Mandatory. 
+ // + // Types that are assignable to Result: + // + // *OutboundMessage_CompileResponse_Success + // *OutboundMessage_CompileResponse_Failure + Result isOutboundMessage_CompileResponse_Result `protobuf_oneof:"result"` +} + +func (x *OutboundMessage_CompileResponse) Reset() { + *x = OutboundMessage_CompileResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CompileResponse.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_CompileResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 1} +} + +func (x *OutboundMessage_CompileResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *OutboundMessage_CompileResponse) GetResult() isOutboundMessage_CompileResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *OutboundMessage_CompileResponse) GetSuccess() *OutboundMessage_CompileResponse_CompileSuccess { + if x, ok := x.GetResult().(*OutboundMessage_CompileResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *OutboundMessage_CompileResponse) GetFailure() *OutboundMessage_CompileResponse_CompileFailure { + if x, ok := x.GetResult().(*OutboundMessage_CompileResponse_Failure); ok { + return x.Failure + } + return nil +} + +type isOutboundMessage_CompileResponse_Result interface { + isOutboundMessage_CompileResponse_Result() +} + +type OutboundMessage_CompileResponse_Success struct { + // The result of a successful compilation. + Success *OutboundMessage_CompileResponse_CompileSuccess `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type OutboundMessage_CompileResponse_Failure struct { + // The result of a failed compilation. + Failure *OutboundMessage_CompileResponse_CompileFailure `protobuf:"bytes,3,opt,name=failure,proto3,oneof"` +} + +func (*OutboundMessage_CompileResponse_Success) isOutboundMessage_CompileResponse_Result() {} + +func (*OutboundMessage_CompileResponse_Failure) isOutboundMessage_CompileResponse_Result() {} + +// An event indicating that a message should be displayed to the user. +type OutboundMessage_LogEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The request id for the compilation that triggered the message. Mandatory. 
+ CompilationId uint32 `protobuf:"varint,1,opt,name=compilation_id,json=compilationId,proto3" json:"compilation_id,omitempty"` + Type LogEventType `protobuf:"varint,2,opt,name=type,proto3,enum=sass.embedded_protocolv1.LogEventType" json:"type,omitempty"` + // The text of the message. + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + // The span associated with this message. Optional. + Span *SourceSpan `protobuf:"bytes,4,opt,name=span,proto3" json:"span,omitempty"` + // The stack trace associated with this message. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + StackTrace string `protobuf:"bytes,5,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + // A formatted, human-readable string that contains the message, span (if + // available), and trace (if available). The format of this string is not + // specified and is likely to be inconsistent between implementations. 
+ Formatted string `protobuf:"bytes,6,opt,name=formatted,proto3" json:"formatted,omitempty"` +} + +func (x *OutboundMessage_LogEvent) Reset() { + *x = OutboundMessage_LogEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_LogEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_LogEvent) ProtoMessage() {} + +func (x *OutboundMessage_LogEvent) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_LogEvent.ProtoReflect.Descriptor instead. +func (*OutboundMessage_LogEvent) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 2} +} + +func (x *OutboundMessage_LogEvent) GetCompilationId() uint32 { + if x != nil { + return x.CompilationId + } + return 0 +} + +func (x *OutboundMessage_LogEvent) GetType() LogEventType { + if x != nil { + return x.Type + } + return LogEventType_WARNING +} + +func (x *OutboundMessage_LogEvent) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *OutboundMessage_LogEvent) GetSpan() *SourceSpan { + if x != nil { + return x.Span + } + return nil +} + +func (x *OutboundMessage_LogEvent) GetStackTrace() string { + if x != nil { + return x.StackTrace + } + return "" +} + +func (x *OutboundMessage_LogEvent) GetFormatted() string { + if x != nil { + return x.Formatted + } + return "" +} + +// A request for a custom importer to convert an imported URL to its canonical +// format. 
+// +// If the URL is not recognized by this importer, or if no stylesheet is found +// at that URL, `CanonicalizeResponse.result` must be `null`. Otherwise, the +// importer must return an absolute URL, including a scheme. +// +// > The host's documentation should encourage the use of file importers (via +// > `CompileRequest.Importer.file_importer_id`, `FileImportRequest`, and +// > `FileImportResponse`) for any importers that simply refer to files on +// > disk. This will allow Sass to handle the logic of resolving partials, +// > file extensions, and index files. +// +// If Sass has already loaded a stylesheet with the returned canonical URL, it +// re-uses the existing parse tree. This means that importers must ensure that +// the same canonical URL always refers to the same stylesheet, *even across +// different importers*. Importers must also ensure that any canonicalized +// URLs they return can be passed back to `CanonicalizeRequest` and will be +// returned unchanged. +// +// If this importer's URL format supports file extensions, it should +// canonicalize them the same way as the default filesystem importer: +// +// - The importer should look for stylesheets by adding the prefix `_` to the +// URL's basename, and by adding the extensions `.sass` and `.scss` if the +// URL doesn't already have one of those extensions. For example, if the URL +// was `foo/bar/baz`, the importer would look for: +// +// - `foo/bar/baz.sass` +// +// - `foo/bar/baz.scss` +// +// - `foo/bar/_baz.sass` +// +// - `foo/bar/_baz.scss` +// +// If the URL was foo/bar/baz.scss, the importer would just look for: +// +// - `foo/bar/baz.scss` +// +// - `foo/bar/_baz.scss` +// +// If the importer finds a stylesheet at more than one of these URLs, it +// should respond with a `CanonicalizeResponse.result.error` indicating that +// the import is ambiguous. Note that if the extension is explicitly +// specified, a stylesheet with another extension may exist without error. 
+// +// - If none of the possible paths is valid, the importer should perform the +// same resolution on the URL followed by `/index`. In the example above, it +// would look for: +// +// - `foo/bar/baz/_index.sass` +// +// - `foo/bar/baz/index.sass` +// +// - `foo/bar/baz/_index.scss` +// +// - `foo/bar/baz/index.scss` +// +// As above, if the importer finds a stylesheet at more than one of these +// URLs, it should respond with a `CanonicalizeResponse.result.error` +// indicating that the import is ambiguous. +type OutboundMessage_CanonicalizeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The request id for the compilation that triggered the message. Mandatory. + CompilationId uint32 `protobuf:"varint,2,opt,name=compilation_id,json=compilationId,proto3" json:"compilation_id,omitempty"` + // The unique ID of the importer being invoked. This must match an importer + // ID passed to this compilation in `CompileRequest.importers` or + // `CompileRequest.input.string.importer`. Mandatory. + ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The URL of the import to be canonicalized. This may be either absolute or + // relative. + // + // When loading a URL, the compiler must first try resolving that URL + // relative to the canonical URL of the current file, and canonicalizing the + // result using the importer that loaded the current file. If this returns + // `null`, the compiler must then try canonicalizing the original URL with + // each importer in order until one returns something other than `null`. + // That is the result of the import. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // / Whether this request comes from an `@import` rule. 
+ // / + // / When evaluating `@import` rules, URLs should canonicalize to an + // / [import-only file] if one exists for the URL being canonicalized. + // / Otherwise, canonicalization should be identical for `@import` and `@use` + // / rules. + // / + // / [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + FromImport bool `protobuf:"varint,5,opt,name=from_import,json=fromImport,proto3" json:"from_import,omitempty"` +} + +func (x *OutboundMessage_CanonicalizeRequest) Reset() { + *x = OutboundMessage_CanonicalizeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CanonicalizeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CanonicalizeRequest) ProtoMessage() {} + +func (x *OutboundMessage_CanonicalizeRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CanonicalizeRequest.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_CanonicalizeRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 3} +} + +func (x *OutboundMessage_CanonicalizeRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_CanonicalizeRequest) GetCompilationId() uint32 { + if x != nil { + return x.CompilationId + } + return 0 +} + +func (x *OutboundMessage_CanonicalizeRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_CanonicalizeRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *OutboundMessage_CanonicalizeRequest) GetFromImport() bool { + if x != nil { + return x.FromImport + } + return false +} + +// A request for a custom importer to load the contents of a stylesheet. +type OutboundMessage_ImportRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The request id for the compilation that triggered the message. Mandatory. + CompilationId uint32 `protobuf:"varint,2,opt,name=compilation_id,json=compilationId,proto3" json:"compilation_id,omitempty"` + // The unique ID of the importer being invoked. This must match an + // `Importer.importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + // Mandatory. + ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The canonical URL of the import. This is guaranteed to be a URL returned + // by a `CanonicalizeRequest` to this importer. 
+ Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` +} + +func (x *OutboundMessage_ImportRequest) Reset() { + *x = OutboundMessage_ImportRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_ImportRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_ImportRequest) ProtoMessage() {} + +func (x *OutboundMessage_ImportRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_ImportRequest.ProtoReflect.Descriptor instead. +func (*OutboundMessage_ImportRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 4} +} + +func (x *OutboundMessage_ImportRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_ImportRequest) GetCompilationId() uint32 { + if x != nil { + return x.CompilationId + } + return 0 +} + +func (x *OutboundMessage_ImportRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_ImportRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +// A request for a custom filesystem importer to load the contents of a +// stylesheet. +// +// A filesystem importer is represented in the compiler as an [importer]. When +// the importer is invoked with a string `string`: +// +// * If `string` is an absolute URL whose scheme is `file`: +// +// - Let `url` be string. +// +// * Otherwise: +// +// - Let `fromImport` be `true` if the importer is being run for an +// `@import` and `false` otherwise. 
+// +// - Let `response` be the result of sending a `FileImportRequest` with +// `string` as its `url` and `fromImport` as `from_import`. +// +// - If `response.result` is null, return null. +// +// - Otherwise, if `response.result.error` is set, throw an error. +// +// - Otherwise, let `url` be `response.result.file_url`. +// +// * Let `resolved` be the result of [resolving `url`]. +// +// * If `resolved` is null, return null. +// +// * Let `text` be the contents of the file at `resolved`. +// +// * Let `syntax` be: +// +// - "scss" if `url` ends in `.scss`. +// +// - "indented" if `url` ends in `.sass`. +// +// - "css" if `url` ends in `.css`. +// +// > The algorithm for resolving a `file:` URL guarantees that `url` will have +// > one of these extensions. +// +// * Return `text`, `syntax`, and `resolved`. +// +// [importer]: https://github.com/sass/sass/tree/main/spec/modules.md#importer +// [resolving `url`]: https://github.com/sass/sass/tree/main/spec/modules.md#resolving-a-file-url +type OutboundMessage_FileImportRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The request id for the compilation that triggered the message. Mandatory. + CompilationId uint32 `protobuf:"varint,2,opt,name=compilation_id,json=compilationId,proto3" json:"compilation_id,omitempty"` + // The unique ID of the importer being invoked. This must match an + // `Importer.file_importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + // Mandatory. + ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The (non-canonicalized) URL of the import. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // / Whether this request comes from an `@import` rule. 
+ // / + // / When evaluating `@import` rules, filesystem importers should load an + // / [import-only file] if one exists for the URL being canonicalized. + // / Otherwise, canonicalization should be identical for `@import` and `@use` + // / rules. + // / + // / [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + FromImport bool `protobuf:"varint,5,opt,name=from_import,json=fromImport,proto3" json:"from_import,omitempty"` +} + +func (x *OutboundMessage_FileImportRequest) Reset() { + *x = OutboundMessage_FileImportRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_FileImportRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_FileImportRequest) ProtoMessage() {} + +func (x *OutboundMessage_FileImportRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_FileImportRequest.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_FileImportRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 5} +} + +func (x *OutboundMessage_FileImportRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_FileImportRequest) GetCompilationId() uint32 { + if x != nil { + return x.CompilationId + } + return 0 +} + +func (x *OutboundMessage_FileImportRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_FileImportRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *OutboundMessage_FileImportRequest) GetFromImport() bool { + if x != nil { + return x.FromImport + } + return false +} + +// A request to invoke a custom Sass function and return its result. +type OutboundMessage_FunctionCallRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The request id for the compilation that triggered the message. Mandatory. + CompilationId uint32 `protobuf:"varint,2,opt,name=compilation_id,json=compilationId,proto3" json:"compilation_id,omitempty"` + // An identifier that indicates which function to invoke. Mandatory. + // + // Types that are assignable to Identifier: + // + // *OutboundMessage_FunctionCallRequest_Name + // *OutboundMessage_FunctionCallRequest_FunctionId + Identifier isOutboundMessage_FunctionCallRequest_Identifier `protobuf_oneof:"identifier"` + // The arguments passed to the function, in the order they appear in the + // function signature passed to `CompileRequest.global_functions`. Mandatory. 
+ // + // The compiler must ensure that a valid number of arguments are passed for + // the given signature, that default argument values are instantiated + // appropriately, and that variable argument lists (`$args...`) are passed + // as `Value.ArgumentList`s. + Arguments []*Value `protobuf:"bytes,5,rep,name=arguments,proto3" json:"arguments,omitempty"` +} + +func (x *OutboundMessage_FunctionCallRequest) Reset() { + *x = OutboundMessage_FunctionCallRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_FunctionCallRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_FunctionCallRequest) ProtoMessage() {} + +func (x *OutboundMessage_FunctionCallRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_FunctionCallRequest.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_FunctionCallRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 6} +} + +func (x *OutboundMessage_FunctionCallRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_FunctionCallRequest) GetCompilationId() uint32 { + if x != nil { + return x.CompilationId + } + return 0 +} + +func (m *OutboundMessage_FunctionCallRequest) GetIdentifier() isOutboundMessage_FunctionCallRequest_Identifier { + if m != nil { + return m.Identifier + } + return nil +} + +func (x *OutboundMessage_FunctionCallRequest) GetName() string { + if x, ok := x.GetIdentifier().(*OutboundMessage_FunctionCallRequest_Name); ok { + return x.Name + } + return "" +} + +func (x *OutboundMessage_FunctionCallRequest) GetFunctionId() uint32 { + if x, ok := x.GetIdentifier().(*OutboundMessage_FunctionCallRequest_FunctionId); ok { + return x.FunctionId + } + return 0 +} + +func (x *OutboundMessage_FunctionCallRequest) GetArguments() []*Value { + if x != nil { + return x.Arguments + } + return nil +} + +type isOutboundMessage_FunctionCallRequest_Identifier interface { + isOutboundMessage_FunctionCallRequest_Identifier() +} + +type OutboundMessage_FunctionCallRequest_Name struct { + // The name of the function to invoke. + // + // This must match the name of a function signature the host passed to the + // corresponding `CompileRequest.global_functions` call, including hyphens + // and underscores. + Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"` +} + +type OutboundMessage_FunctionCallRequest_FunctionId struct { + // The opaque ID of the function to invoke. + // + // This must match the ID of a `Value.HostFunction` that the host passed + // to the compiler. 
+ FunctionId uint32 `protobuf:"varint,4,opt,name=function_id,json=functionId,proto3,oneof"` +} + +func (*OutboundMessage_FunctionCallRequest_Name) isOutboundMessage_FunctionCallRequest_Identifier() {} + +func (*OutboundMessage_FunctionCallRequest_FunctionId) isOutboundMessage_FunctionCallRequest_Identifier() { +} + +// A message indicating that the Sass file was successfully compiled to CSS. +type OutboundMessage_CompileResponse_CompileSuccess struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The compiled CSS. + Css string `protobuf:"bytes,1,opt,name=css,proto3" json:"css,omitempty"` + // The JSON-encoded source map, or the empty string if + // `CompileRequest.source_map` was `false`. + // + // The compiler must not add a `"file"` key to this source map. It's the + // host's (or the host's user's) responsibility to determine how the + // generated CSS can be reached from the source map. + SourceMap string `protobuf:"bytes,2,opt,name=source_map,json=sourceMap,proto3" json:"source_map,omitempty"` + // The canonical URLs of all source files loaded during the compilation. + // + // The compiler must ensure that each canonical URL appears only once in + // this list. This must include the entrypoint file's URL if either + // `CompileRequest.input.path` or `CompileRequest.StringInput.url` was + // passed. 
+ LoadedUrls []string `protobuf:"bytes,3,rep,name=loaded_urls,json=loadedUrls,proto3" json:"loaded_urls,omitempty"` +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) Reset() { + *x = OutboundMessage_CompileResponse_CompileSuccess{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse_CompileSuccess) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CompileResponse_CompileSuccess.ProtoReflect.Descriptor instead. +func (*OutboundMessage_CompileResponse_CompileSuccess) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 1, 0} +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) GetCss() string { + if x != nil { + return x.Css + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) GetSourceMap() string { + if x != nil { + return x.SourceMap + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) GetLoadedUrls() []string { + if x != nil { + return x.LoadedUrls + } + return nil +} + +// A message indicating that the Sass file could not be successfully +// compiled to CSS. +type OutboundMessage_CompileResponse_CompileFailure struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A message describing the reason for the failure. 
+ Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + // The span associated with the failure. Mandatory. + Span *SourceSpan `protobuf:"bytes,2,opt,name=span,proto3" json:"span,omitempty"` + // The stack trace associated with the failure. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + StackTrace string `protobuf:"bytes,3,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + // A formatted, human-readable string that contains the message, span + // (if available), and trace (if available). The format of this string is + // not specified and is likely to be inconsistent between implementations. + Formatted string `protobuf:"bytes,4,opt,name=formatted,proto3" json:"formatted,omitempty"` +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) Reset() { + *x = OutboundMessage_CompileResponse_CompileFailure{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse_CompileFailure) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse_CompileFailure) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CompileResponse_CompileFailure.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_CompileResponse_CompileFailure) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{1, 1, 1} +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetSpan() *SourceSpan { + if x != nil { + return x.Span + } + return nil +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetStackTrace() string { + if x != nil { + return x.StackTrace + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetFormatted() string { + if x != nil { + return x.Formatted + } + return "" +} + +// A single point in a source file. +type SourceSpan_SourceLocation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The 0-based offset of this location within the source file. Mandatory. + Offset uint32 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"` + // The 0-based line number of this location within the source file. + // Mandatory. + Line uint32 `protobuf:"varint,2,opt,name=line,proto3" json:"line,omitempty"` + // The 0-based column number of this location within its line. Mandatory. 
+ Column uint32 `protobuf:"varint,3,opt,name=column,proto3" json:"column,omitempty"` +} + +func (x *SourceSpan_SourceLocation) Reset() { + *x = SourceSpan_SourceLocation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceSpan_SourceLocation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceSpan_SourceLocation) ProtoMessage() {} + +func (x *SourceSpan_SourceLocation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceSpan_SourceLocation.ProtoReflect.Descriptor instead. +func (*SourceSpan_SourceLocation) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{3, 0} +} + +func (x *SourceSpan_SourceLocation) GetOffset() uint32 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *SourceSpan_SourceLocation) GetLine() uint32 { + if x != nil { + return x.Line + } + return 0 +} + +func (x *SourceSpan_SourceLocation) GetColumn() uint32 { + if x != nil { + return x.Column + } + return 0 +} + +// A SassScript string value. +type Value_String struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The contents of the string. Mandatory. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Whether the string is quoted or unquoted. Mandatory. 
+ Quoted bool `protobuf:"varint,2,opt,name=quoted,proto3" json:"quoted,omitempty"` +} + +func (x *Value_String) Reset() { + *x = Value_String{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_String) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_String) ProtoMessage() {} + +func (x *Value_String) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_String.ProtoReflect.Descriptor instead. +func (*Value_String) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 0} +} + +func (x *Value_String) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +func (x *Value_String) GetQuoted() bool { + if x != nil { + return x.Quoted + } + return false +} + +// A SassScript number value. +type Value_Number struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The number's numeric value. Mandatory. + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + // The number's numerator units. + // + // The endpoint sending the number must ensure that no numerator units are + // [compatible][] with any denominator units. Such compatible units must be + // simplified away according to the multiplicative factor between them + // defined in the CSS Values and Units spec. + // + // [compatible]: https://www.w3.org/TR/css-values-4/#compat + Numerators []string `protobuf:"bytes,2,rep,name=numerators,proto3" json:"numerators,omitempty"` + // The number's denominator units. 
+ Denominators []string `protobuf:"bytes,3,rep,name=denominators,proto3" json:"denominators,omitempty"` +} + +func (x *Value_Number) Reset() { + *x = Value_Number{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Number) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Number) ProtoMessage() {} + +func (x *Value_Number) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Number.ProtoReflect.Descriptor instead. +func (*Value_Number) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 1} +} + +func (x *Value_Number) GetValue() float64 { + if x != nil { + return x.Value + } + return 0 +} + +func (x *Value_Number) GetNumerators() []string { + if x != nil { + return x.Numerators + } + return nil +} + +func (x *Value_Number) GetDenominators() []string { + if x != nil { + return x.Denominators + } + return nil +} + +// A SassScript color value, represented as red, green, and blue channels. +// +// All Sass color values can be equivalently represented as `RgbColor`, +// `HslColor`, and `HwbColor` messages without loss of color information that +// can affect CSS rendering. As such, either endpoint may choose to send any +// color value as any one of these three messages. +type Value_RgbColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's red channel. Mandatory. May not be above 255. + Red uint32 `protobuf:"varint,1,opt,name=red,proto3" json:"red,omitempty"` + // The color's green channel. Mandatory. 
May not be above 255. + Green uint32 `protobuf:"varint,2,opt,name=green,proto3" json:"green,omitempty"` + // The color's blue channel. Mandatory. May not be above 255. + Blue uint32 `protobuf:"varint,3,opt,name=blue,proto3" json:"blue,omitempty"` + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_RgbColor) Reset() { + *x = Value_RgbColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_RgbColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_RgbColor) ProtoMessage() {} + +func (x *Value_RgbColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_RgbColor.ProtoReflect.Descriptor instead. +func (*Value_RgbColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 2} +} + +func (x *Value_RgbColor) GetRed() uint32 { + if x != nil { + return x.Red + } + return 0 +} + +func (x *Value_RgbColor) GetGreen() uint32 { + if x != nil { + return x.Green + } + return 0 +} + +func (x *Value_RgbColor) GetBlue() uint32 { + if x != nil { + return x.Blue + } + return 0 +} + +func (x *Value_RgbColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript color value, represented as hue, saturation, and lightness channels. +type Value_HslColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's hue. Mandatory. 
+ Hue float64 `protobuf:"fixed64,1,opt,name=hue,proto3" json:"hue,omitempty"` + // The color's percent saturation. Mandatory. Must be between 0 and 100, + // inclusive. + Saturation float64 `protobuf:"fixed64,2,opt,name=saturation,proto3" json:"saturation,omitempty"` + // The color's percent lightness. Mandatory. Must be between 0 and 100, + // inclusive. + Lightness float64 `protobuf:"fixed64,3,opt,name=lightness,proto3" json:"lightness,omitempty"` + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_HslColor) Reset() { + *x = Value_HslColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HslColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HslColor) ProtoMessage() {} + +func (x *Value_HslColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HslColor.ProtoReflect.Descriptor instead. 
+func (*Value_HslColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 3} +} + +func (x *Value_HslColor) GetHue() float64 { + if x != nil { + return x.Hue + } + return 0 +} + +func (x *Value_HslColor) GetSaturation() float64 { + if x != nil { + return x.Saturation + } + return 0 +} + +func (x *Value_HslColor) GetLightness() float64 { + if x != nil { + return x.Lightness + } + return 0 +} + +func (x *Value_HslColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript color value, represented as hue, whiteness, and blackness +// channels. +type Value_HwbColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's hue. Mandatory. + Hue float64 `protobuf:"fixed64,1,opt,name=hue,proto3" json:"hue,omitempty"` + // The color's percent whiteness. Mandatory. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + Whiteness float64 `protobuf:"fixed64,2,opt,name=whiteness,proto3" json:"whiteness,omitempty"` + // The color's percent blackness. Mandatory. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + Blackness float64 `protobuf:"fixed64,3,opt,name=blackness,proto3" json:"blackness,omitempty"` + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. 
+ Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_HwbColor) Reset() { + *x = Value_HwbColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HwbColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HwbColor) ProtoMessage() {} + +func (x *Value_HwbColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HwbColor.ProtoReflect.Descriptor instead. +func (*Value_HwbColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 4} +} + +func (x *Value_HwbColor) GetHue() float64 { + if x != nil { + return x.Hue + } + return 0 +} + +func (x *Value_HwbColor) GetWhiteness() float64 { + if x != nil { + return x.Whiteness + } + return 0 +} + +func (x *Value_HwbColor) GetBlackness() float64 { + if x != nil { + return x.Blackness + } + return 0 +} + +func (x *Value_HwbColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript list value. +type Value_List struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The type of separator for this list. Mandatory. + Separator ListSeparator `protobuf:"varint,1,opt,name=separator,proto3,enum=sass.embedded_protocolv1.ListSeparator" json:"separator,omitempty"` + // Whether this list has square brackets. Mandatory. + HasBrackets bool `protobuf:"varint,2,opt,name=has_brackets,json=hasBrackets,proto3" json:"has_brackets,omitempty"` + // The elements of this list. 
+ Contents []*Value `protobuf:"bytes,3,rep,name=contents,proto3" json:"contents,omitempty"` +} + +func (x *Value_List) Reset() { + *x = Value_List{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_List) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_List) ProtoMessage() {} + +func (x *Value_List) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_List.ProtoReflect.Descriptor instead. +func (*Value_List) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 5} +} + +func (x *Value_List) GetSeparator() ListSeparator { + if x != nil { + return x.Separator + } + return ListSeparator_COMMA +} + +func (x *Value_List) GetHasBrackets() bool { + if x != nil { + return x.HasBrackets + } + return false +} + +func (x *Value_List) GetContents() []*Value { + if x != nil { + return x.Contents + } + return nil +} + +// A SassScript map value. +type Value_Map struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The entries in this map. The sending endpoint must guarantee that no two + // entries have the same key. 
+ Entries []*Value_Map_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` +} + +func (x *Value_Map) Reset() { + *x = Value_Map{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Map) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Map) ProtoMessage() {} + +func (x *Value_Map) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Map.ProtoReflect.Descriptor instead. +func (*Value_Map) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 6} +} + +func (x *Value_Map) GetEntries() []*Value_Map_Entry { + if x != nil { + return x.Entries + } + return nil +} + +// A first-class function defined in the compiler. New `CompilerFunction`s may +// only be created by the compiler, but the host may pass `CompilerFunction`s +// back to the compiler as long as their IDs match IDs of functions received +// by the host during that same compilation. +type Value_CompilerFunction struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A unique ID for this function. The compiler is responsible for generating + // this ID and ensuring it's unique across all functions passed to the host + // for this compilation. Mandatory. 
+ Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *Value_CompilerFunction) Reset() { + *x = Value_CompilerFunction{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_CompilerFunction) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_CompilerFunction) ProtoMessage() {} + +func (x *Value_CompilerFunction) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_CompilerFunction.ProtoReflect.Descriptor instead. +func (*Value_CompilerFunction) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 7} +} + +func (x *Value_CompilerFunction) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +// An anonymous custom function defined in the host. New `HostFunction`s may +// only be created by the host, and `HostFunction`s may *never* be passed from +// the compiler to the host. The compiler must instead pass a +// `CompilerFunction` that wraps the `HostFunction`. +type Value_HostFunction struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A unique ID for this function. The compiler must pass this ID as + // `OutboundRequest.FunctionCallRequest.id` when invoking this function. The + // host is responsible for generating this ID and ensuring it's unique + // across all functions for *all* compilations. Mandatory. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The signature for this function. Mandatory. 
+ // + // If this isn't a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), the compiler must treat the function's return value as invalid. + // + // > This ensures that the host doesn't need to be able to correctly parse + // > the entire function declaration syntax. + // + // The compiler may not invoke the function by its name, since it's not + // guaranteed to be globally unique. However, it may use the name to + // generate the string representation of this function. + Signature string `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` +} + +func (x *Value_HostFunction) Reset() { + *x = Value_HostFunction{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HostFunction) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HostFunction) ProtoMessage() {} + +func (x *Value_HostFunction) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HostFunction.ProtoReflect.Descriptor instead. +func (*Value_HostFunction) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 8} +} + +func (x *Value_HostFunction) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Value_HostFunction) GetSignature() string { + if x != nil { + return x.Signature + } + return "" +} + +// A SassScript argument list value. This represents rest arguments passed to +// a function's `$arg...` parameter. 
Unlike a normal `List`, an argument list +// has an associated keywords map which tracks keyword arguments passed in +// alongside positional arguments. +// +// For each `ArgumentList` in `FunctionCallRequest.arguments` (including those +// nested within `List`s and `Map`s), the host must track whether its keyword +// arguments were accessed by the user. If they were, it must add its +// `ArgumentList.id` to `FunctionCallResponse.accessed_argument_lists`. +// +// The compiler must treat every `ArgumentList` whose `ArgumentList.id` +// appears in `FunctionCallResponse.accessed_argument_lists` as though it had +// been passed to `meta.keywords()`. +type Value_ArgumentList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // An ID for this argument list that's unique within the scope of a given + // `FunctionCallRequest`. + // + // The special ID `0` is reserved for `ArgumentList`s created by the host, + // and may not be used by the compiler. These `ArgumentList`s do not need to + // have their IDs added to `FunctionCallResponse.accessed_argument_lists`, + // and the compiler should treat them as though their keywords have always + // been accessed. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The type of separator for this list. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + Separator ListSeparator `protobuf:"varint,2,opt,name=separator,proto3,enum=sass.embedded_protocolv1.ListSeparator" json:"separator,omitempty"` + // The argument list's positional contents. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + Contents []*Value `protobuf:"bytes,3,rep,name=contents,proto3" json:"contents,omitempty"` + // The argument list's keywords. 
The compiler must set this, but the host + // may omit it for `ArgumentList`s that were originally created by the + // compiler (that is, those with a non-0 ID). + Keywords map[string]*Value `protobuf:"bytes,4,rep,name=keywords,proto3" json:"keywords,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *Value_ArgumentList) Reset() { + *x = Value_ArgumentList{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_ArgumentList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_ArgumentList) ProtoMessage() {} + +func (x *Value_ArgumentList) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[33] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_ArgumentList.ProtoReflect.Descriptor instead. +func (*Value_ArgumentList) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 9} +} + +func (x *Value_ArgumentList) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Value_ArgumentList) GetSeparator() ListSeparator { + if x != nil { + return x.Separator + } + return ListSeparator_COMMA +} + +func (x *Value_ArgumentList) GetContents() []*Value { + if x != nil { + return x.Contents + } + return nil +} + +func (x *Value_ArgumentList) GetKeywords() map[string]*Value { + if x != nil { + return x.Keywords + } + return nil +} + +// A SassScript calculation value. The compiler must send fully [simplified] +// calculations, meaning that simplifying it again will produce the same +// calculation. The host is not required to simplify calculations. 
+// +// The compiler must simplify any calculations it receives from the host +// before returning them from a function. If this simplification produces an +// error, it should be treated as though the function call threw that error. +// It should *not* be treated as a protocol error. +// +// [simplified]: https://github.com/sass/sass/tree/main/spec/types/calculation.md#simplifying-a-calculation +type Value_Calculation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The calculation's name. Mandatory. The host may only set this to names + // that the Sass specification uses to create calculations. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The calculation's arguments. Mandatory. The host must use exactly the + // number of arguments used by the Sass specification for calculations with + // the given `name`. + Arguments []*Value_Calculation_CalculationValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"` +} + +func (x *Value_Calculation) Reset() { + *x = Value_Calculation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation) ProtoMessage() {} + +func (x *Value_Calculation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 10} +} + +func (x *Value_Calculation) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Value_Calculation) GetArguments() []*Value_Calculation_CalculationValue { + if x != nil { + return x.Arguments + } + return nil +} + +// A single key/value pair in the map. +type Value_Map_Entry struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The key this entry is associated with. Mandatory. + Key *Value `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value associated with this key. Mandatory. + Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *Value_Map_Entry) Reset() { + *x = Value_Map_Entry{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Map_Entry) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Map_Entry) ProtoMessage() {} + +func (x *Value_Map_Entry) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Map_Entry.ProtoReflect.Descriptor instead. +func (*Value_Map_Entry) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 6, 0} +} + +func (x *Value_Map_Entry) GetKey() *Value { + if x != nil { + return x.Key + } + return nil +} + +func (x *Value_Map_Entry) GetValue() *Value { + if x != nil { + return x.Value + } + return nil +} + +// A single component of a calculation expression. 
+type Value_Calculation_CalculationValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The value of the component. Mandatory. + // + // Types that are assignable to Value: + // + // *Value_Calculation_CalculationValue_Number + // *Value_Calculation_CalculationValue_String_ + // *Value_Calculation_CalculationValue_Interpolation + // *Value_Calculation_CalculationValue_Operation + // *Value_Calculation_CalculationValue_Calculation + Value isValue_Calculation_CalculationValue_Value `protobuf_oneof:"value"` +} + +func (x *Value_Calculation_CalculationValue) Reset() { + *x = Value_Calculation_CalculationValue{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation_CalculationValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation_CalculationValue) ProtoMessage() {} + +func (x *Value_Calculation_CalculationValue) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation_CalculationValue.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation_CalculationValue) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 10, 0} +} + +func (m *Value_Calculation_CalculationValue) GetValue() isValue_Calculation_CalculationValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetNumber() *Value_Number { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Number); ok { + return x.Number + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetString_() string { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_String_); ok { + return x.String_ + } + return "" +} + +func (x *Value_Calculation_CalculationValue) GetInterpolation() string { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Interpolation); ok { + return x.Interpolation + } + return "" +} + +func (x *Value_Calculation_CalculationValue) GetOperation() *Value_Calculation_CalculationOperation { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Operation); ok { + return x.Operation + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetCalculation() *Value_Calculation { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Calculation); ok { + return x.Calculation + } + return nil +} + +type isValue_Calculation_CalculationValue_Value interface { + isValue_Calculation_CalculationValue_Value() +} + +type Value_Calculation_CalculationValue_Number struct { + Number *Value_Number `protobuf:"bytes,1,opt,name=number,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_String_ struct { + // An unquoted string, as from a function like `var()` or `env()`. + String_ string `protobuf:"bytes,2,opt,name=string,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Interpolation struct { + // An unquoted string as created by interpolation for + // backwards-compatibility with older Sass syntax. 
+ Interpolation string `protobuf:"bytes,3,opt,name=interpolation,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Operation struct { + Operation *Value_Calculation_CalculationOperation `protobuf:"bytes,4,opt,name=operation,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Calculation struct { + Calculation *Value_Calculation `protobuf:"bytes,5,opt,name=calculation,proto3,oneof"` +} + +func (*Value_Calculation_CalculationValue_Number) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_String_) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_Interpolation) isValue_Calculation_CalculationValue_Value() { +} + +func (*Value_Calculation_CalculationValue_Operation) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_Calculation) isValue_Calculation_CalculationValue_Value() {} + +// A binary operation that appears in a calculation. +type Value_Calculation_CalculationOperation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The operator to perform. + Operator CalculationOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=sass.embedded_protocolv1.CalculationOperator" json:"operator,omitempty"` + // The left-hand side of the operation. + Left *Value_Calculation_CalculationValue `protobuf:"bytes,2,opt,name=left,proto3" json:"left,omitempty"` + // The right-hand side of the operation. 
+ Right *Value_Calculation_CalculationValue `protobuf:"bytes,3,opt,name=right,proto3" json:"right,omitempty"` +} + +func (x *Value_Calculation_CalculationOperation) Reset() { + *x = Value_Calculation_CalculationOperation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_v1_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation_CalculationOperation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation_CalculationOperation) ProtoMessage() {} + +func (x *Value_Calculation_CalculationOperation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_v1_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation_CalculationOperation.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation_CalculationOperation) Descriptor() ([]byte, []int) { + return file_embedded_sass_v1_proto_rawDescGZIP(), []int{4, 10, 1} +} + +func (x *Value_Calculation_CalculationOperation) GetOperator() CalculationOperator { + if x != nil { + return x.Operator + } + return CalculationOperator_PLUS +} + +func (x *Value_Calculation_CalculationOperation) GetLeft() *Value_Calculation_CalculationValue { + if x != nil { + return x.Left + } + return nil +} + +func (x *Value_Calculation_CalculationOperation) GetRight() *Value_Calculation_CalculationValue { + if x != nil { + return x.Right + } + return nil +} + +var File_embedded_sass_v1_proto protoreflect.FileDescriptor + +var file_embedded_sass_v1_proto_rawDesc = []byte{ + 0x0a, 0x16, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x73, 0x61, 0x73, 0x73, 0x5f, + 0x76, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x18, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x76, 0x31, 0x22, 0xa9, 0x12, 0x0a, 0x0e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x62, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, + 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x74, 0x0a, 0x15, 0x63, 0x61, 0x6e, + 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x73, 0x61, 0x73, 
0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x43, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x14, 0x63, 0x61, 0x6e, 0x6f, 0x6e, + 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x62, 0x0a, 0x0f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x48, 0x00, 0x52, 0x0e, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x14, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, + 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, + 0x52, 0x12, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x75, 0x0a, 0x16, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0b, 
0x32, 0x3d, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, + 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x14, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x62, 0x0a, 0x0f, 0x76, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, + 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x56, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, + 0x0e, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x20, 0x0a, 0x0e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, + 0x64, 0x1a, 0xa6, 0x07, 0x0a, 0x0e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x5d, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x43, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, + 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, + 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 
0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x3b, 0x0a, 0x05, 0x73, 0x74, 0x79, + 0x6c, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x74, 0x79, 0x6c, 0x65, 0x52, + 0x05, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x4d, 0x61, 0x70, 0x12, 0x5e, 0x0a, 0x09, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x72, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x52, 0x09, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x72, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, + 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, + 0x0f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x43, 0x6f, 0x6c, 0x6f, + 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x5f, 0x61, 0x73, 0x63, 0x69, 0x69, + 
0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x41, 0x73, 0x63, + 0x69, 0x69, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x62, 0x6f, 0x73, 0x65, 0x18, 0x0a, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x07, 0x76, 0x65, 0x72, 0x62, 0x6f, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, + 0x71, 0x75, 0x69, 0x65, 0x74, 0x5f, 0x64, 0x65, 0x70, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x09, 0x71, 0x75, 0x69, 0x65, 0x74, 0x44, 0x65, 0x70, 0x73, 0x12, 0x3b, 0x0a, 0x1a, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, + 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x61, 0x70, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x64, + 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x72, + 0x73, 0x65, 0x74, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x63, 0x68, 0x61, 0x72, 0x73, + 0x65, 0x74, 0x1a, 0xcf, 0x01, 0x0a, 0x0b, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, + 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, + 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x38, 0x0a, 0x06, + 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x52, 0x06, + 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x5c, 0x0a, 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 
0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x52, 0x08, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x72, 0x1a, 0x7b, 0x0a, 0x08, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, + 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0a, 0x69, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x2a, 0x0a, 0x10, 0x66, 0x69, 0x6c, + 0x65, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0e, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x72, 0x49, 0x64, 0x42, 0x0a, 0x0a, 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x72, 0x42, 0x07, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x1a, 0x5c, 0x0a, 0x14, 0x43, 0x61, + 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, + 0x69, 0x64, 0x12, 0x12, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x08, + 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0xb3, 0x02, 0x0a, 0x0e, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x61, 0x0a, 0x07, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 
0x32, 0x45, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x75, 0x63, 0x63, + 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x16, + 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x1a, 0x8b, 0x01, 0x0a, 0x0d, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x73, 0x12, 0x38, 0x0a, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, + 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x52, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x24, + 0x0a, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x5f, 0x75, 0x72, 0x6c, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x61, + 0x70, 0x55, 0x72, 0x6c, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x63, + 0x0a, 0x12, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x6c, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, + 0x6c, 0x12, 
0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x1a, 0xbd, 0x01, 0x0a, 0x14, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x07, + 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, + 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x12, 0x36, 0x0a, 0x17, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, 0x61, 0x72, + 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0d, 0x52, 0x15, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0xb7, + 0x13, 0x0a, 0x0f, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x12, 0x3f, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x12, 0x66, 0x0a, 0x10, 0x63, 
0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x5f, 0x72, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, + 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x09, 0x6c, + 0x6f, 0x67, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, + 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x4c, 0x6f, 0x67, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x48, 0x00, 0x52, 0x08, 0x6c, 0x6f, 0x67, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x72, + 0x0a, 0x14, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x5f, 0x72, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, + 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x13, 0x63, + 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x60, 0x0a, 0x0e, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x37, 0x2e, 0x73, 
0x61, 0x73, + 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x6d, 0x0a, 0x13, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, + 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, 0x69, 0x6c, + 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, + 0x52, 0x11, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x73, 0x0a, 0x15, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, + 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x48, 0x00, 0x52, 0x13, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, + 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x66, 0x0a, 0x10, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x08, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x39, 
0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, + 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, + 0x0f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x1a, 0xdf, 0x01, 0x0a, 0x0f, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x29, 0x0a, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x16, 0x69, 0x6d, + 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x69, 0x6d, 0x70, 0x6c, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x12, 0x2f, 0x0a, 0x13, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, + 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, + 0x6d, 0x65, 0x1a, 0x81, 0x04, 0x0a, 0x0f, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 
0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x64, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x48, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x48, 0x00, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x64, 0x0a, 0x07, + 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x48, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, + 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, + 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x48, 0x00, 0x52, 0x07, 0x66, 0x61, 0x69, 0x6c, 0x75, + 0x72, 0x65, 0x1a, 0x62, 0x0a, 0x0e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x53, 0x75, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x63, 0x73, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x4d, 0x61, 0x70, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x5f, + 0x75, 0x72, 0x6c, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x6c, 0x6f, 0x61, 0x64, + 0x65, 0x64, 0x55, 0x72, 0x6c, 0x73, 0x1a, 0xa3, 0x01, 0x0a, 0x0e, 0x43, 0x6f, 0x6d, 0x70, 0x69, + 
0x6c, 0x65, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x38, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x1f, 0x0a, + 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x12, 0x1c, + 0x0a, 0x09, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x09, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x42, 0x08, 0x0a, 0x06, + 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x80, 0x02, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x3a, 0x0a, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x12, 0x38, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 
0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, + 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x1a, 0xa0, 0x01, 0x0a, 0x13, 0x43, 0x61, + 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x69, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1f, 0x0a, 0x0b, 0x66, + 0x72, 0x6f, 0x6d, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x79, 0x0a, 0x0d, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x25, 0x0a, + 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x69, 
0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, + 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x1a, 0x9e, 0x01, 0x0a, 0x11, 0x46, 0x69, 0x6c, 0x65, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x25, 0x0a, + 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, + 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x72, + 0x6f, 0x6d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0xd2, 0x01, 0x0a, 0x13, 0x46, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x21, 0x0a, + 0x0b, 0x66, 
0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, + 0x12, 0x3d, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x42, + 0x0c, 0x0a, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x42, 0x09, 0x0a, + 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x7a, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x3f, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0xb4, 0x02, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, + 0x70, 0x61, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x49, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, + 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, + 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 
0x53, 0x70, 0x61, 0x6e, 0x2e, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x12, 0x45, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x33, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x53, 0x70, 0x61, 0x6e, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x63, + 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x78, 0x74, 0x1a, 0x54, 0x0a, 0x0e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, + 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x6c, + 0x69, 0x6e, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x22, 0xf0, 0x16, 0x0a, 0x05, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x40, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, + 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, + 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x40, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, + 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 
0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x48, + 0x00, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x47, 0x0a, 0x09, 0x72, 0x67, 0x62, + 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x52, 0x67, + 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x72, 0x67, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x12, 0x47, 0x0a, 0x09, 0x68, 0x73, 0x6c, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, + 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x48, 0x73, 0x6c, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x48, + 0x00, 0x52, 0x08, 0x68, 0x73, 0x6c, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x3a, 0x0a, 0x04, 0x6c, + 0x69, 0x73, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, 0x73, + 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x48, + 0x00, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x37, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x03, 0x6d, 0x61, 0x70, + 0x12, 0x48, 0x0a, 0x09, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x0e, 0x32, 
0x28, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x53, + 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, + 0x09, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x12, 0x5f, 0x0a, 0x11, 0x63, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, + 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x46, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x53, 0x0a, 0x0d, 0x68, + 0x6f, 0x73, 0x74, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x48, 0x6f, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x48, 0x00, 0x52, 0x0c, 0x68, 0x6f, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x53, 0x0a, 0x0d, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x73, + 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0c, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x47, 0x0a, 0x09, 0x68, 
0x77, 0x62, 0x5f, 0x63, 0x6f, 0x6c, + 0x6f, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x48, 0x77, 0x62, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x68, 0x77, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x4f, + 0x0a, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0c, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x48, 0x00, 0x52, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, + 0x34, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x16, 0x0a, + 0x06, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x71, + 0x75, 0x6f, 0x74, 0x65, 0x64, 0x1a, 0x62, 0x0a, 0x06, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x6e, 0x75, 0x6d, 0x65, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x6e, 0x75, 0x6d, 0x65, 0x72, + 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x64, 0x65, 0x6e, 0x6f, 0x6d, 0x69, 0x6e, + 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x64, 0x65, 0x6e, + 0x6f, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x1a, 0x5c, 0x0a, 0x08, 0x52, 0x67, 0x62, + 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, + 
0x28, 0x0d, 0x52, 0x03, 0x72, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, 0x65, 0x65, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x67, 0x72, 0x65, 0x65, 0x6e, 0x12, 0x12, 0x0a, + 0x04, 0x62, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x62, 0x6c, 0x75, + 0x65, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0x70, 0x0a, 0x08, 0x48, 0x73, 0x6c, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x68, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, + 0x52, 0x03, 0x68, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x61, 0x74, 0x75, 0x72, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x73, 0x61, 0x74, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x6e, 0x65, + 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x6e, + 0x65, 0x73, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x01, 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0x6e, 0x0a, 0x08, 0x48, 0x77, 0x62, + 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x68, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x01, 0x52, 0x03, 0x68, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x77, 0x68, 0x69, 0x74, 0x65, + 0x6e, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x77, 0x68, 0x69, 0x74, + 0x65, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x6e, 0x65, + 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x62, 0x6c, 0x61, 0x63, 0x6b, 0x6e, + 0x65, 0x73, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x01, 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0xad, 0x01, 0x0a, 0x04, 0x4c, 0x69, + 0x73, 0x74, 0x12, 0x45, 0x0a, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 
0x27, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x09, + 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x68, 0x61, 0x73, + 0x5f, 0x62, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x0b, 0x68, 0x61, 0x73, 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x3b, 0x0a, 0x08, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xbd, 0x01, 0x0a, 0x03, 0x4d, 0x61, + 0x70, 0x12, 0x43, 0x0a, 0x07, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x4d, 0x61, 0x70, 0x2e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x65, + 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x1a, 0x71, 0x0a, 0x05, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x31, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x35, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 
0x2e, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x22, 0x0a, 0x10, 0x43, 0x6f, 0x6d, + 0x70, 0x69, 0x6c, 0x65, 0x72, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x1a, 0x3c, 0x0a, + 0x0c, 0x48, 0x6f, 0x73, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1c, 0x0a, + 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x1a, 0xd8, 0x02, 0x0a, 0x0c, + 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x45, 0x0a, 0x09, + 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x27, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, + 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, + 0x74, 0x6f, 0x72, 0x12, 0x3b, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, + 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, + 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, + 0x12, 0x56, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 
0x65, 0x2e, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, + 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, + 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x1a, 0x5c, 0x0a, 0x0d, 0x4b, 0x65, 0x79, 0x77, + 0x6f, 0x72, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x35, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x73, 0x61, 0x73, + 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xdc, 0x05, 0x0a, 0x0b, 0x43, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x5a, 0x0a, 0x09, 0x61, 0x72, + 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, + 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xd2, 0x02, 0x0a, 0x10, 0x43, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x40, 0x0a, 0x06, 0x6e, + 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x61, + 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 
0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4e, 0x75, 0x6d, + 0x62, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, 0x0a, + 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x26, 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x70, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x60, 0x0a, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x4f, 0x0a, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, + 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, + 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x87, 0x02, 0x0a, 0x14, + 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x49, 0x0a, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 
0x65, 0x6d, + 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, + 0x31, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, + 0x50, 0x0a, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3c, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, + 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x04, 0x6c, 0x65, 0x66, + 0x74, 0x12, 0x52, 0x0a, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x3c, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, + 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, + 0x72, 0x69, 0x67, 0x68, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x2a, 0x2b, + 0x0a, 0x0b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x74, 0x79, 0x6c, 0x65, 0x12, 0x0c, 0x0a, + 0x08, 0x45, 0x58, 0x50, 0x41, 0x4e, 0x44, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x43, + 0x4f, 0x4d, 0x50, 0x52, 0x45, 0x53, 0x53, 0x45, 0x44, 0x10, 0x01, 0x2a, 0x29, 0x0a, 0x06, 0x53, + 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x43, 0x53, 0x53, 0x10, 0x00, 0x12, + 0x0c, 0x0a, 0x08, 0x49, 0x4e, 0x44, 0x45, 0x4e, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x07, 0x0a, + 0x03, 0x43, 0x53, 0x53, 0x10, 0x02, 0x2a, 0x3f, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x79, 
0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, + 0x47, 0x10, 0x00, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x45, 0x50, 0x52, 0x45, 0x43, 0x41, 0x54, 0x49, + 0x4f, 0x4e, 0x5f, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, + 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x02, 0x2a, 0x38, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, + 0x50, 0x41, 0x52, 0x53, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x41, 0x52, 0x41, 0x4d, + 0x53, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x4e, 0x41, 0x4c, 0x10, + 0x02, 0x2a, 0x3f, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x12, 0x09, 0x0a, 0x05, 0x43, 0x4f, 0x4d, 0x4d, 0x41, 0x10, 0x00, 0x12, 0x09, 0x0a, + 0x05, 0x53, 0x50, 0x41, 0x43, 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x4c, 0x41, 0x53, + 0x48, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x44, 0x45, 0x43, 0x49, 0x44, 0x45, 0x44, + 0x10, 0x03, 0x2a, 0x2f, 0x0a, 0x0e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x52, 0x55, 0x45, 0x10, 0x00, 0x12, 0x09, + 0x0a, 0x05, 0x46, 0x41, 0x4c, 0x53, 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x55, 0x4c, + 0x4c, 0x10, 0x02, 0x2a, 0x41, 0x0a, 0x13, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x08, 0x0a, 0x04, 0x50, 0x4c, + 0x55, 0x53, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x55, 0x53, 0x10, 0x01, 0x12, + 0x09, 0x0a, 0x05, 0x54, 0x49, 0x4d, 0x45, 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x49, + 0x56, 0x49, 0x44, 0x45, 0x10, 0x03, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_embedded_sass_v1_proto_rawDescOnce sync.Once + file_embedded_sass_v1_proto_rawDescData = file_embedded_sass_v1_proto_rawDesc +) + +func 
file_embedded_sass_v1_proto_rawDescGZIP() []byte { + file_embedded_sass_v1_proto_rawDescOnce.Do(func() { + file_embedded_sass_v1_proto_rawDescData = protoimpl.X.CompressGZIP(file_embedded_sass_v1_proto_rawDescData) + }) + return file_embedded_sass_v1_proto_rawDescData +} + +var file_embedded_sass_v1_proto_enumTypes = make([]protoimpl.EnumInfo, 7) +var file_embedded_sass_v1_proto_msgTypes = make([]protoimpl.MessageInfo, 39) +var file_embedded_sass_v1_proto_goTypes = []interface{}{ + (OutputStyle)(0), // 0: sass.embedded_protocolv1.OutputStyle + (Syntax)(0), // 1: sass.embedded_protocolv1.Syntax + (LogEventType)(0), // 2: sass.embedded_protocolv1.LogEventType + (ProtocolErrorType)(0), // 3: sass.embedded_protocolv1.ProtocolErrorType + (ListSeparator)(0), // 4: sass.embedded_protocolv1.ListSeparator + (SingletonValue)(0), // 5: sass.embedded_protocolv1.SingletonValue + (CalculationOperator)(0), // 6: sass.embedded_protocolv1.CalculationOperator + (*InboundMessage)(nil), // 7: sass.embedded_protocolv1.InboundMessage + (*OutboundMessage)(nil), // 8: sass.embedded_protocolv1.OutboundMessage + (*ProtocolError)(nil), // 9: sass.embedded_protocolv1.ProtocolError + (*SourceSpan)(nil), // 10: sass.embedded_protocolv1.SourceSpan + (*Value)(nil), // 11: sass.embedded_protocolv1.Value + (*InboundMessage_VersionRequest)(nil), // 12: sass.embedded_protocolv1.InboundMessage.VersionRequest + (*InboundMessage_CompileRequest)(nil), // 13: sass.embedded_protocolv1.InboundMessage.CompileRequest + (*InboundMessage_CanonicalizeResponse)(nil), // 14: sass.embedded_protocolv1.InboundMessage.CanonicalizeResponse + (*InboundMessage_ImportResponse)(nil), // 15: sass.embedded_protocolv1.InboundMessage.ImportResponse + (*InboundMessage_FileImportResponse)(nil), // 16: sass.embedded_protocolv1.InboundMessage.FileImportResponse + (*InboundMessage_FunctionCallResponse)(nil), // 17: sass.embedded_protocolv1.InboundMessage.FunctionCallResponse + (*InboundMessage_CompileRequest_StringInput)(nil), // 
18: sass.embedded_protocolv1.InboundMessage.CompileRequest.StringInput + (*InboundMessage_CompileRequest_Importer)(nil), // 19: sass.embedded_protocolv1.InboundMessage.CompileRequest.Importer + (*InboundMessage_ImportResponse_ImportSuccess)(nil), // 20: sass.embedded_protocolv1.InboundMessage.ImportResponse.ImportSuccess + (*OutboundMessage_VersionResponse)(nil), // 21: sass.embedded_protocolv1.OutboundMessage.VersionResponse + (*OutboundMessage_CompileResponse)(nil), // 22: sass.embedded_protocolv1.OutboundMessage.CompileResponse + (*OutboundMessage_LogEvent)(nil), // 23: sass.embedded_protocolv1.OutboundMessage.LogEvent + (*OutboundMessage_CanonicalizeRequest)(nil), // 24: sass.embedded_protocolv1.OutboundMessage.CanonicalizeRequest + (*OutboundMessage_ImportRequest)(nil), // 25: sass.embedded_protocolv1.OutboundMessage.ImportRequest + (*OutboundMessage_FileImportRequest)(nil), // 26: sass.embedded_protocolv1.OutboundMessage.FileImportRequest + (*OutboundMessage_FunctionCallRequest)(nil), // 27: sass.embedded_protocolv1.OutboundMessage.FunctionCallRequest + (*OutboundMessage_CompileResponse_CompileSuccess)(nil), // 28: sass.embedded_protocolv1.OutboundMessage.CompileResponse.CompileSuccess + (*OutboundMessage_CompileResponse_CompileFailure)(nil), // 29: sass.embedded_protocolv1.OutboundMessage.CompileResponse.CompileFailure + (*SourceSpan_SourceLocation)(nil), // 30: sass.embedded_protocolv1.SourceSpan.SourceLocation + (*Value_String)(nil), // 31: sass.embedded_protocolv1.Value.String + (*Value_Number)(nil), // 32: sass.embedded_protocolv1.Value.Number + (*Value_RgbColor)(nil), // 33: sass.embedded_protocolv1.Value.RgbColor + (*Value_HslColor)(nil), // 34: sass.embedded_protocolv1.Value.HslColor + (*Value_HwbColor)(nil), // 35: sass.embedded_protocolv1.Value.HwbColor + (*Value_List)(nil), // 36: sass.embedded_protocolv1.Value.List + (*Value_Map)(nil), // 37: sass.embedded_protocolv1.Value.Map + (*Value_CompilerFunction)(nil), // 38: 
sass.embedded_protocolv1.Value.CompilerFunction + (*Value_HostFunction)(nil), // 39: sass.embedded_protocolv1.Value.HostFunction + (*Value_ArgumentList)(nil), // 40: sass.embedded_protocolv1.Value.ArgumentList + (*Value_Calculation)(nil), // 41: sass.embedded_protocolv1.Value.Calculation + (*Value_Map_Entry)(nil), // 42: sass.embedded_protocolv1.Value.Map.Entry + nil, // 43: sass.embedded_protocolv1.Value.ArgumentList.KeywordsEntry + (*Value_Calculation_CalculationValue)(nil), // 44: sass.embedded_protocolv1.Value.Calculation.CalculationValue + (*Value_Calculation_CalculationOperation)(nil), // 45: sass.embedded_protocolv1.Value.Calculation.CalculationOperation +} +var file_embedded_sass_v1_proto_depIdxs = []int32{ + 13, // 0: sass.embedded_protocolv1.InboundMessage.compile_request:type_name -> sass.embedded_protocolv1.InboundMessage.CompileRequest + 14, // 1: sass.embedded_protocolv1.InboundMessage.canonicalize_response:type_name -> sass.embedded_protocolv1.InboundMessage.CanonicalizeResponse + 15, // 2: sass.embedded_protocolv1.InboundMessage.import_response:type_name -> sass.embedded_protocolv1.InboundMessage.ImportResponse + 16, // 3: sass.embedded_protocolv1.InboundMessage.file_import_response:type_name -> sass.embedded_protocolv1.InboundMessage.FileImportResponse + 17, // 4: sass.embedded_protocolv1.InboundMessage.function_call_response:type_name -> sass.embedded_protocolv1.InboundMessage.FunctionCallResponse + 12, // 5: sass.embedded_protocolv1.InboundMessage.version_request:type_name -> sass.embedded_protocolv1.InboundMessage.VersionRequest + 9, // 6: sass.embedded_protocolv1.OutboundMessage.error:type_name -> sass.embedded_protocolv1.ProtocolError + 22, // 7: sass.embedded_protocolv1.OutboundMessage.compile_response:type_name -> sass.embedded_protocolv1.OutboundMessage.CompileResponse + 23, // 8: sass.embedded_protocolv1.OutboundMessage.log_event:type_name -> sass.embedded_protocolv1.OutboundMessage.LogEvent + 24, // 9: 
sass.embedded_protocolv1.OutboundMessage.canonicalize_request:type_name -> sass.embedded_protocolv1.OutboundMessage.CanonicalizeRequest + 25, // 10: sass.embedded_protocolv1.OutboundMessage.import_request:type_name -> sass.embedded_protocolv1.OutboundMessage.ImportRequest + 26, // 11: sass.embedded_protocolv1.OutboundMessage.file_import_request:type_name -> sass.embedded_protocolv1.OutboundMessage.FileImportRequest + 27, // 12: sass.embedded_protocolv1.OutboundMessage.function_call_request:type_name -> sass.embedded_protocolv1.OutboundMessage.FunctionCallRequest + 21, // 13: sass.embedded_protocolv1.OutboundMessage.version_response:type_name -> sass.embedded_protocolv1.OutboundMessage.VersionResponse + 3, // 14: sass.embedded_protocolv1.ProtocolError.type:type_name -> sass.embedded_protocolv1.ProtocolErrorType + 30, // 15: sass.embedded_protocolv1.SourceSpan.start:type_name -> sass.embedded_protocolv1.SourceSpan.SourceLocation + 30, // 16: sass.embedded_protocolv1.SourceSpan.end:type_name -> sass.embedded_protocolv1.SourceSpan.SourceLocation + 31, // 17: sass.embedded_protocolv1.Value.string:type_name -> sass.embedded_protocolv1.Value.String + 32, // 18: sass.embedded_protocolv1.Value.number:type_name -> sass.embedded_protocolv1.Value.Number + 33, // 19: sass.embedded_protocolv1.Value.rgb_color:type_name -> sass.embedded_protocolv1.Value.RgbColor + 34, // 20: sass.embedded_protocolv1.Value.hsl_color:type_name -> sass.embedded_protocolv1.Value.HslColor + 36, // 21: sass.embedded_protocolv1.Value.list:type_name -> sass.embedded_protocolv1.Value.List + 37, // 22: sass.embedded_protocolv1.Value.map:type_name -> sass.embedded_protocolv1.Value.Map + 5, // 23: sass.embedded_protocolv1.Value.singleton:type_name -> sass.embedded_protocolv1.SingletonValue + 38, // 24: sass.embedded_protocolv1.Value.compiler_function:type_name -> sass.embedded_protocolv1.Value.CompilerFunction + 39, // 25: sass.embedded_protocolv1.Value.host_function:type_name -> 
sass.embedded_protocolv1.Value.HostFunction + 40, // 26: sass.embedded_protocolv1.Value.argument_list:type_name -> sass.embedded_protocolv1.Value.ArgumentList + 35, // 27: sass.embedded_protocolv1.Value.hwb_color:type_name -> sass.embedded_protocolv1.Value.HwbColor + 41, // 28: sass.embedded_protocolv1.Value.calculation:type_name -> sass.embedded_protocolv1.Value.Calculation + 18, // 29: sass.embedded_protocolv1.InboundMessage.CompileRequest.string:type_name -> sass.embedded_protocolv1.InboundMessage.CompileRequest.StringInput + 0, // 30: sass.embedded_protocolv1.InboundMessage.CompileRequest.style:type_name -> sass.embedded_protocolv1.OutputStyle + 19, // 31: sass.embedded_protocolv1.InboundMessage.CompileRequest.importers:type_name -> sass.embedded_protocolv1.InboundMessage.CompileRequest.Importer + 20, // 32: sass.embedded_protocolv1.InboundMessage.ImportResponse.success:type_name -> sass.embedded_protocolv1.InboundMessage.ImportResponse.ImportSuccess + 11, // 33: sass.embedded_protocolv1.InboundMessage.FunctionCallResponse.success:type_name -> sass.embedded_protocolv1.Value + 1, // 34: sass.embedded_protocolv1.InboundMessage.CompileRequest.StringInput.syntax:type_name -> sass.embedded_protocolv1.Syntax + 19, // 35: sass.embedded_protocolv1.InboundMessage.CompileRequest.StringInput.importer:type_name -> sass.embedded_protocolv1.InboundMessage.CompileRequest.Importer + 1, // 36: sass.embedded_protocolv1.InboundMessage.ImportResponse.ImportSuccess.syntax:type_name -> sass.embedded_protocolv1.Syntax + 28, // 37: sass.embedded_protocolv1.OutboundMessage.CompileResponse.success:type_name -> sass.embedded_protocolv1.OutboundMessage.CompileResponse.CompileSuccess + 29, // 38: sass.embedded_protocolv1.OutboundMessage.CompileResponse.failure:type_name -> sass.embedded_protocolv1.OutboundMessage.CompileResponse.CompileFailure + 2, // 39: sass.embedded_protocolv1.OutboundMessage.LogEvent.type:type_name -> sass.embedded_protocolv1.LogEventType + 10, // 40: 
sass.embedded_protocolv1.OutboundMessage.LogEvent.span:type_name -> sass.embedded_protocolv1.SourceSpan + 11, // 41: sass.embedded_protocolv1.OutboundMessage.FunctionCallRequest.arguments:type_name -> sass.embedded_protocolv1.Value + 10, // 42: sass.embedded_protocolv1.OutboundMessage.CompileResponse.CompileFailure.span:type_name -> sass.embedded_protocolv1.SourceSpan + 4, // 43: sass.embedded_protocolv1.Value.List.separator:type_name -> sass.embedded_protocolv1.ListSeparator + 11, // 44: sass.embedded_protocolv1.Value.List.contents:type_name -> sass.embedded_protocolv1.Value + 42, // 45: sass.embedded_protocolv1.Value.Map.entries:type_name -> sass.embedded_protocolv1.Value.Map.Entry + 4, // 46: sass.embedded_protocolv1.Value.ArgumentList.separator:type_name -> sass.embedded_protocolv1.ListSeparator + 11, // 47: sass.embedded_protocolv1.Value.ArgumentList.contents:type_name -> sass.embedded_protocolv1.Value + 43, // 48: sass.embedded_protocolv1.Value.ArgumentList.keywords:type_name -> sass.embedded_protocolv1.Value.ArgumentList.KeywordsEntry + 44, // 49: sass.embedded_protocolv1.Value.Calculation.arguments:type_name -> sass.embedded_protocolv1.Value.Calculation.CalculationValue + 11, // 50: sass.embedded_protocolv1.Value.Map.Entry.key:type_name -> sass.embedded_protocolv1.Value + 11, // 51: sass.embedded_protocolv1.Value.Map.Entry.value:type_name -> sass.embedded_protocolv1.Value + 11, // 52: sass.embedded_protocolv1.Value.ArgumentList.KeywordsEntry.value:type_name -> sass.embedded_protocolv1.Value + 32, // 53: sass.embedded_protocolv1.Value.Calculation.CalculationValue.number:type_name -> sass.embedded_protocolv1.Value.Number + 45, // 54: sass.embedded_protocolv1.Value.Calculation.CalculationValue.operation:type_name -> sass.embedded_protocolv1.Value.Calculation.CalculationOperation + 41, // 55: sass.embedded_protocolv1.Value.Calculation.CalculationValue.calculation:type_name -> sass.embedded_protocolv1.Value.Calculation + 6, // 56: 
sass.embedded_protocolv1.Value.Calculation.CalculationOperation.operator:type_name -> sass.embedded_protocolv1.CalculationOperator + 44, // 57: sass.embedded_protocolv1.Value.Calculation.CalculationOperation.left:type_name -> sass.embedded_protocolv1.Value.Calculation.CalculationValue + 44, // 58: sass.embedded_protocolv1.Value.Calculation.CalculationOperation.right:type_name -> sass.embedded_protocolv1.Value.Calculation.CalculationValue + 59, // [59:59] is the sub-list for method output_type + 59, // [59:59] is the sub-list for method input_type + 59, // [59:59] is the sub-list for extension type_name + 59, // [59:59] is the sub-list for extension extendee + 0, // [0:59] is the sub-list for field type_name +} + +func init() { file_embedded_sass_v1_proto_init() } +func file_embedded_sass_v1_proto_init() { + if File_embedded_sass_v1_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_embedded_sass_v1_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ProtocolError); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceSpan); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[4].Exporter = 
func(v interface{}, i int) interface{} { + switch v := v.(*Value); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_VersionRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CompileRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CanonicalizeResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_ImportResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_FileImportResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_FunctionCallResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*InboundMessage_CompileRequest_StringInput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CompileRequest_Importer); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_ImportResponse_ImportSuccess); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_VersionResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_LogEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CanonicalizeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*OutboundMessage_ImportRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_FileImportRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_FunctionCallRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse_CompileSuccess); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse_CompileFailure); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceSpan_SourceLocation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_String); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Number); i { + case 0: + 
return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_RgbColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HslColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HwbColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_List); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Map); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_CompilerFunction); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HostFunction); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_embedded_sass_v1_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_ArgumentList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Map_Entry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation_CalculationValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_v1_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation_CalculationOperation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_embedded_sass_v1_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_)(nil), + (*InboundMessage_CanonicalizeResponse_)(nil), + (*InboundMessage_ImportResponse_)(nil), + (*InboundMessage_FileImportResponse_)(nil), + (*InboundMessage_FunctionCallResponse_)(nil), + (*InboundMessage_VersionRequest_)(nil), + } + file_embedded_sass_v1_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*OutboundMessage_Error)(nil), + (*OutboundMessage_CompileResponse_)(nil), + (*OutboundMessage_LogEvent_)(nil), + (*OutboundMessage_CanonicalizeRequest_)(nil), + (*OutboundMessage_ImportRequest_)(nil), + 
(*OutboundMessage_FileImportRequest_)(nil), + (*OutboundMessage_FunctionCallRequest_)(nil), + (*OutboundMessage_VersionResponse_)(nil), + } + file_embedded_sass_v1_proto_msgTypes[4].OneofWrappers = []interface{}{ + (*Value_String_)(nil), + (*Value_Number_)(nil), + (*Value_RgbColor_)(nil), + (*Value_HslColor_)(nil), + (*Value_List_)(nil), + (*Value_Map_)(nil), + (*Value_Singleton)(nil), + (*Value_CompilerFunction_)(nil), + (*Value_HostFunction_)(nil), + (*Value_ArgumentList_)(nil), + (*Value_HwbColor_)(nil), + (*Value_Calculation_)(nil), + } + file_embedded_sass_v1_proto_msgTypes[6].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_String_)(nil), + (*InboundMessage_CompileRequest_Path)(nil), + } + file_embedded_sass_v1_proto_msgTypes[7].OneofWrappers = []interface{}{ + (*InboundMessage_CanonicalizeResponse_Url)(nil), + (*InboundMessage_CanonicalizeResponse_Error)(nil), + } + file_embedded_sass_v1_proto_msgTypes[8].OneofWrappers = []interface{}{ + (*InboundMessage_ImportResponse_Success)(nil), + (*InboundMessage_ImportResponse_Error)(nil), + } + file_embedded_sass_v1_proto_msgTypes[9].OneofWrappers = []interface{}{ + (*InboundMessage_FileImportResponse_FileUrl)(nil), + (*InboundMessage_FileImportResponse_Error)(nil), + } + file_embedded_sass_v1_proto_msgTypes[10].OneofWrappers = []interface{}{ + (*InboundMessage_FunctionCallResponse_Success)(nil), + (*InboundMessage_FunctionCallResponse_Error)(nil), + } + file_embedded_sass_v1_proto_msgTypes[12].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_Importer_Path)(nil), + (*InboundMessage_CompileRequest_Importer_ImporterId)(nil), + (*InboundMessage_CompileRequest_Importer_FileImporterId)(nil), + } + file_embedded_sass_v1_proto_msgTypes[15].OneofWrappers = []interface{}{ + (*OutboundMessage_CompileResponse_Success)(nil), + (*OutboundMessage_CompileResponse_Failure)(nil), + } + file_embedded_sass_v1_proto_msgTypes[20].OneofWrappers = []interface{}{ + 
(*OutboundMessage_FunctionCallRequest_Name)(nil), + (*OutboundMessage_FunctionCallRequest_FunctionId)(nil), + } + file_embedded_sass_v1_proto_msgTypes[37].OneofWrappers = []interface{}{ + (*Value_Calculation_CalculationValue_Number)(nil), + (*Value_Calculation_CalculationValue_String_)(nil), + (*Value_Calculation_CalculationValue_Interpolation)(nil), + (*Value_Calculation_CalculationValue_Operation)(nil), + (*Value_Calculation_CalculationValue_Calculation)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_embedded_sass_v1_proto_rawDesc, + NumEnums: 7, + NumMessages: 39, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_embedded_sass_v1_proto_goTypes, + DependencyIndexes: file_embedded_sass_v1_proto_depIdxs, + EnumInfos: file_embedded_sass_v1_proto_enumTypes, + MessageInfos: file_embedded_sass_v1_proto_msgTypes, + }.Build() + File_embedded_sass_v1_proto = out.File + file_embedded_sass_v1_proto_rawDesc = nil + file_embedded_sass_v1_proto_goTypes = nil + file_embedded_sass_v1_proto_depIdxs = nil +} diff --git a/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.proto b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.proto new file mode 100644 index 0000000..cecda62 --- /dev/null +++ b/vendor/github.com/bep/godartsass/internal/embeddedsassv1/embedded_sass_v1.proto @@ -0,0 +1,1005 @@ +// Copyright 2019 Google Inc. Use of this source code is governed by an +// MIT-style license that can be found in the LICENSE file or at +// https://opensource.org/licenses/MIT. + +syntax = "proto3"; + +package sass.embedded_protocolv1; + +// The wrapper type for all messages sent from the host to the compiler. This +// provides a `oneof` that makes it possible to determine the type of each +// inbound message. +message InboundMessage { + // A request for information about the version of the embedded compiler. 
The + // host can use this to provide diagnostic information to the user, to check + // which features the compiler supports, or to ensure that it's compatible + // with the same protocol version the compiler supports. + message VersionRequest { + // This version request's id. Mandatory. + uint32 id = 1; + + // This message's contents are intentionally empty. It just acts as a signal + // to the compiler to send a VersionResponse. More fields may be added in + // the future. + } + + // A request that compiles an entrypoint to CSS. + message CompileRequest { + // This compilation's request id. This is included in messages sent from the + // compiler to the host. Mandatory. + uint32 id = 1; + + // An input stylesheet provided as plain text, rather than loaded from the + // filesystem. + message StringInput { + // The contents of the stylesheet. + string source = 1; + + // The location from which `source` was loaded. If this is empty, it + // indicates that the URL is unknown. + // + // This must be a canonical URL recognized by `importer`, if it's passed. + string url = 2; + + // The syntax to use to parse `source`. + Syntax syntax = 3; + + // The importer to use to resolve imports relative to `url`. + Importer importer = 4; + } + + // The input stylesheet to parse. Mandatory. + oneof input { + // A stylesheet loaded from its contents. + StringInput string = 2; + + // A stylesheet loaded from the given path on the filesystem. + string path = 3; + } + + // How to format the CSS output. + OutputStyle style = 4; + + // Whether to generate a source map. Note that this will *not* add a source + // map comment to the stylesheet; that's up to the host or its users. + bool source_map = 5; + + // A wrapper message that represents either a user-defined importer or a + // load path on disk. This must be a wrapper because `oneof` types can't be + // `repeated`. + message Importer { + // The possible types of importer. Mandatory. 
+ oneof importer { + // A built-in importer that loads Sass files within the given directory + // on disk. + string path = 1; + + // A unique ID for a user-defined importer. This ID will be included in + // outbound `CanonicalizeRequest` and `ImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all + // importers registered for this compilation. + uint32 importer_id = 2; + + // A unique ID for a special kind of user-defined importer that tells + // the compiler where to look for files on the physical filesystem, but + // leaves the details of resolving partials and extensions and loading + // the file from disk up to the compiler itself. + // + // This ID will be included in outbound `FileImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all importers + // registered for this compilation. + uint32 file_importer_id = 3; + } + } + + // Importers (including load paths on the filesystem) to use when resolving + // imports that can't be resolved relative to the file that contains it. Each + // importer is checked in order until one recognizes the imported URL. + repeated Importer importers = 6; + + // Signatures for custom global functions whose behavior is defined by the + // host. These must be valid Sass function signatures that could appear in + // after `@function` in a Sass stylesheet, such as + // `mix($color1, $color2, $weight: 50%)`. + // + // Compilers must ensure that pure-Sass functions take precedence over + // custom global functions. They must also reject any custom function names + // that conflict with function names built into the Sass language. + repeated string global_functions = 7; + + // Whether to use terminal colors in the formatted message of errors and + // logs. 
+ bool alert_color = 8; + + // Whether to encode the formatted message of errors and logs in ASCII. + bool alert_ascii = 9; + + // Whether to report all deprecation warnings or only the first few ones. + // If this is `false`, the compiler may choose not to send events for + // repeated deprecation warnings. If this is `true`, the compiler must emit + // an event for every deprecation warning it encounters. + bool verbose = 10; + + // Whether to omit events for deprecation warnings coming from dependencies + // (files loaded from a different importer than the input). + bool quiet_deps = 11; + + // Whether to include sources in the generated sourcemap + bool source_map_include_sources = 12; + + // Whether to emit a `@charset`/BOM for non-ASCII stylesheets. + bool charset = 13; + } + + // A response indicating the result of canonicalizing an imported URL. + message CanonicalizeResponse { + uint32 id = 1; + + // The result of canonicalization. Optional. If this is `null`, it indicates + // that the importer either did not recognize the URL, or could not find a + // stylesheet at the location it referred to. + oneof result { + // The successfully canonicalized URL. This must be an absolute URL, + // including scheme. + string url = 2; + + // An error message explaining why canonicalization failed. + // + // This indicates that a stylesheet was found, but a canonical URL for it + // could not be determined. If no stylesheet was found, `result` should be + // `null` instead. + string error = 3; + } + } + + // A response indicating the result of importing a canonical URL. + message ImportResponse { + uint32 id = 1; + + // The stylesheet's contents were loaded successfully. + message ImportSuccess { + // The text of the stylesheet. Mandatory. + string contents = 1; + + // The syntax of `contents`. Mandatory. + Syntax syntax = 2; + + // An absolute, browser-accessible URL indicating the resolved location of + // the imported stylesheet. Optional. 
+ // + // This should be a `file:` URL if one is available, but an `http:` URL is + // acceptable as well. If no URL is supplied, a `data:` URL is generated + // automatically from `contents`. + // + // If this is provided, it must be an absolute URL, including scheme. + string source_map_url = 3; + } + + // The result of loading the URL. Optional. If this is `null`, it indicates + // that the importer either did not recognize the URL, or could not find a + // stylesheet at the location it referred to. + oneof result { + // The contents of the loaded stylesheet. + ImportSuccess success = 2; + + // An error message explaining why the URL could not be loaded. + string error = 3; + } + } + + // A response indicating the result of redirecting a URL to the filesystem. + message FileImportResponse { + uint32 id = 1; + + // The result of loading the URL. Optional. A null result indicates that the + // importer did not recognize the URL and other importers or load paths + // should be tried. + oneof result { + // The absolute `file:` URL to look for the file on the physical + // filesystem. + // + // The host must ensure that this URL follows the format for an absolute + // `file:` URL on the current operating system without a hostname, and the + // compiler must verify this to the best of its ability. See + // https://en.wikipedia.org/wiki/File_URI_scheme for details on the + // format. + // + // The compiler must handle turning this into a canonical URL by resolving + // it for partials, file extensions, and index files. The compiler must + // then loading the contents of the resulting canonical URL from the + // filesystem. + string file_url = 2; + + // An error message explaining why the URL could not be loaded. + string error = 3; + } + } + + // A response indicating the result of calling a custom Sass function defined + // in the host. + message FunctionCallResponse { + uint32 id = 1; + + // The result of calling the function. Mandatory. 
+ oneof result { + // The return value of a successful function call. + Value success = 2; + + // An error message explaining why the function call failed. + string error = 3; + } + + // The IDs of all `Value.ArgumentList`s in `FunctionCallRequest.arguments` + // whose keywords were accessed. See `Value.ArgumentList` for details. + // Mandatory if `result.success` is set. This may not include the special + // value `0` and it may not include multiple instances of the same ID. + repeated uint32 accessed_argument_lists = 4; + } + + // The wrapped message. Mandatory. + oneof message { + CompileRequest compile_request = 2; + CanonicalizeResponse canonicalize_response = 3; + ImportResponse import_response = 4; + FileImportResponse file_import_response = 5; + FunctionCallResponse function_call_response = 6; + VersionRequest version_request = 7; + } +} + +// The wrapper type for all messages sent from the compiler to the host. This +// provides a `oneof` that makes it possible to determine the type of each +// outbound message. +message OutboundMessage { + // A response that contains the version of the embedded compiler. + message VersionResponse { + // This version request's id. Mandatory. + uint32 id = 5; + + // The version of the embedded protocol, in semver format. + string protocol_version = 1; + + // The version of the embedded compiler package. This has no guaranteed + // format, although compilers are encouraged to use semver. + string compiler_version = 2; + + // The version of the Sass implementation that the embedded compiler wraps. + // This has no guaranteed format, although Sass implementations are + // encouraged to use semver. + string implementation_version = 3; + + // The name of the Sass implementation that the embedded compiler wraps. + string implementation_name = 4; + } + + // A response that contains the result of a compilation. + message CompileResponse { + // The compilation's request id. Mandatory. 
+ uint32 id = 1; + + // A message indicating that the Sass file was successfully compiled to CSS. + message CompileSuccess { + // The compiled CSS. + string css = 1; + + // The JSON-encoded source map, or the empty string if + // `CompileRequest.source_map` was `false`. + // + // The compiler must not add a `"file"` key to this source map. It's the + // host's (or the host's user's) responsibility to determine how the + // generated CSS can be reached from the source map. + string source_map = 2; + + // The canonical URLs of all source files loaded during the compilation. + // + // The compiler must ensure that each canonical URL appears only once in + // this list. This must include the entrypoint file's URL if either + // `CompileRequest.input.path` or `CompileRequest.StringInput.url` was + // passed. + repeated string loaded_urls = 3; + } + + // A message indicating that the Sass file could not be successfully + // compiled to CSS. + message CompileFailure { + // A message describing the reason for the failure. + string message = 1; + + // The span associated with the failure. Mandatory. + SourceSpan span = 2; + + // The stack trace associated with the failure. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + string stack_trace = 3; + + // A formatted, human-readable string that contains the message, span + // (if available), and trace (if available). The format of this string is + // not specified and is likely to be inconsistent between implementations. + string formatted = 4; + } + + // The success or failure result of the compilation. Mandatory. + oneof result { + // The result of a successful compilation. + CompileSuccess success = 2; + + // The result of a failed compilation. + CompileFailure failure = 3; + } + } + + // An event indicating that a message should be displayed to the user. 
+ message LogEvent { + // The request id for the compilation that triggered the message. Mandatory. + uint32 compilation_id = 1; + + LogEventType type = 2; + + // The text of the message. + string message = 3; + + // The span associated with this message. Optional. + SourceSpan span = 4; + + // The stack trace associated with this message. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + string stack_trace = 5; + + // A formatted, human-readable string that contains the message, span (if + // available), and trace (if available). The format of this string is not + // specified and is likely to be inconsistent between implementations. + string formatted = 6; + } + + // A request for a custom importer to convert an imported URL to its canonical + // format. + // + // If the URL is not recognized by this importer, or if no stylesheet is found + // at that URL, `CanonicalizeResponse.result` must be `null`. Otherwise, the + // importer must return an absolute URL, including a scheme. + // + // > The host's documentation should encourage the use of file importers (via + // > `CompileRequest.Importer.file_importer_id`, `FileImportRequest`, and + // > `FileImportResponse`) for any importers that simply refer to files on + // > disk. This will allow Sass to handle the logic of resolving partials, + // > file extensions, and index files. + // + // If Sass has already loaded a stylesheet with the returned canonical URL, it + // re-uses the existing parse tree. This means that importers must ensure that + // the same canonical URL always refers to the same stylesheet, *even across + // different importers*. Importers must also ensure that any canonicalized + // URLs they return can be passed back to `CanonicalizeRequest` and will be + // returned unchanged. 
+ // + // If this importer's URL format supports file extensions, it should + // canonicalize them the same way as the default filesystem importer: + // + // * The importer should look for stylesheets by adding the prefix `_` to the + // URL's basename, and by adding the extensions `.sass` and `.scss` if the + // URL doesn't already have one of those extensions. For example, if the URL + // was `foo/bar/baz`, the importer would look for: + // + // * `foo/bar/baz.sass` + // * `foo/bar/baz.scss` + // * `foo/bar/_baz.sass` + // * `foo/bar/_baz.scss` + // + // If the URL was foo/bar/baz.scss, the importer would just look for: + // + // * `foo/bar/baz.scss` + // * `foo/bar/_baz.scss` + // + // If the importer finds a stylesheet at more than one of these URLs, it + // should respond with a `CanonicalizeResponse.result.error` indicating that + // the import is ambiguous. Note that if the extension is explicitly + // specified, a stylesheet with another extension may exist without error. + // + // * If none of the possible paths is valid, the importer should perform the + // same resolution on the URL followed by `/index`. In the example above, it + // would look for: + // + // * `foo/bar/baz/_index.sass` + // * `foo/bar/baz/index.sass` + // * `foo/bar/baz/_index.scss` + // * `foo/bar/baz/index.scss` + // + // As above, if the importer finds a stylesheet at more than one of these + // URLs, it should respond with a `CanonicalizeResponse.result.error` + // indicating that the import is ambiguous. + message CanonicalizeRequest { + uint32 id = 1; + + // The request id for the compilation that triggered the message. Mandatory. + uint32 compilation_id = 2; + + // The unique ID of the importer being invoked. This must match an importer + // ID passed to this compilation in `CompileRequest.importers` or + // `CompileRequest.input.string.importer`. Mandatory. + uint32 importer_id = 3; + + // The URL of the import to be canonicalized. This may be either absolute or + // relative. 
+ // + // When loading a URL, the compiler must first try resolving that URL + // relative to the canonical URL of the current file, and canonicalizing the + // result using the importer that loaded the current file. If this returns + // `null`, the compiler must then try canonicalizing the original URL with + // each importer in order until one returns something other than `null`. + // That is the result of the import. + string url = 4; + + /// Whether this request comes from an `@import` rule. + /// + /// When evaluating `@import` rules, URLs should canonicalize to an + /// [import-only file] if one exists for the URL being canonicalized. + /// Otherwise, canonicalization should be identical for `@import` and `@use` + /// rules. + /// + /// [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + bool from_import = 5; + } + + // A request for a custom importer to load the contents of a stylesheet. + message ImportRequest { + uint32 id = 1; + + // The request id for the compilation that triggered the message. Mandatory. + uint32 compilation_id = 2; + + // The unique ID of the importer being invoked. This must match an + // `Importer.importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + // Mandatory. + uint32 importer_id = 3; + + // The canonical URL of the import. This is guaranteed to be a URL returned + // by a `CanonicalizeRequest` to this importer. + string url = 4; + } + + // A request for a custom filesystem importer to load the contents of a + // stylesheet. + // + // A filesystem importer is represented in the compiler as an [importer]. When + // the importer is invoked with a string `string`: + // + // [importer]: https://github.com/sass/sass/tree/main/spec/modules.md#importer + // + // * If `string` is an absolute URL whose scheme is `file`: + // + // * Let `url` be string. 
+ // + // * Otherwise: + // + // * Let `fromImport` be `true` if the importer is being run for an + // `@import` and `false` otherwise. + // + // * Let `response` be the result of sending a `FileImportRequest` with + // `string` as its `url` and `fromImport` as `from_import`. + // + // * If `response.result` is null, return null. + // + // * Otherwise, if `response.result.error` is set, throw an error. + // + // * Otherwise, let `url` be `response.result.file_url`. + // + // * Let `resolved` be the result of [resolving `url`]. + // + // * If `resolved` is null, return null. + // + // * Let `text` be the contents of the file at `resolved`. + // + // * Let `syntax` be: + // * "scss" if `url` ends in `.scss`. + // * "indented" if `url` ends in `.sass`. + // * "css" if `url` ends in `.css`. + // + // > The algorithm for resolving a `file:` URL guarantees that `url` will have + // > one of these extensions. + // + // * Return `text`, `syntax`, and `resolved`. + // + // [resolving `url`]: https://github.com/sass/sass/tree/main/spec/modules.md#resolving-a-file-url + message FileImportRequest { + uint32 id = 1; + + // The request id for the compilation that triggered the message. Mandatory. + uint32 compilation_id = 2; + + // The unique ID of the importer being invoked. This must match an + // `Importer.file_importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + // Mandatory. + uint32 importer_id = 3; + + // The (non-canonicalized) URL of the import. + string url = 4; + + /// Whether this request comes from an `@import` rule. + /// + /// When evaluating `@import` rules, filesystem importers should load an + /// [import-only file] if one exists for the URL being canonicalized. + /// Otherwise, canonicalization should be identical for `@import` and `@use` + /// rules. 
+ /// + /// [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + bool from_import = 5; + } + + // A request to invoke a custom Sass function and return its result. + message FunctionCallRequest { + uint32 id = 1; + + // The request id for the compilation that triggered the message. Mandatory. + uint32 compilation_id = 2; + + // An identifier that indicates which function to invoke. Mandatory. + oneof identifier { + // The name of the function to invoke. + // + // This must match the name of a function signature the host passed to the + // corresponding `CompileRequest.global_functions` call, including hyphens + // and underscores. + string name = 3; + + // The opaque ID of the function to invoke. + // + // This must match the ID of a `Value.HostFunction` that the host passed + // to the compiler. + uint32 function_id = 4; + } + + // The arguments passed to the function, in the order they appear in the + // function signature passed to `CompileRequest.global_functions`. Mandatory. + // + // The compiler must ensure that a valid number of arguments are passed for + // the given signature, that default argument values are instantiated + // appropriately, and that variable argument lists (`$args...`) are passed + // as `Value.ArgumentList`s. + repeated Value arguments = 5; + } + + // The wrapped message. Mandatory. + oneof message { + ProtocolError error = 1; + CompileResponse compile_response = 2; + LogEvent log_event = 3; + CanonicalizeRequest canonicalize_request = 4; + ImportRequest import_request = 5; + FileImportRequest file_import_request = 6; + FunctionCallRequest function_call_request = 7; + VersionResponse version_response = 8; + } +} + +// Possible ways to format the CSS output. The compiler is not required to +// support all possible options; if the host requests an unsupported style, the +// compiler should choose the closest supported style. 
+enum OutputStyle { + // Each selector and declaration is written on its own line. + EXPANDED = 0; + + // The entire stylesheet is written on a single line, with as few characters + // as possible. + COMPRESSED = 1; +} + +// Possible syntaxes for a Sass stylesheet. +enum Syntax { + // The CSS-superset `.scss` syntax. + SCSS = 0; + + // The indented `.sass` syntax. + INDENTED = 1; + + // Plain CSS syntax that doesn't support any special Sass features. + CSS = 2; +} + +// The possible types of [LogEvent]. +enum LogEventType { + // A warning for something other than a deprecated Sass feature. Often emitted + // due to a stylesheet using the `@warn` rule. + WARNING = 0; + + // A warning indicating that the stylesheet is using a deprecated Sass + // feature. Compilers should not add text like "deprecation warning" to + // deprecation warnings; it's up to the host to determine how to signal that + // to the user. + DEPRECATION_WARNING = 1; + + // A message generated by the user for their own debugging purposes. + DEBUG = 2; +} + +// An error reported when an endpoint violates the embedded Sass protocol. +message ProtocolError { + ProtocolErrorType type = 1; + + // The ID of the request that had an error. This MUST be `4294967295` if the + // request ID couldn't be determined, or if the error is being reported for a + // response or an event. + uint32 id = 2; + + // A human-readable message providing more detail about the error. + string message = 3; +} + +// Potential types of protocol errors. +enum ProtocolErrorType { + // A message was received that couldn't be decoded as an `InboundMessage` (for + // the compiler) or `OutboundMessage` (for the host). + PARSE = 0; + + // A message was received that violated a documented restriction, such as not + // providing a mandatory field. + PARAMS = 1; + + // Something unexpected went wrong within the endpoint. + INTERNAL = 2; +} + +// A chunk of a source file. +message SourceSpan { + // The text covered by the source span. 
Compilers must guarantee that this is + // the text between `start.offset` and `end.offset` in the source file + // referred to by `url`. + string text = 1; + + // A single point in a source file. + message SourceLocation { + // The 0-based offset of this location within the source file. Mandatory. + uint32 offset = 1; + + // The 0-based line number of this location within the source file. + // Mandatory. + uint32 line = 2; + + // The 0-based column number of this location within its line. Mandatory. + uint32 column = 3; + } + + // The location of the first character in this span. Mandatory. + SourceLocation start = 2; + + // The location of the first character after this span. Optional. + // + // If this is omitted, it indicates that the span is empty and points + // immediately before `start`. In that case, `text` must be empty. + // + // This must not point to a location before `start`. + SourceLocation end = 3; + + // The URL of the file to which this span refers. + // + // This may be empty, indicating that the span refers to a + // `CompileRequest.StringInput` file that doesn't specify a URL. + string url = 4; + + // Additional source text surrounding this span. + // + // If this isn't empty, it must contain `text`. Furthermore, `text` must begin + // at column `start.column` of a line in `context`. + // + // This usually contains the full lines the span begins and ends on if the + // span itself doesn't cover the full lines. + string context = 5; +} + +// A SassScript value, passed to and returned by functions. +message Value { + // A SassScript string value. + message String { + // The contents of the string. Mandatory. + string text = 1; + + // Whether the string is quoted or unquoted. Mandatory. + bool quoted = 2; + } + + // A SassScript number value. + message Number { + // The number's numeric value. Mandatory. + double value = 1; + + // The number's numerator units. 
+ // + // The endpoint sending the number must ensure that no numerator units are + // [compatible][] with any denominator units. Such compatible units must be + // simplified away according to the multiplicative factor between them + // defined in the CSS Values and Units spec. + // + // [compatible]: https://www.w3.org/TR/css-values-4/#compat + repeated string numerators = 2; + + // The number's denominator units. + repeated string denominators = 3; + } + + // A SassScript color value, represented as red, green, and blue channels. + // + // All Sass color values can be equivalently represented as `RgbColor`, + // `HslColor`, and `HwbColor` messages without loss of color information that + // can affect CSS rendering. As such, either endpoint may choose to send any + // color value as any one of these three messages. + message RgbColor { + // The color's red channel. Mandatory. May not be above 255. + uint32 red = 1; + + // The color's green channel. Mandatory. May not be above 255. + uint32 green = 2; + + // The color's blue channel. Mandatory. May not be above 255. + uint32 blue = 3; + + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript color value, represented as hue, saturation, and lightness channels. + message HslColor { + // The color's hue. Mandatory. + double hue = 1; + + // The color's percent saturation. Mandatory. Must be between 0 and 100, + // inclusive. + double saturation = 2; + + // The color's percent lightness. Mandatory. Must be between 0 and 100, + // inclusive. + double lightness = 3; + + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript color value, represented as hue, whiteness, and blackness + // channels. + message HwbColor { + // The color's hue. Mandatory. + double hue = 1; + + // The color's percent whiteness. Mandatory. Must be between 0 and 100, + // inclusive. 
The sum of `whiteness` and `blackness` must not exceed 100. + double whiteness = 2; + + // The color's percent blackness. Mandatory. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + double blackness = 3; + + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript list value. + message List { + // The type of separator for this list. Mandatory. + ListSeparator separator = 1; + + // Whether this list has square brackets. Mandatory. + bool has_brackets = 2; + + // The elements of this list. + repeated Value contents = 3; + } + + // A SassScript map value. + message Map { + // A single key/value pair in the map. + message Entry { + // The key this entry is associated with. Mandatory. + Value key = 1; + + // The value associated with this key. Mandatory. + Value value = 2; + } + + // The entries in this map. The sending endpoint must guarantee that no two + // entries have the same key. + repeated Entry entries = 1; + } + + // A first-class function defined in the compiler. New `CompilerFunction`s may + // only be created by the compiler, but the host may pass `CompilerFunction`s + // back to the compiler as long as their IDs match IDs of functions received + // by the host during that same compilation. + message CompilerFunction { + // A unique ID for this function. The compiler is responsible for generating + // this ID and ensuring it's unique across all functions passed to the host + // for this compilation. Mandatory. + uint32 id = 1; + } + + // An anonymous custom function defined in the host. New `HostFunction`s may + // only be created by the host, and `HostFunction`s may *never* be passed from + // the compiler to the host. The compiler must instead pass a + // `CompilerFunction` that wraps the `HostFunction`. + message HostFunction { + // A unique ID for this function. 
The compiler must pass this ID as + // `OutboundRequest.FunctionCallRequest.id` when invoking this function. The + // host is responsible for generating this ID and ensuring it's unique + // across all functions for *all* compilations. Mandatory. + uint32 id = 1; + + // The signature for this function. Mandatory. + // + // If this isn't a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), the compiler must treat the function's return value as invalid. + // + // > This ensures that the host doesn't need to be able to correctly parse + // > the entire function declaration syntax. + // + // The compiler may not invoke the function by its name, since it's not + // guaranteed to be globally unique. However, it may use the name to + // generate the string representation of this function. + string signature = 2; + } + + // A SassScript argument list value. This represents rest arguments passed to + // a function's `$arg...` parameter. Unlike a normal `List`, an argument list + // has an associated keywords map which tracks keyword arguments passed in + // alongside positional arguments. + // + // For each `ArgumentList` in `FunctionCallRequest.arguments` (including those + // nested within `List`s and `Map`s), the host must track whether its keyword + // arguments were accessed by the user. If they were, it must add its + // `ArgumentList.id` to `FunctionCallResponse.accessed_argument_lists`. + // + // The compiler must treat every `ArgumentList` whose `ArgumentList.id` + // appears in `FunctionCallResponse.accessed_argument_lists` as though it had + // been passed to `meta.keywords()`. + message ArgumentList { + // An ID for this argument list that's unique within the scope of a given + // `FunctionCallRequest`. + // + // The special ID `0` is reserved for `ArgumentList`s created by the host, + // and may not be used by the compiler. 
These `ArgumentList`s do not need to + // have their IDs added to `FunctionCallResponse.accessed_argument_lists`, + // and the compiler should treat them as though their keywords have always + // been accessed. + uint32 id = 1; + + // The type of separator for this list. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + ListSeparator separator = 2; + + // The argument list's positional contents. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + repeated Value contents = 3; + + // The argument list's keywords. The compiler must set this, but the host + // may omit it for `ArgumentList`s that were originally created by the + // compiler (that is, those with a non-0 ID). + map keywords = 4; + } + + // A SassScript calculation value. The compiler must send fully [simplified] + // calculations, meaning that simplifying it again will produce the same + // calculation. The host is not required to simplify calculations. + // + // [simplified]: https://github.com/sass/sass/tree/main/spec/types/calculation.md#simplifying-a-calculation + // + // The compiler must simplify any calculations it receives from the host + // before returning them from a function. If this simplification produces an + // error, it should be treated as though the function call threw that error. + // It should *not* be treated as a protocol error. + message Calculation { + // The calculation's name. Mandatory. The host may only set this to names + // that the Sass specification uses to create calculations. + string name = 1; + + // The calculation's arguments. Mandatory. The host must use exactly the + // number of arguments used by the Sass specification for calculations with + // the given `name`. 
+ repeated CalculationValue arguments = 2; + + // A single component of a calculation expression. + message CalculationValue { + // The value of the component. Mandatory. + oneof value { + Number number = 1; + + // An unquoted string, as from a function like `var()` or `env()`. + string string = 2; + + // An unquoted string as created by interpolation for + // backwards-compatibility with older Sass syntax. + string interpolation = 3; + + CalculationOperation operation = 4; + Calculation calculation = 5; + } + } + + // A binary operation that appears in a calculation. + message CalculationOperation { + // The operator to perform. + CalculationOperator operator = 1; + + // The left-hand side of the operation. + CalculationValue left = 2; + + // The right-hand side of the operation. + CalculationValue right = 3; + } + } + + // The value itself. Mandatory. + // + // This is wrapped in a message type rather than used directly to reduce + // repetition, and because oneofs can't be repeated. + oneof value { + String string = 1; + Number number = 2; + RgbColor rgb_color = 3; + HslColor hsl_color = 4; + List list = 5; + Map map = 6; + SingletonValue singleton = 7; + CompilerFunction compiler_function = 8; + HostFunction host_function = 9; + ArgumentList argument_list = 10; + HwbColor hwb_color = 11; + Calculation calculation = 12; + } +} + +// Different types of separators a list can have. +enum ListSeparator { + // List elements are separated by a comma. + COMMA = 0; + + // List elements are separated by whitespace. + SPACE = 1; + + // List elements are separated by a forward slash. + SLASH = 2; + + // The list's separator hasn't yet been determined. This is only allowed for + // singleton and empty lists. + // + // Singleton lists and empty lists don't have separators defined. This means + // that list functions will prefer other lists' separators if possible. + UNDECIDED = 3; +} + +// Singleton SassScript values that have no internal state. 
+enum SingletonValue { + // The SassScript boolean true value. + TRUE = 0; + + // The SassScript boolean false value. + FALSE = 1; + + // The SassScript null value. + NULL = 2; +} + +// An operator used in a calculation value's operation. +enum CalculationOperator { + // The addition operator. + PLUS = 0; + + // The subtraction operator. + MINUS = 1; + + // The multiplication operator. + TIMES = 2; + + // The division operator. + DIVIDE = 3; +} diff --git a/vendor/github.com/bep/godartsass/options.go b/vendor/github.com/bep/godartsass/options.go new file mode 100644 index 0000000..08d4976 --- /dev/null +++ b/vendor/github.com/bep/godartsass/options.go @@ -0,0 +1,231 @@ +package godartsass + +import ( + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/bep/godartsass/internal/embeddedsassv1" +) + +// Options configures a Transpiler. +type Options struct { + // The path to the Dart Sass wrapper binary, an absolute filename + // if not in $PATH. + // If this is not set, we will try 'dart-sass-embedded' + // (or 'dart-sass-embedded.bat' on Windows) in the OS $PATH. + // There may be several ways to install this, one would be to + // download it from here: https://github.com/sass/dart-sass-embedded/releases + DartSassEmbeddedFilename string + + // Timeout is the duration allowed for dart sass to transpile. + // This was added for the beta6 version of Dart Sass Protocol, + // as running this code against the beta5 binary would hang + // on Execute. + Timeout time.Duration + + // LogEventHandler will, if set, receive log events from Dart Sass, + // e.g. @debug and @warn log statements. + LogEventHandler func(LogEvent) +} + +// LogEvent is a type of log event from Dart Sass. +type LogEventType int + +const ( + // Usually triggered by the @warn directive. + LogEventTypeWarning LogEventType = iota + + // Events trigered for usage of deprecated Sass features. + LogEventTypeDeprecated + + // Triggered by the @debug directive. 
+ LogEventTypeDebug +) + +type LogEvent struct { + // Type is the type of log event. + Type LogEventType + + // Message on the form url:line:col message. + Message string +} + +func (opts *Options) init() error { + if opts.DartSassEmbeddedFilename == "" { + opts.DartSassEmbeddedFilename = defaultDartSassEmbeddedFilename + } + + if opts.Timeout == 0 { + opts.Timeout = 30 * time.Second + } + + return nil +} + +// ImportResolver allows custom import resolution. +// +// CanonicalizeURL should create a canonical version of the given URL if it's +// able to resolve it, else return an empty string. +// +// A canonicalized URL should include a scheme, e.g. 'file:///foo/bar.scss', +// if applicable, see: +// +// https://en.wikipedia.org/wiki/File_URI_scheme +// +// Importers must ensure that the same canonical URL +// always refers to the same stylesheet. +// +// Load loads the canonicalized URL's content. +type ImportResolver interface { + CanonicalizeURL(url string) (string, error) + Load(canonicalizedURL string) (Import, error) +} + +type Import struct { + // The content of the imported file. + Content string + + // The syntax of the imported file. + SourceSyntax SourceSyntax +} + +// Args holds the arguments to Execute. +type Args struct { + // The input source. + Source string + + // The URL of the Source. + // Leave empty if it's unknown. + // Must include a scheme, e.g. 'file:///myproject/main.scss' + // See https://en.wikipedia.org/wiki/File_URI_scheme + // + // Note: There is an open issue for this value when combined with custom + // importers, see https://github.com/sass/dart-sass-embedded/issues/24 + URL string + + // Defaults is SCSS. + SourceSyntax SourceSyntax + + // Default is EXPANDED. + OutputStyle OutputStyle + + // If enabled, a sourcemap will be generated and returned in Result. + EnableSourceMap bool + + // If enabled, sources will be embedded in the generated source map. + SourceMapIncludeSources bool + + // Custom resolver to use to resolve imports. 
+ // If set, this will be the first in the resolver chain. + ImportResolver ImportResolver + + // Additional file paths to uses to resolve imports. + IncludePaths []string + + sassOutputStyle embeddedsassv1.OutputStyle + sassSourceSyntax embeddedsassv1.Syntax + + // Ordered list starting with options.ImportResolver, then IncludePaths. + sassImporters []*embeddedsassv1.InboundMessage_CompileRequest_Importer +} + +func (args *Args) init(seq uint32, opts Options) error { + if args.OutputStyle == "" { + args.OutputStyle = OutputStyleExpanded + } + if args.SourceSyntax == "" { + args.SourceSyntax = SourceSyntaxSCSS + } + + v, ok := embeddedsassv1.OutputStyle_value[string(args.OutputStyle)] + if !ok { + return fmt.Errorf("invalid OutputStyle %q", args.OutputStyle) + } + args.sassOutputStyle = embeddedsassv1.OutputStyle(v) + + v, ok = embeddedsassv1.Syntax_value[string(args.SourceSyntax)] + if !ok { + return fmt.Errorf("invalid SourceSyntax %q", args.SourceSyntax) + } + + args.sassSourceSyntax = embeddedsassv1.Syntax(v) + + if args.ImportResolver != nil { + args.sassImporters = []*embeddedsassv1.InboundMessage_CompileRequest_Importer{ + { + Importer: &embeddedsassv1.InboundMessage_CompileRequest_Importer_ImporterId{ + ImporterId: seq, + }, + }, + } + } + + if args.IncludePaths != nil { + for _, p := range args.IncludePaths { + args.sassImporters = append(args.sassImporters, &embeddedsassv1.InboundMessage_CompileRequest_Importer{Importer: &embeddedsassv1.InboundMessage_CompileRequest_Importer_Path{ + Path: filepath.Clean(p), + }}) + } + } + + return nil +} + +type ( + // OutputStyle defines the style of the generated CSS. + OutputStyle string + + // SourceSyntax defines the syntax of the source passed in Execute. + SourceSyntax string +) + +const ( + // Expanded (default) output. + // Note that LibSASS and Ruby SASS have more output styles, and their + // default is NESTED. + OutputStyleExpanded OutputStyle = "EXPANDED" + + // Compressed/minified output. 
+ OutputStyleCompressed OutputStyle = "COMPRESSED" +) + +const ( + // SCSS style source syntax (default). + SourceSyntaxSCSS SourceSyntax = "SCSS" + + // Indented or SASS style source syntax. + SourceSyntaxSASS SourceSyntax = "INDENTED" + + // Regular CSS source syntax. + SourceSyntaxCSS SourceSyntax = "CSS" +) + +// ParseOutputStyle will convert s into OutputStyle. +// Case insensitive, returns OutputStyleNested for unknown value. +func ParseOutputStyle(s string) OutputStyle { + switch OutputStyle(strings.ToUpper(s)) { + case OutputStyleCompressed: + return OutputStyleCompressed + case OutputStyleExpanded: + return OutputStyleExpanded + default: + return OutputStyleExpanded + } +} + +// ParseSourceSyntax will convert s into SourceSyntax. +// Case insensitive, returns SourceSyntaxSCSS for unknown value. +func ParseSourceSyntax(s string) SourceSyntax { + switch SourceSyntax(strings.ToUpper(s)) { + case SourceSyntaxSCSS: + return SourceSyntaxSCSS + case SourceSyntaxSASS, "SASS": + return SourceSyntaxSASS + case SourceSyntaxCSS: + return SourceSyntaxCSS + default: + return SourceSyntaxSCSS + } +} diff --git a/vendor/github.com/bep/godartsass/transpiler.go b/vendor/github.com/bep/godartsass/transpiler.go new file mode 100644 index 0000000..68b354a --- /dev/null +++ b/vendor/github.com/bep/godartsass/transpiler.go @@ -0,0 +1,533 @@ +// Package godartsass provides a Go API for the Dass Sass Embedded protocol. +// +// Use the Start function to create and start a new thread safe transpiler. +// Close it when done. +package godartsass + +import ( + "encoding/binary" + "encoding/json" + "errors" + "fmt" + "io" + "net/url" + "os" + "os/exec" + "path" + "strings" + "sync" + "time" + + "github.com/cli/safeexec" + + "github.com/bep/godartsass/internal/embeddedsassv1" + "google.golang.org/protobuf/proto" +) + +const defaultDartSassEmbeddedFilename = "dart-sass-embedded" + +// ErrShutdown will be returned from Execute and Close if the transpiler is or +// is about to be shut down. 
+var ErrShutdown = errors.New("connection is shut down") + +// Start creates and starts a new SCSS transpiler that communicates with the +// Dass Sass Embedded protocol via Stdin and Stdout. +// +// Closing the transpiler will shut down the process. +// +// Note that the Transpiler is thread safe, and the recommended way of using +// this is to create one and use that for all the SCSS processing needed. +func Start(opts Options) (*Transpiler, error) { + if err := opts.init(); err != nil { + return nil, err + } + + // See https://github.com/golang/go/issues/38736 + bin, err := safeexec.LookPath(opts.DartSassEmbeddedFilename) + if err != nil { + return nil, err + } + + cmd := exec.Command(bin) + cmd.Stderr = os.Stderr + + conn, err := newConn(cmd) + if err != nil { + return nil, err + } + + if err := conn.Start(); err != nil { + return nil, err + } + + t := &Transpiler{ + opts: opts, + conn: conn, + lenBuf: make([]byte, binary.MaxVarintLen64), + pending: make(map[uint32]*call), + } + + go t.input() + + return t, nil +} + +// Version returns version information about the Dart Sass frameworks used +// in dartSassEmbeddedFilename. +func Version(dartSassEmbeddedFilename string) (DartSassVersion, error) { + var v DartSassVersion + bin, err := safeexec.LookPath(dartSassEmbeddedFilename) + if err != nil { + return v, err + } + + cmd := exec.Command(bin, "--version") + cmd.Stderr = os.Stderr + + out, err := cmd.Output() + if err != nil { + return v, err + } + + if err := json.Unmarshal(out, &v); err != nil { + return v, err + } + + return v, nil + +} + +type DartSassVersion struct { + ProtocolVersion string `json:"protocolVersion"` + CompilerVersion string `json:"compilerVersion"` + ImplementationVersion string `json:"implementationVersion"` + ImplementationName string `json:"implementationName"` + ID int `json:"id"` +} + +// Transpiler controls transpiling of SCSS into CSS. 
+type Transpiler struct { + opts Options + + // stdin/stdout of the Dart Sass protocol + conn byteReadWriteCloser + lenBuf []byte + msgBuf []byte + + closing bool + shutdown bool + + // Protects the sending of messages to Dart Sass. + sendMu sync.Mutex + + mu sync.Mutex // Protects all below. + seq uint32 + pending map[uint32]*call +} + +// IsShutDown checks if all pending calls have been shut down. +// Used in tests. +func (t *Transpiler) IsShutDown() bool { + for _, p := range t.pending { + if p.Error != ErrShutdown { + return false + } + } + return true +} + +// Result holds the result returned from Execute. +type Result struct { + CSS string + SourceMap string +} + +// SassError is the error returned from Execute on compile errors. +type SassError struct { + Message string `json:"message"` + Span struct { + Text string `json:"text"` + Start struct { + Offset int `json:"offset"` + Column int `json:"column"` + } `json:"start"` + End struct { + Offset int `json:"offset"` + Column int `json:"column"` + } `json:"end"` + Url string `json:"url"` + Context string `json:"context"` + } `json:"span"` +} + +func (e SassError) Error() string { + span := e.Span + file := path.Clean(strings.TrimPrefix(span.Url, "file:")) + return fmt.Sprintf("file: %q, context: %q: %s", file, span.Context, e.Message) +} + +// Close closes the stream to the embedded Dart Sass Protocol, shutting it down. +// If it is already shutting down, ErrShutdown is returned. +func (t *Transpiler) Close() error { + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + defer t.mu.Unlock() + + if t.closing { + return ErrShutdown + } + + t.closing = true + err := t.conn.Close() + + return err +} + +// Execute transpiles the string Source given in Args into CSS. +// If Dart Sass resturns a "compile failure", the error returned will be +// of type SassError. 
+func (t *Transpiler) Execute(args Args) (Result, error) { + var result Result + + createInboundMessage := func(seq uint32) (*embeddedsassv1.InboundMessage, error) { + if err := args.init(seq, t.opts); err != nil { + return nil, err + } + + message := &embeddedsassv1.InboundMessage_CompileRequest_{ + CompileRequest: &embeddedsassv1.InboundMessage_CompileRequest{ + Importers: args.sassImporters, + Style: args.sassOutputStyle, + Input: &embeddedsassv1.InboundMessage_CompileRequest_String_{ + String_: &embeddedsassv1.InboundMessage_CompileRequest_StringInput{ + Syntax: args.sassSourceSyntax, + Source: args.Source, + Url: args.URL, + }, + }, + SourceMap: args.EnableSourceMap, + SourceMapIncludeSources: args.SourceMapIncludeSources, + }, + } + + return &embeddedsassv1.InboundMessage{ + Message: message, + }, nil + } + + call, err := t.newCall(createInboundMessage, args) + if err != nil { + return result, err + } + + select { + case call = <-call.Done: + case <-time.After(t.opts.Timeout): + return result, errors.New("timeout waiting for Dart Sass to respond; if you're running with Embedded Sass protocol < beta6, you need to upgrade") + } + + if call.Error != nil { + return result, call.Error + } + + response := call.Response + csp := response.Message.(*embeddedsassv1.OutboundMessage_CompileResponse_) + + switch resp := csp.CompileResponse.Result.(type) { + case *embeddedsassv1.OutboundMessage_CompileResponse_Success: + result.CSS = resp.Success.Css + result.SourceMap = resp.Success.SourceMap + case *embeddedsassv1.OutboundMessage_CompileResponse_Failure: + asJson, err := json.Marshal(resp.Failure) + if err != nil { + return result, err + } + var sassErr SassError + err = json.Unmarshal(asJson, &sassErr) + if err != nil { + return result, err + } + return result, sassErr + default: + return result, fmt.Errorf("unsupported response type: %T", resp) + } + + return result, nil +} + +func (t *Transpiler) getCall(id uint32) *call { + t.mu.Lock() + defer t.mu.Unlock() + call, 
found := t.pending[id] + if !found { + panic(fmt.Sprintf("call with ID %d not found", id)) + } + return call +} + +func (t *Transpiler) input() { + var err error + + for err == nil { + // The header is the length in bytes of the remaining message. + var l uint64 + l, err = binary.ReadUvarint(t.conn) + if err != nil { + break + } + + plen := int(l) + if len(t.msgBuf) < plen { + t.msgBuf = make([]byte, plen) + } + + buf := t.msgBuf[:plen] + + _, err = io.ReadFull(t.conn, buf) + if err != nil { + break + } + + var msg embeddedsassv1.OutboundMessage + + if err = proto.Unmarshal(buf, &msg); err != nil { + break + } + + switch c := msg.Message.(type) { + case *embeddedsassv1.OutboundMessage_CompileResponse_: + id := c.CompileResponse.Id + // Attach it to the correct pending call. + t.mu.Lock() + call := t.pending[id] + delete(t.pending, id) + t.mu.Unlock() + if call == nil { + err = fmt.Errorf("call with ID %d not found", id) + break + } + call.Response = &msg + call.done() + case *embeddedsassv1.OutboundMessage_CanonicalizeRequest_: + call := t.getCall(c.CanonicalizeRequest.CompilationId) + resolved, resolveErr := call.importResolver.CanonicalizeURL(c.CanonicalizeRequest.GetUrl()) + + var response *embeddedsassv1.InboundMessage_CanonicalizeResponse + if resolveErr != nil { + response = &embeddedsassv1.InboundMessage_CanonicalizeResponse{ + Id: c.CanonicalizeRequest.GetId(), + Result: &embeddedsassv1.InboundMessage_CanonicalizeResponse_Error{ + Error: resolveErr.Error(), + }, + } + } else { + var url *embeddedsassv1.InboundMessage_CanonicalizeResponse_Url + if resolved != "" { + url = &embeddedsassv1.InboundMessage_CanonicalizeResponse_Url{ + Url: resolved, + } + } + response = &embeddedsassv1.InboundMessage_CanonicalizeResponse{ + Id: c.CanonicalizeRequest.GetId(), + Result: url, + } + } + + err = t.sendInboundMessage( + &embeddedsassv1.InboundMessage{ + Message: &embeddedsassv1.InboundMessage_CanonicalizeResponse_{ + CanonicalizeResponse: response, + }, + }, + ) + case 
*embeddedsassv1.OutboundMessage_ImportRequest_: + call := t.getCall(c.ImportRequest.CompilationId) + url := c.ImportRequest.GetUrl() + imp, loadErr := call.importResolver.Load(url) + sourceSyntax := embeddedsassv1.Syntax_value[string(imp.SourceSyntax)] + + var response *embeddedsassv1.InboundMessage_ImportResponse + var sourceMapURL string + + // Dart Sass expect a browser-accessible URL or an empty string. + // If no URL is supplied, a `data:` URL wil be generated + // automatically from `contents` + if hasScheme(url) { + sourceMapURL = url + } + + if loadErr != nil { + response = &embeddedsassv1.InboundMessage_ImportResponse{ + Id: c.ImportRequest.GetId(), + Result: &embeddedsassv1.InboundMessage_ImportResponse_Error{ + Error: loadErr.Error(), + }, + } + } else { + response = &embeddedsassv1.InboundMessage_ImportResponse{ + Id: c.ImportRequest.GetId(), + Result: &embeddedsassv1.InboundMessage_ImportResponse_Success{ + Success: &embeddedsassv1.InboundMessage_ImportResponse_ImportSuccess{ + Contents: imp.Content, + SourceMapUrl: sourceMapURL, + Syntax: embeddedsassv1.Syntax(sourceSyntax), + }, + }, + } + } + + err = t.sendInboundMessage( + &embeddedsassv1.InboundMessage{ + Message: &embeddedsassv1.InboundMessage_ImportResponse_{ + ImportResponse: response, + }, + }, + ) + case *embeddedsassv1.OutboundMessage_LogEvent_: + if t.opts.LogEventHandler != nil { + var logEvent LogEvent + e := c.LogEvent + if e.Span != nil { + u := e.Span.Url + if u == "" { + u = "stdin" + } + u, _ = url.QueryUnescape(u) + logEvent = LogEvent{ + Type: LogEventType(e.Type), + Message: fmt.Sprintf("%s:%d:%d: %s", u, e.Span.Start.Line, e.Span.Start.Column, c.LogEvent.GetMessage()), + } + } else { + logEvent = LogEvent{ + Type: LogEventType(e.Type), + Message: e.GetMessage(), + } + } + + t.opts.LogEventHandler(logEvent) + + } + + case *embeddedsassv1.OutboundMessage_Error: + err = fmt.Errorf("SASS error: %s", c.Error.GetMessage()) + default: + err = fmt.Errorf("unsupported response message 
type. %T", msg.Message) + } + + } + + // Terminate pending calls. + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + defer t.mu.Unlock() + + t.shutdown = true + isEOF := err == io.EOF || strings.Contains(err.Error(), "already closed") + if isEOF { + if t.closing { + err = ErrShutdown + } else { + err = io.ErrUnexpectedEOF + } + } + + for _, call := range t.pending { + call.Error = err + call.done() + } +} + +func (t *Transpiler) newCall(createInbound func(seq uint32) (*embeddedsassv1.InboundMessage, error), args Args) (*call, error) { + t.mu.Lock() + id := t.seq + req, err := createInbound(id) + if err != nil { + t.mu.Unlock() + return nil, err + } + + call := &call{ + Request: req, + Done: make(chan *call, 1), + importResolver: args.ImportResolver, + } + + if t.shutdown || t.closing { + t.mu.Unlock() + call.Error = ErrShutdown + call.done() + return call, nil + } + + t.pending[id] = call + t.seq++ + + t.mu.Unlock() + + switch c := call.Request.Message.(type) { + case *embeddedsassv1.InboundMessage_CompileRequest_: + c.CompileRequest.Id = id + default: + return nil, fmt.Errorf("unsupported request message type. %T", call.Request.Message) + } + + return call, t.sendInboundMessage(call.Request) +} + +func (t *Transpiler) sendInboundMessage(message *embeddedsassv1.InboundMessage) error { + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + if t.closing || t.shutdown { + t.mu.Unlock() + return ErrShutdown + } + t.mu.Unlock() + + out, err := proto.Marshal(message) + if err != nil { + return fmt.Errorf("failed to marshal request: %s", err) + } + + // Every message must begin with a varint indicating the length in bytes of + // the remaining message. 
+ reqLen := uint64(len(out)) + + n := binary.PutUvarint(t.lenBuf, reqLen) + _, err = t.conn.Write(t.lenBuf[:n]) + if err != nil { + return err + } + + n, err = t.conn.Write(out) + if n != len(out) { + return errors.New("failed to write payload") + } + return err +} + +type call struct { + Request *embeddedsassv1.InboundMessage + Response *embeddedsassv1.OutboundMessage + importResolver ImportResolver + + Error error + Done chan *call +} + +func (call *call) done() { + select { + case call.Done <- call: + default: + } +} + +func hasScheme(s string) bool { + u, err := url.ParseRequestURI(s) + if err != nil { + return false + } + return u.Scheme != "" +} diff --git a/vendor/github.com/bep/godartsass/v2/.gitignore b/vendor/github.com/bep/godartsass/v2/.gitignore new file mode 100644 index 0000000..dff079f --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/.gitignore @@ -0,0 +1,17 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +lib/ + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/bep/godartsass/v2/LICENSE b/vendor/github.com/bep/godartsass/v2/LICENSE new file mode 100644 index 0000000..7e406ef --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all 
+copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/godartsass/v2/README.md b/vendor/github.com/bep/godartsass/v2/README.md new file mode 100644 index 0000000..5332060 --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/README.md @@ -0,0 +1,15 @@ +[![Tests on Linux, MacOS and Windows](https://github.com/bep/godartsass/workflows/Test/badge.svg)](https://github.com/bep/godartsass/actions?query=workflow%3ATest) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/godartsass)](https://goreportcard.com/report/github.com/bep/godartsass) +[![codecov](https://codecov.io/gh/bep/godartsass/branch/main/graph/badge.svg?token=OWZ9RCAYWO)](https://codecov.io/gh/bep/godartsass) +[![GoDoc](https://godoc.org/github.com/bep/godartsass?status.svg)](https://godoc.org/github.com/bep/godartsass) + +This is a Go API backed by the native [Dart Sass](https://github.com/sass/dart-sass/releases) executable running with `sass --embedded`. + +>**Note:** The `v2.x.x` of this project targets the `v2` of the Dart Sass Embedded protocol with the `sass` exexutable in releases that can be downloaeded [here](https://github.com/sass/dart-sass/releases). For `v1` you need to import `github.com/bep/godartsass` and not `github.com/bep/godartsass/v2`. + +The primary motivation for this project is to provide `SCSS` support to [Hugo](https://gohugo.io/). I welcome PRs with bug fixes. I will also consider adding functionality, but please raise an issue discussing it first. 
+ +For LibSass bindings in Go, see [GoLibSass](https://github.com/bep/golibsass). + +``` + diff --git a/vendor/github.com/bep/godartsass/v2/codecov.yml b/vendor/github.com/bep/godartsass/v2/codecov.yml new file mode 100644 index 0000000..2e3090a --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + target: auto + threshold: 0.5% + patch: off + +comment: + require_changes: true diff --git a/vendor/github.com/bep/godartsass/v2/conn.go b/vendor/github.com/bep/godartsass/v2/conn.go new file mode 100644 index 0000000..2b6042d --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/conn.go @@ -0,0 +1,125 @@ +package godartsass + +import ( + "bufio" + "bytes" + "errors" + "io" + "os" + "os/exec" + "regexp" + "runtime" + "time" +) + +func newConn(cmd *exec.Cmd) (_ conn, err error) { + in, err := cmd.StdinPipe() + if err != nil { + return conn{}, err + } + defer func() { + if err != nil { + in.Close() + } + }() + + out, err := cmd.StdoutPipe() + stdErr := &tailBuffer{limit: 1024} + buff := bufio.NewReader(out) + c := conn{buff, buff, out, in, stdErr, cmd} + cmd.Stderr = c.stdErr + + return c, err +} + +type byteReadWriteCloser interface { + io.ReadWriteCloser + io.ByteReader +} + +type conn struct { + io.ByteReader + io.Reader + readerCloser io.Closer + io.WriteCloser + stdErr *tailBuffer + cmd *exec.Cmd +} + +// Start starts conn's Cmd. +func (c conn) Start() error { + err := c.cmd.Start() + if err != nil { + return c.Close() + } + return err +} + +// Close closes conn's WriteCloser, ReadClosers, and waits for the command to finish. 
+func (c conn) Close() error { + + writeErr := c.WriteCloser.Close() + readErr := c.readerCloser.Close() + var interruptErr error + + if runtime.GOOS != "windows" { + // See https://github.com/bep/godartsass/issues/19 + interruptErr = c.cmd.Process.Signal(os.Interrupt) + if interruptErr == os.ErrProcessDone { + interruptErr = nil + } + } + + cmdErr := c.waitWithTimeout() + + if writeErr != nil { + return writeErr + } + + if readErr != nil { + return readErr + } + + if interruptErr != nil { + return interruptErr + } + + return cmdErr +} + +var brokenPipeRe = regexp.MustCompile("Broken pipe|pipe is being closed") + +// dart-sass ends on itself on EOF, this is just to give it some +// time to do so. +func (c conn) waitWithTimeout() error { + result := make(chan error, 1) + go func() { result <- c.cmd.Wait() }() + select { + case err := <-result: + if eerr, ok := err.(*exec.ExitError); ok { + if eerr.Error() == "signal: interrupt" { + return nil + } + if brokenPipeRe.MatchString(c.stdErr.String()) { + return nil + } + + } + return err + case <-time.After(5 * time.Second): + return errors.New("timed out waiting for dart-sass to finish") + } +} + +type tailBuffer struct { + limit int + bytes.Buffer +} + +func (b *tailBuffer) Write(p []byte) (n int, err error) { + if len(p)+b.Buffer.Len() > b.limit { + b.Reset() + } + n, err = b.Buffer.Write(p) + return +} diff --git a/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/README.md b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/README.md new file mode 100644 index 0000000..e9da421 --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/README.md @@ -0,0 +1,5 @@ + +* Install protobuf: https://github.com/protocolbuffers/protobuf +* Install the Go plugin: go install google.golang.org/protobuf/cmd/protoc-gen-go@latest +* Download the correct version of the proto file: https://github.com/sass/sass/blob/main/spec/embedded_sass.proto +* protoc 
--go_opt=Membedded_sass.proto=github.com/bep/godartsass/internal/embeddedsass --go_opt=paths=source_relative --go_out=. embedded_sass.proto diff --git a/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.pb.go b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.pb.go new file mode 100644 index 0000000..36a31ec --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.pb.go @@ -0,0 +1,5278 @@ +// Copyright 2019 Google Inc. Use of this source code is governed by an +// MIT-style license that can be found in the LICENSE file or at +// https://opensource.org/licenses/MIT. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.30.0 +// protoc v4.23.2 +// source: embedded_sass.proto + +package embeddedsass + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// Possible ways to format the CSS output. The compiler is not required to +// support all possible options; if the host requests an unsupported style, the +// compiler should choose the closest supported style. +type OutputStyle int32 + +const ( + // Each selector and declaration is written on its own line. + OutputStyle_EXPANDED OutputStyle = 0 + // The entire stylesheet is written on a single line, with as few characters + // as possible. + OutputStyle_COMPRESSED OutputStyle = 1 +) + +// Enum value maps for OutputStyle. 
+var ( + OutputStyle_name = map[int32]string{ + 0: "EXPANDED", + 1: "COMPRESSED", + } + OutputStyle_value = map[string]int32{ + "EXPANDED": 0, + "COMPRESSED": 1, + } +) + +func (x OutputStyle) Enum() *OutputStyle { + p := new(OutputStyle) + *p = x + return p +} + +func (x OutputStyle) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (OutputStyle) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[0].Descriptor() +} + +func (OutputStyle) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[0] +} + +func (x OutputStyle) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use OutputStyle.Descriptor instead. +func (OutputStyle) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0} +} + +// Possible syntaxes for a Sass stylesheet. +type Syntax int32 + +const ( + // The CSS-superset `.scss` syntax. + Syntax_SCSS Syntax = 0 + // The indented `.sass` syntax. + Syntax_INDENTED Syntax = 1 + // Plain CSS syntax that doesn't support any special Sass features. + Syntax_CSS Syntax = 2 +) + +// Enum value maps for Syntax. +var ( + Syntax_name = map[int32]string{ + 0: "SCSS", + 1: "INDENTED", + 2: "CSS", + } + Syntax_value = map[string]int32{ + "SCSS": 0, + "INDENTED": 1, + "CSS": 2, + } +) + +func (x Syntax) Enum() *Syntax { + p := new(Syntax) + *p = x + return p +} + +func (x Syntax) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (Syntax) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[1].Descriptor() +} + +func (Syntax) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[1] +} + +func (x Syntax) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use Syntax.Descriptor instead. 
+func (Syntax) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1} +} + +// The possible types of [LogEvent]. +type LogEventType int32 + +const ( + // A warning for something other than a deprecated Sass feature. Often emitted + // due to a stylesheet using the `@warn` rule. + LogEventType_WARNING LogEventType = 0 + // A warning indicating that the stylesheet is using a deprecated Sass + // feature. Compilers should not add text like "deprecation warning" to + // deprecation warnings; it's up to the host to determine how to signal that + // to the user. + LogEventType_DEPRECATION_WARNING LogEventType = 1 + // A message generated by the user for their own debugging purposes. + LogEventType_DEBUG LogEventType = 2 +) + +// Enum value maps for LogEventType. +var ( + LogEventType_name = map[int32]string{ + 0: "WARNING", + 1: "DEPRECATION_WARNING", + 2: "DEBUG", + } + LogEventType_value = map[string]int32{ + "WARNING": 0, + "DEPRECATION_WARNING": 1, + "DEBUG": 2, + } +) + +func (x LogEventType) Enum() *LogEventType { + p := new(LogEventType) + *p = x + return p +} + +func (x LogEventType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (LogEventType) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[2].Descriptor() +} + +func (LogEventType) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[2] +} + +func (x LogEventType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use LogEventType.Descriptor instead. +func (LogEventType) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{2} +} + +// Potential types of protocol errors. +type ProtocolErrorType int32 + +const ( + // A message was received that couldn't be decoded as an `InboundMessage` (for + // the compiler) or `OutboundMessage` (for the host). 
+ ProtocolErrorType_PARSE ProtocolErrorType = 0 + // A message was received that violated a documented restriction, such as not + // providing a mandatory field. + ProtocolErrorType_PARAMS ProtocolErrorType = 1 + // Something unexpected went wrong within the endpoint. + ProtocolErrorType_INTERNAL ProtocolErrorType = 2 +) + +// Enum value maps for ProtocolErrorType. +var ( + ProtocolErrorType_name = map[int32]string{ + 0: "PARSE", + 1: "PARAMS", + 2: "INTERNAL", + } + ProtocolErrorType_value = map[string]int32{ + "PARSE": 0, + "PARAMS": 1, + "INTERNAL": 2, + } +) + +func (x ProtocolErrorType) Enum() *ProtocolErrorType { + p := new(ProtocolErrorType) + *p = x + return p +} + +func (x ProtocolErrorType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ProtocolErrorType) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[3].Descriptor() +} + +func (ProtocolErrorType) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[3] +} + +func (x ProtocolErrorType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ProtocolErrorType.Descriptor instead. +func (ProtocolErrorType) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{3} +} + +// Different types of separators a list can have. +type ListSeparator int32 + +const ( + // List elements are separated by a comma. + ListSeparator_COMMA ListSeparator = 0 + // List elements are separated by whitespace. + ListSeparator_SPACE ListSeparator = 1 + // List elements are separated by a forward slash. + ListSeparator_SLASH ListSeparator = 2 + // The list's separator hasn't yet been determined. This is only allowed for + // singleton and empty lists. + // + // Singleton lists and empty lists don't have separators defined. This means + // that list functions will prefer other lists' separators if possible. 
+ ListSeparator_UNDECIDED ListSeparator = 3 +) + +// Enum value maps for ListSeparator. +var ( + ListSeparator_name = map[int32]string{ + 0: "COMMA", + 1: "SPACE", + 2: "SLASH", + 3: "UNDECIDED", + } + ListSeparator_value = map[string]int32{ + "COMMA": 0, + "SPACE": 1, + "SLASH": 2, + "UNDECIDED": 3, + } +) + +func (x ListSeparator) Enum() *ListSeparator { + p := new(ListSeparator) + *p = x + return p +} + +func (x ListSeparator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ListSeparator) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[4].Descriptor() +} + +func (ListSeparator) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[4] +} + +func (x ListSeparator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ListSeparator.Descriptor instead. +func (ListSeparator) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4} +} + +// Singleton SassScript values that have no internal state. +type SingletonValue int32 + +const ( + // The SassScript boolean true value. + SingletonValue_TRUE SingletonValue = 0 + // The SassScript boolean false value. + SingletonValue_FALSE SingletonValue = 1 + // The SassScript null value. + SingletonValue_NULL SingletonValue = 2 +) + +// Enum value maps for SingletonValue. 
+var ( + SingletonValue_name = map[int32]string{ + 0: "TRUE", + 1: "FALSE", + 2: "NULL", + } + SingletonValue_value = map[string]int32{ + "TRUE": 0, + "FALSE": 1, + "NULL": 2, + } +) + +func (x SingletonValue) Enum() *SingletonValue { + p := new(SingletonValue) + *p = x + return p +} + +func (x SingletonValue) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (SingletonValue) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[5].Descriptor() +} + +func (SingletonValue) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[5] +} + +func (x SingletonValue) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use SingletonValue.Descriptor instead. +func (SingletonValue) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{5} +} + +// An operator used in a calculation value's operation. +type CalculationOperator int32 + +const ( + // The addition operator. + CalculationOperator_PLUS CalculationOperator = 0 + // The subtraction operator. + CalculationOperator_MINUS CalculationOperator = 1 + // The multiplication operator. + CalculationOperator_TIMES CalculationOperator = 2 + // The division operator. + CalculationOperator_DIVIDE CalculationOperator = 3 +) + +// Enum value maps for CalculationOperator. 
+var ( + CalculationOperator_name = map[int32]string{ + 0: "PLUS", + 1: "MINUS", + 2: "TIMES", + 3: "DIVIDE", + } + CalculationOperator_value = map[string]int32{ + "PLUS": 0, + "MINUS": 1, + "TIMES": 2, + "DIVIDE": 3, + } +) + +func (x CalculationOperator) Enum() *CalculationOperator { + p := new(CalculationOperator) + *p = x + return p +} + +func (x CalculationOperator) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (CalculationOperator) Descriptor() protoreflect.EnumDescriptor { + return file_embedded_sass_proto_enumTypes[6].Descriptor() +} + +func (CalculationOperator) Type() protoreflect.EnumType { + return &file_embedded_sass_proto_enumTypes[6] +} + +func (x CalculationOperator) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use CalculationOperator.Descriptor instead. +func (CalculationOperator) EnumDescriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{6} +} + +// The wrapper type for all messages sent from the host to the compiler. This +// provides a `oneof` that makes it possible to determine the type of each +// inbound message. +type InboundMessage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The wrapped message. Mandatory. 
+ // + // Types that are assignable to Message: + // + // *InboundMessage_CompileRequest_ + // *InboundMessage_CanonicalizeResponse_ + // *InboundMessage_ImportResponse_ + // *InboundMessage_FileImportResponse_ + // *InboundMessage_FunctionCallResponse_ + // *InboundMessage_VersionRequest_ + Message isInboundMessage_Message `protobuf_oneof:"message"` +} + +func (x *InboundMessage) Reset() { + *x = InboundMessage{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage) ProtoMessage() {} + +func (x *InboundMessage) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage.ProtoReflect.Descriptor instead. 
+func (*InboundMessage) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0} +} + +func (m *InboundMessage) GetMessage() isInboundMessage_Message { + if m != nil { + return m.Message + } + return nil +} + +func (x *InboundMessage) GetCompileRequest() *InboundMessage_CompileRequest { + if x, ok := x.GetMessage().(*InboundMessage_CompileRequest_); ok { + return x.CompileRequest + } + return nil +} + +func (x *InboundMessage) GetCanonicalizeResponse() *InboundMessage_CanonicalizeResponse { + if x, ok := x.GetMessage().(*InboundMessage_CanonicalizeResponse_); ok { + return x.CanonicalizeResponse + } + return nil +} + +func (x *InboundMessage) GetImportResponse() *InboundMessage_ImportResponse { + if x, ok := x.GetMessage().(*InboundMessage_ImportResponse_); ok { + return x.ImportResponse + } + return nil +} + +func (x *InboundMessage) GetFileImportResponse() *InboundMessage_FileImportResponse { + if x, ok := x.GetMessage().(*InboundMessage_FileImportResponse_); ok { + return x.FileImportResponse + } + return nil +} + +func (x *InboundMessage) GetFunctionCallResponse() *InboundMessage_FunctionCallResponse { + if x, ok := x.GetMessage().(*InboundMessage_FunctionCallResponse_); ok { + return x.FunctionCallResponse + } + return nil +} + +func (x *InboundMessage) GetVersionRequest() *InboundMessage_VersionRequest { + if x, ok := x.GetMessage().(*InboundMessage_VersionRequest_); ok { + return x.VersionRequest + } + return nil +} + +type isInboundMessage_Message interface { + isInboundMessage_Message() +} + +type InboundMessage_CompileRequest_ struct { + CompileRequest *InboundMessage_CompileRequest `protobuf:"bytes,2,opt,name=compile_request,json=compileRequest,proto3,oneof"` +} + +type InboundMessage_CanonicalizeResponse_ struct { + CanonicalizeResponse *InboundMessage_CanonicalizeResponse `protobuf:"bytes,3,opt,name=canonicalize_response,json=canonicalizeResponse,proto3,oneof"` +} + +type InboundMessage_ImportResponse_ struct { + 
ImportResponse *InboundMessage_ImportResponse `protobuf:"bytes,4,opt,name=import_response,json=importResponse,proto3,oneof"` +} + +type InboundMessage_FileImportResponse_ struct { + FileImportResponse *InboundMessage_FileImportResponse `protobuf:"bytes,5,opt,name=file_import_response,json=fileImportResponse,proto3,oneof"` +} + +type InboundMessage_FunctionCallResponse_ struct { + FunctionCallResponse *InboundMessage_FunctionCallResponse `protobuf:"bytes,6,opt,name=function_call_response,json=functionCallResponse,proto3,oneof"` +} + +type InboundMessage_VersionRequest_ struct { + VersionRequest *InboundMessage_VersionRequest `protobuf:"bytes,7,opt,name=version_request,json=versionRequest,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_) isInboundMessage_Message() {} + +func (*InboundMessage_CanonicalizeResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_ImportResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_FileImportResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_FunctionCallResponse_) isInboundMessage_Message() {} + +func (*InboundMessage_VersionRequest_) isInboundMessage_Message() {} + +// The wrapper type for all messages sent from the compiler to the host. This +// provides a `oneof` that makes it possible to determine the type of each +// outbound message. +type OutboundMessage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The wrapped message. Mandatory. 
+ // + // Types that are assignable to Message: + // + // *OutboundMessage_Error + // *OutboundMessage_CompileResponse_ + // *OutboundMessage_LogEvent_ + // *OutboundMessage_CanonicalizeRequest_ + // *OutboundMessage_ImportRequest_ + // *OutboundMessage_FileImportRequest_ + // *OutboundMessage_FunctionCallRequest_ + // *OutboundMessage_VersionResponse_ + Message isOutboundMessage_Message `protobuf_oneof:"message"` +} + +func (x *OutboundMessage) Reset() { + *x = OutboundMessage{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage) ProtoMessage() {} + +func (x *OutboundMessage) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1} +} + +func (m *OutboundMessage) GetMessage() isOutboundMessage_Message { + if m != nil { + return m.Message + } + return nil +} + +func (x *OutboundMessage) GetError() *ProtocolError { + if x, ok := x.GetMessage().(*OutboundMessage_Error); ok { + return x.Error + } + return nil +} + +func (x *OutboundMessage) GetCompileResponse() *OutboundMessage_CompileResponse { + if x, ok := x.GetMessage().(*OutboundMessage_CompileResponse_); ok { + return x.CompileResponse + } + return nil +} + +func (x *OutboundMessage) GetLogEvent() *OutboundMessage_LogEvent { + if x, ok := x.GetMessage().(*OutboundMessage_LogEvent_); ok { + return x.LogEvent + } + return nil +} + +func (x *OutboundMessage) GetCanonicalizeRequest() *OutboundMessage_CanonicalizeRequest { + if x, ok := x.GetMessage().(*OutboundMessage_CanonicalizeRequest_); ok { + return x.CanonicalizeRequest + } + return nil +} + +func (x *OutboundMessage) GetImportRequest() *OutboundMessage_ImportRequest { + if x, ok := x.GetMessage().(*OutboundMessage_ImportRequest_); ok { + return x.ImportRequest + } + return nil +} + +func (x *OutboundMessage) GetFileImportRequest() *OutboundMessage_FileImportRequest { + if x, ok := x.GetMessage().(*OutboundMessage_FileImportRequest_); ok { + return x.FileImportRequest + } + return nil +} + +func (x *OutboundMessage) GetFunctionCallRequest() *OutboundMessage_FunctionCallRequest { + if x, ok := x.GetMessage().(*OutboundMessage_FunctionCallRequest_); ok { + return x.FunctionCallRequest + } + return nil +} + +func (x *OutboundMessage) GetVersionResponse() *OutboundMessage_VersionResponse { + if x, ok := x.GetMessage().(*OutboundMessage_VersionResponse_); ok { + return x.VersionResponse + } + return nil +} + +type isOutboundMessage_Message interface { + isOutboundMessage_Message() +} + +type OutboundMessage_Error struct { + Error *ProtocolError 
`protobuf:"bytes,1,opt,name=error,proto3,oneof"` +} + +type OutboundMessage_CompileResponse_ struct { + CompileResponse *OutboundMessage_CompileResponse `protobuf:"bytes,2,opt,name=compile_response,json=compileResponse,proto3,oneof"` +} + +type OutboundMessage_LogEvent_ struct { + LogEvent *OutboundMessage_LogEvent `protobuf:"bytes,3,opt,name=log_event,json=logEvent,proto3,oneof"` +} + +type OutboundMessage_CanonicalizeRequest_ struct { + CanonicalizeRequest *OutboundMessage_CanonicalizeRequest `protobuf:"bytes,4,opt,name=canonicalize_request,json=canonicalizeRequest,proto3,oneof"` +} + +type OutboundMessage_ImportRequest_ struct { + ImportRequest *OutboundMessage_ImportRequest `protobuf:"bytes,5,opt,name=import_request,json=importRequest,proto3,oneof"` +} + +type OutboundMessage_FileImportRequest_ struct { + FileImportRequest *OutboundMessage_FileImportRequest `protobuf:"bytes,6,opt,name=file_import_request,json=fileImportRequest,proto3,oneof"` +} + +type OutboundMessage_FunctionCallRequest_ struct { + FunctionCallRequest *OutboundMessage_FunctionCallRequest `protobuf:"bytes,7,opt,name=function_call_request,json=functionCallRequest,proto3,oneof"` +} + +type OutboundMessage_VersionResponse_ struct { + VersionResponse *OutboundMessage_VersionResponse `protobuf:"bytes,8,opt,name=version_response,json=versionResponse,proto3,oneof"` +} + +func (*OutboundMessage_Error) isOutboundMessage_Message() {} + +func (*OutboundMessage_CompileResponse_) isOutboundMessage_Message() {} + +func (*OutboundMessage_LogEvent_) isOutboundMessage_Message() {} + +func (*OutboundMessage_CanonicalizeRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_ImportRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_FileImportRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_FunctionCallRequest_) isOutboundMessage_Message() {} + +func (*OutboundMessage_VersionResponse_) isOutboundMessage_Message() {} + +// An error reported when an endpoint violates the 
embedded Sass protocol. +type ProtocolError struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type ProtocolErrorType `protobuf:"varint,1,opt,name=type,proto3,enum=sass.embedded_protocol.ProtocolErrorType" json:"type,omitempty"` + // The ID of the request that had an error. This MUST be `4294967295` if the + // request ID couldn't be determined, or if the error is being reported for a + // response or an event. + Id uint32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // A human-readable message providing more detail about the error. + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` +} + +func (x *ProtocolError) Reset() { + *x = ProtocolError{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ProtocolError) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProtocolError) ProtoMessage() {} + +func (x *ProtocolError) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProtocolError.ProtoReflect.Descriptor instead. +func (*ProtocolError) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{2} +} + +func (x *ProtocolError) GetType() ProtocolErrorType { + if x != nil { + return x.Type + } + return ProtocolErrorType_PARSE +} + +func (x *ProtocolError) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *ProtocolError) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +// A chunk of a source file. 
+type SourceSpan struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The text covered by the source span. Compilers must guarantee that this is + // the text between `start.offset` and `end.offset` in the source file + // referred to by `url`. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The location of the first character in this span. + Start *SourceSpan_SourceLocation `protobuf:"bytes,2,opt,name=start,proto3" json:"start,omitempty"` + // The location of the first character after this span. + // + // If this is omitted, it indicates that the span is empty and points + // immediately before `start`. In that case, `text` must be empty. + // + // This must not point to a location before `start`. + End *SourceSpan_SourceLocation `protobuf:"bytes,3,opt,name=end,proto3,oneof" json:"end,omitempty"` + // The URL of the file to which this span refers. + // + // This may be empty, indicating that the span refers to a + // `CompileRequest.StringInput` file that doesn't specify a URL. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // Additional source text surrounding this span. + // + // If this isn't empty, it must contain `text`. Furthermore, `text` must begin + // at column `start.column` of a line in `context`. + // + // This usually contains the full lines the span begins and ends on if the + // span itself doesn't cover the full lines. 
+ Context string `protobuf:"bytes,5,opt,name=context,proto3" json:"context,omitempty"` +} + +func (x *SourceSpan) Reset() { + *x = SourceSpan{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceSpan) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceSpan) ProtoMessage() {} + +func (x *SourceSpan) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceSpan.ProtoReflect.Descriptor instead. +func (*SourceSpan) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{3} +} + +func (x *SourceSpan) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +func (x *SourceSpan) GetStart() *SourceSpan_SourceLocation { + if x != nil { + return x.Start + } + return nil +} + +func (x *SourceSpan) GetEnd() *SourceSpan_SourceLocation { + if x != nil { + return x.End + } + return nil +} + +func (x *SourceSpan) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *SourceSpan) GetContext() string { + if x != nil { + return x.Context + } + return "" +} + +// A SassScript value, passed to and returned by functions. +type Value struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The value itself. Mandatory. + // + // This is wrapped in a message type rather than used directly to reduce + // repetition, and because oneofs can't be repeated. 
+ // + // Types that are assignable to Value: + // + // *Value_String_ + // *Value_Number_ + // *Value_RgbColor_ + // *Value_HslColor_ + // *Value_List_ + // *Value_Map_ + // *Value_Singleton + // *Value_CompilerFunction_ + // *Value_HostFunction_ + // *Value_ArgumentList_ + // *Value_HwbColor_ + // *Value_Calculation_ + Value isValue_Value `protobuf_oneof:"value"` +} + +func (x *Value) Reset() { + *x = Value{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value) ProtoMessage() {} + +func (x *Value) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value.ProtoReflect.Descriptor instead. 
+func (*Value) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4} +} + +func (m *Value) GetValue() isValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Value) GetString_() *Value_String { + if x, ok := x.GetValue().(*Value_String_); ok { + return x.String_ + } + return nil +} + +func (x *Value) GetNumber() *Value_Number { + if x, ok := x.GetValue().(*Value_Number_); ok { + return x.Number + } + return nil +} + +func (x *Value) GetRgbColor() *Value_RgbColor { + if x, ok := x.GetValue().(*Value_RgbColor_); ok { + return x.RgbColor + } + return nil +} + +func (x *Value) GetHslColor() *Value_HslColor { + if x, ok := x.GetValue().(*Value_HslColor_); ok { + return x.HslColor + } + return nil +} + +func (x *Value) GetList() *Value_List { + if x, ok := x.GetValue().(*Value_List_); ok { + return x.List + } + return nil +} + +func (x *Value) GetMap() *Value_Map { + if x, ok := x.GetValue().(*Value_Map_); ok { + return x.Map + } + return nil +} + +func (x *Value) GetSingleton() SingletonValue { + if x, ok := x.GetValue().(*Value_Singleton); ok { + return x.Singleton + } + return SingletonValue_TRUE +} + +func (x *Value) GetCompilerFunction() *Value_CompilerFunction { + if x, ok := x.GetValue().(*Value_CompilerFunction_); ok { + return x.CompilerFunction + } + return nil +} + +func (x *Value) GetHostFunction() *Value_HostFunction { + if x, ok := x.GetValue().(*Value_HostFunction_); ok { + return x.HostFunction + } + return nil +} + +func (x *Value) GetArgumentList() *Value_ArgumentList { + if x, ok := x.GetValue().(*Value_ArgumentList_); ok { + return x.ArgumentList + } + return nil +} + +func (x *Value) GetHwbColor() *Value_HwbColor { + if x, ok := x.GetValue().(*Value_HwbColor_); ok { + return x.HwbColor + } + return nil +} + +func (x *Value) GetCalculation() *Value_Calculation { + if x, ok := x.GetValue().(*Value_Calculation_); ok { + return x.Calculation + } + return nil +} + +type isValue_Value interface { 
+ isValue_Value() +} + +type Value_String_ struct { + String_ *Value_String `protobuf:"bytes,1,opt,name=string,proto3,oneof"` +} + +type Value_Number_ struct { + Number *Value_Number `protobuf:"bytes,2,opt,name=number,proto3,oneof"` +} + +type Value_RgbColor_ struct { + RgbColor *Value_RgbColor `protobuf:"bytes,3,opt,name=rgb_color,json=rgbColor,proto3,oneof"` +} + +type Value_HslColor_ struct { + HslColor *Value_HslColor `protobuf:"bytes,4,opt,name=hsl_color,json=hslColor,proto3,oneof"` +} + +type Value_List_ struct { + List *Value_List `protobuf:"bytes,5,opt,name=list,proto3,oneof"` +} + +type Value_Map_ struct { + Map *Value_Map `protobuf:"bytes,6,opt,name=map,proto3,oneof"` +} + +type Value_Singleton struct { + Singleton SingletonValue `protobuf:"varint,7,opt,name=singleton,proto3,enum=sass.embedded_protocol.SingletonValue,oneof"` +} + +type Value_CompilerFunction_ struct { + CompilerFunction *Value_CompilerFunction `protobuf:"bytes,8,opt,name=compiler_function,json=compilerFunction,proto3,oneof"` +} + +type Value_HostFunction_ struct { + HostFunction *Value_HostFunction `protobuf:"bytes,9,opt,name=host_function,json=hostFunction,proto3,oneof"` +} + +type Value_ArgumentList_ struct { + ArgumentList *Value_ArgumentList `protobuf:"bytes,10,opt,name=argument_list,json=argumentList,proto3,oneof"` +} + +type Value_HwbColor_ struct { + HwbColor *Value_HwbColor `protobuf:"bytes,11,opt,name=hwb_color,json=hwbColor,proto3,oneof"` +} + +type Value_Calculation_ struct { + Calculation *Value_Calculation `protobuf:"bytes,12,opt,name=calculation,proto3,oneof"` +} + +func (*Value_String_) isValue_Value() {} + +func (*Value_Number_) isValue_Value() {} + +func (*Value_RgbColor_) isValue_Value() {} + +func (*Value_HslColor_) isValue_Value() {} + +func (*Value_List_) isValue_Value() {} + +func (*Value_Map_) isValue_Value() {} + +func (*Value_Singleton) isValue_Value() {} + +func (*Value_CompilerFunction_) isValue_Value() {} + +func (*Value_HostFunction_) isValue_Value() {} + 
+func (*Value_ArgumentList_) isValue_Value() {} + +func (*Value_HwbColor_) isValue_Value() {} + +func (*Value_Calculation_) isValue_Value() {} + +// A request for information about the version of the embedded compiler. The +// host can use this to provide diagnostic information to the user, to check +// which features the compiler supports, or to ensure that it's compatible +// with the same protocol version the compiler supports. +type InboundMessage_VersionRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This version request's id. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *InboundMessage_VersionRequest) Reset() { + *x = InboundMessage_VersionRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_VersionRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_VersionRequest) ProtoMessage() {} + +func (x *InboundMessage_VersionRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_VersionRequest.ProtoReflect.Descriptor instead. +func (*InboundMessage_VersionRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 0} +} + +func (x *InboundMessage_VersionRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +// A request that compiles an entrypoint to CSS. 
+type InboundMessage_CompileRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The input stylesheet to parse. Mandatory. + // + // Types that are assignable to Input: + // + // *InboundMessage_CompileRequest_String_ + // *InboundMessage_CompileRequest_Path + Input isInboundMessage_CompileRequest_Input `protobuf_oneof:"input"` + // How to format the CSS output. + Style OutputStyle `protobuf:"varint,4,opt,name=style,proto3,enum=sass.embedded_protocol.OutputStyle" json:"style,omitempty"` + // Whether to generate a source map. Note that this will *not* add a source + // map comment to the stylesheet; that's up to the host or its users. + SourceMap bool `protobuf:"varint,5,opt,name=source_map,json=sourceMap,proto3" json:"source_map,omitempty"` + // Importers (including load paths on the filesystem) to use when resolving + // imports that can't be resolved relative to the file that contains it. Each + // importer is checked in order until one recognizes the imported URL. + Importers []*InboundMessage_CompileRequest_Importer `protobuf:"bytes,6,rep,name=importers,proto3" json:"importers,omitempty"` + // Signatures for custom global functions whose behavior is defined by the + // host. + // + // If this is not a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), or if it conflicts with a function name that's built into the + // Sass language, the compiler must treat the compilation as failed. + // + // Compilers must ensure that pure-Sass functions take precedence over + // custom global functions. + GlobalFunctions []string `protobuf:"bytes,7,rep,name=global_functions,json=globalFunctions,proto3" json:"global_functions,omitempty"` + // Whether to use terminal colors in the formatted message of errors and + // logs. 
+ AlertColor bool `protobuf:"varint,8,opt,name=alert_color,json=alertColor,proto3" json:"alert_color,omitempty"` + // Whether to encode the formatted message of errors and logs in ASCII. + AlertAscii bool `protobuf:"varint,9,opt,name=alert_ascii,json=alertAscii,proto3" json:"alert_ascii,omitempty"` + // Whether to report all deprecation warnings or only the first few ones. + // If this is `false`, the compiler may choose not to send events for + // repeated deprecation warnings. If this is `true`, the compiler must emit + // an event for every deprecation warning it encounters. + Verbose bool `protobuf:"varint,10,opt,name=verbose,proto3" json:"verbose,omitempty"` + // Whether to omit events for deprecation warnings coming from dependencies + // (files loaded from a different importer than the input). + QuietDeps bool `protobuf:"varint,11,opt,name=quiet_deps,json=quietDeps,proto3" json:"quiet_deps,omitempty"` + // Whether to include sources in the generated sourcemap + SourceMapIncludeSources bool `protobuf:"varint,12,opt,name=source_map_include_sources,json=sourceMapIncludeSources,proto3" json:"source_map_include_sources,omitempty"` + // Whether to emit a `@charset`/BOM for non-ASCII stylesheets. 
+ Charset bool `protobuf:"varint,13,opt,name=charset,proto3" json:"charset,omitempty"` +} + +func (x *InboundMessage_CompileRequest) Reset() { + *x = InboundMessage_CompileRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest.ProtoReflect.Descriptor instead. +func (*InboundMessage_CompileRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 1} +} + +func (m *InboundMessage_CompileRequest) GetInput() isInboundMessage_CompileRequest_Input { + if m != nil { + return m.Input + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetString_() *InboundMessage_CompileRequest_StringInput { + if x, ok := x.GetInput().(*InboundMessage_CompileRequest_String_); ok { + return x.String_ + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetPath() string { + if x, ok := x.GetInput().(*InboundMessage_CompileRequest_Path); ok { + return x.Path + } + return "" +} + +func (x *InboundMessage_CompileRequest) GetStyle() OutputStyle { + if x != nil { + return x.Style + } + return OutputStyle_EXPANDED +} + +func (x *InboundMessage_CompileRequest) GetSourceMap() bool { + if x != nil { + return x.SourceMap + } + return false +} + +func (x *InboundMessage_CompileRequest) GetImporters() []*InboundMessage_CompileRequest_Importer { + if x != nil { + return x.Importers + 
} + return nil +} + +func (x *InboundMessage_CompileRequest) GetGlobalFunctions() []string { + if x != nil { + return x.GlobalFunctions + } + return nil +} + +func (x *InboundMessage_CompileRequest) GetAlertColor() bool { + if x != nil { + return x.AlertColor + } + return false +} + +func (x *InboundMessage_CompileRequest) GetAlertAscii() bool { + if x != nil { + return x.AlertAscii + } + return false +} + +func (x *InboundMessage_CompileRequest) GetVerbose() bool { + if x != nil { + return x.Verbose + } + return false +} + +func (x *InboundMessage_CompileRequest) GetQuietDeps() bool { + if x != nil { + return x.QuietDeps + } + return false +} + +func (x *InboundMessage_CompileRequest) GetSourceMapIncludeSources() bool { + if x != nil { + return x.SourceMapIncludeSources + } + return false +} + +func (x *InboundMessage_CompileRequest) GetCharset() bool { + if x != nil { + return x.Charset + } + return false +} + +type isInboundMessage_CompileRequest_Input interface { + isInboundMessage_CompileRequest_Input() +} + +type InboundMessage_CompileRequest_String_ struct { + // A stylesheet loaded from its contents. + String_ *InboundMessage_CompileRequest_StringInput `protobuf:"bytes,2,opt,name=string,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Path struct { + // A stylesheet loaded from the given path on the filesystem. + Path string `protobuf:"bytes,3,opt,name=path,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_String_) isInboundMessage_CompileRequest_Input() {} + +func (*InboundMessage_CompileRequest_Path) isInboundMessage_CompileRequest_Input() {} + +// A response indicating the result of canonicalizing an imported URL. +type InboundMessage_CanonicalizeResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of canonicalization. 
If this is unset, it indicates that the + // importer either did not recognize the URL, or could not find a stylesheet + // at the location it referred to. Optional. + // + // Types that are assignable to Result: + // + // *InboundMessage_CanonicalizeResponse_Url + // *InboundMessage_CanonicalizeResponse_Error + Result isInboundMessage_CanonicalizeResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_CanonicalizeResponse) Reset() { + *x = InboundMessage_CanonicalizeResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CanonicalizeResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CanonicalizeResponse) ProtoMessage() {} + +func (x *InboundMessage_CanonicalizeResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CanonicalizeResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CanonicalizeResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 2} +} + +func (x *InboundMessage_CanonicalizeResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_CanonicalizeResponse) GetResult() isInboundMessage_CanonicalizeResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_CanonicalizeResponse) GetUrl() string { + if x, ok := x.GetResult().(*InboundMessage_CanonicalizeResponse_Url); ok { + return x.Url + } + return "" +} + +func (x *InboundMessage_CanonicalizeResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_CanonicalizeResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_CanonicalizeResponse_Result interface { + isInboundMessage_CanonicalizeResponse_Result() +} + +type InboundMessage_CanonicalizeResponse_Url struct { + // The successfully canonicalized URL. + // + // If this is not an absolute URL (including scheme), the compiler must + // treat that as an error thrown by the importer. + Url string `protobuf:"bytes,2,opt,name=url,proto3,oneof"` +} + +type InboundMessage_CanonicalizeResponse_Error struct { + // An error message explaining why canonicalization failed. + // + // This indicates that a stylesheet was found, but a canonical URL for it + // could not be determined. If no stylesheet was found, `result` should be + // `null` instead. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_CanonicalizeResponse_Url) isInboundMessage_CanonicalizeResponse_Result() {} + +func (*InboundMessage_CanonicalizeResponse_Error) isInboundMessage_CanonicalizeResponse_Result() {} + +// A response indicating the result of importing a canonical URL. 
+type InboundMessage_ImportResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of loading the URL. If this is unset, it indicates that the + // importer either did not recognize the URL, or could not find a stylesheet + // at the location it referred to. Optional. + // + // Types that are assignable to Result: + // + // *InboundMessage_ImportResponse_Success + // *InboundMessage_ImportResponse_Error + Result isInboundMessage_ImportResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_ImportResponse) Reset() { + *x = InboundMessage_ImportResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_ImportResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_ImportResponse) ProtoMessage() {} + +func (x *InboundMessage_ImportResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_ImportResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_ImportResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 3} +} + +func (x *InboundMessage_ImportResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_ImportResponse) GetResult() isInboundMessage_ImportResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_ImportResponse) GetSuccess() *InboundMessage_ImportResponse_ImportSuccess { + if x, ok := x.GetResult().(*InboundMessage_ImportResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *InboundMessage_ImportResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_ImportResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_ImportResponse_Result interface { + isInboundMessage_ImportResponse_Result() +} + +type InboundMessage_ImportResponse_Success struct { + // The contents of the loaded stylesheet. + Success *InboundMessage_ImportResponse_ImportSuccess `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type InboundMessage_ImportResponse_Error struct { + // An error message explaining why the URL could not be loaded. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_ImportResponse_Success) isInboundMessage_ImportResponse_Result() {} + +func (*InboundMessage_ImportResponse_Error) isInboundMessage_ImportResponse_Result() {} + +// A response indicating the result of redirecting a URL to the filesystem. +type InboundMessage_FileImportResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of loading the URL. An unset result indicates that the + // importer did not recognize the URL and other importers or load paths + // should be tried. Optional. 
+ // + // Types that are assignable to Result: + // + // *InboundMessage_FileImportResponse_FileUrl + // *InboundMessage_FileImportResponse_Error + Result isInboundMessage_FileImportResponse_Result `protobuf_oneof:"result"` +} + +func (x *InboundMessage_FileImportResponse) Reset() { + *x = InboundMessage_FileImportResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_FileImportResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_FileImportResponse) ProtoMessage() {} + +func (x *InboundMessage_FileImportResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_FileImportResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_FileImportResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 4} +} + +func (x *InboundMessage_FileImportResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_FileImportResponse) GetResult() isInboundMessage_FileImportResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_FileImportResponse) GetFileUrl() string { + if x, ok := x.GetResult().(*InboundMessage_FileImportResponse_FileUrl); ok { + return x.FileUrl + } + return "" +} + +func (x *InboundMessage_FileImportResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_FileImportResponse_Error); ok { + return x.Error + } + return "" +} + +type isInboundMessage_FileImportResponse_Result interface { + isInboundMessage_FileImportResponse_Result() +} + +type InboundMessage_FileImportResponse_FileUrl struct { + // The absolute `file:` URL to look for the file on the physical + // filesystem. + // + // The compiler must verify to the best of its ability that this URL + // follows the format for an absolute `file:` URL on the current operating + // system without a hostname. If it doesn't, the compiler must treat that + // as an error thrown by the importer. See + // https://en.wikipedia.org/wiki/File_URI_scheme for details on the + // format. + // + // The compiler must handle turning this into a canonical URL by resolving + // it for partials, file extensions, and index files. The compiler must + // then loading the contents of the resulting canonical URL from the + // filesystem. + FileUrl string `protobuf:"bytes,2,opt,name=file_url,json=fileUrl,proto3,oneof"` +} + +type InboundMessage_FileImportResponse_Error struct { + // An error message explaining why the URL could not be loaded. 
+ Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_FileImportResponse_FileUrl) isInboundMessage_FileImportResponse_Result() {} + +func (*InboundMessage_FileImportResponse_Error) isInboundMessage_FileImportResponse_Result() {} + +// A response indicating the result of calling a custom Sass function defined +// in the host. +type InboundMessage_FunctionCallResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The result of calling the function. Mandatory. + // + // Types that are assignable to Result: + // + // *InboundMessage_FunctionCallResponse_Success + // *InboundMessage_FunctionCallResponse_Error + Result isInboundMessage_FunctionCallResponse_Result `protobuf_oneof:"result"` + // The IDs of all `Value.ArgumentList`s in `FunctionCallRequest.arguments` + // whose keywords were accessed. See `Value.ArgumentList` for details. This + // may not include the special value `0` and it may not include multiple + // instances of the same ID. 
+ AccessedArgumentLists []uint32 `protobuf:"varint,4,rep,packed,name=accessed_argument_lists,json=accessedArgumentLists,proto3" json:"accessed_argument_lists,omitempty"` +} + +func (x *InboundMessage_FunctionCallResponse) Reset() { + *x = InboundMessage_FunctionCallResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_FunctionCallResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_FunctionCallResponse) ProtoMessage() {} + +func (x *InboundMessage_FunctionCallResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_FunctionCallResponse.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_FunctionCallResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 5} +} + +func (x *InboundMessage_FunctionCallResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *InboundMessage_FunctionCallResponse) GetResult() isInboundMessage_FunctionCallResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *InboundMessage_FunctionCallResponse) GetSuccess() *Value { + if x, ok := x.GetResult().(*InboundMessage_FunctionCallResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *InboundMessage_FunctionCallResponse) GetError() string { + if x, ok := x.GetResult().(*InboundMessage_FunctionCallResponse_Error); ok { + return x.Error + } + return "" +} + +func (x *InboundMessage_FunctionCallResponse) GetAccessedArgumentLists() []uint32 { + if x != nil { + return x.AccessedArgumentLists + } + return nil +} + +type isInboundMessage_FunctionCallResponse_Result interface { + isInboundMessage_FunctionCallResponse_Result() +} + +type InboundMessage_FunctionCallResponse_Success struct { + // The return value of a successful function call. + Success *Value `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type InboundMessage_FunctionCallResponse_Error struct { + // An error message explaining why the function call failed. + Error string `protobuf:"bytes,3,opt,name=error,proto3,oneof"` +} + +func (*InboundMessage_FunctionCallResponse_Success) isInboundMessage_FunctionCallResponse_Result() {} + +func (*InboundMessage_FunctionCallResponse_Error) isInboundMessage_FunctionCallResponse_Result() {} + +// An input stylesheet provided as plain text, rather than loaded from the +// filesystem. +type InboundMessage_CompileRequest_StringInput struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The contents of the stylesheet. 
+ Source string `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // The location from which `source` was loaded. If this is empty, it + // indicates that the URL is unknown. + // + // This must be a canonical URL recognized by `importer`, if it's passed. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // The syntax to use to parse `source`. + Syntax Syntax `protobuf:"varint,3,opt,name=syntax,proto3,enum=sass.embedded_protocol.Syntax" json:"syntax,omitempty"` + // The importer to use to resolve imports relative to `url`. + Importer *InboundMessage_CompileRequest_Importer `protobuf:"bytes,4,opt,name=importer,proto3" json:"importer,omitempty"` +} + +func (x *InboundMessage_CompileRequest_StringInput) Reset() { + *x = InboundMessage_CompileRequest_StringInput{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest_StringInput) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest_StringInput) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest_StringInput) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest_StringInput.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CompileRequest_StringInput) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 1, 0} +} + +func (x *InboundMessage_CompileRequest_StringInput) GetSource() string { + if x != nil { + return x.Source + } + return "" +} + +func (x *InboundMessage_CompileRequest_StringInput) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *InboundMessage_CompileRequest_StringInput) GetSyntax() Syntax { + if x != nil { + return x.Syntax + } + return Syntax_SCSS +} + +func (x *InboundMessage_CompileRequest_StringInput) GetImporter() *InboundMessage_CompileRequest_Importer { + if x != nil { + return x.Importer + } + return nil +} + +// A wrapper message that represents either a user-defined importer or a +// load path on disk. This must be a wrapper because `oneof` types can't be +// `repeated`. +type InboundMessage_CompileRequest_Importer struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The possible types of importer. Mandatory. 
+ // + // Types that are assignable to Importer: + // + // *InboundMessage_CompileRequest_Importer_Path + // *InboundMessage_CompileRequest_Importer_ImporterId + // *InboundMessage_CompileRequest_Importer_FileImporterId + Importer isInboundMessage_CompileRequest_Importer_Importer `protobuf_oneof:"importer"` +} + +func (x *InboundMessage_CompileRequest_Importer) Reset() { + *x = InboundMessage_CompileRequest_Importer{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_CompileRequest_Importer) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_CompileRequest_Importer) ProtoMessage() {} + +func (x *InboundMessage_CompileRequest_Importer) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_CompileRequest_Importer.ProtoReflect.Descriptor instead. 
+func (*InboundMessage_CompileRequest_Importer) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 1, 1} +} + +func (m *InboundMessage_CompileRequest_Importer) GetImporter() isInboundMessage_CompileRequest_Importer_Importer { + if m != nil { + return m.Importer + } + return nil +} + +func (x *InboundMessage_CompileRequest_Importer) GetPath() string { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_Path); ok { + return x.Path + } + return "" +} + +func (x *InboundMessage_CompileRequest_Importer) GetImporterId() uint32 { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_ImporterId); ok { + return x.ImporterId + } + return 0 +} + +func (x *InboundMessage_CompileRequest_Importer) GetFileImporterId() uint32 { + if x, ok := x.GetImporter().(*InboundMessage_CompileRequest_Importer_FileImporterId); ok { + return x.FileImporterId + } + return 0 +} + +type isInboundMessage_CompileRequest_Importer_Importer interface { + isInboundMessage_CompileRequest_Importer_Importer() +} + +type InboundMessage_CompileRequest_Importer_Path struct { + // A built-in importer that loads Sass files within the given directory + // on disk. + Path string `protobuf:"bytes,1,opt,name=path,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Importer_ImporterId struct { + // A unique ID for a user-defined importer. This ID will be included in + // outbound `CanonicalizeRequest` and `ImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all + // importers registered for this compilation. 
+ ImporterId uint32 `protobuf:"varint,2,opt,name=importer_id,json=importerId,proto3,oneof"` +} + +type InboundMessage_CompileRequest_Importer_FileImporterId struct { + // A unique ID for a special kind of user-defined importer that tells + // the compiler where to look for files on the physical filesystem, but + // leaves the details of resolving partials and extensions and loading + // the file from disk up to the compiler itself. + // + // This ID will be included in outbound `FileImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all importers + // registered for this compilation. + FileImporterId uint32 `protobuf:"varint,3,opt,name=file_importer_id,json=fileImporterId,proto3,oneof"` +} + +func (*InboundMessage_CompileRequest_Importer_Path) isInboundMessage_CompileRequest_Importer_Importer() { +} + +func (*InboundMessage_CompileRequest_Importer_ImporterId) isInboundMessage_CompileRequest_Importer_Importer() { +} + +func (*InboundMessage_CompileRequest_Importer_FileImporterId) isInboundMessage_CompileRequest_Importer_Importer() { +} + +// The stylesheet's contents were loaded successfully. +type InboundMessage_ImportResponse_ImportSuccess struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The text of the stylesheet. + Contents string `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + // The syntax of `contents`. + Syntax Syntax `protobuf:"varint,2,opt,name=syntax,proto3,enum=sass.embedded_protocol.Syntax" json:"syntax,omitempty"` + // An absolute, browser-accessible URL indicating the resolved location of + // the imported stylesheet. + // + // This should be a `file:` URL if one is available, but an `http:` URL is + // acceptable as well. If no URL is supplied, a `data:` URL is generated + // automatically from `contents`. 
+ // + // If this is provided and is not an absolute URL (including scheme) the + // compiler must treat that as an error thrown by the importer. + SourceMapUrl *string `protobuf:"bytes,3,opt,name=source_map_url,json=sourceMapUrl,proto3,oneof" json:"source_map_url,omitempty"` +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) Reset() { + *x = InboundMessage_ImportResponse_ImportSuccess{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*InboundMessage_ImportResponse_ImportSuccess) ProtoMessage() {} + +func (x *InboundMessage_ImportResponse_ImportSuccess) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use InboundMessage_ImportResponse_ImportSuccess.ProtoReflect.Descriptor instead. +func (*InboundMessage_ImportResponse_ImportSuccess) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{0, 3, 0} +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetContents() string { + if x != nil { + return x.Contents + } + return "" +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetSyntax() Syntax { + if x != nil { + return x.Syntax + } + return Syntax_SCSS +} + +func (x *InboundMessage_ImportResponse_ImportSuccess) GetSourceMapUrl() string { + if x != nil && x.SourceMapUrl != nil { + return *x.SourceMapUrl + } + return "" +} + +// A response that contains the version of the embedded compiler. 
+type OutboundMessage_VersionResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // This version request's id. + Id uint32 `protobuf:"varint,5,opt,name=id,proto3" json:"id,omitempty"` + // The version of the embedded protocol, in semver format. + ProtocolVersion string `protobuf:"bytes,1,opt,name=protocol_version,json=protocolVersion,proto3" json:"protocol_version,omitempty"` + // The version of the embedded compiler package. This has no guaranteed + // format, although compilers are encouraged to use semver. + CompilerVersion string `protobuf:"bytes,2,opt,name=compiler_version,json=compilerVersion,proto3" json:"compiler_version,omitempty"` + // The version of the Sass implementation that the embedded compiler wraps. + // This has no guaranteed format, although Sass implementations are + // encouraged to use semver. + ImplementationVersion string `protobuf:"bytes,3,opt,name=implementation_version,json=implementationVersion,proto3" json:"implementation_version,omitempty"` + // The name of the Sass implementation that the embedded compiler wraps. 
+ ImplementationName string `protobuf:"bytes,4,opt,name=implementation_name,json=implementationName,proto3" json:"implementation_name,omitempty"` +} + +func (x *OutboundMessage_VersionResponse) Reset() { + *x = OutboundMessage_VersionResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_VersionResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_VersionResponse) ProtoMessage() {} + +func (x *OutboundMessage_VersionResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_VersionResponse.ProtoReflect.Descriptor instead. +func (*OutboundMessage_VersionResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *OutboundMessage_VersionResponse) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_VersionResponse) GetProtocolVersion() string { + if x != nil { + return x.ProtocolVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetCompilerVersion() string { + if x != nil { + return x.CompilerVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetImplementationVersion() string { + if x != nil { + return x.ImplementationVersion + } + return "" +} + +func (x *OutboundMessage_VersionResponse) GetImplementationName() string { + if x != nil { + return x.ImplementationName + } + return "" +} + +// A response that contains the result of a compilation. 
+type OutboundMessage_CompileResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The success or failure result of the compilation. Mandatory. + // + // Types that are assignable to Result: + // + // *OutboundMessage_CompileResponse_Success + // *OutboundMessage_CompileResponse_Failure + Result isOutboundMessage_CompileResponse_Result `protobuf_oneof:"result"` + // The canonical URLs of all source files loaded during the compilation. + // + // The compiler must ensure that each canonical URL appears only once in + // this list. This must include the entrypoint file's URL if either + // `CompileRequest.input.path` or `CompileRequest.StringInput.url` was + // passed. + LoadedUrls []string `protobuf:"bytes,4,rep,name=loaded_urls,json=loadedUrls,proto3" json:"loaded_urls,omitempty"` +} + +func (x *OutboundMessage_CompileResponse) Reset() { + *x = OutboundMessage_CompileResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CompileResponse.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_CompileResponse) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 1} +} + +func (m *OutboundMessage_CompileResponse) GetResult() isOutboundMessage_CompileResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (x *OutboundMessage_CompileResponse) GetSuccess() *OutboundMessage_CompileResponse_CompileSuccess { + if x, ok := x.GetResult().(*OutboundMessage_CompileResponse_Success); ok { + return x.Success + } + return nil +} + +func (x *OutboundMessage_CompileResponse) GetFailure() *OutboundMessage_CompileResponse_CompileFailure { + if x, ok := x.GetResult().(*OutboundMessage_CompileResponse_Failure); ok { + return x.Failure + } + return nil +} + +func (x *OutboundMessage_CompileResponse) GetLoadedUrls() []string { + if x != nil { + return x.LoadedUrls + } + return nil +} + +type isOutboundMessage_CompileResponse_Result interface { + isOutboundMessage_CompileResponse_Result() +} + +type OutboundMessage_CompileResponse_Success struct { + // The result of a successful compilation. + Success *OutboundMessage_CompileResponse_CompileSuccess `protobuf:"bytes,2,opt,name=success,proto3,oneof"` +} + +type OutboundMessage_CompileResponse_Failure struct { + // The result of a failed compilation. + Failure *OutboundMessage_CompileResponse_CompileFailure `protobuf:"bytes,3,opt,name=failure,proto3,oneof"` +} + +func (*OutboundMessage_CompileResponse_Success) isOutboundMessage_CompileResponse_Result() {} + +func (*OutboundMessage_CompileResponse_Failure) isOutboundMessage_CompileResponse_Result() {} + +// An event indicating that a message should be displayed to the user. +type OutboundMessage_LogEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Type LogEventType `protobuf:"varint,2,opt,name=type,proto3,enum=sass.embedded_protocol.LogEventType" json:"type,omitempty"` + // The text of the message. 
+ Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + // The span associated with this message. + Span *SourceSpan `protobuf:"bytes,4,opt,name=span,proto3,oneof" json:"span,omitempty"` + // The stack trace associated with this message. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + StackTrace string `protobuf:"bytes,5,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + // A formatted, human-readable string that contains the message, span (if + // available), and trace (if available). The format of this string is not + // specified and is likely to be inconsistent between implementations. + Formatted string `protobuf:"bytes,6,opt,name=formatted,proto3" json:"formatted,omitempty"` +} + +func (x *OutboundMessage_LogEvent) Reset() { + *x = OutboundMessage_LogEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_LogEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_LogEvent) ProtoMessage() {} + +func (x *OutboundMessage_LogEvent) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_LogEvent.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_LogEvent) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 2} +} + +func (x *OutboundMessage_LogEvent) GetType() LogEventType { + if x != nil { + return x.Type + } + return LogEventType_WARNING +} + +func (x *OutboundMessage_LogEvent) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *OutboundMessage_LogEvent) GetSpan() *SourceSpan { + if x != nil { + return x.Span + } + return nil +} + +func (x *OutboundMessage_LogEvent) GetStackTrace() string { + if x != nil { + return x.StackTrace + } + return "" +} + +func (x *OutboundMessage_LogEvent) GetFormatted() string { + if x != nil { + return x.Formatted + } + return "" +} + +// A request for a custom importer to convert an imported URL to its canonical +// format. +// +// If the URL is not recognized by this importer, or if no stylesheet is found +// at that URL, `CanonicalizeResponse.result` must be `null`. Otherwise, the +// importer must return an absolute URL, including a scheme. +// +// > The host's documentation should encourage the use of file importers (via +// > `CompileRequest.Importer.file_importer_id`, `FileImportRequest`, and +// > `FileImportResponse`) for any importers that simply refer to files on +// > disk. This will allow Sass to handle the logic of resolving partials, +// > file extensions, and index files. +// +// If Sass has already loaded a stylesheet with the returned canonical URL, it +// re-uses the existing parse tree. This means that importers must ensure that +// the same canonical URL always refers to the same stylesheet, *even across +// different importers*. Importers must also ensure that any canonicalized +// URLs they return can be passed back to `CanonicalizeRequest` and will be +// returned unchanged. 
+// +// If this importer's URL format supports file extensions, it should +// canonicalize them the same way as the default filesystem importer: +// +// - The importer should look for stylesheets by adding the prefix `_` to the +// URL's basename, and by adding the extensions `.sass` and `.scss` if the +// URL doesn't already have one of those extensions. For example, if the URL +// was `foo/bar/baz`, the importer would look for: +// +// - `foo/bar/baz.sass` +// +// - `foo/bar/baz.scss` +// +// - `foo/bar/_baz.sass` +// +// - `foo/bar/_baz.scss` +// +// If the URL was foo/bar/baz.scss, the importer would just look for: +// +// - `foo/bar/baz.scss` +// +// - `foo/bar/_baz.scss` +// +// If the importer finds a stylesheet at more than one of these URLs, it +// should respond with a `CanonicalizeResponse.result.error` indicating that +// the import is ambiguous. Note that if the extension is explicitly +// specified, a stylesheet with another extension may exist without error. +// +// - If none of the possible paths is valid, the importer should perform the +// same resolution on the URL followed by `/index`. In the example above, it +// would look for: +// +// - `foo/bar/baz/_index.sass` +// +// - `foo/bar/baz/index.sass` +// +// - `foo/bar/baz/_index.scss` +// +// - `foo/bar/baz/index.scss` +// +// As above, if the importer finds a stylesheet at more than one of these +// URLs, it should respond with a `CanonicalizeResponse.result.error` +// indicating that the import is ambiguous. +type OutboundMessage_CanonicalizeRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The unique ID of the importer being invoked. This must match an importer + // ID passed to this compilation in `CompileRequest.importers` or + // `CompileRequest.input.string.importer`. 
+ ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The URL of the import to be canonicalized. This may be either absolute or + // relative. + // + // When loading a URL, the compiler must first try resolving that URL + // relative to the canonical URL of the current file, and canonicalizing the + // result using the importer that loaded the current file. If this returns + // `null`, the compiler must then try canonicalizing the original URL with + // each importer in order until one returns something other than `null`. + // That is the result of the import. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // / Whether this request comes from an `@import` rule. + // / + // / When evaluating `@import` rules, URLs should canonicalize to an + // / [import-only file] if one exists for the URL being canonicalized. + // / Otherwise, canonicalization should be identical for `@import` and `@use` + // / rules. 
+ // / + // / [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + FromImport bool `protobuf:"varint,5,opt,name=from_import,json=fromImport,proto3" json:"from_import,omitempty"` +} + +func (x *OutboundMessage_CanonicalizeRequest) Reset() { + *x = OutboundMessage_CanonicalizeRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CanonicalizeRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CanonicalizeRequest) ProtoMessage() {} + +func (x *OutboundMessage_CanonicalizeRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CanonicalizeRequest.ProtoReflect.Descriptor instead. +func (*OutboundMessage_CanonicalizeRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 3} +} + +func (x *OutboundMessage_CanonicalizeRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_CanonicalizeRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_CanonicalizeRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *OutboundMessage_CanonicalizeRequest) GetFromImport() bool { + if x != nil { + return x.FromImport + } + return false +} + +// A request for a custom importer to load the contents of a stylesheet. 
+type OutboundMessage_ImportRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The unique ID of the importer being invoked. This must match an + // `Importer.importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The canonical URL of the import. This is guaranteed to be a URL returned + // by a `CanonicalizeRequest` to this importer. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` +} + +func (x *OutboundMessage_ImportRequest) Reset() { + *x = OutboundMessage_ImportRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_ImportRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_ImportRequest) ProtoMessage() {} + +func (x *OutboundMessage_ImportRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_ImportRequest.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_ImportRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 4} +} + +func (x *OutboundMessage_ImportRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_ImportRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_ImportRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +// A request for a custom filesystem importer to load the contents of a +// stylesheet. +// +// A filesystem importer is represented in the compiler as an [importer]. When +// the importer is invoked with a string `string`: +// +// * If `string` is an absolute URL whose scheme is `file`: +// +// - Let `url` be string. +// +// * Otherwise: +// +// - Let `fromImport` be `true` if the importer is being run for an +// `@import` and `false` otherwise. +// +// - Let `response` be the result of sending a `FileImportRequest` with +// `string` as its `url` and `fromImport` as `from_import`. +// +// - If `response.result` is null, return null. +// +// - Otherwise, if `response.result.error` is set, throw an error. +// +// - Otherwise, let `url` be `response.result.file_url`. +// +// * Let `resolved` be the result of [resolving `url`]. +// +// * If `resolved` is null, return null. +// +// * Let `text` be the contents of the file at `resolved`. +// +// * Let `syntax` be: +// +// - "scss" if `url` ends in `.scss`. +// +// - "indented" if `url` ends in `.sass`. +// +// - "css" if `url` ends in `.css`. +// +// > The algorithm for resolving a `file:` URL guarantees that `url` will have +// > one of these extensions. +// +// * Return `text`, `syntax`, and `resolved`. 
+// +// [importer]: https://github.com/sass/sass/tree/main/spec/modules.md#importer +// [resolving `url`]: https://github.com/sass/sass/tree/main/spec/modules.md#resolving-a-file-url +type OutboundMessage_FileImportRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The unique ID of the importer being invoked. This must match an + // `Importer.file_importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + ImporterId uint32 `protobuf:"varint,3,opt,name=importer_id,json=importerId,proto3" json:"importer_id,omitempty"` + // The (non-canonicalized) URL of the import. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // / Whether this request comes from an `@import` rule. + // / + // / When evaluating `@import` rules, filesystem importers should load an + // / [import-only file] if one exists for the URL being canonicalized. + // / Otherwise, canonicalization should be identical for `@import` and `@use` + // / rules. 
+ // / + // / [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + FromImport bool `protobuf:"varint,5,opt,name=from_import,json=fromImport,proto3" json:"from_import,omitempty"` +} + +func (x *OutboundMessage_FileImportRequest) Reset() { + *x = OutboundMessage_FileImportRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_FileImportRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_FileImportRequest) ProtoMessage() {} + +func (x *OutboundMessage_FileImportRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_FileImportRequest.ProtoReflect.Descriptor instead. +func (*OutboundMessage_FileImportRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 5} +} + +func (x *OutboundMessage_FileImportRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *OutboundMessage_FileImportRequest) GetImporterId() uint32 { + if x != nil { + return x.ImporterId + } + return 0 +} + +func (x *OutboundMessage_FileImportRequest) GetUrl() string { + if x != nil { + return x.Url + } + return "" +} + +func (x *OutboundMessage_FileImportRequest) GetFromImport() bool { + if x != nil { + return x.FromImport + } + return false +} + +// A request to invoke a custom Sass function and return its result. 
+type OutboundMessage_FunctionCallRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // An identifier that indicates which function to invoke. Mandatory. + // + // Types that are assignable to Identifier: + // + // *OutboundMessage_FunctionCallRequest_Name + // *OutboundMessage_FunctionCallRequest_FunctionId + Identifier isOutboundMessage_FunctionCallRequest_Identifier `protobuf_oneof:"identifier"` + // The arguments passed to the function, in the order they appear in the + // function signature passed to `CompileRequest.global_functions`. + // + // The compiler must ensure that a valid number of arguments are passed for + // the given signature, that default argument values are instantiated + // appropriately, and that variable argument lists (`$args...`) are passed + // as `Value.ArgumentList`s. + Arguments []*Value `protobuf:"bytes,5,rep,name=arguments,proto3" json:"arguments,omitempty"` +} + +func (x *OutboundMessage_FunctionCallRequest) Reset() { + *x = OutboundMessage_FunctionCallRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_FunctionCallRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_FunctionCallRequest) ProtoMessage() {} + +func (x *OutboundMessage_FunctionCallRequest) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_FunctionCallRequest.ProtoReflect.Descriptor instead. 
+func (*OutboundMessage_FunctionCallRequest) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 6} +} + +func (x *OutboundMessage_FunctionCallRequest) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (m *OutboundMessage_FunctionCallRequest) GetIdentifier() isOutboundMessage_FunctionCallRequest_Identifier { + if m != nil { + return m.Identifier + } + return nil +} + +func (x *OutboundMessage_FunctionCallRequest) GetName() string { + if x, ok := x.GetIdentifier().(*OutboundMessage_FunctionCallRequest_Name); ok { + return x.Name + } + return "" +} + +func (x *OutboundMessage_FunctionCallRequest) GetFunctionId() uint32 { + if x, ok := x.GetIdentifier().(*OutboundMessage_FunctionCallRequest_FunctionId); ok { + return x.FunctionId + } + return 0 +} + +func (x *OutboundMessage_FunctionCallRequest) GetArguments() []*Value { + if x != nil { + return x.Arguments + } + return nil +} + +type isOutboundMessage_FunctionCallRequest_Identifier interface { + isOutboundMessage_FunctionCallRequest_Identifier() +} + +type OutboundMessage_FunctionCallRequest_Name struct { + // The name of the function to invoke. + // + // This must match the name of a function signature the host passed to the + // corresponding `CompileRequest.global_functions` call, including hyphens + // and underscores. + Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"` +} + +type OutboundMessage_FunctionCallRequest_FunctionId struct { + // The opaque ID of the function to invoke. + // + // This must match the ID of a `Value.HostFunction` that the host passed + // to the compiler. 
+ FunctionId uint32 `protobuf:"varint,4,opt,name=function_id,json=functionId,proto3,oneof"` +} + +func (*OutboundMessage_FunctionCallRequest_Name) isOutboundMessage_FunctionCallRequest_Identifier() {} + +func (*OutboundMessage_FunctionCallRequest_FunctionId) isOutboundMessage_FunctionCallRequest_Identifier() { +} + +// A message indicating that the Sass file was successfully compiled to CSS. +type OutboundMessage_CompileResponse_CompileSuccess struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The compiled CSS. + Css string `protobuf:"bytes,1,opt,name=css,proto3" json:"css,omitempty"` + // The JSON-encoded source map, or the empty string if + // `CompileRequest.source_map` was `false`. + // + // The compiler must not add a `"file"` key to this source map. It's the + // host's (or the host's user's) responsibility to determine how the + // generated CSS can be reached from the source map. + SourceMap string `protobuf:"bytes,2,opt,name=source_map,json=sourceMap,proto3" json:"source_map,omitempty"` +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) Reset() { + *x = OutboundMessage_CompileResponse_CompileSuccess{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse_CompileSuccess) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use 
OutboundMessage_CompileResponse_CompileSuccess.ProtoReflect.Descriptor instead. +func (*OutboundMessage_CompileResponse_CompileSuccess) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 1, 0} +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) GetCss() string { + if x != nil { + return x.Css + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileSuccess) GetSourceMap() string { + if x != nil { + return x.SourceMap + } + return "" +} + +// A message indicating that the Sass file could not be successfully +// compiled to CSS. +type OutboundMessage_CompileResponse_CompileFailure struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A message describing the reason for the failure. + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + // The span associated with the failure. + Span *SourceSpan `protobuf:"bytes,2,opt,name=span,proto3" json:"span,omitempty"` + // The stack trace associated with the failure. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + StackTrace string `protobuf:"bytes,3,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + // A formatted, human-readable string that contains the message, span + // (if available), and trace (if available). The format of this string is + // not specified and is likely to be inconsistent between implementations. 
+ Formatted string `protobuf:"bytes,4,opt,name=formatted,proto3" json:"formatted,omitempty"` +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) Reset() { + *x = OutboundMessage_CompileResponse_CompileFailure{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OutboundMessage_CompileResponse_CompileFailure) ProtoMessage() {} + +func (x *OutboundMessage_CompileResponse_CompileFailure) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OutboundMessage_CompileResponse_CompileFailure.ProtoReflect.Descriptor instead. +func (*OutboundMessage_CompileResponse_CompileFailure) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{1, 1, 1} +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetSpan() *SourceSpan { + if x != nil { + return x.Span + } + return nil +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetStackTrace() string { + if x != nil { + return x.StackTrace + } + return "" +} + +func (x *OutboundMessage_CompileResponse_CompileFailure) GetFormatted() string { + if x != nil { + return x.Formatted + } + return "" +} + +// A single point in a source file. 
+type SourceSpan_SourceLocation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The 0-based offset of this location within the source file. + Offset uint32 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"` + // The 0-based line number of this location within the source file. + Line uint32 `protobuf:"varint,2,opt,name=line,proto3" json:"line,omitempty"` + // The 0-based column number of this location within its line. + Column uint32 `protobuf:"varint,3,opt,name=column,proto3" json:"column,omitempty"` +} + +func (x *SourceSpan_SourceLocation) Reset() { + *x = SourceSpan_SourceLocation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceSpan_SourceLocation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceSpan_SourceLocation) ProtoMessage() {} + +func (x *SourceSpan_SourceLocation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceSpan_SourceLocation.ProtoReflect.Descriptor instead. +func (*SourceSpan_SourceLocation) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{3, 0} +} + +func (x *SourceSpan_SourceLocation) GetOffset() uint32 { + if x != nil { + return x.Offset + } + return 0 +} + +func (x *SourceSpan_SourceLocation) GetLine() uint32 { + if x != nil { + return x.Line + } + return 0 +} + +func (x *SourceSpan_SourceLocation) GetColumn() uint32 { + if x != nil { + return x.Column + } + return 0 +} + +// A SassScript string value. 
+type Value_String struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The contents of the string. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Whether the string is quoted or unquoted. + Quoted bool `protobuf:"varint,2,opt,name=quoted,proto3" json:"quoted,omitempty"` +} + +func (x *Value_String) Reset() { + *x = Value_String{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_String) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_String) ProtoMessage() {} + +func (x *Value_String) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_String.ProtoReflect.Descriptor instead. +func (*Value_String) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 0} +} + +func (x *Value_String) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +func (x *Value_String) GetQuoted() bool { + if x != nil { + return x.Quoted + } + return false +} + +// A SassScript number value. +type Value_Number struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The number's numeric value. + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + // The number's numerator units. + // + // The endpoint sending the number must ensure that no numerator units are + // [compatible][] with any denominator units. 
Such compatible units must be + // simplified away according to the multiplicative factor between them + // defined in the CSS Values and Units spec. + // + // [compatible]: https://www.w3.org/TR/css-values-4/#compat + Numerators []string `protobuf:"bytes,2,rep,name=numerators,proto3" json:"numerators,omitempty"` + // The number's denominator units. + Denominators []string `protobuf:"bytes,3,rep,name=denominators,proto3" json:"denominators,omitempty"` +} + +func (x *Value_Number) Reset() { + *x = Value_Number{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Number) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Number) ProtoMessage() {} + +func (x *Value_Number) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Number.ProtoReflect.Descriptor instead. +func (*Value_Number) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 1} +} + +func (x *Value_Number) GetValue() float64 { + if x != nil { + return x.Value + } + return 0 +} + +func (x *Value_Number) GetNumerators() []string { + if x != nil { + return x.Numerators + } + return nil +} + +func (x *Value_Number) GetDenominators() []string { + if x != nil { + return x.Denominators + } + return nil +} + +// A SassScript color value, represented as red, green, and blue channels. +// +// All Sass color values can be equivalently represented as `RgbColor`, +// `HslColor`, and `HwbColor` messages without loss of color information that +// can affect CSS rendering. 
As such, either endpoint may choose to send any +// color value as any one of these three messages. +type Value_RgbColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's red channel. May not be above 255. + Red uint32 `protobuf:"varint,1,opt,name=red,proto3" json:"red,omitempty"` + // The color's green channel. May not be above 255. + Green uint32 `protobuf:"varint,2,opt,name=green,proto3" json:"green,omitempty"` + // The color's blue channel. May not be above 255. + Blue uint32 `protobuf:"varint,3,opt,name=blue,proto3" json:"blue,omitempty"` + // The color's alpha channel. Must be between 0 and 1, + // inclusive. + Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_RgbColor) Reset() { + *x = Value_RgbColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_RgbColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_RgbColor) ProtoMessage() {} + +func (x *Value_RgbColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_RgbColor.ProtoReflect.Descriptor instead. 
+func (*Value_RgbColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 2} +} + +func (x *Value_RgbColor) GetRed() uint32 { + if x != nil { + return x.Red + } + return 0 +} + +func (x *Value_RgbColor) GetGreen() uint32 { + if x != nil { + return x.Green + } + return 0 +} + +func (x *Value_RgbColor) GetBlue() uint32 { + if x != nil { + return x.Blue + } + return 0 +} + +func (x *Value_RgbColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript color value, represented as hue, saturation, and lightness channels. +type Value_HslColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's hue. + Hue float64 `protobuf:"fixed64,1,opt,name=hue,proto3" json:"hue,omitempty"` + // The color's percent saturation. Must be between 0 and 100, + // inclusive. + Saturation float64 `protobuf:"fixed64,2,opt,name=saturation,proto3" json:"saturation,omitempty"` + // The color's percent lightness. Must be between 0 and 100, + // inclusive. + Lightness float64 `protobuf:"fixed64,3,opt,name=lightness,proto3" json:"lightness,omitempty"` + // The color's alpha channel. Must be between 0 and 1, + // inclusive. 
+ Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_HslColor) Reset() { + *x = Value_HslColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HslColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HslColor) ProtoMessage() {} + +func (x *Value_HslColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[27] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HslColor.ProtoReflect.Descriptor instead. +func (*Value_HslColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 3} +} + +func (x *Value_HslColor) GetHue() float64 { + if x != nil { + return x.Hue + } + return 0 +} + +func (x *Value_HslColor) GetSaturation() float64 { + if x != nil { + return x.Saturation + } + return 0 +} + +func (x *Value_HslColor) GetLightness() float64 { + if x != nil { + return x.Lightness + } + return 0 +} + +func (x *Value_HslColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript color value, represented as hue, whiteness, and blackness +// channels. +type Value_HwbColor struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The color's hue. + Hue float64 `protobuf:"fixed64,1,opt,name=hue,proto3" json:"hue,omitempty"` + // The color's percent whiteness. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + Whiteness float64 `protobuf:"fixed64,2,opt,name=whiteness,proto3" json:"whiteness,omitempty"` + // The color's percent blackness. 
Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + Blackness float64 `protobuf:"fixed64,3,opt,name=blackness,proto3" json:"blackness,omitempty"` + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + Alpha float64 `protobuf:"fixed64,4,opt,name=alpha,proto3" json:"alpha,omitempty"` +} + +func (x *Value_HwbColor) Reset() { + *x = Value_HwbColor{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HwbColor) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HwbColor) ProtoMessage() {} + +func (x *Value_HwbColor) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[28] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HwbColor.ProtoReflect.Descriptor instead. +func (*Value_HwbColor) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 4} +} + +func (x *Value_HwbColor) GetHue() float64 { + if x != nil { + return x.Hue + } + return 0 +} + +func (x *Value_HwbColor) GetWhiteness() float64 { + if x != nil { + return x.Whiteness + } + return 0 +} + +func (x *Value_HwbColor) GetBlackness() float64 { + if x != nil { + return x.Blackness + } + return 0 +} + +func (x *Value_HwbColor) GetAlpha() float64 { + if x != nil { + return x.Alpha + } + return 0 +} + +// A SassScript list value. +type Value_List struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The type of separator for this list. Mandatory. 
+ Separator ListSeparator `protobuf:"varint,1,opt,name=separator,proto3,enum=sass.embedded_protocol.ListSeparator" json:"separator,omitempty"` + // Whether this list has square brackets. Mandatory. + HasBrackets bool `protobuf:"varint,2,opt,name=has_brackets,json=hasBrackets,proto3" json:"has_brackets,omitempty"` + // The elements of this list. + Contents []*Value `protobuf:"bytes,3,rep,name=contents,proto3" json:"contents,omitempty"` +} + +func (x *Value_List) Reset() { + *x = Value_List{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[29] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_List) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_List) ProtoMessage() {} + +func (x *Value_List) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[29] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_List.ProtoReflect.Descriptor instead. +func (*Value_List) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 5} +} + +func (x *Value_List) GetSeparator() ListSeparator { + if x != nil { + return x.Separator + } + return ListSeparator_COMMA +} + +func (x *Value_List) GetHasBrackets() bool { + if x != nil { + return x.HasBrackets + } + return false +} + +func (x *Value_List) GetContents() []*Value { + if x != nil { + return x.Contents + } + return nil +} + +// A SassScript map value. +type Value_Map struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The entries in this map. The sending endpoint must guarantee that no two + // entries have the same key. 
+ Entries []*Value_Map_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` +} + +func (x *Value_Map) Reset() { + *x = Value_Map{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Map) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Map) ProtoMessage() {} + +func (x *Value_Map) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[30] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Map.ProtoReflect.Descriptor instead. +func (*Value_Map) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 6} +} + +func (x *Value_Map) GetEntries() []*Value_Map_Entry { + if x != nil { + return x.Entries + } + return nil +} + +// A first-class function defined in the compiler. New `CompilerFunction`s may +// only be created by the compiler, but the host may pass `CompilerFunction`s +// back to the compiler as long as their IDs match IDs of functions received +// by the host during that same compilation. +type Value_CompilerFunction struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A unique ID for this function. The compiler is responsible for generating + // this ID and ensuring it's unique across all functions passed to the host + // for this compilation. Mandatory. 
+ Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *Value_CompilerFunction) Reset() { + *x = Value_CompilerFunction{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[31] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_CompilerFunction) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_CompilerFunction) ProtoMessage() {} + +func (x *Value_CompilerFunction) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[31] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_CompilerFunction.ProtoReflect.Descriptor instead. +func (*Value_CompilerFunction) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 7} +} + +func (x *Value_CompilerFunction) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +// An anonymous custom function defined in the host. New `HostFunction`s may +// only be created by the host, and `HostFunction`s may *never* be passed from +// the compiler to the host. The compiler must instead pass a +// `CompilerFunction` that wraps the `HostFunction`. +type Value_HostFunction struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A unique ID for this function. The compiler must pass this ID as + // `OutboundRequest.FunctionCallRequest.id` when invoking this function. The + // host is responsible for generating this ID and ensuring it's unique + // across all functions for *all* compilations. Mandatory. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The signature for this function. Mandatory. 
+ // + // If this isn't a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), the compiler must treat it as though the function that returned + // this `HostFunction` threw an error. + // + // > This ensures that the host doesn't need to be able to correctly parse + // > the entire function declaration syntax. + // + // The compiler may not invoke the function by its name, since it's not + // guaranteed to be globally unique. However, it may use the name to + // generate the string representation of this function. + Signature string `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` +} + +func (x *Value_HostFunction) Reset() { + *x = Value_HostFunction{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[32] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_HostFunction) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_HostFunction) ProtoMessage() {} + +func (x *Value_HostFunction) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[32] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_HostFunction.ProtoReflect.Descriptor instead. +func (*Value_HostFunction) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 8} +} + +func (x *Value_HostFunction) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Value_HostFunction) GetSignature() string { + if x != nil { + return x.Signature + } + return "" +} + +// A SassScript argument list value. This represents rest arguments passed to +// a function's `$arg...` parameter. 
Unlike a normal `List`, an argument list +// has an associated keywords map which tracks keyword arguments passed in +// alongside positional arguments. +// +// For each `ArgumentList` in `FunctionCallRequest.arguments` (including those +// nested within `List`s and `Map`s), the host must track whether its keyword +// arguments were accessed by the user. If they were, it must add its +// `ArgumentList.id` to `FunctionCallResponse.accessed_argument_lists`. +// +// The compiler must treat every `ArgumentList` whose `ArgumentList.id` +// appears in `FunctionCallResponse.accessed_argument_lists` as though it had +// been passed to `meta.keywords()`. +type Value_ArgumentList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // An ID for this argument list that's unique within the scope of a given + // `FunctionCallRequest`. + // + // The special ID `0` is reserved for `ArgumentList`s created by the host, + // and may not be used by the compiler. These `ArgumentList`s do not need to + // have their IDs added to `FunctionCallResponse.accessed_argument_lists`, + // and the compiler should treat them as though their keywords have always + // been accessed. + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The type of separator for this list. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + Separator ListSeparator `protobuf:"varint,2,opt,name=separator,proto3,enum=sass.embedded_protocol.ListSeparator" json:"separator,omitempty"` + // The argument list's positional contents. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + Contents []*Value `protobuf:"bytes,3,rep,name=contents,proto3" json:"contents,omitempty"` + // The argument list's keywords. 
The compiler must set this, but the host + // may omit it for `ArgumentList`s that were originally created by the + // compiler (that is, those with a non-0 ID). + Keywords map[string]*Value `protobuf:"bytes,4,rep,name=keywords,proto3" json:"keywords,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *Value_ArgumentList) Reset() { + *x = Value_ArgumentList{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[33] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_ArgumentList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_ArgumentList) ProtoMessage() {} + +func (x *Value_ArgumentList) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[33] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_ArgumentList.ProtoReflect.Descriptor instead. +func (*Value_ArgumentList) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 9} +} + +func (x *Value_ArgumentList) GetId() uint32 { + if x != nil { + return x.Id + } + return 0 +} + +func (x *Value_ArgumentList) GetSeparator() ListSeparator { + if x != nil { + return x.Separator + } + return ListSeparator_COMMA +} + +func (x *Value_ArgumentList) GetContents() []*Value { + if x != nil { + return x.Contents + } + return nil +} + +func (x *Value_ArgumentList) GetKeywords() map[string]*Value { + if x != nil { + return x.Keywords + } + return nil +} + +// A SassScript calculation value. The compiler must send fully [simplified] +// calculations, meaning that simplifying it again will produce the same +// calculation. The host is not required to simplify calculations. 
+// +// The compiler must simplify any calculations it receives from the host +// before returning them from a function. If this simplification produces an +// error, it should be treated as though the function call threw that error. +// It should *not* be treated as a protocol error. +// +// [simplified]: https://github.com/sass/sass/tree/main/spec/types/calculation.md#simplifying-a-calculation +type Value_Calculation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The calculation's name. Mandatory. The host may only set this to names + // that the Sass specification uses to create calculations. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The calculation's arguments. Mandatory. The host must use exactly the + // number of arguments used by the Sass specification for calculations with + // the given `name`. + Arguments []*Value_Calculation_CalculationValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"` +} + +func (x *Value_Calculation) Reset() { + *x = Value_Calculation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[34] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation) ProtoMessage() {} + +func (x *Value_Calculation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[34] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 10} +} + +func (x *Value_Calculation) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Value_Calculation) GetArguments() []*Value_Calculation_CalculationValue { + if x != nil { + return x.Arguments + } + return nil +} + +// A single key/value pair in the map. +type Value_Map_Entry struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The key this entry is associated with. Mandatory. + Key *Value `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value associated with this key. Mandatory. + Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (x *Value_Map_Entry) Reset() { + *x = Value_Map_Entry{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[35] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Map_Entry) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Map_Entry) ProtoMessage() {} + +func (x *Value_Map_Entry) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[35] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Map_Entry.ProtoReflect.Descriptor instead. +func (*Value_Map_Entry) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 6, 0} +} + +func (x *Value_Map_Entry) GetKey() *Value { + if x != nil { + return x.Key + } + return nil +} + +func (x *Value_Map_Entry) GetValue() *Value { + if x != nil { + return x.Value + } + return nil +} + +// A single component of a calculation expression. 
+type Value_Calculation_CalculationValue struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The value of the component. Mandatory. + // + // Types that are assignable to Value: + // + // *Value_Calculation_CalculationValue_Number + // *Value_Calculation_CalculationValue_String_ + // *Value_Calculation_CalculationValue_Interpolation + // *Value_Calculation_CalculationValue_Operation + // *Value_Calculation_CalculationValue_Calculation + Value isValue_Calculation_CalculationValue_Value `protobuf_oneof:"value"` +} + +func (x *Value_Calculation_CalculationValue) Reset() { + *x = Value_Calculation_CalculationValue{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[37] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation_CalculationValue) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation_CalculationValue) ProtoMessage() {} + +func (x *Value_Calculation_CalculationValue) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[37] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation_CalculationValue.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation_CalculationValue) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 10, 0} +} + +func (m *Value_Calculation_CalculationValue) GetValue() isValue_Calculation_CalculationValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetNumber() *Value_Number { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Number); ok { + return x.Number + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetString_() string { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_String_); ok { + return x.String_ + } + return "" +} + +func (x *Value_Calculation_CalculationValue) GetInterpolation() string { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Interpolation); ok { + return x.Interpolation + } + return "" +} + +func (x *Value_Calculation_CalculationValue) GetOperation() *Value_Calculation_CalculationOperation { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Operation); ok { + return x.Operation + } + return nil +} + +func (x *Value_Calculation_CalculationValue) GetCalculation() *Value_Calculation { + if x, ok := x.GetValue().(*Value_Calculation_CalculationValue_Calculation); ok { + return x.Calculation + } + return nil +} + +type isValue_Calculation_CalculationValue_Value interface { + isValue_Calculation_CalculationValue_Value() +} + +type Value_Calculation_CalculationValue_Number struct { + Number *Value_Number `protobuf:"bytes,1,opt,name=number,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_String_ struct { + // An unquoted string, as from a function like `var()` or `env()`. + String_ string `protobuf:"bytes,2,opt,name=string,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Interpolation struct { + // An unquoted string as created by interpolation for + // backwards-compatibility with older Sass syntax. 
+ Interpolation string `protobuf:"bytes,3,opt,name=interpolation,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Operation struct { + Operation *Value_Calculation_CalculationOperation `protobuf:"bytes,4,opt,name=operation,proto3,oneof"` +} + +type Value_Calculation_CalculationValue_Calculation struct { + Calculation *Value_Calculation `protobuf:"bytes,5,opt,name=calculation,proto3,oneof"` +} + +func (*Value_Calculation_CalculationValue_Number) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_String_) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_Interpolation) isValue_Calculation_CalculationValue_Value() { +} + +func (*Value_Calculation_CalculationValue_Operation) isValue_Calculation_CalculationValue_Value() {} + +func (*Value_Calculation_CalculationValue_Calculation) isValue_Calculation_CalculationValue_Value() {} + +// A binary operation that appears in a calculation. +type Value_Calculation_CalculationOperation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // The operator to perform. + Operator CalculationOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=sass.embedded_protocol.CalculationOperator" json:"operator,omitempty"` + // The left-hand side of the operation. + Left *Value_Calculation_CalculationValue `protobuf:"bytes,2,opt,name=left,proto3" json:"left,omitempty"` + // The right-hand side of the operation. 
+ Right *Value_Calculation_CalculationValue `protobuf:"bytes,3,opt,name=right,proto3" json:"right,omitempty"` +} + +func (x *Value_Calculation_CalculationOperation) Reset() { + *x = Value_Calculation_CalculationOperation{} + if protoimpl.UnsafeEnabled { + mi := &file_embedded_sass_proto_msgTypes[38] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Value_Calculation_CalculationOperation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Value_Calculation_CalculationOperation) ProtoMessage() {} + +func (x *Value_Calculation_CalculationOperation) ProtoReflect() protoreflect.Message { + mi := &file_embedded_sass_proto_msgTypes[38] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Value_Calculation_CalculationOperation.ProtoReflect.Descriptor instead. 
+func (*Value_Calculation_CalculationOperation) Descriptor() ([]byte, []int) { + return file_embedded_sass_proto_rawDescGZIP(), []int{4, 10, 1} +} + +func (x *Value_Calculation_CalculationOperation) GetOperator() CalculationOperator { + if x != nil { + return x.Operator + } + return CalculationOperator_PLUS +} + +func (x *Value_Calculation_CalculationOperation) GetLeft() *Value_Calculation_CalculationValue { + if x != nil { + return x.Left + } + return nil +} + +func (x *Value_Calculation_CalculationOperation) GetRight() *Value_Calculation_CalculationValue { + if x != nil { + return x.Right + } + return nil +} + +var File_embedded_sass_proto protoreflect.FileDescriptor + +var file_embedded_sass_proto_rawDesc = []byte{ + 0x0a, 0x13, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x22, 0x9b, 0x12, + 0x0a, 0x0e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x12, 0x60, 0x0a, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x5f, 0x72, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x61, 0x73, 0x73, + 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x48, 0x00, 0x52, 0x0e, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x72, 0x0a, 0x15, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, + 0x7a, 0x65, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 
0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, + 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x61, 0x6e, 0x6f, 0x6e, 0x69, + 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, + 0x52, 0x14, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x0f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x35, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0e, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6d, 0x0a, 0x14, 0x66, 0x69, 0x6c, 0x65, + 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, + 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, + 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, + 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x48, 0x00, 0x52, 0x12, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x73, 0x0a, 0x16, 0x66, 0x75, 0x6e, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 
0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, + 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x14, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x0f, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, + 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0e, + 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, + 0x0a, 0x0e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, + 0x1a, 0x92, 0x07, 0x0a, 0x0e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x5b, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x41, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, + 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 
0x01, 0x28, 0x09, 0x48, 0x00, + 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x39, 0x0a, 0x05, 0x73, 0x74, 0x79, 0x6c, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, + 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x74, 0x79, 0x6c, 0x65, 0x52, 0x05, 0x73, 0x74, 0x79, 0x6c, + 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x61, 0x70, + 0x12, 0x5c, 0x0a, 0x09, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x73, 0x18, 0x06, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x3e, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, + 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, + 0x69, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x72, 0x52, 0x09, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x73, 0x12, 0x29, + 0x0a, 0x10, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, + 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x65, + 0x72, 0x74, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, + 0x61, 0x6c, 0x65, 0x72, 0x74, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, + 0x65, 0x72, 0x74, 0x5f, 0x61, 0x73, 0x63, 0x69, 0x69, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x0a, 0x61, 0x6c, 0x65, 0x72, 0x74, 0x41, 0x73, 0x63, 0x69, 0x69, 0x12, 0x18, 0x0a, 0x07, 0x76, + 0x65, 0x72, 0x62, 0x6f, 0x73, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x76, 0x65, + 0x72, 
0x62, 0x6f, 0x73, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x71, 0x75, 0x69, 0x65, 0x74, 0x5f, 0x64, + 0x65, 0x70, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x71, 0x75, 0x69, 0x65, 0x74, + 0x44, 0x65, 0x70, 0x73, 0x12, 0x3b, 0x0a, 0x1a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, + 0x61, 0x70, 0x5f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4d, 0x61, 0x70, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x73, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x68, 0x61, 0x72, 0x73, 0x65, 0x74, 0x18, 0x0d, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x07, 0x63, 0x68, 0x61, 0x72, 0x73, 0x65, 0x74, 0x1a, 0xcb, 0x01, 0x0a, 0x0b, + 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x36, 0x0a, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, + 0x79, 0x6e, 0x74, 0x61, 0x78, 0x52, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x5a, 0x0a, + 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x3e, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, + 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x52, + 0x08, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 
0x65, 0x72, 0x1a, 0x7b, 0x0a, 0x08, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x21, 0x0a, 0x0b, 0x69, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, + 0x48, 0x00, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x2a, + 0x0a, 0x10, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x5f, + 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0e, 0x66, 0x69, 0x6c, 0x65, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x42, 0x0a, 0x0a, 0x08, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x42, 0x07, 0x0a, 0x05, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x4a, + 0x04, 0x08, 0x01, 0x10, 0x02, 0x1a, 0x5c, 0x0a, 0x14, 0x43, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, + 0x03, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x03, 0x75, 0x72, + 0x6c, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x1a, 0xc7, 0x02, 0x0a, 0x0e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x5f, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x43, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x2e, 0x49, 0x6e, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 
0x67, 0x65, 0x2e, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x49, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x07, + 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x1a, + 0xa1, 0x01, 0x0a, 0x0d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, + 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x36, 0x0a, + 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x52, 0x06, 0x73, + 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x29, 0x0a, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, + 0x6d, 0x61, 0x70, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x61, 0x70, 0x55, 0x72, 0x6c, 0x88, 0x01, 0x01, + 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x5f, + 0x75, 0x72, 0x6c, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x1a, 0x63, 0x0a, + 0x12, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, + 0x02, 0x69, 0x64, 0x12, 0x1b, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, 0x6c, + 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x05, 
0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x1a, 0xbb, 0x01, 0x0a, 0x14, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, + 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x39, 0x0a, 0x07, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x07, 0x73, + 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x36, + 0x0a, 0x17, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, + 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0d, 0x52, + 0x15, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x73, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, + 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0xfe, 0x11, 0x0a, 0x0f, + 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, + 0x3d, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x45, 0x72, 0x72, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x64, + 0x0a, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x37, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x09, 0x6c, 0x6f, 0x67, 0x5f, 0x65, 0x76, 0x65, 0x6e, + 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x2e, 0x4c, 0x6f, 0x67, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x48, 0x00, 0x52, 0x08, 0x6c, 0x6f, 0x67, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x70, 0x0a, 0x14, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, 0x75, 0x74, + 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x61, 0x6e, + 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x48, 0x00, 0x52, 0x13, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x5e, 0x0a, 0x0e, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x35, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 
0x6e, + 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x6b, 0x0a, 0x13, 0x66, 0x69, 0x6c, 0x65, 0x5f, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, 0x75, + 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, 0x69, + 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, + 0x00, 0x52, 0x11, 0x66, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x71, 0x0a, 0x15, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x63, 0x61, 0x6c, 0x6c, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, 0x75, 0x74, + 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x46, 0x75, 0x6e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x48, 0x00, 0x52, 0x13, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x64, 0x0a, 0x10, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x37, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, + 0x75, 0x6e, 0x64, 0x4d, 0x65, 
0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x48, 0x00, 0x52, 0x0f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x1a, 0xdf, 0x01, + 0x0a, 0x0f, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x29, 0x0a, 0x10, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x5f, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x10, + 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x35, 0x0a, 0x16, 0x69, 0x6d, 0x70, 0x6c, 0x65, + 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, + 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, + 0x0a, 0x13, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x69, 0x6d, 0x70, + 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x1a, + 0xf7, 0x03, 0x0a, 0x0f, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x62, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 
0x6f, 0x6c, 0x2e, 0x4f, 0x75, + 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x43, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x43, 0x6f, + 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x07, + 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x62, 0x0a, 0x07, 0x66, 0x61, 0x69, 0x6c, 0x75, + 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x4f, 0x75, 0x74, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, + 0x48, 0x00, 0x52, 0x07, 0x66, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6c, + 0x6f, 0x61, 0x64, 0x65, 0x64, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0a, 0x6c, 0x6f, 0x61, 0x64, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x73, 0x1a, 0x47, 0x0a, 0x0e, + 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x53, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x10, + 0x0a, 0x03, 0x63, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x63, 0x73, 0x73, + 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x61, 0x70, 0x4a, + 0x04, 0x08, 0x03, 0x10, 0x04, 0x1a, 0xa1, 0x01, 0x0a, 0x0e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, + 0x65, 0x46, 0x61, 0x69, 0x6c, 0x75, 0x72, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x12, 0x36, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 
0x22, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x53, 0x70, 0x61, 0x6e, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, + 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0a, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, + 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x42, 0x08, 0x0a, 0x06, 0x72, 0x65, 0x73, + 0x75, 0x6c, 0x74, 0x4a, 0x04, 0x08, 0x01, 0x10, 0x02, 0x1a, 0xe9, 0x01, 0x0a, 0x08, 0x4c, 0x6f, + 0x67, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x38, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x4c, 0x6f, + 0x67, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x3b, 0x0a, 0x04, 0x73, 0x70, + 0x61, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x48, 0x00, 0x52, 0x04, + 0x73, 0x70, 0x61, 0x6e, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x63, 0x6b, + 0x5f, 0x74, 0x72, 0x61, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, + 0x61, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x6f, 0x72, 0x6d, + 0x61, 0x74, 0x74, 0x65, 0x64, 0x18, 0x06, 
0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x74, 0x65, 0x64, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x73, 0x70, 0x61, 0x6e, 0x4a, + 0x04, 0x08, 0x01, 0x10, 0x02, 0x1a, 0x7f, 0x0a, 0x13, 0x43, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, + 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1f, 0x0a, 0x0b, + 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, + 0x03, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, + 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x1a, 0x58, 0x0a, 0x0d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, + 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, + 0x1a, 0x7d, 0x0a, 0x11, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, + 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x69, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x65, 0x72, 0x49, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 
0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, + 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x66, + 0x72, 0x6f, 0x6d, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x1a, + 0xaf, 0x01, 0x0a, 0x13, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x61, 0x6c, 0x6c, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x21, 0x0a, + 0x0b, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0d, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, + 0x12, 0x3b, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x05, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x42, 0x0c, 0x0a, + 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x4a, 0x04, 0x08, 0x02, 0x10, + 0x03, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x78, 0x0a, 0x0d, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x3d, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x29, 0x2e, 0x73, 0x61, + 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, + 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 
0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x18, 0x0a, 0x07, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0xbd, 0x02, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x47, 0x0a, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x70, 0x61, 0x6e, 0x2e, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x12, 0x48, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x31, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, + 0x70, 0x61, 0x6e, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x88, 0x01, 0x01, 0x12, 0x10, 0x0a, 0x03, + 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x18, + 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x1a, 0x54, 0x0a, 0x0e, 0x53, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, + 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, + 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x16, 0x0a, 
0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x42, 0x06, + 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x64, 0x22, 0xb8, 0x16, 0x0a, 0x05, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x12, 0x3e, 0x0a, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, + 0x12, 0x3e, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x48, 0x00, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, + 0x12, 0x45, 0x0a, 0x09, 0x72, 0x67, 0x62, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, + 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x2e, 0x52, 0x67, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x72, + 0x67, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x45, 0x0a, 0x09, 0x68, 0x73, 0x6c, 0x5f, 0x63, + 0x6f, 0x6c, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, + 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x48, 0x73, 0x6c, 0x43, 0x6f, 0x6c, + 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x68, 0x73, 0x6c, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x38, + 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 
0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, + 0x48, 0x00, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x35, 0x0a, 0x03, 0x6d, 0x61, 0x70, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4d, 0x61, 0x70, 0x48, 0x00, 0x52, 0x03, 0x6d, 0x61, 0x70, 0x12, + 0x46, 0x0a, 0x09, 0x73, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x53, 0x69, 0x6e, 0x67, + 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x48, 0x00, 0x52, 0x09, 0x73, 0x69, + 0x6e, 0x67, 0x6c, 0x65, 0x74, 0x6f, 0x6e, 0x12, 0x5d, 0x0a, 0x11, 0x63, 0x6f, 0x6d, 0x70, 0x69, + 0x6c, 0x65, 0x72, 0x5f, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, + 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x10, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x46, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x51, 0x0a, 0x0d, 0x68, 0x6f, 0x73, 0x74, 0x5f, 0x66, + 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2a, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x48, 0x6f, 0x73, + 0x74, 0x46, 0x75, 0x6e, 0x63, 
0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0c, 0x68, 0x6f, 0x73, + 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x51, 0x0a, 0x0d, 0x61, 0x72, 0x67, + 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x6c, 0x69, 0x73, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x2a, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, + 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, + 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x0c, + 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x09, + 0x68, 0x77, 0x62, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x26, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x48, + 0x77, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x48, 0x00, 0x52, 0x08, 0x68, 0x77, 0x62, 0x43, 0x6f, + 0x6c, 0x6f, 0x72, 0x12, 0x4d, 0x0a, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x1a, 0x34, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x12, 0x0a, 0x04, + 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, + 0x12, 0x16, 0x0a, 0x06, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x06, 0x71, 0x75, 0x6f, 0x74, 0x65, 0x64, 0x1a, 0x62, 0x0a, 0x06, 0x4e, 0x75, 0x6d, 0x62, + 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 
0x18, 0x01, 0x20, 0x01, 0x28, + 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x6e, 0x75, 0x6d, 0x65, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x6e, 0x75, + 0x6d, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x64, 0x65, 0x6e, 0x6f, + 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, + 0x64, 0x65, 0x6e, 0x6f, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x1a, 0x5c, 0x0a, 0x08, + 0x52, 0x67, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x65, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x72, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, + 0x65, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x67, 0x72, 0x65, 0x65, 0x6e, + 0x12, 0x12, 0x0a, 0x04, 0x62, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, + 0x62, 0x6c, 0x75, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0x70, 0x0a, 0x08, 0x48, 0x73, + 0x6c, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x68, 0x75, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x03, 0x68, 0x75, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x61, 0x74, 0x75, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x73, 0x61, + 0x74, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x6c, 0x69, 0x67, 0x68, + 0x74, 0x6e, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x6c, 0x69, 0x67, + 0x68, 0x74, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0x6e, 0x0a, 0x08, + 0x48, 0x77, 0x62, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x68, 0x75, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x03, 0x68, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x77, 0x68, + 0x69, 
0x74, 0x65, 0x6e, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x77, + 0x68, 0x69, 0x74, 0x65, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x62, 0x6c, 0x61, 0x63, + 0x6b, 0x6e, 0x65, 0x73, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x09, 0x62, 0x6c, 0x61, + 0x63, 0x6b, 0x6e, 0x65, 0x73, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x1a, 0xa9, 0x01, 0x0a, + 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x43, 0x0a, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, + 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x68, 0x61, + 0x73, 0x5f, 0x62, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0b, 0x68, 0x61, 0x73, 0x42, 0x72, 0x61, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x39, 0x0a, + 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1d, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xb7, 0x01, 0x0a, 0x03, 0x4d, 0x61, 0x70, + 0x12, 0x41, 0x0a, 0x07, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x27, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x4d, 0x61, 0x70, 0x2e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x65, 0x6e, 0x74, 0x72, + 0x69, 0x65, 0x73, 0x1a, 0x6d, 0x0a, 0x05, 
0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x2f, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x61, 0x73, 0x73, + 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, + 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x1a, 0x22, 0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x72, 0x46, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x1a, 0x3c, 0x0a, 0x0c, 0x48, 0x6f, 0x73, 0x74, 0x46, 0x75, + 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x1a, 0xd0, 0x02, 0x0a, 0x0c, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x02, 0x69, 0x64, 0x12, 0x43, 0x0a, 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, + 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, + 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, + 0x6c, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, + 0x09, 0x73, 0x65, 0x70, 0x61, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x39, 0x0a, 0x08, 0x63, 0x6f, + 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 
0x1d, 0x2e, 0x73, + 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x63, 0x6f, 0x6e, + 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x54, 0x0a, 0x08, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, + 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x38, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, + 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, + 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x41, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x4c, + 0x69, 0x73, 0x74, 0x2e, 0x4b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x1a, 0x5a, 0x0a, 0x0d, 0x4b, + 0x65, 0x79, 0x77, 0x6f, 0x72, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x33, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, + 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0xce, 0x05, 0x0a, 0x0b, 0x43, 0x61, 0x6c, 0x63, + 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x58, 0x0a, 0x09, 0x61, + 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, + 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, + 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, + 0x61, 0x74, 0x69, 
0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, + 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0xcc, 0x02, 0x0a, 0x10, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x3e, 0x0a, 0x06, 0x6e, 0x75, + 0x6d, 0x62, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x73, 0x61, 0x73, + 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x4e, 0x75, 0x6d, 0x62, 0x65, 0x72, + 0x48, 0x00, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x06, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x12, 0x26, 0x0a, 0x0d, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x6f, 0x6c, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x70, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x5e, 0x0a, 0x09, + 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x3e, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, + 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, + 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x48, + 0x00, 0x52, 0x09, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4d, 0x0a, 0x0b, + 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, + 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 
0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x0b, + 0x63, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x07, 0x0a, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x81, 0x02, 0x0a, 0x14, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x47, 0x0a, + 0x08, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x2b, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x6f, 0x70, + 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x4e, 0x0a, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, 0x65, + 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, + 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x52, 0x04, 0x6c, 0x65, 0x66, 0x74, 0x12, 0x50, 0x0a, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x73, 0x61, 0x73, 0x73, 0x2e, 0x65, 0x6d, 0x62, + 0x65, 0x64, 0x64, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x2e, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x52, 0x05, 0x72, 0x69, 0x67, 0x68, 0x74, 0x42, 0x07, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x2a, 0x2b, 0x0a, 0x0b, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x53, 0x74, 0x79, 0x6c, 0x65, + 0x12, 0x0c, 0x0a, 0x08, 0x45, 0x58, 0x50, 0x41, 0x4e, 0x44, 0x45, 0x44, 0x10, 0x00, 0x12, 
0x0e, + 0x0a, 0x0a, 0x43, 0x4f, 0x4d, 0x50, 0x52, 0x45, 0x53, 0x53, 0x45, 0x44, 0x10, 0x01, 0x2a, 0x29, + 0x0a, 0x06, 0x53, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x43, 0x53, 0x53, + 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x49, 0x4e, 0x44, 0x45, 0x4e, 0x54, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x07, 0x0a, 0x03, 0x43, 0x53, 0x53, 0x10, 0x02, 0x2a, 0x3f, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, + 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x00, 0x12, 0x17, 0x0a, 0x13, 0x44, 0x45, 0x50, 0x52, 0x45, 0x43, + 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x5f, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, + 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x02, 0x2a, 0x38, 0x0a, 0x11, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x09, 0x0a, 0x05, 0x50, 0x41, 0x52, 0x53, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x41, + 0x52, 0x41, 0x4d, 0x53, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x49, 0x4e, 0x54, 0x45, 0x52, 0x4e, + 0x41, 0x4c, 0x10, 0x02, 0x2a, 0x3f, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x70, 0x61, + 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x09, 0x0a, 0x05, 0x43, 0x4f, 0x4d, 0x4d, 0x41, 0x10, 0x00, + 0x12, 0x09, 0x0a, 0x05, 0x53, 0x50, 0x41, 0x43, 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x53, + 0x4c, 0x41, 0x53, 0x48, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x44, 0x45, 0x43, 0x49, + 0x44, 0x45, 0x44, 0x10, 0x03, 0x2a, 0x2f, 0x0a, 0x0e, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x74, + 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x54, 0x52, 0x55, 0x45, 0x10, + 0x00, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x41, 0x4c, 0x53, 0x45, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, + 0x4e, 0x55, 0x4c, 0x4c, 0x10, 0x02, 0x2a, 0x41, 0x0a, 0x13, 0x43, 0x61, 0x6c, 0x63, 0x75, 0x6c, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x08, 0x0a, + 0x04, 0x50, 0x4c, 0x55, 0x53, 
0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x55, 0x53, + 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x49, 0x4d, 0x45, 0x53, 0x10, 0x02, 0x12, 0x0a, 0x0a, + 0x06, 0x44, 0x49, 0x56, 0x49, 0x44, 0x45, 0x10, 0x03, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, +} + +var ( + file_embedded_sass_proto_rawDescOnce sync.Once + file_embedded_sass_proto_rawDescData = file_embedded_sass_proto_rawDesc +) + +func file_embedded_sass_proto_rawDescGZIP() []byte { + file_embedded_sass_proto_rawDescOnce.Do(func() { + file_embedded_sass_proto_rawDescData = protoimpl.X.CompressGZIP(file_embedded_sass_proto_rawDescData) + }) + return file_embedded_sass_proto_rawDescData +} + +var file_embedded_sass_proto_enumTypes = make([]protoimpl.EnumInfo, 7) +var file_embedded_sass_proto_msgTypes = make([]protoimpl.MessageInfo, 39) +var file_embedded_sass_proto_goTypes = []interface{}{ + (OutputStyle)(0), // 0: sass.embedded_protocol.OutputStyle + (Syntax)(0), // 1: sass.embedded_protocol.Syntax + (LogEventType)(0), // 2: sass.embedded_protocol.LogEventType + (ProtocolErrorType)(0), // 3: sass.embedded_protocol.ProtocolErrorType + (ListSeparator)(0), // 4: sass.embedded_protocol.ListSeparator + (SingletonValue)(0), // 5: sass.embedded_protocol.SingletonValue + (CalculationOperator)(0), // 6: sass.embedded_protocol.CalculationOperator + (*InboundMessage)(nil), // 7: sass.embedded_protocol.InboundMessage + (*OutboundMessage)(nil), // 8: sass.embedded_protocol.OutboundMessage + (*ProtocolError)(nil), // 9: sass.embedded_protocol.ProtocolError + (*SourceSpan)(nil), // 10: sass.embedded_protocol.SourceSpan + (*Value)(nil), // 11: sass.embedded_protocol.Value + (*InboundMessage_VersionRequest)(nil), // 12: sass.embedded_protocol.InboundMessage.VersionRequest + (*InboundMessage_CompileRequest)(nil), // 13: sass.embedded_protocol.InboundMessage.CompileRequest + (*InboundMessage_CanonicalizeResponse)(nil), // 14: sass.embedded_protocol.InboundMessage.CanonicalizeResponse + 
(*InboundMessage_ImportResponse)(nil), // 15: sass.embedded_protocol.InboundMessage.ImportResponse + (*InboundMessage_FileImportResponse)(nil), // 16: sass.embedded_protocol.InboundMessage.FileImportResponse + (*InboundMessage_FunctionCallResponse)(nil), // 17: sass.embedded_protocol.InboundMessage.FunctionCallResponse + (*InboundMessage_CompileRequest_StringInput)(nil), // 18: sass.embedded_protocol.InboundMessage.CompileRequest.StringInput + (*InboundMessage_CompileRequest_Importer)(nil), // 19: sass.embedded_protocol.InboundMessage.CompileRequest.Importer + (*InboundMessage_ImportResponse_ImportSuccess)(nil), // 20: sass.embedded_protocol.InboundMessage.ImportResponse.ImportSuccess + (*OutboundMessage_VersionResponse)(nil), // 21: sass.embedded_protocol.OutboundMessage.VersionResponse + (*OutboundMessage_CompileResponse)(nil), // 22: sass.embedded_protocol.OutboundMessage.CompileResponse + (*OutboundMessage_LogEvent)(nil), // 23: sass.embedded_protocol.OutboundMessage.LogEvent + (*OutboundMessage_CanonicalizeRequest)(nil), // 24: sass.embedded_protocol.OutboundMessage.CanonicalizeRequest + (*OutboundMessage_ImportRequest)(nil), // 25: sass.embedded_protocol.OutboundMessage.ImportRequest + (*OutboundMessage_FileImportRequest)(nil), // 26: sass.embedded_protocol.OutboundMessage.FileImportRequest + (*OutboundMessage_FunctionCallRequest)(nil), // 27: sass.embedded_protocol.OutboundMessage.FunctionCallRequest + (*OutboundMessage_CompileResponse_CompileSuccess)(nil), // 28: sass.embedded_protocol.OutboundMessage.CompileResponse.CompileSuccess + (*OutboundMessage_CompileResponse_CompileFailure)(nil), // 29: sass.embedded_protocol.OutboundMessage.CompileResponse.CompileFailure + (*SourceSpan_SourceLocation)(nil), // 30: sass.embedded_protocol.SourceSpan.SourceLocation + (*Value_String)(nil), // 31: sass.embedded_protocol.Value.String + (*Value_Number)(nil), // 32: sass.embedded_protocol.Value.Number + (*Value_RgbColor)(nil), // 33: sass.embedded_protocol.Value.RgbColor 
+ (*Value_HslColor)(nil), // 34: sass.embedded_protocol.Value.HslColor + (*Value_HwbColor)(nil), // 35: sass.embedded_protocol.Value.HwbColor + (*Value_List)(nil), // 36: sass.embedded_protocol.Value.List + (*Value_Map)(nil), // 37: sass.embedded_protocol.Value.Map + (*Value_CompilerFunction)(nil), // 38: sass.embedded_protocol.Value.CompilerFunction + (*Value_HostFunction)(nil), // 39: sass.embedded_protocol.Value.HostFunction + (*Value_ArgumentList)(nil), // 40: sass.embedded_protocol.Value.ArgumentList + (*Value_Calculation)(nil), // 41: sass.embedded_protocol.Value.Calculation + (*Value_Map_Entry)(nil), // 42: sass.embedded_protocol.Value.Map.Entry + nil, // 43: sass.embedded_protocol.Value.ArgumentList.KeywordsEntry + (*Value_Calculation_CalculationValue)(nil), // 44: sass.embedded_protocol.Value.Calculation.CalculationValue + (*Value_Calculation_CalculationOperation)(nil), // 45: sass.embedded_protocol.Value.Calculation.CalculationOperation +} +var file_embedded_sass_proto_depIdxs = []int32{ + 13, // 0: sass.embedded_protocol.InboundMessage.compile_request:type_name -> sass.embedded_protocol.InboundMessage.CompileRequest + 14, // 1: sass.embedded_protocol.InboundMessage.canonicalize_response:type_name -> sass.embedded_protocol.InboundMessage.CanonicalizeResponse + 15, // 2: sass.embedded_protocol.InboundMessage.import_response:type_name -> sass.embedded_protocol.InboundMessage.ImportResponse + 16, // 3: sass.embedded_protocol.InboundMessage.file_import_response:type_name -> sass.embedded_protocol.InboundMessage.FileImportResponse + 17, // 4: sass.embedded_protocol.InboundMessage.function_call_response:type_name -> sass.embedded_protocol.InboundMessage.FunctionCallResponse + 12, // 5: sass.embedded_protocol.InboundMessage.version_request:type_name -> sass.embedded_protocol.InboundMessage.VersionRequest + 9, // 6: sass.embedded_protocol.OutboundMessage.error:type_name -> sass.embedded_protocol.ProtocolError + 22, // 7: 
sass.embedded_protocol.OutboundMessage.compile_response:type_name -> sass.embedded_protocol.OutboundMessage.CompileResponse + 23, // 8: sass.embedded_protocol.OutboundMessage.log_event:type_name -> sass.embedded_protocol.OutboundMessage.LogEvent + 24, // 9: sass.embedded_protocol.OutboundMessage.canonicalize_request:type_name -> sass.embedded_protocol.OutboundMessage.CanonicalizeRequest + 25, // 10: sass.embedded_protocol.OutboundMessage.import_request:type_name -> sass.embedded_protocol.OutboundMessage.ImportRequest + 26, // 11: sass.embedded_protocol.OutboundMessage.file_import_request:type_name -> sass.embedded_protocol.OutboundMessage.FileImportRequest + 27, // 12: sass.embedded_protocol.OutboundMessage.function_call_request:type_name -> sass.embedded_protocol.OutboundMessage.FunctionCallRequest + 21, // 13: sass.embedded_protocol.OutboundMessage.version_response:type_name -> sass.embedded_protocol.OutboundMessage.VersionResponse + 3, // 14: sass.embedded_protocol.ProtocolError.type:type_name -> sass.embedded_protocol.ProtocolErrorType + 30, // 15: sass.embedded_protocol.SourceSpan.start:type_name -> sass.embedded_protocol.SourceSpan.SourceLocation + 30, // 16: sass.embedded_protocol.SourceSpan.end:type_name -> sass.embedded_protocol.SourceSpan.SourceLocation + 31, // 17: sass.embedded_protocol.Value.string:type_name -> sass.embedded_protocol.Value.String + 32, // 18: sass.embedded_protocol.Value.number:type_name -> sass.embedded_protocol.Value.Number + 33, // 19: sass.embedded_protocol.Value.rgb_color:type_name -> sass.embedded_protocol.Value.RgbColor + 34, // 20: sass.embedded_protocol.Value.hsl_color:type_name -> sass.embedded_protocol.Value.HslColor + 36, // 21: sass.embedded_protocol.Value.list:type_name -> sass.embedded_protocol.Value.List + 37, // 22: sass.embedded_protocol.Value.map:type_name -> sass.embedded_protocol.Value.Map + 5, // 23: sass.embedded_protocol.Value.singleton:type_name -> sass.embedded_protocol.SingletonValue + 38, // 24: 
sass.embedded_protocol.Value.compiler_function:type_name -> sass.embedded_protocol.Value.CompilerFunction + 39, // 25: sass.embedded_protocol.Value.host_function:type_name -> sass.embedded_protocol.Value.HostFunction + 40, // 26: sass.embedded_protocol.Value.argument_list:type_name -> sass.embedded_protocol.Value.ArgumentList + 35, // 27: sass.embedded_protocol.Value.hwb_color:type_name -> sass.embedded_protocol.Value.HwbColor + 41, // 28: sass.embedded_protocol.Value.calculation:type_name -> sass.embedded_protocol.Value.Calculation + 18, // 29: sass.embedded_protocol.InboundMessage.CompileRequest.string:type_name -> sass.embedded_protocol.InboundMessage.CompileRequest.StringInput + 0, // 30: sass.embedded_protocol.InboundMessage.CompileRequest.style:type_name -> sass.embedded_protocol.OutputStyle + 19, // 31: sass.embedded_protocol.InboundMessage.CompileRequest.importers:type_name -> sass.embedded_protocol.InboundMessage.CompileRequest.Importer + 20, // 32: sass.embedded_protocol.InboundMessage.ImportResponse.success:type_name -> sass.embedded_protocol.InboundMessage.ImportResponse.ImportSuccess + 11, // 33: sass.embedded_protocol.InboundMessage.FunctionCallResponse.success:type_name -> sass.embedded_protocol.Value + 1, // 34: sass.embedded_protocol.InboundMessage.CompileRequest.StringInput.syntax:type_name -> sass.embedded_protocol.Syntax + 19, // 35: sass.embedded_protocol.InboundMessage.CompileRequest.StringInput.importer:type_name -> sass.embedded_protocol.InboundMessage.CompileRequest.Importer + 1, // 36: sass.embedded_protocol.InboundMessage.ImportResponse.ImportSuccess.syntax:type_name -> sass.embedded_protocol.Syntax + 28, // 37: sass.embedded_protocol.OutboundMessage.CompileResponse.success:type_name -> sass.embedded_protocol.OutboundMessage.CompileResponse.CompileSuccess + 29, // 38: sass.embedded_protocol.OutboundMessage.CompileResponse.failure:type_name -> sass.embedded_protocol.OutboundMessage.CompileResponse.CompileFailure + 2, // 39: 
sass.embedded_protocol.OutboundMessage.LogEvent.type:type_name -> sass.embedded_protocol.LogEventType + 10, // 40: sass.embedded_protocol.OutboundMessage.LogEvent.span:type_name -> sass.embedded_protocol.SourceSpan + 11, // 41: sass.embedded_protocol.OutboundMessage.FunctionCallRequest.arguments:type_name -> sass.embedded_protocol.Value + 10, // 42: sass.embedded_protocol.OutboundMessage.CompileResponse.CompileFailure.span:type_name -> sass.embedded_protocol.SourceSpan + 4, // 43: sass.embedded_protocol.Value.List.separator:type_name -> sass.embedded_protocol.ListSeparator + 11, // 44: sass.embedded_protocol.Value.List.contents:type_name -> sass.embedded_protocol.Value + 42, // 45: sass.embedded_protocol.Value.Map.entries:type_name -> sass.embedded_protocol.Value.Map.Entry + 4, // 46: sass.embedded_protocol.Value.ArgumentList.separator:type_name -> sass.embedded_protocol.ListSeparator + 11, // 47: sass.embedded_protocol.Value.ArgumentList.contents:type_name -> sass.embedded_protocol.Value + 43, // 48: sass.embedded_protocol.Value.ArgumentList.keywords:type_name -> sass.embedded_protocol.Value.ArgumentList.KeywordsEntry + 44, // 49: sass.embedded_protocol.Value.Calculation.arguments:type_name -> sass.embedded_protocol.Value.Calculation.CalculationValue + 11, // 50: sass.embedded_protocol.Value.Map.Entry.key:type_name -> sass.embedded_protocol.Value + 11, // 51: sass.embedded_protocol.Value.Map.Entry.value:type_name -> sass.embedded_protocol.Value + 11, // 52: sass.embedded_protocol.Value.ArgumentList.KeywordsEntry.value:type_name -> sass.embedded_protocol.Value + 32, // 53: sass.embedded_protocol.Value.Calculation.CalculationValue.number:type_name -> sass.embedded_protocol.Value.Number + 45, // 54: sass.embedded_protocol.Value.Calculation.CalculationValue.operation:type_name -> sass.embedded_protocol.Value.Calculation.CalculationOperation + 41, // 55: sass.embedded_protocol.Value.Calculation.CalculationValue.calculation:type_name -> 
sass.embedded_protocol.Value.Calculation + 6, // 56: sass.embedded_protocol.Value.Calculation.CalculationOperation.operator:type_name -> sass.embedded_protocol.CalculationOperator + 44, // 57: sass.embedded_protocol.Value.Calculation.CalculationOperation.left:type_name -> sass.embedded_protocol.Value.Calculation.CalculationValue + 44, // 58: sass.embedded_protocol.Value.Calculation.CalculationOperation.right:type_name -> sass.embedded_protocol.Value.Calculation.CalculationValue + 59, // [59:59] is the sub-list for method output_type + 59, // [59:59] is the sub-list for method input_type + 59, // [59:59] is the sub-list for extension type_name + 59, // [59:59] is the sub-list for extension extendee + 0, // [0:59] is the sub-list for field type_name +} + +func init() { file_embedded_sass_proto_init() } +func file_embedded_sass_proto_init() { + if File_embedded_sass_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_embedded_sass_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ProtocolError); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceSpan); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_embedded_sass_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_VersionRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CompileRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CanonicalizeResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_ImportResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_FileImportResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_FunctionCallResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + 
switch v := v.(*InboundMessage_CompileRequest_StringInput); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_CompileRequest_Importer); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*InboundMessage_ImportResponse_ImportSuccess); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_VersionResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_LogEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CanonicalizeRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*OutboundMessage_ImportRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_FileImportRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_FunctionCallRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse_CompileSuccess); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OutboundMessage_CompileResponse_CompileFailure); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceSpan_SourceLocation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_String); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Number); i { + case 0: + return &v.state + case 
1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_RgbColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HslColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HwbColor); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_List); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Map); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_CompilerFunction); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_HostFunction); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} 
{ + switch v := v.(*Value_ArgumentList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Map_Entry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation_CalculationValue); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_embedded_sass_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Value_Calculation_CalculationOperation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_embedded_sass_proto_msgTypes[0].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_)(nil), + (*InboundMessage_CanonicalizeResponse_)(nil), + (*InboundMessage_ImportResponse_)(nil), + (*InboundMessage_FileImportResponse_)(nil), + (*InboundMessage_FunctionCallResponse_)(nil), + (*InboundMessage_VersionRequest_)(nil), + } + file_embedded_sass_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*OutboundMessage_Error)(nil), + (*OutboundMessage_CompileResponse_)(nil), + (*OutboundMessage_LogEvent_)(nil), + (*OutboundMessage_CanonicalizeRequest_)(nil), + (*OutboundMessage_ImportRequest_)(nil), + (*OutboundMessage_FileImportRequest_)(nil), + (*OutboundMessage_FunctionCallRequest_)(nil), + 
(*OutboundMessage_VersionResponse_)(nil), + } + file_embedded_sass_proto_msgTypes[3].OneofWrappers = []interface{}{} + file_embedded_sass_proto_msgTypes[4].OneofWrappers = []interface{}{ + (*Value_String_)(nil), + (*Value_Number_)(nil), + (*Value_RgbColor_)(nil), + (*Value_HslColor_)(nil), + (*Value_List_)(nil), + (*Value_Map_)(nil), + (*Value_Singleton)(nil), + (*Value_CompilerFunction_)(nil), + (*Value_HostFunction_)(nil), + (*Value_ArgumentList_)(nil), + (*Value_HwbColor_)(nil), + (*Value_Calculation_)(nil), + } + file_embedded_sass_proto_msgTypes[6].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_String_)(nil), + (*InboundMessage_CompileRequest_Path)(nil), + } + file_embedded_sass_proto_msgTypes[7].OneofWrappers = []interface{}{ + (*InboundMessage_CanonicalizeResponse_Url)(nil), + (*InboundMessage_CanonicalizeResponse_Error)(nil), + } + file_embedded_sass_proto_msgTypes[8].OneofWrappers = []interface{}{ + (*InboundMessage_ImportResponse_Success)(nil), + (*InboundMessage_ImportResponse_Error)(nil), + } + file_embedded_sass_proto_msgTypes[9].OneofWrappers = []interface{}{ + (*InboundMessage_FileImportResponse_FileUrl)(nil), + (*InboundMessage_FileImportResponse_Error)(nil), + } + file_embedded_sass_proto_msgTypes[10].OneofWrappers = []interface{}{ + (*InboundMessage_FunctionCallResponse_Success)(nil), + (*InboundMessage_FunctionCallResponse_Error)(nil), + } + file_embedded_sass_proto_msgTypes[12].OneofWrappers = []interface{}{ + (*InboundMessage_CompileRequest_Importer_Path)(nil), + (*InboundMessage_CompileRequest_Importer_ImporterId)(nil), + (*InboundMessage_CompileRequest_Importer_FileImporterId)(nil), + } + file_embedded_sass_proto_msgTypes[13].OneofWrappers = []interface{}{} + file_embedded_sass_proto_msgTypes[15].OneofWrappers = []interface{}{ + (*OutboundMessage_CompileResponse_Success)(nil), + (*OutboundMessage_CompileResponse_Failure)(nil), + } + file_embedded_sass_proto_msgTypes[16].OneofWrappers = []interface{}{} + 
file_embedded_sass_proto_msgTypes[20].OneofWrappers = []interface{}{ + (*OutboundMessage_FunctionCallRequest_Name)(nil), + (*OutboundMessage_FunctionCallRequest_FunctionId)(nil), + } + file_embedded_sass_proto_msgTypes[37].OneofWrappers = []interface{}{ + (*Value_Calculation_CalculationValue_Number)(nil), + (*Value_Calculation_CalculationValue_String_)(nil), + (*Value_Calculation_CalculationValue_Interpolation)(nil), + (*Value_Calculation_CalculationValue_Operation)(nil), + (*Value_Calculation_CalculationValue_Calculation)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_embedded_sass_proto_rawDesc, + NumEnums: 7, + NumMessages: 39, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_embedded_sass_proto_goTypes, + DependencyIndexes: file_embedded_sass_proto_depIdxs, + EnumInfos: file_embedded_sass_proto_enumTypes, + MessageInfos: file_embedded_sass_proto_msgTypes, + }.Build() + File_embedded_sass_proto = out.File + file_embedded_sass_proto_rawDesc = nil + file_embedded_sass_proto_goTypes = nil + file_embedded_sass_proto_depIdxs = nil +} diff --git a/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.proto b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.proto new file mode 100644 index 0000000..e5e0c2a --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/internal/embeddedsass/embedded_sass.proto @@ -0,0 +1,1003 @@ +// Copyright 2019 Google Inc. Use of this source code is governed by an +// MIT-style license that can be found in the LICENSE file or at +// https://opensource.org/licenses/MIT. + +syntax = "proto3"; + +package sass.embedded_protocol; + +// The wrapper type for all messages sent from the host to the compiler. This +// provides a `oneof` that makes it possible to determine the type of each +// inbound message. 
+message InboundMessage { + // A request for information about the version of the embedded compiler. The + // host can use this to provide diagnostic information to the user, to check + // which features the compiler supports, or to ensure that it's compatible + // with the same protocol version the compiler supports. + message VersionRequest { + // This version request's id. + uint32 id = 1; + + // This message's contents are intentionally empty. It just acts as a signal + // to the compiler to send a VersionResponse. More fields may be added in + // the future. + } + + // A request that compiles an entrypoint to CSS. + message CompileRequest { + reserved 1; + + // An input stylesheet provided as plain text, rather than loaded from the + // filesystem. + message StringInput { + // The contents of the stylesheet. + string source = 1; + + // The location from which `source` was loaded. If this is empty, it + // indicates that the URL is unknown. + // + // This must be a canonical URL recognized by `importer`, if it's passed. + string url = 2; + + // The syntax to use to parse `source`. + Syntax syntax = 3; + + // The importer to use to resolve imports relative to `url`. + Importer importer = 4; + } + + // The input stylesheet to parse. Mandatory. + oneof input { + // A stylesheet loaded from its contents. + StringInput string = 2; + + // A stylesheet loaded from the given path on the filesystem. + string path = 3; + } + + // How to format the CSS output. + OutputStyle style = 4; + + // Whether to generate a source map. Note that this will *not* add a source + // map comment to the stylesheet; that's up to the host or its users. + bool source_map = 5; + + // A wrapper message that represents either a user-defined importer or a + // load path on disk. This must be a wrapper because `oneof` types can't be + // `repeated`. + message Importer { + // The possible types of importer. Mandatory. 
+ oneof importer { + // A built-in importer that loads Sass files within the given directory + // on disk. + string path = 1; + + // A unique ID for a user-defined importer. This ID will be included in + // outbound `CanonicalizeRequest` and `ImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all + // importers registered for this compilation. + uint32 importer_id = 2; + + // A unique ID for a special kind of user-defined importer that tells + // the compiler where to look for files on the physical filesystem, but + // leaves the details of resolving partials and extensions and loading + // the file from disk up to the compiler itself. + // + // This ID will be included in outbound `FileImportRequest` messages to + // indicate which importer is being called. The host is responsible for + // generating this ID and ensuring that it's unique across all importers + // registered for this compilation. + uint32 file_importer_id = 3; + } + } + + // Importers (including load paths on the filesystem) to use when resolving + // imports that can't be resolved relative to the file that contains it. Each + // importer is checked in order until one recognizes the imported URL. + repeated Importer importers = 6; + + // Signatures for custom global functions whose behavior is defined by the + // host. + // + // If this is not a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), or if it conflicts with a function name that's built into the + // Sass language, the compiler must treat the compilation as failed. + // + // Compilers must ensure that pure-Sass functions take precedence over + // custom global functions. + repeated string global_functions = 7; + + // Whether to use terminal colors in the formatted message of errors and + // logs. 
+ bool alert_color = 8; + + // Whether to encode the formatted message of errors and logs in ASCII. + bool alert_ascii = 9; + + // Whether to report all deprecation warnings or only the first few ones. + // If this is `false`, the compiler may choose not to send events for + // repeated deprecation warnings. If this is `true`, the compiler must emit + // an event for every deprecation warning it encounters. + bool verbose = 10; + + // Whether to omit events for deprecation warnings coming from dependencies + // (files loaded from a different importer than the input). + bool quiet_deps = 11; + + // Whether to include sources in the generated sourcemap + bool source_map_include_sources = 12; + + // Whether to emit a `@charset`/BOM for non-ASCII stylesheets. + bool charset = 13; + } + + // A response indicating the result of canonicalizing an imported URL. + message CanonicalizeResponse { + uint32 id = 1; + + // The result of canonicalization. If this is unset, it indicates that the + // importer either did not recognize the URL, or could not find a stylesheet + // at the location it referred to. Optional. + oneof result { + // The successfully canonicalized URL. + // + // If this is not an absolute URL (including scheme), the compiler must + // treat that as an error thrown by the importer. + string url = 2; + + // An error message explaining why canonicalization failed. + // + // This indicates that a stylesheet was found, but a canonical URL for it + // could not be determined. If no stylesheet was found, `result` should be + // `null` instead. + string error = 3; + } + } + + // A response indicating the result of importing a canonical URL. + message ImportResponse { + uint32 id = 1; + + // The stylesheet's contents were loaded successfully. + message ImportSuccess { + // The text of the stylesheet. + string contents = 1; + + // The syntax of `contents`. 
+ Syntax syntax = 2; + + // An absolute, browser-accessible URL indicating the resolved location of + // the imported stylesheet. + // + // This should be a `file:` URL if one is available, but an `http:` URL is + // acceptable as well. If no URL is supplied, a `data:` URL is generated + // automatically from `contents`. + // + // If this is provided and is not an absolute URL (including scheme) the + // compiler must treat that as an error thrown by the importer. + optional string source_map_url = 3; + } + + // The result of loading the URL. If this is unset, it indicates that the + // importer either did not recognize the URL, or could not find a stylesheet + // at the location it referred to. Optional. + oneof result { + // The contents of the loaded stylesheet. + ImportSuccess success = 2; + + // An error message explaining why the URL could not be loaded. + string error = 3; + } + } + + // A response indicating the result of redirecting a URL to the filesystem. + message FileImportResponse { + uint32 id = 1; + + // The result of loading the URL. An unset result indicates that the + // importer did not recognize the URL and other importers or load paths + // should be tried. Optional. + oneof result { + // The absolute `file:` URL to look for the file on the physical + // filesystem. + // + // The compiler must verify to the best of its ability that this URL + // follows the format for an absolute `file:` URL on the current operating + // system without a hostname. If it doesn't, the compiler must treat that + // as an error thrown by the importer. See + // https://en.wikipedia.org/wiki/File_URI_scheme for details on the + // format. + // + // The compiler must handle turning this into a canonical URL by resolving + // it for partials, file extensions, and index files. The compiler must + // then loading the contents of the resulting canonical URL from the + // filesystem. 
+ string file_url = 2; + + // An error message explaining why the URL could not be loaded. + string error = 3; + } + } + + // A response indicating the result of calling a custom Sass function defined + // in the host. + message FunctionCallResponse { + uint32 id = 1; + + // The result of calling the function. Mandatory. + oneof result { + // The return value of a successful function call. + Value success = 2; + + // An error message explaining why the function call failed. + string error = 3; + } + + // The IDs of all `Value.ArgumentList`s in `FunctionCallRequest.arguments` + // whose keywords were accessed. See `Value.ArgumentList` for details. This + // may not include the special value `0` and it may not include multiple + // instances of the same ID. + repeated uint32 accessed_argument_lists = 4; + } + + // The wrapped message. Mandatory. + oneof message { + CompileRequest compile_request = 2; + CanonicalizeResponse canonicalize_response = 3; + ImportResponse import_response = 4; + FileImportResponse file_import_response = 5; + FunctionCallResponse function_call_response = 6; + VersionRequest version_request = 7; + } +} + +// The wrapper type for all messages sent from the compiler to the host. This +// provides a `oneof` that makes it possible to determine the type of each +// outbound message. +message OutboundMessage { + // A response that contains the version of the embedded compiler. + message VersionResponse { + // This version request's id. + uint32 id = 5; + + // The version of the embedded protocol, in semver format. + string protocol_version = 1; + + // The version of the embedded compiler package. This has no guaranteed + // format, although compilers are encouraged to use semver. + string compiler_version = 2; + + // The version of the Sass implementation that the embedded compiler wraps. + // This has no guaranteed format, although Sass implementations are + // encouraged to use semver. 
+ string implementation_version = 3; + + // The name of the Sass implementation that the embedded compiler wraps. + string implementation_name = 4; + } + + // A response that contains the result of a compilation. + message CompileResponse { + reserved 1; + + // A message indicating that the Sass file was successfully compiled to CSS. + message CompileSuccess { + reserved 3; + + // The compiled CSS. + string css = 1; + + // The JSON-encoded source map, or the empty string if + // `CompileRequest.source_map` was `false`. + // + // The compiler must not add a `"file"` key to this source map. It's the + // host's (or the host's user's) responsibility to determine how the + // generated CSS can be reached from the source map. + string source_map = 2; + } + + // A message indicating that the Sass file could not be successfully + // compiled to CSS. + message CompileFailure { + // A message describing the reason for the failure. + string message = 1; + + // The span associated with the failure. + SourceSpan span = 2; + + // The stack trace associated with the failure. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + string stack_trace = 3; + + // A formatted, human-readable string that contains the message, span + // (if available), and trace (if available). The format of this string is + // not specified and is likely to be inconsistent between implementations. + string formatted = 4; + } + + // The success or failure result of the compilation. Mandatory. + oneof result { + // The result of a successful compilation. + CompileSuccess success = 2; + + // The result of a failed compilation. + CompileFailure failure = 3; + } + + // The canonical URLs of all source files loaded during the compilation. + // + // The compiler must ensure that each canonical URL appears only once in + // this list. 
This must include the entrypoint file's URL if either + // `CompileRequest.input.path` or `CompileRequest.StringInput.url` was + // passed. + repeated string loaded_urls = 4; + } + + // An event indicating that a message should be displayed to the user. + message LogEvent { + reserved 1; + + LogEventType type = 2; + + // The text of the message. + string message = 3; + + // The span associated with this message. + optional SourceSpan span = 4; + + // The stack trace associated with this message. + // + // The empty string indicates that no stack trace is available. Otherwise, + // the format of this stack trace is not specified and is likely to be + // inconsistent between implementations. + string stack_trace = 5; + + // A formatted, human-readable string that contains the message, span (if + // available), and trace (if available). The format of this string is not + // specified and is likely to be inconsistent between implementations. + string formatted = 6; + } + + // A request for a custom importer to convert an imported URL to its canonical + // format. + // + // If the URL is not recognized by this importer, or if no stylesheet is found + // at that URL, `CanonicalizeResponse.result` must be `null`. Otherwise, the + // importer must return an absolute URL, including a scheme. + // + // > The host's documentation should encourage the use of file importers (via + // > `CompileRequest.Importer.file_importer_id`, `FileImportRequest`, and + // > `FileImportResponse`) for any importers that simply refer to files on + // > disk. This will allow Sass to handle the logic of resolving partials, + // > file extensions, and index files. + // + // If Sass has already loaded a stylesheet with the returned canonical URL, it + // re-uses the existing parse tree. This means that importers must ensure that + // the same canonical URL always refers to the same stylesheet, *even across + // different importers*. 
Importers must also ensure that any canonicalized + // URLs they return can be passed back to `CanonicalizeRequest` and will be + // returned unchanged. + // + // If this importer's URL format supports file extensions, it should + // canonicalize them the same way as the default filesystem importer: + // + // * The importer should look for stylesheets by adding the prefix `_` to the + // URL's basename, and by adding the extensions `.sass` and `.scss` if the + // URL doesn't already have one of those extensions. For example, if the URL + // was `foo/bar/baz`, the importer would look for: + // + // * `foo/bar/baz.sass` + // * `foo/bar/baz.scss` + // * `foo/bar/_baz.sass` + // * `foo/bar/_baz.scss` + // + // If the URL was foo/bar/baz.scss, the importer would just look for: + // + // * `foo/bar/baz.scss` + // * `foo/bar/_baz.scss` + // + // If the importer finds a stylesheet at more than one of these URLs, it + // should respond with a `CanonicalizeResponse.result.error` indicating that + // the import is ambiguous. Note that if the extension is explicitly + // specified, a stylesheet with another extension may exist without error. + // + // * If none of the possible paths is valid, the importer should perform the + // same resolution on the URL followed by `/index`. In the example above, it + // would look for: + // + // * `foo/bar/baz/_index.sass` + // * `foo/bar/baz/index.sass` + // * `foo/bar/baz/_index.scss` + // * `foo/bar/baz/index.scss` + // + // As above, if the importer finds a stylesheet at more than one of these + // URLs, it should respond with a `CanonicalizeResponse.result.error` + // indicating that the import is ambiguous. + message CanonicalizeRequest { + reserved 2; + + uint32 id = 1; + + // The unique ID of the importer being invoked. This must match an importer + // ID passed to this compilation in `CompileRequest.importers` or + // `CompileRequest.input.string.importer`. + uint32 importer_id = 3; + + // The URL of the import to be canonicalized. 
This may be either absolute or + // relative. + // + // When loading a URL, the compiler must first try resolving that URL + // relative to the canonical URL of the current file, and canonicalizing the + // result using the importer that loaded the current file. If this returns + // `null`, the compiler must then try canonicalizing the original URL with + // each importer in order until one returns something other than `null`. + // That is the result of the import. + string url = 4; + + /// Whether this request comes from an `@import` rule. + /// + /// When evaluating `@import` rules, URLs should canonicalize to an + /// [import-only file] if one exists for the URL being canonicalized. + /// Otherwise, canonicalization should be identical for `@import` and `@use` + /// rules. + /// + /// [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + bool from_import = 5; + } + + // A request for a custom importer to load the contents of a stylesheet. + message ImportRequest { + reserved 2; + + uint32 id = 1; + + // The unique ID of the importer being invoked. This must match an + // `Importer.importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + uint32 importer_id = 3; + + // The canonical URL of the import. This is guaranteed to be a URL returned + // by a `CanonicalizeRequest` to this importer. + string url = 4; + } + + // A request for a custom filesystem importer to load the contents of a + // stylesheet. + // + // A filesystem importer is represented in the compiler as an [importer]. When + // the importer is invoked with a string `string`: + // + // [importer]: https://github.com/sass/sass/tree/main/spec/modules.md#importer + // + // * If `string` is an absolute URL whose scheme is `file`: + // + // * Let `url` be string. + // + // * Otherwise: + // + // * Let `fromImport` be `true` if the importer is being run for an + // `@import` and `false` otherwise. 
+ // + // * Let `response` be the result of sending a `FileImportRequest` with + // `string` as its `url` and `fromImport` as `from_import`. + // + // * If `response.result` is null, return null. + // + // * Otherwise, if `response.result.error` is set, throw an error. + // + // * Otherwise, let `url` be `response.result.file_url`. + // + // * Let `resolved` be the result of [resolving `url`]. + // + // * If `resolved` is null, return null. + // + // * Let `text` be the contents of the file at `resolved`. + // + // * Let `syntax` be: + // * "scss" if `url` ends in `.scss`. + // * "indented" if `url` ends in `.sass`. + // * "css" if `url` ends in `.css`. + // + // > The algorithm for resolving a `file:` URL guarantees that `url` will have + // > one of these extensions. + // + // * Return `text`, `syntax`, and `resolved`. + // + // [resolving `url`]: https://github.com/sass/sass/tree/main/spec/modules.md#resolving-a-file-url + message FileImportRequest { + reserved 2; + + uint32 id = 1; + + // The unique ID of the importer being invoked. This must match an + // `Importer.file_importer_id` passed to this compilation in + // `CompileRequest.importers` or `CompileRequest.input.string.importer`. + uint32 importer_id = 3; + + // The (non-canonicalized) URL of the import. + string url = 4; + + /// Whether this request comes from an `@import` rule. + /// + /// When evaluating `@import` rules, filesystem importers should load an + /// [import-only file] if one exists for the URL being canonicalized. + /// Otherwise, canonicalization should be identical for `@import` and `@use` + /// rules. + /// + /// [import-only file]: https://sass-lang.com/documentation/at-rules/import#import-only-files + bool from_import = 5; + } + + // A request to invoke a custom Sass function and return its result. + message FunctionCallRequest { + reserved 2; + + uint32 id = 1; + + // An identifier that indicates which function to invoke. Mandatory. 
+ oneof identifier { + // The name of the function to invoke. + // + // This must match the name of a function signature the host passed to the + // corresponding `CompileRequest.global_functions` call, including hyphens + // and underscores. + string name = 3; + + // The opaque ID of the function to invoke. + // + // This must match the ID of a `Value.HostFunction` that the host passed + // to the compiler. + uint32 function_id = 4; + } + + // The arguments passed to the function, in the order they appear in the + // function signature passed to `CompileRequest.global_functions`. + // + // The compiler must ensure that a valid number of arguments are passed for + // the given signature, that default argument values are instantiated + // appropriately, and that variable argument lists (`$args...`) are passed + // as `Value.ArgumentList`s. + repeated Value arguments = 5; + } + + // The wrapped message. Mandatory. + oneof message { + ProtocolError error = 1; + CompileResponse compile_response = 2; + LogEvent log_event = 3; + CanonicalizeRequest canonicalize_request = 4; + ImportRequest import_request = 5; + FileImportRequest file_import_request = 6; + FunctionCallRequest function_call_request = 7; + VersionResponse version_response = 8; + } +} + +// Possible ways to format the CSS output. The compiler is not required to +// support all possible options; if the host requests an unsupported style, the +// compiler should choose the closest supported style. +enum OutputStyle { + // Each selector and declaration is written on its own line. + EXPANDED = 0; + + // The entire stylesheet is written on a single line, with as few characters + // as possible. + COMPRESSED = 1; +} + +// Possible syntaxes for a Sass stylesheet. +enum Syntax { + // The CSS-superset `.scss` syntax. + SCSS = 0; + + // The indented `.sass` syntax. + INDENTED = 1; + + // Plain CSS syntax that doesn't support any special Sass features. + CSS = 2; +} + +// The possible types of [LogEvent]. 
+enum LogEventType { + // A warning for something other than a deprecated Sass feature. Often emitted + // due to a stylesheet using the `@warn` rule. + WARNING = 0; + + // A warning indicating that the stylesheet is using a deprecated Sass + // feature. Compilers should not add text like "deprecation warning" to + // deprecation warnings; it's up to the host to determine how to signal that + // to the user. + DEPRECATION_WARNING = 1; + + // A message generated by the user for their own debugging purposes. + DEBUG = 2; +} + +// An error reported when an endpoint violates the embedded Sass protocol. +message ProtocolError { + ProtocolErrorType type = 1; + + // The ID of the request that had an error. This MUST be `4294967295` if the + // request ID couldn't be determined, or if the error is being reported for a + // response or an event. + uint32 id = 2; + + // A human-readable message providing more detail about the error. + string message = 3; +} + +// Potential types of protocol errors. +enum ProtocolErrorType { + // A message was received that couldn't be decoded as an `InboundMessage` (for + // the compiler) or `OutboundMessage` (for the host). + PARSE = 0; + + // A message was received that violated a documented restriction, such as not + // providing a mandatory field. + PARAMS = 1; + + // Something unexpected went wrong within the endpoint. + INTERNAL = 2; +} + +// A chunk of a source file. +message SourceSpan { + // The text covered by the source span. Compilers must guarantee that this is + // the text between `start.offset` and `end.offset` in the source file + // referred to by `url`. + string text = 1; + + // A single point in a source file. + message SourceLocation { + // The 0-based offset of this location within the source file. + uint32 offset = 1; + + // The 0-based line number of this location within the source file. + uint32 line = 2; + + // The 0-based column number of this location within its line. 
+ uint32 column = 3; + } + + // The location of the first character in this span. + SourceLocation start = 2; + + // The location of the first character after this span. + // + // If this is omitted, it indicates that the span is empty and points + // immediately before `start`. In that case, `text` must be empty. + // + // This must not point to a location before `start`. + optional SourceLocation end = 3; + + // The URL of the file to which this span refers. + // + // This may be empty, indicating that the span refers to a + // `CompileRequest.StringInput` file that doesn't specify a URL. + string url = 4; + + // Additional source text surrounding this span. + // + // If this isn't empty, it must contain `text`. Furthermore, `text` must begin + // at column `start.column` of a line in `context`. + // + // This usually contains the full lines the span begins and ends on if the + // span itself doesn't cover the full lines. + string context = 5; +} + +// A SassScript value, passed to and returned by functions. +message Value { + // A SassScript string value. + message String { + // The contents of the string. + string text = 1; + + // Whether the string is quoted or unquoted. + bool quoted = 2; + } + + // A SassScript number value. + message Number { + // The number's numeric value. + double value = 1; + + // The number's numerator units. + // + // The endpoint sending the number must ensure that no numerator units are + // [compatible][] with any denominator units. Such compatible units must be + // simplified away according to the multiplicative factor between them + // defined in the CSS Values and Units spec. + // + // [compatible]: https://www.w3.org/TR/css-values-4/#compat + repeated string numerators = 2; + + // The number's denominator units. + repeated string denominators = 3; + } + + // A SassScript color value, represented as red, green, and blue channels. 
+ // + // All Sass color values can be equivalently represented as `RgbColor`, + // `HslColor`, and `HwbColor` messages without loss of color information that + // can affect CSS rendering. As such, either endpoint may choose to send any + // color value as any one of these three messages. + message RgbColor { + // The color's red channel. May not be above 255. + uint32 red = 1; + + // The color's green channel. May not be above 255. + uint32 green = 2; + + // The color's blue channel. May not be above 255. + uint32 blue = 3; + + // The color's alpha channel. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript color value, represented as hue, saturation, and lightness channels. + message HslColor { + // The color's hue. + double hue = 1; + + // The color's percent saturation. Must be between 0 and 100, + // inclusive. + double saturation = 2; + + // The color's percent lightness. Must be between 0 and 100, + // inclusive. + double lightness = 3; + + // The color's alpha channel. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript color value, represented as hue, whiteness, and blackness + // channels. + message HwbColor { + // The color's hue. + double hue = 1; + + // The color's percent whiteness. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + double whiteness = 2; + + // The color's percent blackness. Must be between 0 and 100, + // inclusive. The sum of `whiteness` and `blackness` must not exceed 100. + double blackness = 3; + + // The color's alpha channel. Mandatory. Must be between 0 and 1, + // inclusive. + double alpha = 4; + } + + // A SassScript list value. + message List { + // The type of separator for this list. Mandatory. + ListSeparator separator = 1; + + // Whether this list has square brackets. Mandatory. + bool has_brackets = 2; + + // The elements of this list. 
+ repeated Value contents = 3; + } + + // A SassScript map value. + message Map { + // A single key/value pair in the map. + message Entry { + // The key this entry is associated with. Mandatory. + Value key = 1; + + // The value associated with this key. Mandatory. + Value value = 2; + } + + // The entries in this map. The sending endpoint must guarantee that no two + // entries have the same key. + repeated Entry entries = 1; + } + + // A first-class function defined in the compiler. New `CompilerFunction`s may + // only be created by the compiler, but the host may pass `CompilerFunction`s + // back to the compiler as long as their IDs match IDs of functions received + // by the host during that same compilation. + message CompilerFunction { + // A unique ID for this function. The compiler is responsible for generating + // this ID and ensuring it's unique across all functions passed to the host + // for this compilation. Mandatory. + uint32 id = 1; + } + + // An anonymous custom function defined in the host. New `HostFunction`s may + // only be created by the host, and `HostFunction`s may *never* be passed from + // the compiler to the host. The compiler must instead pass a + // `CompilerFunction` that wraps the `HostFunction`. + message HostFunction { + // A unique ID for this function. The compiler must pass this ID as + // `OutboundRequest.FunctionCallRequest.id` when invoking this function. The + // host is responsible for generating this ID and ensuring it's unique + // across all functions for *all* compilations. Mandatory. + uint32 id = 1; + + // The signature for this function. Mandatory. + // + // If this isn't a valid Sass function signature that could appear after + // `@function` in a Sass stylesheet (such as `mix($color1, $color2, $weight: + // 50%)`), the compiler must treat it as though the function that returned + // this `HostFunction` threw an error. 
+ // + // > This ensures that the host doesn't need to be able to correctly parse + // > the entire function declaration syntax. + // + // The compiler may not invoke the function by its name, since it's not + // guaranteed to be globally unique. However, it may use the name to + // generate the string representation of this function. + string signature = 2; + } + + // A SassScript argument list value. This represents rest arguments passed to + // a function's `$arg...` parameter. Unlike a normal `List`, an argument list + // has an associated keywords map which tracks keyword arguments passed in + // alongside positional arguments. + // + // For each `ArgumentList` in `FunctionCallRequest.arguments` (including those + // nested within `List`s and `Map`s), the host must track whether its keyword + // arguments were accessed by the user. If they were, it must add its + // `ArgumentList.id` to `FunctionCallResponse.accessed_argument_lists`. + // + // The compiler must treat every `ArgumentList` whose `ArgumentList.id` + // appears in `FunctionCallResponse.accessed_argument_lists` as though it had + // been passed to `meta.keywords()`. + message ArgumentList { + // An ID for this argument list that's unique within the scope of a given + // `FunctionCallRequest`. + // + // The special ID `0` is reserved for `ArgumentList`s created by the host, + // and may not be used by the compiler. These `ArgumentList`s do not need to + // have their IDs added to `FunctionCallResponse.accessed_argument_lists`, + // and the compiler should treat them as though their keywords have always + // been accessed. + uint32 id = 1; + + // The type of separator for this list. The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + ListSeparator separator = 2; + + // The argument list's positional contents. 
The compiler must set this, but + // the host may omit it for `ArgumentList`s that were originally created by + // the compiler (that is, those with a non-0 ID). + repeated Value contents = 3; + + // The argument list's keywords. The compiler must set this, but the host + // may omit it for `ArgumentList`s that were originally created by the + // compiler (that is, those with a non-0 ID). + map keywords = 4; + } + + // A SassScript calculation value. The compiler must send fully [simplified] + // calculations, meaning that simplifying it again will produce the same + // calculation. The host is not required to simplify calculations. + // + // [simplified]: https://github.com/sass/sass/tree/main/spec/types/calculation.md#simplifying-a-calculation + // + // The compiler must simplify any calculations it receives from the host + // before returning them from a function. If this simplification produces an + // error, it should be treated as though the function call threw that error. + // It should *not* be treated as a protocol error. + message Calculation { + // The calculation's name. Mandatory. The host may only set this to names + // that the Sass specification uses to create calculations. + string name = 1; + + // The calculation's arguments. Mandatory. The host must use exactly the + // number of arguments used by the Sass specification for calculations with + // the given `name`. + repeated CalculationValue arguments = 2; + + // A single component of a calculation expression. + message CalculationValue { + // The value of the component. Mandatory. + oneof value { + Number number = 1; + + // An unquoted string, as from a function like `var()` or `env()`. + string string = 2; + + // An unquoted string as created by interpolation for + // backwards-compatibility with older Sass syntax. + string interpolation = 3; + + CalculationOperation operation = 4; + Calculation calculation = 5; + } + } + + // A binary operation that appears in a calculation. 
+ message CalculationOperation { + // The operator to perform. + CalculationOperator operator = 1; + + // The left-hand side of the operation. + CalculationValue left = 2; + + // The right-hand side of the operation. + CalculationValue right = 3; + } + } + + // The value itself. Mandatory. + // + // This is wrapped in a message type rather than used directly to reduce + // repetition, and because oneofs can't be repeated. + oneof value { + String string = 1; + Number number = 2; + RgbColor rgb_color = 3; + HslColor hsl_color = 4; + List list = 5; + Map map = 6; + SingletonValue singleton = 7; + CompilerFunction compiler_function = 8; + HostFunction host_function = 9; + ArgumentList argument_list = 10; + HwbColor hwb_color = 11; + Calculation calculation = 12; + } +} + +// Different types of separators a list can have. +enum ListSeparator { + // List elements are separated by a comma. + COMMA = 0; + + // List elements are separated by whitespace. + SPACE = 1; + + // List elements are separated by a forward slash. + SLASH = 2; + + // The list's separator hasn't yet been determined. This is only allowed for + // singleton and empty lists. + // + // Singleton lists and empty lists don't have separators defined. This means + // that list functions will prefer other lists' separators if possible. + UNDECIDED = 3; +} + +// Singleton SassScript values that have no internal state. +enum SingletonValue { + // The SassScript boolean true value. + TRUE = 0; + + // The SassScript boolean false value. + FALSE = 1; + + // The SassScript null value. + NULL = 2; +} + +// An operator used in a calculation value's operation. +enum CalculationOperator { + // The addition operator. + PLUS = 0; + + // The subtraction operator. + MINUS = 1; + + // The multiplication operator. + TIMES = 2; + + // The division operator. 
+ DIVIDE = 3; +} diff --git a/vendor/github.com/bep/godartsass/v2/options.go b/vendor/github.com/bep/godartsass/v2/options.go new file mode 100644 index 0000000..9bed3e0 --- /dev/null +++ b/vendor/github.com/bep/godartsass/v2/options.go @@ -0,0 +1,231 @@ +package godartsass + +import ( + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/bep/godartsass/v2/internal/embeddedsass" +) + +// Options configures a Transpiler. +type Options struct { + // The path to the Dart Sass wrapper binary, an absolute filename + // if not in $PATH. + // If this is not set, we will try 'dart-sass' + // (or 'dart-sass.bat' on Windows) in the OS $PATH. + // There may be several ways to install this, one would be to + // download it from here: https://github.com/sass/dart-sass/releases + DartSassEmbeddedFilename string + + // Timeout is the duration allowed for dart sass to transpile. + // This was added for the beta6 version of Dart Sass Protocol, + // as running this code against the beta5 binary would hang + // on Execute. + Timeout time.Duration + + // LogEventHandler will, if set, receive log events from Dart Sass, + // e.g. @debug and @warn log statements. + LogEventHandler func(LogEvent) +} + +// LogEvent is a type of log event from Dart Sass. +type LogEventType int + +const ( + // Usually triggered by the @warn directive. + LogEventTypeWarning LogEventType = iota + + // Events trigered for usage of deprecated Sass features. + LogEventTypeDeprecated + + // Triggered by the @debug directive. + LogEventTypeDebug +) + +type LogEvent struct { + // Type is the type of log event. + Type LogEventType + + // Message on the form url:line:col message. + Message string +} + +func (opts *Options) init() error { + if opts.DartSassEmbeddedFilename == "" { + opts.DartSassEmbeddedFilename = defaultDartSassBinaryFilename + } + + if opts.Timeout == 0 { + opts.Timeout = 30 * time.Second + } + + return nil +} + +// ImportResolver allows custom import resolution. 
+// +// CanonicalizeURL should create a canonical version of the given URL if it's +// able to resolve it, else return an empty string. +// +// A canonicalized URL should include a scheme, e.g. 'file:///foo/bar.scss', +// if applicable, see: +// +// https://en.wikipedia.org/wiki/File_URI_scheme +// +// Importers must ensure that the same canonical URL +// always refers to the same stylesheet. +// +// Load loads the canonicalized URL's content. +type ImportResolver interface { + CanonicalizeURL(url string) (string, error) + Load(canonicalizedURL string) (Import, error) +} + +type Import struct { + // The content of the imported file. + Content string + + // The syntax of the imported file. + SourceSyntax SourceSyntax +} + +// Args holds the arguments to Execute. +type Args struct { + // The input source. + Source string + + // The URL of the Source. + // Leave empty if it's unknown. + // Must include a scheme, e.g. 'file:///myproject/main.scss' + // See https://en.wikipedia.org/wiki/File_URI_scheme + // + // Note: There is an open issue for this value when combined with custom + // importers, see https://github.com/sass/dart-sass/issues/24 + URL string + + // Defaults is SCSS. + SourceSyntax SourceSyntax + + // Default is EXPANDED. + OutputStyle OutputStyle + + // If enabled, a sourcemap will be generated and returned in Result. + EnableSourceMap bool + + // If enabled, sources will be embedded in the generated source map. + SourceMapIncludeSources bool + + // Custom resolver to use to resolve imports. + // If set, this will be the first in the resolver chain. + ImportResolver ImportResolver + + // Additional file paths to uses to resolve imports. + IncludePaths []string + + sassOutputStyle embeddedsass.OutputStyle + sassSourceSyntax embeddedsass.Syntax + + // Ordered list starting with options.ImportResolver, then IncludePaths. 
+ sassImporters []*embeddedsass.InboundMessage_CompileRequest_Importer +} + +func (args *Args) init(seq uint32, opts Options) error { + if args.OutputStyle == "" { + args.OutputStyle = OutputStyleExpanded + } + if args.SourceSyntax == "" { + args.SourceSyntax = SourceSyntaxSCSS + } + + v, ok := embeddedsass.OutputStyle_value[string(args.OutputStyle)] + if !ok { + return fmt.Errorf("invalid OutputStyle %q", args.OutputStyle) + } + args.sassOutputStyle = embeddedsass.OutputStyle(v) + + v, ok = embeddedsass.Syntax_value[string(args.SourceSyntax)] + if !ok { + return fmt.Errorf("invalid SourceSyntax %q", args.SourceSyntax) + } + + args.sassSourceSyntax = embeddedsass.Syntax(v) + + if args.ImportResolver != nil { + args.sassImporters = []*embeddedsass.InboundMessage_CompileRequest_Importer{ + { + Importer: &embeddedsass.InboundMessage_CompileRequest_Importer_ImporterId{ + ImporterId: seq, + }, + }, + } + } + + if args.IncludePaths != nil { + for _, p := range args.IncludePaths { + args.sassImporters = append(args.sassImporters, &embeddedsass.InboundMessage_CompileRequest_Importer{Importer: &embeddedsass.InboundMessage_CompileRequest_Importer_Path{ + Path: filepath.Clean(p), + }}) + } + } + + return nil +} + +type ( + // OutputStyle defines the style of the generated CSS. + OutputStyle string + + // SourceSyntax defines the syntax of the source passed in Execute. + SourceSyntax string +) + +const ( + // Expanded (default) output. + // Note that LibSASS and Ruby SASS have more output styles, and their + // default is NESTED. + OutputStyleExpanded OutputStyle = "EXPANDED" + + // Compressed/minified output. + OutputStyleCompressed OutputStyle = "COMPRESSED" +) + +const ( + // SCSS style source syntax (default). + SourceSyntaxSCSS SourceSyntax = "SCSS" + + // Indented or SASS style source syntax. + SourceSyntaxSASS SourceSyntax = "INDENTED" + + // Regular CSS source syntax. + SourceSyntaxCSS SourceSyntax = "CSS" +) + +// ParseOutputStyle will convert s into OutputStyle. 
// Case insensitive, returns OutputStyleExpanded for unknown value.
+// +// Note that the Transpiler is thread safe, and the recommended way of using +// this is to create one and use that for all the SCSS processing needed. +func Start(opts Options) (*Transpiler, error) { + if err := opts.init(); err != nil { + return nil, err + } + + // See https://github.com/golang/go/issues/38736 + bin, err := safeexec.LookPath(opts.DartSassEmbeddedFilename) + if err != nil { + return nil, err + } + cmd := exec.Command(bin) + cmd.Args = append(cmd.Args, "--embedded") + cmd.Stderr = os.Stderr + + conn, err := newConn(cmd) + if err != nil { + return nil, err + } + + if err := conn.Start(); err != nil { + return nil, err + } + + t := &Transpiler{ + opts: opts, + conn: conn, + lenBuf: make([]byte, binary.MaxVarintLen64), + idBuf: make([]byte, binary.MaxVarintLen64), + pending: make(map[uint32]*call), + } + + go t.input() + + return t, nil +} + +// Version returns version information about the Dart Sass frameworks used +// in dartSassEmbeddedFilename. +func Version(dartSassEmbeddedFilename string) (DartSassVersion, error) { + var v DartSassVersion + bin, err := safeexec.LookPath(dartSassEmbeddedFilename) + if err != nil { + return v, err + } + + cmd := exec.Command(bin, "--embedded", "--version") + cmd.Stderr = os.Stderr + + out, err := cmd.Output() + if err != nil { + return v, err + } + + if err := json.Unmarshal(out, &v); err != nil { + return v, err + } + + return v, nil +} + +type DartSassVersion struct { + ProtocolVersion string `json:"protocolVersion"` + CompilerVersion string `json:"compilerVersion"` + ImplementationVersion string `json:"implementationVersion"` + ImplementationName string `json:"implementationName"` + ID int `json:"id"` +} + +// Transpiler controls transpiling of SCSS into CSS. +type Transpiler struct { + opts Options + + // stdin/stdout of the Dart Sass protocol + conn byteReadWriteCloser + lenBuf []byte + idBuf []byte + msgBuf []byte + + closing bool + shutdown bool + + // Protects the sending of messages to Dart Sass. 
+ sendMu sync.Mutex + + mu sync.Mutex // Protects all below. + seq uint32 + pending map[uint32]*call +} + +// IsShutDown checks if all pending calls have been shut down. +// Used in tests. +func (t *Transpiler) IsShutDown() bool { + for _, p := range t.pending { + if p.Error != ErrShutdown { + return false + } + } + return true +} + +// Result holds the result returned from Execute. +type Result struct { + CSS string + SourceMap string +} + +// SassError is the error returned from Execute on compile errors. +type SassError struct { + Message string `json:"message"` + Span struct { + Text string `json:"text"` + Start struct { + Offset int `json:"offset"` + Column int `json:"column"` + } `json:"start"` + End struct { + Offset int `json:"offset"` + Column int `json:"column"` + } `json:"end"` + Url string `json:"url"` + Context string `json:"context"` + } `json:"span"` +} + +func (e SassError) Error() string { + span := e.Span + file := path.Clean(strings.TrimPrefix(span.Url, "file:")) + return fmt.Sprintf("file: %q, context: %q: %s", file, span.Context, e.Message) +} + +// Close closes the stream to the embedded Dart Sass Protocol, shutting it down. +// If it is already shutting down, ErrShutdown is returned. +func (t *Transpiler) Close() error { + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + defer t.mu.Unlock() + + if t.closing { + return ErrShutdown + } + + t.closing = true + err := t.conn.Close() + + if eerr, ok := err.(*exec.ExitError); ok { + if eerr.ExitCode() == 1 { + // This is the expected exit code when shutting down. + return ErrShutdown + } + } + + return err +} + +// Execute transpiles the string Source given in Args into CSS. +// If Dart Sass resturns a "compile failure", the error returned will be +// of type SassError. 
+func (t *Transpiler) Execute(args Args) (Result, error) { + var result Result + + createInboundMessage := func(seq uint32) (*embeddedsass.InboundMessage, error) { + if err := args.init(seq, t.opts); err != nil { + return nil, err + } + + message := &embeddedsass.InboundMessage_CompileRequest_{ + CompileRequest: &embeddedsass.InboundMessage_CompileRequest{ + Importers: args.sassImporters, + Style: args.sassOutputStyle, + Input: &embeddedsass.InboundMessage_CompileRequest_String_{ + String_: &embeddedsass.InboundMessage_CompileRequest_StringInput{ + Syntax: args.sassSourceSyntax, + Source: args.Source, + Url: args.URL, + }, + }, + SourceMap: args.EnableSourceMap, + SourceMapIncludeSources: args.SourceMapIncludeSources, + }, + } + + return &embeddedsass.InboundMessage{ + Message: message, + }, nil + } + + call, err := t.newCall(createInboundMessage, args) + if err != nil { + return result, err + } + + select { + case call = <-call.Done: + case <-time.After(t.opts.Timeout): + return result, errors.New("timeout waiting for Dart Sass to respond; note that this project is only compatible with the Dart Sass Binary found here: https://github.com/sass/dart-sass/releases/") + } + + if call.Error != nil { + return result, call.Error + } + + response := call.Response + csp := response.Message.(*embeddedsass.OutboundMessage_CompileResponse_) + + switch resp := csp.CompileResponse.Result.(type) { + case *embeddedsass.OutboundMessage_CompileResponse_Success: + result.CSS = resp.Success.Css + result.SourceMap = resp.Success.SourceMap + case *embeddedsass.OutboundMessage_CompileResponse_Failure: + asJson, err := json.Marshal(resp.Failure) + if err != nil { + return result, err + } + var sassErr SassError + err = json.Unmarshal(asJson, &sassErr) + if err != nil { + return result, err + } + return result, sassErr + default: + return result, fmt.Errorf("unsupported response type: %T", resp) + } + + return result, nil +} + +func (t *Transpiler) getCall(id uint32) *call { + t.mu.Lock() 
+ defer t.mu.Unlock() + call, found := t.pending[id] + if !found { + panic(fmt.Sprintf("call with ID %d not found", id)) + } + return call +} + +func (t *Transpiler) input() { + var err error + + for err == nil { + // The header is the length in bytes of the remaining message including the compilation ID. + var l uint64 + + l, err = binary.ReadUvarint(t.conn) + if err != nil { + break + } + + plen := int(l) + if len(t.msgBuf) < plen { + t.msgBuf = make([]byte, plen) + } + + buf := t.msgBuf[:plen] + + _, err = io.ReadFull(t.conn, buf) + if err != nil { + break + } + + v, n := binary.Uvarint(buf) + if n <= 0 { + break + } + compilationID := uint32(v) + + buf = buf[n:] + + var msg embeddedsass.OutboundMessage + + if err = proto.Unmarshal(buf, &msg); err != nil { + break + } + + switch c := msg.Message.(type) { + case *embeddedsass.OutboundMessage_CompileResponse_: + // Attach it to the correct pending call. + t.mu.Lock() + call := t.pending[compilationID] + delete(t.pending, compilationID) + t.mu.Unlock() + if call == nil { + err = fmt.Errorf("call with ID %d not found", compilationID) + break + } + call.Response = &msg + call.done() + case *embeddedsass.OutboundMessage_CanonicalizeRequest_: + call := t.getCall(compilationID) + resolved, resolveErr := call.importResolver.CanonicalizeURL(c.CanonicalizeRequest.GetUrl()) + + var response *embeddedsass.InboundMessage_CanonicalizeResponse + if resolveErr != nil { + response = &embeddedsass.InboundMessage_CanonicalizeResponse{ + Id: c.CanonicalizeRequest.GetId(), + Result: &embeddedsass.InboundMessage_CanonicalizeResponse_Error{ + Error: resolveErr.Error(), + }, + } + } else { + var url *embeddedsass.InboundMessage_CanonicalizeResponse_Url + if resolved != "" { + url = &embeddedsass.InboundMessage_CanonicalizeResponse_Url{ + Url: resolved, + } + } + response = &embeddedsass.InboundMessage_CanonicalizeResponse{ + Id: c.CanonicalizeRequest.GetId(), + Result: url, + } + } + + err = t.sendInboundMessage( + compilationID, + 
&embeddedsass.InboundMessage{ + Message: &embeddedsass.InboundMessage_CanonicalizeResponse_{ + CanonicalizeResponse: response, + }, + }, + ) + case *embeddedsass.OutboundMessage_ImportRequest_: + call := t.getCall(compilationID) + url := c.ImportRequest.GetUrl() + imp, loadErr := call.importResolver.Load(url) + sourceSyntax := embeddedsass.Syntax_value[string(imp.SourceSyntax)] + + var response *embeddedsass.InboundMessage_ImportResponse + var sourceMapURL string + + // Dart Sass expect a browser-accessible URL or an empty string. + // If no URL is supplied, a `data:` URL wil be generated + // automatically from `contents` + if hasScheme(url) { + sourceMapURL = url + } + + if loadErr != nil { + response = &embeddedsass.InboundMessage_ImportResponse{ + Id: c.ImportRequest.GetId(), + Result: &embeddedsass.InboundMessage_ImportResponse_Error{ + Error: loadErr.Error(), + }, + } + } else { + response = &embeddedsass.InboundMessage_ImportResponse{ + Id: c.ImportRequest.GetId(), + Result: &embeddedsass.InboundMessage_ImportResponse_Success{ + Success: &embeddedsass.InboundMessage_ImportResponse_ImportSuccess{ + Contents: imp.Content, + SourceMapUrl: &sourceMapURL, + Syntax: embeddedsass.Syntax(sourceSyntax), + }, + }, + } + } + + err = t.sendInboundMessage( + compilationID, + &embeddedsass.InboundMessage{ + Message: &embeddedsass.InboundMessage_ImportResponse_{ + ImportResponse: response, + }, + }, + ) + case *embeddedsass.OutboundMessage_LogEvent_: + if t.opts.LogEventHandler != nil { + var logEvent LogEvent + e := c.LogEvent + if e.Span != nil { + u := e.Span.Url + if u == "" { + u = "stdin" + } + u, _ = url.QueryUnescape(u) + logEvent = LogEvent{ + Type: LogEventType(e.Type), + Message: fmt.Sprintf("%s:%d:%d: %s", u, e.Span.Start.Line, e.Span.Start.Column, c.LogEvent.GetMessage()), + } + } else { + logEvent = LogEvent{ + Type: LogEventType(e.Type), + Message: e.GetMessage(), + } + } + + t.opts.LogEventHandler(logEvent) + + } + + case 
*embeddedsass.OutboundMessage_Error: + err = fmt.Errorf("SASS error: %s", c.Error.GetMessage()) + default: + err = fmt.Errorf("unsupported response message type. %T", msg.Message) + } + + } + + // Terminate pending calls. + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + defer t.mu.Unlock() + + t.shutdown = true + isEOF := err == io.EOF || strings.Contains(err.Error(), "already closed") + if isEOF { + if t.closing { + err = ErrShutdown + } else { + err = io.ErrUnexpectedEOF + } + } + + for _, call := range t.pending { + call.Error = err + call.done() + } +} + +func (t *Transpiler) nextSeq() uint32 { + t.seq++ + // The compilation ID 0 is reserved for `VersionRequest` and `VersionResponse`, + // 4294967295 is reserved for error handling. This is the maximum number representable by a `uint32` so it should be safe to start over. + if t.seq == 0 || t.seq == 4294967295 { + t.seq = 1 + } + return t.seq +} + +func (t *Transpiler) newCall(createInbound func(seq uint32) (*embeddedsass.InboundMessage, error), args Args) (*call, error) { + t.mu.Lock() + id := t.nextSeq() + req, err := createInbound(id) + if err != nil { + t.mu.Unlock() + return nil, err + } + + call := &call{ + Request: req, + Done: make(chan *call, 1), + importResolver: args.ImportResolver, + } + + if t.shutdown || t.closing { + t.mu.Unlock() + call.Error = ErrShutdown + call.done() + return call, nil + } + + t.pending[id] = call + + t.mu.Unlock() + + switch call.Request.Message.(type) { + case *embeddedsass.InboundMessage_CompileRequest_: + default: + return nil, fmt.Errorf("unsupported request message type. 
%T", call.Request.Message) + } + + return call, t.sendInboundMessage(id, call.Request) +} + +func (t *Transpiler) sendInboundMessage(compilationID uint32, message *embeddedsass.InboundMessage) error { + t.sendMu.Lock() + defer t.sendMu.Unlock() + t.mu.Lock() + if t.closing || t.shutdown { + t.mu.Unlock() + return ErrShutdown + } + t.mu.Unlock() + + out, err := proto.Marshal(message) + if err != nil { + return fmt.Errorf("failed to marshal request: %s", err) + } + + // Every message must begin with a varint indicating the length in bytes of + // the remaining message including the compilation ID + reqLen := uint64(len(out)) + compilationIDLen := binary.PutUvarint(t.idBuf, uint64(compilationID)) + headerLen := binary.PutUvarint(t.lenBuf, reqLen+uint64(compilationIDLen)) + _, err = t.conn.Write(t.lenBuf[:headerLen]) + if err != nil { + return err + } + _, err = t.conn.Write(t.idBuf[:compilationIDLen]) + if err != nil { + return err + } + + headerLen, err = t.conn.Write(out) + if headerLen != len(out) { + return errors.New("failed to write payload") + } + + return err +} + +type call struct { + Request *embeddedsass.InboundMessage + Response *embeddedsass.OutboundMessage + importResolver ImportResolver + + Error error + Done chan *call +} + +func (call *call) done() { + select { + case call.Done <- call: + default: + } +} + +func hasScheme(s string) bool { + u, err := url.ParseRequestURI(s) + if err != nil { + return false + } + return u.Scheme != "" +} diff --git a/vendor/github.com/bep/golibsass/LICENSE b/vendor/github.com/bep/golibsass/LICENSE new file mode 100644 index 0000000..7e406ef --- /dev/null +++ b/vendor/github.com/bep/golibsass/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, 
modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/golibsass/libsass/libsasserrors/libsasserrors.go b/vendor/github.com/bep/golibsass/libsass/libsasserrors/libsasserrors.go new file mode 100644 index 0000000..f2c577a --- /dev/null +++ b/vendor/github.com/bep/golibsass/libsass/libsasserrors/libsasserrors.go @@ -0,0 +1,31 @@ +// Copyright © 2022 Bjørn Erik Pedersen . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +// Package libsasserrors holds the error types used by the libsass package. +package libsasserrors + +import ( + "encoding/json" + "fmt" +) + +// JsonToError converts a JSON string to an error. +func JsonToError(jsonstr string) (e Error) { + _ = json.Unmarshal([]byte(jsonstr), &e) + return +} + +// Error is a libsass error. 
+type Error struct { + Status int `json:"status"` + Column int `json:"column"` + File string `json:"file"` + Line int `json:"line"` + Message string `json:"message"` +} + +func (e Error) Error() string { + return fmt.Sprintf("file %q, line %d, col %d: %s ", e.File, e.Line, e.Column, e.Message) +} diff --git a/vendor/github.com/bep/gowebp/LICENSE b/vendor/github.com/bep/gowebp/LICENSE new file mode 100644 index 0000000..b67d493 --- /dev/null +++ b/vendor/github.com/bep/gowebp/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2021 The gowebp Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo.go b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo.go new file mode 100644 index 0000000..bcf9bac --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo.go @@ -0,0 +1,7 @@ +//go:build !dev +// +build !dev + +package libwebp + +// #cgo unix LDFLAGS: -lm +import "C" diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_dev.go b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_dev.go new file mode 100644 index 0000000..9bbaa42 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_dev.go @@ -0,0 +1,8 @@ +//go:build dev +// +build dev + +package libwebp + +// #cgo LDFLAGS: -lwebp +// #cgo CFLAGS: -DLIBWEBP_NO_SRC +import "C" diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_src.go b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_src.go new file mode 100644 index 0000000..dc27780 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/a__cgo_src.go @@ -0,0 +1,7 @@ +//go:build !dev +// +build !dev + +package libwebp + +// #cgo CFLAGS: -I../../libwebp_src +import "C" diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/a__encoder.go b/vendor/github.com/bep/gowebp/internal/libwebp/a__encoder.go new file mode 100644 index 0000000..0df2124 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/a__encoder.go @@ -0,0 +1,214 @@ +package libwebp + +/* +#include +#include // for memset +#ifndef LIBWEBP_NO_SRC +#include +#else +#include +#endif + +static uint8_t* encodeNRGBA(WebPConfig* config, const uint8_t* rgba, int width, int height, int stride, size_t* output_size) { + WebPPicture pic; + WebPMemoryWriter wrt; + int ok; + if (!WebPPictureInit(&pic)) { + return NULL; + } + pic.use_argb = 1; + pic.width = width; + pic.height = height; + pic.writer = WebPMemoryWrite; + pic.custom_ptr = &wrt; + WebPMemoryWriterInit(&wrt); + ok = WebPPictureImportRGBA(&pic, rgba, stride) && WebPEncode(config, &pic); + 
WebPPictureFree(&pic); + if (!ok) { + WebPMemoryWriterClear(&wrt); + return NULL; + } + *output_size = wrt.size; + return wrt.mem; +} + +static uint8_t* encodeGray(WebPConfig* config, uint8_t *y, int width, int height, int stride, size_t* output_size) { + WebPPicture pic; + WebPMemoryWriter wrt; + + int ok; + if (!WebPPictureInit(&pic)) { + return NULL; + } + + pic.use_argb = 0; + pic.width = width; + pic.height = height; + pic.y_stride = stride; + pic.writer = WebPMemoryWrite; + pic.custom_ptr = &wrt; + WebPMemoryWriterInit(&wrt); + + const int uvWidth = (int)(((int64_t)width + 1) >> 1); + const int uvHeight = (int)(((int64_t)height + 1) >> 1); + const int uvStride = uvWidth; + const int uvSize = uvStride * uvHeight; + const int gray = 128; + uint8_t* chroma; + + chroma = malloc(uvSize); + if (!chroma) { + return 0; + } + memset(chroma, gray, uvSize); + + pic.y = y; + pic.u = chroma; + pic.v = chroma; + pic.uv_stride = uvStride; + + ok = WebPEncode(config, &pic); + + free(chroma); + + WebPPictureFree(&pic); + if (!ok) { + WebPMemoryWriterClear(&wrt); + return NULL; + } + *output_size = wrt.size; + return wrt.mem; + +} + +*/ +import "C" + +import ( + "errors" + "image" + "image/draw" + "io" + "unsafe" + + "github.com/bep/gowebp/libwebp/webpoptions" +) + +type ( + Encoder struct { + config *C.WebPConfig + img *image.NRGBA + } +) + +// Encode encodes src into w considering the options in o. +// +// Any src that isn't one of *image.RGBA, *image.NRGBA, or *image.Gray +// will be converted to *image.NRGBA using draw.Draw first. 
+// +func Encode(w io.Writer, src image.Image, o webpoptions.EncodingOptions) error { + config, err := encodingOptionsToCConfig(o) + if err != nil { + return err + } + + var ( + bounds = src.Bounds() + output *C.uchar + size C.size_t + ) + + switch v := src.(type) { + case *image.RGBA: + output = C.encodeNRGBA( + config, + (*C.uint8_t)(&v.Pix[0]), + C.int(bounds.Max.X), + C.int(bounds.Max.Y), + C.int(v.Stride), + &size, + ) + case *image.NRGBA: + output = C.encodeNRGBA( + config, + (*C.uint8_t)(&v.Pix[0]), + C.int(bounds.Max.X), + C.int(bounds.Max.Y), + C.int(v.Stride), + &size, + ) + case *image.Gray: + gray := (*C.uint8_t)(&v.Pix[0]) + output = C.encodeGray( + config, + gray, + C.int(bounds.Max.X), + C.int(bounds.Max.Y), + C.int(v.Stride), + &size, + ) + default: + rgba := ConvertToNRGBA(src) + output = C.encodeNRGBA( + config, + (*C.uint8_t)(&rgba.Pix[0]), + C.int(bounds.Max.X), + C.int(bounds.Max.Y), + C.int(rgba.Stride), + &size, + ) + } + + if output == nil || size == 0 { + return errors.New("failed to encode") + } + defer C.free(unsafe.Pointer(output)) + + _, err = w.Write(((*[1 << 30]byte)(unsafe.Pointer(output)))[0:int(size):int(size)]) + + return err +} + +func ConvertToNRGBA(src image.Image) *image.NRGBA { + dst := image.NewNRGBA(src.Bounds()) + draw.Draw(dst, dst.Bounds(), src, src.Bounds().Min, draw.Src) + + return dst +} + +func encodingOptionsToCConfig(o webpoptions.EncodingOptions) (*C.WebPConfig, error) { + cfg := &C.WebPConfig{} + quality := C.float(o.Quality) + + if C.WebPConfigPreset(cfg, C.WebPPreset(o.EncodingPreset), quality) == 0 { + return nil, errors.New("failed to init encoder config") + } + + if quality == 0 { + // Activate the lossless compression mode with the desired efficiency level + // between 0 (fastest, lowest compression) and 9 (slower, best compression). + // A good default level is '6', providing a fair tradeoff between compression + // speed and final compressed size. 
+ if C.WebPConfigLosslessPreset(cfg, C.int(6)) == 0 { + return nil, errors.New("failed to init lossless preset") + } + } + + cfg.use_sharp_yuv = boolToCInt(o.UseSharpYuv) + + if C.WebPValidateConfig(cfg) == 0 { + return nil, errors.New("failed to validate config") + } + + return cfg, nil +} + +func boolToCInt(b bool) (result C.int) { + result = 0 + + if b { + result = 1 + } + + return +} diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_dec.c new file mode 100644 index 0000000..9821177 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/alpha_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_enc.c new file mode 100644 index 0000000..ad84edc --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/alpha_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing.c new file mode 100644 index 0000000..5cffd5f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/alpha_processing.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_mips_dsp_r2.c new file mode 100644 index 0000000..63c9301 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/alpha_processing_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_neon.c 
b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_neon.c new file mode 100644 index 0000000..7a2f0a1 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/alpha_processing_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse2.c new file mode 100644 index 0000000..4ac93ba --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/alpha_processing_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse41.c new file mode 100644 index 0000000..25f49a4 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alpha_processing_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/alpha_processing_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/alphai_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/alphai_dec.h new file mode 100644 index 0000000..1986ef2 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/alphai_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/alphai_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/analysis_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/analysis_enc.c new file mode 100644 index 0000000..8930ef0 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/analysis_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/analysis_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/anim_decode.c b/vendor/github.com/bep/gowebp/internal/libwebp/anim_decode.c new file mode 100644 index 
0000000..10e4aa5 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/anim_decode.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/demux/anim_decode.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/anim_encode.c b/vendor/github.com/bep/gowebp/internal/libwebp/anim_encode.c new file mode 100644 index 0000000..8d67e22 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/anim_encode.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/anim_encode.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/animi.h b/vendor/github.com/bep/gowebp/internal/libwebp/animi.h new file mode 100644 index 0000000..4838ec5 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/animi.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/animi.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_cost_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_cost_enc.c new file mode 100644 index 0000000..534716f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_cost_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/backward_references_cost_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.c new file mode 100644 index 0000000..3b7c0ae --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/backward_references_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.h b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.h new file mode 100644 index 0000000..98bc48d --- /dev/null +++ 
b/vendor/github.com/bep/gowebp/internal/libwebp/backward_references_enc.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/backward_references_enc.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_inl_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_inl_utils.h new file mode 100644 index 0000000..562e21b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_inl_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/bit_reader_inl_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.c new file mode 100644 index 0000000..8d21c5c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/bit_reader_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.h new file mode 100644 index 0000000..d3d31ab --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/bit_reader_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/bit_reader_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.c new file mode 100644 index 0000000..cff2baa --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/bit_writer_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.h new file mode 100644 index 0000000..704d08f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/bit_writer_utils.h 
@@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/bit_writer_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/buffer_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/buffer_dec.c new file mode 100644 index 0000000..45d4de9 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/buffer_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/buffer_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.c new file mode 100644 index 0000000..c3626e0 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/color_cache_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.h new file mode 100644 index 0000000..7e5e680 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/color_cache_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/color_cache_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/common_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/common_dec.h new file mode 100644 index 0000000..902eee3 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/common_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/common_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/common_sse2.h b/vendor/github.com/bep/gowebp/internal/libwebp/common_sse2.h new file mode 100644 index 0000000..7ab6170 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/common_sse2.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/common_sse2.h" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/common_sse41.h b/vendor/github.com/bep/gowebp/internal/libwebp/common_sse41.h new file mode 100644 index 0000000..5af604f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/common_sse41.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/common_sse41.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/config_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/config_enc.c new file mode 100644 index 0000000..91b1480 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/config_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/config_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost.c new file mode 100644 index 0000000..4164b9c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cost.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.c new file mode 100644 index 0000000..00c1288 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/cost_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.h b/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.h new file mode 100644 index 0000000..adfc646 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_enc.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/cost_enc.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips32.c new file mode 100644 index 0000000..b74ec1e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips32.c @@ -0,0 
+1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cost_mips32.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips_dsp_r2.c new file mode 100644 index 0000000..ab4b3dc --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cost_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost_neon.c new file mode 100644 index 0000000..7a28bdd --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cost_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cost_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/cost_sse2.c new file mode 100644 index 0000000..0a5b8c3 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cost_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cost_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cpu.c b/vendor/github.com/bep/gowebp/internal/libwebp/cpu.c new file mode 100644 index 0000000..9e11c1f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cpu.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cpu.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/cpu.h b/vendor/github.com/bep/gowebp/internal/libwebp/cpu.h new file mode 100644 index 0000000..9b2197f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/cpu.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/cpu.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec.c new file mode 100644 index 0000000..4b2cc05 --- 
/dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_clip_tables.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_clip_tables.c new file mode 100644 index 0000000..e8a4a2e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_clip_tables.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_clip_tables.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips32.c new file mode 100644 index 0000000..60d5571 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips32.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_mips32.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips_dsp_r2.c new file mode 100644 index 0000000..d316259 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_msa.c new file mode 100644 index 0000000..73a3413 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_neon.c new file mode 100644 index 0000000..ae9b7bc --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_neon.c" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse2.c new file mode 100644 index 0000000..b595446 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse41.c new file mode 100644 index 0000000..4865826 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dec_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/dec_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/decode.h b/vendor/github.com/bep/gowebp/internal/libwebp/decode.h new file mode 100644 index 0000000..aebe3fe --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/decode.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/decode.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/demux.c b/vendor/github.com/bep/gowebp/internal/libwebp/demux.c new file mode 100644 index 0000000..82b19b3 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/demux.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/demux/demux.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/demux.h b/vendor/github.com/bep/gowebp/internal/libwebp/demux.h new file mode 100644 index 0000000..f16f69c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/demux.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/demux.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/dsp.h b/vendor/github.com/bep/gowebp/internal/libwebp/dsp.h new file mode 100644 index 0000000..c703488 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/dsp.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include 
"../../libwebp_src/src/dsp/dsp.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc.c new file mode 100644 index 0000000..2a389b4 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips32.c new file mode 100644 index 0000000..8bda30e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips32.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_mips32.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips_dsp_r2.c new file mode 100644 index 0000000..fedc922 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_msa.c new file mode 100644 index 0000000..77d76ba --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_neon.c new file mode 100644 index 0000000..42f75c9 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse2.c new file mode 100644 index 0000000..22346b6 --- /dev/null +++ 
b/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse41.c new file mode 100644 index 0000000..c6e3448 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/enc_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/enc_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/encode.h b/vendor/github.com/bep/gowebp/internal/libwebp/encode.h new file mode 100644 index 0000000..39c3f7a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/encode.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/encode.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/endian_inl_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/endian_inl_utils.h new file mode 100644 index 0000000..7b4d17a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/endian_inl_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/endian_inl_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filter_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/filter_enc.c new file mode 100644 index 0000000..d2e5508 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filter_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/filter_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters.c new file mode 100644 index 0000000..da28d60 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/filters.c" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/filters_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters_mips_dsp_r2.c new file mode 100644 index 0000000..e573b6e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/filters_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters_msa.c new file mode 100644 index 0000000..332c337 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/filters_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters_neon.c new file mode 100644 index 0000000..d327884 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/filters_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters_sse2.c new file mode 100644 index 0000000..e4f6ad9 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/filters_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.c new file mode 100644 index 0000000..451f16a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/filters_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.h new file mode 100644 index 
0000000..12944ae --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/filters_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/filters_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/format_constants.h b/vendor/github.com/bep/gowebp/internal/libwebp/format_constants.h new file mode 100644 index 0000000..bb47e83 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/format_constants.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/format_constants.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/frame_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/frame_dec.c new file mode 100644 index 0000000..123299e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/frame_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/frame_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/frame_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/frame_enc.c new file mode 100644 index 0000000..70b0f7b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/frame_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/frame_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.c new file mode 100644 index 0000000..68f616b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/histogram_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.h b/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.h new file mode 100644 index 0000000..54c3b45 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/histogram_enc.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include 
"../../libwebp_src/src/enc/histogram_enc.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.c new file mode 100644 index 0000000..83c9e85 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/huffman_encode_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.h new file mode 100644 index 0000000..38c612a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_encode_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/huffman_encode_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.c new file mode 100644 index 0000000..d91c05c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/huffman_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.h new file mode 100644 index 0000000..c0f512b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/huffman_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/huffman_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/idec_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/idec_dec.c new file mode 100644 index 0000000..a2994eb --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/idec_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/idec_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/io_dec.c 
b/vendor/github.com/bep/gowebp/internal/libwebp/io_dec.c new file mode 100644 index 0000000..65502df --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/io_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/io_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/iterator_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/iterator_enc.c new file mode 100644 index 0000000..33e4431 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/iterator_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/iterator_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless.c new file mode 100644 index 0000000..de7b085 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless.h b/vendor/github.com/bep/gowebp/internal/libwebp/lossless.h new file mode 100644 index 0000000..997a3eb --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_common.h b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_common.h new file mode 100644 index 0000000..8b872ba --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_common.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_common.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc.c new file mode 100644 index 0000000..2b74c2c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc.c @@ -0,0 +1,3 @@ +#ifndef 
LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips32.c new file mode 100644 index 0000000..24695a7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips32.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc_mips32.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips_dsp_r2.c new file mode 100644 index 0000000..1e4f8bb --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_msa.c new file mode 100644 index 0000000..b912347 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_neon.c new file mode 100644 index 0000000..297a730 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse2.c new file mode 100644 index 0000000..fa7249c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include 
"../../libwebp_src/src/dsp/lossless_enc_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse41.c new file mode 100644 index 0000000..166fc02 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_enc_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_enc_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_mips_dsp_r2.c new file mode 100644 index 0000000..7a83a41 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_msa.c new file mode 100644 index 0000000..8aa5e58 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_neon.c new file mode 100644 index 0000000..3d5769b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse2.c new file mode 100644 index 0000000..80086fa --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_sse2.c" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse41.c new file mode 100644 index 0000000..c0a70a8 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/lossless_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/lossless_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/mips_macro.h b/vendor/github.com/bep/gowebp/internal/libwebp/mips_macro.h new file mode 100644 index 0000000..11a7dee --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/mips_macro.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/mips_macro.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/msa_macro.h b/vendor/github.com/bep/gowebp/internal/libwebp/msa_macro.h new file mode 100644 index 0000000..c4ceb46 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/msa_macro.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/msa_macro.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/mux.h b/vendor/github.com/bep/gowebp/internal/libwebp/mux.h new file mode 100644 index 0000000..50402b2 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/mux.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/mux.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/mux_types.h b/vendor/github.com/bep/gowebp/internal/libwebp/mux_types.h new file mode 100644 index 0000000..3ec86e7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/mux_types.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/mux_types.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/muxedit.c b/vendor/github.com/bep/gowebp/internal/libwebp/muxedit.c new file mode 100644 index 0000000..ed42405 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/muxedit.c @@ 
-0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/muxedit.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/muxi.h b/vendor/github.com/bep/gowebp/internal/libwebp/muxi.h new file mode 100644 index 0000000..2c7c102 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/muxi.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/muxi.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/muxinternal.c b/vendor/github.com/bep/gowebp/internal/libwebp/muxinternal.c new file mode 100644 index 0000000..4a62f2f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/muxinternal.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/muxinternal.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/muxread.c b/vendor/github.com/bep/gowebp/internal/libwebp/muxread.c new file mode 100644 index 0000000..d0d11fc --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/muxread.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/mux/muxread.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/near_lossless_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/near_lossless_enc.c new file mode 100644 index 0000000..13b5b01 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/near_lossless_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/near_lossless_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/neon.h b/vendor/github.com/bep/gowebp/internal/libwebp/neon.h new file mode 100644 index 0000000..3f115b7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/neon.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/neon.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/picture_csp_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/picture_csp_enc.c new file mode 
100644 index 0000000..4c9c2d9 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/picture_csp_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/picture_csp_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/picture_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/picture_enc.c new file mode 100644 index 0000000..23b4c22 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/picture_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/picture_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/picture_psnr_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/picture_psnr_enc.c new file mode 100644 index 0000000..85111d4 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/picture_psnr_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/picture_psnr_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/picture_rescale_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/picture_rescale_enc.c new file mode 100644 index 0000000..69cd1f7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/picture_rescale_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/picture_rescale_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/picture_tools_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/picture_tools_enc.c new file mode 100644 index 0000000..6f0ab08 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/picture_tools_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/picture_tools_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/predictor_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/predictor_enc.c new file mode 100644 index 0000000..fd2bd4e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/predictor_enc.c @@ 
-0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/predictor_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant.h b/vendor/github.com/bep/gowebp/internal/libwebp/quant.h new file mode 100644 index 0000000..68224df --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/quant.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/quant_dec.c new file mode 100644 index 0000000..b55e90b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/quant_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/quant_enc.c new file mode 100644 index 0000000..7807e96 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/quant_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.c new file mode 100644 index 0000000..22473bd --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/quant_levels_dec_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.h new file mode 100644 index 0000000..4a4c440 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_dec_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/quant_levels_dec_utils.h" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.c new file mode 100644 index 0000000..82a4afa --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/quant_levels_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.h new file mode 100644 index 0000000..379225b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/quant_levels_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/quant_levels_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.c new file mode 100644 index 0000000..97b949a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/random_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.h new file mode 100644 index 0000000..0159272 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/random_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/random_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler.c new file mode 100644 index 0000000..b4f2490 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips32.c new file 
mode 100644 index 0000000..7b855fa --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips32.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler_mips32.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips_dsp_r2.c new file mode 100644 index 0000000..0667371 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_msa.c new file mode 100644 index 0000000..0768362 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_neon.c new file mode 100644 index 0000000..07e4685 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_sse2.c new file mode 100644 index 0000000..5fcefb7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/rescaler_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.c new file mode 100644 index 0000000..b1e6b5d --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.c @@ -0,0 +1,3 @@ 
+#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/rescaler_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.h new file mode 100644 index 0000000..41a3aae --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/rescaler_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/rescaler_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.c new file mode 100644 index 0000000..a000f7e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.h b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.h new file mode 100644 index 0000000..1cc83c1 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.c new file mode 100644 index 0000000..eea8791 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_cpu.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.h b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.h new file mode 100644 index 0000000..56d3f5b --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_cpu.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_cpu.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.c 
b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.c new file mode 100644 index 0000000..74d46d5 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_csp.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.h b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.h new file mode 100644 index 0000000..74040e9 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_csp.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_csp.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.c new file mode 100644 index 0000000..653f540 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_dsp.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.h b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.h new file mode 100644 index 0000000..b135b45 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_dsp.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_dsp.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.c new file mode 100644 index 0000000..7f366fd --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_gamma.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.h b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.h new file mode 100644 index 0000000..0c44ef4 --- /dev/null +++ 
b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_gamma.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_gamma.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_neon.c new file mode 100644 index 0000000..c33c4ea --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_sse2.c new file mode 100644 index 0000000..49b0a71 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/sharpyuv_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/sharpyuv/sharpyuv_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/ssim.c b/vendor/github.com/bep/gowebp/internal/libwebp/ssim.c new file mode 100644 index 0000000..9fab7e5 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/ssim.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/ssim.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/ssim_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/ssim_sse2.c new file mode 100644 index 0000000..ac68428 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/ssim_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/ssim_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/syntax_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/syntax_enc.c new file mode 100644 index 0000000..760e5d6 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/syntax_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/syntax_enc.c" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.c new file mode 100644 index 0000000..cd6bb19 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/thread_utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.h new file mode 100644 index 0000000..14ea78c --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/thread_utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/thread_utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/token_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/token_enc.c new file mode 100644 index 0000000..1c3110a --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/token_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/token_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/tree_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/tree_dec.c new file mode 100644 index 0000000..c24a785 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/tree_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/tree_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/tree_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/tree_enc.c new file mode 100644 index 0000000..a8b969e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/tree_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/tree_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/types.h b/vendor/github.com/bep/gowebp/internal/libwebp/types.h new file mode 100644 index 0000000..b6732a5 --- /dev/null +++ 
b/vendor/github.com/bep/gowebp/internal/libwebp/types.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/webp/types.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling.c new file mode 100644 index 0000000..6542e79 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/upsampling.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_mips_dsp_r2.c new file mode 100644 index 0000000..52aa3d1 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/upsampling_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_msa.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_msa.c new file mode 100644 index 0000000..d4dfeb1 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_msa.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/upsampling_msa.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_neon.c new file mode 100644 index 0000000..9aa4424 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/upsampling_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse2.c new file mode 100644 index 0000000..98213cd --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include 
"../../libwebp_src/src/dsp/upsampling_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse41.c new file mode 100644 index 0000000..da6de28 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/upsampling_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/upsampling_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/utils.c b/vendor/github.com/bep/gowebp/internal/libwebp/utils.c new file mode 100644 index 0000000..827540d --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/utils.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/utils.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/utils.h b/vendor/github.com/bep/gowebp/internal/libwebp/utils.h new file mode 100644 index 0000000..e59f2db --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/utils.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/utils/utils.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.c new file mode 100644 index 0000000..0cca3ad --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/vp8_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.h new file mode 100644 index 0000000..a3f2638 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/vp8_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_dec.h new file mode 100644 index 0000000..e625040 --- /dev/null +++ 
b/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/vp8i_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_enc.h b/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_enc.h new file mode 100644 index 0000000..0e7f840 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8i_enc.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/vp8i_enc.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_dec.c b/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_dec.c new file mode 100644 index 0000000..2cc1551 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/vp8l_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_enc.c new file mode 100644 index 0000000..7e682bf --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8l_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/vp8l_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_dec.h new file mode 100644 index 0000000..e7b6ec7 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/vp8li_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_enc.h b/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_enc.h new file mode 100644 index 0000000..abc30ee --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/vp8li_enc.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/vp8li_enc.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/webp_dec.c 
b/vendor/github.com/bep/gowebp/internal/libwebp/webp_dec.c new file mode 100644 index 0000000..c5c70ef --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/webp_dec.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/webp_dec.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/webp_enc.c b/vendor/github.com/bep/gowebp/internal/libwebp/webp_enc.c new file mode 100644 index 0000000..bbb51e5 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/webp_enc.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/enc/webp_enc.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/webpi_dec.h b/vendor/github.com/bep/gowebp/internal/libwebp/webpi_dec.h new file mode 100644 index 0000000..3bf7874 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/webpi_dec.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dec/webpi_dec.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv.c new file mode 100644 index 0000000..2cfef58 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv.h b/vendor/github.com/bep/gowebp/internal/libwebp/yuv.h new file mode 100644 index 0000000..9c6ea9e --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv.h @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv.h" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips32.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips32.c new file mode 100644 index 0000000..3ba5c6f --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips32.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv_mips32.c" +#endif diff --git 
a/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips_dsp_r2.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips_dsp_r2.c new file mode 100644 index 0000000..1292a54 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_mips_dsp_r2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv_mips_dsp_r2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv_neon.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_neon.c new file mode 100644 index 0000000..45e1256 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_neon.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv_neon.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse2.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse2.c new file mode 100644 index 0000000..ea7b3b3 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse2.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv_sse2.c" +#endif diff --git a/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse41.c b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse41.c new file mode 100644 index 0000000..2193b23 --- /dev/null +++ b/vendor/github.com/bep/gowebp/internal/libwebp/yuv_sse41.c @@ -0,0 +1,3 @@ +#ifndef LIBWEBP_NO_SRC +#include "../../libwebp_src/src/dsp/yuv_sse41.c" +#endif diff --git a/vendor/github.com/bep/gowebp/libwebp/encode.go b/vendor/github.com/bep/gowebp/libwebp/encode.go new file mode 100644 index 0000000..370ac61 --- /dev/null +++ b/vendor/github.com/bep/gowebp/libwebp/encode.go @@ -0,0 +1,19 @@ +package libwebp + +import ( + "image" + "io" + + "github.com/bep/gowebp/libwebp/webpoptions" + + "github.com/bep/gowebp/internal/libwebp" +) + +// Encode encodes src as Webp into w using the options in o. +// +// Any src that isn't one of *image.RGBA, *image.NRGBA, or *image.Gray +// will be converted to *image.NRGBA using draw.Draw first. 
+// +func Encode(w io.Writer, src image.Image, o webpoptions.EncodingOptions) error { + return libwebp.Encode(w, src, o) +} diff --git a/vendor/github.com/bep/gowebp/libwebp/webpoptions/options.go b/vendor/github.com/bep/gowebp/libwebp/webpoptions/options.go new file mode 100644 index 0000000..1ed8692 --- /dev/null +++ b/vendor/github.com/bep/gowebp/libwebp/webpoptions/options.go @@ -0,0 +1,24 @@ +package webpoptions + +const ( + EncodingPresetDefault EncodingPreset = iota + EncodingPresetPicture + EncodingPresetPhoto + EncodingPresetDrawing + EncodingPresetIcon + EncodingPresetText +) + +type ( + EncodingPreset int + EncodingOptions struct { + // Quality is a number between 0 and 100. Set to 0 for lossless. + Quality int + + // The encoding preset to use. + EncodingPreset + + // Use sharp (and slow) RGB->YUV conversion. + UseSharpYuv bool + } +) diff --git a/vendor/github.com/bep/lazycache/.gitignore b/vendor/github.com/bep/lazycache/.gitignore new file mode 100644 index 0000000..66fd13c --- /dev/null +++ b/vendor/github.com/bep/lazycache/.gitignore @@ -0,0 +1,15 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/bep/lazycache/LICENSE b/vendor/github.com/bep/lazycache/LICENSE new file mode 100644 index 0000000..0f527e1 --- /dev/null +++ b/vendor/github.com/bep/lazycache/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons 
to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/lazycache/README.md b/vendor/github.com/bep/lazycache/README.md new file mode 100644 index 0000000..4909e6f --- /dev/null +++ b/vendor/github.com/bep/lazycache/README.md @@ -0,0 +1,17 @@ +[![Tests on Linux, MacOS and Windows](https://github.com/bep/lazycache/workflows/Test/badge.svg)](https://github.com/bep/lazycache/actions?query=workflow:Test) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/lazycache)](https://goreportcard.com/report/github.com/bep/lazycache) +[![codecov](https://codecov.io/github/bep/lazycache/branch/main/graph/badge.svg?token=HJCUCT07CH)](https://codecov.io/github/bep/lazycache) +[![GoDoc](https://godoc.org/github.com/bep/lazycache?status.svg)](https://godoc.org/github.com/bep/lazycache) + +**Lazycache** is a simple thread safe in-memory LRU cache. Under the hood it leverages the great [simpleru package in golang-lru](https://github.com/hashicorp/golang-lru), with its exellent performance. One big difference between `golang-lru` and this library is the [GetOrCreate](https://pkg.go.dev/github.com/bep/lazycache#Cache.GetOrCreate) method, which provides: + +* Non-blocking cache priming on cache misses. +* A guarantee that the prime function is only called once for a given key. 
+* The cache's [RWMutex](https://pkg.go.dev/sync#RWMutex) is not locked during the execution of the prime function, which should make it easier to reason about potential deadlocks. + +Other notable features: + +* The API is [generic](https://go.dev/doc/tutorial/generics) +* The cache can be [resized](https://pkg.go.dev/github.com/bep/lazycache#Cache.Resize) while running. +* When the number of entries overflows the defined cache size, the least recently used item gets discarded (LRU). + diff --git a/vendor/github.com/bep/lazycache/codecov.yml b/vendor/github.com/bep/lazycache/codecov.yml new file mode 100644 index 0000000..2e3090a --- /dev/null +++ b/vendor/github.com/bep/lazycache/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + target: auto + threshold: 0.5% + patch: off + +comment: + require_changes: true diff --git a/vendor/github.com/bep/lazycache/lazycache.go b/vendor/github.com/bep/lazycache/lazycache.go new file mode 100644 index 0000000..2c6569e --- /dev/null +++ b/vendor/github.com/bep/lazycache/lazycache.go @@ -0,0 +1,201 @@ +package lazycache + +import ( + "sync" + + "github.com/hashicorp/golang-lru/v2/simplelru" +) + +// New creates a new Cache. +func New[K comparable, V any](options Options[K, V]) *Cache[K, V] { + var onEvict simplelru.EvictCallback[K, *valueWrapper[V]] = nil + if options.OnEvict != nil { + onEvict = func(key K, value *valueWrapper[V]) { + value.wait() + if value.found { + options.OnEvict(key, value.value) + } + } + } + + lru, err := simplelru.NewLRU[K, *valueWrapper[V]](int(options.MaxEntries), onEvict) + if err != nil { + panic(err) + } + c := &Cache[K, V]{ + lru: lru, + } + return c +} + +// Options holds the cache options. +type Options[K comparable, V any] struct { + // MaxEntries is the maximum number of entries that the cache should hold. + // Note that this can also be adjusted after the cache is created with Resize. 
+ MaxEntries int + + // OnEvict is an optional callback that is called when an entry is evicted. + OnEvict func(key K, value V) +} + +// Cache is a thread-safe resizable LRU cache. +type Cache[K comparable, V any] struct { + lru *simplelru.LRU[K, *valueWrapper[V]] + mu sync.RWMutex + + zerov V +} + +// Delete deletes the item with given key from the cache, returning if the +// key was contained. +func (c *Cache[K, V]) Delete(key K) bool { + c.mu.Lock() + defer c.mu.Unlock() + return c.lru.Remove(key) +} + +// DeleteFunc deletes all entries for which the given function returns true. +func (c *Cache[K, V]) DeleteFunc(matches func(key K, item V) bool) int { + c.mu.RLock() + keys := c.lru.Keys() + + var keysToDelete []K + for _, key := range keys { + w, _ := c.lru.Peek(key) + if !w.wait().found { + continue + } + if matches(key, w.value) { + keysToDelete = append(keysToDelete, key) + } + } + c.mu.RUnlock() + + c.mu.Lock() + defer c.mu.Unlock() + var deleteCount int + for _, key := range keysToDelete { + if c.lru.Remove(key) { + deleteCount++ + } + } + + return deleteCount +} + +// Get returns the value associated with key. +func (c *Cache[K, V]) Get(key K) (V, bool) { + c.mu.Lock() + w := c.get(key) + c.mu.Unlock() + if w == nil { + return c.zerov, false + } + w.wait() + return w.value, w.found +} + +// GetOrCreate returns the value associated with key, or creates it if it doesn't. +// It also returns a bool indicating if the value was found in the cache. +// Note that create, the cache prime function, is called once and then not called again for a given key +// unless the cache entry is evicted; it does not block other goroutines from calling GetOrCreate, +// it is not called with the cache lock held. +// Note that any error returned by create will be returned by GetOrCreate and repeated calls with the same key will +// receive the same error. 
+func (c *Cache[K, V]) GetOrCreate(key K, create func(key K) (V, error)) (V, bool, error) { + c.mu.Lock() + w := c.get(key) + if w != nil { + c.mu.Unlock() + w.wait() + // If w.ready is nil, we will repeat any error from the create function to concurrent callers. + return w.value, true, w.err + } + + w = &valueWrapper[V]{ + ready: make(chan struct{}), + } + + // Concurrent access to the same key will see w, but needs to wait for w.ready + // to get the value. + c.lru.Add(key, w) + c.mu.Unlock() + + // Create the value with the lock released. + v, err := create(key) + w.err = err + w.value = v + w.found = err == nil + + close(w.ready) + + if err != nil { + c.Delete(key) + return c.zerov, false, err + } + return v, false, nil +} + +// Resize changes the cache size and returns the number of entries evicted. +func (c *Cache[K, V]) Resize(size int) (evicted int) { + c.mu.Lock() + evicted = c.lru.Resize(size) + c.mu.Unlock() + return evicted +} + +// Set associates value with key. +func (c *Cache[K, V]) Set(key K, value V) { + c.mu.Lock() + c.lru.Add(key, &valueWrapper[V]{value: value, found: true}) + c.mu.Unlock() +} + +func (c *Cache[K, V]) get(key K) *valueWrapper[V] { + w, ok := c.lru.Get(key) + if !ok { + return nil + } + return w +} + +// contains returns true if the given key is in the cache. +// note that this wil also return true if the key is in the cache but the value is not yet ready. +func (c *Cache[K, V]) contains(key K) bool { + c.mu.RLock() + b := c.lru.Contains(key) + c.mu.RUnlock() + return b +} + +// keys returns a slice of the keys in the cache, oldest first. +// note that this wil also include keys that are not yet ready. +func (c *Cache[K, V]) keys() []K { + c.mu.RLock() + defer c.mu.RUnlock() + return c.lru.Keys() +} + +// len returns the number of items in the cache. +// note that this wil also include values that are not yet ready. 
+func (c *Cache[K, V]) len() int { + c.mu.RLock() + defer c.mu.RUnlock() + return c.lru.Len() +} + +// valueWrapper holds a cache value that is not available unless the done channel is nil or closed. +// This construct makes more sense if you look at the code in GetOrCreate. +type valueWrapper[V any] struct { + value V + found bool + err error + ready chan struct{} +} + +func (w *valueWrapper[V]) wait() *valueWrapper[V] { + if w.ready != nil { + <-w.ready + } + return w +} diff --git a/vendor/github.com/bep/logg/.gitignore b/vendor/github.com/bep/logg/.gitignore new file mode 100644 index 0000000..f7b3492 --- /dev/null +++ b/vendor/github.com/bep/logg/.gitignore @@ -0,0 +1,15 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ \ No newline at end of file diff --git a/vendor/github.com/bep/logg/LICENSE b/vendor/github.com/bep/logg/LICENSE new file mode 100644 index 0000000..af71800 --- /dev/null +++ b/vendor/github.com/bep/logg/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 TJ Holowaychuk tj@tjholowaychuk.com + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/bep/logg/README.md b/vendor/github.com/bep/logg/README.md new file mode 100644 index 0000000..2ee2ec1 --- /dev/null +++ b/vendor/github.com/bep/logg/README.md @@ -0,0 +1,238 @@ + +[![Tests on Linux, MacOS and Windows](https://github.com/bep/logg/workflows/Test/badge.svg)](https://github.com/bep/logg/actions?query=workflow:Test) +[![Go Report Card](https://goreportcard.com/badge/github.com/bep/logg)](https://goreportcard.com/report/github.com/bep/logg) +[![GoDoc](https://godoc.org/github.com/bep/logg?status.svg)](https://godoc.org/github.com/bep/logg) + +This is a fork of the exellent [Apex Log](https://github.com/apex/log) library. + +Main changes: + +* Trim unneeded dependencies. +* Make `Fields` into a slice to preserve log order. +* Split the old `Interface` in two and remove all but one `Log` method (see below). +* This allows for lazy creation of messages in `Log(fmt.Stringer)` and ignoring fields added in `LevelLogger`s with levels below the `Logger`s. +* The pointer passed to `HandleLog` is not safe to use outside of the current log chain, and needs to be cloned with `Clone` first if that's needed. +* See [Benchmarks](#benchmarks) for more info. + +This is probably the very fastest structured log library when logging is disabled: + +image + +> One can never have enough log libraries! + +```go +// Logger is the main interface for the logger. +type Logger interface { + // WithLevel returns a new entry with `level` set. 
+ WithLevel(Level) *Entry +} + +// LevelLogger is the logger at a given level. +type LevelLogger interface { + // Log logs a message at the given level using the string from calling s.String(). + // Note that s.String() will not be called if the level is not enabled. + Log(s fmt.Stringer) + + // Logf logs a message at the given level using the format and args from calling fmt.Sprintf(). + // Note that fmt.Sprintf() will not be called if the level is not enabled. + Logf(format string, a ...any) + + // WithLevel returns a new entry with `level` set. + WithLevel(Level) *Entry + + // WithFields returns a new entry with the`fields` in fields set. + // This is a noop if LevelLogger's level is less than Logger's. + WithFields(fields Fielder) *Entry + + // WithLevel returns a new entry with the field f set with value v + // This is a noop if LevelLogger's level is less than Logger's. + WithField(f string, v any) *Entry + + // WithDuration returns a new entry with the "duration" field set + // to the given duration in milliseconds. + // This is a noop if LevelLogger's level is less than Logger's. + WithDuration(time.Duration) *Entry + + // WithError returns a new entry with the "error" set to `err`. + // This is a noop if err is nil or LevelLogger's level is less than Logger's. + WithError(error) *Entry +} +``` + +## Benchmarks + +Benchmarks below are borrowed and adapted from [Zap](https://github.com/uber-go/zap/tree/master/benchmarks). 
+ +### Logging at a disabled level without any structured context + +``` +name time/op +DisabledWithoutFields/apex/log-10 33.9ns ± 0% +DisabledWithoutFields/bep/logg-10 0.28ns ± 0% +DisabledWithoutFields/sirupsen/logrus-10 6.54ns ± 0% +DisabledWithoutFields/rs/zerolog-10 0.31ns ± 0% + +name alloc/op +DisabledWithoutFields/apex/log-10 112B ± 0% +DisabledWithoutFields/bep/logg-10 0.00B +DisabledWithoutFields/sirupsen/logrus-10 16.0B ± 0% +DisabledWithoutFields/rs/zerolog-10 0.00B + +name allocs/op +DisabledWithoutFields/apex/log-10 1.00 ± 0% +DisabledWithoutFields/bep/logg-10 0.00 +DisabledWithoutFields/sirupsen/logrus-10 1.00 ± 0% +DisabledWithoutFields/rs/zerolog-10 0.00 +``` + + +### Logging at a disabled level with some accumulated context + +``` +name time/op +DisabledAccumulatedContext/apex/log-10 0.29ns ± 0% +DisabledAccumulatedContext/bep/logg-10 0.27ns ± 0% +DisabledAccumulatedContext/sirupsen/logrus-10 6.61ns ± 0% +DisabledAccumulatedContext/rs/zerolog-10 0.32ns ± 0% + +name alloc/op +DisabledAccumulatedContext/apex/log-10 0.00B +DisabledAccumulatedContext/bep/logg-10 0.00B +DisabledAccumulatedContext/sirupsen/logrus-10 16.0B ± 0% +DisabledAccumulatedContext/rs/zerolog-10 0.00B + +name allocs/op +DisabledAccumulatedContext/apex/log-10 0.00 +DisabledAccumulatedContext/bep/logg-10 0.00 +DisabledAccumulatedContext/sirupsen/logrus-10 1.00 ± 0% +DisabledAccumulatedContext/rs/zerolog-10 0.00 +``` + +### Logging at a disabled level, adding context at each log site + +``` +name time/op +DisabledAddingFields/apex/log-10 328ns ± 0% +DisabledAddingFields/bep/logg-10 0.38ns ± 0% +DisabledAddingFields/sirupsen/logrus-10 610ns ± 0% +DisabledAddingFields/rs/zerolog-10 10.5ns ± 0% + +name alloc/op +DisabledAddingFields/apex/log-10 886B ± 0% +DisabledAddingFields/bep/logg-10 0.00B +DisabledAddingFields/sirupsen/logrus-10 1.52kB ± 0% +DisabledAddingFields/rs/zerolog-10 24.0B ± 0% + +name allocs/op +DisabledAddingFields/apex/log-10 10.0 ± 0% +DisabledAddingFields/bep/logg-10 
0.00 +DisabledAddingFields/sirupsen/logrus-10 12.0 ± 0% +DisabledAddingFields/rs/zerolog-10 1.00 ± 0% +``` + +### Logging without any structured context + +``` +name time/op +WithoutFields/apex/log-10 964ns ± 0% +WithoutFields/bep/logg-10 100ns ± 0% +WithoutFields/go-kit/kit/log-10 232ns ± 0% +WithoutFields/inconshreveable/log15-10 2.13µs ± 0% +WithoutFields/sirupsen/logrus-10 866ns ± 0% +WithoutFields/stdlib.Println-10 7.08ns ± 0% +WithoutFields/stdlib.Printf-10 56.4ns ± 0% +WithoutFields/rs/zerolog-10 30.9ns ± 0% +WithoutFields/rs/zerolog.Formatting-10 1.33µs ± 0% +WithoutFields/rs/zerolog.Check-10 32.1ns ± 0% + +name alloc/op +WithoutFields/apex/log-10 352B ± 0% +WithoutFields/bep/logg-10 56.0B ± 0% +WithoutFields/go-kit/kit/log-10 520B ± 0% +WithoutFields/inconshreveable/log15-10 1.43kB ± 0% +WithoutFields/sirupsen/logrus-10 1.14kB ± 0% +WithoutFields/stdlib.Println-10 16.0B ± 0% +WithoutFields/stdlib.Printf-10 136B ± 0% +WithoutFields/rs/zerolog-10 0.00B +WithoutFields/rs/zerolog.Formatting-10 1.92kB ± 0% +WithoutFields/rs/zerolog.Check-10 0.00B + +name allocs/op +WithoutFields/apex/log-10 6.00 ± 0% +WithoutFields/bep/logg-10 2.00 ± 0% +WithoutFields/go-kit/kit/log-10 9.00 ± 0% +WithoutFields/inconshreveable/log15-10 20.0 ± 0% +WithoutFields/sirupsen/logrus-10 23.0 ± 0% +WithoutFields/stdlib.Println-10 1.00 ± 0% +WithoutFields/stdlib.Printf-10 6.00 ± 0% +WithoutFields/rs/zerolog-10 0.00 +WithoutFields/rs/zerolog.Formatting-10 58.0 ± 0% +WithoutFields/rs/zerolog.Check-10 0.00 +``` + + +### Logging with some accumulated context + +``` +name time/op +AccumulatedContext/apex/log-10 12.7µs ± 0% +AccumulatedContext/bep/logg-10 1.52µs ± 0% +AccumulatedContext/go-kit/kit/log-10 2.52µs ± 0% +AccumulatedContext/inconshreveable/log15-10 9.36µs ± 0% +AccumulatedContext/sirupsen/logrus-10 3.41µs ± 0% +AccumulatedContext/rs/zerolog-10 37.9ns ± 0% +AccumulatedContext/rs/zerolog.Check-10 34.0ns ± 0% +AccumulatedContext/rs/zerolog.Formatting-10 1.36µs ± 0% + +name alloc/op 
+AccumulatedContext/apex/log-10 3.30kB ± 0% +AccumulatedContext/bep/logg-10 1.16kB ± 0% +AccumulatedContext/go-kit/kit/log-10 3.67kB ± 0% +AccumulatedContext/inconshreveable/log15-10 3.31kB ± 0% +AccumulatedContext/sirupsen/logrus-10 4.73kB ± 0% +AccumulatedContext/rs/zerolog-10 0.00B +AccumulatedContext/rs/zerolog.Check-10 0.00B +AccumulatedContext/rs/zerolog.Formatting-10 1.92kB ± 0% + +name allocs/op +AccumulatedContext/apex/log-10 53.0 ± 0% +AccumulatedContext/bep/logg-10 25.0 ± 0% +AccumulatedContext/go-kit/kit/log-10 56.0 ± 0% +AccumulatedContext/inconshreveable/log15-10 70.0 ± 0% +AccumulatedContext/sirupsen/logrus-10 68.0 ± 0% +AccumulatedContext/rs/zerolog-10 0.00 +AccumulatedContext/rs/zerolog.Check-10 0.00 +AccumulatedContext/rs/zerolog.Formatting-10 58.0 ± 0% +``` + + +## Logging with additional context at each log site + +``` +name time/op +AddingFields/apex/log-10 13.2µs ± 0% +AddingFields/bep/logg-10 1.79µs ± 0% +AddingFields/go-kit/kit/log-10 2.23µs ± 0% +AddingFields/inconshreveable/log15-10 14.3µs ± 0% +AddingFields/sirupsen/logrus-10 4.46µs ± 0% +AddingFields/rs/zerolog-10 398ns ± 0% +AddingFields/rs/zerolog.Check-10 389ns ± 0% + +name alloc/op +AddingFields/apex/log-10 4.19kB ± 0% +AddingFields/bep/logg-10 2.02kB ± 0% +AddingFields/go-kit/kit/log-10 3.31kB ± 0% +AddingFields/inconshreveable/log15-10 6.68kB ± 0% +AddingFields/sirupsen/logrus-10 6.27kB ± 0% +AddingFields/rs/zerolog-10 24.0B ± 0% +AddingFields/rs/zerolog.Check-10 24.0B ± 0% + +name allocs/op +AddingFields/apex/log-10 63.0 ± 0% +AddingFields/bep/logg-10 34.0 ± 0% +AddingFields/go-kit/kit/log-10 57.0 ± 0% +AddingFields/inconshreveable/log15-10 74.0 ± 0% +AddingFields/sirupsen/logrus-10 79.0 ± 0% +AddingFields/rs/zerolog-10 1.00 ± 0% +AddingFields/rs/zerolog.Check-10 1.00 ± 0% +``` diff --git a/vendor/github.com/bep/logg/doc.go b/vendor/github.com/bep/logg/doc.go new file mode 100644 index 0000000..e3a93de --- /dev/null +++ b/vendor/github.com/bep/logg/doc.go @@ -0,0 +1,4 @@ +/* 
+package logg implements a simple structured logging API. +*/ +package logg diff --git a/vendor/github.com/bep/logg/entry.go b/vendor/github.com/bep/logg/entry.go new file mode 100644 index 0000000..37283a6 --- /dev/null +++ b/vendor/github.com/bep/logg/entry.go @@ -0,0 +1,166 @@ +package logg + +import ( + "fmt" + "strings" + "time" +) + +// assert interface compliance. +var ( + _ LevelLogger = (*Entry)(nil) +) + +// Entry represents a single log entry at a given log level. +type Entry struct { + logger *logger + + Level Level `json:"level"` + Timestamp time.Time `json:"timestamp"` + Fields Fields `json:"fields,omitempty"` + Message string `json:"message"` + + fieldsAddedCounter int +} + +// NewEntry returns a new entry for `log`. +func NewEntry(log *logger) *Entry { + return &Entry{ + logger: log, + } +} + +func (e Entry) WithLevel(level Level) *Entry { + e.Level = level + return &e +} + +func (e *Entry) WithFields(fielder Fielder) *Entry { + if e.isLevelDisabled() { + return e + } + x := *e + fields := fielder.Fields() + x.fieldsAddedCounter += len(fields) + x.Fields = append(x.Fields, fields...) + if x.fieldsAddedCounter > 100 { + // This operation will eventually also be performed on the final entry, + // do it here to avoid the slice to grow indefinitely. + x.mergeFields() + x.fieldsAddedCounter = 0 + } + return &x +} + +func (e *Entry) WithField(key string, value any) *Entry { + if e.isLevelDisabled() { + return e + } + return e.WithFields(Fields{{key, value}}) +} + +func (e *Entry) WithDuration(d time.Duration) *Entry { + if e.isLevelDisabled() { + return e + } + return e.WithField("duration", d.Milliseconds()) +} + +// WithError returns a new entry with the "error" set to `err`. +// +// The given error may implement .Fielder, if it does the method +// will add all its `.Fields()` into the returned entry. 
+func (e *Entry) WithError(err error) *Entry { + if err == nil || e.isLevelDisabled() { + return e + } + + ctx := e.WithField("error", err.Error()) + + if s, ok := err.(stackTracer); ok { + frame := s.StackTrace()[0] + + name := fmt.Sprintf("%n", frame) + file := fmt.Sprintf("%+s", frame) + line := fmt.Sprintf("%d", frame) + + parts := strings.Split(file, "\n\t") + if len(parts) > 1 { + file = parts[1] + } + + ctx = ctx.WithField("source", fmt.Sprintf("%s: %s:%s", name, file, line)) + } + + if f, ok := err.(Fielder); ok { + ctx = ctx.WithFields(f) + } + + return ctx +} + +func (e *Entry) isLevelDisabled() bool { + return e.Level < e.logger.Level +} + +// Log a message at the given level. +func (e *Entry) Log(s fmt.Stringer) { + e.logger.log(e, s) +} + +// Log a message at the given level. +func (e *Entry) Logf(format string, a ...any) { + e.logger.log(e, StringFunc(func() string { + return fmt.Sprintf(format, a...) + })) + +} + +// Clone returns a new Entry with the same fields. +func (e *Entry) Clone() *Entry { + x := *e + x.Fields = make(Fields, len(e.Fields)) + copy(x.Fields, e.Fields) + return &x +} + +func (e *Entry) reset() { + e.logger = nil + e.Level = 0 + e.Fields = e.Fields[:0] + e.Message = "" + e.Timestamp = time.Time{} +} + +// Remove any early entries with the same name. +func (e *Entry) mergeFields() { + n := 0 + for i, f := range e.Fields { + keep := true + for j := i + 1; j < len(e.Fields); j++ { + if e.Fields[j].Name == f.Name { + keep = false + break + } + } + if keep { + e.Fields[n] = f + n++ + } + } + e.Fields = e.Fields[:n] +} + +// finalize populates dst with Level and Fields merged from e and Message and Timestamp set. 
+func (e *Entry) finalize(dst *Entry, msg string) { + dst.Message = msg + dst.Timestamp = e.logger.Clock.Now() + dst.Level = e.Level + if cap(dst.Fields) < len(e.Fields) { + dst.Fields = make(Fields, len(e.Fields)) + } else { + dst.Fields = dst.Fields[:len(e.Fields)] + } + copy(dst.Fields, e.Fields) + dst.mergeFields() +} diff --git a/vendor/github.com/bep/logg/handler.go b/vendor/github.com/bep/logg/handler.go new file mode 100644 index 0000000..5effe24 --- /dev/null +++ b/vendor/github.com/bep/logg/handler.go @@ -0,0 +1,27 @@ +package logg + +// Handler is used to handle log events, outputting them to +// stdio or sending them to remote services. See the "handlers" +// directory for implementations. +// +// It is left up to Handlers to implement thread-safety. +type Handler interface { + // HandleLog is invoked for each log event. + // Note that if the Entry is going to be used after the call to HandleLog + // in the handler chain returns, it must be cloned with Clone(). See + // the memory.Handler implementation for an example. + // + // The Entry can be modified if needed, e.g. when passed down via + // a multi.Handler (e.g. to sanitize the data). + HandleLog(e *Entry) error +} + +// The HandlerFunc type is an adapter to allow the use of ordinary functions as +// log handlers. If f is a function with the appropriate signature, +// HandlerFunc(f) is a Handler object that calls f. +type HandlerFunc func(*Entry) error + +// HandleLog calls f(e). +func (f HandlerFunc) HandleLog(e *Entry) error { + return f(e) +} diff --git a/vendor/github.com/bep/logg/handlers/multi/multi.go b/vendor/github.com/bep/logg/handlers/multi/multi.go new file mode 100644 index 0000000..b27e5d6 --- /dev/null +++ b/vendor/github.com/bep/logg/handlers/multi/multi.go @@ -0,0 +1,32 @@ +// Package multi implements a handler which invokes a number of handlers. +package multi + +import ( + "github.com/bep/logg" +) + +// Handler implementation. 
+type Handler struct { + Handlers []logg.Handler +} + +// New handler. +func New(h ...logg.Handler) *Handler { + return &Handler{ + Handlers: h, + } +} + +// HandleLog implements logg.Handler. +func (h *Handler) HandleLog(e *logg.Entry) error { + for _, handler := range h.Handlers { + // TODO(tj): maybe just write to stderr here, definitely not ideal + // to miss out logging to a more critical handler if something + // goes wrong + if err := handler.HandleLog(e); err != nil { + return err + } + } + + return nil +} diff --git a/vendor/github.com/bep/logg/interfaces.go b/vendor/github.com/bep/logg/interfaces.go new file mode 100644 index 0000000..57511db --- /dev/null +++ b/vendor/github.com/bep/logg/interfaces.go @@ -0,0 +1,43 @@ +package logg + +import ( + "fmt" + "time" +) + +// Logger is the main interface for the logger. +type Logger interface { + // WithLevel returns a new entry with `level` set. + WithLevel(Level) *Entry +} + +// LevelLogger is the logger at a given level. +type LevelLogger interface { + // Log logs a message at the given level using the string from calling s.String(). + // Note that s.String() will not be called if the level is not enabled. + Log(s fmt.Stringer) + + // Logf logs a message at the given level using the format and args from calling fmt.Sprintf(). + // Note that fmt.Sprintf() will not be called if the level is not enabled. + Logf(format string, a ...any) + + // WithLevel returns a new entry with `level` set. + WithLevel(Level) *Entry + + // WithFields returns a new entry with the`fields` in fields set. + // This is a noop if LevelLogger's level is less than Logger's. + WithFields(fields Fielder) *Entry + + // WithLevel returns a new entry with the field f set with value v + // This is a noop if LevelLogger's level is less than Logger's. + WithField(f string, v any) *Entry + + // WithDuration returns a new entry with the "duration" field set + // to the given duration in milliseconds. 
+ // This is a noop if LevelLogger's level is less than Logger's. + WithDuration(time.Duration) *Entry + + // WithError returns a new entry with the "error" set to `err`. + // This is a noop if err is nil or LevelLogger's level is less than Logger's. + WithError(error) *Entry +} diff --git a/vendor/github.com/bep/logg/levels.go b/vendor/github.com/bep/logg/levels.go new file mode 100644 index 0000000..0ab6cf2 --- /dev/null +++ b/vendor/github.com/bep/logg/levels.go @@ -0,0 +1,81 @@ +package logg + +import ( + "bytes" + "errors" + "strings" +) + +// ErrInvalidLevel is returned if the severity level is invalid. +var ErrInvalidLevel = errors.New("invalid level") + +// Level of severity. +type Level int + +// Log levels. +const ( + LevelInvalid Level = iota + LevelTrace + LevelDebug + LevelInfo + LevelWarn + LevelError +) + +var levelNames = [...]string{ + LevelTrace: "trace", + LevelDebug: "debug", + LevelInfo: "info", + LevelWarn: "warn", + LevelError: "error", +} + +var levelStrings = map[string]Level{ + "trace": LevelTrace, + "debug": LevelDebug, + "info": LevelInfo, + "warn": LevelWarn, + "warning": LevelWarn, + "error": LevelError, +} + +// String implementation. +func (l Level) String() string { + return levelNames[l] +} + +// MarshalJSON implementation. +func (l Level) MarshalJSON() ([]byte, error) { + return []byte(`"` + l.String() + `"`), nil +} + +// UnmarshalJSON implementation. +func (l *Level) UnmarshalJSON(b []byte) error { + v, err := ParseLevel(string(bytes.Trim(b, `"`))) + if err != nil { + return err + } + + *l = v + return nil +} + +// ParseLevel parses level string. +func ParseLevel(s string) (Level, error) { + l, ok := levelStrings[strings.ToLower(s)] + if !ok { + return LevelInvalid, ErrInvalidLevel + } + + return l, nil +} + +// MustParseLevel parses level string or panics. 
+func MustParseLevel(s string) Level { + l, err := ParseLevel(s) + if err != nil { + panic("invalid log level") + } + + return l +} diff --git a/vendor/github.com/bep/logg/logger.go b/vendor/github.com/bep/logg/logger.go new file mode 100644 index 0000000..7cee32c --- /dev/null +++ b/vendor/github.com/bep/logg/logger.go @@ -0,0 +1,159 @@ +package logg + +import ( + "fmt" + stdlog "log" + "time" + + "github.com/bep/clocks" +) + +// assert interface compliance. +var _ Logger = (*logger)(nil) + +// String implements fmt.Stringer and can be used directly in +// the log methods. +type String string + +// StringFunc is a function that returns a string. +// It also implements the fmt.Stringer interface and +// can therefore be used as argument to the log methods. +type StringFunc func() string + +func (f StringFunc) String() string { + return f() +} + +func (s String) String() string { + return string(s) +} + +// Fielder is an interface for providing fields to custom types. +type Fielder interface { + Fields() Fields +} + +func NewFieldsFunc(fn func() Fields) FieldsFunc { + return FieldsFunc(fn) +} + +type FieldsFunc func() Fields + +func (f FieldsFunc) Fields() Fields { + return f() +} + +// Field holds a named value. +type Field struct { + Name string `json:"name"` + Value any `json:"value"` +} + +// Fields represents a slice of entry level data used for structured logging. +type Fields []Field + +// Fields implements Fielder. +func (f Fields) Fields() Fields { + return f +} + +// Options is the set of options used to configure a logger. +type Options struct { + // Level is the minimum level to log at. + // If not set, defaults to InfoLevel. + Level Level + + // Handler is the log handler to use. + Handler Handler + + // Clock is the clock to use for timestamps. + // If not set, the system clock is used. + Clock Clock +} + +// New returns a new logger. 
+func New(cfg Options) Logger { + if cfg.Handler == nil { + panic("handler cannot be nil") + } + + if cfg.Level <= 0 || cfg.Level > LevelError { + panic("log level is out of range") + } + + if cfg.Clock == nil { + cfg.Clock = clocks.System() + } + + if cfg.Level == 0 { + cfg.Level = LevelInfo + } + + return &logger{ + Handler: cfg.Handler, + Level: cfg.Level, + Clock: cfg.Clock, + } +} + +// logger represents a logger with configurable Level and Handler. +type logger struct { + Handler Handler + Level Level + Clock Clock +} + +// Clock provides the current time. +type Clock interface { + Now() time.Time +} + +// WithLevel returns a new entry with `level` set. +func (l *logger) WithLevel(level Level) *Entry { + return NewEntry(l).WithLevel(level) +} + +// WithFields returns a new entry with `fields` set. +func (l *logger) WithFields(fields Fielder) *Entry { + return NewEntry(l).WithFields(fields.Fields()) +} + +// WithField returns a new entry with the `key` and `value` set. +// +// Note that the `key` should not have spaces in it - use camel +// case or underscores +func (l *logger) WithField(key string, value any) *Entry { + return NewEntry(l).WithField(key, value) +} + +// WithDuration returns a new entry with the "duration" field set +// to the given duration in milliseconds. +func (l *logger) WithDuration(d time.Duration) *Entry { + return NewEntry(l).WithDuration(d) +} + +// WithError returns a new entry with the "error" set to `err`. +func (l *logger) WithError(err error) *Entry { + return NewEntry(l).WithError(err) +} + +// ErrStopLogEntry is a sentinel error that can be returned from a +// handler to stop the entry from being passed to the next handler. +var ErrStopLogEntry = fmt.Errorf("stop log entry") + +// log the message, invoking the handler. 
+func (l *logger) log(e *Entry, s fmt.Stringer) { + if e.Level < l.Level { + return + } + + finalized := objectPools.GetEntry() + defer objectPools.PutEntry(finalized) + e.finalize(finalized, s.String()) + + if err := l.Handler.HandleLog(finalized); err != nil { + if err != ErrStopLogEntry { + stdlog.Printf("error logging: %s", err) + } + } +} diff --git a/vendor/github.com/bep/logg/objectpools.go b/vendor/github.com/bep/logg/objectpools.go new file mode 100644 index 0000000..45d91d9 --- /dev/null +++ b/vendor/github.com/bep/logg/objectpools.go @@ -0,0 +1,25 @@ +package logg + +import "sync" + +var objectPools = &objectPoolsHolder{ + entryPool: &sync.Pool{ + New: func() any { + return &Entry{} + }, + }, +} + +type objectPoolsHolder struct { + // This is only used for the event copy passed to HandleLog. + entryPool *sync.Pool +} + +func (h *objectPoolsHolder) GetEntry() *Entry { + return h.entryPool.Get().(*Entry) +} + +func (h *objectPoolsHolder) PutEntry(e *Entry) { + e.reset() + h.entryPool.Put(e) +} diff --git a/vendor/github.com/bep/logg/stack.go b/vendor/github.com/bep/logg/stack.go new file mode 100644 index 0000000..e637f3f --- /dev/null +++ b/vendor/github.com/bep/logg/stack.go @@ -0,0 +1,8 @@ +package logg + +import "github.com/pkg/errors" + +// stackTracer interface. 
+type stackTracer interface { + StackTrace() errors.StackTrace +} diff --git a/vendor/github.com/bep/tmc/.gitignore b/vendor/github.com/bep/tmc/.gitignore new file mode 100644 index 0000000..847a33f --- /dev/null +++ b/vendor/github.com/bep/tmc/.gitignore @@ -0,0 +1,13 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test +coverage.txt + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out diff --git a/vendor/github.com/bep/tmc/LICENSE b/vendor/github.com/bep/tmc/LICENSE new file mode 100644 index 0000000..c86c321 --- /dev/null +++ b/vendor/github.com/bep/tmc/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Bjørn Erik Pedersen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bep/tmc/README.md b/vendor/github.com/bep/tmc/README.md new file mode 100644 index 0000000..b00a58f --- /dev/null +++ b/vendor/github.com/bep/tmc/README.md @@ -0,0 +1,200 @@ +

Codec for a Typed Map

+

Provides round-trip serialization of typed Go maps.

+

+ + +

+ + +### How to Use + +See the [GoDoc](https://godoc.org/github.com/bep/tmc) for some basic examples and how to configure custom codec, adapters etc. + +### Why? + +Text based serialization formats like JSON and YAML are convenient, but when used with Go maps, most type information gets lost in translation. + +Listed below is a round-trip example in JSON (see https://play.golang.org/p/zxt-wi4Ljz3 for a runnable version): + +```go +package main + +import ( + "encoding/json" + "log" + "math/big" + "time" + + "github.com/kr/pretty" +) + +func main() { + mi := map[string]interface{}{ + "vstring": "Hello", + "vint": 32, + "vrat": big.NewRat(1, 2), + "vtime": time.Now(), + "vduration": 3 * time.Second, + "vsliceint": []int{1, 3, 4}, + "nested": map[string]interface{}{ + "vint": 55, + "vduration": 5 * time.Second, + }, + "nested-typed-int": map[string]int{ + "vint": 42, + }, + "nested-typed-duration": map[string]time.Duration{ + "v1": 5 * time.Second, + "v2": 10 * time.Second, + }, + } + + data, err := json.Marshal(mi) + if err != nil { + log.Fatal(err) + } + m := make(map[string]interface{}) + if err := json.Unmarshal(data, &m); err != nil { + log.Fatal(err) + } + + pretty.Print(m) + +} +``` + +This prints: + +```go +map[string]interface {}{ + "vint": float64(32), + "vrat": "1/2", + "vtime": "2009-11-10T23:00:00Z", + "vduration": float64(3e+09), + "vsliceint": []interface {}{ + float64(1), + float64(3), + float64(4), + }, + "vstring": "Hello", + "nested": map[string]interface {}{ + "vduration": float64(5e+09), + "vint": float64(55), + }, + "nested-typed-duration": map[string]interface {}{ + "v2": float64(1e+10), + "v1": float64(5e+09), + }, + "nested-typed-int": map[string]interface {}{ + "vint": float64(42), + }, +} +``` + +And that is very different from the origin: + +* All numbers are now `float64` +* `time.Duration` is also `float64` +* `time.Now` and `*big.Rat` are strings +* Slices are `[]interface {}`, maps `map[string]interface {}` + +So, for structs, you can 
work around some of the limitations above with custom `MarshalJSON`, `UnmarshalJSON`, `MarshalText` and `UnmarshalText`. + +For the commonly used flexible and schema-less`map[string]interface {}` this is, as I'm aware of, not an option. + +Using this library, the above can be written to (see https://play.golang.org/p/PlDetQP5aWd for a runnable example): + +```go +package main + +import ( + "log" + "math/big" + "time" + + "github.com/bep/tmc" + + "github.com/kr/pretty" +) + +func main() { + mi := map[string]interface{}{ + "vstring": "Hello", + "vint": 32, + "vrat": big.NewRat(1, 2), + "vtime": time.Now(), + "vduration": 3 * time.Second, + "vsliceint": []int{1, 3, 4}, + "nested": map[string]interface{}{ + "vint": 55, + "vduration": 5 * time.Second, + }, + "nested-typed-int": map[string]int{ + "vint": 42, + }, + "nested-typed-duration": map[string]time.Duration{ + "v1": 5 * time.Second, + "v2": 10 * time.Second, + }, + } + + c, err := tmc.New() + if err != nil { + log.Fatal(err) + } + + data, err := c.Marshal(mi) + if err != nil { + log.Fatal(err) + } + m := make(map[string]interface{}) + if err := c.Unmarshal(data, &m); err != nil { + log.Fatal(err) + } + + pretty.Print(m) + +} +``` + +This prints: + +```go +map[string]interface {}{ + "vduration": time.Duration(3000000000), + "vint": int(32), + "nested-typed-int": map[string]int{"vint":42}, + "vsliceint": []int{1, 3, 4}, + "vstring": "Hello", + "vtime": time.Time{ + wall: 0x0, + ext: 63393490800, + loc: (*time.Location)(nil), + }, + "nested": map[string]interface {}{ + "vduration": time.Duration(5000000000), + "vint": int(55), + }, + "nested-typed-duration": map[string]time.Duration{"v1":5000000000, "v2":10000000000}, + "vrat": &big.Rat{ + a: big.Int{ + neg: false, + abs: {0x1}, + }, + b: big.Int{ + neg: false, + abs: {0x2}, + }, + }, +} +``` + + +### Performance + +The implementation is easy to reason aobut (it uses reflection), but It's not particulary fast and probably not suited for _big data_. 
A simple benchmark with a roundtrip marshal/unmarshal is included. On my MacBook it shows: + +```bash +BenchmarkCodec/JSON_regular-4 50000 27523 ns/op 6742 B/op 171 allocs/op +BenchmarkCodec/JSON_typed-4 20000 66644 ns/op 16234 B/op 411 allocs/op +``` diff --git a/vendor/github.com/bep/tmc/adapters.go b/vendor/github.com/bep/tmc/adapters.go new file mode 100644 index 0000000..8e79fc5 --- /dev/null +++ b/vendor/github.com/bep/tmc/adapters.go @@ -0,0 +1,146 @@ +// Copyright © 2019 Bjørn Erik Pedersen . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package tmc + +import ( + "encoding" + "fmt" + "math/big" + "reflect" + "strconv" + "time" +) + +// Adapter wraps a type to preserve type information when encoding and decoding +// a map. +// +// The simples way to create new adapters is via the NewAdapter function. +type Adapter interface { + FromString(s string) (interface{}, error) + MarshalText() (text []byte, err error) + Type() reflect.Type + Wrap(v interface{}) Adapter +} + +var ( + // DefaultTypeAdapters contains the default set of type adapters. + DefaultTypeAdapters = []Adapter{ + // Time + NewAdapter(time.Now(), nil, nil), + NewAdapter( + 3*time.Hour, + func(s string) (interface{}, error) { return time.ParseDuration(s) }, + func(v interface{}) (string, error) { return v.(time.Duration).String(), nil }, + ), + + // Numbers + NewAdapter(big.NewRat(1, 2), nil, nil), + NewAdapter( + int(32), + func(s string) (interface{}, error) { + return strconv.Atoi(s) + }, + func(v interface{}) (string, error) { + return strconv.Itoa(v.(int)), nil + }, + ), + } +) + +// NewAdapter creates a new adapter that wraps the target type. +// +// fromString can be omitted if target implements encoding.TextUnmarshaler. +// toString can be omitted if target implements encoding.TextMarshaler. +// +// It will panic if it can not be created. 
+func NewAdapter( + target interface{}, + fromString func(s string) (interface{}, error), + toString func(v interface{}) (string, error)) Adapter { + + targetValue := reflect.ValueOf(target) + targetType := targetValue.Type() + + wasPointer := targetType.Kind() == reflect.Ptr + if !wasPointer { + // Need the pointer to see the TextUnmarshaler implementation. + v := targetValue + targetValue = reflect.New(targetType) + targetValue.Elem().Set(v) + } + + if fromString == nil { + if _, ok := targetValue.Interface().(encoding.TextUnmarshaler); ok { + fromString = func(s string) (interface{}, error) { + typ := targetType + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + } + v := reflect.New(typ) + + err := v.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(s)) + if err != nil { + return nil, err + } + + if !wasPointer { + v = v.Elem() + } + return v.Interface(), nil + } + } else { + panic(fmt.Sprintf("%T can not be unmarshaled", target)) + } + } + + var marshalText func(v interface{}) ([]byte, error) + + if toString != nil { + marshalText = func(v interface{}) ([]byte, error) { + s, err := toString(v) + return []byte(s), err + } + } else if _, ok := target.(encoding.TextMarshaler); ok { + marshalText = func(v interface{}) ([]byte, error) { + return v.(encoding.TextMarshaler).MarshalText() + } + } else { + panic(fmt.Sprintf("%T can not be marshaled", target)) + } + + return &adapter{ + targetType: targetType, + fromString: fromString, + marshalText: marshalText, + } +} + +var _ Adapter = (*adapter)(nil) + +type adapter struct { + fromString func(s string) (interface{}, error) + marshalText func(v interface{}) (text []byte, err error) + targetType reflect.Type + + target interface{} +} + +func (a *adapter) FromString(s string) (interface{}, error) { + return a.fromString(s) +} + +func (a *adapter) MarshalText() (text []byte, err error) { + return a.marshalText(a.target) +} + +func (a adapter) Type() reflect.Type { + return a.targetType +} + +func (a adapter) 
Wrap(v interface{}) Adapter { + a.target = v + return &a +} diff --git a/vendor/github.com/bep/tmc/codec.go b/vendor/github.com/bep/tmc/codec.go new file mode 100644 index 0000000..09b9225 --- /dev/null +++ b/vendor/github.com/bep/tmc/codec.go @@ -0,0 +1,309 @@ +// Copyright © 2019 Bjørn Erik Pedersen . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package tmc + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "strings" +) + +// JSONMarshaler encodes and decodes JSON and is the default used in this +// codec. +var JSONMarshaler = new(jsonMarshaler) + +// New creates a new Coded with some optional options. +func New(opts ...Option) (*Codec, error) { + c := &Codec{ + typeSep: "|", + marshaler: JSONMarshaler, + typeAdapters: DefaultTypeAdapters, + typeAdaptersMap: make(map[reflect.Type]Adapter), + typeAdaptersStringMap: make(map[string]Adapter), + } + + for _, opt := range opts { + if err := opt(c); err != nil { + return c, err + } + } + + for _, w := range c.typeAdapters { + tp := w.Type() + c.typeAdaptersMap[tp] = w + c.typeAdaptersStringMap[tp.String()] = w + } + + return c, nil +} + +// Option configures the Codec. +type Option func(c *Codec) error + +// WithTypeSep sets the separator to use before the type information encoded in +// the key field. Default is "|". +func WithTypeSep(sep string) func(c *Codec) error { + return func(c *Codec) error { + if sep == "" { + return errors.New("separator cannot be empty") + } + c.typeSep = sep + return nil + } +} + +// WithMarshalUnmarshaler sets the MarshalUnmarshaler to use. +// Default is JSONMarshaler. +func WithMarshalUnmarshaler(marshaler MarshalUnmarshaler) func(c *Codec) error { + return func(c *Codec) error { + c.marshaler = marshaler + return nil + } +} + +// WithTypeAdapters sets the type adapters to use. Note that if more than one +// adapter exists for the same type, the last one will win. 
This means that +// if you want to use the default adapters, but override some of them, you +// can do: +// +// adapters := append(typedmapcodec.DefaultTypeAdapters, mycustomAdapters ...) +// codec := typedmapcodec.New(WithTypeAdapters(adapters)) +// +func WithTypeAdapters(typeAdapters []Adapter) func(c *Codec) error { + return func(c *Codec) error { + c.typeAdapters = typeAdapters + return nil + } +} + +// Codec provides methods to marshal and unmarshal a Go map while preserving +// type information. +type Codec struct { + typeSep string + marshaler MarshalUnmarshaler + typeAdapters []Adapter + typeAdaptersMap map[reflect.Type]Adapter + typeAdaptersStringMap map[string]Adapter +} + +// Marshal accepts a Go map and marshals it to the configured marshaler +// anntated with type information. +func (c *Codec) Marshal(v interface{}) ([]byte, error) { + m, err := c.toTypedMap(v) + if err != nil { + return nil, err + } + return c.marshaler.Marshal(m) +} + +// Unmarshal unmarshals the given data to the given Go map, using +// any annotated type information found to preserve the type information +// stored in Marshal. +func (c *Codec) Unmarshal(data []byte, v interface{}) error { + if err := c.marshaler.Unmarshal(data, v); err != nil { + return err + } + _, err := c.fromTypedMap(v) + return err +} + +func (c *Codec) newKey(key reflect.Value, a Adapter) reflect.Value { + return reflect.ValueOf(fmt.Sprintf("%s%s%s", key, c.typeSep, a.Type())) +} + +func (c *Codec) fromTypedMap(mi interface{}) (reflect.Value, error) { + m := reflect.ValueOf(mi) + if m.Kind() == reflect.Ptr { + m = m.Elem() + } + + if m.Kind() != reflect.Map { + return reflect.Value{}, errors.New("must be a Map") + } + + keyKind := m.Type().Key().Kind() + if keyKind == reflect.Interface { + // We only support string keys. + // YAML creates map[interface {}]interface {}, so try to convert it. 
+ var err error + m, err = c.toStringMap(m) + if err != nil { + return reflect.Value{}, err + } + } + + for _, key := range m.MapKeys() { + + v := indirectInterface(m.MapIndex(key)) + + var ( + keyStr = key.String() + keyPlain string + keyType string + ) + + sepIdx := strings.LastIndex(keyStr, c.typeSep) + + if sepIdx != -1 { + keyPlain = keyStr[:sepIdx] + keyType = keyStr[sepIdx+len(c.typeSep):] + } + + adapter, found := c.typeAdaptersStringMap[keyType] + + if !found { + if v.Kind() == reflect.Map { + var err error + v, err = c.fromTypedMap(v.Interface()) + if err != nil { + return reflect.Value{}, err + } + m.SetMapIndex(key, v) + } + continue + } + + switch v.Kind() { + case reflect.Map: + mm := reflect.MakeMap(reflect.MapOf(stringType, adapter.Type())) + for _, key := range v.MapKeys() { + vv := indirectInterface(v.MapIndex(key)) + nv, err := adapter.FromString(vv.String()) + if err != nil { + return reflect.Value{}, err + } + mm.SetMapIndex(indirectInterface(key), reflect.ValueOf(nv)) + } + m.SetMapIndex(reflect.ValueOf(keyPlain), mm) + case reflect.Slice: + slice := reflect.MakeSlice(reflect.SliceOf(adapter.Type()), v.Len(), v.Cap()) + for i := 0; i < v.Len(); i++ { + vv := indirectInterface(v.Index(i)) + nv, err := adapter.FromString(vv.String()) + if err != nil { + return reflect.Value{}, err + } + slice.Index(i).Set(reflect.ValueOf(nv)) + } + + m.SetMapIndex(reflect.ValueOf(keyPlain), slice) + default: + nv, err := adapter.FromString(v.String()) + if err != nil { + return reflect.Value{}, err + } + m.SetMapIndex(reflect.ValueOf(keyPlain), reflect.ValueOf(nv)) + } + + m.SetMapIndex(key, reflect.Value{}) + + } + + return m, nil +} + +var ( + interfaceMapType = reflect.TypeOf(make(map[string]interface{})) + interfaceSliceType = reflect.TypeOf([]interface{}{}) + stringType = reflect.TypeOf("") +) + +func (c *Codec) toTypedMap(mi interface{}) (interface{}, error) { + + mv := reflect.ValueOf(mi) + + if mv.Kind() != reflect.Map || mv.Type().Key().Kind() != 
reflect.String { + return nil, errors.New("must provide a map with string keys") + } + + m := reflect.MakeMap(interfaceMapType) + + for _, key := range mv.MapKeys() { + v := indirectInterface(mv.MapIndex(key)) + + switch v.Kind() { + + case reflect.Map: + + if wrapper, found := c.typeAdaptersMap[v.Type().Elem()]; found { + mm := reflect.MakeMap(interfaceMapType) + for _, key := range v.MapKeys() { + mm.SetMapIndex(key, reflect.ValueOf(wrapper.Wrap(v.MapIndex(key).Interface()))) + } + m.SetMapIndex(c.newKey(key, wrapper), mm) + } else { + nested, err := c.toTypedMap(v.Interface()) + if err != nil { + return nil, err + } + m.SetMapIndex(key, reflect.ValueOf(nested)) + } + continue + case reflect.Slice: + if adapter, found := c.typeAdaptersMap[v.Type().Elem()]; found { + slice := reflect.MakeSlice(interfaceSliceType, v.Len(), v.Cap()) + for i := 0; i < v.Len(); i++ { + slice.Index(i).Set(reflect.ValueOf(adapter.Wrap(v.Index(i).Interface()))) + } + m.SetMapIndex(c.newKey(key, adapter), slice) + continue + } + } + + if adapter, found := c.typeAdaptersMap[v.Type()]; found { + m.SetMapIndex(c.newKey(key, adapter), reflect.ValueOf(adapter.Wrap(v.Interface()))) + } else { + m.SetMapIndex(key, v) + } + } + + return m.Interface(), nil +} + +func (c *Codec) toStringMap(mi reflect.Value) (reflect.Value, error) { + elemType := mi.Type().Elem() + m := reflect.MakeMap(reflect.MapOf(stringType, elemType)) + for _, key := range mi.MapKeys() { + key = indirectInterface(key) + if key.Kind() != reflect.String { + return reflect.Value{}, errors.New("this library supports only string keys in maps") + } + vv := mi.MapIndex(key) + m.SetMapIndex(reflect.ValueOf(key.String()), vv) + } + + return m, nil +} + +// MarshalUnmarshaler is the interface that must be implemented if you want to +// add support for more than JSON to this codec. 
+type MarshalUnmarshaler interface { + Marshal(v interface{}) ([]byte, error) + Unmarshal(b []byte, v interface{}) error +} + +type jsonMarshaler int + +func (jsonMarshaler) Marshal(v interface{}) ([]byte, error) { + return json.Marshal(v) +} + +func (jsonMarshaler) Unmarshal(b []byte, v interface{}) error { + return json.Unmarshal(b, v) +} + +// Based on: https://github.com/golang/go/blob/178a2c42254166cffed1b25fb1d3c7a5727cada6/src/text/template/exec.go#L931 +func indirectInterface(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Interface { + return v + } + if v.IsNil() { + return reflect.Value{} + } + return v.Elem() +} diff --git a/vendor/github.com/bep/tmc/codecov.yml b/vendor/github.com/bep/tmc/codecov.yml new file mode 100644 index 0000000..2e3090a --- /dev/null +++ b/vendor/github.com/bep/tmc/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + target: auto + threshold: 0.5% + patch: off + +comment: + require_changes: true diff --git a/vendor/github.com/blevesearch/bleve/.gitignore b/vendor/github.com/blevesearch/bleve/.gitignore new file mode 100644 index 0000000..ab7a1e2 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/.gitignore @@ -0,0 +1,19 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +query_string.y.go.tmp +/analysis/token_filters/cld2/cld2-read-only +/analysis/token_filters/cld2/libcld2_full.a +/cmd/bleve/bleve +vendor/** +!vendor/manifest +/y.output +/search/query/y.output +*.test +tags diff --git a/vendor/github.com/blevesearch/bleve/.travis.yml b/vendor/github.com/blevesearch/bleve/.travis.yml new file mode 100644 index 0000000..7b7297a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/.travis.yml @@ -0,0 +1,25 @@ +sudo: false + +language: go + +go: + - "1.12.x" + - "1.13.x" + - "1.14.x" + +script: + - go get golang.org/x/tools/cmd/cover + - go get github.com/mattn/goveralls + - go get github.com/kisielk/errcheck + - go get -u github.com/FiloSottile/gvt + - gvt 
restore + - go test -race -v $(go list ./... | grep -v vendor/) + - go vet $(go list ./... | grep -v vendor/) + - go test ./test -v -indexType scorch + - errcheck -ignorepkg fmt $(go list ./... | grep -v vendor/); + - docs/project-code-coverage.sh + - docs/build_children.sh + +notifications: + email: + - marty.schoch@gmail.com diff --git a/vendor/github.com/blevesearch/bleve/CONTRIBUTING.md b/vendor/github.com/blevesearch/bleve/CONTRIBUTING.md new file mode 100644 index 0000000..5ebf3d6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/CONTRIBUTING.md @@ -0,0 +1,16 @@ +# Contributing to Bleve + +We look forward to your contributions, but ask that you first review these guidelines. + +### Sign the CLA + +As Bleve is a Couchbase project we require contributors accept the [Couchbase Contributor License Agreement](http://review.couchbase.org/static/individual_agreement.html). To sign this agreement log into the Couchbase [code review tool](http://review.couchbase.org/). The Bleve project does not use this code review tool but it is still used to track acceptance of the contributor license agreements. + +### Submitting a Pull Request + +All types of contributions are welcome, but please keep the following in mind: + +- If you're planning a large change, you should really discuss it in a github issue or on the google group first. This helps avoid duplicate effort and spending time on something that may not be merged. +- Existing tests should continue to pass, new tests for the contribution are nice to have. +- All code should have gone through `go fmt` +- All code should pass `go vet` diff --git a/vendor/github.com/blevesearch/bleve/LICENSE b/vendor/github.com/blevesearch/bleve/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/vendor/github.com/blevesearch/bleve/README.md b/vendor/github.com/blevesearch/bleve/README.md new file mode 100644 index 0000000..eff0be9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/README.md @@ -0,0 +1,70 @@ +# ![bleve](docs/bleve.png) bleve + +[![Tests](https://github.com/blevesearch/bleve/workflows/Tests/badge.svg?branch=master&event=push)](https://github.com/blevesearch/bleve/actions?query=workflow%3ATests+event%3Apush+branch%3Amaster) +[![Coverage Status](https://coveralls.io/repos/github/blevesearch/bleve/badge.svg?branch=master)](https://coveralls.io/github/blevesearch/bleve?branch=master) +[![GoDoc](https://godoc.org/github.com/blevesearch/bleve?status.svg)](https://godoc.org/github.com/blevesearch/bleve) +[![Join the chat at https://gitter.im/blevesearch/bleve](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/blevesearch/bleve?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![codebeat](https://codebeat.co/badges/38a7cbc9-9cf5-41c0-a315-0746178230f4)](https://codebeat.co/projects/github-com-blevesearch-bleve) +[![Go Report Card](https://goreportcard.com/badge/blevesearch/bleve)](https://goreportcard.com/report/blevesearch/bleve) +[![Sourcegraph](https://sourcegraph.com/github.com/blevesearch/bleve/-/badge.svg)](https://sourcegraph.com/github.com/blevesearch/bleve?badge) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + +modern text indexing in go - [blevesearch.com](http://www.blevesearch.com/) + +Try out bleve live by [searching the bleve website](http://www.blevesearch.com/search/?q=bleve). 
+ +## Features + +* Index any go data structure (including JSON) +* Intelligent defaults backed up by powerful configuration +* Supported field types: + * Text, Numeric, Date +* Supported query types: + * Term, Phrase, Match, Match Phrase, Prefix + * Conjunction, Disjunction, Boolean + * Numeric Range, Date Range + * Simple query [syntax](http://www.blevesearch.com/docs/Query-String-Query/) for human entry +* tf-idf Scoring +* Search result match highlighting +* Supports Aggregating Facets: + * Terms Facet + * Numeric Range Facet + * Date Range Facet + +## Discussion + +Discuss usage and development of bleve in the [google group](https://groups.google.com/forum/#!forum/bleve). + +## Indexing + +```go +message := struct{ + Id string + From string + Body string +}{ + Id: "example", + From: "marty.schoch@gmail.com", + Body: "bleve indexing is easy", +} + +mapping := bleve.NewIndexMapping() +index, err := bleve.New("example.bleve", mapping) +if err != nil { + panic(err) +} +index.Index(message.Id, message) +``` + +## Querying + +```go +index, _ := bleve.Open("example.bleve") +query := bleve.NewQueryStringQuery("bleve") +searchRequest := bleve.NewSearchRequest(query) +searchResult, _ := index.Search(searchRequest) +``` + +## License + +Apache License Version 2.0 diff --git a/vendor/github.com/blevesearch/bleve/analysis/analyzer/standard/standard.go b/vendor/github.com/blevesearch/bleve/analysis/analyzer/standard/standard.go new file mode 100644 index 0000000..74ea564 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/analyzer/standard/standard.go @@ -0,0 +1,52 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standard + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/analysis/lang/en" + "github.com/blevesearch/bleve/analysis/token/lowercase" + "github.com/blevesearch/bleve/analysis/tokenizer/unicode" + "github.com/blevesearch/bleve/registry" +) + +const Name = "standard" + +func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) { + tokenizer, err := cache.TokenizerNamed(unicode.Name) + if err != nil { + return nil, err + } + toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name) + if err != nil { + return nil, err + } + stopEnFilter, err := cache.TokenFilterNamed(en.StopName) + if err != nil { + return nil, err + } + rv := analysis.Analyzer{ + Tokenizer: tokenizer, + TokenFilters: []analysis.TokenFilter{ + toLowerFilter, + stopEnFilter, + }, + } + return &rv, nil +} + +func init() { + registry.RegisterAnalyzer(Name, AnalyzerConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/datetime/flexible/flexible.go b/vendor/github.com/blevesearch/bleve/analysis/datetime/flexible/flexible.go new file mode 100644 index 0000000..cd549f5 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/datetime/flexible/flexible.go @@ -0,0 +1,64 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package flexible + +import ( + "fmt" + "time" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +const Name = "flexiblego" + +type DateTimeParser struct { + layouts []string +} + +func New(layouts []string) *DateTimeParser { + return &DateTimeParser{ + layouts: layouts, + } +} + +func (p *DateTimeParser) ParseDateTime(input string) (time.Time, error) { + for _, layout := range p.layouts { + rv, err := time.Parse(layout, input) + if err == nil { + return rv, nil + } + } + return time.Time{}, analysis.ErrInvalidDateTime +} + +func DateTimeParserConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.DateTimeParser, error) { + layouts, ok := config["layouts"].([]interface{}) + if !ok { + return nil, fmt.Errorf("must specify layouts") + } + var layoutStrs []string + for _, layout := range layouts { + layoutStr, ok := layout.(string) + if ok { + layoutStrs = append(layoutStrs, layoutStr) + } + } + return New(layoutStrs), nil +} + +func init() { + registry.RegisterDateTimeParser(Name, DateTimeParserConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/datetime/optional/optional.go b/vendor/github.com/blevesearch/bleve/analysis/datetime/optional/optional.go new file mode 100644 index 0000000..4b98de6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/datetime/optional/optional.go @@ -0,0 +1,45 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package optional + +import ( + "time" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/analysis/datetime/flexible" + "github.com/blevesearch/bleve/registry" +) + +const Name = "dateTimeOptional" + +const rfc3339NoTimezone = "2006-01-02T15:04:05" +const rfc3339NoTimezoneNoT = "2006-01-02 15:04:05" +const rfc3339NoTime = "2006-01-02" + +var layouts = []string{ + time.RFC3339Nano, + time.RFC3339, + rfc3339NoTimezone, + rfc3339NoTimezoneNoT, + rfc3339NoTime, +} + +func DateTimeParserConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.DateTimeParser, error) { + return flexible.New(layouts), nil +} + +func init() { + registry.RegisterDateTimeParser(Name, DateTimeParserConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/freq.go b/vendor/github.com/blevesearch/bleve/analysis/freq.go new file mode 100644 index 0000000..198c149 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/freq.go @@ -0,0 +1,152 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import ( + "reflect" + + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTokenLocation int +var reflectStaticSizeTokenFreq int + +func init() { + var tl TokenLocation + reflectStaticSizeTokenLocation = int(reflect.TypeOf(tl).Size()) + var tf TokenFreq + reflectStaticSizeTokenFreq = int(reflect.TypeOf(tf).Size()) +} + +// TokenLocation represents one occurrence of a term at a particular location in +// a field. Start, End and Position have the same meaning as in analysis.Token. +// Field and ArrayPositions identify the field value in the source document. +// See document.Field for details. +type TokenLocation struct { + Field string + ArrayPositions []uint64 + Start int + End int + Position int +} + +func (tl *TokenLocation) Size() int { + rv := reflectStaticSizeTokenLocation + rv += len(tl.ArrayPositions) * size.SizeOfUint64 + return rv +} + +// TokenFreq represents all the occurrences of a term in all fields of a +// document. +type TokenFreq struct { + Term []byte + Locations []*TokenLocation + frequency int +} + +func (tf *TokenFreq) Size() int { + rv := reflectStaticSizeTokenFreq + rv += len(tf.Term) + for _, loc := range tf.Locations { + rv += loc.Size() + } + return rv +} + +func (tf *TokenFreq) Frequency() int { + return tf.frequency +} + +// TokenFrequencies maps document terms to their combined frequencies from all +// fields. 
+type TokenFrequencies map[string]*TokenFreq + +func (tfs TokenFrequencies) Size() int { + rv := size.SizeOfMap + rv += len(tfs) * (size.SizeOfString + size.SizeOfPtr) + for k, v := range tfs { + rv += len(k) + rv += v.Size() + } + return rv +} + +func (tfs TokenFrequencies) MergeAll(remoteField string, other TokenFrequencies) { + // walk the new token frequencies + for tfk, tf := range other { + // set the remoteField value in incoming token freqs + for _, l := range tf.Locations { + l.Field = remoteField + } + existingTf, exists := tfs[tfk] + if exists { + existingTf.Locations = append(existingTf.Locations, tf.Locations...) + existingTf.frequency = existingTf.frequency + tf.frequency + } else { + tfs[tfk] = &TokenFreq{ + Term: tf.Term, + frequency: tf.frequency, + Locations: make([]*TokenLocation, len(tf.Locations)), + } + copy(tfs[tfk].Locations, tf.Locations) + } + } +} + +func TokenFrequency(tokens TokenStream, arrayPositions []uint64, includeTermVectors bool) TokenFrequencies { + rv := make(map[string]*TokenFreq, len(tokens)) + + if includeTermVectors { + tls := make([]TokenLocation, len(tokens)) + tlNext := 0 + + for _, token := range tokens { + tls[tlNext] = TokenLocation{ + ArrayPositions: arrayPositions, + Start: token.Start, + End: token.End, + Position: token.Position, + } + + curr, ok := rv[string(token.Term)] + if ok { + curr.Locations = append(curr.Locations, &tls[tlNext]) + curr.frequency++ + } else { + rv[string(token.Term)] = &TokenFreq{ + Term: token.Term, + Locations: []*TokenLocation{&tls[tlNext]}, + frequency: 1, + } + } + + tlNext++ + } + } else { + for _, token := range tokens { + curr, exists := rv[string(token.Term)] + if exists { + curr.frequency++ + } else { + rv[string(token.Term)] = &TokenFreq{ + Term: token.Term, + frequency: 1, + } + } + } + } + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/lang/en/analyzer_en.go b/vendor/github.com/blevesearch/bleve/analysis/lang/en/analyzer_en.go new file mode 100644 
index 0000000..8402785 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/lang/en/analyzer_en.go @@ -0,0 +1,70 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package en implements an analyzer with reasonable defaults for processing +// English text. +// +// It strips possessive suffixes ('s), transforms tokens to lower case, +// removes stopwords from a built-in list, and applies porter stemming. +// +// The built-in stopwords list is defined in EnglishStopWords. 
+package en + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" + + "github.com/blevesearch/bleve/analysis/token/lowercase" + "github.com/blevesearch/bleve/analysis/token/porter" + "github.com/blevesearch/bleve/analysis/tokenizer/unicode" +) + +const AnalyzerName = "en" + +func AnalyzerConstructor(config map[string]interface{}, cache *registry.Cache) (*analysis.Analyzer, error) { + tokenizer, err := cache.TokenizerNamed(unicode.Name) + if err != nil { + return nil, err + } + possEnFilter, err := cache.TokenFilterNamed(PossessiveName) + if err != nil { + return nil, err + } + toLowerFilter, err := cache.TokenFilterNamed(lowercase.Name) + if err != nil { + return nil, err + } + stopEnFilter, err := cache.TokenFilterNamed(StopName) + if err != nil { + return nil, err + } + stemmerEnFilter, err := cache.TokenFilterNamed(porter.Name) + if err != nil { + return nil, err + } + rv := analysis.Analyzer{ + Tokenizer: tokenizer, + TokenFilters: []analysis.TokenFilter{ + possEnFilter, + toLowerFilter, + stopEnFilter, + stemmerEnFilter, + }, + } + return &rv, nil +} + +func init() { + registry.RegisterAnalyzer(AnalyzerName, AnalyzerConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/lang/en/possessive_filter_en.go b/vendor/github.com/blevesearch/bleve/analysis/lang/en/possessive_filter_en.go new file mode 100644 index 0000000..2c06efd --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/lang/en/possessive_filter_en.go @@ -0,0 +1,67 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package en + +import ( + "unicode/utf8" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +// PossessiveName is the name PossessiveFilter is registered as +// in the bleve registry. +const PossessiveName = "possessive_en" + +const rightSingleQuotationMark = '’' +const apostrophe = '\'' +const fullWidthApostrophe = ''' + +const apostropheChars = rightSingleQuotationMark + apostrophe + fullWidthApostrophe + +// PossessiveFilter implements a TokenFilter which +// strips the English possessive suffix ('s) from tokens. +// It handle a variety of apostrophe types, is case-insensitive +// and doesn't distinguish between possessive and contraction. 
+// (ie "She's So Rad" becomes "She So Rad") +type PossessiveFilter struct { +} + +func NewPossessiveFilter() *PossessiveFilter { + return &PossessiveFilter{} +} + +func (s *PossessiveFilter) Filter(input analysis.TokenStream) analysis.TokenStream { + for _, token := range input { + lastRune, lastRuneSize := utf8.DecodeLastRune(token.Term) + if lastRune == 's' || lastRune == 'S' { + nextLastRune, nextLastRuneSize := utf8.DecodeLastRune(token.Term[:len(token.Term)-lastRuneSize]) + if nextLastRune == rightSingleQuotationMark || + nextLastRune == apostrophe || + nextLastRune == fullWidthApostrophe { + token.Term = token.Term[:len(token.Term)-lastRuneSize-nextLastRuneSize] + } + } + } + return input +} + +func PossessiveFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + return NewPossessiveFilter(), nil +} + +func init() { + registry.RegisterTokenFilter(PossessiveName, PossessiveFilterConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/lang/en/stemmer_en_snowball.go b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stemmer_en_snowball.go new file mode 100644 index 0000000..225bb06 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stemmer_en_snowball.go @@ -0,0 +1,49 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package en + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" + + "github.com/blevesearch/snowballstem" + "github.com/blevesearch/snowballstem/english" +) + +const SnowballStemmerName = "stemmer_en_snowball" + +type EnglishStemmerFilter struct { +} + +func NewEnglishStemmerFilter() *EnglishStemmerFilter { + return &EnglishStemmerFilter{} +} + +func (s *EnglishStemmerFilter) Filter(input analysis.TokenStream) analysis.TokenStream { + for _, token := range input { + env := snowballstem.NewEnv(string(token.Term)) + english.Stem(env) + token.Term = []byte(env.Current()) + } + return input +} + +func EnglishStemmerFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + return NewEnglishStemmerFilter(), nil +} + +func init() { + registry.RegisterTokenFilter(SnowballStemmerName, EnglishStemmerFilterConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_filter_en.go b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_filter_en.go new file mode 100644 index 0000000..bfdb2c9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_filter_en.go @@ -0,0 +1,33 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package en + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/analysis/token/stop" + "github.com/blevesearch/bleve/registry" +) + +func StopTokenFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + tokenMap, err := cache.TokenMapNamed(StopName) + if err != nil { + return nil, err + } + return stop.NewStopTokensFilter(tokenMap), nil +} + +func init() { + registry.RegisterTokenFilter(StopName, StopTokenFilterConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_words_en.go b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_words_en.go new file mode 100644 index 0000000..6423cf2 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/lang/en/stop_words_en.go @@ -0,0 +1,344 @@ +package en + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +const StopName = "stop_en" + +// EnglishStopWords is the built-in list of stopwords used by the "stop_en" TokenFilter. +// +// this content was obtained from: +// lucene-4.7.2/analysis/common/src/resources/org/apache/lucene/analysis/snowball/ +// ` was changed to ' to allow for literal string +var EnglishStopWords = []byte(` | From svn.tartarus.org/snowball/trunk/website/algorithms/english/stop.txt + | This file is distributed under the BSD License. + | See http://snowball.tartarus.org/license.php + | Also see http://www.opensource.org/licenses/bsd-license.html + | - Encoding was converted to UTF-8. + | - This notice was added. + | + | NOTE: To use this file with StopFilterFactory, you must specify format="snowball" + + | An English stop word list. Comments begin with vertical bar. Each stop + | word is at the start of a line. + + | Many of the forms below are quite rare (e.g. "yourselves") but included for + | completeness. 
+ + | PRONOUNS FORMS + | 1st person sing + +i | subject, always in upper case of course + +me | object +my | possessive adjective + | the possessive pronoun 'mine' is best suppressed, because of the + | sense of coal-mine etc. +myself | reflexive + | 1st person plural +we | subject + +| us | object + | care is required here because US = United States. It is usually + | safe to remove it if it is in lower case. +our | possessive adjective +ours | possessive pronoun +ourselves | reflexive + | second person (archaic 'thou' forms not included) +you | subject and object +your | possessive adjective +yours | possessive pronoun +yourself | reflexive (singular) +yourselves | reflexive (plural) + | third person singular +he | subject +him | object +his | possessive adjective and pronoun +himself | reflexive + +she | subject +her | object and possessive adjective +hers | possessive pronoun +herself | reflexive + +it | subject and object +its | possessive adjective +itself | reflexive + | third person plural +they | subject +them | object +their | possessive adjective +theirs | possessive pronoun +themselves | reflexive + | other forms (demonstratives, interrogatives) +what +which +who +whom +this +that +these +those + + | VERB FORMS (using F.R. 
Palmer's nomenclature) + | BE +am | 1st person, present +is | -s form (3rd person, present) +are | present +was | 1st person, past +were | past +be | infinitive +been | past participle +being | -ing form + | HAVE +have | simple +has | -s form +had | past +having | -ing form + | DO +do | simple +does | -s form +did | past +doing | -ing form + + | The forms below are, I believe, best omitted, because of the significant + | homonym forms: + + | He made a WILL + | old tin CAN + | merry month of MAY + | a smell of MUST + | fight the good fight with all thy MIGHT + + | would, could, should, ought might however be included + + | | AUXILIARIES + | | WILL + |will + +would + + | | SHALL + |shall + +should + + | | CAN + |can + +could + + | | MAY + |may + |might + | | MUST + |must + | | OUGHT + +ought + + | COMPOUND FORMS, increasingly encountered nowadays in 'formal' writing + | pronoun + verb + +i'm +you're +he's +she's +it's +we're +they're +i've +you've +we've +they've +i'd +you'd +he'd +she'd +we'd +they'd +i'll +you'll +he'll +she'll +we'll +they'll + + | verb + negation + +isn't +aren't +wasn't +weren't +hasn't +haven't +hadn't +doesn't +don't +didn't + + | auxiliary + negation + +won't +wouldn't +shan't +shouldn't +can't +cannot +couldn't +mustn't + + | miscellaneous forms + +let's +that's +who's +what's +here's +there's +when's +where's +why's +how's + + | rarer forms + + | daren't needn't + + | doubtful forms + + | oughtn't mightn't + + | ARTICLES +a +an +the + + | THE REST (Overlap among prepositions, conjunctions, adverbs etc is so + | high, that classification is pointless.) 
+and +but +if +or +because +as +until +while + +of +at +by +for +with +about +against +between +into +through +during +before +after +above +below +to +from +up +down +in +out +on +off +over +under + +again +further +then +once + +here +there +when +where +why +how + +all +any +both +each +few +more +most +other +some +such + +no +nor +not +only +own +same +so +than +too +very + + | Just for the record, the following words are among the commonest in English + + | one + | every + | least + | less + | many + | now + | ever + | never + | say + | says + | said + | also + | get + | go + | goes + | just + | made + | make + | put + | see + | seen + | whether + | like + | well + | back + | even + | still + | way + | take + | since + | another + | however + | two + | three + | four + | five + | first + | second + | new + | old + | high + | long +`) + +func TokenMapConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenMap, error) { + rv := analysis.NewTokenMap() + err := rv.LoadBytes(EnglishStopWords) + return rv, err +} + +func init() { + registry.RegisterTokenMap(StopName, TokenMapConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/test_words.txt b/vendor/github.com/blevesearch/bleve/analysis/test_words.txt new file mode 100644 index 0000000..b86e254 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/test_words.txt @@ -0,0 +1,7 @@ +# full line comment +marty +steve # trailing comment +| different format of comment +dustin +siri | different style trailing comment +multiple words with different whitespace \ No newline at end of file diff --git a/vendor/github.com/blevesearch/bleve/analysis/token/lowercase/lowercase.go b/vendor/github.com/blevesearch/bleve/analysis/token/lowercase/lowercase.go new file mode 100644 index 0000000..adb740c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/token/lowercase/lowercase.go @@ -0,0 +1,105 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package lowercase implements a TokenFilter which converts +// tokens to lower case according to unicode rules. +package lowercase + +import ( + "bytes" + "unicode" + "unicode/utf8" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +// Name is the name used to register LowerCaseFilter in the bleve registry +const Name = "to_lower" + +type LowerCaseFilter struct { +} + +func NewLowerCaseFilter() *LowerCaseFilter { + return &LowerCaseFilter{} +} + +func (f *LowerCaseFilter) Filter(input analysis.TokenStream) analysis.TokenStream { + for _, token := range input { + token.Term = toLowerDeferredCopy(token.Term) + } + return input +} + +func LowerCaseFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + return NewLowerCaseFilter(), nil +} + +func init() { + registry.RegisterTokenFilter(Name, LowerCaseFilterConstructor) +} + +// toLowerDeferredCopy will function exactly like +// bytes.ToLower() only it will reuse (overwrite) +// the original byte array when possible +// NOTE: because its possible that the lower-case +// form of a rune has a different utf-8 encoded +// length, in these cases a new byte array is allocated +func toLowerDeferredCopy(s []byte) []byte { + j := 0 + for i := 0; i < len(s); { + wid := 1 + r := rune(s[i]) + if r >= utf8.RuneSelf { + r, wid = utf8.DecodeRune(s[i:]) + } + + l := unicode.ToLower(r) + 
+ // If the rune is already lowercased, just move to the + // next rune. + if l == r { + i += wid + j += wid + continue + } + + // Handles the Unicode edge-case where the last + // rune in a word on the greek Σ needs to be converted + // differently. + if l == 'σ' && i+2 == len(s) { + l = 'Ï‚' + } + + lwid := utf8.RuneLen(l) + if lwid > wid { + // utf-8 encoded replacement is wider + // for now, punt and defer + // to bytes.ToLower() for the remainder + // only known to happen with chars + // Rune Ⱥ(570) width 2 - Lower â±¥(11365) width 3 + // Rune Ⱦ(574) width 2 - Lower ⱦ(11366) width 3 + rest := bytes.ToLower(s[i:]) + rv := make([]byte, j+len(rest)) + copy(rv[:j], s[:j]) + copy(rv[j:], rest) + return rv + } else { + utf8.EncodeRune(s[j:], l) + } + i += wid + j += lwid + } + return s[:j] +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/token/porter/porter.go b/vendor/github.com/blevesearch/bleve/analysis/token/porter/porter.go new file mode 100644 index 0000000..4cd08d9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/token/porter/porter.go @@ -0,0 +1,53 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package porter + +import ( + "bytes" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" + + "github.com/blevesearch/go-porterstemmer" +) + +const Name = "stemmer_porter" + +type PorterStemmer struct { +} + +func NewPorterStemmer() *PorterStemmer { + return &PorterStemmer{} +} + +func (s *PorterStemmer) Filter(input analysis.TokenStream) analysis.TokenStream { + for _, token := range input { + // if it is not a protected keyword, stem it + if !token.KeyWord { + termRunes := bytes.Runes(token.Term) + stemmedRunes := porterstemmer.StemWithoutLowerCasing(termRunes) + token.Term = analysis.BuildTermFromRunes(stemmedRunes) + } + } + return input +} + +func PorterStemmerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + return NewPorterStemmer(), nil +} + +func init() { + registry.RegisterTokenFilter(Name, PorterStemmerConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/token/stop/stop.go b/vendor/github.com/blevesearch/bleve/analysis/token/stop/stop.go new file mode 100644 index 0000000..cca2d8e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/token/stop/stop.go @@ -0,0 +1,70 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package stop implements a TokenFilter removing tokens found in +// a TokenMap. 
+// +// It constructor takes the following arguments: +// +// "stop_token_map" (string): the name of the token map identifying tokens to +// remove. +package stop + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +const Name = "stop_tokens" + +type StopTokensFilter struct { + stopTokens analysis.TokenMap +} + +func NewStopTokensFilter(stopTokens analysis.TokenMap) *StopTokensFilter { + return &StopTokensFilter{ + stopTokens: stopTokens, + } +} + +func (f *StopTokensFilter) Filter(input analysis.TokenStream) analysis.TokenStream { + j := 0 + for _, token := range input { + _, isStopToken := f.stopTokens[string(token.Term)] + if !isStopToken { + input[j] = token + j++ + } + } + + return input[:j] +} + +func StopTokensFilterConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.TokenFilter, error) { + stopTokenMapName, ok := config["stop_token_map"].(string) + if !ok { + return nil, fmt.Errorf("must specify stop_token_map") + } + stopTokenMap, err := cache.TokenMapNamed(stopTokenMapName) + if err != nil { + return nil, fmt.Errorf("error building stop words filter: %v", err) + } + return NewStopTokensFilter(stopTokenMap), nil +} + +func init() { + registry.RegisterTokenFilter(Name, StopTokensFilterConstructor) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/tokenizer/unicode/unicode.go b/vendor/github.com/blevesearch/bleve/analysis/tokenizer/unicode/unicode.go new file mode 100644 index 0000000..39e38b4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/tokenizer/unicode/unicode.go @@ -0,0 +1,131 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package unicode + +import ( + "github.com/blevesearch/segment" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/registry" +) + +const Name = "unicode" + +type UnicodeTokenizer struct { +} + +func NewUnicodeTokenizer() *UnicodeTokenizer { + return &UnicodeTokenizer{} +} + +func (rt *UnicodeTokenizer) Tokenize(input []byte) analysis.TokenStream { + rvx := make([]analysis.TokenStream, 0, 10) // When rv gets full, append to rvx. + rv := make(analysis.TokenStream, 0, 1) + + ta := []analysis.Token(nil) + taNext := 0 + + segmenter := segment.NewWordSegmenterDirect(input) + start := 0 + pos := 1 + + guessRemaining := func(end int) int { + avgSegmentLen := end / (len(rv) + 1) + if avgSegmentLen < 1 { + avgSegmentLen = 1 + } + + remainingLen := len(input) - end + + return remainingLen / avgSegmentLen + } + + for segmenter.Segment() { + segmentBytes := segmenter.Bytes() + end := start + len(segmentBytes) + if segmenter.Type() != segment.None { + if taNext >= len(ta) { + remainingSegments := guessRemaining(end) + if remainingSegments > 1000 { + remainingSegments = 1000 + } + if remainingSegments < 1 { + remainingSegments = 1 + } + + ta = make([]analysis.Token, remainingSegments) + taNext = 0 + } + + token := &ta[taNext] + taNext++ + + token.Term = segmentBytes + token.Start = start + token.End = end + token.Position = pos + token.Type = convertType(segmenter.Type()) + + if len(rv) >= cap(rv) { // When rv is full, save it into rvx. 
+ rvx = append(rvx, rv) + + rvCap := cap(rv) * 2 + if rvCap > 256 { + rvCap = 256 + } + + rv = make(analysis.TokenStream, 0, rvCap) // Next rv cap is bigger. + } + + rv = append(rv, token) + pos++ + } + start = end + } + + if len(rvx) > 0 { + n := len(rv) + for _, r := range rvx { + n += len(r) + } + rall := make(analysis.TokenStream, 0, n) + for _, r := range rvx { + rall = append(rall, r...) + } + return append(rall, rv...) + } + + return rv +} + +func UnicodeTokenizerConstructor(config map[string]interface{}, cache *registry.Cache) (analysis.Tokenizer, error) { + return NewUnicodeTokenizer(), nil +} + +func init() { + registry.RegisterTokenizer(Name, UnicodeTokenizerConstructor) +} + +func convertType(segmentWordType int) analysis.TokenType { + switch segmentWordType { + case segment.Ideo: + return analysis.Ideographic + case segment.Kana: + return analysis.Ideographic + case segment.Number: + return analysis.Numeric + } + return analysis.AlphaNumeric +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/tokenmap.go b/vendor/github.com/blevesearch/bleve/analysis/tokenmap.go new file mode 100644 index 0000000..7c0d0a8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/tokenmap.go @@ -0,0 +1,76 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package analysis + +import ( + "bufio" + "bytes" + "io" + "io/ioutil" + "strings" +) + +type TokenMap map[string]bool + +func NewTokenMap() TokenMap { + return make(TokenMap, 0) +} + +// LoadFile reads in a list of tokens from a text file, +// one per line. +// Comments are supported using `#` or `|` +func (t TokenMap) LoadFile(filename string) error { + data, err := ioutil.ReadFile(filename) + if err != nil { + return err + } + return t.LoadBytes(data) +} + +// LoadBytes reads in a list of tokens from memory, +// one per line. +// Comments are supported using `#` or `|` +func (t TokenMap) LoadBytes(data []byte) error { + bytesReader := bytes.NewReader(data) + bufioReader := bufio.NewReader(bytesReader) + line, err := bufioReader.ReadString('\n') + for err == nil { + t.LoadLine(line) + line, err = bufioReader.ReadString('\n') + } + // if the err was EOF we still need to process the last value + if err == io.EOF { + t.LoadLine(line) + return nil + } + return err +} + +func (t TokenMap) LoadLine(line string) { + // find the start of a comment, if any + startComment := strings.IndexAny(line, "#|") + if startComment >= 0 { + line = line[:startComment] + } + + tokens := strings.Fields(line) + for _, token := range tokens { + t.AddToken(token) + } +} + +func (t TokenMap) AddToken(token string) { + t[token] = true +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/type.go b/vendor/github.com/blevesearch/bleve/analysis/type.go new file mode 100644 index 0000000..589cc1c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/type.go @@ -0,0 +1,103 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import ( + "fmt" + "time" +) + +type CharFilter interface { + Filter([]byte) []byte +} + +type TokenType int + +const ( + AlphaNumeric TokenType = iota + Ideographic + Numeric + DateTime + Shingle + Single + Double + Boolean +) + +// Token represents one occurrence of a term at a particular location in a +// field. +type Token struct { + // Start specifies the byte offset of the beginning of the term in the + // field. + Start int `json:"start"` + + // End specifies the byte offset of the end of the term in the field. + End int `json:"end"` + Term []byte `json:"term"` + + // Position specifies the 1-based index of the token in the sequence of + // occurrences of its term in the field. + Position int `json:"position"` + Type TokenType `json:"type"` + KeyWord bool `json:"keyword"` +} + +func (t *Token) String() string { + return fmt.Sprintf("Start: %d End: %d Position: %d Token: %s Type: %d", t.Start, t.End, t.Position, string(t.Term), t.Type) +} + +type TokenStream []*Token + +// A Tokenizer splits an input string into tokens, the usual behaviour being to +// map words to tokens. +type Tokenizer interface { + Tokenize([]byte) TokenStream +} + +// A TokenFilter adds, transforms or removes tokens from a token stream. 
+type TokenFilter interface { + Filter(TokenStream) TokenStream +} + +type Analyzer struct { + CharFilters []CharFilter + Tokenizer Tokenizer + TokenFilters []TokenFilter +} + +func (a *Analyzer) Analyze(input []byte) TokenStream { + if a.CharFilters != nil { + for _, cf := range a.CharFilters { + input = cf.Filter(input) + } + } + tokens := a.Tokenizer.Tokenize(input) + if a.TokenFilters != nil { + for _, tf := range a.TokenFilters { + tokens = tf.Filter(tokens) + } + } + return tokens +} + +var ErrInvalidDateTime = fmt.Errorf("unable to parse datetime with any of the layouts") + +type DateTimeParser interface { + ParseDateTime(string) (time.Time, error) +} + +type ByteArrayConverter interface { + Convert([]byte) (interface{}, error) +} diff --git a/vendor/github.com/blevesearch/bleve/analysis/util.go b/vendor/github.com/blevesearch/bleve/analysis/util.go new file mode 100644 index 0000000..8e4348a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/analysis/util.go @@ -0,0 +1,92 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package analysis + +import ( + "bytes" + "unicode/utf8" +) + +func DeleteRune(in []rune, pos int) []rune { + if pos >= len(in) { + return in + } + copy(in[pos:], in[pos+1:]) + return in[:len(in)-1] +} + +func InsertRune(in []rune, pos int, r rune) []rune { + // create a new slice 1 rune larger + rv := make([]rune, len(in)+1) + // copy the characters before the insert pos + copy(rv[0:pos], in[0:pos]) + // set the inserted rune + rv[pos] = r + // copy the characters after the insert pos + copy(rv[pos+1:], in[pos:]) + return rv +} + +// BuildTermFromRunesOptimistic will build a term from the provided runes +// AND optimistically attempt to encode into the provided buffer +// if at any point it appears the buffer is too small, a new buffer is +// allocated and that is used instead +// this should be used in cases where frequently the new term is the same +// length or shorter than the original term (in number of bytes) +func BuildTermFromRunesOptimistic(buf []byte, runes []rune) []byte { + rv := buf + used := 0 + for _, r := range runes { + nextLen := utf8.RuneLen(r) + if used+nextLen > len(rv) { + // alloc new buf + buf = make([]byte, len(runes)*utf8.UTFMax) + // copy work we've already done + copy(buf, rv[:used]) + rv = buf + } + written := utf8.EncodeRune(rv[used:], r) + used += written + } + return rv[:used] +} + +func BuildTermFromRunes(runes []rune) []byte { + return BuildTermFromRunesOptimistic(make([]byte, len(runes)*utf8.UTFMax), runes) +} + +func TruncateRunes(input []byte, num int) []byte { + runes := bytes.Runes(input) + runes = runes[:len(runes)-num] + out := BuildTermFromRunes(runes) + return out +} + +func RunesEndsWith(input []rune, suffix string) bool { + inputLen := len(input) + suffixRunes := []rune(suffix) + suffixLen := len(suffixRunes) + if suffixLen > inputLen { + return false + } + + for i := suffixLen - 1; i >= 0; i-- { + if input[inputLen-(suffixLen-i)] != suffixRunes[i] { + return false + } + } + + return true +} diff --git 
a/vendor/github.com/blevesearch/bleve/builder.go b/vendor/github.com/blevesearch/bleve/builder.go new file mode 100644 index 0000000..de00c97 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/builder.go @@ -0,0 +1,94 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch" + "github.com/blevesearch/bleve/mapping" +) + +type builderImpl struct { + b index.IndexBuilder + m mapping.IndexMapping +} + +func (b *builderImpl) Index(id string, data interface{}) error { + if id == "" { + return ErrorEmptyID + } + + doc := document.NewDocument(id) + err := b.m.MapDocument(doc, data) + if err != nil { + return err + } + err = b.b.Index(doc) + return err +} + +func (b *builderImpl) Close() error { + return b.b.Close() +} + +func newBuilder(path string, mapping mapping.IndexMapping, config map[string]interface{}) (Builder, error) { + if path == "" { + return nil, fmt.Errorf("builder requires path") + } + + err := mapping.Validate() + if err != nil { + return nil, err + } + + if config == nil { + config = map[string]interface{}{} + } + + // the builder does not have an API to interact with internal storage + // however we can pass k/v pairs through the config + mappingBytes, err := json.Marshal(mapping) + if err != nil { + return nil, err + } + 
config["internal"] = map[string][]byte{ + string(mappingInternalKey): mappingBytes, + } + + // do not use real config, as these are options for the builder, + // not the resulting index + meta := newIndexMeta(scorch.Name, scorch.Name, map[string]interface{}{}) + err = meta.Save(path) + if err != nil { + return nil, err + } + + config["path"] = indexStorePath(path) + + b, err := scorch.NewBuilder(config) + if err != nil { + return nil, err + } + rv := &builderImpl{ + b: b, + m: mapping, + } + + return rv, nil +} diff --git a/vendor/github.com/blevesearch/bleve/config.go b/vendor/github.com/blevesearch/bleve/config.go new file mode 100644 index 0000000..99f2e08 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/config.go @@ -0,0 +1,98 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bleve + +import ( + "expvar" + "io/ioutil" + "log" + "time" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store/gtreap" + "github.com/blevesearch/bleve/index/upsidedown" + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search/highlight/highlighter/html" + + // force import of scorch so its accessible by default + _ "github.com/blevesearch/bleve/index/scorch" +) + +var bleveExpVar = expvar.NewMap("bleve") + +type configuration struct { + Cache *registry.Cache + DefaultHighlighter string + DefaultKVStore string + DefaultMemKVStore string + DefaultIndexType string + SlowSearchLogThreshold time.Duration + analysisQueue *index.AnalysisQueue +} + +func (c *configuration) SetAnalysisQueueSize(n int) { + if c.analysisQueue != nil { + c.analysisQueue.Close() + } + c.analysisQueue = index.NewAnalysisQueue(n) +} + +func (c *configuration) Shutdown() { + c.SetAnalysisQueueSize(0) +} + +func newConfiguration() *configuration { + return &configuration{ + Cache: registry.NewCache(), + analysisQueue: index.NewAnalysisQueue(4), + } +} + +// Config contains library level configuration +var Config *configuration + +func init() { + bootStart := time.Now() + + // build the default configuration + Config = newConfiguration() + + // set the default highlighter + Config.DefaultHighlighter = html.Name + + // default kv store + Config.DefaultKVStore = "" + + // default mem only kv store + Config.DefaultMemKVStore = gtreap.Name + + // default index + Config.DefaultIndexType = upsidedown.Name + + bootDuration := time.Since(bootStart) + bleveExpVar.Add("bootDuration", int64(bootDuration)) + indexStats = NewIndexStats() + bleveExpVar.Set("indexes", indexStats) + + initDisk() +} + +var logger = log.New(ioutil.Discard, "bleve", log.LstdFlags) + +// SetLog sets the logger used for logging +// by default log messages are sent to ioutil.Discard +func SetLog(l *log.Logger) { + logger = l +} diff --git 
a/vendor/github.com/blevesearch/bleve/config_app.go b/vendor/github.com/blevesearch/bleve/config_app.go new file mode 100644 index 0000000..112d0b6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/config_app.go @@ -0,0 +1,23 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build appengine appenginevm + +package bleve + +// in the appengine environment we cannot support disk based indexes +// so we do no extra configuration in this method +func initDisk() { + +} diff --git a/vendor/github.com/blevesearch/bleve/config_disk.go b/vendor/github.com/blevesearch/bleve/config_disk.go new file mode 100644 index 0000000..d03bceb --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/config_disk.go @@ -0,0 +1,25 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// +build !appengine,!appenginevm + +package bleve + +import "github.com/blevesearch/bleve/index/store/boltdb" + +// in normal environments we configure boltdb as the default storage +func initDisk() { + // default kv store + Config.DefaultKVStore = boltdb.Name +} diff --git a/vendor/github.com/blevesearch/bleve/doc.go b/vendor/github.com/blevesearch/bleve/doc.go new file mode 100644 index 0000000..d54af7c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/doc.go @@ -0,0 +1,38 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package bleve is a library for indexing and searching text. 
+ +Example Opening New Index, Indexing Data + + message := struct{ + Id: "example" + From: "marty.schoch@gmail.com", + Body: "bleve indexing is easy", + } + + mapping := bleve.NewIndexMapping() + index, _ := bleve.New("example.bleve", mapping) + index.Index(message.Id, message) + +Example Opening Existing Index, Searching Data + + index, _ := bleve.Open("example.bleve") + query := bleve.NewQueryStringQuery("bleve") + searchRequest := bleve.NewSearchRequest(query) + searchResult, _ := index.Search(searchRequest) + +*/ +package bleve diff --git a/vendor/github.com/blevesearch/bleve/document/document.go b/vendor/github.com/blevesearch/bleve/document/document.go new file mode 100644 index 0000000..6ac17b9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/document.go @@ -0,0 +1,101 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package document + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDocument int + +func init() { + var d Document + reflectStaticSizeDocument = int(reflect.TypeOf(d).Size()) +} + +type Document struct { + ID string `json:"id"` + Fields []Field `json:"fields"` + CompositeFields []*CompositeField +} + +func NewDocument(id string) *Document { + return &Document{ + ID: id, + Fields: make([]Field, 0), + CompositeFields: make([]*CompositeField, 0), + } +} + +func (d *Document) Size() int { + sizeInBytes := reflectStaticSizeDocument + size.SizeOfPtr + + len(d.ID) + + for _, entry := range d.Fields { + sizeInBytes += entry.Size() + } + + for _, entry := range d.CompositeFields { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (d *Document) AddField(f Field) *Document { + switch f := f.(type) { + case *CompositeField: + d.CompositeFields = append(d.CompositeFields, f) + default: + d.Fields = append(d.Fields, f) + } + return d +} + +func (d *Document) GoString() string { + fields := "" + for i, field := range d.Fields { + if i != 0 { + fields += ", " + } + fields += fmt.Sprintf("%#v", field) + } + compositeFields := "" + for i, field := range d.CompositeFields { + if i != 0 { + compositeFields += ", " + } + compositeFields += fmt.Sprintf("%#v", field) + } + return fmt.Sprintf("&document.Document{ID:%s, Fields: %s, CompositeFields: %s}", d.ID, fields, compositeFields) +} + +func (d *Document) NumPlainTextBytes() uint64 { + rv := uint64(0) + for _, field := range d.Fields { + rv += field.NumPlainTextBytes() + } + for _, compositeField := range d.CompositeFields { + for _, field := range d.Fields { + if compositeField.includesField(field.Name()) { + rv += field.NumPlainTextBytes() + } + } + } + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/document/field.go b/vendor/github.com/blevesearch/bleve/document/field.go new file mode 100644 index 0000000..2fe9166 --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/document/field.go @@ -0,0 +1,41 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "github.com/blevesearch/bleve/analysis" +) + +type Field interface { + // Name returns the path of the field from the root DocumentMapping. + // A root field path is "field", a subdocument field is "parent.field". + Name() string + // ArrayPositions returns the intermediate document and field indices + // required to resolve the field value in the document. For example, if the + // field path is "doc1.doc2.field" where doc1 and doc2 are slices or + // arrays, ArrayPositions returns 2 indices used to resolve "doc2" value in + // "doc1", then "field" in "doc2". + ArrayPositions() []uint64 + Options() IndexingOptions + Analyze() (int, analysis.TokenFrequencies) + Value() []byte + + // NumPlainTextBytes should return the number of plain text bytes + // that this field represents - this is a common metric for tracking + // the rate of indexing + NumPlainTextBytes() uint64 + + Size() int +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_boolean.go b/vendor/github.com/blevesearch/bleve/document/field_boolean.go new file mode 100644 index 0000000..6864b16 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_boolean.go @@ -0,0 +1,123 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeBooleanField int + +func init() { + var f BooleanField + reflectStaticSizeBooleanField = int(reflect.TypeOf(f).Size()) +} + +const DefaultBooleanIndexingOptions = StoreField | IndexField | DocValues + +type BooleanField struct { + name string + arrayPositions []uint64 + options IndexingOptions + value []byte + numPlainTextBytes uint64 +} + +func (b *BooleanField) Size() int { + return reflectStaticSizeBooleanField + size.SizeOfPtr + + len(b.name) + + len(b.arrayPositions)*size.SizeOfUint64 + + len(b.value) +} + +func (b *BooleanField) Name() string { + return b.name +} + +func (b *BooleanField) ArrayPositions() []uint64 { + return b.arrayPositions +} + +func (b *BooleanField) Options() IndexingOptions { + return b.options +} + +func (b *BooleanField) Analyze() (int, analysis.TokenFrequencies) { + tokens := make(analysis.TokenStream, 0) + tokens = append(tokens, &analysis.Token{ + Start: 0, + End: len(b.value), + Term: b.value, + Position: 1, + Type: analysis.Boolean, + }) + + fieldLength := len(tokens) + tokenFreqs := analysis.TokenFrequency(tokens, b.arrayPositions, b.options.IncludeTermVectors()) + return fieldLength, tokenFreqs +} + +func (b *BooleanField) Value() []byte { + return b.value +} + +func (b *BooleanField) Boolean() (bool, error) { + if 
len(b.value) == 1 { + return b.value[0] == 'T', nil + } + return false, fmt.Errorf("boolean field has %d bytes", len(b.value)) +} + +func (b *BooleanField) GoString() string { + return fmt.Sprintf("&document.BooleanField{Name:%s, Options: %s, Value: %s}", b.name, b.options, b.value) +} + +func (b *BooleanField) NumPlainTextBytes() uint64 { + return b.numPlainTextBytes +} + +func NewBooleanFieldFromBytes(name string, arrayPositions []uint64, value []byte) *BooleanField { + return &BooleanField{ + name: name, + arrayPositions: arrayPositions, + value: value, + options: DefaultNumericIndexingOptions, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewBooleanField(name string, arrayPositions []uint64, b bool) *BooleanField { + return NewBooleanFieldWithIndexingOptions(name, arrayPositions, b, DefaultNumericIndexingOptions) +} + +func NewBooleanFieldWithIndexingOptions(name string, arrayPositions []uint64, b bool, options IndexingOptions) *BooleanField { + numPlainTextBytes := 5 + v := []byte("F") + if b { + numPlainTextBytes = 4 + v = []byte("T") + } + return &BooleanField{ + name: name, + arrayPositions: arrayPositions, + value: v, + options: options, + numPlainTextBytes: uint64(numPlainTextBytes), + } +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_composite.go b/vendor/github.com/blevesearch/bleve/document/field_composite.go new file mode 100644 index 0000000..a828588 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_composite.go @@ -0,0 +1,124 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeCompositeField int + +func init() { + var cf CompositeField + reflectStaticSizeCompositeField = int(reflect.TypeOf(cf).Size()) +} + +const DefaultCompositeIndexingOptions = IndexField + +type CompositeField struct { + name string + includedFields map[string]bool + excludedFields map[string]bool + defaultInclude bool + options IndexingOptions + totalLength int + compositeFrequencies analysis.TokenFrequencies +} + +func NewCompositeField(name string, defaultInclude bool, include []string, exclude []string) *CompositeField { + return NewCompositeFieldWithIndexingOptions(name, defaultInclude, include, exclude, DefaultCompositeIndexingOptions) +} + +func NewCompositeFieldWithIndexingOptions(name string, defaultInclude bool, include []string, exclude []string, options IndexingOptions) *CompositeField { + rv := &CompositeField{ + name: name, + options: options, + defaultInclude: defaultInclude, + includedFields: make(map[string]bool, len(include)), + excludedFields: make(map[string]bool, len(exclude)), + compositeFrequencies: make(analysis.TokenFrequencies), + } + + for _, i := range include { + rv.includedFields[i] = true + } + for _, e := range exclude { + rv.excludedFields[e] = true + } + + return rv +} + +func (c *CompositeField) Size() int { + sizeInBytes := reflectStaticSizeCompositeField + size.SizeOfPtr + + len(c.name) + + for k, _ := range c.includedFields { + sizeInBytes += size.SizeOfString + 
len(k) + size.SizeOfBool + } + + for k, _ := range c.excludedFields { + sizeInBytes += size.SizeOfString + len(k) + size.SizeOfBool + } + + return sizeInBytes +} + +func (c *CompositeField) Name() string { + return c.name +} + +func (c *CompositeField) ArrayPositions() []uint64 { + return []uint64{} +} + +func (c *CompositeField) Options() IndexingOptions { + return c.options +} + +func (c *CompositeField) Analyze() (int, analysis.TokenFrequencies) { + return c.totalLength, c.compositeFrequencies +} + +func (c *CompositeField) Value() []byte { + return []byte{} +} + +func (c *CompositeField) NumPlainTextBytes() uint64 { + return 0 +} + +func (c *CompositeField) includesField(field string) bool { + shouldInclude := c.defaultInclude + _, fieldShouldBeIncluded := c.includedFields[field] + if fieldShouldBeIncluded { + shouldInclude = true + } + _, fieldShouldBeExcluded := c.excludedFields[field] + if fieldShouldBeExcluded { + shouldInclude = false + } + return shouldInclude +} + +func (c *CompositeField) Compose(field string, length int, freq analysis.TokenFrequencies) { + if c.includesField(field) { + c.totalLength += length + c.compositeFrequencies.MergeAll(field, freq) + } +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_datetime.go b/vendor/github.com/blevesearch/bleve/document/field_datetime.go new file mode 100644 index 0000000..583b44c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_datetime.go @@ -0,0 +1,159 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "fmt" + "math" + "reflect" + "time" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDateTimeField int + +func init() { + var f DateTimeField + reflectStaticSizeDateTimeField = int(reflect.TypeOf(f).Size()) +} + +const DefaultDateTimeIndexingOptions = StoreField | IndexField | DocValues +const DefaultDateTimePrecisionStep uint = 4 + +var MinTimeRepresentable = time.Unix(0, math.MinInt64) +var MaxTimeRepresentable = time.Unix(0, math.MaxInt64) + +type DateTimeField struct { + name string + arrayPositions []uint64 + options IndexingOptions + value numeric.PrefixCoded + numPlainTextBytes uint64 +} + +func (n *DateTimeField) Size() int { + return reflectStaticSizeDateTimeField + size.SizeOfPtr + + len(n.name) + + len(n.arrayPositions)*size.SizeOfUint64 +} + +func (n *DateTimeField) Name() string { + return n.name +} + +func (n *DateTimeField) ArrayPositions() []uint64 { + return n.arrayPositions +} + +func (n *DateTimeField) Options() IndexingOptions { + return n.options +} + +func (n *DateTimeField) Analyze() (int, analysis.TokenFrequencies) { + tokens := make(analysis.TokenStream, 0) + tokens = append(tokens, &analysis.Token{ + Start: 0, + End: len(n.value), + Term: n.value, + Position: 1, + Type: analysis.DateTime, + }) + + original, err := n.value.Int64() + if err == nil { + + shift := DefaultDateTimePrecisionStep + for shift < 64 { + shiftEncoded, err := numeric.NewPrefixCodedInt64(original, shift) + if err != nil { + break + } + token := analysis.Token{ + Start: 0, + End: len(shiftEncoded), + Term: shiftEncoded, + Position: 1, + Type: analysis.DateTime, + } + tokens = append(tokens, &token) + shift += DefaultDateTimePrecisionStep + } + } + + fieldLength := len(tokens) + tokenFreqs := analysis.TokenFrequency(tokens, 
n.arrayPositions, n.options.IncludeTermVectors()) + return fieldLength, tokenFreqs +} + +func (n *DateTimeField) Value() []byte { + return n.value +} + +func (n *DateTimeField) DateTime() (time.Time, error) { + i64, err := n.value.Int64() + if err != nil { + return time.Time{}, err + } + return time.Unix(0, i64).UTC(), nil +} + +func (n *DateTimeField) GoString() string { + return fmt.Sprintf("&document.DateField{Name:%s, Options: %s, Value: %s}", n.name, n.options, n.value) +} + +func (n *DateTimeField) NumPlainTextBytes() uint64 { + return n.numPlainTextBytes +} + +func NewDateTimeFieldFromBytes(name string, arrayPositions []uint64, value []byte) *DateTimeField { + return &DateTimeField{ + name: name, + arrayPositions: arrayPositions, + value: value, + options: DefaultDateTimeIndexingOptions, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewDateTimeField(name string, arrayPositions []uint64, dt time.Time) (*DateTimeField, error) { + return NewDateTimeFieldWithIndexingOptions(name, arrayPositions, dt, DefaultDateTimeIndexingOptions) +} + +func NewDateTimeFieldWithIndexingOptions(name string, arrayPositions []uint64, dt time.Time, options IndexingOptions) (*DateTimeField, error) { + if canRepresent(dt) { + dtInt64 := dt.UnixNano() + prefixCoded := numeric.MustNewPrefixCodedInt64(dtInt64, 0) + return &DateTimeField{ + name: name, + arrayPositions: arrayPositions, + value: prefixCoded, + options: options, + // not correct, just a place holder until we revisit how fields are + // represented and can fix this better + numPlainTextBytes: uint64(8), + }, nil + } + return nil, fmt.Errorf("cannot represent %s in this type", dt) +} + +func canRepresent(dt time.Time) bool { + if dt.Before(MinTimeRepresentable) || dt.After(MaxTimeRepresentable) { + return false + } + return true +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_geopoint.go b/vendor/github.com/blevesearch/bleve/document/field_geopoint.go new file mode 100644 index 0000000..91fe23f 
--- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_geopoint.go @@ -0,0 +1,152 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeGeoPointField int + +func init() { + var f GeoPointField + reflectStaticSizeGeoPointField = int(reflect.TypeOf(f).Size()) +} + +var GeoPrecisionStep uint = 9 + +type GeoPointField struct { + name string + arrayPositions []uint64 + options IndexingOptions + value numeric.PrefixCoded + numPlainTextBytes uint64 +} + +func (n *GeoPointField) Size() int { + return reflectStaticSizeGeoPointField + size.SizeOfPtr + + len(n.name) + + len(n.arrayPositions)*size.SizeOfUint64 +} + +func (n *GeoPointField) Name() string { + return n.name +} + +func (n *GeoPointField) ArrayPositions() []uint64 { + return n.arrayPositions +} + +func (n *GeoPointField) Options() IndexingOptions { + return n.options +} + +func (n *GeoPointField) Analyze() (int, analysis.TokenFrequencies) { + tokens := make(analysis.TokenStream, 0) + tokens = append(tokens, &analysis.Token{ + Start: 0, + End: len(n.value), + Term: n.value, + Position: 1, + Type: analysis.Numeric, + }) + + original, err := n.value.Int64() + if err == nil { + + shift := GeoPrecisionStep + for shift < 64 { + 
shiftEncoded, err := numeric.NewPrefixCodedInt64(original, shift) + if err != nil { + break + } + token := analysis.Token{ + Start: 0, + End: len(shiftEncoded), + Term: shiftEncoded, + Position: 1, + Type: analysis.Numeric, + } + tokens = append(tokens, &token) + shift += GeoPrecisionStep + } + } + + fieldLength := len(tokens) + tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors()) + return fieldLength, tokenFreqs +} + +func (n *GeoPointField) Value() []byte { + return n.value +} + +func (n *GeoPointField) Lon() (float64, error) { + i64, err := n.value.Int64() + if err != nil { + return 0.0, err + } + return geo.MortonUnhashLon(uint64(i64)), nil +} + +func (n *GeoPointField) Lat() (float64, error) { + i64, err := n.value.Int64() + if err != nil { + return 0.0, err + } + return geo.MortonUnhashLat(uint64(i64)), nil +} + +func (n *GeoPointField) GoString() string { + return fmt.Sprintf("&document.GeoPointField{Name:%s, Options: %s, Value: %s}", n.name, n.options, n.value) +} + +func (n *GeoPointField) NumPlainTextBytes() uint64 { + return n.numPlainTextBytes +} + +func NewGeoPointFieldFromBytes(name string, arrayPositions []uint64, value []byte) *GeoPointField { + return &GeoPointField{ + name: name, + arrayPositions: arrayPositions, + value: value, + options: DefaultNumericIndexingOptions, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewGeoPointField(name string, arrayPositions []uint64, lon, lat float64) *GeoPointField { + return NewGeoPointFieldWithIndexingOptions(name, arrayPositions, lon, lat, DefaultNumericIndexingOptions) +} + +func NewGeoPointFieldWithIndexingOptions(name string, arrayPositions []uint64, lon, lat float64, options IndexingOptions) *GeoPointField { + mhash := geo.MortonHash(lon, lat) + prefixCoded := numeric.MustNewPrefixCodedInt64(int64(mhash), 0) + return &GeoPointField{ + name: name, + arrayPositions: arrayPositions, + value: prefixCoded, + options: options, + // not correct, just a place 
holder until we revisit how fields are + // represented and can fix this better + numPlainTextBytes: uint64(8), + } +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_numeric.go b/vendor/github.com/blevesearch/bleve/document/field_numeric.go new file mode 100644 index 0000000..46c685e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_numeric.go @@ -0,0 +1,145 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package document + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeNumericField int + +func init() { + var f NumericField + reflectStaticSizeNumericField = int(reflect.TypeOf(f).Size()) +} + +const DefaultNumericIndexingOptions = StoreField | IndexField | DocValues + +const DefaultPrecisionStep uint = 4 + +type NumericField struct { + name string + arrayPositions []uint64 + options IndexingOptions + value numeric.PrefixCoded + numPlainTextBytes uint64 +} + +func (n *NumericField) Size() int { + return reflectStaticSizeNumericField + size.SizeOfPtr + + len(n.name) + + len(n.arrayPositions)*size.SizeOfPtr +} + +func (n *NumericField) Name() string { + return n.name +} + +func (n *NumericField) ArrayPositions() []uint64 { + return n.arrayPositions +} + +func (n *NumericField) Options() IndexingOptions { + return n.options +} + +func (n *NumericField) Analyze() (int, analysis.TokenFrequencies) { + tokens := make(analysis.TokenStream, 0) + tokens = append(tokens, &analysis.Token{ + Start: 0, + End: len(n.value), + Term: n.value, + Position: 1, + Type: analysis.Numeric, + }) + + original, err := n.value.Int64() + if err == nil { + + shift := DefaultPrecisionStep + for shift < 64 { + shiftEncoded, err := numeric.NewPrefixCodedInt64(original, shift) + if err != nil { + break + } + token := analysis.Token{ + Start: 0, + End: len(shiftEncoded), + Term: shiftEncoded, + Position: 1, + Type: analysis.Numeric, + } + tokens = append(tokens, &token) + shift += DefaultPrecisionStep + } + } + + fieldLength := len(tokens) + tokenFreqs := analysis.TokenFrequency(tokens, n.arrayPositions, n.options.IncludeTermVectors()) + return fieldLength, tokenFreqs +} + +func (n *NumericField) Value() []byte { + return n.value +} + +func (n *NumericField) Number() (float64, error) { + i64, err := n.value.Int64() + if err != nil { + return 0.0, err + } + return 
numeric.Int64ToFloat64(i64), nil +} + +func (n *NumericField) GoString() string { + return fmt.Sprintf("&document.NumericField{Name:%s, Options: %s, Value: %s}", n.name, n.options, n.value) +} + +func (n *NumericField) NumPlainTextBytes() uint64 { + return n.numPlainTextBytes +} + +func NewNumericFieldFromBytes(name string, arrayPositions []uint64, value []byte) *NumericField { + return &NumericField{ + name: name, + arrayPositions: arrayPositions, + value: value, + options: DefaultNumericIndexingOptions, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewNumericField(name string, arrayPositions []uint64, number float64) *NumericField { + return NewNumericFieldWithIndexingOptions(name, arrayPositions, number, DefaultNumericIndexingOptions) +} + +func NewNumericFieldWithIndexingOptions(name string, arrayPositions []uint64, number float64, options IndexingOptions) *NumericField { + numberInt64 := numeric.Float64ToInt64(number) + prefixCoded := numeric.MustNewPrefixCodedInt64(numberInt64, 0) + return &NumericField{ + name: name, + arrayPositions: arrayPositions, + value: prefixCoded, + options: options, + // not correct, just a place holder until we revisit how fields are + // represented and can fix this better + numPlainTextBytes: uint64(8), + } +} diff --git a/vendor/github.com/blevesearch/bleve/document/field_text.go b/vendor/github.com/blevesearch/bleve/document/field_text.go new file mode 100644 index 0000000..6bd74c7 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/field_text.go @@ -0,0 +1,139 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTextField int + +func init() { + var f TextField + reflectStaticSizeTextField = int(reflect.TypeOf(f).Size()) +} + +const DefaultTextIndexingOptions = IndexField | DocValues + +type TextField struct { + name string + arrayPositions []uint64 + options IndexingOptions + analyzer *analysis.Analyzer + value []byte + numPlainTextBytes uint64 +} + +func (t *TextField) Size() int { + return reflectStaticSizeTextField + size.SizeOfPtr + + len(t.name) + + len(t.arrayPositions)*size.SizeOfUint64 + + len(t.value) +} + +func (t *TextField) Name() string { + return t.name +} + +func (t *TextField) ArrayPositions() []uint64 { + return t.arrayPositions +} + +func (t *TextField) Options() IndexingOptions { + return t.options +} + +func (t *TextField) Analyze() (int, analysis.TokenFrequencies) { + var tokens analysis.TokenStream + if t.analyzer != nil { + bytesToAnalyze := t.Value() + if t.options.IsStored() { + // need to copy + bytesCopied := make([]byte, len(bytesToAnalyze)) + copy(bytesCopied, bytesToAnalyze) + bytesToAnalyze = bytesCopied + } + tokens = t.analyzer.Analyze(bytesToAnalyze) + } else { + tokens = analysis.TokenStream{ + &analysis.Token{ + Start: 0, + End: len(t.value), + Term: t.value, + Position: 1, + Type: analysis.AlphaNumeric, + }, + } + } + fieldLength := len(tokens) // number of tokens in this doc field + tokenFreqs := analysis.TokenFrequency(tokens, t.arrayPositions, 
t.options.IncludeTermVectors()) + return fieldLength, tokenFreqs +} + +func (t *TextField) Analyzer() *analysis.Analyzer { + return t.analyzer +} + +func (t *TextField) Value() []byte { + return t.value +} + +func (t *TextField) GoString() string { + return fmt.Sprintf("&document.TextField{Name:%s, Options: %s, Analyzer: %v, Value: %s, ArrayPositions: %v}", t.name, t.options, t.analyzer, t.value, t.arrayPositions) +} + +func (t *TextField) NumPlainTextBytes() uint64 { + return t.numPlainTextBytes +} + +func NewTextField(name string, arrayPositions []uint64, value []byte) *TextField { + return NewTextFieldWithIndexingOptions(name, arrayPositions, value, DefaultTextIndexingOptions) +} + +func NewTextFieldWithIndexingOptions(name string, arrayPositions []uint64, value []byte, options IndexingOptions) *TextField { + return &TextField{ + name: name, + arrayPositions: arrayPositions, + options: options, + value: value, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewTextFieldWithAnalyzer(name string, arrayPositions []uint64, value []byte, analyzer *analysis.Analyzer) *TextField { + return &TextField{ + name: name, + arrayPositions: arrayPositions, + options: DefaultTextIndexingOptions, + analyzer: analyzer, + value: value, + numPlainTextBytes: uint64(len(value)), + } +} + +func NewTextFieldCustom(name string, arrayPositions []uint64, value []byte, options IndexingOptions, analyzer *analysis.Analyzer) *TextField { + return &TextField{ + name: name, + arrayPositions: arrayPositions, + options: options, + analyzer: analyzer, + value: value, + numPlainTextBytes: uint64(len(value)), + } +} diff --git a/vendor/github.com/blevesearch/bleve/document/indexing_options.go b/vendor/github.com/blevesearch/bleve/document/indexing_options.go new file mode 100644 index 0000000..44498a8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/document/indexing_options.go @@ -0,0 +1,66 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package document + +type IndexingOptions int + +const ( + IndexField IndexingOptions = 1 << iota + StoreField + IncludeTermVectors + DocValues +) + +func (o IndexingOptions) IsIndexed() bool { + return o&IndexField != 0 +} + +func (o IndexingOptions) IsStored() bool { + return o&StoreField != 0 +} + +func (o IndexingOptions) IncludeTermVectors() bool { + return o&IncludeTermVectors != 0 +} + +func (o IndexingOptions) IncludeDocValues() bool { + return o&DocValues != 0 +} + +func (o IndexingOptions) String() string { + rv := "" + if o.IsIndexed() { + rv += "INDEXED" + } + if o.IsStored() { + if rv != "" { + rv += ", " + } + rv += "STORE" + } + if o.IncludeTermVectors() { + if rv != "" { + rv += ", " + } + rv += "TV" + } + if o.IncludeDocValues() { + if rv != "" { + rv += ", " + } + rv += "DV" + } + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/error.go b/vendor/github.com/blevesearch/bleve/error.go new file mode 100644 index 0000000..7402dfc --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/error.go @@ -0,0 +1,52 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +// Constant Error values which can be compared to determine the type of error +const ( + ErrorIndexPathExists Error = iota + ErrorIndexPathDoesNotExist + ErrorIndexMetaMissing + ErrorIndexMetaCorrupt + ErrorUnknownStorageType + ErrorIndexClosed + ErrorAliasMulti + ErrorAliasEmpty + ErrorUnknownIndexType + ErrorEmptyID + ErrorIndexReadInconsistency +) + +// Error represents a more strongly typed bleve error for detecting +// and handling specific types of errors. +type Error int + +func (e Error) Error() string { + return errorMessages[e] +} + +var errorMessages = map[Error]string{ + ErrorIndexPathExists: "cannot create new index, path already exists", + ErrorIndexPathDoesNotExist: "cannot open index, path does not exist", + ErrorIndexMetaMissing: "cannot open index, metadata missing", + ErrorIndexMetaCorrupt: "cannot open index, metadata corrupt", + ErrorUnknownStorageType: "unknown storage type", + ErrorIndexClosed: "index is closed", + ErrorAliasMulti: "cannot perform single index operation on multiple index alias", + ErrorAliasEmpty: "cannot perform operation on empty alias", + ErrorUnknownIndexType: "unknown index type", + ErrorEmptyID: "document ID cannot be empty", + ErrorIndexReadInconsistency: "index read inconsistency detected", +} diff --git a/vendor/github.com/blevesearch/bleve/geo/README.md b/vendor/github.com/blevesearch/bleve/geo/README.md new file mode 100644 index 0000000..43bcd98 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/README.md @@ -0,0 +1,9 @@ +# geo support in bleve + +First, all of this 
geo code is a Go adaptation of the [Lucene 5.3.2 sandbox geo support](https://lucene.apache.org/core/5_3_2/sandbox/org/apache/lucene/util/package-summary.html). + +## Notes + +- All of the APIs will use float64 for lon/lat values. +- When describing a point in function arguments or return values, we always use the order lon, lat. +- High level APIs will use TopLeft and BottomRight to describe bounding boxes. This may not map cleanly to min/max lon/lat when crossing the dateline. The lower level APIs will use min/max lon/lat and require the higher-level code to split boxes accordingly. diff --git a/vendor/github.com/blevesearch/bleve/geo/geo.go b/vendor/github.com/blevesearch/bleve/geo/geo.go new file mode 100644 index 0000000..b18ace4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/geo.go @@ -0,0 +1,210 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package geo + +import ( + "fmt" + "math" + + "github.com/blevesearch/bleve/numeric" +) + +// GeoBits is the number of bits used for a single geo point +// Currently this is 32bits for lon and 32bits for lat +var GeoBits uint = 32 + +var minLon = -180.0 +var minLat = -90.0 +var maxLon = 180.0 +var maxLat = 90.0 +var minLonRad = minLon * degreesToRadian +var minLatRad = minLat * degreesToRadian +var maxLonRad = maxLon * degreesToRadian +var maxLatRad = maxLat * degreesToRadian +var geoTolerance = 1e-6 +var lonScale = float64((uint64(0x1)<> 1)) +} + +func unscaleLon(lon uint64) float64 { + return (float64(lon) / lonScale) + minLon +} + +func unscaleLat(lat uint64) float64 { + return (float64(lat) / latScale) + minLat +} + +// compareGeo will compare two float values and see if they are the same +// taking into consideration a known geo tolerance. +func compareGeo(a, b float64) float64 { + compare := a - b + if math.Abs(compare) <= geoTolerance { + return 0 + } + return compare +} + +// RectIntersects checks whether rectangles a and b intersect +func RectIntersects(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { + return !(aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) +} + +// RectWithin checks whether box a is within box b +func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool { + rv := !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY) + return rv +} + +// BoundingBoxContains checks whether the lon/lat point is within the box +func BoundingBoxContains(lon, lat, minLon, minLat, maxLon, maxLat float64) bool { + return compareGeo(lon, minLon) >= 0 && compareGeo(lon, maxLon) <= 0 && + compareGeo(lat, minLat) >= 0 && compareGeo(lat, maxLat) <= 0 +} + +const degreesToRadian = math.Pi / 180 +const radiansToDegrees = 180 / math.Pi + +// DegreesToRadians converts an angle in degrees to radians +func DegreesToRadians(d float64) float64 { + return d * degreesToRadian +} + +// 
RadiansToDegrees converts an angle in radians to degress +func RadiansToDegrees(r float64) float64 { + return r * radiansToDegrees +} + +var earthMeanRadiusMeters = 6371008.7714 + +func RectFromPointDistance(lon, lat, dist float64) (float64, float64, float64, float64, error) { + err := checkLongitude(lon) + if err != nil { + return 0, 0, 0, 0, err + } + err = checkLatitude(lat) + if err != nil { + return 0, 0, 0, 0, err + } + radLon := DegreesToRadians(lon) + radLat := DegreesToRadians(lat) + radDistance := (dist + 7e-2) / earthMeanRadiusMeters + + minLatL := radLat - radDistance + maxLatL := radLat + radDistance + + var minLonL, maxLonL float64 + if minLatL > minLatRad && maxLatL < maxLatRad { + deltaLon := asin(sin(radDistance) / cos(radLat)) + minLonL = radLon - deltaLon + if minLonL < minLonRad { + minLonL += 2 * math.Pi + } + maxLonL = radLon + deltaLon + if maxLonL > maxLonRad { + maxLonL -= 2 * math.Pi + } + } else { + // pole is inside distance + minLatL = math.Max(minLatL, minLatRad) + maxLatL = math.Min(maxLatL, maxLatRad) + minLonL = minLonRad + maxLonL = maxLonRad + } + + return RadiansToDegrees(minLonL), + RadiansToDegrees(maxLatL), + RadiansToDegrees(maxLonL), + RadiansToDegrees(minLatL), + nil +} + +func checkLatitude(latitude float64) error { + if math.IsNaN(latitude) || latitude < minLat || latitude > maxLat { + return fmt.Errorf("invalid latitude %f; must be between %f and %f", latitude, minLat, maxLat) + } + return nil +} + +func checkLongitude(longitude float64) error { + if math.IsNaN(longitude) || longitude < minLon || longitude > maxLon { + return fmt.Errorf("invalid longitude %f; must be between %f and %f", longitude, minLon, maxLon) + } + return nil +} + +func BoundingRectangleForPolygon(polygon []Point) ( + float64, float64, float64, float64, error) { + err := checkLongitude(polygon[0].Lon) + if err != nil { + return 0, 0, 0, 0, err + } + err = checkLatitude(polygon[0].Lat) + if err != nil { + return 0, 0, 0, 0, err + } + maxY, minY := 
polygon[0].Lat, polygon[0].Lat + maxX, minX := polygon[0].Lon, polygon[0].Lon + for i := 1; i < len(polygon); i++ { + err := checkLongitude(polygon[i].Lon) + if err != nil { + return 0, 0, 0, 0, err + } + err = checkLatitude(polygon[i].Lat) + if err != nil { + return 0, 0, 0, 0, err + } + + maxY = math.Max(maxY, polygon[i].Lat) + minY = math.Min(minY, polygon[i].Lat) + + maxX = math.Max(maxX, polygon[i].Lon) + minX = math.Min(minX, polygon[i].Lon) + } + + return minX, maxY, maxX, minY, nil +} diff --git a/vendor/github.com/blevesearch/bleve/geo/geo_dist.go b/vendor/github.com/blevesearch/bleve/geo/geo_dist.go new file mode 100644 index 0000000..d3ae0ed --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/geo_dist.go @@ -0,0 +1,98 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package geo + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +type distanceUnit struct { + conv float64 + suffixes []string +} + +var inch = distanceUnit{0.0254, []string{"in", "inch"}} +var yard = distanceUnit{0.9144, []string{"yd", "yards"}} +var feet = distanceUnit{0.3048, []string{"ft", "feet"}} +var kilom = distanceUnit{1000, []string{"km", "kilometers"}} +var nauticalm = distanceUnit{1852.0, []string{"nm", "nauticalmiles"}} +var millim = distanceUnit{0.001, []string{"mm", "millimeters"}} +var centim = distanceUnit{0.01, []string{"cm", "centimeters"}} +var miles = distanceUnit{1609.344, []string{"mi", "miles"}} +var meters = distanceUnit{1, []string{"m", "meters"}} + +var distanceUnits = []*distanceUnit{ + &inch, &yard, &feet, &kilom, &nauticalm, &millim, ¢im, &miles, &meters, +} + +// ParseDistance attempts to parse a distance string and return distance in +// meters. Example formats supported: +// "5in" "5inch" "7yd" "7yards" "9ft" "9feet" "11km" "11kilometers" +// "3nm" "3nauticalmiles" "13mm" "13millimeters" "15cm" "15centimeters" +// "17mi" "17miles" "19m" "19meters" +// If the unit cannot be determined, the entire string is parsed and the +// unit of meters is assumed. +// If the number portion cannot be parsed, 0 and the parse error are returned. +func ParseDistance(d string) (float64, error) { + for _, unit := range distanceUnits { + for _, unitSuffix := range unit.suffixes { + if strings.HasSuffix(d, unitSuffix) { + parsedNum, err := strconv.ParseFloat(d[0:len(d)-len(unitSuffix)], 64) + if err != nil { + return 0, err + } + return parsedNum * unit.conv, nil + } + } + } + // no unit matched, try assuming meters? + parsedNum, err := strconv.ParseFloat(d, 64) + if err != nil { + return 0, err + } + return parsedNum, nil +} + +// ParseDistanceUnit attempts to parse a distance unit and return the +// multiplier for converting this to meters. If the unit cannot be parsed +// then 0 and the error message is returned. 
+func ParseDistanceUnit(u string) (float64, error) { + for _, unit := range distanceUnits { + for _, unitSuffix := range unit.suffixes { + if u == unitSuffix { + return unit.conv, nil + } + } + } + return 0, fmt.Errorf("unknown distance unit: %s", u) +} + +// Haversin computes the distance between two points. +// This implemenation uses the sloppy math implemenations which trade off +// accuracy for performance. The distance returned is in kilometers. +func Haversin(lon1, lat1, lon2, lat2 float64) float64 { + x1 := lat1 * degreesToRadian + x2 := lat2 * degreesToRadian + h1 := 1 - cos(x1-x2) + h2 := 1 - cos((lon1-lon2)*degreesToRadian) + h := (h1 + cos(x1)*cos(x2)*h2) / 2 + avgLat := (x1 + x2) / 2 + diameter := earthDiameter(avgLat) + + return diameter * asin(math.Min(1, math.Sqrt(h))) +} diff --git a/vendor/github.com/blevesearch/bleve/geo/geohash.go b/vendor/github.com/blevesearch/bleve/geo/geohash.go new file mode 100644 index 0000000..d3d4dfa --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/geohash.go @@ -0,0 +1,111 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// This implementation is inspired from the geohash-js +// ref: https://github.com/davetroy/geohash-js + +package geo + +// encoding encapsulates an encoding defined by a given base32 alphabet. 
+type encoding struct { + enc string + dec [256]byte +} + +// newEncoding constructs a new encoding defined by the given alphabet, +// which must be a 32-byte string. +func newEncoding(encoder string) *encoding { + e := new(encoding) + e.enc = encoder + for i := 0; i < len(e.dec); i++ { + e.dec[i] = 0xff + } + for i := 0; i < len(encoder); i++ { + e.dec[encoder[i]] = byte(i) + } + return e +} + +// base32encoding with the Geohash alphabet. +var base32encoding = newEncoding("0123456789bcdefghjkmnpqrstuvwxyz") + +var masks = []uint64{16, 8, 4, 2, 1} + +// DecodeGeoHash decodes the string geohash faster with +// higher precision. This api is in experimental phase. +func DecodeGeoHash(geoHash string) (float64, float64) { + even := true + lat := []float64{-90.0, 90.0} + lon := []float64{-180.0, 180.0} + + for i := 0; i < len(geoHash); i++ { + cd := uint64(base32encoding.dec[geoHash[i]]) + for j := 0; j < 5; j++ { + if even { + if cd&masks[j] > 0 { + lon[0] = (lon[0] + lon[1]) / 2 + } else { + lon[1] = (lon[0] + lon[1]) / 2 + } + } else { + if cd&masks[j] > 0 { + lat[0] = (lat[0] + lat[1]) / 2 + } else { + lat[1] = (lat[0] + lat[1]) / 2 + } + } + even = !even + } + } + + return (lat[0] + lat[1]) / 2, (lon[0] + lon[1]) / 2 +} + +func EncodeGeoHash(lat, lon float64) string { + even := true + lats := []float64{-90.0, 90.0} + lons := []float64{-180.0, 180.0} + precision := 12 + var ch, bit uint64 + var geoHash string + + for len(geoHash) < precision { + if even { + mid := (lons[0] + lons[1]) / 2 + if lon > mid { + ch |= masks[bit] + lons[0] = mid + } else { + lons[1] = mid + } + } else { + mid := (lats[0] + lats[1]) / 2 + if lat > mid { + ch |= masks[bit] + lats[0] = mid + } else { + lats[1] = mid + } + } + even = !even + if bit < 4 { + bit++ + } else { + geoHash += string(base32encoding.enc[ch]) + ch = 0 + bit = 0 + } + } + + return geoHash +} diff --git a/vendor/github.com/blevesearch/bleve/geo/parse.go b/vendor/github.com/blevesearch/bleve/geo/parse.go new file mode 
100644 index 0000000..8286805 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/parse.go @@ -0,0 +1,181 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package geo + +import ( + "reflect" + "strconv" + "strings" +) + +// ExtractGeoPoint takes an arbitrary interface{} and tries it's best to +// interpret it is as geo point. Supported formats: +// Container: +// slice length 2 (GeoJSON) +// first element lon, second element lat +// string (coordinates separated by comma, or a geohash) +// first element lat, second element lon +// map[string]interface{} +// exact keys lat and lon or lng +// struct +// w/exported fields case-insensitive match on lat and lon or lng +// struct +// satisfying Later and Loner or Lnger interfaces +// +// in all cases values must be some sort of numeric-like thing: int/uint/float +func ExtractGeoPoint(thing interface{}) (lon, lat float64, success bool) { + var foundLon, foundLat bool + + thingVal := reflect.ValueOf(thing) + if !thingVal.IsValid() { + return lon, lat, false + } + + thingTyp := thingVal.Type() + + // is it a slice + if thingVal.Kind() == reflect.Slice { + // must be length 2 + if thingVal.Len() == 2 { + first := thingVal.Index(0) + if first.CanInterface() { + firstVal := first.Interface() + lon, foundLon = extractNumericVal(firstVal) + } + second := thingVal.Index(1) + if second.CanInterface() { + secondVal := second.Interface() + lat, foundLat = 
extractNumericVal(secondVal) + } + } + } + + // is it a string + if thingVal.Kind() == reflect.String { + geoStr := thingVal.Interface().(string) + if strings.Contains(geoStr, ",") { + // geo point with coordinates split by comma + points := strings.Split(geoStr, ",") + for i, point := range points { + // trim any leading or trailing white spaces + points[i] = strings.TrimSpace(point) + } + if len(points) == 2 { + var err error + lat, err = strconv.ParseFloat(points[0], 64) + if err == nil { + foundLat = true + } + lon, err = strconv.ParseFloat(points[1], 64) + if err == nil { + foundLon = true + } + } + } else { + // geohash + if len(geoStr) <= geoHashMaxLength { + lat, lon = DecodeGeoHash(geoStr) + foundLat = true + foundLon = true + } + } + } + + // is it a map + if l, ok := thing.(map[string]interface{}); ok { + if lval, ok := l["lon"]; ok { + lon, foundLon = extractNumericVal(lval) + } else if lval, ok := l["lng"]; ok { + lon, foundLon = extractNumericVal(lval) + } + if lval, ok := l["lat"]; ok { + lat, foundLat = extractNumericVal(lval) + } + } + + // now try reflection on struct fields + if thingVal.Kind() == reflect.Struct { + for i := 0; i < thingVal.NumField(); i++ { + fieldName := thingTyp.Field(i).Name + if strings.HasPrefix(strings.ToLower(fieldName), "lon") { + if thingVal.Field(i).CanInterface() { + fieldVal := thingVal.Field(i).Interface() + lon, foundLon = extractNumericVal(fieldVal) + } + } + if strings.HasPrefix(strings.ToLower(fieldName), "lng") { + if thingVal.Field(i).CanInterface() { + fieldVal := thingVal.Field(i).Interface() + lon, foundLon = extractNumericVal(fieldVal) + } + } + if strings.HasPrefix(strings.ToLower(fieldName), "lat") { + if thingVal.Field(i).CanInterface() { + fieldVal := thingVal.Field(i).Interface() + lat, foundLat = extractNumericVal(fieldVal) + } + } + } + } + + // last hope, some interfaces + // lon + if l, ok := thing.(loner); ok { + lon = l.Lon() + foundLon = true + } else if l, ok := thing.(lnger); ok { + lon = 
l.Lng() + foundLon = true + } + // lat + if l, ok := thing.(later); ok { + lat = l.Lat() + foundLat = true + } + + return lon, lat, foundLon && foundLat +} + +// extract numeric value (if possible) and returns a float64 +func extractNumericVal(v interface{}) (float64, bool) { + val := reflect.ValueOf(v) + if !val.IsValid() { + return 0, false + } + typ := val.Type() + switch typ.Kind() { + case reflect.Float32, reflect.Float64: + return val.Float(), true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(val.Int()), true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return float64(val.Uint()), true + } + + return 0, false +} + +// various support interfaces which can be used to find lat/lon +type loner interface { + Lon() float64 +} + +type later interface { + Lat() float64 +} + +type lnger interface { + Lng() float64 +} diff --git a/vendor/github.com/blevesearch/bleve/geo/sloppy.go b/vendor/github.com/blevesearch/bleve/geo/sloppy.go new file mode 100644 index 0000000..0ce646d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/geo/sloppy.go @@ -0,0 +1,212 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package geo + +import ( + "math" +) + +var earthDiameterPerLatitude []float64 +var sinTab []float64 +var cosTab []float64 +var asinTab []float64 +var asinDer1DivF1Tab []float64 +var asinDer2DivF2Tab []float64 +var asinDer3DivF3Tab []float64 +var asinDer4DivF4Tab []float64 + +const radiusTabsSize = (1 << 10) + 1 +const radiusDelta = (math.Pi / 2) / (radiusTabsSize - 1) +const radiusIndexer = 1 / radiusDelta +const sinCosTabsSize = (1 << 11) + 1 +const asinTabsSize = (1 << 13) + 1 +const oneDivF2 = 1 / 2.0 +const oneDivF3 = 1 / 6.0 +const oneDivF4 = 1 / 24.0 + +// 1.57079632673412561417e+00 first 33 bits of pi/2 +var pio2Hi = math.Float64frombits(0x3FF921FB54400000) + +// 6.07710050650619224932e-11 pi/2 - PIO2_HI +var pio2Lo = math.Float64frombits(0x3DD0B4611A626331) + +var asinPio2Hi = math.Float64frombits(0x3FF921FB54442D18) // 1.57079632679489655800e+00 +var asinPio2Lo = math.Float64frombits(0x3C91A62633145C07) // 6.12323399573676603587e-17 +var asinPs0 = math.Float64frombits(0x3fc5555555555555) // 1.66666666666666657415e-01 +var asinPs1 = math.Float64frombits(0xbfd4d61203eb6f7d) // -3.25565818622400915405e-01 +var asinPs2 = math.Float64frombits(0x3fc9c1550e884455) // 2.01212532134862925881e-01 +var asinPs3 = math.Float64frombits(0xbfa48228b5688f3b) // -4.00555345006794114027e-02 +var asinPs4 = math.Float64frombits(0x3f49efe07501b288) // 7.91534994289814532176e-04 +var asinPs5 = math.Float64frombits(0x3f023de10dfdf709) // 3.47933107596021167570e-05 +var asinQs1 = math.Float64frombits(0xc0033a271c8a2d4b) // -2.40339491173441421878e+00 +var asinQs2 = math.Float64frombits(0x40002ae59c598ac8) // 2.02094576023350569471e+00 +var asinQs3 = math.Float64frombits(0xbfe6066c1b8d0159) // -6.88283971605453293030e-01 +var asinQs4 = math.Float64frombits(0x3fb3b8c5b12e9282) // 7.70381505559019352791e-02 + +var twoPiHi = 4 * pio2Hi +var twoPiLo = 4 * pio2Lo +var sinCosDeltaHi = twoPiHi/sinCosTabsSize - 1 +var sinCosDeltaLo = twoPiLo/sinCosTabsSize - 1 +var sinCosIndexer = 1 / 
(sinCosDeltaHi + sinCosDeltaLo) +var sinCosMaxValueForIntModulo = ((math.MaxInt64 >> 9) / sinCosIndexer) * 0.99 +var asinMaxValueForTabs = math.Sin(73.0 * degreesToRadian) + +var asinDelta = asinMaxValueForTabs / (asinTabsSize - 1) +var asinIndexer = 1 / asinDelta + +func init() { + // initializes the tables used for the sloppy math functions + + // sin and cos + sinTab = make([]float64, sinCosTabsSize) + cosTab = make([]float64, sinCosTabsSize) + sinCosPiIndex := (sinCosTabsSize - 1) / 2 + sinCosPiMul2Index := 2 * sinCosPiIndex + sinCosPiMul05Index := sinCosPiIndex / 2 + sinCosPiMul15Index := 3 * sinCosPiIndex / 2 + for i := 0; i < sinCosTabsSize; i++ { + // angle: in [0,2*PI]. + angle := float64(i)*sinCosDeltaHi + float64(i)*sinCosDeltaLo + sinAngle := math.Sin(angle) + cosAngle := math.Cos(angle) + // For indexes corresponding to null cosine or sine, we make sure the value is zero + // and not an epsilon. This allows for a much better accuracy for results close to zero. + if i == sinCosPiIndex { + sinAngle = 0.0 + } else if i == sinCosPiMul2Index { + sinAngle = 0.0 + } else if i == sinCosPiMul05Index { + sinAngle = 0.0 + } else if i == sinCosPiMul15Index { + sinAngle = 0.0 + } + sinTab[i] = sinAngle + cosTab[i] = cosAngle + } + + // asin + asinTab = make([]float64, asinTabsSize) + asinDer1DivF1Tab = make([]float64, asinTabsSize) + asinDer2DivF2Tab = make([]float64, asinTabsSize) + asinDer3DivF3Tab = make([]float64, asinTabsSize) + asinDer4DivF4Tab = make([]float64, asinTabsSize) + for i := 0; i < asinTabsSize; i++ { + // x: in [0,ASIN_MAX_VALUE_FOR_TABS]. 
+ x := float64(i) * asinDelta + asinTab[i] = math.Asin(x) + oneMinusXSqInv := 1.0 / (1 - x*x) + oneMinusXSqInv05 := math.Sqrt(oneMinusXSqInv) + oneMinusXSqInv15 := oneMinusXSqInv05 * oneMinusXSqInv + oneMinusXSqInv25 := oneMinusXSqInv15 * oneMinusXSqInv + oneMinusXSqInv35 := oneMinusXSqInv25 * oneMinusXSqInv + asinDer1DivF1Tab[i] = oneMinusXSqInv05 + asinDer2DivF2Tab[i] = (x * oneMinusXSqInv15) * oneDivF2 + asinDer3DivF3Tab[i] = ((1 + 2*x*x) * oneMinusXSqInv25) * oneDivF3 + asinDer4DivF4Tab[i] = ((5 + 2*x*(2+x*(5-2*x))) * oneMinusXSqInv35) * oneDivF4 + } + + // earth radius + a := 6378137.0 + b := 6356752.31420 + a2 := a * a + b2 := b * b + earthDiameterPerLatitude = make([]float64, radiusTabsSize) + earthDiameterPerLatitude[0] = 2.0 * a / 1000 + earthDiameterPerLatitude[radiusTabsSize-1] = 2.0 * b / 1000 + for i := 1; i < radiusTabsSize-1; i++ { + lat := math.Pi * float64(i) / (2*radiusTabsSize - 1) + one := math.Pow(a2*math.Cos(lat), 2) + two := math.Pow(b2*math.Sin(lat), 2) + three := math.Pow(float64(a)*math.Cos(lat), 2) + four := math.Pow(b*math.Sin(lat), 2) + radius := math.Sqrt((one + two) / (three + four)) + earthDiameterPerLatitude[i] = 2 * radius / 1000 + } +} + +// earthDiameter returns an estimation of the earth's diameter at the specified +// latitude in kilometers +func earthDiameter(lat float64) float64 { + index := math.Mod(math.Abs(lat)*radiusIndexer+0.5, float64(len(earthDiameterPerLatitude))) + if math.IsNaN(index) { + return 0 + } + return earthDiameterPerLatitude[int(index)] +} + +var pio2 = math.Pi / 2 + +func sin(a float64) float64 { + return cos(a - pio2) +} + +// cos is a sloppy math (faster) implementation of math.Cos +func cos(a float64) float64 { + if a < 0.0 { + a = -a + } + if a > sinCosMaxValueForIntModulo { + return math.Cos(a) + } + // index: possibly outside tables range. + index := int(a*sinCosIndexer + 0.5) + delta := (a - float64(index)*sinCosDeltaHi) - float64(index)*sinCosDeltaLo + // Making sure index is within tables range. 
+ // Last value of each table is the same than first, so we ignore it (tabs size minus one) for modulo. + index &= (sinCosTabsSize - 2) // index % (SIN_COS_TABS_SIZE-1) + indexCos := cosTab[index] + indexSin := sinTab[index] + return indexCos + delta*(-indexSin+delta*(-indexCos*oneDivF2+delta*(indexSin*oneDivF3+delta*indexCos*oneDivF4))) +} + +// asin is a sloppy math (faster) implementation of math.Asin +func asin(a float64) float64 { + var negateResult bool + if a < 0 { + a = -a + negateResult = true + } + if a <= asinMaxValueForTabs { + index := int(a*asinIndexer + 0.5) + delta := a - float64(index)*asinDelta + result := asinTab[index] + delta*(asinDer1DivF1Tab[index]+delta*(asinDer2DivF2Tab[index]+delta*(asinDer3DivF3Tab[index]+delta*asinDer4DivF4Tab[index]))) + if negateResult { + return -result + } + return result + } + // value > ASIN_MAX_VALUE_FOR_TABS, or value is NaN + // This part is derived from fdlibm. + if a < 1 { + t := (1.0 - a) * 0.5 + p := t * (asinPs0 + t*(asinPs1+t*(asinPs2+t*(asinPs3+t*(asinPs4+t+asinPs5))))) + q := 1.0 + t*(asinQs1+t*(asinQs2+t*(asinQs3+t*asinQs4))) + s := math.Sqrt(t) + z := s + s*(p/q) + result := asinPio2Hi - ((z + z) - asinPio2Lo) + if negateResult { + return -result + } + return result + } + // value >= 1.0, or value is NaN + if a == 1.0 { + if negateResult { + return -math.Pi / 2 + } + return math.Pi / 2 + } + return math.NaN() +} diff --git a/vendor/github.com/blevesearch/bleve/index.go b/vendor/github.com/blevesearch/bleve/index.go new file mode 100644 index 0000000..974358b --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index.go @@ -0,0 +1,309 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +import ( + "context" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/size" +) + +// A Batch groups together multiple Index and Delete +// operations you would like performed at the same +// time. The Batch structure is NOT thread-safe. +// You should only perform operations on a batch +// from a single thread at a time. Once batch +// execution has started, you may not modify it. +type Batch struct { + index Index + internal *index.Batch + + lastDocSize uint64 + totalSize uint64 +} + +// Index adds the specified index operation to the +// batch. NOTE: the bleve Index is not updated +// until the batch is executed. +func (b *Batch) Index(id string, data interface{}) error { + if id == "" { + return ErrorEmptyID + } + doc := document.NewDocument(id) + err := b.index.Mapping().MapDocument(doc, data) + if err != nil { + return err + } + b.internal.Update(doc) + + b.lastDocSize = uint64(doc.Size() + + len(id) + size.SizeOfString) // overhead from internal + b.totalSize += b.lastDocSize + + return nil +} + +func (b *Batch) LastDocSize() uint64 { + return b.lastDocSize +} + +func (b *Batch) TotalDocsSize() uint64 { + return b.totalSize +} + +// IndexAdvanced adds the specified index operation to the +// batch which skips the mapping. NOTE: the bleve Index is not updated +// until the batch is executed. 
+func (b *Batch) IndexAdvanced(doc *document.Document) (err error) { + if doc.ID == "" { + return ErrorEmptyID + } + b.internal.Update(doc) + return nil +} + +// Delete adds the specified delete operation to the +// batch. NOTE: the bleve Index is not updated until +// the batch is executed. +func (b *Batch) Delete(id string) { + if id != "" { + b.internal.Delete(id) + } +} + +// SetInternal adds the specified set internal +// operation to the batch. NOTE: the bleve Index is +// not updated until the batch is executed. +func (b *Batch) SetInternal(key, val []byte) { + b.internal.SetInternal(key, val) +} + +// DeleteInternal adds the specified delete internal +// operation to the batch. NOTE: the bleve Index is +// not updated until the batch is executed. +func (b *Batch) DeleteInternal(key []byte) { + b.internal.DeleteInternal(key) +} + +// Size returns the total number of operations inside the batch +// including normal index operations and internal operations. +func (b *Batch) Size() int { + return len(b.internal.IndexOps) + len(b.internal.InternalOps) +} + +// String prints a user friendly string representation of what +// is inside this batch. +func (b *Batch) String() string { + return b.internal.String() +} + +// Reset returns a Batch to the empty state so that it can +// be re-used in the future. +func (b *Batch) Reset() { + b.internal.Reset() + b.lastDocSize = 0 + b.totalSize = 0 +} + +func (b *Batch) Merge(o *Batch) { + if o != nil && o.internal != nil { + b.internal.Merge(o.internal) + if o.LastDocSize() > 0 { + b.lastDocSize = o.LastDocSize() + } + b.totalSize = uint64(b.internal.TotalDocSize()) + } +} + +func (b *Batch) SetPersistedCallback(f index.BatchCallback) { + b.internal.SetPersistedCallback(f) +} + +func (b *Batch) PersistedCallback() index.BatchCallback { + return b.internal.PersistedCallback() +} + +// An Index implements all the indexing and searching +// capabilities of bleve. An Index can be created +// using the New() and Open() methods. 
+// +// Index() takes an input value, deduces a DocumentMapping for its type, +// assigns string paths to its fields or values then applies field mappings on +// them. +// +// The DocumentMapping used to index a value is deduced by the following rules: +// 1) If value implements mapping.bleveClassifier interface, resolve the mapping +// from BleveType(). +// 2) If value implements mapping.Classifier interface, resolve the mapping +// from Type(). +// 3) If value has a string field or value at IndexMapping.TypeField. +// (defaulting to "_type"), use it to resolve the mapping. Fields addressing +// is described below. +// 4) If IndexMapping.DefaultType is registered, return it. +// 5) Return IndexMapping.DefaultMapping. +// +// Each field or nested field of the value is identified by a string path, then +// mapped to one or several FieldMappings which extract the result for analysis. +// +// Struct values fields are identified by their "json:" tag, or by their name. +// Nested fields are identified by prefixing with their parent identifier, +// separated by a dot. +// +// Map values entries are identified by their string key. Entries not indexed +// by strings are ignored. Entry values are identified recursively like struct +// fields. +// +// Slice and array values are identified by their field name. Their elements +// are processed sequentially with the same FieldMapping. +// +// String, float64 and time.Time values are identified by their field name. +// Other types are ignored. +// +// Each value identifier is decomposed in its parts and recursively address +// SubDocumentMappings in the tree starting at the root DocumentMapping. If a +// mapping is found, all its FieldMappings are applied to the value. If no +// mapping is found and the root DocumentMapping is dynamic, default mappings +// are used based on value type and IndexMapping default configurations. +// +// Finally, mapped values are analyzed, indexed or stored. 
See +// FieldMapping.Analyzer to know how an analyzer is resolved for a given field. +// +// Examples: +// +// type Date struct { +// Day string `json:"day"` +// Month string +// Year string +// } +// +// type Person struct { +// FirstName string `json:"first_name"` +// LastName string +// BirthDate Date `json:"birth_date"` +// } +// +// A Person value FirstName is mapped by the SubDocumentMapping at +// "first_name". Its LastName is mapped by the one at "LastName". The day of +// BirthDate is mapped to the SubDocumentMapping "day" of the root +// SubDocumentMapping "birth_date". It will appear as the "birth_date.day" +// field in the index. The month is mapped to "birth_date.Month". +type Index interface { + // Index analyzes, indexes or stores mapped data fields. Supplied + // identifier is bound to analyzed data and will be retrieved by search + // requests. See Index interface documentation for details about mapping + // rules. + Index(id string, data interface{}) error + Delete(id string) error + + NewBatch() *Batch + Batch(b *Batch) error + + // Document returns specified document or nil if the document is not + // indexed or stored. + Document(id string) (*document.Document, error) + // DocCount returns the number of documents in the index. 
+ DocCount() (uint64, error) + + Search(req *SearchRequest) (*SearchResult, error) + SearchInContext(ctx context.Context, req *SearchRequest) (*SearchResult, error) + + Fields() ([]string, error) + + FieldDict(field string) (index.FieldDict, error) + FieldDictRange(field string, startTerm []byte, endTerm []byte) (index.FieldDict, error) + FieldDictPrefix(field string, termPrefix []byte) (index.FieldDict, error) + + Close() error + + Mapping() mapping.IndexMapping + + Stats() *IndexStat + StatsMap() map[string]interface{} + + GetInternal(key []byte) ([]byte, error) + SetInternal(key, val []byte) error + DeleteInternal(key []byte) error + + // Name returns the name of the index (by default this is the path) + Name() string + // SetName lets you assign your own logical name to this index + SetName(string) + + // Advanced returns the indexer and data store, exposing lower level + // methods to enumerate records and access data. + Advanced() (index.Index, store.KVStore, error) +} + +// New index at the specified path, must not exist. +// The provided mapping will be used for all +// Index/Search operations. +func New(path string, mapping mapping.IndexMapping) (Index, error) { + return newIndexUsing(path, mapping, Config.DefaultIndexType, Config.DefaultKVStore, nil) +} + +// NewMemOnly creates a memory-only index. +// The contents of the index is NOT persisted, +// and will be lost once closed. +// The provided mapping will be used for all +// Index/Search operations. +func NewMemOnly(mapping mapping.IndexMapping) (Index, error) { + return newIndexUsing("", mapping, Config.DefaultIndexType, Config.DefaultMemKVStore, nil) +} + +// NewUsing creates index at the specified path, +// which must not already exist. +// The provided mapping will be used for all +// Index/Search operations. +// The specified index type will be used. +// The specified kvstore implementation will be used +// and the provided kvconfig will be passed to its +// constructor. 
Note that currently the values of kvconfig must +// be able to be marshaled and unmarshaled using the encoding/json library (used +// when reading/writing the index metadata file). +func NewUsing(path string, mapping mapping.IndexMapping, indexType string, kvstore string, kvconfig map[string]interface{}) (Index, error) { + return newIndexUsing(path, mapping, indexType, kvstore, kvconfig) +} + +// Open index at the specified path, must exist. +// The mapping used when it was created will be used for all Index/Search operations. +func Open(path string) (Index, error) { + return openIndexUsing(path, nil) +} + +// OpenUsing opens index at the specified path, must exist. +// The mapping used when it was created will be used for all Index/Search operations. +// The provided runtimeConfig can override settings +// persisted when the kvstore was created. +func OpenUsing(path string, runtimeConfig map[string]interface{}) (Index, error) { + return openIndexUsing(path, runtimeConfig) +} + +// Builder is a limited interface, used to build indexes in an offline mode. +// Items cannot be updated or deleted, and the caller MUST ensure a document is +// indexed only once. +type Builder interface { + Index(id string, data interface{}) error + Close() error +} + +// NewBuilder creates a builder, which will build an index at the specified path, +// using the specified mapping and options. +func NewBuilder(path string, mapping mapping.IndexMapping, config map[string]interface{}) (Builder, error) { + return newBuilder(path, mapping, config) +} diff --git a/vendor/github.com/blevesearch/bleve/index/analysis.go b/vendor/github.com/blevesearch/bleve/index/analysis.go new file mode 100644 index 0000000..82883af --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/analysis.go @@ -0,0 +1,110 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package index + +import ( + "reflect" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeAnalysisResult int + +func init() { + var ar AnalysisResult + reflectStaticSizeAnalysisResult = int(reflect.TypeOf(ar).Size()) +} + +type IndexRow interface { + KeySize() int + KeyTo([]byte) (int, error) + Key() []byte + + ValueSize() int + ValueTo([]byte) (int, error) + Value() []byte +} + +type AnalysisResult struct { + DocID string + Rows []IndexRow + + // scorch + Document *document.Document + Analyzed []analysis.TokenFrequencies + Length []int +} + +func (a *AnalysisResult) Size() int { + rv := reflectStaticSizeAnalysisResult + for _, analyzedI := range a.Analyzed { + rv += analyzedI.Size() + } + rv += len(a.Length) * size.SizeOfInt + return rv +} + +type AnalysisWork struct { + i Index + d *document.Document + rc chan *AnalysisResult +} + +func NewAnalysisWork(i Index, d *document.Document, rc chan *AnalysisResult) *AnalysisWork { + return &AnalysisWork{ + i: i, + d: d, + rc: rc, + } +} + +type AnalysisQueue struct { + queue chan *AnalysisWork + done chan struct{} +} + +func (q *AnalysisQueue) Queue(work *AnalysisWork) { + q.queue <- work +} + +func (q *AnalysisQueue) Close() { + close(q.done) +} + +func NewAnalysisQueue(numWorkers int) *AnalysisQueue { + rv := AnalysisQueue{ + queue: make(chan *AnalysisWork), + done: make(chan struct{}), + } + for i := 0; i < numWorkers; i++ { + go AnalysisWorker(rv) + } + return &rv +} + +func AnalysisWorker(q 
AnalysisQueue) { + // read work off the queue + for { + select { + case <-q.done: + return + case w := <-q.queue: + r := w.i.Analyze(w.d) + w.rc <- r + } + } +} diff --git a/vendor/github.com/blevesearch/bleve/index/field_cache.go b/vendor/github.com/blevesearch/bleve/index/field_cache.go new file mode 100644 index 0000000..9354081 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/field_cache.go @@ -0,0 +1,88 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package index + +import ( + "sync" +) + +type FieldCache struct { + fieldIndexes map[string]uint16 + indexFields []string + lastFieldIndex int + mutex sync.RWMutex +} + +func NewFieldCache() *FieldCache { + return &FieldCache{ + fieldIndexes: make(map[string]uint16), + lastFieldIndex: -1, + } +} + +func (f *FieldCache) AddExisting(field string, index uint16) { + f.mutex.Lock() + f.addLOCKED(field, index) + f.mutex.Unlock() +} + +func (f *FieldCache) addLOCKED(field string, index uint16) uint16 { + f.fieldIndexes[field] = index + if len(f.indexFields) < int(index)+1 { + prevIndexFields := f.indexFields + f.indexFields = make([]string, int(index)+16) + copy(f.indexFields, prevIndexFields) + } + f.indexFields[int(index)] = field + if int(index) > f.lastFieldIndex { + f.lastFieldIndex = int(index) + } + return index +} + +// FieldNamed returns the index of the field, and whether or not it existed +// before this call. 
if createIfMissing is true, and new field index is assigned +// but the second return value will still be false +func (f *FieldCache) FieldNamed(field string, createIfMissing bool) (uint16, bool) { + f.mutex.RLock() + if index, ok := f.fieldIndexes[field]; ok { + f.mutex.RUnlock() + return index, true + } else if !createIfMissing { + f.mutex.RUnlock() + return 0, false + } + // trade read lock for write lock + f.mutex.RUnlock() + f.mutex.Lock() + // need to check again with write lock + if index, ok := f.fieldIndexes[field]; ok { + f.mutex.Unlock() + return index, true + } + // assign next field id + index := f.addLOCKED(field, uint16(f.lastFieldIndex+1)) + f.mutex.Unlock() + return index, false +} + +func (f *FieldCache) FieldIndexed(index uint16) (field string) { + f.mutex.RLock() + if int(index) < len(f.indexFields) { + field = f.indexFields[int(index)] + } + f.mutex.RUnlock() + return field +} diff --git a/vendor/github.com/blevesearch/bleve/index/index.go b/vendor/github.com/blevesearch/bleve/index/index.go new file mode 100644 index 0000000..551f8de --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/index.go @@ -0,0 +1,376 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package index + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTermFieldDoc int +var reflectStaticSizeTermFieldVector int + +func init() { + var tfd TermFieldDoc + reflectStaticSizeTermFieldDoc = int(reflect.TypeOf(tfd).Size()) + var tfv TermFieldVector + reflectStaticSizeTermFieldVector = int(reflect.TypeOf(tfv).Size()) +} + +var ErrorUnknownStorageType = fmt.Errorf("unknown storage type") + +type Index interface { + Open() error + Close() error + + Update(doc *document.Document) error + Delete(id string) error + Batch(batch *Batch) error + + SetInternal(key, val []byte) error + DeleteInternal(key []byte) error + + // Reader returns a low-level accessor on the index data. Close it to + // release associated resources. + Reader() (IndexReader, error) + + Stats() json.Marshaler + StatsMap() map[string]interface{} + + Analyze(d *document.Document) *AnalysisResult + + Advanced() (store.KVStore, error) +} + +type DocumentFieldTermVisitor func(field string, term []byte) + +type IndexReader interface { + TermFieldReader(term []byte, field string, includeFreq, includeNorm, includeTermVectors bool) (TermFieldReader, error) + + // DocIDReader returns an iterator over all doc ids + // The caller must close returned instance to release associated resources. 
+ DocIDReaderAll() (DocIDReader, error) + + DocIDReaderOnly(ids []string) (DocIDReader, error) + + FieldDict(field string) (FieldDict, error) + + // FieldDictRange is currently defined to include the start and end terms + FieldDictRange(field string, startTerm []byte, endTerm []byte) (FieldDict, error) + FieldDictPrefix(field string, termPrefix []byte) (FieldDict, error) + + Document(id string) (*document.Document, error) + DocumentVisitFieldTerms(id IndexInternalID, fields []string, visitor DocumentFieldTermVisitor) error + + DocValueReader(fields []string) (DocValueReader, error) + + Fields() ([]string, error) + + GetInternal(key []byte) ([]byte, error) + + DocCount() (uint64, error) + + ExternalID(id IndexInternalID) (string, error) + InternalID(id string) (IndexInternalID, error) + + DumpAll() chan interface{} + DumpDoc(id string) chan interface{} + DumpFields() chan interface{} + + Close() error +} + +// The Regexp interface defines the subset of the regexp.Regexp API +// methods that are used by bleve indexes, allowing callers to pass in +// alternate implementations. 
+type Regexp interface { + FindStringIndex(s string) (loc []int) + + LiteralPrefix() (prefix string, complete bool) + + String() string +} + +type IndexReaderRegexp interface { + FieldDictRegexp(field string, regex string) (FieldDict, error) +} + +type IndexReaderFuzzy interface { + FieldDictFuzzy(field string, term string, fuzziness int, prefix string) (FieldDict, error) +} + +type IndexReaderOnly interface { + FieldDictOnly(field string, onlyTerms [][]byte, includeCount bool) (FieldDict, error) +} + +type IndexReaderContains interface { + FieldDictContains(field string) (FieldDictContains, error) +} + +// FieldTerms contains the terms used by a document, keyed by field +type FieldTerms map[string][]string + +// FieldsNotYetCached returns a list of fields not yet cached out of a larger list of fields +func (f FieldTerms) FieldsNotYetCached(fields []string) []string { + rv := make([]string, 0, len(fields)) + for _, field := range fields { + if _, ok := f[field]; !ok { + rv = append(rv, field) + } + } + return rv +} + +// Merge will combine two FieldTerms +// it assumes that the terms lists are complete (thus do not need to be merged) +// field terms from the other list always replace the ones in the receiver +func (f FieldTerms) Merge(other FieldTerms) { + for field, terms := range other { + f[field] = terms + } +} + +type TermFieldVector struct { + Field string + ArrayPositions []uint64 + Pos uint64 + Start uint64 + End uint64 +} + +func (tfv *TermFieldVector) Size() int { + return reflectStaticSizeTermFieldVector + size.SizeOfPtr + + len(tfv.Field) + len(tfv.ArrayPositions)*size.SizeOfUint64 +} + +// IndexInternalID is an opaque document identifier interal to the index impl +type IndexInternalID []byte + +func (id IndexInternalID) Equals(other IndexInternalID) bool { + return id.Compare(other) == 0 +} + +func (id IndexInternalID) Compare(other IndexInternalID) int { + return bytes.Compare(id, other) +} + +type TermFieldDoc struct { + Term string + ID 
IndexInternalID + Freq uint64 + Norm float64 + Vectors []*TermFieldVector +} + +func (tfd *TermFieldDoc) Size() int { + sizeInBytes := reflectStaticSizeTermFieldDoc + size.SizeOfPtr + + len(tfd.Term) + len(tfd.ID) + + for _, entry := range tfd.Vectors { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +// Reset allows an already allocated TermFieldDoc to be reused +func (tfd *TermFieldDoc) Reset() *TermFieldDoc { + // remember the []byte used for the ID + id := tfd.ID + vectors := tfd.Vectors + // idiom to copy over from empty TermFieldDoc (0 allocations) + *tfd = TermFieldDoc{} + // reuse the []byte already allocated (and reset len to 0) + tfd.ID = id[:0] + tfd.Vectors = vectors[:0] + return tfd +} + +// TermFieldReader is the interface exposing the enumeration of documents +// containing a given term in a given field. Documents are returned in byte +// lexicographic order over their identifiers. +type TermFieldReader interface { + // Next returns the next document containing the term in this field, or nil + // when it reaches the end of the enumeration. The preAlloced TermFieldDoc + // is optional, and when non-nil, will be used instead of allocating memory. + Next(preAlloced *TermFieldDoc) (*TermFieldDoc, error) + + // Advance resets the enumeration at specified document or its immediate + // follower. + Advance(ID IndexInternalID, preAlloced *TermFieldDoc) (*TermFieldDoc, error) + + // Count returns the number of documents contains the term in this field. + Count() uint64 + Close() error + + Size() int +} + +type DictEntry struct { + Term string + Count uint64 +} + +type FieldDict interface { + Next() (*DictEntry, error) + Close() error +} + +type FieldDictContains interface { + Contains(key []byte) (bool, error) +} + +// DocIDReader is the interface exposing enumeration of documents identifiers. +// Close the reader to release associated resources. 
+type DocIDReader interface { + // Next returns the next document internal identifier in the natural + // index order, nil when the end of the sequence is reached. + Next() (IndexInternalID, error) + + // Advance resets the iteration to the first internal identifier greater than + // or equal to ID. If ID is smaller than the start of the range, the iteration + // will start there instead. If ID is greater than or equal to the end of + // the range, Next() call will return io.EOF. + Advance(ID IndexInternalID) (IndexInternalID, error) + + Size() int + + Close() error +} + +type BatchCallback func(error) + +type Batch struct { + IndexOps map[string]*document.Document + InternalOps map[string][]byte + persistedCallback BatchCallback +} + +func NewBatch() *Batch { + return &Batch{ + IndexOps: make(map[string]*document.Document), + InternalOps: make(map[string][]byte), + } +} + +func (b *Batch) Update(doc *document.Document) { + b.IndexOps[doc.ID] = doc +} + +func (b *Batch) Delete(id string) { + b.IndexOps[id] = nil +} + +func (b *Batch) SetInternal(key, val []byte) { + b.InternalOps[string(key)] = val +} + +func (b *Batch) DeleteInternal(key []byte) { + b.InternalOps[string(key)] = nil +} + +func (b *Batch) SetPersistedCallback(f BatchCallback) { + b.persistedCallback = f +} + +func (b *Batch) PersistedCallback() BatchCallback { + return b.persistedCallback +} + +func (b *Batch) String() string { + rv := fmt.Sprintf("Batch (%d ops, %d internal ops)\n", len(b.IndexOps), len(b.InternalOps)) + for k, v := range b.IndexOps { + if v != nil { + rv += fmt.Sprintf("\tINDEX - '%s'\n", k) + } else { + rv += fmt.Sprintf("\tDELETE - '%s'\n", k) + } + } + for k, v := range b.InternalOps { + if v != nil { + rv += fmt.Sprintf("\tSET INTERNAL - '%s'\n", k) + } else { + rv += fmt.Sprintf("\tDELETE INTERNAL - '%s'\n", k) + } + } + return rv +} + +func (b *Batch) Reset() { + b.IndexOps = make(map[string]*document.Document) + b.InternalOps = make(map[string][]byte) + b.persistedCallback 
= nil +} + +func (b *Batch) Merge(o *Batch) { + for k, v := range o.IndexOps { + b.IndexOps[k] = v + } + for k, v := range o.InternalOps { + b.InternalOps[k] = v + } +} + +func (b *Batch) TotalDocSize() int { + var s int + for k, v := range b.IndexOps { + if v != nil { + s += v.Size() + size.SizeOfString + } + s += len(k) + } + return s +} + +// Optimizable represents an optional interface that implementable by +// optimizable resources (e.g., TermFieldReaders, Searchers). These +// optimizable resources are provided the same OptimizableContext +// instance, so that they can coordinate via dynamic interface +// casting. +type Optimizable interface { + Optimize(kind string, octx OptimizableContext) (OptimizableContext, error) +} + +// Represents a result of optimization -- see the Finish() method. +type Optimized interface{} + +type OptimizableContext interface { + // Once all the optimzable resources have been provided the same + // OptimizableContext instance, the optimization preparations are + // finished or completed via the Finish() method. + // + // Depending on the optimization being performed, the Finish() + // method might return a non-nil Optimized instance. For example, + // the Optimized instance might represent an optimized + // TermFieldReader instance. 
+ Finish() (Optimized, error) +} + +type DocValueReader interface { + VisitDocValues(id IndexInternalID, visitor DocumentFieldTermVisitor) error +} + +// IndexBuilder is an interface supported by some index schemes +// to allow direct write-only index building +type IndexBuilder interface { + Index(doc *document.Document) error + Close() error +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/README.md b/vendor/github.com/blevesearch/bleve/index/scorch/README.md new file mode 100644 index 0000000..9794aed --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/README.md @@ -0,0 +1,367 @@ +# scorch + +## Definitions + +Batch +- A collection of Documents to mutate in the index. + +Document +- Has a unique identifier (arbitrary bytes). +- Is comprised of a list of fields. + +Field +- Has a name (string). +- Has a type (text, number, date, geopoint). +- Has a value (depending on type). +- Can be indexed, stored, or both. +- If indexed, can be analyzed. +- If indexed, can optionally store term vectors. + +## Scope + +Scorch *MUST* implement the bleve.index API without requiring any changes to this API. + +Scorch *MAY* introduce new interfaces, which can be discovered to allow use of new capabilities not in the current API. + +## Implementation + +The scorch implementation starts with the concept of a segmented index. + +A segment is simply a slice, subset, or portion of the entire index. A segmented index is one which is composed of one or more segments. Although segments are created in a particular order, knowing this ordering is not required to achieve correct semantics when querying. Because there is no ordering, this means that when searching an index, you can (and should) search all the segments concurrently. + +### Internal Wrapper + +In order to accommodate the existing APIs while also improving the implementation, the scorch implementation includes some wrapper functionality that must be described.
+ +#### \_id field + +In scorch, field 0 is prearranged to be named \_id. All documents have a value for this field, which is the documents external identifier. In this version the field *MUST* be both indexed AND stored. The scorch wrapper adds this field, as it will not be present in the Document from the calling bleve code. + +NOTE: If a document already contains a field \_id, it will be replaced. If this is problematic, the caller must ensure such a scenario does not happen. + +### Proposed Structures + +``` +type Segment interface { + + Dictionary(field string) TermDictionary + +} + +type TermDictionary interface { + + PostingsList(term string, excluding PostingsList) PostingsList + +} + +type PostingsList interface { + + Next() Posting + + And(other PostingsList) PostingsList + Or(other PostingsList) PostingsList + +} + +type Posting interface { + Number() uint64 + + Frequency() uint64 + Norm() float64 + + Locations() Locations +} + +type Locations interface { + Start() uint64 + End() uint64 + Pos() uint64 + ArrayPositions() ... +} + +type DeletedDocs { + +} + +type SegmentSnapshot struct { + segment Segment + deleted PostingsList +} + +type IndexSnapshot struct { + segment []SegmentSnapshot +} +``` +**What about errors?** +**What about memory mgmnt or context?** +**Postings List separate iterator to separate stateful from stateless** +### Mutating the Index + +The bleve.index API has methods for directly making individual mutations (Update/Delete/SetInternal/DeleteInternal), however for this first implementation, we assume that all of these calls can simply be turned into a Batch of size 1. This may be highly inefficient, but it will be correct. This decision is made based on the fact that Couchbase FTS always uses Batches. + +NOTE: As a side-effect of this decision, it should be clear that performance tuning may depend on the batch size, which may in-turn require changes in FTS. + +From this point forward, only Batch mutations will be discussed. 
+ +Sequence of Operations: + +1. For each document in the batch, search through all existing segments. The goal is to build up a per-segment bitset which tells us which documents in that segment are obsoleted by the addition of the new segment we're currently building. NOTE: we're not ready for this change to take effect yet, so rather than this operation mutating anything, they simply return bitsets, which we can apply later. Logically, this is something like: + + ``` + foreach segment { + dict := segment.Dictionary("\_id") + postings := empty postings list + foreach docID { + postings = postings.Or(dict.PostingsList(docID, nil)) + } + } + ``` + + NOTE: it is illustrated above as nested for loops, but some or all of these could be concurrently. The end result is that for each segment, we have (possibly empty) bitset. + +2. Also concurrent with 1, the documents in the batch are analyzed. This analysis proceeds using the existing analyzer pool. + +3. (after 2 completes) Analyzed documents are fed into a function which builds a new Segment representing this information. + +4. We now have everything we need to update the state of the system to include this new snapshot. + + - Acquire a lock + - Create a new IndexSnapshot + - For each SegmentSnapshot in the IndexSnapshot, take the deleted PostingsList and OR it with the new postings list for this Segment. Construct a new SegmentSnapshot for the segment using this new deleted PostingsList. Append this SegmentSnapshot to the IndexSnapshot. + - Create a new SegmentSnapshot wrapping our new segment with nil deleted docs. 
+ - Append the new SegmentSnapshot to the IndexSnapshot + - Release the lock + +An ASCII art example: + ``` + 0 - Empty Index + + No segments + + IndexSnapshot + segments [] + deleted [] + + + 1 - Index Batch [ A B C ] + + segment 0 + numbers [ 1 2 3 ] + \_id [ A B C ] + + IndexSnapshot + segments [ 0 ] + deleted [ nil ] + + + 2 - Index Batch [ B' ] + + segment 0 1 + numbers [ 1 2 3 ] [ 1 ] + \_id [ A B C ] [ B ] + + Compute bitset segment-0-deleted-by-1: + [ 0 1 0 ] + + OR it with previous (nil) (call it 0-1) + [ 0 1 0 ] + + IndexSnapshot + segments [ 0 1 ] + deleted [ 0-1 nil ] + + 3 - Index Batch [ C' ] + + segment 0 1 2 + numbers [ 1 2 3 ] [ 1 ] [ 1 ] + \_id [ A B C ] [ B ] [ C ] + + Compute bitset segment-0-deleted-by-2: + [ 0 0 1 ] + + OR it with previous ([ 0 1 0 ]) (call it 0-12) + [ 0 1 1 ] + + Compute bitset segment-1-deleted-by-2: + [ 0 ] + + OR it with previous (nil) + still just nil + + + IndexSnapshot + segments [ 0 1 2 ] + deleted [ 0-12 nil nil ] + ``` + +**is there opportunity to stop early when doc is found in one segment** +**also, more efficient way to find bits for long lists of ids?** + +### Searching + +In the bleve.index API all searching starts by getting an IndexReader, which represents a snapshot of the index at a point in time. + +As described in the section above, our index implementation maintains a pointer to the current IndexSnapshot. When a caller gets an IndexReader, they get a copy of this pointer, and can use it as long as they like. The IndexSnapshot contains SegmentSnapshots, which only contain pointers to immutable segments. The deleted posting lists associated with a segment change over time, but the particular deleted posting list in YOUR snapshot is immutable. This gives a stable view of the data. + +#### Term Search + +Term search is the only searching primitive exposed in today's bleve.index API. 
This ultimately could limit our ability to take advantage of the indexing improvements, but it also means it will be easier to get a first version of this working. + +A term search for term T in field F will look something like this: + +``` + searchResultPostings = empty + foreach segment { + dict := segment.Dictionary(F) + segmentResultPostings = dict.PostingsList(T, segmentSnapshotDeleted) + // make segmentLocal numbers into global numbers, and flip bits in searchResultPostings + } +``` + +The searchResultPostings will be a new implementation of the TermFieldReader interface. + +As a reminder this interface is: + +``` +// TermFieldReader is the interface exposing the enumeration of documents +// containing a given term in a given field. Documents are returned in byte +// lexicographic order over their identifiers. +type TermFieldReader interface { + // Next returns the next document containing the term in this field, or nil + // when it reaches the end of the enumeration. The preAlloced TermFieldDoc + // is optional, and when non-nil, will be used instead of allocating memory. + Next(preAlloced *TermFieldDoc) (*TermFieldDoc, error) + + // Advance resets the enumeration at specified document or its immediate + // follower. + Advance(ID IndexInternalID, preAlloced *TermFieldDoc) (*TermFieldDoc, error) + + // Count returns the number of documents contains the term in this field. + Count() uint64 + Close() error +} +``` + +At first glance this appears problematic, we have no way to return documents in order of their identifiers. But it turns out the wording of this is perhaps too strong, or a bit ambiguous. Originally, this referred to the external identifiers, but with the introduction of a distinction between internal/external identifiers, returning them in order of their internal identifiers is also acceptable.
**ASIDE**: the reason for this is that most callers just use Next() and literally don't care what the order is, they could be in any order and it would be fine. There is only one search that cares and that is the ConjunctionSearcher, which relies on Next/Advance having very specific semantics. Later in this document we will have a proposal to split into multiple interfaces: + +- The weakest interface, only supports Next() no ordering at all. +- Ordered, supporting Advance() +- And/Or'able capable of internally efficiently doing these ops with like interfaces (if not capable then can always fall back to external walking) + +But, the good news is that we don't even have to do that for our first implementation. As long as the global numbers we use for internal identifiers are consistent within this IndexSnapshot, then Next() will be ordered by ascending document number, and Advance() will still work correctly. + +NOTE: there is another place where we rely on the ordering of these hits, and that is in the "\_id" sort order. Previously this was the natural order, and a NOOP for the collector, now it must be implemented by actually sorting on the "\_id" field. We probably should introduce at least a marker interface to detect this. + +An ASCII art example: + +``` +Let's start with the IndexSnapshot we ended with earlier: + +3 - Index Batch [ C' ] + + segment 0 1 2 + numbers [ 1 2 3 ] [ 1 ] [ 1 ] + \_id [ A B C ] [ B ] [ C ] + + Compute bitset segment-0-deleted-by-2: + [ 0 0 1 ] + + OR it with previous ([ 0 1 0 ]) (call it 0-12) + [ 0 1 1 ] + +Compute bitset segment-1-deleted-by-2: + [ 0 0 0 ] + +OR it with previous (nil) + still just nil + + + IndexSnapshot + segments [ 0 1 2 ] + deleted [ 0-12 nil nil ] + +Now let's search for the term 'cat' in the field 'desc' and let's assume that Document C (both versions) would match it. 
+ +Concurrently: + + - Segment 0 + - Get Term Dictionary For Field 'desc' + - From it get Postings List for term 'cat' EXCLUDING 0-12 + - raw segment matches [ 0 0 1 ] but excluding [ 0 1 1 ] gives [ 0 0 0 ] + - Segment 1 + - Get Term Dictionary For Field 'desc' + - From it get Postings List for term 'cat' excluding nil + - [ 0 ] + - Segment 2 + - Get Term Dictionary For Field 'desc' + - From it get Postings List for term 'cat' excluding nil + - [ 1 ] + +Map local bitsets into global number space (global meaning cross-segment but still unique to this snapshot) + +IndexSnapshot already should have mapping something like: +0 - Offset 0 +1 - Offset 3 (because segment 0 had 3 docs) +2 - Offset 4 (because segment 1 had 1 doc) + +This maps to search result bitset: + +[ 0 0 0 0 1] + +Caller would call Next() and get doc number 5 (assuming 1 based indexing for now) + +Caller could then ask to get term locations, stored fields, external doc ID for document number 5. Internally in the IndexSnapshot, we can now convert that back, and realize doc number 5 comes from segment 2, 5-4=1 so we're looking for doc number 1 in segment 2. That happens to be C... + +``` + +#### Future improvements + +In the future, interfaces to detect these non-serially operating TermFieldReaders could expose their own And() and Or() up to the higher level Conjunction/Disjunction searchers. Doing this alone offers some win, but also means there would be greater burden on the Searcher code rewriting logical expressions for maximum performance. + +Another related topic is that of peak memory usage. With serially operating TermFieldReaders it was necessary to start them all at the same time and operate in unison. However, with these non-serially operating TermFieldReaders we have the option of doing a few at a time, consolidating them, dispoting the intermediaries, and then doing a few more. For very complex queries with many clauses this could reduce peak memory usage. 
+ + +### Memory Tracking + +All segments must be able to produce two statistics, an estimate of their explicit memory usage, and their actual size on disk (if any). For in-memory segments, disk usage could be zero, and the memory usage represents the entire information content. For mmap-based disk segments, the memory could be as low as the size of tracking structure itself (say just a few pointers). + +This would allow the implementation to throttle or block incoming mutations when a threshold memory usage has (or would be) exceeded. + +### Persistence + +Obviously, we want to support (but maybe not require) asynchronous persistence of segments. My expectation is that segments are initially built in memory. At some point they are persisted to disk. This poses some interesting challenges. + +At runtime, the state of an index (it's IndexSnapshot) is not only the contents of the segments, but also the bitmasks of deleted documents. These bitmasks indirectly encode an ordering in which the segments were added. The reason is that the bitmasks encode which items have been obsoleted by other (subsequent or more future) segments. In the runtime implementation we compute bitmask deltas and then merge them at the same time we bring the new segment in. One idea is that we could take a similar approach on disk. When we persist a segment, we persist the bitmask deltas of segments known to exist at that time, and eventually these can get merged up into a base segment deleted bitmask. + +This also relates to the topic rollback, addressed next... + + +### Rollback + +One desirable property in the Couchbase ecosystem is the ability to rollback to some previous (though typically not long ago) state. One idea for keeping this property in this design is to protect some of the most recent segments from merging. Then, if necessary, they could be "undone" to reveal previous states of the system. In these scenarios "undone" has to properly undo the deleted bitmasks on the other segments. 
Again, the current thinking is that rather than "undo" anything, it could be work that was deferred in the first place, thus making it easier to logically undo. + +Another possibly related approach would be to tie this into our existing snapshot mechanism. Perhaps simulating a slow reader (holding onto index snapshots) for some period of time, can be the mechanism to achieve the desired end goal. + + +### Internal Storage + +The bleve.index API has support for "internal storage". The ability to store information under a separate name space. + +This is not used for high volume storage, so it is tempting to think we could just put a small k/v store alongside the rest of the index. But, the reality is that this storage is used to maintain key information related to the rollback scenario. Because of this, its crucial that ordering and overwriting of key/value pairs correspond with actual segment persistence in the index. Based on this, I believe its important to put the internal key/value pairs inside the segments themselves. But, this also means that they must follow a similar "deleted" bitmask approach to obsolete values in older segments. But, this also seems to substantially increase the complexity of the solution because of the separate name space, it would appear to require its own bitmask. Further keys aren't numeric, which then implies yet another mapping from internal key to number, etc. + +More thought is required here. + +### Merging + +The segmented index approach requires merging to prevent the number of segments from growing too large. + +Recent experience with LSMs has taught us that having the correct merge strategy can make a huge difference in the overall performance of the system. In particular, a simple merge strategy which merges segments too aggressively can lead to high write amplification and unnecessarily rendering cached data useless. + +A few simple principles have been identified. 
+ +- Roughly we merge multiple smaller segments into a single larger one. +- The larger a segment gets the less likely we should be to ever merge it. +- Segments with large numbers of deleted/obsoleted items are good candidates as the merge will result in a space savings. +- Segments with all items deleted/obsoleted can be dropped. + +Merging of a segment should be able to proceed even if that segment is held by an ongoing snapshot, it should only delay the removal of it. diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/builder.go b/vendor/github.com/blevesearch/bleve/index/scorch/builder.go new file mode 100644 index 0000000..1f4b41d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/builder.go @@ -0,0 +1,334 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorch + +import ( + "fmt" + "io/ioutil" + "os" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + bolt "go.etcd.io/bbolt" +) + +const DefaultBuilderBatchSize = 1000 +const DefaultBuilderMergeMax = 10 + +type Builder struct { + m sync.Mutex + segCount uint64 + path string + buildPath string + segPaths []string + batchSize int + mergeMax int + batch *index.Batch + internal map[string][]byte + segPlugin segment.Plugin +} + +func NewBuilder(config map[string]interface{}) (*Builder, error) { + path, ok := config["path"].(string) + if !ok { + return nil, fmt.Errorf("must specify path") + } + + buildPathPrefix, _ := config["buildPathPrefix"].(string) + buildPath, err := ioutil.TempDir(buildPathPrefix, "scorch-offline-build") + if err != nil { + return nil, err + } + + rv := &Builder{ + path: path, + buildPath: buildPath, + mergeMax: DefaultBuilderMergeMax, + batchSize: DefaultBuilderBatchSize, + batch: index.NewBatch(), + segPlugin: defaultSegmentPlugin, + } + + err = rv.parseConfig(config) + if err != nil { + return nil, fmt.Errorf("error parsing builder config: %v", err) + } + + return rv, nil +} + +func (o *Builder) parseConfig(config map[string]interface{}) (err error) { + if v, ok := config["mergeMax"]; ok { + var t int + if t, err = parseToInteger(v); err != nil { + return fmt.Errorf("mergeMax parse err: %v", err) + } + if t > 0 { + o.mergeMax = t + } + } + + if v, ok := config["batchSize"]; ok { + var t int + if t, err = parseToInteger(v); err != nil { + return fmt.Errorf("batchSize parse err: %v", err) + } + if t > 0 { + o.batchSize = t + } + } + + if v, ok := config["internal"]; ok { + if vinternal, ok := v.(map[string][]byte); ok { + o.internal = vinternal + } + } + + forcedSegmentType, forcedSegmentVersion, err := configForceSegmentTypeVersion(config) + if err != nil { + return err + } + if forcedSegmentType != "" && 
forcedSegmentVersion != 0 { + segPlugin, err := chooseSegmentPlugin(forcedSegmentType, + uint32(forcedSegmentVersion)) + if err != nil { + return err + } + o.segPlugin = segPlugin + } + + return nil +} + +// Index will place the document into the index. +// It is invalid to index the same document multiple times. +func (o *Builder) Index(doc *document.Document) error { + o.m.Lock() + defer o.m.Unlock() + + o.batch.Update(doc) + + return o.maybeFlushBatchLOCKED(o.batchSize) +} + +func (o *Builder) maybeFlushBatchLOCKED(moreThan int) error { + if len(o.batch.IndexOps) >= moreThan { + defer o.batch.Reset() + return o.executeBatchLOCKED(o.batch) + } + return nil +} + +func (o *Builder) executeBatchLOCKED(batch *index.Batch) (err error) { + analysisResults := make([]*index.AnalysisResult, 0, len(batch.IndexOps)) + for _, doc := range batch.IndexOps { + if doc != nil { + // insert _id field + doc.AddField(document.NewTextFieldCustom("_id", nil, []byte(doc.ID), document.IndexField|document.StoreField, nil)) + // perform analysis directly + analysisResult := analyze(doc) + analysisResults = append(analysisResults, analysisResult) + } + } + + seg, _, err := o.segPlugin.New(analysisResults) + if err != nil { + return fmt.Errorf("error building segment base: %v", err) + } + + filename := zapFileName(o.segCount) + o.segCount++ + path := o.buildPath + string(os.PathSeparator) + filename + + if segUnpersisted, ok := seg.(segment.UnpersistedSegment); ok { + err = segUnpersisted.Persist(path) + if err != nil { + return fmt.Errorf("error persisting segment base to %s: %v", path, err) + } + + o.segPaths = append(o.segPaths, path) + return nil + } + + return fmt.Errorf("new segment does not implement unpersisted: %T", seg) +} + +func (o *Builder) doMerge() error { + // as long as we have more than 1 segment, keep merging + for len(o.segPaths) > 1 { + + // merge the next number of segments into one new one + // or, if there are fewer than remaining, merge them all + mergeCount := 
o.mergeMax + if mergeCount > len(o.segPaths) { + mergeCount = len(o.segPaths) + } + + mergePaths := o.segPaths[0:mergeCount] + o.segPaths = o.segPaths[mergeCount:] + + // open each of the segments to be merged + mergeSegs := make([]segment.Segment, 0, mergeCount) + + // closeOpenedSegs attempts to close all opened + // segments even if an error occurs, in which case + // the first error is returned + closeOpenedSegs := func() error { + var err error + for _, seg := range mergeSegs { + clErr := seg.Close() + if clErr != nil && err == nil { + err = clErr + } + } + return err + } + + for _, mergePath := range mergePaths { + seg, err := o.segPlugin.Open(mergePath) + if err != nil { + _ = closeOpenedSegs() + return fmt.Errorf("error opening segment (%s) for merge: %v", mergePath, err) + } + mergeSegs = append(mergeSegs, seg) + } + + // do the merge + mergedSegPath := o.buildPath + string(os.PathSeparator) + zapFileName(o.segCount) + drops := make([]*roaring.Bitmap, mergeCount) + _, _, err := o.segPlugin.Merge(mergeSegs, drops, mergedSegPath, nil, nil) + if err != nil { + _ = closeOpenedSegs() + return fmt.Errorf("error merging segments (%v): %v", mergePaths, err) + } + o.segCount++ + o.segPaths = append(o.segPaths, mergedSegPath) + + // close segments opened for merge + err = closeOpenedSegs() + if err != nil { + return fmt.Errorf("error closing opened segments: %v", err) + } + + // remove merged segments + for _, mergePath := range mergePaths { + err = os.RemoveAll(mergePath) + if err != nil { + return fmt.Errorf("error removing segment %s after merge: %v", mergePath, err) + } + } + } + + return nil +} + +func (o *Builder) Close() error { + o.m.Lock() + defer o.m.Unlock() + + // see if there is a partial batch + err := o.maybeFlushBatchLOCKED(1) + if err != nil { + return fmt.Errorf("error flushing batch before close: %v", err) + } + + // perform all the merging + err = o.doMerge() + if err != nil { + return fmt.Errorf("error while merging: %v", err) + } + + // ensure 
the store path exists + err = os.MkdirAll(o.path, 0700) + if err != nil { + return err + } + + // move final segment into place + // segment id 2 is chosen to match the behavior of a scorch + // index which indexes a single batch of data + finalSegPath := o.path + string(os.PathSeparator) + zapFileName(2) + err = os.Rename(o.segPaths[0], finalSegPath) + if err != nil { + return fmt.Errorf("error moving final segment into place: %v", err) + } + + // remove the buildPath, as it is no longer needed + err = os.RemoveAll(o.buildPath) + if err != nil { + return fmt.Errorf("error removing build path: %v", err) + } + + // prepare wrapping + seg, err := o.segPlugin.Open(finalSegPath) + if err != nil { + return fmt.Errorf("error opening final segment") + } + + // create a segment snapshot for this segment + ss := &SegmentSnapshot{ + segment: seg, + } + is := &IndexSnapshot{ + epoch: 3, // chosen to match scorch behavior when indexing a single batch + segment: []*SegmentSnapshot{ss}, + creator: "scorch-builder", + internal: o.internal, + } + + // create the root bolt + rootBoltPath := o.path + string(os.PathSeparator) + "root.bolt" + rootBolt, err := bolt.Open(rootBoltPath, 0600, nil) + if err != nil { + return err + } + + // start a write transaction + tx, err := rootBolt.Begin(true) + if err != nil { + return err + } + + // fill the root bolt with this fake index snapshot + _, _, err = prepareBoltSnapshot(is, tx, o.path, o.segPlugin) + if err != nil { + _ = tx.Rollback() + _ = rootBolt.Close() + return fmt.Errorf("error preparing bolt snapshot in root.bolt: %v", err) + } + + // commit bolt data + err = tx.Commit() + if err != nil { + _ = rootBolt.Close() + return fmt.Errorf("error committing bolt tx in root.bolt: %v", err) + } + + // close bolt + err = rootBolt.Close() + if err != nil { + return fmt.Errorf("error closing root.bolt: %v", err) + } + + // close final segment + err = seg.Close() + if err != nil { + return fmt.Errorf("error closing final segment: %v", err) + } + 
return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/event.go b/vendor/github.com/blevesearch/bleve/index/scorch/event.go new file mode 100644 index 0000000..8f3fc19 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/event.go @@ -0,0 +1,64 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorch + +import "time" + +// RegistryAsyncErrorCallbacks should be treated as read-only after +// process init()'ialization. +var RegistryAsyncErrorCallbacks = map[string]func(error){} + +// RegistryEventCallbacks should be treated as read-only after +// process init()'ialization. +var RegistryEventCallbacks = map[string]func(Event){} + +// Event represents the information provided in an OnEvent() callback. +type Event struct { + Kind EventKind + Scorch *Scorch + Duration time.Duration +} + +// EventKind represents an event code for OnEvent() callbacks. +type EventKind int + +// EventKindCloseStart is fired when a Scorch.Close() has begun. +var EventKindCloseStart = EventKind(1) + +// EventKindClose is fired when a scorch index has been fully closed. +var EventKindClose = EventKind(2) + +// EventKindMergerProgress is fired when the merger has completed a +// round of merge processing. +var EventKindMergerProgress = EventKind(3) + +// EventKindPersisterProgress is fired when the persister has completed +// a round of persistence processing. 
+var EventKindPersisterProgress = EventKind(4) + +// EventKindBatchIntroductionStart is fired when Batch() is invoked which +// introduces a new segment. +var EventKindBatchIntroductionStart = EventKind(5) + +// EventKindBatchIntroduction is fired when Batch() completes. +var EventKindBatchIntroduction = EventKind(6) + +// EventKindMergeTaskIntroductionStart is fired when the merger is about to +// start the introduction of merged segment from a single merge task. +var EventKindMergeTaskIntroductionStart = EventKind(7) + +// EventKindMergeTaskIntroduction is fired when the merger has completed +// the introduction of merged segment from a single merge task. +var EventKindMergeTaskIntroduction = EventKind(8) diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/introducer.go b/vendor/github.com/blevesearch/bleve/index/scorch/introducer.go new file mode 100644 index 0000000..7770c41 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/introducer.go @@ -0,0 +1,449 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorch + +import ( + "fmt" + "sync/atomic" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" +) + +type segmentIntroduction struct { + id uint64 + data segment.Segment + obsoletes map[uint64]*roaring.Bitmap + ids []string + internal map[string][]byte + + applied chan error + persisted chan error + persistedCallback index.BatchCallback +} + +type persistIntroduction struct { + persisted map[uint64]segment.Segment + applied notificationChan +} + +type epochWatcher struct { + epoch uint64 + notifyCh notificationChan +} + +func (s *Scorch) introducerLoop() { + var epochWatchers []*epochWatcher +OUTER: + for { + atomic.AddUint64(&s.stats.TotIntroduceLoop, 1) + + select { + case <-s.closeCh: + break OUTER + + case epochWatcher := <-s.introducerNotifier: + epochWatchers = append(epochWatchers, epochWatcher) + + case nextMerge := <-s.merges: + s.introduceMerge(nextMerge) + + case next := <-s.introductions: + err := s.introduceSegment(next) + if err != nil { + continue OUTER + } + + case persist := <-s.persists: + s.introducePersist(persist) + + } + + var epochCurr uint64 + s.rootLock.RLock() + if s.root != nil { + epochCurr = s.root.epoch + } + s.rootLock.RUnlock() + var epochWatchersNext []*epochWatcher + for _, w := range epochWatchers { + if w.epoch < epochCurr { + close(w.notifyCh) + } else { + epochWatchersNext = append(epochWatchersNext, w) + } + } + epochWatchers = epochWatchersNext + } + + s.asyncTasks.Done() +} + +func (s *Scorch) introduceSegment(next *segmentIntroduction) error { + atomic.AddUint64(&s.stats.TotIntroduceSegmentBeg, 1) + defer atomic.AddUint64(&s.stats.TotIntroduceSegmentEnd, 1) + + s.rootLock.RLock() + root := s.root + root.AddRef() + s.rootLock.RUnlock() + + defer func() { _ = root.DecRef() }() + + nsegs := len(root.segment) + + // prepare new index snapshot + newSnapshot := &IndexSnapshot{ + parent: s, + segment: make([]*SegmentSnapshot, 0, nsegs+1), + 
offsets: make([]uint64, 0, nsegs+1), + internal: make(map[string][]byte, len(root.internal)), + refs: 1, + creator: "introduceSegment", + } + + // iterate through current segments + var running uint64 + var docsToPersistCount, memSegments, fileSegments uint64 + for i := range root.segment { + // see if optimistic work included this segment + delta, ok := next.obsoletes[root.segment[i].id] + if !ok { + var err error + delta, err = root.segment[i].segment.DocNumbers(next.ids) + if err != nil { + next.applied <- fmt.Errorf("error computing doc numbers: %v", err) + close(next.applied) + _ = newSnapshot.DecRef() + return err + } + } + + newss := &SegmentSnapshot{ + id: root.segment[i].id, + segment: root.segment[i].segment, + cachedDocs: root.segment[i].cachedDocs, + creator: root.segment[i].creator, + } + + // apply new obsoletions + if root.segment[i].deleted == nil { + newss.deleted = delta + } else { + newss.deleted = roaring.Or(root.segment[i].deleted, delta) + } + if newss.deleted.IsEmpty() { + newss.deleted = nil + } + + // check for live size before copying + if newss.LiveSize() > 0 { + newSnapshot.segment = append(newSnapshot.segment, newss) + root.segment[i].segment.AddRef() + newSnapshot.offsets = append(newSnapshot.offsets, running) + running += newss.segment.Count() + } + + if isMemorySegment(root.segment[i]) { + docsToPersistCount += root.segment[i].Count() + memSegments++ + } else { + fileSegments++ + } + } + + atomic.StoreUint64(&s.stats.TotItemsToPersist, docsToPersistCount) + atomic.StoreUint64(&s.stats.TotMemorySegmentsAtRoot, memSegments) + atomic.StoreUint64(&s.stats.TotFileSegmentsAtRoot, fileSegments) + + // append new segment, if any, to end of the new index snapshot + if next.data != nil { + newSegmentSnapshot := &SegmentSnapshot{ + id: next.id, + segment: next.data, // take ownership of next.data's ref-count + cachedDocs: &cachedDocs{cache: nil}, + creator: "introduceSegment", + } + newSnapshot.segment = append(newSnapshot.segment, 
newSegmentSnapshot) + newSnapshot.offsets = append(newSnapshot.offsets, running) + + // increment numItemsIntroduced which tracks the number of items + // queued for persistence. + atomic.AddUint64(&s.stats.TotIntroducedItems, newSegmentSnapshot.Count()) + atomic.AddUint64(&s.stats.TotIntroducedSegmentsBatch, 1) + } + // copy old values + for key, oldVal := range root.internal { + newSnapshot.internal[key] = oldVal + } + // set new values and apply deletes + for key, newVal := range next.internal { + if newVal != nil { + newSnapshot.internal[key] = newVal + } else { + delete(newSnapshot.internal, key) + } + } + + newSnapshot.updateSize() + s.rootLock.Lock() + if next.persisted != nil { + s.rootPersisted = append(s.rootPersisted, next.persisted) + } + if next.persistedCallback != nil { + s.persistedCallbacks = append(s.persistedCallbacks, next.persistedCallback) + } + // swap in new index snapshot + newSnapshot.epoch = s.nextSnapshotEpoch + s.nextSnapshotEpoch++ + rootPrev := s.root + s.root = newSnapshot + atomic.StoreUint64(&s.stats.CurRootEpoch, s.root.epoch) + // release lock + s.rootLock.Unlock() + + if rootPrev != nil { + _ = rootPrev.DecRef() + } + + close(next.applied) + + return nil +} + +func (s *Scorch) introducePersist(persist *persistIntroduction) { + atomic.AddUint64(&s.stats.TotIntroducePersistBeg, 1) + defer atomic.AddUint64(&s.stats.TotIntroducePersistEnd, 1) + + s.rootLock.Lock() + root := s.root + root.AddRef() + nextSnapshotEpoch := s.nextSnapshotEpoch + s.nextSnapshotEpoch++ + s.rootLock.Unlock() + + defer func() { _ = root.DecRef() }() + + newIndexSnapshot := &IndexSnapshot{ + parent: s, + epoch: nextSnapshotEpoch, + segment: make([]*SegmentSnapshot, len(root.segment)), + offsets: make([]uint64, len(root.offsets)), + internal: make(map[string][]byte, len(root.internal)), + refs: 1, + creator: "introducePersist", + } + + var docsToPersistCount, memSegments, fileSegments uint64 + for i, segmentSnapshot := range root.segment { + // see if this 
segment has been replaced + if replacement, ok := persist.persisted[segmentSnapshot.id]; ok { + newSegmentSnapshot := &SegmentSnapshot{ + id: segmentSnapshot.id, + segment: replacement, + deleted: segmentSnapshot.deleted, + cachedDocs: segmentSnapshot.cachedDocs, + creator: "introducePersist", + } + newIndexSnapshot.segment[i] = newSegmentSnapshot + delete(persist.persisted, segmentSnapshot.id) + + // update items persisted incase of a new segment snapshot + atomic.AddUint64(&s.stats.TotPersistedItems, newSegmentSnapshot.Count()) + atomic.AddUint64(&s.stats.TotPersistedSegments, 1) + fileSegments++ + } else { + newIndexSnapshot.segment[i] = root.segment[i] + newIndexSnapshot.segment[i].segment.AddRef() + + if isMemorySegment(root.segment[i]) { + docsToPersistCount += root.segment[i].Count() + memSegments++ + } else { + fileSegments++ + } + } + newIndexSnapshot.offsets[i] = root.offsets[i] + } + + for k, v := range root.internal { + newIndexSnapshot.internal[k] = v + } + + atomic.StoreUint64(&s.stats.TotItemsToPersist, docsToPersistCount) + atomic.StoreUint64(&s.stats.TotMemorySegmentsAtRoot, memSegments) + atomic.StoreUint64(&s.stats.TotFileSegmentsAtRoot, fileSegments) + newIndexSnapshot.updateSize() + s.rootLock.Lock() + rootPrev := s.root + s.root = newIndexSnapshot + atomic.StoreUint64(&s.stats.CurRootEpoch, s.root.epoch) + s.rootLock.Unlock() + + if rootPrev != nil { + _ = rootPrev.DecRef() + } + + close(persist.applied) +} + +// The introducer should definitely handle the segmentMerge.notify +// channel before exiting the introduceMerge. 
+func (s *Scorch) introduceMerge(nextMerge *segmentMerge) { + atomic.AddUint64(&s.stats.TotIntroduceMergeBeg, 1) + defer atomic.AddUint64(&s.stats.TotIntroduceMergeEnd, 1) + + s.rootLock.RLock() + root := s.root + root.AddRef() + s.rootLock.RUnlock() + + defer func() { _ = root.DecRef() }() + + newSnapshot := &IndexSnapshot{ + parent: s, + internal: root.internal, + refs: 1, + creator: "introduceMerge", + } + + // iterate through current segments + newSegmentDeleted := roaring.NewBitmap() + var running, docsToPersistCount, memSegments, fileSegments uint64 + for i := range root.segment { + segmentID := root.segment[i].id + if segSnapAtMerge, ok := nextMerge.old[segmentID]; ok { + // this segment is going away, see if anything else was deleted since we started the merge + if segSnapAtMerge != nil && root.segment[i].deleted != nil { + // assume all these deletes are new + deletedSince := root.segment[i].deleted + // if we already knew about some of them, remove + if segSnapAtMerge.deleted != nil { + deletedSince = roaring.AndNot(root.segment[i].deleted, segSnapAtMerge.deleted) + } + deletedSinceItr := deletedSince.Iterator() + for deletedSinceItr.HasNext() { + oldDocNum := deletedSinceItr.Next() + newDocNum := nextMerge.oldNewDocNums[segmentID][oldDocNum] + newSegmentDeleted.Add(uint32(newDocNum)) + } + } + // clean up the old segment map to figure out the + // obsolete segments wrt root in meantime, whatever + // segments left behind in old map after processing + // the root segments would be the obsolete segment set + delete(nextMerge.old, segmentID) + } else if root.segment[i].LiveSize() > 0 { + // this segment is staying + newSnapshot.segment = append(newSnapshot.segment, &SegmentSnapshot{ + id: root.segment[i].id, + segment: root.segment[i].segment, + deleted: root.segment[i].deleted, + cachedDocs: root.segment[i].cachedDocs, + creator: root.segment[i].creator, + }) + root.segment[i].segment.AddRef() + newSnapshot.offsets = append(newSnapshot.offsets, running) + 
running += root.segment[i].segment.Count() + + if isMemorySegment(root.segment[i]) { + docsToPersistCount += root.segment[i].Count() + memSegments++ + } else { + fileSegments++ + } + } + + } + + // before the newMerge introduction, need to clean the newly + // merged segment wrt the current root segments, hence + // applying the obsolete segment contents to newly merged segment + for segID, ss := range nextMerge.old { + obsoleted := ss.DocNumbersLive() + if obsoleted != nil { + obsoletedIter := obsoleted.Iterator() + for obsoletedIter.HasNext() { + oldDocNum := obsoletedIter.Next() + newDocNum := nextMerge.oldNewDocNums[segID][oldDocNum] + newSegmentDeleted.Add(uint32(newDocNum)) + } + } + } + var skipped bool + // In case where all the docs in the newly merged segment getting + // deleted by the time we reach here, can skip the introduction. + if nextMerge.new != nil && + nextMerge.new.Count() > newSegmentDeleted.GetCardinality() { + // put new segment at end + newSnapshot.segment = append(newSnapshot.segment, &SegmentSnapshot{ + id: nextMerge.id, + segment: nextMerge.new, // take ownership for nextMerge.new's ref-count + deleted: newSegmentDeleted, + cachedDocs: &cachedDocs{cache: nil}, + creator: "introduceMerge", + }) + newSnapshot.offsets = append(newSnapshot.offsets, running) + atomic.AddUint64(&s.stats.TotIntroducedSegmentsMerge, 1) + + switch nextMerge.new.(type) { + case segment.PersistedSegment: + fileSegments++ + default: + docsToPersistCount += nextMerge.new.Count() - newSegmentDeleted.GetCardinality() + memSegments++ + } + } else { + skipped = true + atomic.AddUint64(&s.stats.TotFileMergeIntroductionsObsoleted, 1) + } + + atomic.StoreUint64(&s.stats.TotItemsToPersist, docsToPersistCount) + atomic.StoreUint64(&s.stats.TotMemorySegmentsAtRoot, memSegments) + atomic.StoreUint64(&s.stats.TotFileSegmentsAtRoot, fileSegments) + + newSnapshot.AddRef() // 1 ref for the nextMerge.notify response + + newSnapshot.updateSize() + s.rootLock.Lock() + // swap in new 
index snapshot + newSnapshot.epoch = s.nextSnapshotEpoch + s.nextSnapshotEpoch++ + rootPrev := s.root + s.root = newSnapshot + atomic.StoreUint64(&s.stats.CurRootEpoch, s.root.epoch) + // release lock + s.rootLock.Unlock() + + if rootPrev != nil { + _ = rootPrev.DecRef() + } + + // notify requester that we incorporated this + nextMerge.notifyCh <- &mergeTaskIntroStatus{ + indexSnapshot: newSnapshot, + skipped: skipped} + close(nextMerge.notifyCh) +} + +func isMemorySegment(s *SegmentSnapshot) bool { + switch s.segment.(type) { + case segment.PersistedSegment: + return false + default: + return true + } +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/merge.go b/vendor/github.com/blevesearch/bleve/index/scorch/merge.go new file mode 100644 index 0000000..56c0953 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/merge.go @@ -0,0 +1,504 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorch + +import ( + "context" + "encoding/json" + "fmt" + "os" + "strings" + "sync/atomic" + "time" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/mergeplan" + "github.com/blevesearch/bleve/index/scorch/segment" +) + +func (s *Scorch) mergerLoop() { + var lastEpochMergePlanned uint64 + var ctrlMsg *mergerCtrl + mergePlannerOptions, err := s.parseMergePlannerOptions() + if err != nil { + s.fireAsyncError(fmt.Errorf("mergePlannerOption json parsing err: %v", err)) + s.asyncTasks.Done() + return + } + ctrlMsgDflt := &mergerCtrl{ctx: context.Background(), + options: mergePlannerOptions, + doneCh: nil} + +OUTER: + for { + atomic.AddUint64(&s.stats.TotFileMergeLoopBeg, 1) + + select { + case <-s.closeCh: + break OUTER + + default: + // check to see if there is a new snapshot to persist + s.rootLock.Lock() + ourSnapshot := s.root + ourSnapshot.AddRef() + atomic.StoreUint64(&s.iStats.mergeSnapshotSize, uint64(ourSnapshot.Size())) + atomic.StoreUint64(&s.iStats.mergeEpoch, ourSnapshot.epoch) + s.rootLock.Unlock() + + if ctrlMsg == nil && ourSnapshot.epoch != lastEpochMergePlanned { + ctrlMsg = ctrlMsgDflt + } + if ctrlMsg != nil { + startTime := time.Now() + + // lets get started + err := s.planMergeAtSnapshot(ctrlMsg.ctx, ctrlMsg.options, + ourSnapshot) + if err != nil { + atomic.StoreUint64(&s.iStats.mergeEpoch, 0) + if err == segment.ErrClosed { + // index has been closed + _ = ourSnapshot.DecRef() + + // continue the workloop on a user triggered cancel + if ctrlMsg.doneCh != nil { + close(ctrlMsg.doneCh) + ctrlMsg = nil + continue OUTER + } + + // exit the workloop on index closure + ctrlMsg = nil + break OUTER + } + s.fireAsyncError(fmt.Errorf("merging err: %v", err)) + _ = ourSnapshot.DecRef() + atomic.AddUint64(&s.stats.TotFileMergeLoopErr, 1) + continue OUTER + } + + if ctrlMsg.doneCh != nil { + close(ctrlMsg.doneCh) + } + ctrlMsg = nil + + lastEpochMergePlanned = ourSnapshot.epoch + + 
atomic.StoreUint64(&s.stats.LastMergedEpoch, ourSnapshot.epoch) + + s.fireEvent(EventKindMergerProgress, time.Since(startTime)) + } + _ = ourSnapshot.DecRef() + + // tell the persister we're waiting for changes + // first make a epochWatcher chan + ew := &epochWatcher{ + epoch: lastEpochMergePlanned, + notifyCh: make(notificationChan, 1), + } + + // give it to the persister + select { + case <-s.closeCh: + break OUTER + case s.persisterNotifier <- ew: + case ctrlMsg = <-s.forceMergeRequestCh: + continue OUTER + } + + // now wait for persister (but also detect close) + select { + case <-s.closeCh: + break OUTER + case <-ew.notifyCh: + case ctrlMsg = <-s.forceMergeRequestCh: + } + } + + atomic.AddUint64(&s.stats.TotFileMergeLoopEnd, 1) + } + + s.asyncTasks.Done() +} + +type mergerCtrl struct { + ctx context.Context + options *mergeplan.MergePlanOptions + doneCh chan struct{} +} + +// ForceMerge helps users trigger a merge operation on +// an online scorch index. +func (s *Scorch) ForceMerge(ctx context.Context, + mo *mergeplan.MergePlanOptions) error { + // check whether force merge is already under processing + s.rootLock.Lock() + if s.stats.TotFileMergeForceOpsStarted > + s.stats.TotFileMergeForceOpsCompleted { + s.rootLock.Unlock() + return fmt.Errorf("force merge already in progress") + } + + s.stats.TotFileMergeForceOpsStarted++ + s.rootLock.Unlock() + + if mo != nil { + err := mergeplan.ValidateMergePlannerOptions(mo) + if err != nil { + return err + } + } else { + // assume the default single segment merge policy + mo = &mergeplan.SingleSegmentMergePlanOptions + } + msg := &mergerCtrl{options: mo, + doneCh: make(chan struct{}), + ctx: ctx, + } + + // request the merger perform a force merge + select { + case s.forceMergeRequestCh <- msg: + case <-s.closeCh: + return nil + } + + // wait for the force merge operation completion + select { + case <-msg.doneCh: + atomic.AddUint64(&s.stats.TotFileMergeForceOpsCompleted, 1) + case <-s.closeCh: + } + + return nil +} 
+ +func (s *Scorch) parseMergePlannerOptions() (*mergeplan.MergePlanOptions, + error) { + mergePlannerOptions := mergeplan.DefaultMergePlanOptions + if v, ok := s.config["scorchMergePlanOptions"]; ok { + b, err := json.Marshal(v) + if err != nil { + return &mergePlannerOptions, err + } + + err = json.Unmarshal(b, &mergePlannerOptions) + if err != nil { + return &mergePlannerOptions, err + } + + err = mergeplan.ValidateMergePlannerOptions(&mergePlannerOptions) + if err != nil { + return nil, err + } + } + return &mergePlannerOptions, nil +} + +type closeChWrapper struct { + ch1 chan struct{} + ctx context.Context + closeCh chan struct{} +} + +func newCloseChWrapper(ch1 chan struct{}, + ctx context.Context) *closeChWrapper { + return &closeChWrapper{ch1: ch1, + ctx: ctx, + closeCh: make(chan struct{})} +} + +func (w *closeChWrapper) close() { + select { + case <-w.closeCh: + default: + close(w.closeCh) + } +} + +func (w *closeChWrapper) listen() { + select { + case <-w.ch1: + w.close() + case <-w.ctx.Done(): + w.close() + case <-w.closeCh: + } +} + +func (s *Scorch) planMergeAtSnapshot(ctx context.Context, + options *mergeplan.MergePlanOptions, ourSnapshot *IndexSnapshot) error { + // build list of persisted segments in this snapshot + var onlyPersistedSnapshots []mergeplan.Segment + for _, segmentSnapshot := range ourSnapshot.segment { + if _, ok := segmentSnapshot.segment.(segment.PersistedSegment); ok { + onlyPersistedSnapshots = append(onlyPersistedSnapshots, segmentSnapshot) + } + } + + atomic.AddUint64(&s.stats.TotFileMergePlan, 1) + + // give this list to the planner + resultMergePlan, err := mergeplan.Plan(onlyPersistedSnapshots, options) + if err != nil { + atomic.AddUint64(&s.stats.TotFileMergePlanErr, 1) + return fmt.Errorf("merge planning err: %v", err) + } + if resultMergePlan == nil { + // nothing to do + atomic.AddUint64(&s.stats.TotFileMergePlanNone, 1) + return nil + } + atomic.AddUint64(&s.stats.TotFileMergePlanOk, 1) + + 
atomic.AddUint64(&s.stats.TotFileMergePlanTasks, uint64(len(resultMergePlan.Tasks))) + + // process tasks in serial for now + var filenames []string + + cw := newCloseChWrapper(s.closeCh, ctx) + defer cw.close() + + go cw.listen() + + for _, task := range resultMergePlan.Tasks { + if len(task.Segments) == 0 { + atomic.AddUint64(&s.stats.TotFileMergePlanTasksSegmentsEmpty, 1) + continue + } + + atomic.AddUint64(&s.stats.TotFileMergePlanTasksSegments, uint64(len(task.Segments))) + + oldMap := make(map[uint64]*SegmentSnapshot) + newSegmentID := atomic.AddUint64(&s.nextSegmentID, 1) + segmentsToMerge := make([]segment.Segment, 0, len(task.Segments)) + docsToDrop := make([]*roaring.Bitmap, 0, len(task.Segments)) + + for _, planSegment := range task.Segments { + if segSnapshot, ok := planSegment.(*SegmentSnapshot); ok { + oldMap[segSnapshot.id] = segSnapshot + if persistedSeg, ok := segSnapshot.segment.(segment.PersistedSegment); ok { + if segSnapshot.LiveSize() == 0 { + atomic.AddUint64(&s.stats.TotFileMergeSegmentsEmpty, 1) + oldMap[segSnapshot.id] = nil + } else { + segmentsToMerge = append(segmentsToMerge, segSnapshot.segment) + docsToDrop = append(docsToDrop, segSnapshot.deleted) + } + // track the files getting merged for unsetting the + // removal ineligibility. This helps to unflip files + // even with fast merger, slow persister work flows. 
+ path := persistedSeg.Path() + filenames = append(filenames, + strings.TrimPrefix(path, s.path+string(os.PathSeparator))) + } + } + } + + var oldNewDocNums map[uint64][]uint64 + var seg segment.Segment + var filename string + if len(segmentsToMerge) > 0 { + filename = zapFileName(newSegmentID) + s.markIneligibleForRemoval(filename) + path := s.path + string(os.PathSeparator) + filename + + fileMergeZapStartTime := time.Now() + + atomic.AddUint64(&s.stats.TotFileMergeZapBeg, 1) + newDocNums, _, err := s.segPlugin.Merge(segmentsToMerge, docsToDrop, path, + cw.closeCh, s) + atomic.AddUint64(&s.stats.TotFileMergeZapEnd, 1) + + fileMergeZapTime := uint64(time.Since(fileMergeZapStartTime)) + atomic.AddUint64(&s.stats.TotFileMergeZapTime, fileMergeZapTime) + if atomic.LoadUint64(&s.stats.MaxFileMergeZapTime) < fileMergeZapTime { + atomic.StoreUint64(&s.stats.MaxFileMergeZapTime, fileMergeZapTime) + } + + if err != nil { + s.unmarkIneligibleForRemoval(filename) + atomic.AddUint64(&s.stats.TotFileMergePlanTasksErr, 1) + if err == segment.ErrClosed { + return err + } + return fmt.Errorf("merging failed: %v", err) + } + + seg, err = s.segPlugin.Open(path) + if err != nil { + s.unmarkIneligibleForRemoval(filename) + atomic.AddUint64(&s.stats.TotFileMergePlanTasksErr, 1) + return err + } + oldNewDocNums = make(map[uint64][]uint64) + for i, segNewDocNums := range newDocNums { + oldNewDocNums[task.Segments[i].Id()] = segNewDocNums + } + + atomic.AddUint64(&s.stats.TotFileMergeSegments, uint64(len(segmentsToMerge))) + } + + sm := &segmentMerge{ + id: newSegmentID, + old: oldMap, + oldNewDocNums: oldNewDocNums, + new: seg, + notifyCh: make(chan *mergeTaskIntroStatus), + } + + s.fireEvent(EventKindMergeTaskIntroductionStart, 0) + + // give it to the introducer + select { + case <-s.closeCh: + _ = seg.Close() + return segment.ErrClosed + case s.merges <- sm: + atomic.AddUint64(&s.stats.TotFileMergeIntroductions, 1) + } + + introStartTime := time.Now() + // it is safe to blockingly 
wait for the merge introduction + // here as the introducer is bound to handle the notify channel. + introStatus := <-sm.notifyCh + introTime := uint64(time.Since(introStartTime)) + atomic.AddUint64(&s.stats.TotFileMergeZapIntroductionTime, introTime) + if atomic.LoadUint64(&s.stats.MaxFileMergeZapIntroductionTime) < introTime { + atomic.StoreUint64(&s.stats.MaxFileMergeZapIntroductionTime, introTime) + } + atomic.AddUint64(&s.stats.TotFileMergeIntroductionsDone, 1) + if introStatus != nil && introStatus.indexSnapshot != nil { + _ = introStatus.indexSnapshot.DecRef() + if introStatus.skipped { + // close the segment on skipping introduction. + s.unmarkIneligibleForRemoval(filename) + _ = seg.Close() + } + } + + atomic.AddUint64(&s.stats.TotFileMergePlanTasksDone, 1) + + s.fireEvent(EventKindMergeTaskIntroduction, 0) + } + + // once all the newly merged segment introductions are done, + // its safe to unflip the removal ineligibility for the replaced + // older segments + for _, f := range filenames { + s.unmarkIneligibleForRemoval(f) + } + + return nil +} + +type mergeTaskIntroStatus struct { + indexSnapshot *IndexSnapshot + skipped bool +} + +type segmentMerge struct { + id uint64 + old map[uint64]*SegmentSnapshot + oldNewDocNums map[uint64][]uint64 + new segment.Segment + notifyCh chan *mergeTaskIntroStatus +} + +// perform a merging of the given SegmentBase instances into a new, +// persisted segment, and synchronously introduce that new segment +// into the root +func (s *Scorch) mergeSegmentBases(snapshot *IndexSnapshot, + sbs []segment.Segment, sbsDrops []*roaring.Bitmap, + sbsIndexes []int) (*IndexSnapshot, uint64, error) { + atomic.AddUint64(&s.stats.TotMemMergeBeg, 1) + + memMergeZapStartTime := time.Now() + + atomic.AddUint64(&s.stats.TotMemMergeZapBeg, 1) + + newSegmentID := atomic.AddUint64(&s.nextSegmentID, 1) + filename := zapFileName(newSegmentID) + path := s.path + string(os.PathSeparator) + filename + + newDocNums, _, err := + 
s.segPlugin.Merge(sbs, sbsDrops, path, s.closeCh, s) + + atomic.AddUint64(&s.stats.TotMemMergeZapEnd, 1) + + memMergeZapTime := uint64(time.Since(memMergeZapStartTime)) + atomic.AddUint64(&s.stats.TotMemMergeZapTime, memMergeZapTime) + if atomic.LoadUint64(&s.stats.MaxMemMergeZapTime) < memMergeZapTime { + atomic.StoreUint64(&s.stats.MaxMemMergeZapTime, memMergeZapTime) + } + + if err != nil { + atomic.AddUint64(&s.stats.TotMemMergeErr, 1) + return nil, 0, err + } + + seg, err := s.segPlugin.Open(path) + if err != nil { + atomic.AddUint64(&s.stats.TotMemMergeErr, 1) + return nil, 0, err + } + + // update persisted stats + atomic.AddUint64(&s.stats.TotPersistedItems, seg.Count()) + atomic.AddUint64(&s.stats.TotPersistedSegments, 1) + + sm := &segmentMerge{ + id: newSegmentID, + old: make(map[uint64]*SegmentSnapshot), + oldNewDocNums: make(map[uint64][]uint64), + new: seg, + notifyCh: make(chan *mergeTaskIntroStatus), + } + + for i, idx := range sbsIndexes { + ss := snapshot.segment[idx] + sm.old[ss.id] = ss + sm.oldNewDocNums[ss.id] = newDocNums[i] + } + + select { // send to introducer + case <-s.closeCh: + _ = seg.DecRef() + return nil, 0, segment.ErrClosed + case s.merges <- sm: + } + + // blockingly wait for the introduction to complete + var newSnapshot *IndexSnapshot + introStatus := <-sm.notifyCh + if introStatus != nil && introStatus.indexSnapshot != nil { + newSnapshot = introStatus.indexSnapshot + atomic.AddUint64(&s.stats.TotMemMergeSegments, uint64(len(sbs))) + atomic.AddUint64(&s.stats.TotMemMergeDone, 1) + if introStatus.skipped { + // close the segment on skipping introduction. 
+ _ = newSnapshot.DecRef() + _ = seg.Close() + newSnapshot = nil + } + } + + return newSnapshot, newSegmentID, nil +} + +func (s *Scorch) ReportBytesWritten(bytesWritten uint64) { + atomic.AddUint64(&s.stats.TotFileMergeWrittenBytes, bytesWritten) +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/merge_plan.go b/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/merge_plan.go new file mode 100644 index 0000000..7523506 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/merge_plan.go @@ -0,0 +1,397 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package mergeplan provides a segment merge planning approach that's +// inspired by Lucene's TieredMergePolicy.java and descriptions like +// http://blog.mikemccandless.com/2011/02/visualizing-lucenes-segment-merges.html +package mergeplan + +import ( + "errors" + "fmt" + "math" + "sort" + "strings" +) + +// A Segment represents the information that the planner needs to +// calculate segment merging. +type Segment interface { + // Unique id of the segment -- used for sorting. + Id() uint64 + + // Full segment size (the size before any logical deletions). + FullSize() int64 + + // Size of the live data of the segment; i.e., FullSize() minus + // any logical deletions. + LiveSize() int64 +} + +// Plan() will functionally compute a merge plan. 
A segment will be +// assigned to at most a single MergeTask in the output MergePlan. A +// segment not assigned to any MergeTask means the segment should +// remain unmerged. +func Plan(segments []Segment, o *MergePlanOptions) (*MergePlan, error) { + return plan(segments, o) +} + +// A MergePlan is the result of the Plan() API. +// +// The planner doesn’t know how or whether these tasks are executed -- +// that’s up to a separate merge execution system, which might execute +// these tasks concurrently or not, and which might execute all the +// tasks or not. +type MergePlan struct { + Tasks []*MergeTask +} + +// A MergeTask represents several segments that should be merged +// together into a single segment. +type MergeTask struct { + Segments []Segment +} + +// The MergePlanOptions is designed to be reusable between planning calls. +type MergePlanOptions struct { + // Max # segments per logarithmic tier, or max width of any + // logarithmic "step". Smaller values mean more merging but fewer + // segments. Should be >= SegmentsPerMergeTask, else you'll have + // too much merging. + MaxSegmentsPerTier int + + // Max size of any segment produced after merging. Actual + // merging, however, may produce segment sizes different than the + // planner’s predicted sizes. + MaxSegmentSize int64 + + // The growth factor for each tier in a staircase of idealized + // segments computed by CalcBudget(). + TierGrowth float64 + + // The number of segments in any resulting MergeTask. e.g., + // len(result.Tasks[ * ].Segments) == SegmentsPerMergeTask. + SegmentsPerMergeTask int + + // Small segments are rounded up to this size, i.e., treated as + // equal (floor) size for consideration. This is to prevent lots + // of tiny segments from resulting in a long tail in the index. + FloorSegmentSize int64 + + // Controls how aggressively merges that reclaim more deletions + // are favored.
Higher values will more aggressively target + // merges that reclaim deletions, but be careful not to go so high + // that way too much merging takes place; a value of 3.0 is + // probably nearly too high. A value of 0.0 means deletions don't + // impact merge selection. + ReclaimDeletesWeight float64 + + // Optional, defaults to mergeplan.CalcBudget(). + CalcBudget func(totalSize int64, firstTierSize int64, + o *MergePlanOptions) (budgetNumSegments int) + + // Optional, defaults to mergeplan.ScoreSegments(). + ScoreSegments func(segments []Segment, o *MergePlanOptions) float64 + + // Optional. + Logger func(string) +} + +// Returns the higher of the input or FloorSegmentSize. +func (o *MergePlanOptions) RaiseToFloorSegmentSize(s int64) int64 { + if s > o.FloorSegmentSize { + return s + } + return o.FloorSegmentSize +} + +// MaxSegmentSizeLimit represents the maximum size of a segment, +// this limit comes with hit-1 optimisation/max encoding limit uint31. +const MaxSegmentSizeLimit = 1<<31 - 1 + +// ErrMaxSegmentSizeTooLarge is returned when the size of the segment +// exceeds the MaxSegmentSizeLimit +var ErrMaxSegmentSizeTooLarge = errors.New("MaxSegmentSize exceeds the size limit") + +// DefaultMergePlanOptions suggests the default options. +var DefaultMergePlanOptions = MergePlanOptions{ + MaxSegmentsPerTier: 10, + MaxSegmentSize: 5000000, + TierGrowth: 10.0, + SegmentsPerMergeTask: 10, + FloorSegmentSize: 2000, + ReclaimDeletesWeight: 2.0, +} + +// SingleSegmentMergePlanOptions helps in creating a +// single segment index. 
+var SingleSegmentMergePlanOptions = MergePlanOptions{ + MaxSegmentsPerTier: 1, + MaxSegmentSize: 1 << 30, + TierGrowth: 1.0, + SegmentsPerMergeTask: 10, + FloorSegmentSize: 1 << 30, + ReclaimDeletesWeight: 2.0, +} + +// ------------------------------------------- + +func plan(segmentsIn []Segment, o *MergePlanOptions) (*MergePlan, error) { + if len(segmentsIn) <= 1 { + return nil, nil + } + + if o == nil { + o = &DefaultMergePlanOptions + } + + segments := append([]Segment(nil), segmentsIn...) // Copy. + + sort.Sort(byLiveSizeDescending(segments)) + + var minLiveSize int64 = math.MaxInt64 + + var eligibles []Segment + var eligiblesLiveSize int64 + + for _, segment := range segments { + if minLiveSize > segment.LiveSize() { + minLiveSize = segment.LiveSize() + } + + // Only small-enough segments are eligible. + if segment.LiveSize() < o.MaxSegmentSize/2 { + eligibles = append(eligibles, segment) + eligiblesLiveSize += segment.LiveSize() + } + } + + minLiveSize = o.RaiseToFloorSegmentSize(minLiveSize) + + calcBudget := o.CalcBudget + if calcBudget == nil { + calcBudget = CalcBudget + } + + budgetNumSegments := calcBudget(eligiblesLiveSize, minLiveSize, o) + + scoreSegments := o.ScoreSegments + if scoreSegments == nil { + scoreSegments = ScoreSegments + } + + rv := &MergePlan{} + + var empties []Segment + for _, eligible := range eligibles { + if eligible.LiveSize() <= 0 { + empties = append(empties, eligible) + } + } + if len(empties) > 0 { + rv.Tasks = append(rv.Tasks, &MergeTask{Segments: empties}) + eligibles = removeSegments(eligibles, empties) + } + + // While we’re over budget, keep looping, which might produce + // another MergeTask. + for len(eligibles) > 0 && (len(eligibles)+len(rv.Tasks)) > budgetNumSegments { + // Track a current best roster as we examine and score + // potential rosters of merges. + var bestRoster []Segment + var bestRosterScore float64 // Lower score is better. 
+ + for startIdx := 0; startIdx < len(eligibles); startIdx++ { + var roster []Segment + var rosterLiveSize int64 + + for idx := startIdx; idx < len(eligibles) && len(roster) < o.SegmentsPerMergeTask; idx++ { + eligible := eligibles[idx] + + if rosterLiveSize+eligible.LiveSize() < o.MaxSegmentSize { + roster = append(roster, eligible) + rosterLiveSize += eligible.LiveSize() + } + } + + if len(roster) > 0 { + rosterScore := scoreSegments(roster, o) + + if len(bestRoster) == 0 || rosterScore < bestRosterScore { + bestRoster = roster + bestRosterScore = rosterScore + } + } + } + + if len(bestRoster) == 0 { + return rv, nil + } + + rv.Tasks = append(rv.Tasks, &MergeTask{Segments: bestRoster}) + + eligibles = removeSegments(eligibles, bestRoster) + } + + return rv, nil +} + +// Compute the number of segments that would be needed to cover the +// totalSize, by climbing up a logarithmically growing staircase of +// segment tiers. +func CalcBudget(totalSize int64, firstTierSize int64, o *MergePlanOptions) ( + budgetNumSegments int) { + tierSize := firstTierSize + if tierSize < 1 { + tierSize = 1 + } + + maxSegmentsPerTier := o.MaxSegmentsPerTier + if maxSegmentsPerTier < 1 { + maxSegmentsPerTier = 1 + } + + tierGrowth := o.TierGrowth + if tierGrowth < 1.0 { + tierGrowth = 1.0 + } + + for totalSize > 0 { + segmentsInTier := float64(totalSize) / float64(tierSize) + if segmentsInTier < float64(maxSegmentsPerTier) { + budgetNumSegments += int(math.Ceil(segmentsInTier)) + break + } + + budgetNumSegments += maxSegmentsPerTier + totalSize -= int64(maxSegmentsPerTier) * tierSize + tierSize = int64(float64(tierSize) * tierGrowth) + } + + return budgetNumSegments +} + +// Of note, removeSegments() keeps the ordering of the results stable. 
+func removeSegments(segments []Segment, toRemove []Segment) []Segment { + rv := make([]Segment, 0, len(segments)-len(toRemove)) +OUTER: + for _, segment := range segments { + for _, r := range toRemove { + if segment == r { + continue OUTER + } + } + rv = append(rv, segment) + } + return rv +} + +// Smaller result score is better. +func ScoreSegments(segments []Segment, o *MergePlanOptions) float64 { + var totBeforeSize int64 + var totAfterSize int64 + var totAfterSizeFloored int64 + + for _, segment := range segments { + totBeforeSize += segment.FullSize() + totAfterSize += segment.LiveSize() + totAfterSizeFloored += o.RaiseToFloorSegmentSize(segment.LiveSize()) + } + + if totBeforeSize <= 0 || totAfterSize <= 0 || totAfterSizeFloored <= 0 { + return 0 + } + + // Roughly guess the "balance" of the segments -- whether the + // segments are about the same size. + balance := + float64(o.RaiseToFloorSegmentSize(segments[0].LiveSize())) / + float64(totAfterSizeFloored) + + // Gently favor smaller merges over bigger ones. We don't want to + // make the exponent too large else we end up with poor merges of + // small segments in order to avoid the large merges. + score := balance * math.Pow(float64(totAfterSize), 0.05) + + // Strongly favor merges that reclaim deletes. + nonDelRatio := float64(totAfterSize) / float64(totBeforeSize) + + score *= math.Pow(nonDelRatio, o.ReclaimDeletesWeight) + + return score +} + +// ------------------------------------------ + +// ToBarChart returns an ASCII rendering of the segments and the plan. +// The barMax is the max width of the bars in the bar chart. 
+func ToBarChart(prefix string, barMax int, segments []Segment, plan *MergePlan) string { + rv := make([]string, 0, len(segments)) + + var maxFullSize int64 + for _, segment := range segments { + if maxFullSize < segment.FullSize() { + maxFullSize = segment.FullSize() + } + } + if maxFullSize < 0 { + maxFullSize = 1 + } + + for _, segment := range segments { + barFull := int(segment.FullSize()) + barLive := int(segment.LiveSize()) + + if maxFullSize > int64(barMax) { + barFull = int(float64(barMax) * float64(barFull) / float64(maxFullSize)) + barLive = int(float64(barMax) * float64(barLive) / float64(maxFullSize)) + } + + barKind := " " + barChar := "." + + if plan != nil { + TASK_LOOP: + for taski, task := range plan.Tasks { + for _, taskSegment := range task.Segments { + if taskSegment == segment { + barKind = "*" + barChar = fmt.Sprintf("%d", taski) + break TASK_LOOP + } + } + } + } + + bar := + strings.Repeat(barChar, barLive)[0:barLive] + + strings.Repeat("x", barFull-barLive)[0:barFull-barLive] + + rv = append(rv, fmt.Sprintf("%s %5d: %5d /%5d - %s %s", prefix, + segment.Id(), + segment.LiveSize(), + segment.FullSize(), + barKind, bar)) + } + + return strings.Join(rv, "\n") +} + +// ValidateMergePlannerOptions validates the merge planner options +func ValidateMergePlannerOptions(options *MergePlanOptions) error { + if options.MaxSegmentSize > MaxSegmentSizeLimit { + return ErrMaxSegmentSizeTooLarge + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/sort.go b/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/sort.go new file mode 100644 index 0000000..d044b8d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/mergeplan/sort.go @@ -0,0 +1,28 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mergeplan + +type byLiveSizeDescending []Segment + +func (a byLiveSizeDescending) Len() int { return len(a) } + +func (a byLiveSizeDescending) Swap(i, j int) { a[i], a[j] = a[j], a[i] } + +func (a byLiveSizeDescending) Less(i, j int) bool { + if a[i].LiveSize() != a[j].LiveSize() { + return a[i].LiveSize() > a[j].LiveSize() + } + return a[i].Id() < a[j].Id() +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/optimize.go b/vendor/github.com/blevesearch/bleve/index/scorch/optimize.go new file mode 100644 index 0000000..658354c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/optimize.go @@ -0,0 +1,396 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorch + +import ( + "fmt" + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "sync/atomic" +) + +var OptimizeConjunction = true +var OptimizeConjunctionUnadorned = true +var OptimizeDisjunctionUnadorned = true + +func (s *IndexSnapshotTermFieldReader) Optimize(kind string, + octx index.OptimizableContext) (index.OptimizableContext, error) { + if OptimizeConjunction && kind == "conjunction" { + return s.optimizeConjunction(octx) + } + + if OptimizeConjunctionUnadorned && kind == "conjunction:unadorned" { + return s.optimizeConjunctionUnadorned(octx) + } + + if OptimizeDisjunctionUnadorned && kind == "disjunction:unadorned" { + return s.optimizeDisjunctionUnadorned(octx) + } + + return nil, nil +} + +var OptimizeDisjunctionUnadornedMinChildCardinality = uint64(256) + +// ---------------------------------------------------------------- + +func (s *IndexSnapshotTermFieldReader) optimizeConjunction( + octx index.OptimizableContext) (index.OptimizableContext, error) { + if octx == nil { + octx = &OptimizeTFRConjunction{snapshot: s.snapshot} + } + + o, ok := octx.(*OptimizeTFRConjunction) + if !ok { + return octx, nil + } + + if o.snapshot != s.snapshot { + return nil, fmt.Errorf("tried to optimize conjunction across different snapshots") + } + + o.tfrs = append(o.tfrs, s) + + return o, nil +} + +type OptimizeTFRConjunction struct { + snapshot *IndexSnapshot + + tfrs []*IndexSnapshotTermFieldReader +} + +func (o *OptimizeTFRConjunction) Finish() (index.Optimized, error) { + if len(o.tfrs) <= 1 { + return nil, nil + } + + for i := range o.snapshot.segment { + itr0, ok := o.tfrs[0].iterators[i].(segment.OptimizablePostingsIterator) + if !ok || itr0.ActualBitmap() == nil { + continue + } + + itr1, ok := o.tfrs[1].iterators[i].(segment.OptimizablePostingsIterator) + if !ok || itr1.ActualBitmap() == nil { + continue + } + + bm := roaring.And(itr0.ActualBitmap(), itr1.ActualBitmap()) + 
+ for _, tfr := range o.tfrs[2:] { + itr, ok := tfr.iterators[i].(segment.OptimizablePostingsIterator) + if !ok || itr.ActualBitmap() == nil { + continue + } + + bm.And(itr.ActualBitmap()) + } + + // in this conjunction optimization, the postings iterators + // will all share the same AND'ed together actual bitmap. The + // regular conjunction searcher machinery will still be used, + // but the underlying bitmap will be smaller. + for _, tfr := range o.tfrs { + itr, ok := tfr.iterators[i].(segment.OptimizablePostingsIterator) + if ok && itr.ActualBitmap() != nil { + itr.ReplaceActual(bm) + } + } + } + + return nil, nil +} + +// ---------------------------------------------------------------- + +// An "unadorned" conjunction optimization is appropriate when +// additional or subsidiary information like freq-norm's and +// term-vectors are not required, and instead only the internal-id's +// are needed. +func (s *IndexSnapshotTermFieldReader) optimizeConjunctionUnadorned( + octx index.OptimizableContext) (index.OptimizableContext, error) { + if octx == nil { + octx = &OptimizeTFRConjunctionUnadorned{snapshot: s.snapshot} + } + + o, ok := octx.(*OptimizeTFRConjunctionUnadorned) + if !ok { + return nil, nil + } + + if o.snapshot != s.snapshot { + return nil, fmt.Errorf("tried to optimize unadorned conjunction across different snapshots") + } + + o.tfrs = append(o.tfrs, s) + + return o, nil +} + +type OptimizeTFRConjunctionUnadorned struct { + snapshot *IndexSnapshot + + tfrs []*IndexSnapshotTermFieldReader +} + +var OptimizeTFRConjunctionUnadornedTerm = []byte("") +var OptimizeTFRConjunctionUnadornedField = "*" + +// Finish of an unadorned conjunction optimization will compute a +// termFieldReader with an "actual" bitmap that represents the +// constituent bitmaps AND'ed together. This termFieldReader cannot +// provide any freq-norm or termVector associated information. 
+func (o *OptimizeTFRConjunctionUnadorned) Finish() (rv index.Optimized, err error) { + if len(o.tfrs) <= 1 { + return nil, nil + } + + // We use an artificial term and field because the optimized + // termFieldReader can represent multiple terms and fields. + oTFR := o.snapshot.unadornedTermFieldReader( + OptimizeTFRConjunctionUnadornedTerm, OptimizeTFRConjunctionUnadornedField) + + var actualBMs []*roaring.Bitmap // Collected from regular posting lists. + +OUTER: + for i := range o.snapshot.segment { + actualBMs = actualBMs[:0] + + var docNum1HitLast uint64 + var docNum1HitLastOk bool + + for _, tfr := range o.tfrs { + if _, ok := tfr.iterators[i].(*segment.EmptyPostingsIterator); ok { + // An empty postings iterator means the entire AND is empty. + oTFR.iterators[i] = segment.AnEmptyPostingsIterator + continue OUTER + } + + itr, ok := tfr.iterators[i].(segment.OptimizablePostingsIterator) + if !ok { + // We only optimize postings iterators that support this operation. + return nil, nil + } + + // If the postings iterator is "1-hit" optimized, then we + // can perform several optimizations up-front here. + docNum1Hit, ok := itr.DocNum1Hit() + if ok { + if docNum1HitLastOk && docNum1HitLast != docNum1Hit { + // The docNum1Hit doesn't match the previous + // docNum1HitLast, so the entire AND is empty. + oTFR.iterators[i] = segment.AnEmptyPostingsIterator + continue OUTER + } + + docNum1HitLast = docNum1Hit + docNum1HitLastOk = true + + continue + } + + if itr.ActualBitmap() == nil { + // An empty actual bitmap means the entire AND is empty. + oTFR.iterators[i] = segment.AnEmptyPostingsIterator + continue OUTER + } + + // Collect the actual bitmap for more processing later. + actualBMs = append(actualBMs, itr.ActualBitmap()) + } + + if docNum1HitLastOk { + // We reach here if all the 1-hit optimized posting + // iterators had the same 1-hit docNum, so we can check if + // our collected actual bitmaps also have that docNum. 
+ for _, bm := range actualBMs { + if !bm.Contains(uint32(docNum1HitLast)) { + // The docNum1Hit isn't in one of our actual + // bitmaps, so the entire AND is empty. + oTFR.iterators[i] = segment.AnEmptyPostingsIterator + continue OUTER + } + } + + // The actual bitmaps and docNum1Hits all contain or have + // the same 1-hit docNum, so that's our AND'ed result. + oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFrom1Hit(docNum1HitLast) + + continue OUTER + } + + if len(actualBMs) == 0 { + // If we've collected no actual bitmaps at this point, + // then the entire AND is empty. + oTFR.iterators[i] = segment.AnEmptyPostingsIterator + continue OUTER + } + + if len(actualBMs) == 1 { + // If we've only 1 actual bitmap, then that's our result. + oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(actualBMs[0]) + + continue OUTER + } + + // Else, AND together our collected bitmaps as our result. + bm := roaring.And(actualBMs[0], actualBMs[1]) + + for _, actualBM := range actualBMs[2:] { + bm.And(actualBM) + } + + oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(bm) + } + + atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1)) + return oTFR, nil +} + +// ---------------------------------------------------------------- + +// An "unadorned" disjunction optimization is appropriate when +// additional or subsidiary information like freq-norm's and +// term-vectors are not required, and instead only the internal-id's +// are needed. 
+func (s *IndexSnapshotTermFieldReader) optimizeDisjunctionUnadorned( + octx index.OptimizableContext) (index.OptimizableContext, error) { + if octx == nil { + octx = &OptimizeTFRDisjunctionUnadorned{ + snapshot: s.snapshot, + } + } + + o, ok := octx.(*OptimizeTFRDisjunctionUnadorned) + if !ok { + return nil, nil + } + + if o.snapshot != s.snapshot { + return nil, fmt.Errorf("tried to optimize unadorned disjunction across different snapshots") + } + + o.tfrs = append(o.tfrs, s) + + return o, nil +} + +type OptimizeTFRDisjunctionUnadorned struct { + snapshot *IndexSnapshot + + tfrs []*IndexSnapshotTermFieldReader +} + +var OptimizeTFRDisjunctionUnadornedTerm = []byte("") +var OptimizeTFRDisjunctionUnadornedField = "*" + +// Finish of an unadorned disjunction optimization will compute a +// termFieldReader with an "actual" bitmap that represents the +// constituent bitmaps OR'ed together. This termFieldReader cannot +// provide any freq-norm or termVector associated information. +func (o *OptimizeTFRDisjunctionUnadorned) Finish() (rv index.Optimized, err error) { + if len(o.tfrs) <= 1 { + return nil, nil + } + + for i := range o.snapshot.segment { + var cMax uint64 + + for _, tfr := range o.tfrs { + itr, ok := tfr.iterators[i].(segment.OptimizablePostingsIterator) + if !ok { + return nil, nil + } + + if itr.ActualBitmap() != nil { + c := itr.ActualBitmap().GetCardinality() + if cMax < c { + cMax = c + } + } + } + } + + // We use an artificial term and field because the optimized + // termFieldReader can represent multiple terms and fields. + oTFR := o.snapshot.unadornedTermFieldReader( + OptimizeTFRDisjunctionUnadornedTerm, OptimizeTFRDisjunctionUnadornedField) + + var docNums []uint32 // Collected docNum's from 1-hit posting lists. + var actualBMs []*roaring.Bitmap // Collected from regular posting lists. 
+ + for i := range o.snapshot.segment { + docNums = docNums[:0] + actualBMs = actualBMs[:0] + + for _, tfr := range o.tfrs { + itr, ok := tfr.iterators[i].(segment.OptimizablePostingsIterator) + if !ok { + return nil, nil + } + + docNum, ok := itr.DocNum1Hit() + if ok { + docNums = append(docNums, uint32(docNum)) + continue + } + + if itr.ActualBitmap() != nil { + actualBMs = append(actualBMs, itr.ActualBitmap()) + } + } + + var bm *roaring.Bitmap + if len(actualBMs) > 2 { + bm = roaring.HeapOr(actualBMs...) + } else if len(actualBMs) == 2 { + bm = roaring.Or(actualBMs[0], actualBMs[1]) + } else if len(actualBMs) == 1 { + bm = actualBMs[0].Clone() + } + + if bm == nil { + bm = roaring.New() + } + + bm.AddMany(docNums) + + oTFR.iterators[i] = segment.NewUnadornedPostingsIteratorFromBitmap(bm) + } + + atomic.AddUint64(&o.snapshot.parent.stats.TotTermSearchersStarted, uint64(1)) + return oTFR, nil +} + +// ---------------------------------------------------------------- + +func (i *IndexSnapshot) unadornedTermFieldReader( + term []byte, field string) *IndexSnapshotTermFieldReader { + // This IndexSnapshotTermFieldReader will not be recycled, more + // conversation here: https://github.com/blevesearch/bleve/pull/1438 + return &IndexSnapshotTermFieldReader{ + term: term, + field: field, + snapshot: i, + iterators: make([]segment.PostingsIterator, len(i.segment)), + segmentOffset: 0, + includeFreq: false, + includeNorm: false, + includeTermVectors: false, + recycle: false, + } +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/persister.go b/vendor/github.com/blevesearch/bleve/index/scorch/persister.go new file mode 100644 index 0000000..498378a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/persister.go @@ -0,0 +1,990 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorch + +import ( + "bytes" + "encoding/binary" + "encoding/json" + "fmt" + "io/ioutil" + "log" + "math" + "os" + "path/filepath" + "strconv" + "strings" + "sync/atomic" + "time" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + bolt "go.etcd.io/bbolt" +) + +// DefaultPersisterNapTimeMSec is kept to zero as this helps in direct +// persistence of segments with the default safe batch option. +// If the default safe batch option results in high number of +// files on disk, then users may initialise this configuration parameter +// with higher values so that the persister will nap a bit within it's +// work loop to favour better in-memory merging of segments to result +// in fewer segment files on disk. But that may come with an indexing +// performance overhead. +// Unsafe batch users are advised to override this to higher value +// for better performance especially with high data density. +var DefaultPersisterNapTimeMSec int = 0 // ms + +// DefaultPersisterNapUnderNumFiles helps in controlling the pace of +// persister. At times of a slow merger progress with heavy file merging +// operations, its better to pace down the persister for letting the merger +// to catch up within a range defined by this parameter. +// Fewer files on disk (as per the merge plan) would result in keeping the +// file handle usage under limit, faster disk merger and a healthier index. 
+// Its been observed that such a loosely sync'ed introducer-persister-merger +// trio results in better overall performance. +var DefaultPersisterNapUnderNumFiles int = 1000 + +var DefaultMemoryPressurePauseThreshold uint64 = math.MaxUint64 + +type persisterOptions struct { + // PersisterNapTimeMSec controls the wait/delay injected into + // persistence workloop to improve the chances for + // a healthier and heavier in-memory merging + PersisterNapTimeMSec int + + // PersisterNapTimeMSec > 0, and the number of files is less than + // PersisterNapUnderNumFiles, then the persister will sleep + // PersisterNapTimeMSec amount of time to improve the chances for + // a healthier and heavier in-memory merging + PersisterNapUnderNumFiles int + + // MemoryPressurePauseThreshold let persister to have a better leeway + // for prudently performing the memory merge of segments on a memory + // pressure situation. Here the config value is an upper threshold + // for the number of paused application threads. The default value would + // be a very high number to always favour the merging of memory segments. 
+ MemoryPressurePauseThreshold uint64 +} + +type notificationChan chan struct{} + +func (s *Scorch) persisterLoop() { + defer s.asyncTasks.Done() + + var persistWatchers []*epochWatcher + var lastPersistedEpoch, lastMergedEpoch uint64 + var ew *epochWatcher + + var unpersistedCallbacks []index.BatchCallback + + po, err := s.parsePersisterOptions() + if err != nil { + s.fireAsyncError(fmt.Errorf("persisterOptions json parsing err: %v", err)) + s.asyncTasks.Done() + return + } + +OUTER: + for { + atomic.AddUint64(&s.stats.TotPersistLoopBeg, 1) + + select { + case <-s.closeCh: + break OUTER + case ew = <-s.persisterNotifier: + persistWatchers = append(persistWatchers, ew) + default: + } + if ew != nil && ew.epoch > lastMergedEpoch { + lastMergedEpoch = ew.epoch + } + lastMergedEpoch, persistWatchers = s.pausePersisterForMergerCatchUp(lastPersistedEpoch, + lastMergedEpoch, persistWatchers, po) + + var ourSnapshot *IndexSnapshot + var ourPersisted []chan error + var ourPersistedCallbacks []index.BatchCallback + + // check to see if there is a new snapshot to persist + s.rootLock.Lock() + if s.root != nil && s.root.epoch > lastPersistedEpoch { + ourSnapshot = s.root + ourSnapshot.AddRef() + ourPersisted = s.rootPersisted + s.rootPersisted = nil + ourPersistedCallbacks = s.persistedCallbacks + s.persistedCallbacks = nil + atomic.StoreUint64(&s.iStats.persistSnapshotSize, uint64(ourSnapshot.Size())) + atomic.StoreUint64(&s.iStats.persistEpoch, ourSnapshot.epoch) + } + s.rootLock.Unlock() + + if ourSnapshot != nil { + startTime := time.Now() + + err := s.persistSnapshot(ourSnapshot, po) + for _, ch := range ourPersisted { + if err != nil { + ch <- err + } + close(ch) + } + if err != nil { + atomic.StoreUint64(&s.iStats.persistEpoch, 0) + if err == segment.ErrClosed { + // index has been closed + _ = ourSnapshot.DecRef() + break OUTER + } + + // save this current snapshot's persistedCallbacks, to invoke during + // the retry attempt + unpersistedCallbacks = 
append(unpersistedCallbacks, ourPersistedCallbacks...) + + s.fireAsyncError(fmt.Errorf("got err persisting snapshot: %v", err)) + _ = ourSnapshot.DecRef() + atomic.AddUint64(&s.stats.TotPersistLoopErr, 1) + continue OUTER + } + + if unpersistedCallbacks != nil { + // in the event of this being a retry attempt for persisting a snapshot + // that had earlier failed, prepend the persistedCallbacks associated + // with earlier segment(s) to the latest persistedCallbacks + ourPersistedCallbacks = append(unpersistedCallbacks, ourPersistedCallbacks...) + unpersistedCallbacks = nil + } + + for i := range ourPersistedCallbacks { + ourPersistedCallbacks[i](err) + } + + atomic.StoreUint64(&s.stats.LastPersistedEpoch, ourSnapshot.epoch) + + lastPersistedEpoch = ourSnapshot.epoch + for _, ew := range persistWatchers { + close(ew.notifyCh) + } + + persistWatchers = nil + _ = ourSnapshot.DecRef() + + changed := false + s.rootLock.RLock() + if s.root != nil && s.root.epoch != lastPersistedEpoch { + changed = true + } + s.rootLock.RUnlock() + + s.fireEvent(EventKindPersisterProgress, time.Since(startTime)) + + if changed { + atomic.AddUint64(&s.stats.TotPersistLoopProgress, 1) + continue OUTER + } + } + + // tell the introducer we're waiting for changes + w := &epochWatcher{ + epoch: lastPersistedEpoch, + notifyCh: make(notificationChan, 1), + } + + select { + case <-s.closeCh: + break OUTER + case s.introducerNotifier <- w: + } + + s.removeOldData() // might as well cleanup while waiting + + atomic.AddUint64(&s.stats.TotPersistLoopWait, 1) + + select { + case <-s.closeCh: + break OUTER + case <-w.notifyCh: + // woken up, next loop should pick up work + atomic.AddUint64(&s.stats.TotPersistLoopWaitNotified, 1) + case ew = <-s.persisterNotifier: + // if the watchers are already caught up then let them wait, + // else let them continue to do the catch up + persistWatchers = append(persistWatchers, ew) + } + + atomic.AddUint64(&s.stats.TotPersistLoopEnd, 1) + } +} + +func 
notifyMergeWatchers(lastPersistedEpoch uint64, + persistWatchers []*epochWatcher) []*epochWatcher { + var watchersNext []*epochWatcher + for _, w := range persistWatchers { + if w.epoch < lastPersistedEpoch { + close(w.notifyCh) + } else { + watchersNext = append(watchersNext, w) + } + } + return watchersNext +} + +func (s *Scorch) pausePersisterForMergerCatchUp(lastPersistedEpoch uint64, + lastMergedEpoch uint64, persistWatchers []*epochWatcher, + po *persisterOptions) (uint64, []*epochWatcher) { + + // First, let the watchers proceed if they lag behind + persistWatchers = notifyMergeWatchers(lastPersistedEpoch, persistWatchers) + + // Check the merger lag by counting the segment files on disk, + numFilesOnDisk, _, _ := s.diskFileStats(nil) + + // On finding fewer files on disk, persister takes a short pause + // for sufficient in-memory segments to pile up for the next + // memory merge cum persist loop. + if numFilesOnDisk < uint64(po.PersisterNapUnderNumFiles) && + po.PersisterNapTimeMSec > 0 && s.NumEventsBlocking() == 0 { + select { + case <-s.closeCh: + case <-time.After(time.Millisecond * time.Duration(po.PersisterNapTimeMSec)): + atomic.AddUint64(&s.stats.TotPersisterNapPauseCompleted, 1) + + case ew := <-s.persisterNotifier: + // unblock the merger in meantime + persistWatchers = append(persistWatchers, ew) + lastMergedEpoch = ew.epoch + persistWatchers = notifyMergeWatchers(lastPersistedEpoch, persistWatchers) + atomic.AddUint64(&s.stats.TotPersisterMergerNapBreak, 1) + } + return lastMergedEpoch, persistWatchers + } + + // Finding too many files on disk could be due to two reasons. + // 1. Too many older snapshots awaiting the clean up. + // 2. The merger could be lagging behind on merging the disk files. + if numFilesOnDisk > uint64(po.PersisterNapUnderNumFiles) { + s.removeOldData() + numFilesOnDisk, _, _ = s.diskFileStats(nil) + } + + // Persister pause until the merger catches up to reduce the segment + // file count under the threshold. 
+ // But if there is memory pressure, then skip this sleep maneuvers. +OUTER: + for po.PersisterNapUnderNumFiles > 0 && + numFilesOnDisk >= uint64(po.PersisterNapUnderNumFiles) && + lastMergedEpoch < lastPersistedEpoch { + atomic.AddUint64(&s.stats.TotPersisterSlowMergerPause, 1) + + select { + case <-s.closeCh: + break OUTER + case ew := <-s.persisterNotifier: + persistWatchers = append(persistWatchers, ew) + lastMergedEpoch = ew.epoch + } + + atomic.AddUint64(&s.stats.TotPersisterSlowMergerResume, 1) + + // let the watchers proceed if they lag behind + persistWatchers = notifyMergeWatchers(lastPersistedEpoch, persistWatchers) + + numFilesOnDisk, _, _ = s.diskFileStats(nil) + } + + return lastMergedEpoch, persistWatchers +} + +func (s *Scorch) parsePersisterOptions() (*persisterOptions, error) { + po := persisterOptions{ + PersisterNapTimeMSec: DefaultPersisterNapTimeMSec, + PersisterNapUnderNumFiles: DefaultPersisterNapUnderNumFiles, + MemoryPressurePauseThreshold: DefaultMemoryPressurePauseThreshold, + } + if v, ok := s.config["scorchPersisterOptions"]; ok { + b, err := json.Marshal(v) + if err != nil { + return &po, err + } + + err = json.Unmarshal(b, &po) + if err != nil { + return &po, err + } + } + return &po, nil +} + +func (s *Scorch) persistSnapshot(snapshot *IndexSnapshot, + po *persisterOptions) error { + // Perform in-memory segment merging only when the memory pressure is + // below the configured threshold, else the persister performs the + // direct persistence of segments. 
+ if s.NumEventsBlocking() < po.MemoryPressurePauseThreshold { + persisted, err := s.persistSnapshotMaybeMerge(snapshot) + if err != nil { + return err + } + if persisted { + return nil + } + } + + return s.persistSnapshotDirect(snapshot) +} + +// DefaultMinSegmentsForInMemoryMerge represents the default number of +// in-memory zap segments that persistSnapshotMaybeMerge() needs to +// see in an IndexSnapshot before it decides to merge and persist +// those segments +var DefaultMinSegmentsForInMemoryMerge = 2 + +// persistSnapshotMaybeMerge examines the snapshot and might merge and +// persist the in-memory zap segments if there are enough of them +func (s *Scorch) persistSnapshotMaybeMerge(snapshot *IndexSnapshot) ( + bool, error) { + // collect the in-memory zap segments (SegmentBase instances) + var sbs []segment.Segment + var sbsDrops []*roaring.Bitmap + var sbsIndexes []int + + for i, segmentSnapshot := range snapshot.segment { + if _, ok := segmentSnapshot.segment.(segment.PersistedSegment); !ok { + sbs = append(sbs, segmentSnapshot.segment) + sbsDrops = append(sbsDrops, segmentSnapshot.deleted) + sbsIndexes = append(sbsIndexes, i) + } + } + + if len(sbs) < DefaultMinSegmentsForInMemoryMerge { + return false, nil + } + + newSnapshot, newSegmentID, err := s.mergeSegmentBases( + snapshot, sbs, sbsDrops, sbsIndexes) + if err != nil { + return false, err + } + if newSnapshot == nil { + return false, nil + } + + defer func() { + _ = newSnapshot.DecRef() + }() + + mergedSegmentIDs := map[uint64]struct{}{} + for _, idx := range sbsIndexes { + mergedSegmentIDs[snapshot.segment[idx].id] = struct{}{} + } + + // construct a snapshot that's logically equivalent to the input + // snapshot, but with merged segments replaced by the new segment + equiv := &IndexSnapshot{ + parent: snapshot.parent, + segment: make([]*SegmentSnapshot, 0, len(snapshot.segment)), + internal: snapshot.internal, + epoch: snapshot.epoch, + creator: "persistSnapshotMaybeMerge", + } + + // copy to 
the equiv the segments that weren't replaced + for _, segment := range snapshot.segment { + if _, wasMerged := mergedSegmentIDs[segment.id]; !wasMerged { + equiv.segment = append(equiv.segment, segment) + } + } + + // append to the equiv the new segment + for _, segment := range newSnapshot.segment { + if segment.id == newSegmentID { + equiv.segment = append(equiv.segment, &SegmentSnapshot{ + id: newSegmentID, + segment: segment.segment, + deleted: nil, // nil since merging handled deletions + }) + break + } + } + + err = s.persistSnapshotDirect(equiv) + if err != nil { + return false, err + } + + return true, nil +} + +func prepareBoltSnapshot(snapshot *IndexSnapshot, tx *bolt.Tx, path string, + segPlugin segment.Plugin) ([]string, map[uint64]string, error) { + snapshotsBucket, err := tx.CreateBucketIfNotExists(boltSnapshotsBucket) + if err != nil { + return nil, nil, err + } + newSnapshotKey := segment.EncodeUvarintAscending(nil, snapshot.epoch) + snapshotBucket, err := snapshotsBucket.CreateBucketIfNotExists(newSnapshotKey) + if err != nil { + return nil, nil, err + } + + // persist meta values + metaBucket, err := snapshotBucket.CreateBucketIfNotExists(boltMetaDataKey) + if err != nil { + return nil, nil, err + } + err = metaBucket.Put(boltMetaDataSegmentTypeKey, []byte(segPlugin.Type())) + if err != nil { + return nil, nil, err + } + buf := make([]byte, binary.MaxVarintLen32) + binary.BigEndian.PutUint32(buf, segPlugin.Version()) + err = metaBucket.Put(boltMetaDataSegmentVersionKey, buf) + if err != nil { + return nil, nil, err + } + + // persist internal values + internalBucket, err := snapshotBucket.CreateBucketIfNotExists(boltInternalKey) + if err != nil { + return nil, nil, err + } + // TODO optimize writing these in order? 
+ for k, v := range snapshot.internal { + err = internalBucket.Put([]byte(k), v) + if err != nil { + return nil, nil, err + } + } + + var filenames []string + newSegmentPaths := make(map[uint64]string) + + // first ensure that each segment in this snapshot has been persisted + for _, segmentSnapshot := range snapshot.segment { + snapshotSegmentKey := segment.EncodeUvarintAscending(nil, segmentSnapshot.id) + snapshotSegmentBucket, err := snapshotBucket.CreateBucketIfNotExists(snapshotSegmentKey) + if err != nil { + return nil, nil, err + } + switch seg := segmentSnapshot.segment.(type) { + case segment.PersistedSegment: + segPath := seg.Path() + filename := strings.TrimPrefix(segPath, path+string(os.PathSeparator)) + err = snapshotSegmentBucket.Put(boltPathKey, []byte(filename)) + if err != nil { + return nil, nil, err + } + filenames = append(filenames, filename) + case segment.UnpersistedSegment: + // need to persist this to disk + filename := zapFileName(segmentSnapshot.id) + path := path + string(os.PathSeparator) + filename + err = seg.Persist(path) + if err != nil { + return nil, nil, fmt.Errorf("error persisting segment: %v", err) + } + newSegmentPaths[segmentSnapshot.id] = path + err = snapshotSegmentBucket.Put(boltPathKey, []byte(filename)) + if err != nil { + return nil, nil, err + } + filenames = append(filenames, filename) + default: + return nil, nil, fmt.Errorf("unknown segment type: %T", seg) + } + // store current deleted bits + var roaringBuf bytes.Buffer + if segmentSnapshot.deleted != nil { + _, err = segmentSnapshot.deleted.WriteTo(&roaringBuf) + if err != nil { + return nil, nil, fmt.Errorf("error persisting roaring bytes: %v", err) + } + err = snapshotSegmentBucket.Put(boltDeletedKey, roaringBuf.Bytes()) + if err != nil { + return nil, nil, err + } + } + } + + return filenames, newSegmentPaths, nil +} + +func (s *Scorch) persistSnapshotDirect(snapshot *IndexSnapshot) (err error) { + // start a write transaction + tx, err := 
s.rootBolt.Begin(true) + if err != nil { + return err + } + // defer rollback on error + defer func() { + if err != nil { + _ = tx.Rollback() + } + }() + + filenames, newSegmentPaths, err := prepareBoltSnapshot(snapshot, tx, s.path, s.segPlugin) + if err != nil { + return err + } + + // we need to swap in a new root only when we've persisted 1 or + // more segments -- whereby the new root would have 1-for-1 + // replacements of in-memory segments with file-based segments + // + // other cases like updates to internal values only, and/or when + // there are only deletions, are already covered and persisted by + // the newly populated boltdb snapshotBucket above + if len(newSegmentPaths) > 0 { + // now try to open all the new snapshots + newSegments := make(map[uint64]segment.Segment) + defer func() { + for _, s := range newSegments { + if s != nil { + // cleanup segments that were opened but not + // swapped into the new root + _ = s.Close() + } + } + }() + for segmentID, path := range newSegmentPaths { + newSegments[segmentID], err = s.segPlugin.Open(path) + if err != nil { + return fmt.Errorf("error opening new segment at %s, %v", path, err) + } + } + + persist := &persistIntroduction{ + persisted: newSegments, + applied: make(notificationChan), + } + + select { + case <-s.closeCh: + return segment.ErrClosed + case s.persists <- persist: + } + + select { + case <-s.closeCh: + return segment.ErrClosed + case <-persist.applied: + } + } + + err = tx.Commit() + if err != nil { + return err + } + + err = s.rootBolt.Sync() + if err != nil { + return err + } + + // allow files to become eligible for removal after commit, such + // as file segments from snapshots that came from the merger + s.rootLock.Lock() + for _, filename := range filenames { + delete(s.ineligibleForRemoval, filename) + } + s.rootLock.Unlock() + + return nil +} + +func zapFileName(epoch uint64) string { + return fmt.Sprintf("%012x.zap", epoch) +} + +// bolt snapshot code + +var boltSnapshotsBucket = 
[]byte{'s'} +var boltPathKey = []byte{'p'} +var boltDeletedKey = []byte{'d'} +var boltInternalKey = []byte{'i'} +var boltMetaDataKey = []byte{'m'} +var boltMetaDataSegmentTypeKey = []byte("type") +var boltMetaDataSegmentVersionKey = []byte("version") + +func (s *Scorch) loadFromBolt() error { + return s.rootBolt.View(func(tx *bolt.Tx) error { + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + foundRoot := false + c := snapshots.Cursor() + for k, _ := c.Last(); k != nil; k, _ = c.Prev() { + _, snapshotEpoch, err := segment.DecodeUvarintAscending(k) + if err != nil { + log.Printf("unable to parse segment epoch %x, continuing", k) + continue + } + if foundRoot { + s.AddEligibleForRemoval(snapshotEpoch) + continue + } + snapshot := snapshots.Bucket(k) + if snapshot == nil { + log.Printf("snapshot key, but bucket missing %x, continuing", k) + s.AddEligibleForRemoval(snapshotEpoch) + continue + } + indexSnapshot, err := s.loadSnapshot(snapshot) + if err != nil { + log.Printf("unable to load snapshot, %v, continuing", err) + s.AddEligibleForRemoval(snapshotEpoch) + continue + } + indexSnapshot.epoch = snapshotEpoch + // set the nextSegmentID + s.nextSegmentID, err = s.maxSegmentIDOnDisk() + if err != nil { + return err + } + s.nextSegmentID++ + s.rootLock.Lock() + s.nextSnapshotEpoch = snapshotEpoch + 1 + rootPrev := s.root + s.root = indexSnapshot + s.rootLock.Unlock() + + if rootPrev != nil { + _ = rootPrev.DecRef() + } + + foundRoot = true + } + return nil + }) +} + +// LoadSnapshot loads the segment with the specified epoch +// NOTE: this is currently ONLY intended to be used by the command-line tool +func (s *Scorch) LoadSnapshot(epoch uint64) (rv *IndexSnapshot, err error) { + err = s.rootBolt.View(func(tx *bolt.Tx) error { + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + snapshotKey := segment.EncodeUvarintAscending(nil, epoch) + snapshot := snapshots.Bucket(snapshotKey) + if snapshot == 
nil { + return fmt.Errorf("snapshot with epoch: %v - doesn't exist", epoch) + } + rv, err = s.loadSnapshot(snapshot) + return err + }) + if err != nil { + return nil, err + } + return rv, nil +} + +func (s *Scorch) loadSnapshot(snapshot *bolt.Bucket) (*IndexSnapshot, error) { + + rv := &IndexSnapshot{ + parent: s, + internal: make(map[string][]byte), + refs: 1, + creator: "loadSnapshot", + } + // first we look for the meta-data bucket, this will tell us + // which segment type/version was used for this snapshot + // all operations for this scorch will use this type/version + metaBucket := snapshot.Bucket(boltMetaDataKey) + if metaBucket == nil { + _ = rv.DecRef() + return nil, fmt.Errorf("meta-data bucket missing") + } + segmentType := string(metaBucket.Get(boltMetaDataSegmentTypeKey)) + segmentVersion := binary.BigEndian.Uint32( + metaBucket.Get(boltMetaDataSegmentVersionKey)) + err := s.loadSegmentPlugin(segmentType, segmentVersion) + if err != nil { + _ = rv.DecRef() + return nil, fmt.Errorf( + "unable to load correct segment wrapper: %v", err) + } + var running uint64 + c := snapshot.Cursor() + for k, _ := c.First(); k != nil; k, _ = c.Next() { + if k[0] == boltInternalKey[0] { + internalBucket := snapshot.Bucket(k) + err := internalBucket.ForEach(func(key []byte, val []byte) error { + copiedVal := append([]byte(nil), val...) 
+ rv.internal[string(key)] = copiedVal + return nil + }) + if err != nil { + _ = rv.DecRef() + return nil, err + } + } else if k[0] != boltMetaDataKey[0] { + segmentBucket := snapshot.Bucket(k) + if segmentBucket == nil { + _ = rv.DecRef() + return nil, fmt.Errorf("segment key, but bucket missing % x", k) + } + segmentSnapshot, err := s.loadSegment(segmentBucket) + if err != nil { + _ = rv.DecRef() + return nil, fmt.Errorf("failed to load segment: %v", err) + } + _, segmentSnapshot.id, err = segment.DecodeUvarintAscending(k) + if err != nil { + _ = rv.DecRef() + return nil, fmt.Errorf("failed to decode segment id: %v", err) + } + rv.segment = append(rv.segment, segmentSnapshot) + rv.offsets = append(rv.offsets, running) + running += segmentSnapshot.segment.Count() + } + } + return rv, nil +} + +func (s *Scorch) loadSegment(segmentBucket *bolt.Bucket) (*SegmentSnapshot, error) { + pathBytes := segmentBucket.Get(boltPathKey) + if pathBytes == nil { + return nil, fmt.Errorf("segment path missing") + } + segmentPath := s.path + string(os.PathSeparator) + string(pathBytes) + segment, err := s.segPlugin.Open(segmentPath) + if err != nil { + return nil, fmt.Errorf("error opening bolt segment: %v", err) + } + + rv := &SegmentSnapshot{ + segment: segment, + cachedDocs: &cachedDocs{cache: nil}, + } + deletedBytes := segmentBucket.Get(boltDeletedKey) + if deletedBytes != nil { + deletedBitmap := roaring.NewBitmap() + r := bytes.NewReader(deletedBytes) + _, err := deletedBitmap.ReadFrom(r) + if err != nil { + _ = segment.Close() + return nil, fmt.Errorf("error reading deleted bytes: %v", err) + } + if !deletedBitmap.IsEmpty() { + rv.deleted = deletedBitmap + } + } + + return rv, nil +} + +func (s *Scorch) removeOldData() { + removed, err := s.removeOldBoltSnapshots() + if err != nil { + s.fireAsyncError(fmt.Errorf("got err removing old bolt snapshots: %v", err)) + } + atomic.AddUint64(&s.stats.TotSnapshotsRemovedFromMetaStore, uint64(removed)) + + err = s.removeOldZapFiles() + 
if err != nil { + s.fireAsyncError(fmt.Errorf("got err removing old zap files: %v", err)) + } +} + +// NumSnapshotsToKeep represents how many recent, old snapshots to +// keep around per Scorch instance. Useful for apps that require +// rollback'ability. +var NumSnapshotsToKeep = 1 + +// Removes enough snapshots from the rootBolt so that the +// s.eligibleForRemoval stays under the NumSnapshotsToKeep policy. +func (s *Scorch) removeOldBoltSnapshots() (numRemoved int, err error) { + persistedEpochs, err := s.RootBoltSnapshotEpochs() + if err != nil { + return 0, err + } + + if len(persistedEpochs) <= s.numSnapshotsToKeep { + // we need to keep everything + return 0, nil + } + + // make a map of epochs to protect from deletion + protectedEpochs := make(map[uint64]struct{}, s.numSnapshotsToKeep) + for _, epoch := range persistedEpochs[0:s.numSnapshotsToKeep] { + protectedEpochs[epoch] = struct{}{} + } + + var epochsToRemove []uint64 + var newEligible []uint64 + s.rootLock.Lock() + for _, epoch := range s.eligibleForRemoval { + if _, ok := protectedEpochs[epoch]; ok { + // protected + newEligible = append(newEligible, epoch) + } else { + epochsToRemove = append(epochsToRemove, epoch) + } + } + s.eligibleForRemoval = newEligible + s.rootLock.Unlock() + + if len(epochsToRemove) == 0 { + return 0, nil + } + + tx, err := s.rootBolt.Begin(true) + if err != nil { + return 0, err + } + defer func() { + if err == nil { + err = tx.Commit() + } else { + _ = tx.Rollback() + } + if err == nil { + err = s.rootBolt.Sync() + } + }() + + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return 0, nil + } + + for _, epochToRemove := range epochsToRemove { + k := segment.EncodeUvarintAscending(nil, epochToRemove) + err = snapshots.DeleteBucket(k) + if err == bolt.ErrBucketNotFound { + err = nil + } + if err == nil { + numRemoved++ + } + } + + return numRemoved, err +} + +func (s *Scorch) maxSegmentIDOnDisk() (uint64, error) { + currFileInfos, err := 
ioutil.ReadDir(s.path) + if err != nil { + return 0, err + } + + var rv uint64 + for _, finfo := range currFileInfos { + fname := finfo.Name() + if filepath.Ext(fname) == ".zap" { + prefix := strings.TrimSuffix(fname, ".zap") + id, err2 := strconv.ParseUint(prefix, 16, 64) + if err2 != nil { + return 0, err2 + } + if id > rv { + rv = id + } + } + } + return rv, err +} + +// Removes any *.zap files which aren't listed in the rootBolt. +func (s *Scorch) removeOldZapFiles() error { + liveFileNames, err := s.loadZapFileNames() + if err != nil { + return err + } + + currFileInfos, err := ioutil.ReadDir(s.path) + if err != nil { + return err + } + + s.rootLock.RLock() + + for _, finfo := range currFileInfos { + fname := finfo.Name() + if filepath.Ext(fname) == ".zap" { + if _, exists := liveFileNames[fname]; !exists && !s.ineligibleForRemoval[fname] { + err := os.Remove(s.path + string(os.PathSeparator) + fname) + if err != nil { + log.Printf("got err removing file: %s, err: %v", fname, err) + } + } + } + } + + s.rootLock.RUnlock() + + return nil +} + +func (s *Scorch) RootBoltSnapshotEpochs() ([]uint64, error) { + var rv []uint64 + err := s.rootBolt.View(func(tx *bolt.Tx) error { + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + sc := snapshots.Cursor() + for sk, _ := sc.Last(); sk != nil; sk, _ = sc.Prev() { + _, snapshotEpoch, err := segment.DecodeUvarintAscending(sk) + if err != nil { + continue + } + rv = append(rv, snapshotEpoch) + } + return nil + }) + return rv, err +} + +// Returns the *.zap file names that are listed in the rootBolt. 
+func (s *Scorch) loadZapFileNames() (map[string]struct{}, error) { + rv := map[string]struct{}{} + err := s.rootBolt.View(func(tx *bolt.Tx) error { + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + sc := snapshots.Cursor() + for sk, _ := sc.First(); sk != nil; sk, _ = sc.Next() { + snapshot := snapshots.Bucket(sk) + if snapshot == nil { + continue + } + segc := snapshot.Cursor() + for segk, _ := segc.First(); segk != nil; segk, _ = segc.Next() { + if segk[0] == boltInternalKey[0] { + continue + } + segmentBucket := snapshot.Bucket(segk) + if segmentBucket == nil { + continue + } + pathBytes := segmentBucket.Get(boltPathKey) + if pathBytes == nil { + continue + } + pathString := string(pathBytes) + rv[string(pathString)] = struct{}{} + } + } + return nil + }) + + return rv, err +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/rollback.go b/vendor/github.com/blevesearch/bleve/index/scorch/rollback.go new file mode 100644 index 0000000..7cc87bd --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/rollback.go @@ -0,0 +1,213 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorch + +import ( + "fmt" + "log" + "os" + + "github.com/blevesearch/bleve/index/scorch/segment" + bolt "go.etcd.io/bbolt" +) + +type RollbackPoint struct { + epoch uint64 + meta map[string][]byte +} + +func (r *RollbackPoint) GetInternal(key []byte) []byte { + return r.meta[string(key)] +} + +// RollbackPoints returns an array of rollback points available for +// the application to rollback to, with more recent rollback points +// (higher epochs) coming first. +func RollbackPoints(path string) ([]*RollbackPoint, error) { + if len(path) == 0 { + return nil, fmt.Errorf("RollbackPoints: invalid path") + } + + rootBoltPath := path + string(os.PathSeparator) + "root.bolt" + rootBoltOpt := &bolt.Options{ + ReadOnly: true, + } + rootBolt, err := bolt.Open(rootBoltPath, 0600, rootBoltOpt) + if err != nil || rootBolt == nil { + return nil, err + } + + // start a read-only bolt transaction + tx, err := rootBolt.Begin(false) + if err != nil { + return nil, fmt.Errorf("RollbackPoints: failed to start" + + " read-only transaction") + } + + // read-only bolt transactions to be rolled back + defer func() { + _ = tx.Rollback() + _ = rootBolt.Close() + }() + + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil, nil + } + + rollbackPoints := []*RollbackPoint{} + + c1 := snapshots.Cursor() + for k, _ := c1.Last(); k != nil; k, _ = c1.Prev() { + _, snapshotEpoch, err := segment.DecodeUvarintAscending(k) + if err != nil { + log.Printf("RollbackPoints:"+ + " unable to parse segment epoch %x, continuing", k) + continue + } + + snapshot := snapshots.Bucket(k) + if snapshot == nil { + log.Printf("RollbackPoints:"+ + " snapshot key, but bucket missing %x, continuing", k) + continue + } + + meta := map[string][]byte{} + c2 := snapshot.Cursor() + for j, _ := c2.First(); j != nil; j, _ = c2.Next() { + if j[0] == boltInternalKey[0] { + internalBucket := snapshot.Bucket(j) + err = internalBucket.ForEach(func(key []byte, val []byte) error { + copiedVal 
:= append([]byte(nil), val...) + meta[string(key)] = copiedVal + return nil + }) + if err != nil { + break + } + } + } + + if err != nil { + log.Printf("RollbackPoints:"+ + " failed in fetching internal data: %v", err) + continue + } + + rollbackPoints = append(rollbackPoints, &RollbackPoint{ + epoch: snapshotEpoch, + meta: meta, + }) + } + + return rollbackPoints, nil +} + +// Rollback atomically and durably brings the store back to the point +// in time as represented by the RollbackPoint. +// Rollback() should only be passed a RollbackPoint that came from the +// same store using the RollbackPoints() API along with the index path. +func Rollback(path string, to *RollbackPoint) error { + if to == nil { + return fmt.Errorf("Rollback: RollbackPoint is nil") + } + if len(path) == 0 { + return fmt.Errorf("Rollback: index path is empty") + } + + rootBoltPath := path + string(os.PathSeparator) + "root.bolt" + rootBoltOpt := &bolt.Options{ + ReadOnly: false, + } + rootBolt, err := bolt.Open(rootBoltPath, 0600, rootBoltOpt) + if err != nil || rootBolt == nil { + return err + } + defer func() { + err1 := rootBolt.Close() + if err1 != nil && err == nil { + err = err1 + } + }() + + // pick all the younger persisted epochs in bolt store + // including the target one. 
+ var found bool + var eligibleEpochs []uint64 + err = rootBolt.View(func(tx *bolt.Tx) error { + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + sc := snapshots.Cursor() + for sk, _ := sc.Last(); sk != nil && !found; sk, _ = sc.Prev() { + _, snapshotEpoch, err := segment.DecodeUvarintAscending(sk) + if err != nil { + continue + } + if snapshotEpoch == to.epoch { + found = true + } + eligibleEpochs = append(eligibleEpochs, snapshotEpoch) + } + return nil + }) + + if len(eligibleEpochs) == 0 { + return fmt.Errorf("Rollback: no persisted epochs found in bolt") + } + if !found { + return fmt.Errorf("Rollback: target epoch %d not found in bolt", to.epoch) + } + + // start a write transaction + tx, err := rootBolt.Begin(true) + if err != nil { + return err + } + + defer func() { + if err == nil { + err = tx.Commit() + } else { + _ = tx.Rollback() + } + if err == nil { + err = rootBolt.Sync() + } + }() + + snapshots := tx.Bucket(boltSnapshotsBucket) + if snapshots == nil { + return nil + } + for _, epoch := range eligibleEpochs { + k := segment.EncodeUvarintAscending(nil, epoch) + if err != nil { + continue + } + if epoch == to.epoch { + // return here as it already processed until the given epoch + return nil + } + err = snapshots.DeleteBucket(k) + if err == bolt.ErrBucketNotFound { + err = nil + } + } + + return err +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/scorch.go b/vendor/github.com/blevesearch/bleve/index/scorch/scorch.go new file mode 100644 index 0000000..fccff67 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/scorch.go @@ -0,0 +1,676 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorch + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "os" + "sync" + "sync/atomic" + "time" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/registry" + bolt "go.etcd.io/bbolt" +) + +const Name = "scorch" + +const Version uint8 = 2 + +var ErrClosed = fmt.Errorf("scorch closed") + +type Scorch struct { + nextSegmentID uint64 + stats Stats + iStats internalStats + + readOnly bool + version uint8 + config map[string]interface{} + analysisQueue *index.AnalysisQueue + path string + + unsafeBatch bool + + rootLock sync.RWMutex + root *IndexSnapshot // holds 1 ref-count on the root + rootPersisted []chan error // closed when root is persisted + persistedCallbacks []index.BatchCallback + nextSnapshotEpoch uint64 + eligibleForRemoval []uint64 // Index snapshot epochs that are safe to GC. + ineligibleForRemoval map[string]bool // Filenames that should not be GC'ed yet. 
+ + numSnapshotsToKeep int + closeCh chan struct{} + introductions chan *segmentIntroduction + persists chan *persistIntroduction + merges chan *segmentMerge + introducerNotifier chan *epochWatcher + persisterNotifier chan *epochWatcher + rootBolt *bolt.DB + asyncTasks sync.WaitGroup + + onEvent func(event Event) + onAsyncError func(err error) + + forceMergeRequestCh chan *mergerCtrl + + segPlugin segment.Plugin +} + +type internalStats struct { + persistEpoch uint64 + persistSnapshotSize uint64 + mergeEpoch uint64 + mergeSnapshotSize uint64 + newSegBufBytesAdded uint64 + newSegBufBytesRemoved uint64 + analysisBytesAdded uint64 + analysisBytesRemoved uint64 +} + +func NewScorch(storeName string, + config map[string]interface{}, + analysisQueue *index.AnalysisQueue) (index.Index, error) { + rv := &Scorch{ + version: Version, + config: config, + analysisQueue: analysisQueue, + nextSnapshotEpoch: 1, + closeCh: make(chan struct{}), + ineligibleForRemoval: map[string]bool{}, + forceMergeRequestCh: make(chan *mergerCtrl, 1), + segPlugin: defaultSegmentPlugin, + } + + forcedSegmentType, forcedSegmentVersion, err := configForceSegmentTypeVersion(config) + if err != nil { + return nil, err + } + if forcedSegmentType != "" && forcedSegmentVersion != 0 { + err := rv.loadSegmentPlugin(forcedSegmentType, + uint32(forcedSegmentVersion)) + if err != nil { + return nil, err + } + } + + rv.root = &IndexSnapshot{parent: rv, refs: 1, creator: "NewScorch"} + ro, ok := config["read_only"].(bool) + if ok { + rv.readOnly = ro + } + ub, ok := config["unsafe_batch"].(bool) + if ok { + rv.unsafeBatch = ub + } + ecbName, ok := config["eventCallbackName"].(string) + if ok { + rv.onEvent = RegistryEventCallbacks[ecbName] + } + aecbName, ok := config["asyncErrorCallbackName"].(string) + if ok { + rv.onAsyncError = RegistryAsyncErrorCallbacks[aecbName] + } + return rv, nil +} + +// configForceSegmentTypeVersion checks if the caller has requested a +// specific segment type/version +func 
configForceSegmentTypeVersion(config map[string]interface{}) (string, uint32, error) { + forcedSegmentVersion, err := parseToInteger(config["forceSegmentVersion"]) + if err != nil { + return "", 0, nil + } + + forcedSegmentType, ok := config["forceSegmentType"].(string) + if !ok { + return "", 0, fmt.Errorf( + "forceSegmentVersion set to %d, must also specify forceSegmentType", forcedSegmentVersion) + } + + return forcedSegmentType, uint32(forcedSegmentVersion), nil +} + +func (s *Scorch) NumEventsBlocking() uint64 { + eventsCompleted := atomic.LoadUint64(&s.stats.TotEventTriggerCompleted) + eventsStarted := atomic.LoadUint64(&s.stats.TotEventTriggerStarted) + return eventsStarted - eventsCompleted +} + +func (s *Scorch) fireEvent(kind EventKind, dur time.Duration) { + if s.onEvent != nil { + atomic.AddUint64(&s.stats.TotEventTriggerStarted, 1) + s.onEvent(Event{Kind: kind, Scorch: s, Duration: dur}) + atomic.AddUint64(&s.stats.TotEventTriggerCompleted, 1) + } +} + +func (s *Scorch) fireAsyncError(err error) { + if s.onAsyncError != nil { + s.onAsyncError(err) + } + atomic.AddUint64(&s.stats.TotOnErrors, 1) +} + +func (s *Scorch) Open() error { + err := s.openBolt() + if err != nil { + return err + } + + s.asyncTasks.Add(1) + go s.introducerLoop() + + if !s.readOnly && s.path != "" { + s.asyncTasks.Add(1) + go s.persisterLoop() + s.asyncTasks.Add(1) + go s.mergerLoop() + } + + return nil +} + +func (s *Scorch) openBolt() error { + var ok bool + s.path, ok = s.config["path"].(string) + if !ok { + return fmt.Errorf("must specify path") + } + if s.path == "" { + s.unsafeBatch = true + } + + var rootBoltOpt *bolt.Options + if s.readOnly { + rootBoltOpt = &bolt.Options{ + ReadOnly: true, + } + } else { + if s.path != "" { + err := os.MkdirAll(s.path, 0700) + if err != nil { + return err + } + } + } + + rootBoltPath := s.path + string(os.PathSeparator) + "root.bolt" + var err error + if s.path != "" { + s.rootBolt, err = bolt.Open(rootBoltPath, 0600, rootBoltOpt) + if 
err != nil { + return err + } + + // now see if there is any existing state to load + err = s.loadFromBolt() + if err != nil { + _ = s.Close() + return err + } + } + + atomic.StoreUint64(&s.stats.TotFileSegmentsAtRoot, uint64(len(s.root.segment))) + + s.introductions = make(chan *segmentIntroduction) + s.persists = make(chan *persistIntroduction) + s.merges = make(chan *segmentMerge) + s.introducerNotifier = make(chan *epochWatcher, 1) + s.persisterNotifier = make(chan *epochWatcher, 1) + s.closeCh = make(chan struct{}) + s.forceMergeRequestCh = make(chan *mergerCtrl, 1) + + if !s.readOnly && s.path != "" { + err := s.removeOldZapFiles() // Before persister or merger create any new files. + if err != nil { + _ = s.Close() + return err + } + } + + s.numSnapshotsToKeep = NumSnapshotsToKeep + if v, ok := s.config["numSnapshotsToKeep"]; ok { + var t int + if t, err = parseToInteger(v); err != nil { + return fmt.Errorf("numSnapshotsToKeep parse err: %v", err) + } + if t > 0 { + s.numSnapshotsToKeep = t + } + } + + return nil +} + +func (s *Scorch) Close() (err error) { + startTime := time.Now() + defer func() { + s.fireEvent(EventKindClose, time.Since(startTime)) + }() + + s.fireEvent(EventKindCloseStart, 0) + + // signal to async tasks we want to close + close(s.closeCh) + // wait for them to close + s.asyncTasks.Wait() + // now close the root bolt + if s.rootBolt != nil { + err = s.rootBolt.Close() + s.rootLock.Lock() + if s.root != nil { + err2 := s.root.DecRef() + if err == nil { + err = err2 + } + } + s.root = nil + s.rootLock.Unlock() + } + + return +} + +func (s *Scorch) Update(doc *document.Document) error { + b := index.NewBatch() + b.Update(doc) + return s.Batch(b) +} + +func (s *Scorch) Delete(id string) error { + b := index.NewBatch() + b.Delete(id) + return s.Batch(b) +} + +// Batch applices a batch of changes to the index atomically +func (s *Scorch) Batch(batch *index.Batch) (err error) { + start := time.Now() + + defer func() { + 
s.fireEvent(EventKindBatchIntroduction, time.Since(start)) + }() + + resultChan := make(chan *index.AnalysisResult, len(batch.IndexOps)) + + var numUpdates uint64 + var numDeletes uint64 + var numPlainTextBytes uint64 + var ids []string + for docID, doc := range batch.IndexOps { + if doc != nil { + // insert _id field + doc.AddField(document.NewTextFieldCustom("_id", nil, []byte(doc.ID), document.IndexField|document.StoreField, nil)) + numUpdates++ + numPlainTextBytes += doc.NumPlainTextBytes() + } else { + numDeletes++ + } + ids = append(ids, docID) + } + + // FIXME could sort ids list concurrent with analysis? + + if numUpdates > 0 { + go func() { + for _, doc := range batch.IndexOps { + if doc != nil { + aw := index.NewAnalysisWork(s, doc, resultChan) + // put the work on the queue + s.analysisQueue.Queue(aw) + } + } + }() + } + + // wait for analysis result + analysisResults := make([]*index.AnalysisResult, int(numUpdates)) + var itemsDeQueued uint64 + var totalAnalysisSize int + for itemsDeQueued < numUpdates { + result := <-resultChan + resultSize := result.Size() + atomic.AddUint64(&s.iStats.analysisBytesAdded, uint64(resultSize)) + totalAnalysisSize += resultSize + analysisResults[itemsDeQueued] = result + itemsDeQueued++ + } + close(resultChan) + defer atomic.AddUint64(&s.iStats.analysisBytesRemoved, uint64(totalAnalysisSize)) + + atomic.AddUint64(&s.stats.TotAnalysisTime, uint64(time.Since(start))) + + indexStart := time.Now() + + // notify handlers that we're about to introduce a segment + s.fireEvent(EventKindBatchIntroductionStart, 0) + + var newSegment segment.Segment + var bufBytes uint64 + if len(analysisResults) > 0 { + newSegment, bufBytes, err = s.segPlugin.New(analysisResults) + if err != nil { + return err + } + atomic.AddUint64(&s.iStats.newSegBufBytesAdded, bufBytes) + } else { + atomic.AddUint64(&s.stats.TotBatchesEmpty, 1) + } + + err = s.prepareSegment(newSegment, ids, batch.InternalOps, batch.PersistedCallback()) + if err != nil { + if 
newSegment != nil { + _ = newSegment.Close() + } + atomic.AddUint64(&s.stats.TotOnErrors, 1) + } else { + atomic.AddUint64(&s.stats.TotUpdates, numUpdates) + atomic.AddUint64(&s.stats.TotDeletes, numDeletes) + atomic.AddUint64(&s.stats.TotBatches, 1) + atomic.AddUint64(&s.stats.TotIndexedPlainTextBytes, numPlainTextBytes) + } + + atomic.AddUint64(&s.iStats.newSegBufBytesRemoved, bufBytes) + atomic.AddUint64(&s.stats.TotIndexTime, uint64(time.Since(indexStart))) + + return err +} + +func (s *Scorch) prepareSegment(newSegment segment.Segment, ids []string, + internalOps map[string][]byte, persistedCallback index.BatchCallback) error { + + // new introduction + introduction := &segmentIntroduction{ + id: atomic.AddUint64(&s.nextSegmentID, 1), + data: newSegment, + ids: ids, + obsoletes: make(map[uint64]*roaring.Bitmap), + internal: internalOps, + applied: make(chan error), + persistedCallback: persistedCallback, + } + + if !s.unsafeBatch { + introduction.persisted = make(chan error, 1) + } + + // optimistically prepare obsoletes outside of rootLock + s.rootLock.RLock() + root := s.root + root.AddRef() + s.rootLock.RUnlock() + + defer func() { _ = root.DecRef() }() + + for _, seg := range root.segment { + delta, err := seg.segment.DocNumbers(ids) + if err != nil { + return err + } + introduction.obsoletes[seg.id] = delta + } + + introStartTime := time.Now() + + s.introductions <- introduction + + // block until this segment is applied + err := <-introduction.applied + if err != nil { + return err + } + + if introduction.persisted != nil { + err = <-introduction.persisted + } + + introTime := uint64(time.Since(introStartTime)) + atomic.AddUint64(&s.stats.TotBatchIntroTime, introTime) + if atomic.LoadUint64(&s.stats.MaxBatchIntroTime) < introTime { + atomic.StoreUint64(&s.stats.MaxBatchIntroTime, introTime) + } + + return err +} + +func (s *Scorch) SetInternal(key, val []byte) error { + b := index.NewBatch() + b.SetInternal(key, val) + return s.Batch(b) +} + +func (s 
*Scorch) DeleteInternal(key []byte) error { + b := index.NewBatch() + b.DeleteInternal(key) + return s.Batch(b) +} + +// Reader returns a low-level accessor on the index data. Close it to +// release associated resources. +func (s *Scorch) Reader() (index.IndexReader, error) { + return s.currentSnapshot(), nil +} + +func (s *Scorch) currentSnapshot() *IndexSnapshot { + s.rootLock.RLock() + rv := s.root + if rv != nil { + rv.AddRef() + } + s.rootLock.RUnlock() + return rv +} + +func (s *Scorch) Stats() json.Marshaler { + return &s.stats +} + +func (s *Scorch) diskFileStats(rootSegmentPaths map[string]struct{}) (uint64, + uint64, uint64) { + var numFilesOnDisk, numBytesUsedDisk, numBytesOnDiskByRoot uint64 + if s.path != "" { + finfos, err := ioutil.ReadDir(s.path) + if err == nil { + for _, finfo := range finfos { + if !finfo.IsDir() { + numBytesUsedDisk += uint64(finfo.Size()) + numFilesOnDisk++ + if rootSegmentPaths != nil { + fname := s.path + string(os.PathSeparator) + finfo.Name() + if _, fileAtRoot := rootSegmentPaths[fname]; fileAtRoot { + numBytesOnDiskByRoot += uint64(finfo.Size()) + } + } + } + } + } + } + // if no root files path given, then consider all disk files. 
+ if rootSegmentPaths == nil { + return numFilesOnDisk, numBytesUsedDisk, numBytesUsedDisk + } + + return numFilesOnDisk, numBytesUsedDisk, numBytesOnDiskByRoot +} + +func (s *Scorch) StatsMap() map[string]interface{} { + m := s.stats.ToMap() + + indexSnapshot := s.currentSnapshot() + defer func() { + _ = indexSnapshot.Close() + }() + + rootSegPaths := indexSnapshot.diskSegmentsPaths() + + s.rootLock.RLock() + m["CurFilesIneligibleForRemoval"] = uint64(len(s.ineligibleForRemoval)) + s.rootLock.RUnlock() + + numFilesOnDisk, numBytesUsedDisk, numBytesOnDiskByRoot := s.diskFileStats(rootSegPaths) + + m["CurOnDiskBytes"] = numBytesUsedDisk + m["CurOnDiskFiles"] = numFilesOnDisk + + // TODO: consider one day removing these backwards compatible + // names for apps using the old names + m["updates"] = m["TotUpdates"] + m["deletes"] = m["TotDeletes"] + m["batches"] = m["TotBatches"] + m["errors"] = m["TotOnErrors"] + m["analysis_time"] = m["TotAnalysisTime"] + m["index_time"] = m["TotIndexTime"] + m["term_searchers_started"] = m["TotTermSearchersStarted"] + m["term_searchers_finished"] = m["TotTermSearchersFinished"] + m["num_plain_text_bytes_indexed"] = m["TotIndexedPlainTextBytes"] + m["num_items_introduced"] = m["TotIntroducedItems"] + m["num_items_persisted"] = m["TotPersistedItems"] + m["num_recs_to_persist"] = m["TotItemsToPersist"] + // total disk bytes found in index directory inclusive of older snapshots + m["num_bytes_used_disk"] = numBytesUsedDisk + // total disk bytes by the latest root index, exclusive of older snapshots + m["num_bytes_used_disk_by_root"] = numBytesOnDiskByRoot + // num_bytes_used_disk_by_root_reclaimable is an approximation about the + // reclaimable disk space in an index. 
(eg: from a full compaction) + m["num_bytes_used_disk_by_root_reclaimable"] = uint64(float64(numBytesOnDiskByRoot) * + indexSnapshot.reClaimableDocsRatio()) + m["num_files_on_disk"] = numFilesOnDisk + m["num_root_memorysegments"] = m["TotMemorySegmentsAtRoot"] + m["num_root_filesegments"] = m["TotFileSegmentsAtRoot"] + m["num_persister_nap_pause_completed"] = m["TotPersisterNapPauseCompleted"] + m["num_persister_nap_merger_break"] = m["TotPersisterMergerNapBreak"] + m["total_compaction_written_bytes"] = m["TotFileMergeWrittenBytes"] + + return m +} + +func (s *Scorch) Analyze(d *document.Document) *index.AnalysisResult { + return analyze(d) +} + +func analyze(d *document.Document) *index.AnalysisResult { + rv := &index.AnalysisResult{ + Document: d, + Analyzed: make([]analysis.TokenFrequencies, len(d.Fields)+len(d.CompositeFields)), + Length: make([]int, len(d.Fields)+len(d.CompositeFields)), + } + + for i, field := range d.Fields { + if field.Options().IsIndexed() { + fieldLength, tokenFreqs := field.Analyze() + rv.Analyzed[i] = tokenFreqs + rv.Length[i] = fieldLength + + if len(d.CompositeFields) > 0 && field.Name() != "_id" { + // see if any of the composite fields need this + for _, compositeField := range d.CompositeFields { + compositeField.Compose(field.Name(), fieldLength, tokenFreqs) + } + } + } + } + + return rv +} + +func (s *Scorch) Advanced() (store.KVStore, error) { + return nil, nil +} + +func (s *Scorch) AddEligibleForRemoval(epoch uint64) { + s.rootLock.Lock() + if s.root == nil || s.root.epoch != epoch { + s.eligibleForRemoval = append(s.eligibleForRemoval, epoch) + } + s.rootLock.Unlock() +} + +func (s *Scorch) MemoryUsed() (memUsed uint64) { + indexSnapshot := s.currentSnapshot() + if indexSnapshot == nil { + return + } + + defer func() { + _ = indexSnapshot.Close() + }() + + // Account for current root snapshot overhead + memUsed += uint64(indexSnapshot.Size()) + + // Account for snapshot that the persister may be working on + persistEpoch := 
atomic.LoadUint64(&s.iStats.persistEpoch) + persistSnapshotSize := atomic.LoadUint64(&s.iStats.persistSnapshotSize) + if persistEpoch != 0 && indexSnapshot.epoch > persistEpoch { + // the snapshot that the persister is working on isn't the same as + // the current snapshot + memUsed += persistSnapshotSize + } + + // Account for snapshot that the merger may be working on + mergeEpoch := atomic.LoadUint64(&s.iStats.mergeEpoch) + mergeSnapshotSize := atomic.LoadUint64(&s.iStats.mergeSnapshotSize) + if mergeEpoch != 0 && indexSnapshot.epoch > mergeEpoch { + // the snapshot that the merger is working on isn't the same as + // the current snapshot + memUsed += mergeSnapshotSize + } + + memUsed += (atomic.LoadUint64(&s.iStats.newSegBufBytesAdded) - + atomic.LoadUint64(&s.iStats.newSegBufBytesRemoved)) + + memUsed += (atomic.LoadUint64(&s.iStats.analysisBytesAdded) - + atomic.LoadUint64(&s.iStats.analysisBytesRemoved)) + + return memUsed +} + +func (s *Scorch) markIneligibleForRemoval(filename string) { + s.rootLock.Lock() + s.ineligibleForRemoval[filename] = true + s.rootLock.Unlock() +} + +func (s *Scorch) unmarkIneligibleForRemoval(filename string) { + s.rootLock.Lock() + delete(s.ineligibleForRemoval, filename) + s.rootLock.Unlock() +} + +func init() { + registry.RegisterIndexType(Name, NewScorch) +} + +func parseToInteger(i interface{}) (int, error) { + switch v := i.(type) { + case float64: + return int(v), nil + case int: + return v, nil + + default: + return 0, fmt.Errorf("expects int or float64 value") + } +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/segment/empty.go b/vendor/github.com/blevesearch/bleve/index/scorch/segment/empty.go new file mode 100644 index 0000000..340db73 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/segment/empty.go @@ -0,0 +1,137 @@ +// Copyright (c) 2017 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package segment + +import ( + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/couchbase/vellum" +) + +type EmptySegment struct{} + +func (e *EmptySegment) Dictionary(field string) (TermDictionary, error) { + return &EmptyDictionary{}, nil +} + +func (e *EmptySegment) VisitDocument(num uint64, visitor DocumentFieldValueVisitor) error { + return nil +} + +func (e *EmptySegment) DocID(num uint64) ([]byte, error) { + return nil, nil +} + +func (e *EmptySegment) Count() uint64 { + return 0 +} + +func (e *EmptySegment) DocNumbers([]string) (*roaring.Bitmap, error) { + r := roaring.NewBitmap() + return r, nil +} + +func (e *EmptySegment) Fields() []string { + return []string{} +} + +func (e *EmptySegment) Close() error { + return nil +} + +func (e *EmptySegment) Size() uint64 { + return 0 +} + +func (e *EmptySegment) AddRef() { +} + +func (e *EmptySegment) DecRef() error { + return nil +} + +type EmptyDictionary struct{} + +func (e *EmptyDictionary) PostingsList(term []byte, + except *roaring.Bitmap, prealloc PostingsList) (PostingsList, error) { + return &EmptyPostingsList{}, nil +} + +func (e *EmptyDictionary) Iterator() DictionaryIterator { + return &EmptyDictionaryIterator{} +} + +func (e *EmptyDictionary) PrefixIterator(prefix string) DictionaryIterator { + return &EmptyDictionaryIterator{} +} + +func (e *EmptyDictionary) RangeIterator(start, end string) 
DictionaryIterator { + return &EmptyDictionaryIterator{} +} + +func (e *EmptyDictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) DictionaryIterator { + return &EmptyDictionaryIterator{} +} + +func (e *EmptyDictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) DictionaryIterator { + return &EmptyDictionaryIterator{} +} + +func (e *EmptyDictionary) Contains(key []byte) (bool, error) { + return false, nil +} + +type EmptyDictionaryIterator struct{} + +func (e *EmptyDictionaryIterator) Next() (*index.DictEntry, error) { + return nil, nil +} + +func (e *EmptyDictionaryIterator) Contains(key []byte) (bool, error) { + return false, nil +} + +type EmptyPostingsList struct{} + +func (e *EmptyPostingsList) Iterator(includeFreq, includeNorm, includeLocations bool, + prealloc PostingsIterator) PostingsIterator { + return &EmptyPostingsIterator{} +} + +func (e *EmptyPostingsList) Size() int { + return 0 +} + +func (e *EmptyPostingsList) Count() uint64 { + return 0 +} + +type EmptyPostingsIterator struct{} + +func (e *EmptyPostingsIterator) Next() (Posting, error) { + return nil, nil +} + +func (e *EmptyPostingsIterator) Advance(uint64) (Posting, error) { + return nil, nil +} + +func (e *EmptyPostingsIterator) Size() int { + return 0 +} + +var AnEmptyPostingsIterator = &EmptyPostingsIterator{} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/segment/int.go b/vendor/github.com/blevesearch/bleve/index/scorch/segment/int.go new file mode 100644 index 0000000..55299d8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/segment/int.go @@ -0,0 +1,176 @@ +// Copyright 2014 The Cockroach Authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +// implied. See the License for the specific language governing +// permissions and limitations under the License. + +// This code originated from: +// https://github.com/cockroachdb/cockroach/blob/2dd65dde5d90c157f4b93f92502ca1063b904e1d/pkg/util/encoding/encoding.go + +// Modified to not use pkg/errors + +package segment + +import ( + "errors" + "fmt" +) + +const ( + MaxVarintSize = 9 + + // IntMin is chosen such that the range of int tags does not overlap the + // ascii character set that is frequently used in testing. + IntMin = 0x80 // 128 + intMaxWidth = 8 + intZero = IntMin + intMaxWidth // 136 + intSmall = IntMax - intZero - intMaxWidth // 109 + // IntMax is the maximum int tag value. + IntMax = 0xfd // 253 +) + +// EncodeUvarintAscending encodes the uint64 value using a variable length +// (length-prefixed) representation. The length is encoded as a single +// byte indicating the number of encoded bytes (-8) to follow. See +// EncodeVarintAscending for rationale. The encoded bytes are appended to the +// supplied buffer and the final buffer is returned. 
+func EncodeUvarintAscending(b []byte, v uint64) []byte { + switch { + case v <= intSmall: + return append(b, intZero+byte(v)) + case v <= 0xff: + return append(b, IntMax-7, byte(v)) + case v <= 0xffff: + return append(b, IntMax-6, byte(v>>8), byte(v)) + case v <= 0xffffff: + return append(b, IntMax-5, byte(v>>16), byte(v>>8), byte(v)) + case v <= 0xffffffff: + return append(b, IntMax-4, byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) + case v <= 0xffffffffff: + return append(b, IntMax-3, byte(v>>32), byte(v>>24), byte(v>>16), byte(v>>8), + byte(v)) + case v <= 0xffffffffffff: + return append(b, IntMax-2, byte(v>>40), byte(v>>32), byte(v>>24), byte(v>>16), + byte(v>>8), byte(v)) + case v <= 0xffffffffffffff: + return append(b, IntMax-1, byte(v>>48), byte(v>>40), byte(v>>32), byte(v>>24), + byte(v>>16), byte(v>>8), byte(v)) + default: + return append(b, IntMax, byte(v>>56), byte(v>>48), byte(v>>40), byte(v>>32), + byte(v>>24), byte(v>>16), byte(v>>8), byte(v)) + } +} + +// DecodeUvarintAscending decodes a varint encoded uint64 from the input +// buffer. The remainder of the input buffer and the decoded uint64 +// are returned. +func DecodeUvarintAscending(b []byte) ([]byte, uint64, error) { + if len(b) == 0 { + return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value") + } + length := int(b[0]) - intZero + b = b[1:] // skip length byte + if length <= intSmall { + return b, uint64(length), nil + } + length -= intSmall + if length < 0 || length > 8 { + return nil, 0, fmt.Errorf("invalid uvarint length of %d", length) + } else if len(b) < length { + return nil, 0, fmt.Errorf("insufficient bytes to decode uvarint value: %q", b) + } + var v uint64 + // It is faster to range over the elements in a slice than to index + // into the slice on each loop iteration. 
+ for _, t := range b[:length] { + v = (v << 8) | uint64(t) + } + return b[length:], v, nil +} + +// ------------------------------------------------------------ + +type MemUvarintReader struct { + C int // index of next byte to read from S + S []byte +} + +func NewMemUvarintReader(s []byte) *MemUvarintReader { + return &MemUvarintReader{S: s} +} + +// Len returns the number of unread bytes. +func (r *MemUvarintReader) Len() int { + n := len(r.S) - r.C + if n < 0 { + return 0 + } + return n +} + +var ErrMemUvarintReaderOverflow = errors.New("MemUvarintReader overflow") + +// ReadUvarint reads an encoded uint64. The original code this was +// based on is at encoding/binary/ReadUvarint(). +func (r *MemUvarintReader) ReadUvarint() (uint64, error) { + var x uint64 + var s uint + var C = r.C + var S = r.S + + for { + b := S[C] + C++ + + if b < 0x80 { + r.C = C + + // why 63? The original code had an 'i += 1' loop var and + // checked for i > 9 || i == 9 ...; but, we no longer + // check for the i var, but instead check here for s, + // which is incremented by 7. So, 7*9 == 63. + // + // why the "extra" >= check? The normal case is that s < + // 63, so we check this single >= guard first so that we + // hit the normal, nil-error return pathway sooner. + if s >= 63 && (s > 63 || s == 63 && b > 1) { + return 0, ErrMemUvarintReaderOverflow + } + + return x | uint64(b)<= 0; i-- { + rv[i] = rv[i] + 1 + if rv[i] != 0 { + return rv // didn't overflow, so stop + } + } + return nil // overflowed +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/segment/segment.go b/vendor/github.com/blevesearch/bleve/index/scorch/segment/segment.go new file mode 100644 index 0000000..ddd0d09 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/segment/segment.go @@ -0,0 +1,153 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package segment + +import ( + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/couchbase/vellum" +) + +var ErrClosed = fmt.Errorf("index closed") + +// DocumentFieldValueVisitor defines a callback to be visited for each +// stored field value. The return value determines if the visitor +// should keep going. Returning true continues visiting, false stops. +type DocumentFieldValueVisitor func(field string, typ byte, value []byte, pos []uint64) bool + +type Segment interface { + Dictionary(field string) (TermDictionary, error) + + VisitDocument(num uint64, visitor DocumentFieldValueVisitor) error + + DocID(num uint64) ([]byte, error) + + Count() uint64 + + DocNumbers([]string) (*roaring.Bitmap, error) + + Fields() []string + + Close() error + + Size() int + + AddRef() + DecRef() error +} + +type UnpersistedSegment interface { + Segment + Persist(path string) error +} + +type PersistedSegment interface { + Segment + Path() string +} + +type TermDictionary interface { + PostingsList(term []byte, except *roaring.Bitmap, prealloc PostingsList) (PostingsList, error) + + Iterator() DictionaryIterator + PrefixIterator(prefix string) DictionaryIterator + RangeIterator(start, end string) DictionaryIterator + AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) DictionaryIterator + OnlyIterator(onlyTerms [][]byte, includeCount bool) DictionaryIterator + + Contains(key []byte) (bool, error) +} + +type DictionaryIterator interface { + Next() (*index.DictEntry, error) +} + +type 
PostingsList interface { + Iterator(includeFreq, includeNorm, includeLocations bool, prealloc PostingsIterator) PostingsIterator + + Size() int + + Count() uint64 + + // NOTE deferred for future work + + // And(other PostingsList) PostingsList + // Or(other PostingsList) PostingsList +} + +type PostingsIterator interface { + // The caller is responsible for copying whatever it needs from + // the returned Posting instance before calling Next(), as some + // implementations may return a shared instance to reduce memory + // allocations. + Next() (Posting, error) + + // Advance will return the posting with the specified doc number + // or if there is no such posting, the next posting. + // Callers MUST NOT attempt to pass a docNum that is less than or + // equal to the currently visited posting doc Num. + Advance(docNum uint64) (Posting, error) + + Size() int +} + +type OptimizablePostingsIterator interface { + ActualBitmap() *roaring.Bitmap + DocNum1Hit() (uint64, bool) + ReplaceActual(*roaring.Bitmap) +} + +type Posting interface { + Number() uint64 + + Frequency() uint64 + Norm() float64 + + Locations() []Location + + Size() int +} + +type Location interface { + Field() string + Start() uint64 + End() uint64 + Pos() uint64 + ArrayPositions() []uint64 + Size() int +} + +// DocumentFieldTermVisitable is implemented by various scorch segment +// implementations with persistence for the un inverting of the +// postings or other indexed values. +type DocumentFieldTermVisitable interface { + VisitDocumentFieldTerms(localDocNum uint64, fields []string, + visitor index.DocumentFieldTermVisitor, optional DocVisitState) (DocVisitState, error) + + // VisitableDocValueFields implementation should return + // the list of fields which are document value persisted and + // therefore visitable by the above VisitDocumentFieldTerms method. 
+ VisitableDocValueFields() ([]string, error) +} + +type DocVisitState interface { +} + +type StatsReporter interface { + ReportBytesWritten(bytesWritten uint64) +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/segment/unadorned.go b/vendor/github.com/blevesearch/bleve/index/scorch/segment/unadorned.go new file mode 100644 index 0000000..db06562 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/segment/unadorned.go @@ -0,0 +1,160 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package segment + +import ( + "github.com/RoaringBitmap/roaring" + "math" + "reflect" +) + +var reflectStaticSizeUnadornedPostingsIteratorBitmap int +var reflectStaticSizeUnadornedPostingsIterator1Hit int +var reflectStaticSizeUnadornedPosting int + +func init() { + var pib UnadornedPostingsIteratorBitmap + reflectStaticSizeUnadornedPostingsIteratorBitmap = int(reflect.TypeOf(pib).Size()) + var pi1h UnadornedPostingsIterator1Hit + reflectStaticSizeUnadornedPostingsIterator1Hit = int(reflect.TypeOf(pi1h).Size()) + var up UnadornedPosting + reflectStaticSizeUnadornedPosting = int(reflect.TypeOf(up).Size()) +} + +type UnadornedPostingsIteratorBitmap struct { + actual roaring.IntPeekable + actualBM *roaring.Bitmap +} + +func (i *UnadornedPostingsIteratorBitmap) Next() (Posting, error) { + return i.nextAtOrAfter(0) +} + +func (i *UnadornedPostingsIteratorBitmap) Advance(docNum uint64) (Posting, error) { + return i.nextAtOrAfter(docNum) +} + +func (i *UnadornedPostingsIteratorBitmap) nextAtOrAfter(atOrAfter uint64) (Posting, error) { + docNum, exists := i.nextDocNumAtOrAfter(atOrAfter) + if !exists { + return nil, nil + } + return UnadornedPosting(docNum), nil +} + +func (i *UnadornedPostingsIteratorBitmap) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) { + if i.actual == nil || !i.actual.HasNext() { + return 0, false + } + i.actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.actual.HasNext() { + return 0, false // couldn't find anything + } + + return uint64(i.actual.Next()), true +} + +func (i *UnadornedPostingsIteratorBitmap) Size() int { + return reflectStaticSizeUnadornedPostingsIteratorBitmap +} + +func (i *UnadornedPostingsIteratorBitmap) ActualBitmap() *roaring.Bitmap { + return i.actualBM +} + +func (i *UnadornedPostingsIteratorBitmap) DocNum1Hit() (uint64, bool) { + return 0, false +} + +func (i *UnadornedPostingsIteratorBitmap) ReplaceActual(actual *roaring.Bitmap) { + i.actualBM = actual + i.actual = actual.Iterator() +} + +func 
NewUnadornedPostingsIteratorFromBitmap(bm *roaring.Bitmap) PostingsIterator { + return &UnadornedPostingsIteratorBitmap{ + actualBM: bm, + actual: bm.Iterator(), + } +} + +const docNum1HitFinished = math.MaxUint64 + +type UnadornedPostingsIterator1Hit struct { + docNum uint64 +} + +func (i *UnadornedPostingsIterator1Hit) Next() (Posting, error) { + return i.nextAtOrAfter(0) +} + +func (i *UnadornedPostingsIterator1Hit) Advance(docNum uint64) (Posting, error) { + return i.nextAtOrAfter(docNum) +} + +func (i *UnadornedPostingsIterator1Hit) nextAtOrAfter(atOrAfter uint64) (Posting, error) { + docNum, exists := i.nextDocNumAtOrAfter(atOrAfter) + if !exists { + return nil, nil + } + return UnadornedPosting(docNum), nil +} + +func (i *UnadornedPostingsIterator1Hit) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool) { + if i.docNum == docNum1HitFinished { + return 0, false + } + if i.docNum < atOrAfter { + // advanced past our 1-hit + i.docNum = docNum1HitFinished // consume our 1-hit docNum + return 0, false + } + docNum := i.docNum + i.docNum = docNum1HitFinished // consume our 1-hit docNum + return docNum, true +} + +func (i *UnadornedPostingsIterator1Hit) Size() int { + return reflectStaticSizeUnadornedPostingsIterator1Hit +} + +func NewUnadornedPostingsIteratorFrom1Hit(docNum1Hit uint64) PostingsIterator { + return &UnadornedPostingsIterator1Hit{ + docNum1Hit, + } +} + +type UnadornedPosting uint64 + +func (p UnadornedPosting) Number() uint64 { + return uint64(p) +} + +func (p UnadornedPosting) Frequency() uint64 { + return 0 +} + +func (p UnadornedPosting) Norm() float64 { + return 0 +} + +func (p UnadornedPosting) Locations() []Location { + return nil +} + +func (p UnadornedPosting) Size() int { + return reflectStaticSizeUnadornedPosting +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/segment_plugin.go b/vendor/github.com/blevesearch/bleve/index/scorch/segment_plugin.go new file mode 100644 index 0000000..2f7db48 --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/index/scorch/segment_plugin.go
@@ -0,0 +1,93 @@
// Copyright (c) 2019 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE(review): vendored third-party code; annotations only, no code changes.

package scorch

import (
	"fmt"

	"github.com/blevesearch/bleve/index/scorch/segment"

	zapv11 "github.com/blevesearch/zap/v11"
	zapv12 "github.com/blevesearch/zap/v12"
	zapv13 "github.com/blevesearch/zap/v13"
	zapv14 "github.com/blevesearch/zap/v14"
	zapv15 "github.com/blevesearch/zap/v15"
)

// supportedSegmentPlugins indexes registered plugins by type, then version.
var supportedSegmentPlugins map[string]map[uint32]segment.Plugin
var defaultSegmentPlugin segment.Plugin

func init() {
	ResetPlugins()
	// zap v11 is registered last with makeDefault=true, so it is the
	// default plugin despite newer versions also being registered.
	RegisterPlugin(zapv15.Plugin(), false)
	RegisterPlugin(zapv14.Plugin(), false)
	RegisterPlugin(zapv13.Plugin(), false)
	RegisterPlugin(zapv12.Plugin(), false)
	RegisterPlugin(zapv11.Plugin(), true)
}

// ResetPlugins clears the plugin registry.
func ResetPlugins() {
	supportedSegmentPlugins = map[string]map[uint32]segment.Plugin{}
}

// RegisterPlugin records a plugin under its type and version; when
// makeDefault is true it also becomes the package-wide default.
// NOTE(review): not goroutine-safe; presumably only called from init —
// confirm no runtime registration happens concurrently.
func RegisterPlugin(plugin segment.Plugin, makeDefault bool) {
	if _, ok := supportedSegmentPlugins[plugin.Type()]; !ok {
		supportedSegmentPlugins[plugin.Type()] = map[uint32]segment.Plugin{}
	}
	supportedSegmentPlugins[plugin.Type()][plugin.Version()] = plugin
	if makeDefault {
		defaultSegmentPlugin = plugin
	}
}

// SupportedSegmentTypes lists the registered segment type names.
func SupportedSegmentTypes() (rv []string) {
	for k := range supportedSegmentPlugins {
		rv = append(rv, k)
	}
	return
}

// SupportedSegmentTypeVersions lists the registered versions for a type.
func SupportedSegmentTypeVersions(typ string) (rv []uint32) {
	for k := range supportedSegmentPlugins[typ] {
		rv = append(rv, k)
	}
	return rv
}

// chooseSegmentPlugin resolves a (type, version) pair to a registered
// plugin, returning a descriptive error listing what is supported otherwise.
func chooseSegmentPlugin(forcedSegmentType string,
	forcedSegmentVersion uint32) (segment.Plugin, error) {
	if versions, ok := supportedSegmentPlugins[forcedSegmentType]; ok {
		if segPlugin, ok := versions[uint32(forcedSegmentVersion)]; ok {
			return segPlugin, nil
		}
		return nil, fmt.Errorf(
			"unsupported version %d for segment type: %s, supported: %v",
			forcedSegmentVersion, forcedSegmentType,
			SupportedSegmentTypeVersions(forcedSegmentType))
	}
	return nil, fmt.Errorf("unsupported segment type: %s, supported: %v",
		forcedSegmentType, SupportedSegmentTypes())
}

// loadSegmentPlugin selects and installs the segment plugin on the Scorch
// instance, or returns the resolution error.
func (s *Scorch) loadSegmentPlugin(forcedSegmentType string,
	forcedSegmentVersion uint32) error {
	segPlugin, err := chooseSegmentPlugin(forcedSegmentType,
		forcedSegmentVersion)
	if err != nil {
		return err
	}
	s.segPlugin = segPlugin
	return nil
}
diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index.go b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index.go
new file mode 100644
index 0000000..61204eb
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index.go
@@ -0,0 +1,755 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scorch

// NOTE(review): vendored third-party code; annotations only, no code changes.

import (
	"container/heap"
	"encoding/binary"
	"fmt"
	"reflect"
	"sort"
	"sync"
	"sync/atomic"

	"github.com/RoaringBitmap/roaring"
	"github.com/blevesearch/bleve/document"
	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/scorch/segment"
	"github.com/couchbase/vellum"
	lev "github.com/couchbase/vellum/levenshtein"
)

// re usable, threadsafe levenshtein builders
var lb1, lb2 *lev.LevenshteinAutomatonBuilder

// asynchSegmentResult is the per-segment result delivered over a channel by
// the concurrent fan-out helpers below; exactly one of the payload fields
// (dict/dictItr, docs, postings) is set, or err on failure.
type asynchSegmentResult struct {
	dict    segment.TermDictionary
	dictItr segment.DictionaryIterator

	index int
	docs  *roaring.Bitmap

	postings segment.PostingsList

	err error
}

var reflectStaticSizeIndexSnapshot int

func init() {
	var is interface{} = IndexSnapshot{}
	reflectStaticSizeIndexSnapshot = int(reflect.TypeOf(is).Size())
	var err error
	lb1, err = lev.NewLevenshteinAutomatonBuilder(1, true)
	if err != nil {
		panic(fmt.Errorf("Levenshtein automaton ed1 builder err: %v", err))
	}
	lb2, err = lev.NewLevenshteinAutomatonBuilder(2, true)
	if err != nil {
		panic(fmt.Errorf("Levenshtein automaton ed2 builder err: %v", err))
	}
}

// IndexSnapshot is an immutable, reference-counted view over a set of
// segment snapshots; offsets[i] is the global doc-number base of segment i.
type IndexSnapshot struct {
	parent   *Scorch
	segment  []*SegmentSnapshot
	offsets  []uint64
	internal map[string][]byte
	epoch    uint64
	size     uint64
	creator  string

	m    sync.Mutex // Protects the fields that follow.
	refs int64

	m2        sync.Mutex                                  // Protects the fields that follow.
	fieldTFRs map[string][]*IndexSnapshotTermFieldReader // keyed by field, recycled TFR's
}

// Segments exposes the underlying segment snapshots.
func (i *IndexSnapshot) Segments() []*SegmentSnapshot {
	return i.segment
}

// Internal exposes the snapshot's internal key/value data.
func (i *IndexSnapshot) Internal() map[string][]byte {
	return i.internal
}

// AddRef increments the snapshot's reference count.
func (i *IndexSnapshot) AddRef() {
	i.m.Lock()
	i.refs++
	i.m.Unlock()
}

// DecRef decrements the reference count; on reaching zero it releases the
// segments and notifies the parent that this epoch may be removed.
func (i *IndexSnapshot) DecRef() (err error) {
	i.m.Lock()
	i.refs--
	if i.refs == 0 {
		for _, s := range i.segment {
			if s != nil {
				err2 := s.segment.DecRef()
				if err == nil {
					err = err2
				}
			}
		}
		if i.parent != nil {
			go i.parent.AddEligibleForRemoval(i.epoch)
		}
	}
	i.m.Unlock()
	return err
}

// Close releases one reference.
func (i *IndexSnapshot) Close() error {
	return i.DecRef()
}

// Size reports the cached memory footprint (see updateSize).
func (i *IndexSnapshot) Size() int {
	return int(i.size)
}

func (i *IndexSnapshot) updateSize() {
	i.size += uint64(reflectStaticSizeIndexSnapshot)
	for _, s := range i.segment {
		i.size += uint64(s.Size())
	}
}

// newIndexSnapshotFieldDict fans out a dictionary fetch to every segment
// concurrently; with randomLookup=false it also primes a heap of per-segment
// cursors via makeItr for ordered merging.
func (i *IndexSnapshot) newIndexSnapshotFieldDict(field string,
	makeItr func(i segment.TermDictionary) segment.DictionaryIterator,
	randomLookup bool) (*IndexSnapshotFieldDict, error) {

	results := make(chan *asynchSegmentResult)
	for index, segment := range i.segment {
		go func(index int, segment *SegmentSnapshot) {
			dict, err := segment.segment.Dictionary(field)
			if err != nil {
				results <- &asynchSegmentResult{err: err}
			} else {
				if randomLookup {
					results <- &asynchSegmentResult{dict: dict}
				} else {
					results <- &asynchSegmentResult{dictItr: makeItr(dict)}
				}
			}
		}(index, segment)
	}

	var err error
	rv := &IndexSnapshotFieldDict{
		snapshot: i,
		cursors:  make([]*segmentDictCursor, 0, len(i.segment)),
	}
	for count := 0; count < len(i.segment); count++ {
		asr := <-results
		// NOTE(review): if a second segment reports an error (asr.err != nil
		// while err is already set), control falls into the else branch and
		// dereferences the nil asr.dictItr — latent nil-pointer panic; flag
		// upstream rather than patching vendored code.
		if asr.err != nil && err == nil {
			err = asr.err
		} else {
			if !randomLookup {
				next, err2 := asr.dictItr.Next()
				if err2 != nil && err == nil {
					err = err2
				}
				if next != nil {
					rv.cursors = append(rv.cursors, &segmentDictCursor{
						itr:  asr.dictItr,
						curr: *next,
					})
				}
			} else {
				rv.cursors = append(rv.cursors, &segmentDictCursor{
					dict: asr.dict,
				})
			}
		}
	}
	// after ensuring we've read all items on channel
	if err != nil {
		return nil, err
	}

	if !randomLookup {
		// prepare heap
		heap.Init(rv)
	}

	return rv, nil
}

// FieldDict iterates all terms of a field across segments, merged in order.
func (i *IndexSnapshot) FieldDict(field string) (index.FieldDict, error) {
	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.Iterator()
	}, false)
}

// FieldDictRange restricts iteration to [startTerm, endTerm].
func (i *IndexSnapshot) FieldDictRange(field string, startTerm []byte,
	endTerm []byte) (index.FieldDict, error) {
	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.RangeIterator(string(startTerm), string(endTerm))
	}, false)
}

// FieldDictPrefix restricts iteration to terms with the given prefix.
func (i *IndexSnapshot) FieldDictPrefix(field string,
	termPrefix []byte) (index.FieldDict, error) {
	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.PrefixIterator(string(termPrefix))
	}, false)
}

// FieldDictRegexp iterates terms matching a regular expression.
func (i *IndexSnapshot) FieldDictRegexp(field string,
	termRegex string) (index.FieldDict, error) {
	// TODO: potential optimization where the literal prefix represents the,
	// entire regexp, allowing us to use PrefixIterator(prefixTerm)?

	a, prefixBeg, prefixEnd, err := segment.ParseRegexp(termRegex)
	if err != nil {
		return nil, err
	}

	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.AutomatonIterator(a, prefixBeg, prefixEnd)
	}, false)
}

// getLevAutomaton builds a Levenshtein DFA for edit distance 1 or 2 using
// the shared package-level builders; other distances are rejected.
func (i *IndexSnapshot) getLevAutomaton(term string,
	fuzziness uint8) (vellum.Automaton, error) {
	if fuzziness == 1 {
		return lb1.BuildDfa(term, fuzziness)
	} else if fuzziness == 2 {
		return lb2.BuildDfa(term, fuzziness)
	}
	return nil, fmt.Errorf("fuzziness exceeds the max limit")
}

// FieldDictFuzzy iterates terms within the given edit distance of term,
// optionally constrained to a prefix.
func (i *IndexSnapshot) FieldDictFuzzy(field string,
	term string, fuzziness int, prefix string) (index.FieldDict, error) {
	a, err := i.getLevAutomaton(term, uint8(fuzziness))
	if err != nil {
		return nil, err
	}

	var prefixBeg, prefixEnd []byte
	if prefix != "" {
		prefixBeg = []byte(prefix)
		prefixEnd = segment.IncrementBytes(prefixBeg)
	}

	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.AutomatonIterator(a, prefixBeg, prefixEnd)
	}, false)
}

// FieldDictOnly iterates only the supplied terms.
func (i *IndexSnapshot) FieldDictOnly(field string,
	onlyTerms [][]byte, includeCount bool) (index.FieldDict, error) {
	return i.newIndexSnapshotFieldDict(field, func(i segment.TermDictionary) segment.DictionaryIterator {
		return i.OnlyIterator(onlyTerms, includeCount)
	}, false)
}

// FieldDictContains returns a membership-test view over the field's terms.
func (i *IndexSnapshot) FieldDictContains(field string) (index.FieldDictContains, error) {
	return i.newIndexSnapshotFieldDict(field, nil, true)
}

// DocIDReaderAll reads all live doc numbers across segments.
func (i *IndexSnapshot) DocIDReaderAll() (index.DocIDReader, error) {
	results := make(chan *asynchSegmentResult)
	for index, segment := range i.segment {
		go func(index int, segment *SegmentSnapshot) {
			results <- &asynchSegmentResult{
				index: index,
				docs:  segment.DocNumbersLive(),
			}
		}(index, segment)
	}

	return i.newDocIDReader(results)
}

// DocIDReaderOnly reads the doc numbers of the given external IDs.
func (i *IndexSnapshot) DocIDReaderOnly(ids []string) (index.DocIDReader, error) {
	results := make(chan *asynchSegmentResult)
	for index, segment := range i.segment {
		go func(index int, segment *SegmentSnapshot) {
			docs, err := segment.DocNumbers(ids)
			if err != nil {
				results <- &asynchSegmentResult{err: err}
			} else {
				results <- &asynchSegmentResult{
					index: index,
					docs:  docs,
				}
			}
		}(index, segment)
	}

	return i.newDocIDReader(results)
}

// newDocIDReader drains one result per segment, keeping the first error but
// always consuming the whole channel so no goroutine is left blocked.
func (i *IndexSnapshot) newDocIDReader(results chan *asynchSegmentResult) (index.DocIDReader, error) {
	rv := &IndexSnapshotDocIDReader{
		snapshot:  i,
		iterators: make([]roaring.IntIterable, len(i.segment)),
	}
	var err error
	for count := 0; count < len(i.segment); count++ {
		asr := <-results
		if asr.err != nil {
			if err == nil {
				// returns the first error encountered
				err = asr.err
			}
		} else if err == nil {
			rv.iterators[asr.index] = asr.docs.Iterator()
		}
	}

	if err != nil {
		return nil, err
	}

	return rv, nil
}

// Fields returns the union of field names across all segments.
func (i *IndexSnapshot) Fields() ([]string, error) {
	// FIXME not making this concurrent for now as it's not used in hot path
	// of any searches at the moment (just a debug aid)
	fieldsMap := map[string]struct{}{}
	for _, segment := range i.segment {
		fields := segment.Fields()
		for _, field := range fields {
			fieldsMap[field] = struct{}{}
		}
	}
	rv := make([]string, 0, len(fieldsMap))
	for k := range fieldsMap {
		rv = append(rv, k)
	}
	return rv, nil
}

// GetInternal looks up an internal key; a missing key yields (nil, nil).
func (i *IndexSnapshot) GetInternal(key []byte) ([]byte, error) {
	return i.internal[string(key)], nil
}

// DocCount sums the live doc counts of all segments.
func (i *IndexSnapshot) DocCount() (uint64, error) {
	var rv uint64
	for _, segment := range i.segment {
		rv += segment.Count()
	}
	return rv, nil
}

// Document materializes the stored fields of the document with the given
// external id, resolving it via the _id term field reader.
func (i *IndexSnapshot) Document(id string) (rv *document.Document, err error) {
	// FIXME could be done more efficiently directly, but reusing for simplicity
	tfr, err := i.TermFieldReader([]byte(id), "_id", false, false, false)
	if err != nil {
		return nil, err
	}
	defer func() {
		if cerr := tfr.Close(); err == nil && cerr != nil {
			err = cerr
		}
	}()

	next, err := tfr.Next(nil)
	if err != nil {
		return nil, err
	}

	if next == nil {
		// no such doc exists
		return nil, nil
	}

	docNum, err := docInternalToNumber(next.ID)
	if err != nil {
		return nil, err
	}
	segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)

	rv = document.NewDocument(id)
	err = i.segment[segmentIndex].VisitDocument(localDocNum, func(name string, typ byte, val []byte, pos []uint64) bool {
		if name == "_id" {
			return true
		}

		// copy value, array positions to preserve them beyond the scope of this callback
		value := append([]byte(nil), val...)
		arrayPos := append([]uint64(nil), pos...)

		// typ is the stored-field type tag written by the segment encoder.
		switch typ {
		case 't':
			rv.AddField(document.NewTextField(name, arrayPos, value))
		case 'n':
			rv.AddField(document.NewNumericFieldFromBytes(name, arrayPos, value))
		case 'd':
			rv.AddField(document.NewDateTimeFieldFromBytes(name, arrayPos, value))
		case 'b':
			rv.AddField(document.NewBooleanFieldFromBytes(name, arrayPos, value))
		case 'g':
			rv.AddField(document.NewGeoPointFieldFromBytes(name, arrayPos, value))
		}

		return true
	})
	if err != nil {
		return nil, err
	}

	return rv, nil
}

// segmentIndexAndLocalDocNumFromGlobal maps a global doc number to its
// segment index and the doc number local to that segment, via binary search
// over the cumulative offsets.
func (i *IndexSnapshot) segmentIndexAndLocalDocNumFromGlobal(docNum uint64) (int, uint64) {
	segmentIndex := sort.Search(len(i.offsets),
		func(x int) bool {
			return i.offsets[x] > docNum
		}) - 1

	localDocNum := docNum - i.offsets[segmentIndex]
	return int(segmentIndex), localDocNum
}

// ExternalID maps an internal doc number back to the external _id string.
func (i *IndexSnapshot) ExternalID(id index.IndexInternalID) (string, error) {
	docNum, err := docInternalToNumber(id)
	if err != nil {
		return "", err
	}
	segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)

	v, err := i.segment[segmentIndex].DocID(localDocNum)
	if err != nil {
		return "", err
	}
	if v == nil {
		return "", fmt.Errorf("document number %d not found", docNum)
	}

	return string(v), nil
}

// InternalID maps an external _id string to the internal doc number bytes,
// or (nil, nil) when the document does not exist.
func (i *IndexSnapshot) InternalID(id string) (rv index.IndexInternalID, err error) {
	// FIXME could be done more efficiently directly, but reusing for simplicity
	tfr, err := i.TermFieldReader([]byte(id), "_id", false, false, false)
	if err != nil {
		return nil, err
	}
	defer func() {
		if cerr := tfr.Close(); err == nil && cerr != nil {
			err = cerr
		}
	}()

	next, err := tfr.Next(nil)
	if err != nil || next == nil {
		return nil, err
	}

	return next.ID, nil
}

// TermFieldReader builds (or recycles) a reader over the postings of term in
// field across all segments.
func (i *IndexSnapshot) TermFieldReader(term []byte, field string, includeFreq,
	includeNorm, includeTermVectors bool) (index.TermFieldReader, error) {
	rv := i.allocTermFieldReaderDicts(field)

	rv.term = term
	rv.field = field
	rv.snapshot = i
	if rv.postings == nil {
		rv.postings = make([]segment.PostingsList, len(i.segment))
	}
	if rv.iterators == nil {
		rv.iterators = make([]segment.PostingsIterator, len(i.segment))
	}
	rv.segmentOffset = 0
	rv.includeFreq = includeFreq
	rv.includeNorm = includeNorm
	rv.includeTermVectors = includeTermVectors
	rv.currPosting = nil
	rv.currID = rv.currID[:0]

	if rv.dicts == nil {
		rv.dicts = make([]segment.TermDictionary, len(i.segment))
		// NOTE: the loop variable i deliberately shadows the receiver here;
		// the range expression i.segment is evaluated before the shadow.
		for i, segment := range i.segment {
			dict, err := segment.segment.Dictionary(field)
			if err != nil {
				return nil, err
			}
			rv.dicts[i] = dict
		}
	}

	for i, segment := range i.segment {
		pl, err := rv.dicts[i].PostingsList(term, segment.deleted, rv.postings[i])
		if err != nil {
			return nil, err
		}
		rv.postings[i] = pl
		rv.iterators[i] = pl.Iterator(includeFreq, includeNorm, includeTermVectors, rv.iterators[i])
	}
	atomic.AddUint64(&i.parent.stats.TotTermSearchersStarted, uint64(1))
	return rv, nil
}

// allocTermFieldReaderDicts pops a recycled reader for the field if one is
// pooled, else returns a fresh recyclable reader.
func (i *IndexSnapshot) allocTermFieldReaderDicts(field string) (tfr *IndexSnapshotTermFieldReader) {
	i.m2.Lock()
	if i.fieldTFRs != nil {
		tfrs := i.fieldTFRs[field]
		last := len(tfrs) - 1
		if last >= 0 {
			tfr = tfrs[last]
			tfrs[last] = nil
			i.fieldTFRs[field] = tfrs[:last]
			i.m2.Unlock()
			return
		}
	}
	i.m2.Unlock()
	return &IndexSnapshotTermFieldReader{
		recycle: true,
	}
}

// recycleTermFieldReader returns a reader to the pool, unless it is marked
// non-recyclable or this snapshot is no longer the current root.
func (i *IndexSnapshot) recycleTermFieldReader(tfr *IndexSnapshotTermFieldReader) {
	if !tfr.recycle {
		// Do not recycle an optimized unadorned term field reader (used for
		// ConjunctionUnadorned or DisjunctionUnadorned), during when a fresh
		// roaring.Bitmap is built by AND-ing or OR-ing individual bitmaps,
		// and we'll need to release them for GC. (See MB-40916)
		return
	}

	i.parent.rootLock.RLock()
	obsolete := i.parent.root != i
	i.parent.rootLock.RUnlock()
	if obsolete {
		// if we're not the current root (mutations happened), don't bother recycling
		return
	}

	i.m2.Lock()
	if i.fieldTFRs == nil {
		i.fieldTFRs = map[string][]*IndexSnapshotTermFieldReader{}
	}
	i.fieldTFRs[tfr.field] = append(i.fieldTFRs[tfr.field], tfr)
	i.m2.Unlock()
}

// docNumberToBytes encodes a doc number as 8 big-endian bytes, reusing buf
// when it has capacity.
func docNumberToBytes(buf []byte, in uint64) []byte {
	if len(buf) != 8 {
		if cap(buf) >= 8 {
			buf = buf[0:8]
		} else {
			buf = make([]byte, 8)
		}
	}
	binary.BigEndian.PutUint64(buf, in)
	return buf
}

// docInternalToNumber decodes the 8-byte big-endian internal ID.
func docInternalToNumber(in index.IndexInternalID) (uint64, error) {
	if len(in) != 8 {
		return 0, fmt.Errorf("wrong len for IndexInternalID: %q", in)
	}
	return binary.BigEndian.Uint64(in), nil
}

// DocumentVisitFieldTerms visits the field terms of one document.
func (i *IndexSnapshot) DocumentVisitFieldTerms(id index.IndexInternalID,
	fields []string, visitor index.DocumentFieldTermVisitor) error {
	_, err := i.documentVisitFieldTerms(id, fields, visitor, nil)
	return err
}

func (i *IndexSnapshot) documentVisitFieldTerms(id index.IndexInternalID,
	fields []string, visitor index.DocumentFieldTermVisitor,
	dvs segment.DocVisitState) (segment.DocVisitState, error) {
	docNum, err := docInternalToNumber(id)
	if err != nil {
		return nil, err
	}

	segmentIndex, localDocNum := i.segmentIndexAndLocalDocNumFromGlobal(docNum)
	if segmentIndex >= len(i.segment) {
		return nil, nil
	}

	_, dvs, err = i.documentVisitFieldTermsOnSegment(
		segmentIndex, localDocNum, fields, nil, visitor, dvs)

	return dvs, err
}

// documentVisitFieldTermsOnSegment visits doc-value-visitable fields via the
// segment while concurrently preparing the remaining fields from cachedDocs,
// then visits those from the cache.
func (i *IndexSnapshot) documentVisitFieldTermsOnSegment(
	segmentIndex int, localDocNum uint64, fields []string, cFields []string,
	visitor index.DocumentFieldTermVisitor, dvs segment.DocVisitState) (
	cFieldsOut []string, dvsOut segment.DocVisitState, err error) {
	ss := i.segment[segmentIndex]

	var vFields []string // fields that are visitable via the segment

	ssv, ssvOk := ss.segment.(segment.DocumentFieldTermVisitable)
	if ssvOk && ssv != nil {
		vFields, err = ssv.VisitableDocValueFields()
		if err != nil {
			return nil, nil, err
		}
	}

	var errCh chan error

	// cFields represents the fields that we'll need from the
	// cachedDocs, and might be optionally be provided by the caller,
	// if the caller happens to know we're on the same segmentIndex
	// from a previous invocation
	if cFields == nil {
		cFields = subtractStrings(fields, vFields)

		if !ss.cachedDocs.hasFields(cFields) {
			errCh = make(chan error, 1)

			go func() {
				err := ss.cachedDocs.prepareFields(cFields, ss)
				if err != nil {
					errCh <- err
				}
				close(errCh)
			}()
		}
	}

	if ssvOk && ssv != nil && len(vFields) > 0 {
		dvs, err = ssv.VisitDocumentFieldTerms(localDocNum, fields, visitor, dvs)
		if err != nil {
			return nil, nil, err
		}
	}

	if errCh != nil {
		err = <-errCh
		if err != nil {
			return nil, nil, err
		}
	}

	if len(cFields) > 0 {
		ss.cachedDocs.visitDoc(localDocNum, cFields, visitor)
	}

	return cFields, dvs, nil
}

// DocValueReader returns a reader that caches per-segment visit state across
// calls for the given fields.
func (i *IndexSnapshot) DocValueReader(fields []string) (
	index.DocValueReader, error) {
	return &DocValueReader{i: i, fields: fields, currSegmentIndex: -1}, nil
}

// DocValueReader visits doc values, reusing cached fields/visit state while
// successive calls stay within the same segment.
type DocValueReader struct {
	i      *IndexSnapshot
	fields []string
	dvs    segment.DocVisitState

	currSegmentIndex int
	currCachedFields []string
}

// VisitDocValues visits the doc values of one document.
func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID,
	visitor index.DocumentFieldTermVisitor) (err error) {
	docNum, err := docInternalToNumber(id)
	if err != nil {
		return err
	}

	segmentIndex, localDocNum := dvr.i.segmentIndexAndLocalDocNumFromGlobal(docNum)
	if segmentIndex >= len(dvr.i.segment) {
		return nil
	}

	if dvr.currSegmentIndex != segmentIndex {
		dvr.currSegmentIndex = segmentIndex
		dvr.currCachedFields = nil
	}

	dvr.currCachedFields, dvr.dvs, err = dvr.i.documentVisitFieldTermsOnSegment(
		dvr.currSegmentIndex, localDocNum, dvr.fields, dvr.currCachedFields, visitor, dvr.dvs)

	return err
}

// DumpAll is a debug stub that yields an immediately-closed channel.
func (i *IndexSnapshot) DumpAll() chan interface{} {
	rv := make(chan interface{})
	go func() {
		close(rv)
	}()
	return rv
}

// DumpDoc is a debug stub that yields an immediately-closed channel.
func (i *IndexSnapshot) DumpDoc(id string) chan interface{} {
	rv := make(chan interface{})
	go func() {
		close(rv)
	}()
	return rv
}

// DumpFields is a debug stub that yields an immediately-closed channel.
func (i *IndexSnapshot) DumpFields() chan interface{} {
	rv := make(chan interface{})
	go func() {
		close(rv)
	}()
	return rv
}

// diskSegmentsPaths collects the on-disk paths of persisted segments.
func (i *IndexSnapshot) diskSegmentsPaths() map[string]struct{} {
	rv := make(map[string]struct{}, len(i.segment))
	for _, segmentSnapshot := range i.segment {
		if seg, ok := segmentSnapshot.segment.(segment.PersistedSegment); ok {
			rv[seg.Path()] = struct{}{}
		}
	}
	return rv
}

// reClaimableDocsRatio gives a ratio about the obsoleted or
// reclaimable documents present in a given index snapshot.
func (i *IndexSnapshot) reClaimableDocsRatio() float64 {
	var totalCount, liveCount uint64
	for _, segmentSnapshot := range i.segment {
		if _, ok := segmentSnapshot.segment.(segment.PersistedSegment); ok {
			totalCount += uint64(segmentSnapshot.FullSize())
			liveCount += uint64(segmentSnapshot.Count())
		}
	}

	if totalCount > 0 {
		return float64(totalCount-liveCount) / float64(totalCount)
	}
	return 0
}

// subtractStrings returns set a minus elements of set b.
func subtractStrings(a, b []string) []string {
	if len(b) == 0 {
		return a
	}

	rv := make([]string, 0, len(a))
OUTER:
	for _, as := range a {
		for _, bs := range b {
			if as == bs {
				continue OUTER
			}
		}
		rv = append(rv, as)
	}
	return rv
}
diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_dict.go b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_dict.go
new file mode 100644
index 0000000..47486c2
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_dict.go
@@ -0,0 +1,108 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scorch

// NOTE(review): vendored third-party code; annotations only, no code changes.

import (
	"container/heap"

	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/scorch/segment"
)

// segmentDictCursor tracks one segment's position in the merged dictionary:
// either an ordered iterator plus its current entry, or (for random lookup)
// just the dictionary itself.
type segmentDictCursor struct {
	dict segment.TermDictionary
	itr  segment.DictionaryIterator
	curr index.DictEntry
}

// IndexSnapshotFieldDict merges per-segment dictionaries into one ordered
// stream; it implements heap.Interface over its cursors, keyed by term.
type IndexSnapshotFieldDict struct {
	snapshot *IndexSnapshot
	cursors  []*segmentDictCursor
	entry    index.DictEntry
}

func (i *IndexSnapshotFieldDict) Len() int { return len(i.cursors) }
func (i *IndexSnapshotFieldDict) Less(a, b int) bool {
	return i.cursors[a].curr.Term < i.cursors[b].curr.Term
}
func (i *IndexSnapshotFieldDict) Swap(a, b int) {
	i.cursors[a], i.cursors[b] = i.cursors[b], i.cursors[a]
}

func (i *IndexSnapshotFieldDict) Push(x interface{}) {
	i.cursors = append(i.cursors, x.(*segmentDictCursor))
}

func (i *IndexSnapshotFieldDict) Pop() interface{} {
	n := len(i.cursors)
	x := i.cursors[n-1]
	i.cursors = i.cursors[0 : n-1]
	return x
}

// Next pops the smallest term off the heap, summing counts for equal terms
// appearing in multiple segments; returns nil at end of iteration.
func (i *IndexSnapshotFieldDict) Next() (*index.DictEntry, error) {
	if len(i.cursors) == 0 {
		return nil, nil
	}
	i.entry = i.cursors[0].curr
	next, err := i.cursors[0].itr.Next()
	if err != nil {
		return nil, err
	}
	if next == nil {
		// at end of this cursor, remove it
		heap.Pop(i)
	} else {
		// modified heap, fix it
		i.cursors[0].curr = *next
		heap.Fix(i, 0)
	}
	// look for any other entries with the exact same term
	for len(i.cursors) > 0 && i.cursors[0].curr.Term == i.entry.Term {
		i.entry.Count += i.cursors[0].curr.Count
		next, err := i.cursors[0].itr.Next()
		if err != nil {
			return nil, err
		}
		if next == nil {
			// at end of this cursor, remove it
			heap.Pop(i)
		} else {
			// modified heap, fix it
			i.cursors[0].curr = *next
			heap.Fix(i, 0)
		}
	}

	return &i.entry, nil
}

// Close is a no-op; the underlying snapshot owns all resources.
func (i *IndexSnapshotFieldDict) Close() error {
	return nil
}

// Contains reports whether any segment's dictionary contains key; per-segment
// lookup errors are deliberately ignored (best-effort membership test).
func (i *IndexSnapshotFieldDict) Contains(key []byte) (bool, error) {
	if len(i.cursors) == 0 {
		return false, nil
	}

	for _, cursor := range i.cursors {
		if found, _ := cursor.dict.Contains(key); found {
			return true, nil
		}
	}

	return false, nil
}
diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_doc.go b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_doc.go
new file mode 100644
index 0000000..27da208
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_doc.go
@@ -0,0 +1,80 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scorch

// NOTE(review): vendored third-party code; annotations only, no code changes.

import (
	"bytes"
	"reflect"

	"github.com/RoaringBitmap/roaring"
	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/size"
)

var reflectStaticSizeIndexSnapshotDocIDReader int

func init() {
	var isdr IndexSnapshotDocIDReader
	reflectStaticSizeIndexSnapshotDocIDReader = int(reflect.TypeOf(isdr).Size())
}

// IndexSnapshotDocIDReader iterates doc IDs across all segments of a
// snapshot, one per-segment bitmap iterator at a time.
type IndexSnapshotDocIDReader struct {
	snapshot      *IndexSnapshot
	iterators     []roaring.IntIterable
	segmentOffset int
}

// Size reports the static in-memory size of this reader.
func (i *IndexSnapshotDocIDReader) Size() int {
	return reflectStaticSizeIndexSnapshotDocIDReader + size.SizeOfPtr
}

// Next returns the next internal doc ID in global order, or nil at the end.
func (i *IndexSnapshotDocIDReader) Next() (index.IndexInternalID, error) {
	for i.segmentOffset < len(i.iterators) {
		if !i.iterators[i.segmentOffset].HasNext() {
			i.segmentOffset++
			continue
		}
		next := i.iterators[i.segmentOffset].Next()
		// make segment number into global number by adding offset
		globalOffset := i.snapshot.offsets[i.segmentOffset]
		return docNumberToBytes(nil, uint64(next)+globalOffset), nil
	}
	return nil, nil
}

// Advance steps forward until reaching an ID >= the requested ID, or nil.
func (i *IndexSnapshotDocIDReader) Advance(ID index.IndexInternalID) (index.IndexInternalID, error) {
	// FIXME do something better
	next, err := i.Next()
	if err != nil {
		return nil, err
	}
	if next == nil {
		return nil, nil
	}
	for bytes.Compare(next, ID) < 0 {
		next, err = i.Next()
		if err != nil {
			return nil, err
		}
		if next == nil {
			break
		}
	}
	return next, nil
}

// Close is a no-op; the underlying snapshot owns all resources.
func (i *IndexSnapshotDocIDReader) Close() error {
	return nil
}
diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_tfr.go b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_tfr.go
new file mode 100644
index 0000000..239f68f
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_index_tfr.go
@@ -0,0 +1,188 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE(review): vendored third-party code; annotations only, no code changes.

package scorch

import (
	"bytes"
	"fmt"
	"reflect"
	"sync/atomic"

	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/scorch/segment"
	"github.com/blevesearch/bleve/size"
)

var reflectStaticSizeIndexSnapshotTermFieldReader int

func init() {
	var istfr IndexSnapshotTermFieldReader
	reflectStaticSizeIndexSnapshotTermFieldReader = int(reflect.TypeOf(istfr).Size())
}

// IndexSnapshotTermFieldReader iterates the postings of one term in one
// field across all segments of a snapshot; instances are pooled/recycled by
// the snapshot (see recycle flag).
type IndexSnapshotTermFieldReader struct {
	term               []byte
	field              string
	snapshot           *IndexSnapshot
	dicts              []segment.TermDictionary
	postings           []segment.PostingsList
	iterators          []segment.PostingsIterator
	segmentOffset      int
	includeFreq        bool
	includeNorm        bool
	includeTermVectors bool
	currPosting        segment.Posting
	currID             index.IndexInternalID
	recycle            bool
}

// Size sums the static struct size plus the dynamic postings/iterator state.
func (i *IndexSnapshotTermFieldReader) Size() int {
	sizeInBytes := reflectStaticSizeIndexSnapshotTermFieldReader + size.SizeOfPtr +
		len(i.term) +
		len(i.field) +
		len(i.currID)

	for _, entry := range i.postings {
		sizeInBytes += entry.Size()
	}

	for _, entry := range i.iterators {
		sizeInBytes += entry.Size()
	}

	if i.currPosting != nil {
		sizeInBytes += i.currPosting.Size()
	}

	return sizeInBytes
}

// Next returns the next term/field hit in global doc-number order, reusing
// preAlloced when provided; nil at end of iteration.
func (i *IndexSnapshotTermFieldReader) Next(preAlloced *index.TermFieldDoc) (*index.TermFieldDoc, error) {
	rv := preAlloced
	if rv == nil {
		rv = &index.TermFieldDoc{}
	}
	// find the next hit
	for i.segmentOffset < len(i.iterators) {
		next, err := i.iterators[i.segmentOffset].Next()
		if err != nil {
			return nil, err
		}
		if next != nil {
			// make segment number into global number by adding offset
			globalOffset := i.snapshot.offsets[i.segmentOffset]
			nnum := next.Number()
			rv.ID = docNumberToBytes(rv.ID, nnum+globalOffset)
			i.postingToTermFieldDoc(next, rv)

			i.currID = rv.ID
			i.currPosting = next
			return rv, nil
		}
		i.segmentOffset++
	}
	return nil, nil
}

// postingToTermFieldDoc copies the requested adornments (freq/norm/vectors)
// from a posting into the result doc, reusing the Vectors backing array.
func (i *IndexSnapshotTermFieldReader) postingToTermFieldDoc(next segment.Posting, rv *index.TermFieldDoc) {
	if i.includeFreq {
		rv.Freq = next.Frequency()
	}
	if i.includeNorm {
		rv.Norm = next.Norm()
	}
	if i.includeTermVectors {
		locs := next.Locations()
		if cap(rv.Vectors) < len(locs) {
			rv.Vectors = make([]*index.TermFieldVector, len(locs))
			backing := make([]index.TermFieldVector, len(locs))
			for i := range backing {
				rv.Vectors[i] = &backing[i]
			}
		}
		rv.Vectors = rv.Vectors[:len(locs)]
		for i, loc := range locs {
			*rv.Vectors[i] = index.TermFieldVector{
				Start:          loc.Start(),
				End:            loc.End(),
				Pos:            loc.Pos(),
				ArrayPositions: loc.ArrayPositions(),
				Field:          loc.Field(),
			}
		}
	}
}

// Advance jumps to the first hit with internal ID >= ID; seeking backwards
// rebuilds the reader from the snapshot.
func (i *IndexSnapshotTermFieldReader) Advance(ID index.IndexInternalID, preAlloced *index.TermFieldDoc) (*index.TermFieldDoc, error) {
	// FIXME do something better
	// for now, if we need to seek backwards, then restart from the beginning
	if i.currPosting != nil && bytes.Compare(i.currID, ID) >= 0 {
		i2, err := i.snapshot.TermFieldReader(i.term, i.field,
			i.includeFreq, i.includeNorm, i.includeTermVectors)
		if err != nil {
			return nil, err
		}
		// close the current term field reader before replacing it with a new one
		_ = i.Close()
		*i = *(i2.(*IndexSnapshotTermFieldReader))
	}
	num, err := docInternalToNumber(ID)
	if err != nil {
		return nil, fmt.Errorf("error converting to doc number % x - %v", ID, err)
	}
	segIndex, ldocNum := i.snapshot.segmentIndexAndLocalDocNumFromGlobal(num)
	if segIndex >= len(i.snapshot.segment) {
		return nil, fmt.Errorf("computed segment index %d out of bounds %d",
			segIndex, len(i.snapshot.segment))
	}
	// skip directly to the target segment
	i.segmentOffset = segIndex
	next, err := i.iterators[i.segmentOffset].Advance(ldocNum)
	if err != nil {
		return nil, err
	}
	if next == nil {
		// we jumped directly to the segment that should have contained it
		// but it wasn't there, so reuse Next() which should correctly
		// get the next hit after it (we moved i.segmentOffset)
		return i.Next(preAlloced)
	}

	if preAlloced == nil {
		preAlloced = &index.TermFieldDoc{}
	}
	preAlloced.ID = docNumberToBytes(preAlloced.ID, next.Number()+
		i.snapshot.offsets[segIndex])
	i.postingToTermFieldDoc(next, preAlloced)
	i.currID = preAlloced.ID
	i.currPosting = next
	return preAlloced, nil
}

// Count sums the postings counts across all segments.
func (i *IndexSnapshotTermFieldReader) Count() uint64 {
	var rv uint64
	for _, posting := range i.postings {
		rv += posting.Count()
	}
	return rv
}

// Close updates search stats and offers the reader back to the snapshot's
// recycle pool.
func (i *IndexSnapshotTermFieldReader) Close() error {
	if i.snapshot != nil {
		atomic.AddUint64(&i.snapshot.parent.stats.TotTermSearchersFinished, uint64(1))
		i.snapshot.recycleTermFieldReader(i)
	}
	return nil
}
diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_segment.go b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_segment.go
new file mode 100644
index 0000000..96742b4
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/index/scorch/snapshot_segment.go
@@ -0,0 +1,279 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorch + +import ( + "bytes" + "sync" + "sync/atomic" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var TermSeparator byte = 0xff + +var TermSeparatorSplitSlice = []byte{TermSeparator} + +type SegmentSnapshot struct { + id uint64 + segment segment.Segment + deleted *roaring.Bitmap + creator string + + cachedDocs *cachedDocs +} + +func (s *SegmentSnapshot) Segment() segment.Segment { + return s.segment +} + +func (s *SegmentSnapshot) Deleted() *roaring.Bitmap { + return s.deleted +} + +func (s *SegmentSnapshot) Id() uint64 { + return s.id +} + +func (s *SegmentSnapshot) FullSize() int64 { + return int64(s.segment.Count()) +} + +func (s SegmentSnapshot) LiveSize() int64 { + return int64(s.Count()) +} + +func (s *SegmentSnapshot) Close() error { + return s.segment.Close() +} + +func (s *SegmentSnapshot) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + return s.segment.VisitDocument(num, visitor) +} + +func (s *SegmentSnapshot) DocID(num uint64) ([]byte, error) { + return s.segment.DocID(num) +} + +func (s *SegmentSnapshot) Count() uint64 { + rv := s.segment.Count() + if s.deleted != nil { + rv -= s.deleted.GetCardinality() + } + return rv +} + +func (s *SegmentSnapshot) DocNumbers(docIDs []string) (*roaring.Bitmap, error) { + rv, err := s.segment.DocNumbers(docIDs) + if err != nil { + return nil, err + } + if s.deleted != nil { + rv.AndNot(s.deleted) + } + return rv, nil +} + +// 
DocNumbersLive returns a bitmap containing doc numbers for all live docs +func (s *SegmentSnapshot) DocNumbersLive() *roaring.Bitmap { + rv := roaring.NewBitmap() + rv.AddRange(0, s.segment.Count()) + if s.deleted != nil { + rv.AndNot(s.deleted) + } + return rv +} + +func (s *SegmentSnapshot) Fields() []string { + return s.segment.Fields() +} + +func (s *SegmentSnapshot) Size() (rv int) { + rv = s.segment.Size() + if s.deleted != nil { + rv += int(s.deleted.GetSizeInBytes()) + } + rv += s.cachedDocs.Size() + return +} + +type cachedFieldDocs struct { + m sync.Mutex + readyCh chan struct{} // closed when the cachedFieldDocs.docs is ready to be used. + err error // Non-nil if there was an error when preparing this cachedFieldDocs. + docs map[uint64][]byte // Keyed by localDocNum, value is a list of terms delimited by 0xFF. + size uint64 +} + +func (cfd *cachedFieldDocs) Size() int { + var rv int + cfd.m.Lock() + for _, entry := range cfd.docs { + rv += 8 /* size of uint64 */ + len(entry) + } + cfd.m.Unlock() + return rv +} + +func (cfd *cachedFieldDocs) prepareField(field string, ss *SegmentSnapshot) { + cfd.m.Lock() + defer func() { + close(cfd.readyCh) + cfd.m.Unlock() + }() + + cfd.size += uint64(size.SizeOfUint64) /* size field */ + dict, err := ss.segment.Dictionary(field) + if err != nil { + cfd.err = err + return + } + + var postings segment.PostingsList + var postingsItr segment.PostingsIterator + + dictItr := dict.Iterator() + next, err := dictItr.Next() + for err == nil && next != nil { + var err1 error + postings, err1 = dict.PostingsList([]byte(next.Term), nil, postings) + if err1 != nil { + cfd.err = err1 + return + } + + cfd.size += uint64(size.SizeOfUint64) /* map key */ + postingsItr = postings.Iterator(false, false, false, postingsItr) + nextPosting, err2 := postingsItr.Next() + for err2 == nil && nextPosting != nil { + docNum := nextPosting.Number() + cfd.docs[docNum] = append(cfd.docs[docNum], []byte(next.Term)...) 
+ cfd.docs[docNum] = append(cfd.docs[docNum], TermSeparator) + cfd.size += uint64(len(next.Term) + 1) // map value + nextPosting, err2 = postingsItr.Next() + } + + if err2 != nil { + cfd.err = err2 + return + } + + next, err = dictItr.Next() + } + + if err != nil { + cfd.err = err + return + } +} + +type cachedDocs struct { + size uint64 + m sync.Mutex // As the cache is asynchronously prepared, need a lock + cache map[string]*cachedFieldDocs // Keyed by field +} + +func (c *cachedDocs) prepareFields(wantedFields []string, ss *SegmentSnapshot) error { + c.m.Lock() + + if c.cache == nil { + c.cache = make(map[string]*cachedFieldDocs, len(ss.Fields())) + } + + for _, field := range wantedFields { + _, exists := c.cache[field] + if !exists { + c.cache[field] = &cachedFieldDocs{ + readyCh: make(chan struct{}), + docs: make(map[uint64][]byte), + } + + go c.cache[field].prepareField(field, ss) + } + } + + for _, field := range wantedFields { + cachedFieldDocs := c.cache[field] + c.m.Unlock() + <-cachedFieldDocs.readyCh + + if cachedFieldDocs.err != nil { + return cachedFieldDocs.err + } + c.m.Lock() + } + + c.updateSizeLOCKED() + + c.m.Unlock() + return nil +} + +// hasFields returns true if the cache has all the given fields +func (c *cachedDocs) hasFields(fields []string) bool { + c.m.Lock() + for _, field := range fields { + if _, exists := c.cache[field]; !exists { + c.m.Unlock() + return false // found a field not in cache + } + } + c.m.Unlock() + return true +} + +func (c *cachedDocs) Size() int { + return int(atomic.LoadUint64(&c.size)) +} + +func (c *cachedDocs) updateSizeLOCKED() { + sizeInBytes := 0 + for k, v := range c.cache { // cachedFieldDocs + sizeInBytes += len(k) + if v != nil { + sizeInBytes += v.Size() + } + } + atomic.StoreUint64(&c.size, uint64(sizeInBytes)) +} + +func (c *cachedDocs) visitDoc(localDocNum uint64, + fields []string, visitor index.DocumentFieldTermVisitor) { + c.m.Lock() + + for _, field := range fields { + if cachedFieldDocs, exists 
:= c.cache[field]; exists { + c.m.Unlock() + <-cachedFieldDocs.readyCh + c.m.Lock() + + if tlist, exists := cachedFieldDocs.docs[localDocNum]; exists { + for { + i := bytes.Index(tlist, TermSeparatorSplitSlice) + if i < 0 { + break + } + visitor(field, tlist[0:i]) + tlist = tlist[i+1:] + } + } + } + } + + c.m.Unlock() +} diff --git a/vendor/github.com/blevesearch/bleve/index/scorch/stats.go b/vendor/github.com/blevesearch/bleve/index/scorch/stats.go new file mode 100644 index 0000000..626fff2 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/scorch/stats.go @@ -0,0 +1,152 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorch + +import ( + "encoding/json" + "reflect" + "sync/atomic" +) + +// Stats tracks statistics about the index, fields that are +// prefixed like CurXxxx are gauges (can go up and down), +// and fields that are prefixed like TotXxxx are monotonically +// increasing counters. 
+type Stats struct { + TotUpdates uint64 + TotDeletes uint64 + + TotBatches uint64 + TotBatchesEmpty uint64 + TotBatchIntroTime uint64 + MaxBatchIntroTime uint64 + + CurRootEpoch uint64 + LastPersistedEpoch uint64 + LastMergedEpoch uint64 + + TotOnErrors uint64 + + TotAnalysisTime uint64 + TotIndexTime uint64 + + TotIndexedPlainTextBytes uint64 + + TotTermSearchersStarted uint64 + TotTermSearchersFinished uint64 + + TotEventTriggerStarted uint64 + TotEventTriggerCompleted uint64 + + TotIntroduceLoop uint64 + TotIntroduceSegmentBeg uint64 + TotIntroduceSegmentEnd uint64 + TotIntroducePersistBeg uint64 + TotIntroducePersistEnd uint64 + TotIntroduceMergeBeg uint64 + TotIntroduceMergeEnd uint64 + TotIntroduceRevertBeg uint64 + TotIntroduceRevertEnd uint64 + + TotIntroducedItems uint64 + TotIntroducedSegmentsBatch uint64 + TotIntroducedSegmentsMerge uint64 + + TotPersistLoopBeg uint64 + TotPersistLoopErr uint64 + TotPersistLoopProgress uint64 + TotPersistLoopWait uint64 + TotPersistLoopWaitNotified uint64 + TotPersistLoopEnd uint64 + + TotPersistedItems uint64 + TotItemsToPersist uint64 + TotPersistedSegments uint64 + + TotPersisterSlowMergerPause uint64 + TotPersisterSlowMergerResume uint64 + + TotPersisterNapPauseCompleted uint64 + TotPersisterMergerNapBreak uint64 + + TotFileMergeLoopBeg uint64 + TotFileMergeLoopErr uint64 + TotFileMergeLoopEnd uint64 + + TotFileMergeForceOpsStarted uint64 + TotFileMergeForceOpsCompleted uint64 + + TotFileMergePlan uint64 + TotFileMergePlanErr uint64 + TotFileMergePlanNone uint64 + TotFileMergePlanOk uint64 + + TotFileMergePlanTasks uint64 + TotFileMergePlanTasksDone uint64 + TotFileMergePlanTasksErr uint64 + TotFileMergePlanTasksSegments uint64 + TotFileMergePlanTasksSegmentsEmpty uint64 + + TotFileMergeSegmentsEmpty uint64 + TotFileMergeSegments uint64 + TotFileSegmentsAtRoot uint64 + TotFileMergeWrittenBytes uint64 + + TotFileMergeZapBeg uint64 + TotFileMergeZapEnd uint64 + TotFileMergeZapTime uint64 + MaxFileMergeZapTime uint64 + 
TotFileMergeZapIntroductionTime uint64 + MaxFileMergeZapIntroductionTime uint64 + + TotFileMergeIntroductions uint64 + TotFileMergeIntroductionsDone uint64 + TotFileMergeIntroductionsSkipped uint64 + TotFileMergeIntroductionsObsoleted uint64 + + CurFilesIneligibleForRemoval uint64 + TotSnapshotsRemovedFromMetaStore uint64 + + TotMemMergeBeg uint64 + TotMemMergeErr uint64 + TotMemMergeDone uint64 + TotMemMergeZapBeg uint64 + TotMemMergeZapEnd uint64 + TotMemMergeZapTime uint64 + MaxMemMergeZapTime uint64 + TotMemMergeSegments uint64 + TotMemorySegmentsAtRoot uint64 +} + +// atomically populates the returned map +func (s *Stats) ToMap() map[string]interface{} { + m := map[string]interface{}{} + sve := reflect.ValueOf(s).Elem() + svet := sve.Type() + for i := 0; i < svet.NumField(); i++ { + svef := sve.Field(i) + if svef.CanAddr() { + svefp := svef.Addr().Interface() + m[svet.Field(i).Name] = atomic.LoadUint64(svefp.(*uint64)) + } + } + return m +} + +// MarshalJSON implements json.Marshaler, and in contrast to standard +// json marshaling provides atomic safety +func (s *Stats) MarshalJSON() ([]byte, error) { + return json.Marshal(s.ToMap()) +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/batch.go b/vendor/github.com/blevesearch/bleve/index/store/batch.go new file mode 100644 index 0000000..7110526 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/batch.go @@ -0,0 +1,62 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +type op struct { + K []byte + V []byte +} + +type EmulatedBatch struct { + Ops []*op + Merger *EmulatedMerge +} + +func NewEmulatedBatch(mo MergeOperator) *EmulatedBatch { + return &EmulatedBatch{ + Ops: make([]*op, 0, 1000), + Merger: NewEmulatedMerge(mo), + } +} + +func (b *EmulatedBatch) Set(key, val []byte) { + ck := make([]byte, len(key)) + copy(ck, key) + cv := make([]byte, len(val)) + copy(cv, val) + b.Ops = append(b.Ops, &op{ck, cv}) +} + +func (b *EmulatedBatch) Delete(key []byte) { + ck := make([]byte, len(key)) + copy(ck, key) + b.Ops = append(b.Ops, &op{ck, nil}) +} + +func (b *EmulatedBatch) Merge(key, val []byte) { + ck := make([]byte, len(key)) + copy(ck, key) + cv := make([]byte, len(val)) + copy(cv, val) + b.Merger.Merge(key, val) +} + +func (b *EmulatedBatch) Reset() { + b.Ops = b.Ops[:0] +} + +func (b *EmulatedBatch) Close() error { + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/boltdb/iterator.go b/vendor/github.com/blevesearch/bleve/index/store/boltdb/iterator.go new file mode 100644 index 0000000..cf4da87 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/boltdb/iterator.go @@ -0,0 +1,85 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package boltdb + +import ( + "bytes" + + bolt "go.etcd.io/bbolt" +) + +type Iterator struct { + store *Store + tx *bolt.Tx + cursor *bolt.Cursor + prefix []byte + start []byte + end []byte + valid bool + key []byte + val []byte +} + +func (i *Iterator) updateValid() { + i.valid = (i.key != nil) + if i.valid { + if i.prefix != nil { + i.valid = bytes.HasPrefix(i.key, i.prefix) + } else if i.end != nil { + i.valid = bytes.Compare(i.key, i.end) < 0 + } + } +} + +func (i *Iterator) Seek(k []byte) { + if i.start != nil && bytes.Compare(k, i.start) < 0 { + k = i.start + } + if i.prefix != nil && !bytes.HasPrefix(k, i.prefix) { + if bytes.Compare(k, i.prefix) < 0 { + k = i.prefix + } else { + i.valid = false + return + } + } + i.key, i.val = i.cursor.Seek(k) + i.updateValid() +} + +func (i *Iterator) Next() { + i.key, i.val = i.cursor.Next() + i.updateValid() +} + +func (i *Iterator) Current() ([]byte, []byte, bool) { + return i.key, i.val, i.valid +} + +func (i *Iterator) Key() []byte { + return i.key +} + +func (i *Iterator) Value() []byte { + return i.val +} + +func (i *Iterator) Valid() bool { + return i.valid +} + +func (i *Iterator) Close() error { + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/boltdb/reader.go b/vendor/github.com/blevesearch/bleve/index/store/boltdb/reader.go new file mode 100644 index 0000000..7977ebb --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/boltdb/reader.go @@ -0,0 +1,73 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package boltdb + +import ( + "github.com/blevesearch/bleve/index/store" + bolt "go.etcd.io/bbolt" +) + +type Reader struct { + store *Store + tx *bolt.Tx + bucket *bolt.Bucket +} + +func (r *Reader) Get(key []byte) ([]byte, error) { + var rv []byte + v := r.bucket.Get(key) + if v != nil { + rv = make([]byte, len(v)) + copy(rv, v) + } + return rv, nil +} + +func (r *Reader) MultiGet(keys [][]byte) ([][]byte, error) { + return store.MultiGet(r, keys) +} + +func (r *Reader) PrefixIterator(prefix []byte) store.KVIterator { + cursor := r.bucket.Cursor() + + rv := &Iterator{ + store: r.store, + tx: r.tx, + cursor: cursor, + prefix: prefix, + } + + rv.Seek(prefix) + return rv +} + +func (r *Reader) RangeIterator(start, end []byte) store.KVIterator { + cursor := r.bucket.Cursor() + + rv := &Iterator{ + store: r.store, + tx: r.tx, + cursor: cursor, + start: start, + end: end, + } + + rv.Seek(start) + return rv +} + +func (r *Reader) Close() error { + return r.tx.Rollback() +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/boltdb/stats.go b/vendor/github.com/blevesearch/bleve/index/store/boltdb/stats.go new file mode 100644 index 0000000..e50e552 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/boltdb/stats.go @@ -0,0 +1,26 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package boltdb + +import "encoding/json" + +type stats struct { + s *Store +} + +func (s *stats) MarshalJSON() ([]byte, error) { + bs := s.s.db.Stats() + return json.Marshal(bs) +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/boltdb/store.go b/vendor/github.com/blevesearch/bleve/index/store/boltdb/store.go new file mode 100644 index 0000000..3c74969 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/boltdb/store.go @@ -0,0 +1,181 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package boltdb implements a store.KVStore on top of BoltDB. It supports the +// following options: +// +// "bucket" (string): the name of BoltDB bucket to use, defaults to "bleve". +// +// "nosync" (bool): if true, set boltdb.DB.NoSync to true. It speeds up index +// operations in exchange of losing integrity guarantees if indexation aborts +// without closing the index. Use it when rebuilding indexes from zero. 
+package boltdb + +import ( + "bytes" + "encoding/json" + "fmt" + "os" + + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/registry" + bolt "go.etcd.io/bbolt" +) + +const ( + Name = "boltdb" + defaultCompactBatchSize = 100 +) + +type Store struct { + path string + bucket string + db *bolt.DB + noSync bool + fillPercent float64 + mo store.MergeOperator +} + +func New(mo store.MergeOperator, config map[string]interface{}) (store.KVStore, error) { + path, ok := config["path"].(string) + if !ok { + return nil, fmt.Errorf("must specify path") + } + if path == "" { + return nil, os.ErrInvalid + } + + bucket, ok := config["bucket"].(string) + if !ok { + bucket = "bleve" + } + + noSync, _ := config["nosync"].(bool) + + fillPercent, ok := config["fillPercent"].(float64) + if !ok { + fillPercent = bolt.DefaultFillPercent + } + + bo := &bolt.Options{} + ro, ok := config["read_only"].(bool) + if ok { + bo.ReadOnly = ro + } + + if initialMmapSize, ok := config["initialMmapSize"].(int); ok { + bo.InitialMmapSize = initialMmapSize + } else if initialMmapSize, ok := config["initialMmapSize"].(float64); ok { + bo.InitialMmapSize = int(initialMmapSize) + } + + db, err := bolt.Open(path, 0600, bo) + if err != nil { + return nil, err + } + db.NoSync = noSync + + if !bo.ReadOnly { + err = db.Update(func(tx *bolt.Tx) error { + _, err := tx.CreateBucketIfNotExists([]byte(bucket)) + + return err + }) + if err != nil { + return nil, err + } + } + + rv := Store{ + path: path, + bucket: bucket, + db: db, + mo: mo, + noSync: noSync, + fillPercent: fillPercent, + } + return &rv, nil +} + +func (bs *Store) Close() error { + return bs.db.Close() +} + +func (bs *Store) Reader() (store.KVReader, error) { + tx, err := bs.db.Begin(false) + if err != nil { + return nil, err + } + return &Reader{ + store: bs, + tx: tx, + bucket: tx.Bucket([]byte(bs.bucket)), + }, nil +} + +func (bs *Store) Writer() (store.KVWriter, error) { + return &Writer{ + store: bs, + }, nil +} + +func 
(bs *Store) Stats() json.Marshaler { + return &stats{ + s: bs, + } +} + +// CompactWithBatchSize removes DictionaryTerm entries with a count of zero (in batchSize batches) +// Removing entries is a workaround for github issue #374. +func (bs *Store) CompactWithBatchSize(batchSize int) error { + for { + cnt := 0 + err := bs.db.Batch(func(tx *bolt.Tx) error { + c := tx.Bucket([]byte(bs.bucket)).Cursor() + prefix := []byte("d") + + for k, v := c.Seek(prefix); bytes.HasPrefix(k, prefix); k, v = c.Next() { + if bytes.Equal(v, []byte{0}) { + cnt++ + if err := c.Delete(); err != nil { + return err + } + if cnt == batchSize { + break + } + } + + } + return nil + }) + if err != nil { + return err + } + + if cnt == 0 { + break + } + } + return nil +} + +// Compact calls CompactWithBatchSize with a default batch size of 100. This is a workaround +// for github issue #374. +func (bs *Store) Compact() error { + return bs.CompactWithBatchSize(defaultCompactBatchSize) +} + +func init() { + registry.RegisterKVStore(Name, New) +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/boltdb/writer.go b/vendor/github.com/blevesearch/bleve/index/store/boltdb/writer.go new file mode 100644 index 0000000..f093574 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/boltdb/writer.go @@ -0,0 +1,95 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package boltdb + +import ( + "fmt" + + "github.com/blevesearch/bleve/index/store" +) + +type Writer struct { + store *Store +} + +func (w *Writer) NewBatch() store.KVBatch { + return store.NewEmulatedBatch(w.store.mo) +} + +func (w *Writer) NewBatchEx(options store.KVBatchOptions) ([]byte, store.KVBatch, error) { + return make([]byte, options.TotalBytes), w.NewBatch(), nil +} + +func (w *Writer) ExecuteBatch(batch store.KVBatch) (err error) { + + emulatedBatch, ok := batch.(*store.EmulatedBatch) + if !ok { + return fmt.Errorf("wrong type of batch") + } + + tx, err := w.store.db.Begin(true) + if err != nil { + return + } + // defer function to ensure that once started, + // we either Commit tx or Rollback + defer func() { + // if nothing went wrong, commit + if err == nil { + // careful to catch error here too + err = tx.Commit() + } else { + // caller should see error that caused abort, + // not success or failure of Rollback itself + _ = tx.Rollback() + } + }() + + bucket := tx.Bucket([]byte(w.store.bucket)) + bucket.FillPercent = w.store.fillPercent + + for k, mergeOps := range emulatedBatch.Merger.Merges { + kb := []byte(k) + existingVal := bucket.Get(kb) + mergedVal, fullMergeOk := w.store.mo.FullMerge(kb, existingVal, mergeOps) + if !fullMergeOk { + err = fmt.Errorf("merge operator returned failure") + return + } + err = bucket.Put(kb, mergedVal) + if err != nil { + return + } + } + + for _, op := range emulatedBatch.Ops { + if op.V != nil { + err = bucket.Put(op.K, op.V) + if err != nil { + return + } + } else { + err = bucket.Delete(op.K) + if err != nil { + return + } + } + } + return +} + +func (w *Writer) Close() error { + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/gtreap/iterator.go b/vendor/github.com/blevesearch/bleve/index/store/gtreap/iterator.go new file mode 100644 index 0000000..092ccf2 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/gtreap/iterator.go @@ -0,0 +1,152 @@ +// Copyright (c) 2015 
Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package gtreap provides an in-memory implementation of the +// KVStore interfaces using the gtreap balanced-binary treap, +// copy-on-write data structure. +package gtreap + +import ( + "bytes" + "sync" + + "github.com/steveyen/gtreap" +) + +type Iterator struct { + t *gtreap.Treap + + m sync.Mutex + cancelCh chan struct{} + nextCh chan *Item + curr *Item + currOk bool + + prefix []byte + start []byte + end []byte +} + +func (w *Iterator) Seek(k []byte) { + if w.start != nil && bytes.Compare(k, w.start) < 0 { + k = w.start + } + if w.prefix != nil && !bytes.HasPrefix(k, w.prefix) { + if bytes.Compare(k, w.prefix) < 0 { + k = w.prefix + } else { + var end []byte + for i := len(w.prefix) - 1; i >= 0; i-- { + c := w.prefix[i] + if c < 0xff { + end = make([]byte, i+1) + copy(end, w.prefix) + end[i] = c + 1 + break + } + } + k = end + } + } + w.restart(&Item{k: k}) +} + +func (w *Iterator) restart(start *Item) *Iterator { + cancelCh := make(chan struct{}) + nextCh := make(chan *Item, 1) + + w.m.Lock() + if w.cancelCh != nil { + close(w.cancelCh) + } + w.cancelCh = cancelCh + w.nextCh = nextCh + w.curr = nil + w.currOk = false + w.m.Unlock() + + go func() { + if start != nil { + w.t.VisitAscend(start, func(itm gtreap.Item) bool { + select { + case <-cancelCh: + return false + case nextCh <- itm.(*Item): + return true + } + }) + } + close(nextCh) + }() + + w.Next() + + return w +} + +func (w 
*Iterator) Next() { + w.m.Lock() + nextCh := w.nextCh + w.m.Unlock() + w.curr, w.currOk = <-nextCh +} + +func (w *Iterator) Current() ([]byte, []byte, bool) { + w.m.Lock() + defer w.m.Unlock() + if !w.currOk || w.curr == nil { + return nil, nil, false + } + if w.prefix != nil && !bytes.HasPrefix(w.curr.k, w.prefix) { + return nil, nil, false + } else if w.end != nil && bytes.Compare(w.curr.k, w.end) >= 0 { + return nil, nil, false + } + return w.curr.k, w.curr.v, w.currOk +} + +func (w *Iterator) Key() []byte { + k, _, ok := w.Current() + if !ok { + return nil + } + return k +} + +func (w *Iterator) Value() []byte { + _, v, ok := w.Current() + if !ok { + return nil + } + return v +} + +func (w *Iterator) Valid() bool { + _, _, ok := w.Current() + return ok +} + +func (w *Iterator) Close() error { + w.m.Lock() + if w.cancelCh != nil { + close(w.cancelCh) + } + w.cancelCh = nil + w.nextCh = nil + w.curr = nil + w.currOk = false + w.m.Unlock() + + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/gtreap/reader.go b/vendor/github.com/blevesearch/bleve/index/store/gtreap/reader.go new file mode 100644 index 0000000..98254d3 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/gtreap/reader.go @@ -0,0 +1,66 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package gtreap provides an in-memory implementation of the +// KVStore interfaces using the gtreap balanced-binary treap, +// copy-on-write data structure. +package gtreap + +import ( + "github.com/blevesearch/bleve/index/store" + + "github.com/steveyen/gtreap" +) + +type Reader struct { + t *gtreap.Treap +} + +func (w *Reader) Get(k []byte) (v []byte, err error) { + var rv []byte + itm := w.t.Get(&Item{k: k}) + if itm != nil { + rv = make([]byte, len(itm.(*Item).v)) + copy(rv, itm.(*Item).v) + return rv, nil + } + return nil, nil +} + +func (r *Reader) MultiGet(keys [][]byte) ([][]byte, error) { + return store.MultiGet(r, keys) +} + +func (w *Reader) PrefixIterator(k []byte) store.KVIterator { + rv := Iterator{ + t: w.t, + prefix: k, + } + rv.restart(&Item{k: k}) + return &rv +} + +func (w *Reader) RangeIterator(start, end []byte) store.KVIterator { + rv := Iterator{ + t: w.t, + start: start, + end: end, + } + rv.restart(&Item{k: start}) + return &rv +} + +func (w *Reader) Close() error { + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/gtreap/store.go b/vendor/github.com/blevesearch/bleve/index/store/gtreap/store.go new file mode 100644 index 0000000..3e6c5fe --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/gtreap/store.go @@ -0,0 +1,82 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package gtreap provides an in-memory implementation of the +// KVStore interfaces using the gtreap balanced-binary treap, +// copy-on-write data structure. + +package gtreap + +import ( + "bytes" + "fmt" + "os" + "sync" + + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/registry" + "github.com/steveyen/gtreap" +) + +const Name = "gtreap" + +type Store struct { + m sync.Mutex + t *gtreap.Treap + mo store.MergeOperator +} + +type Item struct { + k []byte + v []byte +} + +func itemCompare(a, b interface{}) int { + return bytes.Compare(a.(*Item).k, b.(*Item).k) +} + +func New(mo store.MergeOperator, config map[string]interface{}) (store.KVStore, error) { + path, ok := config["path"].(string) + if !ok { + return nil, fmt.Errorf("must specify path") + } + if path != "" { + return nil, os.ErrInvalid + } + + rv := Store{ + t: gtreap.NewTreap(itemCompare), + mo: mo, + } + return &rv, nil +} + +func (s *Store) Close() error { + return nil +} + +func (s *Store) Reader() (store.KVReader, error) { + s.m.Lock() + t := s.t + s.m.Unlock() + return &Reader{t: t}, nil +} + +func (s *Store) Writer() (store.KVWriter, error) { + return &Writer{s: s}, nil +} + +func init() { + registry.RegisterKVStore(Name, New) +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/gtreap/writer.go b/vendor/github.com/blevesearch/bleve/index/store/gtreap/writer.go new file mode 100644 index 0000000..777aab4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/gtreap/writer.go @@ -0,0 +1,76 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package gtreap provides an in-memory implementation of the +// KVStore interfaces using the gtreap balanced-binary treap, +// copy-on-write data structure. +package gtreap + +import ( + "fmt" + "math/rand" + + "github.com/blevesearch/bleve/index/store" +) + +type Writer struct { + s *Store +} + +func (w *Writer) NewBatch() store.KVBatch { + return store.NewEmulatedBatch(w.s.mo) +} + +func (w *Writer) NewBatchEx(options store.KVBatchOptions) ([]byte, store.KVBatch, error) { + return make([]byte, options.TotalBytes), w.NewBatch(), nil +} + +func (w *Writer) ExecuteBatch(batch store.KVBatch) error { + + emulatedBatch, ok := batch.(*store.EmulatedBatch) + if !ok { + return fmt.Errorf("wrong type of batch") + } + + // defer the unlock so the early error return inside the merge loop + // cannot exit while still holding the store mutex (deadlock risk). + w.s.m.Lock() + defer w.s.m.Unlock() + for k, mergeOps := range emulatedBatch.Merger.Merges { + kb := []byte(k) + var existingVal []byte + existingItem := w.s.t.Get(&Item{k: kb}) + if existingItem != nil { + existingVal = w.s.t.Get(&Item{k: kb}).(*Item).v + } + mergedVal, fullMergeOk := w.s.mo.FullMerge(kb, existingVal, mergeOps) + if !fullMergeOk { + return fmt.Errorf("merge operator returned failure") + } + w.s.t = w.s.t.Upsert(&Item{k: kb, v: mergedVal}, rand.Int()) + } + + for _, op := range emulatedBatch.Ops { + if op.V != nil { + w.s.t = w.s.t.Upsert(&Item{k: op.K, v: op.V}, rand.Int()) + } else { + w.s.t = w.s.t.Delete(&Item{k: op.K}) + } + } + + return nil +} + +func (w *Writer) Close() error { + w.s = nil + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/kvstore.go
b/vendor/github.com/blevesearch/bleve/index/store/kvstore.go new file mode 100644 index 0000000..34698c7 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/kvstore.go @@ -0,0 +1,174 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +import "encoding/json" + +// KVStore is an abstraction for working with KV stores. Note that +// in order to be used with the bleve.registry, it must also implement +// a constructor function of the registry.KVStoreConstructor type. +type KVStore interface { + + // Writer returns a KVWriter which can be used to + // make changes to the KVStore. If a writer cannot + // be obtained a non-nil error is returned. + Writer() (KVWriter, error) + + // Reader returns a KVReader which can be used to + // read data from the KVStore. If a reader cannot + // be obtained a non-nil error is returned. + Reader() (KVReader, error) + + // Close closes the KVStore + Close() error +} + +// KVReader is an abstraction of an **ISOLATED** reader +// In this context isolated is defined to mean that +// writes/deletes made after the KVReader is opened +// are not observed. +// Because there is usually a cost associated with +// keeping isolated readers active, users should +// close them as soon as they are no longer needed. +type KVReader interface { + + // Get returns the value associated with the key + // If the key does not exist, nil is returned. 
+ // The caller owns the bytes returned. + Get(key []byte) ([]byte, error) + + // MultiGet retrieves multiple values in one call. + MultiGet(keys [][]byte) ([][]byte, error) + + // PrefixIterator returns a KVIterator that will + // visit all K/V pairs with the provided prefix + PrefixIterator(prefix []byte) KVIterator + + // RangeIterator returns a KVIterator that will + // visit all K/V pairs >= start AND < end + RangeIterator(start, end []byte) KVIterator + + // Close closes the iterator + Close() error +} + +// KVIterator is an abstraction around key iteration +type KVIterator interface { + + // Seek will advance the iterator to the specified key + Seek(key []byte) + + // Next will advance the iterator to the next key + Next() + + // Key returns the key pointed to by the iterator + // The bytes returned are **ONLY** valid until the next call to Seek/Next/Close + // Continued use after that requires that they be copied. + Key() []byte + + // Value returns the value pointed to by the iterator + // The bytes returned are **ONLY** valid until the next call to Seek/Next/Close + // Continued use after that requires that they be copied. + Value() []byte + + // Valid returns whether or not the iterator is in a valid state + Valid() bool + + // Current returns Key(),Value(),Valid() in a single operation + Current() ([]byte, []byte, bool) + + // Close closes the iterator + Close() error +} + +// KVWriter is an abstraction for mutating the KVStore +// KVWriter does **NOT** enforce restrictions of a single writer +// if the underlying KVStore allows concurrent writes, the +// KVWriter interface should also do so, it is up to the caller +// to do this in a way that is safe and makes sense +type KVWriter interface { + + // NewBatch returns a KVBatch for performing batch operations on this kvstore + NewBatch() KVBatch + + // NewBatchEx returns a KVBatch and an associated byte array + // that's pre-sized based on the KVBatchOptions. 
The caller can + // use the returned byte array for keys and values associated with + // the batch. Once the batch is either executed or closed, the + // associated byte array should no longer be accessed by the + // caller. + NewBatchEx(KVBatchOptions) ([]byte, KVBatch, error) + + // ExecuteBatch will execute the KVBatch, the provided KVBatch **MUST** have + // been created by the same KVStore (though not necessarily the same KVWriter) + // Batch execution is atomic, either all the operations or none will be performed + ExecuteBatch(batch KVBatch) error + + // Close closes the writer + Close() error +} + +// KVBatchOptions provides the KVWriter.NewBatchEx() method with batch +// preparation and preallocation information. +type KVBatchOptions struct { + // TotalBytes is the sum of key and value bytes needed by the + // caller for the entire batch. It affects the size of the + // returned byte array of KVWrite.NewBatchEx(). + TotalBytes int + + // NumSets is the number of Set() calls the caller will invoke on + // the KVBatch. + NumSets int + + // NumDeletes is the number of Delete() calls the caller will invoke + // on the KVBatch. + NumDeletes int + + // NumMerges is the number of Merge() calls the caller will invoke + // on the KVBatch. 
+ NumMerges int +} + +// KVBatch is an abstraction for making multiple KV mutations at once +type KVBatch interface { + + // Set updates the key with the specified value + // both key and value []byte may be reused as soon as this call returns + Set(key, val []byte) + + // Delete removes the specified key + // the key []byte may be reused as soon as this call returns + Delete(key []byte) + + // Merge merges old value with the new value at the specified key + // as prescribed by the KVStores merge operator + // both key and value []byte may be reused as soon as this call returns + Merge(key, val []byte) + + // Reset frees resources for this batch and allows reuse + Reset() + + // Close frees resources + Close() error +} + +// KVStoreStats is an optional interface that KVStores can implement +// if they're able to report any useful stats +type KVStoreStats interface { + // Stats returns a JSON serializable object representing stats for this KVStore + Stats() json.Marshaler + + StatsMap() map[string]interface{} +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/merge.go b/vendor/github.com/blevesearch/bleve/index/store/merge.go new file mode 100644 index 0000000..ca2561b --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/merge.go @@ -0,0 +1,64 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package store + +// At the moment this happens to be the same interface as described by +// RocksDB, but this may not always be the case. + +type MergeOperator interface { + + // FullMerge the full sequence of operands on top of the existingValue + // if no value currently exists, existingValue is nil + // return the merged value, and success/failure + FullMerge(key, existingValue []byte, operands [][]byte) ([]byte, bool) + + // Partially merge these two operands. + // If partial merge cannot be done, return nil,false, which will defer + // all processing until the FullMerge is done. + PartialMerge(key, leftOperand, rightOperand []byte) ([]byte, bool) + + // Name returns an identifier for the operator + Name() string +} + +type EmulatedMerge struct { + Merges map[string][][]byte + mo MergeOperator +} + +func NewEmulatedMerge(mo MergeOperator) *EmulatedMerge { + return &EmulatedMerge{ + Merges: make(map[string][][]byte), + mo: mo, + } +} + +func (m *EmulatedMerge) Merge(key, val []byte) { + ops, ok := m.Merges[string(key)] + if ok && len(ops) > 0 { + last := ops[len(ops)-1] + mergedVal, partialMergeOk := m.mo.PartialMerge(key, last, val) + if partialMergeOk { + // replace last entry with the result of the merge + ops[len(ops)-1] = mergedVal + } else { + // could not partial merge, append this to the end + ops = append(ops, val) + } + } else { + ops = [][]byte{val} + } + m.Merges[string(key)] = ops +} diff --git a/vendor/github.com/blevesearch/bleve/index/store/multiget.go b/vendor/github.com/blevesearch/bleve/index/store/multiget.go new file mode 100644 index 0000000..635bcd4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/store/multiget.go @@ -0,0 +1,33 @@ +// Copyright (c) 2016 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package store + +// MultiGet is a helper function to retrieve mutiple keys from a +// KVReader, and might be used by KVStore implementations that don't +// have a native multi-get facility. +func MultiGet(kvreader KVReader, keys [][]byte) ([][]byte, error) { + // Allocate with full length (not zero length) so vals[i] below is + // in range; a zero-length slice would panic on the first assignment. + vals := make([][]byte, len(keys)) + + for i, key := range keys { + val, err := kvreader.Get(key) + if err != nil { + return nil, err + } + + vals[i] = val + } + + return vals, nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/analysis.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/analysis.go new file mode 100644 index 0000000..d1b1fd5 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/analysis.go @@ -0,0 +1,110 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package upsidedown + +import ( + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" +) + +func (udc *UpsideDownCouch) Analyze(d *document.Document) *index.AnalysisResult { + rv := &index.AnalysisResult{ + DocID: d.ID, + Rows: make([]index.IndexRow, 0, 100), + } + + docIDBytes := []byte(d.ID) + + // track our back index entries + backIndexStoredEntries := make([]*BackIndexStoreEntry, 0) + + // information we collate as we merge fields with same name + fieldTermFreqs := make(map[uint16]analysis.TokenFrequencies) + fieldLengths := make(map[uint16]int) + fieldIncludeTermVectors := make(map[uint16]bool) + fieldNames := make(map[uint16]string) + + analyzeField := func(field document.Field, storable bool) { + fieldIndex, newFieldRow := udc.fieldIndexOrNewRow(field.Name()) + if newFieldRow != nil { + rv.Rows = append(rv.Rows, newFieldRow) + } + fieldNames[fieldIndex] = field.Name() + + if field.Options().IsIndexed() { + fieldLength, tokenFreqs := field.Analyze() + existingFreqs := fieldTermFreqs[fieldIndex] + if existingFreqs == nil { + fieldTermFreqs[fieldIndex] = tokenFreqs + } else { + existingFreqs.MergeAll(field.Name(), tokenFreqs) + fieldTermFreqs[fieldIndex] = existingFreqs + } + fieldLengths[fieldIndex] += fieldLength + fieldIncludeTermVectors[fieldIndex] = field.Options().IncludeTermVectors() + } + + if storable && field.Options().IsStored() { + rv.Rows, backIndexStoredEntries = udc.storeField(docIDBytes, field, fieldIndex, rv.Rows, backIndexStoredEntries) + } + } + + // walk all the fields, record stored fields now + // place information about indexed fields into map + // this collates information across fields with + // same names (arrays) + for _, field := range d.Fields { + analyzeField(field, true) + } + + if len(d.CompositeFields) > 0 { + for fieldIndex, tokenFreqs := range fieldTermFreqs { + // see if any of the composite fields need this + for _, compositeField := range 
d.CompositeFields { + compositeField.Compose(fieldNames[fieldIndex], fieldLengths[fieldIndex], tokenFreqs) + } + } + + for _, compositeField := range d.CompositeFields { + analyzeField(compositeField, false) + } + } + + rowsCapNeeded := len(rv.Rows) + 1 + for _, tokenFreqs := range fieldTermFreqs { + rowsCapNeeded += len(tokenFreqs) + } + + rv.Rows = append(make([]index.IndexRow, 0, rowsCapNeeded), rv.Rows...) + + backIndexTermsEntries := make([]*BackIndexTermsEntry, 0, len(fieldTermFreqs)) + + // walk through the collated information and process + // once for each indexed field (unique name) + for fieldIndex, tokenFreqs := range fieldTermFreqs { + fieldLength := fieldLengths[fieldIndex] + includeTermVectors := fieldIncludeTermVectors[fieldIndex] + + // encode this field + rv.Rows, backIndexTermsEntries = udc.indexField(docIDBytes, includeTermVectors, fieldIndex, fieldLength, tokenFreqs, rv.Rows, backIndexTermsEntries) + } + + // build the back index row + backIndexRow := NewBackIndexRow(docIDBytes, backIndexTermsEntries, backIndexStoredEntries) + rv.Rows = append(rv.Rows, backIndexRow) + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/benchmark_all.sh b/vendor/github.com/blevesearch/bleve/index/upsidedown/benchmark_all.sh new file mode 100644 index 0000000..079fef1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/benchmark_all.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +BENCHMARKS=`grep "func Benchmark" *_test.go | sed 's/.*func //' | sed s/\(.*{//` + +for BENCHMARK in $BENCHMARKS +do + go test -v -run=xxx -bench=^$BENCHMARK$ -benchtime=10s -tags 'forestdb leveldb' | grep -v ok | grep -v PASS +done diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/dump.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/dump.go new file mode 100644 index 0000000..cb045d2 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/dump.go @@ -0,0 +1,174 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package upsidedown + +import ( + "bytes" + "sort" + + "github.com/blevesearch/bleve/index/store" +) + +// the functions in this file are only intended to be used by +// the bleve_dump utility and the debug http handlers +// if your application relies on them, you're doing something wrong +// they may change or be removed at any time + +func dumpPrefix(kvreader store.KVReader, rv chan interface{}, prefix []byte) { + start := prefix + if start == nil { + start = []byte{0} + } + it := kvreader.PrefixIterator(start) + defer func() { + cerr := it.Close() + if cerr != nil { + rv <- cerr + } + }() + key, val, valid := it.Current() + for valid { + ck := make([]byte, len(key)) + copy(ck, key) + cv := make([]byte, len(val)) + copy(cv, val) + row, err := ParseFromKeyValue(ck, cv) + if err != nil { + rv <- err + return + } + rv <- row + + it.Next() + key, val, valid = it.Current() + } +} + +func dumpRange(kvreader store.KVReader, rv chan interface{}, start, end []byte) { + it := kvreader.RangeIterator(start, end) + defer func() { + cerr := it.Close() + if cerr != nil { + rv <- cerr + } + }() + key, val, valid := it.Current() + for valid { + ck := make([]byte, len(key)) + copy(ck, key) + cv := make([]byte, len(val)) + copy(cv, val) + row, err := ParseFromKeyValue(ck, cv) + if err != nil { + rv <- err + return + } + rv <- row + + it.Next() + key, val, valid = it.Current() + } +} + +func (i *IndexReader) DumpAll() chan 
interface{} { + rv := make(chan interface{}) + go func() { + defer close(rv) + dumpRange(i.kvreader, rv, nil, nil) + }() + return rv +} + +func (i *IndexReader) DumpFields() chan interface{} { + rv := make(chan interface{}) + go func() { + defer close(rv) + dumpPrefix(i.kvreader, rv, []byte{'f'}) + }() + return rv +} + +type keyset [][]byte + +func (k keyset) Len() int { return len(k) } +func (k keyset) Swap(i, j int) { k[i], k[j] = k[j], k[i] } +func (k keyset) Less(i, j int) bool { return bytes.Compare(k[i], k[j]) < 0 } + +// DumpDoc returns all rows in the index related to this doc id +func (i *IndexReader) DumpDoc(id string) chan interface{} { + idBytes := []byte(id) + + rv := make(chan interface{}) + + go func() { + defer close(rv) + + back, err := backIndexRowForDoc(i.kvreader, []byte(id)) + if err != nil { + rv <- err + return + } + + // no such doc + if back == nil { + return + } + // build sorted list of term keys + keys := make(keyset, 0) + for _, entry := range back.termsEntries { + for i := range entry.Terms { + tfr := NewTermFrequencyRow([]byte(entry.Terms[i]), uint16(*entry.Field), idBytes, 0, 0) + key := tfr.Key() + keys = append(keys, key) + } + } + sort.Sort(keys) + + // first add all the stored rows + storedRowPrefix := NewStoredRow(idBytes, 0, []uint64{}, 'x', []byte{}).ScanPrefixForDoc() + dumpPrefix(i.kvreader, rv, storedRowPrefix) + + // now walk term keys in order and add them as well + if len(keys) > 0 { + it := i.kvreader.RangeIterator(keys[0], nil) + defer func() { + cerr := it.Close() + if cerr != nil { + rv <- cerr + } + }() + + for _, key := range keys { + it.Seek(key) + rkey, rval, valid := it.Current() + if !valid { + break + } + rck := make([]byte, len(rkey)) + copy(rck, key) + rcv := make([]byte, len(rval)) + copy(rcv, rval) + row, err := ParseFromKeyValue(rck, rcv) + if err != nil { + rv <- err + return + } + rv <- row + } + } + }() + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/field_dict.go 
b/vendor/github.com/blevesearch/bleve/index/upsidedown/field_dict.go new file mode 100644 index 0000000..20d4eb3 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/field_dict.go @@ -0,0 +1,78 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package upsidedown + +import ( + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" +) + +type UpsideDownCouchFieldDict struct { + indexReader *IndexReader + iterator store.KVIterator + dictRow *DictionaryRow + dictEntry *index.DictEntry + field uint16 +} + +func newUpsideDownCouchFieldDict(indexReader *IndexReader, field uint16, startTerm, endTerm []byte) (*UpsideDownCouchFieldDict, error) { + + startKey := NewDictionaryRow(startTerm, field, 0).Key() + if endTerm == nil { + endTerm = []byte{ByteSeparator} + } else { + endTerm = incrementBytes(endTerm) + } + endKey := NewDictionaryRow(endTerm, field, 0).Key() + + it := indexReader.kvreader.RangeIterator(startKey, endKey) + + return &UpsideDownCouchFieldDict{ + indexReader: indexReader, + iterator: it, + dictRow: &DictionaryRow{}, // Pre-alloced, reused row. + dictEntry: &index.DictEntry{}, // Pre-alloced, reused entry. 
+ field: field, + }, nil + +} + +func (r *UpsideDownCouchFieldDict) Next() (*index.DictEntry, error) { + key, val, valid := r.iterator.Current() + if !valid { + return nil, nil + } + + err := r.dictRow.parseDictionaryK(key) + if err != nil { + return nil, fmt.Errorf("unexpected error parsing dictionary row key: %v", err) + } + err = r.dictRow.parseDictionaryV(val) + if err != nil { + return nil, fmt.Errorf("unexpected error parsing dictionary row val: %v", err) + } + r.dictEntry.Term = string(r.dictRow.term) + r.dictEntry.Count = r.dictRow.count + // advance the iterator to the next term + r.iterator.Next() + return r.dictEntry, nil + +} + +func (r *UpsideDownCouchFieldDict) Close() error { + return r.iterator.Close() +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/index_reader.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/index_reader.go new file mode 100644 index 0000000..ea7243e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/index_reader.go @@ -0,0 +1,226 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package upsidedown + +import ( + "reflect" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" +) + +var reflectStaticSizeIndexReader int + +func init() { + var ir IndexReader + reflectStaticSizeIndexReader = int(reflect.TypeOf(ir).Size()) +} + +type IndexReader struct { + index *UpsideDownCouch + kvreader store.KVReader + docCount uint64 +} + +func (i *IndexReader) TermFieldReader(term []byte, fieldName string, includeFreq, includeNorm, includeTermVectors bool) (index.TermFieldReader, error) { + fieldIndex, fieldExists := i.index.fieldCache.FieldNamed(fieldName, false) + if fieldExists { + return newUpsideDownCouchTermFieldReader(i, term, uint16(fieldIndex), includeFreq, includeNorm, includeTermVectors) + } + return newUpsideDownCouchTermFieldReader(i, []byte{ByteSeparator}, ^uint16(0), includeFreq, includeNorm, includeTermVectors) +} + +func (i *IndexReader) FieldDict(fieldName string) (index.FieldDict, error) { + return i.FieldDictRange(fieldName, nil, nil) +} + +func (i *IndexReader) FieldDictRange(fieldName string, startTerm []byte, endTerm []byte) (index.FieldDict, error) { + fieldIndex, fieldExists := i.index.fieldCache.FieldNamed(fieldName, false) + if fieldExists { + return newUpsideDownCouchFieldDict(i, uint16(fieldIndex), startTerm, endTerm) + } + return newUpsideDownCouchFieldDict(i, ^uint16(0), []byte{ByteSeparator}, []byte{}) +} + +func (i *IndexReader) FieldDictPrefix(fieldName string, termPrefix []byte) (index.FieldDict, error) { + return i.FieldDictRange(fieldName, termPrefix, termPrefix) +} + +func (i *IndexReader) DocIDReaderAll() (index.DocIDReader, error) { + return newUpsideDownCouchDocIDReader(i) +} + +func (i *IndexReader) DocIDReaderOnly(ids []string) (index.DocIDReader, error) { + return newUpsideDownCouchDocIDReaderOnly(i, ids) +} + +func (i *IndexReader) Document(id string) (doc *document.Document, err error) { + // first hit the back index to confirm doc 
exists + var backIndexRow *BackIndexRow + backIndexRow, err = backIndexRowForDoc(i.kvreader, []byte(id)) + if err != nil { + return + } + if backIndexRow == nil { + return + } + doc = document.NewDocument(id) + storedRow := NewStoredRow([]byte(id), 0, []uint64{}, 'x', nil) + storedRowScanPrefix := storedRow.ScanPrefixForDoc() + it := i.kvreader.PrefixIterator(storedRowScanPrefix) + defer func() { + if cerr := it.Close(); err == nil && cerr != nil { + err = cerr + } + }() + key, val, valid := it.Current() + for valid { + safeVal := make([]byte, len(val)) + copy(safeVal, val) + var row *StoredRow + row, err = NewStoredRowKV(key, safeVal) + if err != nil { + doc = nil + return + } + if row != nil { + fieldName := i.index.fieldCache.FieldIndexed(row.field) + field := decodeFieldType(row.typ, fieldName, row.arrayPositions, row.value) + if field != nil { + doc.AddField(field) + } + } + + it.Next() + key, val, valid = it.Current() + } + return +} + +func (i *IndexReader) DocumentVisitFieldTerms(id index.IndexInternalID, fields []string, visitor index.DocumentFieldTermVisitor) error { + fieldsMap := make(map[uint16]string, len(fields)) + for _, f := range fields { + id, ok := i.index.fieldCache.FieldNamed(f, false) + if ok { + fieldsMap[id] = f + } + } + + tempRow := BackIndexRow{ + doc: id, + } + + keyBuf := GetRowBuffer() + if tempRow.KeySize() > len(keyBuf) { + keyBuf = make([]byte, 2*tempRow.KeySize()) + } + defer PutRowBuffer(keyBuf) + keySize, err := tempRow.KeyTo(keyBuf) + if err != nil { + return err + } + + value, err := i.kvreader.Get(keyBuf[:keySize]) + if err != nil { + return err + } + if value == nil { + return nil + } + + return visitBackIndexRow(value, func(field uint32, term []byte) { + if field, ok := fieldsMap[uint16(field)]; ok { + visitor(field, term) + } + }) +} + +func (i *IndexReader) Fields() (fields []string, err error) { + fields = make([]string, 0) + it := i.kvreader.PrefixIterator([]byte{'f'}) + defer func() { + if cerr := it.Close(); err == 
nil && cerr != nil { + err = cerr + } + }() + key, val, valid := it.Current() + for valid { + var row UpsideDownCouchRow + row, err = ParseFromKeyValue(key, val) + if err != nil { + fields = nil + return + } + if row != nil { + fieldRow, ok := row.(*FieldRow) + if ok { + fields = append(fields, fieldRow.name) + } + } + + it.Next() + key, val, valid = it.Current() + } + return +} + +func (i *IndexReader) GetInternal(key []byte) ([]byte, error) { + internalRow := NewInternalRow(key, nil) + return i.kvreader.Get(internalRow.Key()) +} + +func (i *IndexReader) DocCount() (uint64, error) { + return i.docCount, nil +} + +func (i *IndexReader) Close() error { + return i.kvreader.Close() +} + +func (i *IndexReader) ExternalID(id index.IndexInternalID) (string, error) { + return string(id), nil +} + +func (i *IndexReader) InternalID(id string) (index.IndexInternalID, error) { + return index.IndexInternalID(id), nil +} + +func incrementBytes(in []byte) []byte { + rv := make([]byte, len(in)) + copy(rv, in) + for i := len(rv) - 1; i >= 0; i-- { + rv[i] = rv[i] + 1 + if rv[i] != 0 { + // didn't overflow, so stop + break + } + } + return rv +} + +func (i *IndexReader) DocValueReader(fields []string) (index.DocValueReader, error) { + return &DocValueReader{i: i, fields: fields}, nil +} + +type DocValueReader struct { + i *IndexReader + fields []string +} + +func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID, + visitor index.DocumentFieldTermVisitor) error { + return dvr.i.DocumentVisitFieldTerms(id, dvr.fields, visitor) +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/reader.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/reader.go new file mode 100644 index 0000000..bc0fef1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/reader.go @@ -0,0 +1,376 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package upsidedown + +import ( + "bytes" + "reflect" + "sort" + "sync/atomic" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeUpsideDownCouchTermFieldReader int +var reflectStaticSizeUpsideDownCouchDocIDReader int + +func init() { + var tfr UpsideDownCouchTermFieldReader + reflectStaticSizeUpsideDownCouchTermFieldReader = + int(reflect.TypeOf(tfr).Size()) + var cdr UpsideDownCouchDocIDReader + reflectStaticSizeUpsideDownCouchDocIDReader = + int(reflect.TypeOf(cdr).Size()) +} + +type UpsideDownCouchTermFieldReader struct { + count uint64 + indexReader *IndexReader + iterator store.KVIterator + term []byte + tfrNext *TermFrequencyRow + tfrPrealloc TermFrequencyRow + keyBuf []byte + field uint16 + includeTermVectors bool +} + +func (r *UpsideDownCouchTermFieldReader) Size() int { + sizeInBytes := reflectStaticSizeUpsideDownCouchTermFieldReader + size.SizeOfPtr + + len(r.term) + + r.tfrPrealloc.Size() + + len(r.keyBuf) + + if r.tfrNext != nil { + sizeInBytes += r.tfrNext.Size() + } + + return sizeInBytes +} + +func newUpsideDownCouchTermFieldReader(indexReader *IndexReader, term []byte, field uint16, includeFreq, includeNorm, includeTermVectors bool) (*UpsideDownCouchTermFieldReader, error) { + bufNeeded := termFrequencyRowKeySize(term, nil) + if bufNeeded < dictionaryRowKeySize(term) { + bufNeeded = 
dictionaryRowKeySize(term) + } + buf := make([]byte, bufNeeded) + + bufUsed := dictionaryRowKeyTo(buf, field, term) + val, err := indexReader.kvreader.Get(buf[:bufUsed]) + if err != nil { + return nil, err + } + if val == nil { + atomic.AddUint64(&indexReader.index.stats.termSearchersStarted, uint64(1)) + rv := &UpsideDownCouchTermFieldReader{ + count: 0, + term: term, + field: field, + includeTermVectors: includeTermVectors, + } + rv.tfrNext = &rv.tfrPrealloc + return rv, nil + } + + count, err := dictionaryRowParseV(val) + if err != nil { + return nil, err + } + + bufUsed = termFrequencyRowKeyTo(buf, field, term, nil) + it := indexReader.kvreader.PrefixIterator(buf[:bufUsed]) + + atomic.AddUint64(&indexReader.index.stats.termSearchersStarted, uint64(1)) + return &UpsideDownCouchTermFieldReader{ + indexReader: indexReader, + iterator: it, + count: count, + term: term, + field: field, + includeTermVectors: includeTermVectors, + }, nil +} + +func (r *UpsideDownCouchTermFieldReader) Count() uint64 { + return r.count +} + +func (r *UpsideDownCouchTermFieldReader) Next(preAlloced *index.TermFieldDoc) (*index.TermFieldDoc, error) { + if r.iterator != nil { + // We treat tfrNext also like an initialization flag, which + // tells us whether we need to invoke the underlying + // iterator.Next(). The first time, don't call iterator.Next(). + if r.tfrNext != nil { + r.iterator.Next() + } else { + r.tfrNext = &r.tfrPrealloc + } + key, val, valid := r.iterator.Current() + if valid { + tfr := r.tfrNext + err := tfr.parseKDoc(key, r.term) + if err != nil { + return nil, err + } + err = tfr.parseV(val, r.includeTermVectors) + if err != nil { + return nil, err + } + rv := preAlloced + if rv == nil { + rv = &index.TermFieldDoc{} + } + rv.ID = append(rv.ID, tfr.doc...) 
+ rv.Freq = tfr.freq + rv.Norm = float64(tfr.norm) + if tfr.vectors != nil { + rv.Vectors = r.indexReader.index.termFieldVectorsFromTermVectors(tfr.vectors) + } + return rv, nil + } + } + return nil, nil +} + +func (r *UpsideDownCouchTermFieldReader) Advance(docID index.IndexInternalID, preAlloced *index.TermFieldDoc) (rv *index.TermFieldDoc, err error) { + if r.iterator != nil { + if r.tfrNext == nil { + r.tfrNext = &TermFrequencyRow{} + } + tfr := InitTermFrequencyRow(r.tfrNext, r.term, r.field, docID, 0, 0) + r.keyBuf, err = tfr.KeyAppendTo(r.keyBuf[:0]) + if err != nil { + return nil, err + } + r.iterator.Seek(r.keyBuf) + key, val, valid := r.iterator.Current() + if valid { + err := tfr.parseKDoc(key, r.term) + if err != nil { + return nil, err + } + err = tfr.parseV(val, r.includeTermVectors) + if err != nil { + return nil, err + } + rv = preAlloced + if rv == nil { + rv = &index.TermFieldDoc{} + } + rv.ID = append(rv.ID, tfr.doc...) + rv.Freq = tfr.freq + rv.Norm = float64(tfr.norm) + if tfr.vectors != nil { + rv.Vectors = r.indexReader.index.termFieldVectorsFromTermVectors(tfr.vectors) + } + return rv, nil + } + } + return nil, nil +} + +func (r *UpsideDownCouchTermFieldReader) Close() error { + if r.indexReader != nil { + atomic.AddUint64(&r.indexReader.index.stats.termSearchersFinished, uint64(1)) + } + if r.iterator != nil { + return r.iterator.Close() + } + return nil +} + +type UpsideDownCouchDocIDReader struct { + indexReader *IndexReader + iterator store.KVIterator + only []string + onlyPos int + onlyMode bool +} + +func (r *UpsideDownCouchDocIDReader) Size() int { + sizeInBytes := reflectStaticSizeUpsideDownCouchDocIDReader + + reflectStaticSizeIndexReader + size.SizeOfPtr + + for _, entry := range r.only { + sizeInBytes += size.SizeOfString + len(entry) + } + + return sizeInBytes +} + +func newUpsideDownCouchDocIDReader(indexReader *IndexReader) (*UpsideDownCouchDocIDReader, error) { + startBytes := []byte{0x0} + endBytes := []byte{0xff} + + bisr := 
NewBackIndexRow(startBytes, nil, nil) + bier := NewBackIndexRow(endBytes, nil, nil) + it := indexReader.kvreader.RangeIterator(bisr.Key(), bier.Key()) + + return &UpsideDownCouchDocIDReader{ + indexReader: indexReader, + iterator: it, + }, nil +} + +func newUpsideDownCouchDocIDReaderOnly(indexReader *IndexReader, ids []string) (*UpsideDownCouchDocIDReader, error) { + // we don't actually own the list of ids, so if before we sort we must copy + idsCopy := make([]string, len(ids)) + copy(idsCopy, ids) + // ensure ids are sorted + sort.Strings(idsCopy) + startBytes := []byte{0x0} + if len(idsCopy) > 0 { + startBytes = []byte(idsCopy[0]) + } + endBytes := []byte{0xff} + if len(idsCopy) > 0 { + endBytes = incrementBytes([]byte(idsCopy[len(idsCopy)-1])) + } + bisr := NewBackIndexRow(startBytes, nil, nil) + bier := NewBackIndexRow(endBytes, nil, nil) + it := indexReader.kvreader.RangeIterator(bisr.Key(), bier.Key()) + + return &UpsideDownCouchDocIDReader{ + indexReader: indexReader, + iterator: it, + only: idsCopy, + onlyMode: true, + }, nil +} + +func (r *UpsideDownCouchDocIDReader) Next() (index.IndexInternalID, error) { + key, val, valid := r.iterator.Current() + + if r.onlyMode { + var rv index.IndexInternalID + for valid && r.onlyPos < len(r.only) { + br, err := NewBackIndexRowKV(key, val) + if err != nil { + return nil, err + } + if !bytes.Equal(br.doc, []byte(r.only[r.onlyPos])) { + ok := r.nextOnly() + if !ok { + return nil, nil + } + r.iterator.Seek(NewBackIndexRow([]byte(r.only[r.onlyPos]), nil, nil).Key()) + key, val, valid = r.iterator.Current() + continue + } else { + rv = append([]byte(nil), br.doc...) + break + } + } + if valid && r.onlyPos < len(r.only) { + ok := r.nextOnly() + if ok { + r.iterator.Seek(NewBackIndexRow([]byte(r.only[r.onlyPos]), nil, nil).Key()) + } + return rv, nil + } + + } else { + if valid { + br, err := NewBackIndexRowKV(key, val) + if err != nil { + return nil, err + } + rv := append([]byte(nil), br.doc...) 
+ r.iterator.Next() + return rv, nil + } + } + return nil, nil +} + +func (r *UpsideDownCouchDocIDReader) Advance(docID index.IndexInternalID) (index.IndexInternalID, error) { + + if r.onlyMode { + r.onlyPos = sort.SearchStrings(r.only, string(docID)) + if r.onlyPos >= len(r.only) { + // advanced to key after our last only key + return nil, nil + } + r.iterator.Seek(NewBackIndexRow([]byte(r.only[r.onlyPos]), nil, nil).Key()) + key, val, valid := r.iterator.Current() + + var rv index.IndexInternalID + for valid && r.onlyPos < len(r.only) { + br, err := NewBackIndexRowKV(key, val) + if err != nil { + return nil, err + } + if !bytes.Equal(br.doc, []byte(r.only[r.onlyPos])) { + // the only key we seek'd to didn't exist + // now look for the closest key that did exist in only + r.onlyPos = sort.SearchStrings(r.only, string(br.doc)) + if r.onlyPos >= len(r.only) { + // advanced to key after our last only key + return nil, nil + } + // now seek to this new only key + r.iterator.Seek(NewBackIndexRow([]byte(r.only[r.onlyPos]), nil, nil).Key()) + key, val, valid = r.iterator.Current() + continue + } else { + rv = append([]byte(nil), br.doc...) + break + } + } + if valid && r.onlyPos < len(r.only) { + ok := r.nextOnly() + if ok { + r.iterator.Seek(NewBackIndexRow([]byte(r.only[r.onlyPos]), nil, nil).Key()) + } + return rv, nil + } + } else { + bir := NewBackIndexRow(docID, nil, nil) + r.iterator.Seek(bir.Key()) + key, val, valid := r.iterator.Current() + if valid { + br, err := NewBackIndexRowKV(key, val) + if err != nil { + return nil, err + } + rv := append([]byte(nil), br.doc...) 
+ r.iterator.Next() + return rv, nil + } + } + return nil, nil +} + +func (r *UpsideDownCouchDocIDReader) Close() error { + return r.iterator.Close() +} + +// move the r.only pos forward one, skipping duplicates +// return true if there is more data, or false if we got to the end of the list +func (r *UpsideDownCouchDocIDReader) nextOnly() bool { + + // advance 1 position, until we see a different key + // it's already sorted, so this skips duplicates + start := r.onlyPos + r.onlyPos++ + for r.onlyPos < len(r.only) && r.only[r.onlyPos] == r.only[start] { + start = r.onlyPos + r.onlyPos++ + } + // inidicate if we got to the end of the list + return r.onlyPos < len(r.only) +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/row.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/row.go new file mode 100644 index 0000000..531e0a0 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/row.go @@ -0,0 +1,1141 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package upsidedown + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "math" + "reflect" + + "github.com/blevesearch/bleve/size" + "github.com/golang/protobuf/proto" +) + +var reflectStaticSizeTermFrequencyRow int +var reflectStaticSizeTermVector int + +func init() { + var tfr TermFrequencyRow + reflectStaticSizeTermFrequencyRow = int(reflect.TypeOf(tfr).Size()) + var tv TermVector + reflectStaticSizeTermVector = int(reflect.TypeOf(tv).Size()) +} + +const ByteSeparator byte = 0xff + +type UpsideDownCouchRowStream chan UpsideDownCouchRow + +type UpsideDownCouchRow interface { + KeySize() int + KeyTo([]byte) (int, error) + Key() []byte + Value() []byte + ValueSize() int + ValueTo([]byte) (int, error) +} + +func ParseFromKeyValue(key, value []byte) (UpsideDownCouchRow, error) { + if len(key) > 0 { + switch key[0] { + case 'v': + return NewVersionRowKV(key, value) + case 'f': + return NewFieldRowKV(key, value) + case 'd': + return NewDictionaryRowKV(key, value) + case 't': + return NewTermFrequencyRowKV(key, value) + case 'b': + return NewBackIndexRowKV(key, value) + case 's': + return NewStoredRowKV(key, value) + case 'i': + return NewInternalRowKV(key, value) + } + return nil, fmt.Errorf("Unknown field type '%s'", string(key[0])) + } + return nil, fmt.Errorf("Invalid empty key") +} + +// VERSION + +type VersionRow struct { + version uint8 +} + +func (v *VersionRow) Key() []byte { + return []byte{'v'} +} + +func (v *VersionRow) KeySize() int { + return 1 +} + +func (v *VersionRow) KeyTo(buf []byte) (int, error) { + buf[0] = 'v' + return 1, nil +} + +func (v *VersionRow) Value() []byte { + return []byte{byte(v.version)} +} + +func (v *VersionRow) ValueSize() int { + return 1 +} + +func (v *VersionRow) ValueTo(buf []byte) (int, error) { + buf[0] = v.version + return 1, nil +} + +func (v *VersionRow) String() string { + return fmt.Sprintf("Version: %d", v.version) +} + +func NewVersionRow(version uint8) *VersionRow { + return &VersionRow{ + version: version, + 
} +} + +func NewVersionRowKV(key, value []byte) (*VersionRow, error) { + rv := VersionRow{} + buf := bytes.NewBuffer(value) + err := binary.Read(buf, binary.LittleEndian, &rv.version) + if err != nil { + return nil, err + } + return &rv, nil +} + +// INTERNAL STORAGE + +type InternalRow struct { + key []byte + val []byte +} + +func (i *InternalRow) Key() []byte { + buf := make([]byte, i.KeySize()) + size, _ := i.KeyTo(buf) + return buf[:size] +} + +func (i *InternalRow) KeySize() int { + return len(i.key) + 1 +} + +func (i *InternalRow) KeyTo(buf []byte) (int, error) { + buf[0] = 'i' + actual := copy(buf[1:], i.key) + return 1 + actual, nil +} + +func (i *InternalRow) Value() []byte { + return i.val +} + +func (i *InternalRow) ValueSize() int { + return len(i.val) +} + +func (i *InternalRow) ValueTo(buf []byte) (int, error) { + actual := copy(buf, i.val) + return actual, nil +} + +func (i *InternalRow) String() string { + return fmt.Sprintf("InternalStore - Key: %s (% x) Val: %s (% x)", i.key, i.key, i.val, i.val) +} + +func NewInternalRow(key, val []byte) *InternalRow { + return &InternalRow{ + key: key, + val: val, + } +} + +func NewInternalRowKV(key, value []byte) (*InternalRow, error) { + rv := InternalRow{} + rv.key = key[1:] + rv.val = value + return &rv, nil +} + +// FIELD definition + +type FieldRow struct { + index uint16 + name string +} + +func (f *FieldRow) Key() []byte { + buf := make([]byte, f.KeySize()) + size, _ := f.KeyTo(buf) + return buf[:size] +} + +func (f *FieldRow) KeySize() int { + return 3 +} + +func (f *FieldRow) KeyTo(buf []byte) (int, error) { + buf[0] = 'f' + binary.LittleEndian.PutUint16(buf[1:3], f.index) + return 3, nil +} + +func (f *FieldRow) Value() []byte { + return append([]byte(f.name), ByteSeparator) +} + +func (f *FieldRow) ValueSize() int { + return len(f.name) + 1 +} + +func (f *FieldRow) ValueTo(buf []byte) (int, error) { + size := copy(buf, f.name) + buf[size] = ByteSeparator + return size + 1, nil +} + +func (f 
*FieldRow) String() string { + return fmt.Sprintf("Field: %d Name: %s", f.index, f.name) +} + +func NewFieldRow(index uint16, name string) *FieldRow { + return &FieldRow{ + index: index, + name: name, + } +} + +func NewFieldRowKV(key, value []byte) (*FieldRow, error) { + rv := FieldRow{} + + buf := bytes.NewBuffer(key) + _, err := buf.ReadByte() // type + if err != nil { + return nil, err + } + err = binary.Read(buf, binary.LittleEndian, &rv.index) + if err != nil { + return nil, err + } + + buf = bytes.NewBuffer(value) + rv.name, err = buf.ReadString(ByteSeparator) + if err != nil { + return nil, err + } + rv.name = rv.name[:len(rv.name)-1] // trim off separator byte + + return &rv, nil +} + +// DICTIONARY + +const DictionaryRowMaxValueSize = binary.MaxVarintLen64 + +type DictionaryRow struct { + term []byte + count uint64 + field uint16 +} + +func (dr *DictionaryRow) Key() []byte { + buf := make([]byte, dr.KeySize()) + size, _ := dr.KeyTo(buf) + return buf[:size] +} + +func (dr *DictionaryRow) KeySize() int { + return dictionaryRowKeySize(dr.term) +} + +func dictionaryRowKeySize(term []byte) int { + return len(term) + 3 +} + +func (dr *DictionaryRow) KeyTo(buf []byte) (int, error) { + return dictionaryRowKeyTo(buf, dr.field, dr.term), nil +} + +func dictionaryRowKeyTo(buf []byte, field uint16, term []byte) int { + buf[0] = 'd' + binary.LittleEndian.PutUint16(buf[1:3], field) + size := copy(buf[3:], term) + return size + 3 +} + +func (dr *DictionaryRow) Value() []byte { + buf := make([]byte, dr.ValueSize()) + size, _ := dr.ValueTo(buf) + return buf[:size] +} + +func (dr *DictionaryRow) ValueSize() int { + return DictionaryRowMaxValueSize +} + +func (dr *DictionaryRow) ValueTo(buf []byte) (int, error) { + used := binary.PutUvarint(buf, dr.count) + return used, nil +} + +func (dr *DictionaryRow) String() string { + return fmt.Sprintf("Dictionary Term: `%s` Field: %d Count: %d ", string(dr.term), dr.field, dr.count) +} + +func NewDictionaryRow(term []byte, field 
uint16, count uint64) *DictionaryRow { + return &DictionaryRow{ + term: term, + field: field, + count: count, + } +} + +func NewDictionaryRowKV(key, value []byte) (*DictionaryRow, error) { + rv, err := NewDictionaryRowK(key) + if err != nil { + return nil, err + } + + err = rv.parseDictionaryV(value) + if err != nil { + return nil, err + } + return rv, nil + +} + +func NewDictionaryRowK(key []byte) (*DictionaryRow, error) { + rv := &DictionaryRow{} + err := rv.parseDictionaryK(key) + if err != nil { + return nil, err + } + return rv, nil +} + +func (dr *DictionaryRow) parseDictionaryK(key []byte) error { + dr.field = binary.LittleEndian.Uint16(key[1:3]) + if dr.term != nil { + dr.term = dr.term[:0] + } + dr.term = append(dr.term, key[3:]...) + return nil +} + +func (dr *DictionaryRow) parseDictionaryV(value []byte) error { + count, err := dictionaryRowParseV(value) + if err != nil { + return err + } + dr.count = count + return nil +} + +func dictionaryRowParseV(value []byte) (uint64, error) { + count, nread := binary.Uvarint(value) + if nread <= 0 { + return 0, fmt.Errorf("DictionaryRow parse Uvarint error, nread: %d", nread) + } + return count, nil +} + +// TERM FIELD FREQUENCY + +type TermVector struct { + field uint16 + arrayPositions []uint64 + pos uint64 + start uint64 + end uint64 +} + +func (tv *TermVector) Size() int { + return reflectStaticSizeTermVector + size.SizeOfPtr + + len(tv.arrayPositions)*size.SizeOfUint64 +} + +func (tv *TermVector) String() string { + return fmt.Sprintf("Field: %d Pos: %d Start: %d End %d ArrayPositions: %#v", tv.field, tv.pos, tv.start, tv.end, tv.arrayPositions) +} + +type TermFrequencyRow struct { + term []byte + doc []byte + freq uint64 + vectors []*TermVector + norm float32 + field uint16 +} + +func (tfr *TermFrequencyRow) Size() int { + sizeInBytes := reflectStaticSizeTermFrequencyRow + + len(tfr.term) + + len(tfr.doc) + + for _, entry := range tfr.vectors { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + 
+func (tfr *TermFrequencyRow) Term() []byte { + return tfr.term +} + +func (tfr *TermFrequencyRow) Freq() uint64 { + return tfr.freq +} + +func (tfr *TermFrequencyRow) ScanPrefixForField() []byte { + buf := make([]byte, 3) + buf[0] = 't' + binary.LittleEndian.PutUint16(buf[1:3], tfr.field) + return buf +} + +func (tfr *TermFrequencyRow) ScanPrefixForFieldTermPrefix() []byte { + buf := make([]byte, 3+len(tfr.term)) + buf[0] = 't' + binary.LittleEndian.PutUint16(buf[1:3], tfr.field) + copy(buf[3:], tfr.term) + return buf +} + +func (tfr *TermFrequencyRow) ScanPrefixForFieldTerm() []byte { + buf := make([]byte, 3+len(tfr.term)+1) + buf[0] = 't' + binary.LittleEndian.PutUint16(buf[1:3], tfr.field) + termLen := copy(buf[3:], tfr.term) + buf[3+termLen] = ByteSeparator + return buf +} + +func (tfr *TermFrequencyRow) Key() []byte { + buf := make([]byte, tfr.KeySize()) + size, _ := tfr.KeyTo(buf) + return buf[:size] +} + +func (tfr *TermFrequencyRow) KeySize() int { + return termFrequencyRowKeySize(tfr.term, tfr.doc) +} + +func termFrequencyRowKeySize(term, doc []byte) int { + return 3 + len(term) + 1 + len(doc) +} + +func (tfr *TermFrequencyRow) KeyTo(buf []byte) (int, error) { + return termFrequencyRowKeyTo(buf, tfr.field, tfr.term, tfr.doc), nil +} + +func termFrequencyRowKeyTo(buf []byte, field uint16, term, doc []byte) int { + buf[0] = 't' + binary.LittleEndian.PutUint16(buf[1:3], field) + termLen := copy(buf[3:], term) + buf[3+termLen] = ByteSeparator + docLen := copy(buf[3+termLen+1:], doc) + return 3 + termLen + 1 + docLen +} + +func (tfr *TermFrequencyRow) KeyAppendTo(buf []byte) ([]byte, error) { + keySize := tfr.KeySize() + if cap(buf) < keySize { + buf = make([]byte, keySize) + } + actualSize, err := tfr.KeyTo(buf[0:keySize]) + return buf[0:actualSize], err +} + +func (tfr *TermFrequencyRow) DictionaryRowKey() []byte { + dr := NewDictionaryRow(tfr.term, tfr.field, 0) + return dr.Key() +} + +func (tfr *TermFrequencyRow) DictionaryRowKeySize() int { + dr := 
NewDictionaryRow(tfr.term, tfr.field, 0) + return dr.KeySize() +} + +func (tfr *TermFrequencyRow) DictionaryRowKeyTo(buf []byte) (int, error) { + dr := NewDictionaryRow(tfr.term, tfr.field, 0) + return dr.KeyTo(buf) +} + +func (tfr *TermFrequencyRow) Value() []byte { + buf := make([]byte, tfr.ValueSize()) + size, _ := tfr.ValueTo(buf) + return buf[:size] +} + +func (tfr *TermFrequencyRow) ValueSize() int { + bufLen := binary.MaxVarintLen64 + binary.MaxVarintLen64 + for _, vector := range tfr.vectors { + bufLen += (binary.MaxVarintLen64 * 4) + (1+len(vector.arrayPositions))*binary.MaxVarintLen64 + } + return bufLen +} + +func (tfr *TermFrequencyRow) ValueTo(buf []byte) (int, error) { + used := binary.PutUvarint(buf[:binary.MaxVarintLen64], tfr.freq) + + normuint32 := math.Float32bits(tfr.norm) + newbuf := buf[used : used+binary.MaxVarintLen64] + used += binary.PutUvarint(newbuf, uint64(normuint32)) + + for _, vector := range tfr.vectors { + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], uint64(vector.field)) + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], vector.pos) + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], vector.start) + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], vector.end) + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], uint64(len(vector.arrayPositions))) + for _, arrayPosition := range vector.arrayPositions { + used += binary.PutUvarint(buf[used:used+binary.MaxVarintLen64], arrayPosition) + } + } + return used, nil +} + +func (tfr *TermFrequencyRow) String() string { + return fmt.Sprintf("Term: `%s` Field: %d DocId: `%s` Frequency: %d Norm: %f Vectors: %v", string(tfr.term), tfr.field, string(tfr.doc), tfr.freq, tfr.norm, tfr.vectors) +} + +func InitTermFrequencyRow(tfr *TermFrequencyRow, term []byte, field uint16, docID []byte, freq uint64, norm float32) *TermFrequencyRow { + tfr.term = term + tfr.field = field + tfr.doc = docID + tfr.freq = freq + tfr.norm = 
norm + return tfr +} + +func NewTermFrequencyRow(term []byte, field uint16, docID []byte, freq uint64, norm float32) *TermFrequencyRow { + return &TermFrequencyRow{ + term: term, + field: field, + doc: docID, + freq: freq, + norm: norm, + } +} + +func NewTermFrequencyRowWithTermVectors(term []byte, field uint16, docID []byte, freq uint64, norm float32, vectors []*TermVector) *TermFrequencyRow { + return &TermFrequencyRow{ + term: term, + field: field, + doc: docID, + freq: freq, + norm: norm, + vectors: vectors, + } +} + +func NewTermFrequencyRowK(key []byte) (*TermFrequencyRow, error) { + rv := &TermFrequencyRow{} + err := rv.parseK(key) + if err != nil { + return nil, err + } + return rv, nil +} + +func (tfr *TermFrequencyRow) parseK(key []byte) error { + keyLen := len(key) + if keyLen < 3 { + return fmt.Errorf("invalid term frequency key, no valid field") + } + tfr.field = binary.LittleEndian.Uint16(key[1:3]) + + termEndPos := bytes.IndexByte(key[3:], ByteSeparator) + if termEndPos < 0 { + return fmt.Errorf("invalid term frequency key, no byte separator terminating term") + } + tfr.term = key[3 : 3+termEndPos] + + docLen := keyLen - (3 + termEndPos + 1) + if docLen < 1 { + return fmt.Errorf("invalid term frequency key, empty docid") + } + tfr.doc = key[3+termEndPos+1:] + + return nil +} + +func (tfr *TermFrequencyRow) parseKDoc(key []byte, term []byte) error { + tfr.doc = key[3+len(term)+1:] + if len(tfr.doc) == 0 { + return fmt.Errorf("invalid term frequency key, empty docid") + } + + return nil +} + +func (tfr *TermFrequencyRow) parseV(value []byte, includeTermVectors bool) error { + var bytesRead int + tfr.freq, bytesRead = binary.Uvarint(value) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, invalid frequency") + } + currOffset := bytesRead + + var norm uint64 + norm, bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, no norm") + } + currOffset += bytesRead + + 
tfr.norm = math.Float32frombits(uint32(norm)) + + tfr.vectors = nil + if !includeTermVectors { + return nil + } + + var field uint64 + field, bytesRead = binary.Uvarint(value[currOffset:]) + for bytesRead > 0 { + currOffset += bytesRead + tv := TermVector{} + tv.field = uint16(field) + // at this point we expect at least one term vector + if tfr.vectors == nil { + tfr.vectors = make([]*TermVector, 0) + } + + tv.pos, bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector contains no position") + } + currOffset += bytesRead + + tv.start, bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector contains no start") + } + currOffset += bytesRead + + tv.end, bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector contains no end") + } + currOffset += bytesRead + + var arrayPositionsLen uint64 = 0 + arrayPositionsLen, bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector contains no arrayPositionLen") + } + currOffset += bytesRead + + if arrayPositionsLen > 0 { + tv.arrayPositions = make([]uint64, arrayPositionsLen) + for i := 0; uint64(i) < arrayPositionsLen; i++ { + tv.arrayPositions[i], bytesRead = binary.Uvarint(value[currOffset:]) + if bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector contains no arrayPosition of index %d", i) + } + currOffset += bytesRead + } + } + + tfr.vectors = append(tfr.vectors, &tv) + // try to read next record (may not exist) + field, bytesRead = binary.Uvarint(value[currOffset:]) + } + if len(value[currOffset:]) > 0 && bytesRead <= 0 { + return fmt.Errorf("invalid term frequency value, vector field invalid") + } + + return nil +} + +func NewTermFrequencyRowKV(key, value []byte) (*TermFrequencyRow, error) { + rv, err := 
NewTermFrequencyRowK(key) + if err != nil { + return nil, err + } + + err = rv.parseV(value, true) + if err != nil { + return nil, err + } + return rv, nil + +} + +type BackIndexRow struct { + doc []byte + termsEntries []*BackIndexTermsEntry + storedEntries []*BackIndexStoreEntry +} + +func (br *BackIndexRow) AllTermKeys() [][]byte { + if br == nil { + return nil + } + rv := make([][]byte, 0, len(br.termsEntries)) // FIXME this underestimates severely + for _, termsEntry := range br.termsEntries { + for i := range termsEntry.Terms { + termRow := NewTermFrequencyRow([]byte(termsEntry.Terms[i]), uint16(termsEntry.GetField()), br.doc, 0, 0) + rv = append(rv, termRow.Key()) + } + } + return rv +} + +func (br *BackIndexRow) AllStoredKeys() [][]byte { + if br == nil { + return nil + } + rv := make([][]byte, len(br.storedEntries)) + for i, storedEntry := range br.storedEntries { + storedRow := NewStoredRow(br.doc, uint16(storedEntry.GetField()), storedEntry.GetArrayPositions(), 'x', []byte{}) + rv[i] = storedRow.Key() + } + return rv +} + +func (br *BackIndexRow) Key() []byte { + buf := make([]byte, br.KeySize()) + size, _ := br.KeyTo(buf) + return buf[:size] +} + +func (br *BackIndexRow) KeySize() int { + return len(br.doc) + 1 +} + +func (br *BackIndexRow) KeyTo(buf []byte) (int, error) { + buf[0] = 'b' + used := copy(buf[1:], br.doc) + return used + 1, nil +} + +func (br *BackIndexRow) Value() []byte { + buf := make([]byte, br.ValueSize()) + size, _ := br.ValueTo(buf) + return buf[:size] +} + +func (br *BackIndexRow) ValueSize() int { + birv := &BackIndexRowValue{ + TermsEntries: br.termsEntries, + StoredEntries: br.storedEntries, + } + return birv.Size() +} + +func (br *BackIndexRow) ValueTo(buf []byte) (int, error) { + birv := &BackIndexRowValue{ + TermsEntries: br.termsEntries, + StoredEntries: br.storedEntries, + } + return birv.MarshalTo(buf) +} + +func (br *BackIndexRow) String() string { + return fmt.Sprintf("Backindex DocId: `%s` Terms Entries: %v, Stored 
Entries: %v", string(br.doc), br.termsEntries, br.storedEntries) +} + +func NewBackIndexRow(docID []byte, entries []*BackIndexTermsEntry, storedFields []*BackIndexStoreEntry) *BackIndexRow { + return &BackIndexRow{ + doc: docID, + termsEntries: entries, + storedEntries: storedFields, + } +} + +func NewBackIndexRowKV(key, value []byte) (*BackIndexRow, error) { + rv := BackIndexRow{} + + buf := bytes.NewBuffer(key) + _, err := buf.ReadByte() // type + if err != nil { + return nil, err + } + + rv.doc, err = buf.ReadBytes(ByteSeparator) + if err == io.EOF && len(rv.doc) < 1 { + err = fmt.Errorf("invalid doc length 0 - % x", key) + } + if err != nil && err != io.EOF { + return nil, err + } else if err == nil { + rv.doc = rv.doc[:len(rv.doc)-1] // trim off separator byte + } + + var birv BackIndexRowValue + err = proto.Unmarshal(value, &birv) + if err != nil { + return nil, err + } + rv.termsEntries = birv.TermsEntries + rv.storedEntries = birv.StoredEntries + + return &rv, nil +} + +// STORED + +type StoredRow struct { + doc []byte + field uint16 + arrayPositions []uint64 + typ byte + value []byte +} + +func (s *StoredRow) Key() []byte { + buf := make([]byte, s.KeySize()) + size, _ := s.KeyTo(buf) + return buf[0:size] +} + +func (s *StoredRow) KeySize() int { + return 1 + len(s.doc) + 1 + 2 + (binary.MaxVarintLen64 * len(s.arrayPositions)) +} + +func (s *StoredRow) KeyTo(buf []byte) (int, error) { + docLen := len(s.doc) + buf[0] = 's' + copy(buf[1:], s.doc) + buf[1+docLen] = ByteSeparator + binary.LittleEndian.PutUint16(buf[1+docLen+1:], s.field) + bytesUsed := 1 + docLen + 1 + 2 + for _, arrayPosition := range s.arrayPositions { + varbytes := binary.PutUvarint(buf[bytesUsed:], arrayPosition) + bytesUsed += varbytes + } + return bytesUsed, nil +} + +func (s *StoredRow) Value() []byte { + buf := make([]byte, s.ValueSize()) + size, _ := s.ValueTo(buf) + return buf[:size] +} + +func (s *StoredRow) ValueSize() int { + return len(s.value) + 1 +} + +func (s *StoredRow) 
ValueTo(buf []byte) (int, error) { + buf[0] = s.typ + used := copy(buf[1:], s.value) + return used + 1, nil +} + +func (s *StoredRow) String() string { + return fmt.Sprintf("Document: %s Field %d, Array Positions: %v, Type: %s Value: %s", s.doc, s.field, s.arrayPositions, string(s.typ), s.value) +} + +func (s *StoredRow) ScanPrefixForDoc() []byte { + docLen := len(s.doc) + buf := make([]byte, 1+docLen+1) + buf[0] = 's' + copy(buf[1:], s.doc) + buf[1+docLen] = ByteSeparator + return buf +} + +func NewStoredRow(docID []byte, field uint16, arrayPositions []uint64, typ byte, value []byte) *StoredRow { + return &StoredRow{ + doc: docID, + field: field, + arrayPositions: arrayPositions, + typ: typ, + value: value, + } +} + +func NewStoredRowK(key []byte) (*StoredRow, error) { + rv := StoredRow{} + + buf := bytes.NewBuffer(key) + _, err := buf.ReadByte() // type + if err != nil { + return nil, err + } + + rv.doc, err = buf.ReadBytes(ByteSeparator) + if len(rv.doc) < 2 { // 1 for min doc id length, 1 for separator + err = fmt.Errorf("invalid doc length 0") + return nil, err + } + + rv.doc = rv.doc[:len(rv.doc)-1] // trim off separator byte + + err = binary.Read(buf, binary.LittleEndian, &rv.field) + if err != nil { + return nil, err + } + + rv.arrayPositions = make([]uint64, 0) + nextArrayPos, err := binary.ReadUvarint(buf) + for err == nil { + rv.arrayPositions = append(rv.arrayPositions, nextArrayPos) + nextArrayPos, err = binary.ReadUvarint(buf) + } + return &rv, nil +} + +func NewStoredRowKV(key, value []byte) (*StoredRow, error) { + rv, err := NewStoredRowK(key) + if err != nil { + return nil, err + } + rv.typ = value[0] + rv.value = value[1:] + return rv, nil +} + +type backIndexFieldTermVisitor func(field uint32, term []byte) + +// visitBackIndexRow is designed to process a protobuf encoded +// value, without creating unnecessary garbage. Instead values are passed +// to a callback, inspected first, and only copied if necessary. 
// Due to the fact that this borrows from generated code, it must be manually
// updated if the protobuf definition changes.
//
// This code originates from:
//   func (m *BackIndexRowValue) Unmarshal(data []byte) error
// the sections which create garbage or parse uninteresting sections
// have been commented out. This was done by design to allow for easier
// merging in the future if that original function is regenerated
func visitBackIndexRow(data []byte, callback backIndexFieldTermVisitor) error {
	l := len(data)
	iNdEx := 0
	for iNdEx < l {
		// decode the field tag varint (fieldNum<<3 | wireType)
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := data[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		switch fieldNum {
		case 1:
			// field 1: repeated TermsEntries (length-delimited)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field TermsEntries", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := data[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			postIndex := iNdEx + msglen
			if msglen < 0 {
				return ErrInvalidLengthUpsidedown
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// dont parse term entries
			// m.TermsEntries = append(m.TermsEntries, &BackIndexTermsEntry{})
			// if err := m.TermsEntries[len(m.TermsEntries)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
			// 	return err
			// }
			// instead, inspect them
			if err := visitBackIndexRowFieldTerms(data[iNdEx:postIndex], callback); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// field 2: repeated StoredEntries (length-delimited) — validated but not parsed
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field StoredEntries", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := data[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			postIndex := iNdEx + msglen
			if msglen < 0 {
				return ErrInvalidLengthUpsidedown
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// don't parse stored entries
			// m.StoredEntries = append(m.StoredEntries, &BackIndexStoreEntry{})
			// if err := m.StoredEntries[len(m.StoredEntries)-1].Unmarshal(data[iNdEx:postIndex]); err != nil {
			// 	return err
			// }
			iNdEx = postIndex
		default:
			// unknown field: rewind over the tag bytes already consumed,
			// then skip the whole field
			var sizeOfWire int
			for {
				sizeOfWire++
				wire >>= 7
				if wire == 0 {
					break
				}
			}
			iNdEx -= sizeOfWire
			skippy, err := skipUpsidedown(data[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthUpsidedown
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			// don't track unrecognized data
			//m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	return nil
}

// visitBackIndexRowFieldTerms is designed to process a protobuf encoded
// sub-value within the BackIndexRowValue, without creating unnecessary garbage.
// Instead values are passed to a callback, inspected first, and only copied if
// necessary. Due to the fact that this borrows from generated code, it must
// be manually updated if the protobuf definition changes.
//
// This code originates from:
//   func (m *BackIndexTermsEntry) Unmarshal(data []byte) error {
// the sections which create garbage or parse uninteresting sections
// have been commented out. This was done by design to allow for easier
// merging in the future if that original function is regenerated
func visitBackIndexRowFieldTerms(data []byte, callback backIndexFieldTermVisitor) error {
	var theField uint32

	var hasFields [1]uint64
	l := len(data)
	iNdEx := 0
	for iNdEx < l {
		// decode the field tag varint (fieldNum<<3 | wireType)
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := data[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		switch fieldNum {
		case 1:
			// field 1: Field (varint) — remembered so it can accompany each term
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Field", wireType)
			}
			var v uint32
			for shift := uint(0); ; shift += 7 {
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := data[iNdEx]
				iNdEx++
				v |= (uint32(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			// m.Field = &v
			theField = v
			hasFields[0] |= uint64(0x00000001)
		case 2:
			// field 2: repeated Terms (length-delimited); the raw bytes are
			// handed to the callback rather than copied into a string
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Terms", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := data[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			postIndex := iNdEx + int(stringLen)
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			//m.Terms = append(m.Terms, string(data[iNdEx:postIndex]))
			callback(theField, data[iNdEx:postIndex])
			iNdEx = postIndex
		default:
			// unknown field: rewind over the tag bytes already consumed,
			// then skip the whole field
			var sizeOfWire int
			for {
				sizeOfWire++
				wire >>= 7
				if wire == 0 {
					break
				}
			}
			iNdEx -= sizeOfWire
			skippy, err := skipUpsidedown(data[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthUpsidedown
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			//m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}
	// if hasFields[0]&uint64(0x00000001) == 0 {
	// 	return new(github_com_golang_protobuf_proto.RequiredNotSetError)
	// }

	return nil
}

// ----------------------------------------------------------------------------
// vendor/github.com/blevesearch/bleve/index/upsidedown/row_merge.go
// ----------------------------------------------------------------------------
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package upsidedown

import (
	"encoding/binary"
)

// mergeOperator is the KVStore merge operator instance used to maintain
// dictionary row counts without read-modify-write cycles.
var mergeOperator upsideDownMerge

// dictionaryTermIncr and dictionaryTermDecr are pre-encoded little-endian
// +1 / -1 merge operands for dictionary term counts.
var dictionaryTermIncr []byte
var dictionaryTermDecr []byte

func init() {
	dictionaryTermIncr = make([]byte, 8)
	binary.LittleEndian.PutUint64(dictionaryTermIncr, uint64(1))
	dictionaryTermDecr = make([]byte, 8)
	var negOne = int64(-1)
	binary.LittleEndian.PutUint64(dictionaryTermDecr, uint64(negOne))
}

type upsideDownMerge struct{}

// FullMerge applies all pending signed-delta operands to the dictionary row
// identified by key. Each operand is a little-endian-encoded int64 delta;
// the count is clamped at zero rather than underflowing. Returns false on
// any parse error.
func (m *upsideDownMerge) FullMerge(key, existingValue []byte, operands [][]byte) ([]byte, bool) {
	// set up record based on key
	dr, err := NewDictionaryRowK(key)
	if err != nil {
		return nil, false
	}
	if len(existingValue) > 0 {
		// if existing value, parse it
		err = dr.parseDictionaryV(existingValue)
		if err != nil {
			return nil, false
		}
	}

	// now process operands
	for _, operand := range operands {
		next := int64(binary.LittleEndian.Uint64(operand))
		if next < 0 && uint64(-next) > dr.count {
			// subtracting next from existing would overflow
			dr.count = 0
		} else if next < 0 {
			dr.count -= uint64(-next)
		} else {
			dr.count += uint64(next)
		}
	}

	return dr.Value(), true
}

// PartialMerge combines two signed-delta operands into one by summing them,
// so the store can collapse pending merges before a FullMerge happens.
func (m *upsideDownMerge) PartialMerge(key, leftOperand, rightOperand []byte) ([]byte, bool) {
	left := int64(binary.LittleEndian.Uint64(leftOperand))
	right := int64(binary.LittleEndian.Uint64(rightOperand))
	rv := make([]byte, 8)
	binary.LittleEndian.PutUint64(rv, uint64(left+right))
	return rv, true
}

// Name identifies this merge operator to the underlying KVStore.
func (m *upsideDownMerge) Name() string {
	return "upsideDownMerge"
}

// ----------------------------------------------------------------------------
// vendor/github.com/blevesearch/bleve/index/upsidedown/stats.go
// ----------------------------------------------------------------------------
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package upsidedown

import (
	"encoding/json"
	"sync/atomic"

	"github.com/blevesearch/bleve/index/store"
)

// indexStat collects operation counters for an UpsideDownCouch index.
// All counter fields are read and written with sync/atomic.
type indexStat struct {
	updates, deletes, batches, errors uint64
	analysisTime, indexTime           uint64
	termSearchersStarted              uint64
	termSearchersFinished             uint64
	numPlainTextBytesIndexed          uint64
	i                                 *UpsideDownCouch // back-reference for KV store stats
}

// statsMap returns a snapshot of all counters keyed by their external
// stat names, including nested KV store stats when the store supports them.
func (i *indexStat) statsMap() map[string]interface{} {
	m := map[string]interface{}{}
	m["updates"] = atomic.LoadUint64(&i.updates)
	m["deletes"] = atomic.LoadUint64(&i.deletes)
	m["batches"] = atomic.LoadUint64(&i.batches)
	m["errors"] = atomic.LoadUint64(&i.errors)
	m["analysis_time"] = atomic.LoadUint64(&i.analysisTime)
	m["index_time"] = atomic.LoadUint64(&i.indexTime)
	m["term_searchers_started"] = atomic.LoadUint64(&i.termSearchersStarted)
	m["term_searchers_finished"] = atomic.LoadUint64(&i.termSearchersFinished)
	m["num_plain_text_bytes_indexed"] = atomic.LoadUint64(&i.numPlainTextBytesIndexed)

	if o, ok := i.i.store.(store.KVStoreStats); ok {
		m["kv"] = o.StatsMap()
	}

	return m
}

// MarshalJSON renders the stats snapshot as JSON.
func (i *indexStat) MarshalJSON() ([]byte, error) {
	m := i.statsMap()
	return json.Marshal(m)
}

// ----------------------------------------------------------------------------
// vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.go
// ----------------------------------------------------------------------------
// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//go:generate protoc --gofast_out=. upsidedown.proto

package upsidedown

import (
	"encoding/binary"
	"encoding/json"
	"fmt"
	"math"
	"sync"
	"sync/atomic"
	"time"

	"github.com/blevesearch/bleve/analysis"
	"github.com/blevesearch/bleve/document"
	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/store"
	"github.com/blevesearch/bleve/registry"

	"github.com/golang/protobuf/proto"
)

const Name = "upside_down"

// RowBufferSize is the size of the pooled row buffers. Ideally this is sized
// to be the smallest size that can contain an index row key and its
// corresponding value. It is not a limit; if need be a larger buffer is
// allocated, but performance will be more optimal if *most* rows fit this
// size.
const RowBufferSize = 4 * 1024

// VersionKey is the store key under which the index format version is kept.
var VersionKey = []byte{'v'}

const Version uint8 = 7

var IncompatibleVersion = fmt.Errorf("incompatible version, %d is supported", Version)

// UpsideDownCouch is the classic bleve index implementation backed by a
// generic KV store.
type UpsideDownCouch struct {
	version       uint8
	path          string
	storeName     string
	storeConfig   map[string]interface{}
	store         store.KVStore
	fieldCache    *index.FieldCache
	analysisQueue *index.AnalysisQueue
	stats         *indexStat

	m sync.RWMutex
	// fields protected by m
	docCount uint64

	writeMutex sync.Mutex
}

// docBackIndexRow pairs a document with its back index row while a batch is
// being processed.
type docBackIndexRow struct {
	docID        string
	doc          *document.Document // If deletion, doc will be nil.
	backIndexRow *BackIndexRow
}

// NewUpsideDownCouch creates an index using the named KV store and the
// shared analysis queue. The store itself is not opened until Open().
func NewUpsideDownCouch(storeName string, storeConfig map[string]interface{}, analysisQueue *index.AnalysisQueue) (index.Index, error) {
	rv := &UpsideDownCouch{
		version:       Version,
		fieldCache:    index.NewFieldCache(),
		storeName:     storeName,
		storeConfig:   storeConfig,
		analysisQueue: analysisQueue,
	}
	rv.stats = &indexStat{i: rv}
	return rv, nil
}

// init writes the version marker row into a brand-new index.
func (udc *UpsideDownCouch) init(kvwriter store.KVWriter) (err error) {
	// version marker
	rowsAll := [][]UpsideDownCouchRow{
		{NewVersionRow(udc.version)},
	}

	err = udc.batchRows(kvwriter, nil, rowsAll, nil)
	return
}

// loadSchema reads all field rows ('f' prefix) into the field cache and
// verifies the stored version row matches this implementation's Version.
func (udc *UpsideDownCouch) loadSchema(kvreader store.KVReader) (err error) {

	it := kvreader.PrefixIterator([]byte{'f'})
	defer func() {
		if cerr := it.Close(); err == nil && cerr != nil {
			err = cerr
		}
	}()

	key, val, valid := it.Current()
	for valid {
		var fieldRow *FieldRow
		fieldRow, err = NewFieldRowKV(key, val)
		if err != nil {
			return
		}
		udc.fieldCache.AddExisting(fieldRow.name, fieldRow.index)

		it.Next()
		key, val, valid = it.Current()
	}

	val, err = kvreader.Get([]byte{'v'})
	if err != nil {
		return
	}
	var vr *VersionRow
	vr, err = NewVersionRowKV([]byte{'v'}, val)
	if err != nil {
		return
	}
	if vr.version != Version {
		err = IncompatibleVersion
		return
	}

	return
}

// rowBufferPool recycles scratch buffers of RowBufferSize bytes; see
// GetRowBuffer / PutRowBuffer.
var rowBufferPool sync.Pool
+ +func GetRowBuffer() []byte { + if rb, ok := rowBufferPool.Get().([]byte); ok { + return rb + } else { + return make([]byte, RowBufferSize) + } +} + +func PutRowBuffer(buf []byte) { + rowBufferPool.Put(buf) +} + +func (udc *UpsideDownCouch) batchRows(writer store.KVWriter, addRowsAll [][]UpsideDownCouchRow, updateRowsAll [][]UpsideDownCouchRow, deleteRowsAll [][]UpsideDownCouchRow) (err error) { + dictionaryDeltas := make(map[string]int64) + + // count up bytes needed for buffering. + addNum := 0 + addKeyBytes := 0 + addValBytes := 0 + + updateNum := 0 + updateKeyBytes := 0 + updateValBytes := 0 + + deleteNum := 0 + deleteKeyBytes := 0 + + rowBuf := GetRowBuffer() + + for _, addRows := range addRowsAll { + for _, row := range addRows { + tfr, ok := row.(*TermFrequencyRow) + if ok { + if tfr.DictionaryRowKeySize() > len(rowBuf) { + rowBuf = make([]byte, tfr.DictionaryRowKeySize()) + } + dictKeySize, err := tfr.DictionaryRowKeyTo(rowBuf) + if err != nil { + return err + } + dictionaryDeltas[string(rowBuf[:dictKeySize])] += 1 + } + addKeyBytes += row.KeySize() + addValBytes += row.ValueSize() + } + addNum += len(addRows) + } + + for _, updateRows := range updateRowsAll { + for _, row := range updateRows { + updateKeyBytes += row.KeySize() + updateValBytes += row.ValueSize() + } + updateNum += len(updateRows) + } + + for _, deleteRows := range deleteRowsAll { + for _, row := range deleteRows { + tfr, ok := row.(*TermFrequencyRow) + if ok { + // need to decrement counter + if tfr.DictionaryRowKeySize() > len(rowBuf) { + rowBuf = make([]byte, tfr.DictionaryRowKeySize()) + } + dictKeySize, err := tfr.DictionaryRowKeyTo(rowBuf) + if err != nil { + return err + } + dictionaryDeltas[string(rowBuf[:dictKeySize])] -= 1 + } + deleteKeyBytes += row.KeySize() + } + deleteNum += len(deleteRows) + } + + PutRowBuffer(rowBuf) + + mergeNum := len(dictionaryDeltas) + mergeKeyBytes := 0 + mergeValBytes := mergeNum * DictionaryRowMaxValueSize + + for dictRowKey := range 
dictionaryDeltas { + mergeKeyBytes += len(dictRowKey) + } + + // prepare batch + totBytes := addKeyBytes + addValBytes + + updateKeyBytes + updateValBytes + + deleteKeyBytes + + 2*(mergeKeyBytes+mergeValBytes) + + buf, wb, err := writer.NewBatchEx(store.KVBatchOptions{ + TotalBytes: totBytes, + NumSets: addNum + updateNum, + NumDeletes: deleteNum, + NumMerges: mergeNum, + }) + if err != nil { + return err + } + defer func() { + _ = wb.Close() + }() + + // fill the batch + for _, addRows := range addRowsAll { + for _, row := range addRows { + keySize, err := row.KeyTo(buf) + if err != nil { + return err + } + valSize, err := row.ValueTo(buf[keySize:]) + if err != nil { + return err + } + wb.Set(buf[:keySize], buf[keySize:keySize+valSize]) + buf = buf[keySize+valSize:] + } + } + + for _, updateRows := range updateRowsAll { + for _, row := range updateRows { + keySize, err := row.KeyTo(buf) + if err != nil { + return err + } + valSize, err := row.ValueTo(buf[keySize:]) + if err != nil { + return err + } + wb.Set(buf[:keySize], buf[keySize:keySize+valSize]) + buf = buf[keySize+valSize:] + } + } + + for _, deleteRows := range deleteRowsAll { + for _, row := range deleteRows { + keySize, err := row.KeyTo(buf) + if err != nil { + return err + } + wb.Delete(buf[:keySize]) + buf = buf[keySize:] + } + } + + for dictRowKey, delta := range dictionaryDeltas { + dictRowKeyLen := copy(buf, dictRowKey) + binary.LittleEndian.PutUint64(buf[dictRowKeyLen:], uint64(delta)) + wb.Merge(buf[:dictRowKeyLen], buf[dictRowKeyLen:dictRowKeyLen+DictionaryRowMaxValueSize]) + buf = buf[dictRowKeyLen+DictionaryRowMaxValueSize:] + } + + // write out the batch + return writer.ExecuteBatch(wb) +} + +func (udc *UpsideDownCouch) Open() (err error) { + // acquire the write mutex for the duration of Open() + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + + // open the kv store + storeConstructor := registry.KVStoreConstructorByName(udc.storeName) + if storeConstructor == nil { + err = 
index.ErrorUnknownStorageType + return + } + + // now open the store + udc.store, err = storeConstructor(&mergeOperator, udc.storeConfig) + if err != nil { + return + } + + // start a reader to look at the index + var kvreader store.KVReader + kvreader, err = udc.store.Reader() + if err != nil { + return + } + + var value []byte + value, err = kvreader.Get(VersionKey) + if err != nil { + _ = kvreader.Close() + return + } + + if value != nil { + err = udc.loadSchema(kvreader) + if err != nil { + _ = kvreader.Close() + return + } + + // set doc count + udc.m.Lock() + udc.docCount, err = udc.countDocs(kvreader) + udc.m.Unlock() + + err = kvreader.Close() + } else { + // new index, close the reader and open writer to init + err = kvreader.Close() + if err != nil { + return + } + + var kvwriter store.KVWriter + kvwriter, err = udc.store.Writer() + if err != nil { + return + } + defer func() { + if cerr := kvwriter.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + // init the index + err = udc.init(kvwriter) + } + + return +} + +func (udc *UpsideDownCouch) countDocs(kvreader store.KVReader) (count uint64, err error) { + it := kvreader.PrefixIterator([]byte{'b'}) + defer func() { + if cerr := it.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + _, _, valid := it.Current() + for valid { + count++ + it.Next() + _, _, valid = it.Current() + } + + return +} + +func (udc *UpsideDownCouch) rowCount() (count uint64, err error) { + // start an isolated reader for use during the rowcount + kvreader, err := udc.store.Reader() + if err != nil { + return + } + defer func() { + if cerr := kvreader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + it := kvreader.RangeIterator(nil, nil) + defer func() { + if cerr := it.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + _, _, valid := it.Current() + for valid { + count++ + it.Next() + _, _, valid = it.Current() + } + + return +} + +func (udc *UpsideDownCouch) Close() error { + return 
udc.store.Close() +} + +func (udc *UpsideDownCouch) Update(doc *document.Document) (err error) { + // do analysis before acquiring write lock + analysisStart := time.Now() + resultChan := make(chan *index.AnalysisResult) + aw := index.NewAnalysisWork(udc, doc, resultChan) + + // put the work on the queue + udc.analysisQueue.Queue(aw) + + // wait for the result + result := <-resultChan + close(resultChan) + atomic.AddUint64(&udc.stats.analysisTime, uint64(time.Since(analysisStart))) + + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + + // open a reader for backindex lookup + var kvreader store.KVReader + kvreader, err = udc.store.Reader() + if err != nil { + return + } + + // first we lookup the backindex row for the doc id if it exists + // lookup the back index row + var backIndexRow *BackIndexRow + backIndexRow, err = backIndexRowForDoc(kvreader, index.IndexInternalID(doc.ID)) + if err != nil { + _ = kvreader.Close() + atomic.AddUint64(&udc.stats.errors, 1) + return + } + + err = kvreader.Close() + if err != nil { + return + } + + return udc.UpdateWithAnalysis(doc, result, backIndexRow) +} + +func (udc *UpsideDownCouch) UpdateWithAnalysis(doc *document.Document, + result *index.AnalysisResult, backIndexRow *BackIndexRow) (err error) { + // start a writer for this update + indexStart := time.Now() + var kvwriter store.KVWriter + kvwriter, err = udc.store.Writer() + if err != nil { + return + } + defer func() { + if cerr := kvwriter.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + // prepare a list of rows + var addRowsAll [][]UpsideDownCouchRow + var updateRowsAll [][]UpsideDownCouchRow + var deleteRowsAll [][]UpsideDownCouchRow + + addRows, updateRows, deleteRows := udc.mergeOldAndNew(backIndexRow, result.Rows) + if len(addRows) > 0 { + addRowsAll = append(addRowsAll, addRows) + } + if len(updateRows) > 0 { + updateRowsAll = append(updateRowsAll, updateRows) + } + if len(deleteRows) > 0 { + deleteRowsAll = append(deleteRowsAll, deleteRows) 
+ } + + err = udc.batchRows(kvwriter, addRowsAll, updateRowsAll, deleteRowsAll) + if err == nil && backIndexRow == nil { + udc.m.Lock() + udc.docCount++ + udc.m.Unlock() + } + atomic.AddUint64(&udc.stats.indexTime, uint64(time.Since(indexStart))) + if err == nil { + atomic.AddUint64(&udc.stats.updates, 1) + atomic.AddUint64(&udc.stats.numPlainTextBytesIndexed, doc.NumPlainTextBytes()) + } else { + atomic.AddUint64(&udc.stats.errors, 1) + } + return +} + +func (udc *UpsideDownCouch) mergeOldAndNew(backIndexRow *BackIndexRow, rows []index.IndexRow) (addRows []UpsideDownCouchRow, updateRows []UpsideDownCouchRow, deleteRows []UpsideDownCouchRow) { + addRows = make([]UpsideDownCouchRow, 0, len(rows)) + + if backIndexRow == nil { + addRows = addRows[0:len(rows)] + for i, row := range rows { + addRows[i] = row + } + return addRows, nil, nil + } + + updateRows = make([]UpsideDownCouchRow, 0, len(rows)) + deleteRows = make([]UpsideDownCouchRow, 0, len(rows)) + + var existingTermKeys map[string]struct{} + backIndexTermKeys := backIndexRow.AllTermKeys() + if len(backIndexTermKeys) > 0 { + existingTermKeys = make(map[string]struct{}, len(backIndexTermKeys)) + for _, key := range backIndexTermKeys { + existingTermKeys[string(key)] = struct{}{} + } + } + + var existingStoredKeys map[string]struct{} + backIndexStoredKeys := backIndexRow.AllStoredKeys() + if len(backIndexStoredKeys) > 0 { + existingStoredKeys = make(map[string]struct{}, len(backIndexStoredKeys)) + for _, key := range backIndexStoredKeys { + existingStoredKeys[string(key)] = struct{}{} + } + } + + keyBuf := GetRowBuffer() + for _, row := range rows { + switch row := row.(type) { + case *TermFrequencyRow: + if existingTermKeys != nil { + if row.KeySize() > len(keyBuf) { + keyBuf = make([]byte, row.KeySize()) + } + keySize, _ := row.KeyTo(keyBuf) + if _, ok := existingTermKeys[string(keyBuf[:keySize])]; ok { + updateRows = append(updateRows, row) + delete(existingTermKeys, string(keyBuf[:keySize])) + continue + } + } 
+ addRows = append(addRows, row) + case *StoredRow: + if existingStoredKeys != nil { + if row.KeySize() > len(keyBuf) { + keyBuf = make([]byte, row.KeySize()) + } + keySize, _ := row.KeyTo(keyBuf) + if _, ok := existingStoredKeys[string(keyBuf[:keySize])]; ok { + updateRows = append(updateRows, row) + delete(existingStoredKeys, string(keyBuf[:keySize])) + continue + } + } + addRows = append(addRows, row) + default: + updateRows = append(updateRows, row) + } + } + PutRowBuffer(keyBuf) + + // any of the existing rows that weren't updated need to be deleted + for existingTermKey := range existingTermKeys { + termFreqRow, err := NewTermFrequencyRowK([]byte(existingTermKey)) + if err == nil { + deleteRows = append(deleteRows, termFreqRow) + } + } + + // any of the existing stored fields that weren't updated need to be deleted + for existingStoredKey := range existingStoredKeys { + storedRow, err := NewStoredRowK([]byte(existingStoredKey)) + if err == nil { + deleteRows = append(deleteRows, storedRow) + } + } + + return addRows, updateRows, deleteRows +} + +func (udc *UpsideDownCouch) storeField(docID []byte, field document.Field, fieldIndex uint16, rows []index.IndexRow, backIndexStoredEntries []*BackIndexStoreEntry) ([]index.IndexRow, []*BackIndexStoreEntry) { + fieldType := encodeFieldType(field) + storedRow := NewStoredRow(docID, fieldIndex, field.ArrayPositions(), fieldType, field.Value()) + + // record the back index entry + backIndexStoredEntry := BackIndexStoreEntry{Field: proto.Uint32(uint32(fieldIndex)), ArrayPositions: field.ArrayPositions()} + + return append(rows, storedRow), append(backIndexStoredEntries, &backIndexStoredEntry) +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType 
= 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +func (udc *UpsideDownCouch) indexField(docID []byte, includeTermVectors bool, fieldIndex uint16, fieldLength int, tokenFreqs analysis.TokenFrequencies, rows []index.IndexRow, backIndexTermsEntries []*BackIndexTermsEntry) ([]index.IndexRow, []*BackIndexTermsEntry) { + fieldNorm := float32(1.0 / math.Sqrt(float64(fieldLength))) + + termFreqRows := make([]TermFrequencyRow, len(tokenFreqs)) + termFreqRowsUsed := 0 + + terms := make([]string, 0, len(tokenFreqs)) + for k, tf := range tokenFreqs { + termFreqRow := &termFreqRows[termFreqRowsUsed] + termFreqRowsUsed++ + + InitTermFrequencyRow(termFreqRow, tf.Term, fieldIndex, docID, + uint64(frequencyFromTokenFreq(tf)), fieldNorm) + + if includeTermVectors { + termFreqRow.vectors, rows = udc.termVectorsFromTokenFreq(fieldIndex, tf, rows) + } + + // record the back index entry + terms = append(terms, k) + + rows = append(rows, termFreqRow) + } + backIndexTermsEntry := BackIndexTermsEntry{Field: proto.Uint32(uint32(fieldIndex)), Terms: terms} + backIndexTermsEntries = append(backIndexTermsEntries, &backIndexTermsEntry) + + return rows, backIndexTermsEntries +} + +func (udc *UpsideDownCouch) Delete(id string) (err error) { + indexStart := time.Now() + + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + + // open a reader for backindex lookup + var kvreader store.KVReader + kvreader, err = udc.store.Reader() + if err != nil { + return + } + + // first we lookup the backindex row for the doc id if it exists + // lookup the back index row + var backIndexRow *BackIndexRow + backIndexRow, err = backIndexRowForDoc(kvreader, index.IndexInternalID(id)) + if err != nil { + _ = kvreader.Close() + atomic.AddUint64(&udc.stats.errors, 1) + return + } + + err = kvreader.Close() + if err != nil { + return + } + + if backIndexRow == nil { + atomic.AddUint64(&udc.stats.deletes, 1) + return + } + + // start a writer for this delete + var kvwriter 
store.KVWriter + kvwriter, err = udc.store.Writer() + if err != nil { + return + } + defer func() { + if cerr := kvwriter.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + var deleteRowsAll [][]UpsideDownCouchRow + + deleteRows := udc.deleteSingle(id, backIndexRow, nil) + if len(deleteRows) > 0 { + deleteRowsAll = append(deleteRowsAll, deleteRows) + } + + err = udc.batchRows(kvwriter, nil, nil, deleteRowsAll) + if err == nil { + udc.m.Lock() + udc.docCount-- + udc.m.Unlock() + } + atomic.AddUint64(&udc.stats.indexTime, uint64(time.Since(indexStart))) + if err == nil { + atomic.AddUint64(&udc.stats.deletes, 1) + } else { + atomic.AddUint64(&udc.stats.errors, 1) + } + return +} + +func (udc *UpsideDownCouch) deleteSingle(id string, backIndexRow *BackIndexRow, deleteRows []UpsideDownCouchRow) []UpsideDownCouchRow { + idBytes := []byte(id) + + for _, backIndexEntry := range backIndexRow.termsEntries { + for i := range backIndexEntry.Terms { + tfr := NewTermFrequencyRow([]byte(backIndexEntry.Terms[i]), uint16(*backIndexEntry.Field), idBytes, 0, 0) + deleteRows = append(deleteRows, tfr) + } + } + for _, se := range backIndexRow.storedEntries { + sf := NewStoredRow(idBytes, uint16(*se.Field), se.ArrayPositions, 'x', nil) + deleteRows = append(deleteRows, sf) + } + + // also delete the back entry itself + deleteRows = append(deleteRows, backIndexRow) + return deleteRows +} + +func decodeFieldType(typ byte, name string, pos []uint64, value []byte) document.Field { + switch typ { + case 't': + return document.NewTextField(name, pos, value) + case 'n': + return document.NewNumericFieldFromBytes(name, pos, value) + case 'd': + return document.NewDateTimeFieldFromBytes(name, pos, value) + case 'b': + return document.NewBooleanFieldFromBytes(name, pos, value) + case 'g': + return document.NewGeoPointFieldFromBytes(name, pos, value) + } + return nil +} + +func frequencyFromTokenFreq(tf *analysis.TokenFreq) int { + return tf.Frequency() +} + +func (udc 
*UpsideDownCouch) termVectorsFromTokenFreq(field uint16, tf *analysis.TokenFreq, rows []index.IndexRow) ([]*TermVector, []index.IndexRow) { + a := make([]TermVector, len(tf.Locations)) + rv := make([]*TermVector, len(tf.Locations)) + + for i, l := range tf.Locations { + var newFieldRow *FieldRow + fieldIndex := field + if l.Field != "" { + // lookup correct field + fieldIndex, newFieldRow = udc.fieldIndexOrNewRow(l.Field) + if newFieldRow != nil { + rows = append(rows, newFieldRow) + } + } + a[i] = TermVector{ + field: fieldIndex, + arrayPositions: l.ArrayPositions, + pos: uint64(l.Position), + start: uint64(l.Start), + end: uint64(l.End), + } + rv[i] = &a[i] + } + + return rv, rows +} + +func (udc *UpsideDownCouch) termFieldVectorsFromTermVectors(in []*TermVector) []*index.TermFieldVector { + if len(in) == 0 { + return nil + } + + a := make([]index.TermFieldVector, len(in)) + rv := make([]*index.TermFieldVector, len(in)) + + for i, tv := range in { + fieldName := udc.fieldCache.FieldIndexed(tv.field) + a[i] = index.TermFieldVector{ + Field: fieldName, + ArrayPositions: tv.arrayPositions, + Pos: tv.pos, + Start: tv.start, + End: tv.end, + } + rv[i] = &a[i] + } + return rv +} + +func (udc *UpsideDownCouch) Batch(batch *index.Batch) (err error) { + persistedCallback := batch.PersistedCallback() + if persistedCallback != nil { + defer persistedCallback(err) + } + analysisStart := time.Now() + + resultChan := make(chan *index.AnalysisResult, len(batch.IndexOps)) + + var numUpdates uint64 + var numPlainTextBytes uint64 + for _, doc := range batch.IndexOps { + if doc != nil { + numUpdates++ + numPlainTextBytes += doc.NumPlainTextBytes() + } + } + + if numUpdates > 0 { + go func() { + for k := range batch.IndexOps { + doc := batch.IndexOps[k] + if doc != nil { + aw := index.NewAnalysisWork(udc, doc, resultChan) + // put the work on the queue + udc.analysisQueue.Queue(aw) + } + } + }() + } + + // retrieve back index rows concurrent with analysis + docBackIndexRowErr := 
error(nil) + docBackIndexRowCh := make(chan *docBackIndexRow, len(batch.IndexOps)) + + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + + go func() { + defer close(docBackIndexRowCh) + + // open a reader for backindex lookup + var kvreader store.KVReader + kvreader, err = udc.store.Reader() + if err != nil { + docBackIndexRowErr = err + return + } + defer func() { + if cerr := kvreader.Close(); err == nil && cerr != nil { + docBackIndexRowErr = cerr + } + }() + + for docID, doc := range batch.IndexOps { + backIndexRow, err := backIndexRowForDoc(kvreader, index.IndexInternalID(docID)) + if err != nil { + docBackIndexRowErr = err + return + } + + docBackIndexRowCh <- &docBackIndexRow{docID, doc, backIndexRow} + } + }() + + // wait for analysis result + newRowsMap := make(map[string][]index.IndexRow) + var itemsDeQueued uint64 + for itemsDeQueued < numUpdates { + result := <-resultChan + newRowsMap[result.DocID] = result.Rows + itemsDeQueued++ + } + close(resultChan) + + atomic.AddUint64(&udc.stats.analysisTime, uint64(time.Since(analysisStart))) + + docsAdded := uint64(0) + docsDeleted := uint64(0) + + indexStart := time.Now() + + // prepare a list of rows + var addRowsAll [][]UpsideDownCouchRow + var updateRowsAll [][]UpsideDownCouchRow + var deleteRowsAll [][]UpsideDownCouchRow + + // add the internal ops + var updateRows []UpsideDownCouchRow + var deleteRows []UpsideDownCouchRow + + for internalKey, internalValue := range batch.InternalOps { + if internalValue == nil { + // delete + deleteInternalRow := NewInternalRow([]byte(internalKey), nil) + deleteRows = append(deleteRows, deleteInternalRow) + } else { + updateInternalRow := NewInternalRow([]byte(internalKey), internalValue) + updateRows = append(updateRows, updateInternalRow) + } + } + + if len(updateRows) > 0 { + updateRowsAll = append(updateRowsAll, updateRows) + } + if len(deleteRows) > 0 { + deleteRowsAll = append(deleteRowsAll, deleteRows) + } + + // process back index rows as they arrive + for 
dbir := range docBackIndexRowCh { + if dbir.doc == nil && dbir.backIndexRow != nil { + // delete + deleteRows := udc.deleteSingle(dbir.docID, dbir.backIndexRow, nil) + if len(deleteRows) > 0 { + deleteRowsAll = append(deleteRowsAll, deleteRows) + } + docsDeleted++ + } else if dbir.doc != nil { + addRows, updateRows, deleteRows := udc.mergeOldAndNew(dbir.backIndexRow, newRowsMap[dbir.docID]) + if len(addRows) > 0 { + addRowsAll = append(addRowsAll, addRows) + } + if len(updateRows) > 0 { + updateRowsAll = append(updateRowsAll, updateRows) + } + if len(deleteRows) > 0 { + deleteRowsAll = append(deleteRowsAll, deleteRows) + } + if dbir.backIndexRow == nil { + docsAdded++ + } + } + } + + if docBackIndexRowErr != nil { + return docBackIndexRowErr + } + + // start a writer for this batch + var kvwriter store.KVWriter + kvwriter, err = udc.store.Writer() + if err != nil { + return + } + + err = udc.batchRows(kvwriter, addRowsAll, updateRowsAll, deleteRowsAll) + if err != nil { + _ = kvwriter.Close() + atomic.AddUint64(&udc.stats.errors, 1) + return + } + + err = kvwriter.Close() + + atomic.AddUint64(&udc.stats.indexTime, uint64(time.Since(indexStart))) + + if err == nil { + udc.m.Lock() + udc.docCount += docsAdded + udc.docCount -= docsDeleted + udc.m.Unlock() + atomic.AddUint64(&udc.stats.updates, numUpdates) + atomic.AddUint64(&udc.stats.deletes, docsDeleted) + atomic.AddUint64(&udc.stats.batches, 1) + atomic.AddUint64(&udc.stats.numPlainTextBytesIndexed, numPlainTextBytes) + } else { + atomic.AddUint64(&udc.stats.errors, 1) + } + + return +} + +func (udc *UpsideDownCouch) SetInternal(key, val []byte) (err error) { + internalRow := NewInternalRow(key, val) + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + var writer store.KVWriter + writer, err = udc.store.Writer() + if err != nil { + return + } + defer func() { + if cerr := writer.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + batch := writer.NewBatch() + batch.Set(internalRow.Key(), 
internalRow.Value()) + + return writer.ExecuteBatch(batch) +} + +func (udc *UpsideDownCouch) DeleteInternal(key []byte) (err error) { + internalRow := NewInternalRow(key, nil) + udc.writeMutex.Lock() + defer udc.writeMutex.Unlock() + var writer store.KVWriter + writer, err = udc.store.Writer() + if err != nil { + return + } + defer func() { + if cerr := writer.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + batch := writer.NewBatch() + batch.Delete(internalRow.Key()) + return writer.ExecuteBatch(batch) +} + +func (udc *UpsideDownCouch) Reader() (index.IndexReader, error) { + kvr, err := udc.store.Reader() + if err != nil { + return nil, fmt.Errorf("error opening store reader: %v", err) + } + udc.m.RLock() + defer udc.m.RUnlock() + return &IndexReader{ + index: udc, + kvreader: kvr, + docCount: udc.docCount, + }, nil +} + +func (udc *UpsideDownCouch) Stats() json.Marshaler { + return udc.stats +} + +func (udc *UpsideDownCouch) StatsMap() map[string]interface{} { + return udc.stats.statsMap() +} + +func (udc *UpsideDownCouch) Advanced() (store.KVStore, error) { + return udc.store, nil +} + +func (udc *UpsideDownCouch) fieldIndexOrNewRow(name string) (uint16, *FieldRow) { + index, existed := udc.fieldCache.FieldNamed(name, true) + if !existed { + return index, NewFieldRow(index, name) + } + return index, nil +} + +func init() { + registry.RegisterIndexType(Name, NewUpsideDownCouch) +} + +func backIndexRowForDoc(kvreader store.KVReader, docID index.IndexInternalID) (*BackIndexRow, error) { + // use a temporary row structure to build key + tempRow := BackIndexRow{ + doc: docID, + } + + keyBuf := GetRowBuffer() + if tempRow.KeySize() > len(keyBuf) { + keyBuf = make([]byte, 2*tempRow.KeySize()) + } + defer PutRowBuffer(keyBuf) + keySize, err := tempRow.KeyTo(keyBuf) + if err != nil { + return nil, err + } + + value, err := kvreader.Get(keyBuf[:keySize]) + if err != nil { + return nil, err + } + if value == nil { + return nil, nil + } + backIndexRow, err := 
NewBackIndexRowKV(keyBuf[:keySize], value) + if err != nil { + return nil, err + } + return backIndexRow, nil +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.pb.go b/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.pb.go new file mode 100644 index 0000000..c161e1c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.pb.go @@ -0,0 +1,688 @@ +// Code generated by protoc-gen-gogo. +// source: upsidedown.proto +// DO NOT EDIT! + +/* + Package upsidedown is a generated protocol buffer package. + + It is generated from these files: + upsidedown.proto + + It has these top-level messages: + BackIndexTermsEntry + BackIndexStoreEntry + BackIndexRowValue +*/ +package upsidedown + +import proto "github.com/golang/protobuf/proto" +import math "math" + +import io "io" +import fmt "fmt" +import github_com_golang_protobuf_proto "github.com/golang/protobuf/proto" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = math.Inf + +type BackIndexTermsEntry struct { + Field *uint32 `protobuf:"varint,1,req,name=field" json:"field,omitempty"` + Terms []string `protobuf:"bytes,2,rep,name=terms" json:"terms,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *BackIndexTermsEntry) Reset() { *m = BackIndexTermsEntry{} } +func (m *BackIndexTermsEntry) String() string { return proto.CompactTextString(m) } +func (*BackIndexTermsEntry) ProtoMessage() {} + +func (m *BackIndexTermsEntry) GetField() uint32 { + if m != nil && m.Field != nil { + return *m.Field + } + return 0 +} + +func (m *BackIndexTermsEntry) GetTerms() []string { + if m != nil { + return m.Terms + } + return nil +} + +type BackIndexStoreEntry struct { + Field *uint32 `protobuf:"varint,1,req,name=field" json:"field,omitempty"` + ArrayPositions []uint64 `protobuf:"varint,2,rep,name=arrayPositions" json:"arrayPositions,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *BackIndexStoreEntry) Reset() { *m = BackIndexStoreEntry{} } +func (m *BackIndexStoreEntry) String() string { return proto.CompactTextString(m) } +func (*BackIndexStoreEntry) ProtoMessage() {} + +func (m *BackIndexStoreEntry) GetField() uint32 { + if m != nil && m.Field != nil { + return *m.Field + } + return 0 +} + +func (m *BackIndexStoreEntry) GetArrayPositions() []uint64 { + if m != nil { + return m.ArrayPositions + } + return nil +} + +type BackIndexRowValue struct { + TermsEntries []*BackIndexTermsEntry `protobuf:"bytes,1,rep,name=termsEntries" json:"termsEntries,omitempty"` + StoredEntries []*BackIndexStoreEntry `protobuf:"bytes,2,rep,name=storedEntries" json:"storedEntries,omitempty"` + XXX_unrecognized []byte `json:"-"` +} + +func (m *BackIndexRowValue) Reset() { *m = BackIndexRowValue{} } +func (m *BackIndexRowValue) String() string { return proto.CompactTextString(m) } +func (*BackIndexRowValue) ProtoMessage() {} + +func (m *BackIndexRowValue) GetTermsEntries() []*BackIndexTermsEntry { + if m != nil 
{ + return m.TermsEntries + } + return nil +} + +func (m *BackIndexRowValue) GetStoredEntries() []*BackIndexStoreEntry { + if m != nil { + return m.StoredEntries + } + return nil +} + +func (m *BackIndexTermsEntry) Unmarshal(data []byte) error { + var hasFields [1]uint64 + l := len(data) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Field", wireType) + } + var v uint32 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + v |= (uint32(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + m.Field = &v + hasFields[0] |= uint64(0x00000001) + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Terms", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + stringLen |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + postIndex := iNdEx + int(stringLen) + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Terms = append(m.Terms, string(data[iNdEx:postIndex])) + iNdEx = postIndex + default: + var sizeOfWire int + for { + sizeOfWire++ + wire >>= 7 + if wire == 0 { + break + } + } + iNdEx -= sizeOfWire + skippy, err := skipUpsidedown(data[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthUpsidedown + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + if hasFields[0]&uint64(0x00000001) == 0 { + return new(github_com_golang_protobuf_proto.RequiredNotSetError) + } + + return nil +} +func (m *BackIndexStoreEntry) Unmarshal(data []byte) error { + var hasFields [1]uint64 + l := len(data) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Field", wireType) + } + var v uint32 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + v |= (uint32(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + m.Field = &v + hasFields[0] |= uint64(0x00000001) + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ArrayPositions", wireType) + } + var v uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + v |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + m.ArrayPositions = append(m.ArrayPositions, v) + default: + var sizeOfWire int + for { + sizeOfWire++ + wire >>= 7 + if wire == 0 { + break + } + } + iNdEx -= sizeOfWire + skippy, err := skipUpsidedown(data[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthUpsidedown + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + if hasFields[0]&uint64(0x00000001) == 0 { + return new(github_com_golang_protobuf_proto.RequiredNotSetError) + } + + return nil +} +func (m *BackIndexRowValue) Unmarshal(data []byte) error { + l := len(data) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field TermsEntries", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + msglen |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + postIndex := iNdEx + msglen + if msglen < 0 { + return ErrInvalidLengthUpsidedown + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.TermsEntries = append(m.TermsEntries, &BackIndexTermsEntry{}) + if err := m.TermsEntries[len(m.TermsEntries)-1].Unmarshal(data[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field StoredEntries", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + msglen |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + postIndex := iNdEx + msglen + if msglen < 0 { + return ErrInvalidLengthUpsidedown + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.StoredEntries = append(m.StoredEntries, &BackIndexStoreEntry{}) + if err := m.StoredEntries[len(m.StoredEntries)-1].Unmarshal(data[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + var sizeOfWire int + for { + sizeOfWire++ + wire >>= 7 + if wire == 0 { + break + } + } + iNdEx 
-= sizeOfWire + skippy, err := skipUpsidedown(data[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthUpsidedown + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, data[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + return nil +} +func skipUpsidedown(data []byte) (n int, err error) { + l := len(data) + iNdEx := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for { + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if data[iNdEx-1] < 0x80 { + break + } + } + return iNdEx, nil + case 1: + iNdEx += 8 + return iNdEx, nil + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + iNdEx += length + if length < 0 { + return 0, ErrInvalidLengthUpsidedown + } + return iNdEx, nil + case 3: + for { + var innerWire uint64 + var start int = iNdEx + for shift := uint(0); ; shift += 7 { + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := data[iNdEx] + iNdEx++ + innerWire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + innerWireType := int(innerWire & 0x7) + if innerWireType == 4 { + break + } + next, err := skipUpsidedown(data[start:]) + if err != nil { + return 0, err + } + iNdEx = start + next + } + return iNdEx, nil + case 4: + return iNdEx, nil + case 5: + iNdEx += 4 + return iNdEx, nil + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + } + panic("unreachable") +} + +var ( + ErrInvalidLengthUpsidedown = fmt.Errorf("proto: negative length found during unmarshaling") +) + +func (m *BackIndexTermsEntry) 
Size() (n int) { + var l int + _ = l + if m.Field != nil { + n += 1 + sovUpsidedown(uint64(*m.Field)) + } + if len(m.Terms) > 0 { + for _, s := range m.Terms { + l = len(s) + n += 1 + l + sovUpsidedown(uint64(l)) + } + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *BackIndexStoreEntry) Size() (n int) { + var l int + _ = l + if m.Field != nil { + n += 1 + sovUpsidedown(uint64(*m.Field)) + } + if len(m.ArrayPositions) > 0 { + for _, e := range m.ArrayPositions { + n += 1 + sovUpsidedown(uint64(e)) + } + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *BackIndexRowValue) Size() (n int) { + var l int + _ = l + if len(m.TermsEntries) > 0 { + for _, e := range m.TermsEntries { + l = e.Size() + n += 1 + l + sovUpsidedown(uint64(l)) + } + } + if len(m.StoredEntries) > 0 { + for _, e := range m.StoredEntries { + l = e.Size() + n += 1 + l + sovUpsidedown(uint64(l)) + } + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func sovUpsidedown(x uint64) (n int) { + for { + n++ + x >>= 7 + if x == 0 { + break + } + } + return n +} +func sozUpsidedown(x uint64) (n int) { + return sovUpsidedown(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *BackIndexTermsEntry) Marshal() (data []byte, err error) { + size := m.Size() + data = make([]byte, size) + n, err := m.MarshalTo(data) + if err != nil { + return nil, err + } + return data[:n], nil +} + +func (m *BackIndexTermsEntry) MarshalTo(data []byte) (n int, err error) { + var i int + _ = i + var l int + _ = l + if m.Field == nil { + return 0, new(github_com_golang_protobuf_proto.RequiredNotSetError) + } else { + data[i] = 0x8 + i++ + i = encodeVarintUpsidedown(data, i, uint64(*m.Field)) + } + if len(m.Terms) > 0 { + for _, s := range m.Terms { + data[i] = 0x12 + i++ + l = len(s) + for l >= 1<<7 { + data[i] = uint8(uint64(l)&0x7f | 0x80) + l >>= 7 + i++ + } + data[i] = uint8(l) + i++ + i += 
copy(data[i:], s) + } + } + if m.XXX_unrecognized != nil { + i += copy(data[i:], m.XXX_unrecognized) + } + return i, nil +} + +func (m *BackIndexStoreEntry) Marshal() (data []byte, err error) { + size := m.Size() + data = make([]byte, size) + n, err := m.MarshalTo(data) + if err != nil { + return nil, err + } + return data[:n], nil +} + +func (m *BackIndexStoreEntry) MarshalTo(data []byte) (n int, err error) { + var i int + _ = i + var l int + _ = l + if m.Field == nil { + return 0, new(github_com_golang_protobuf_proto.RequiredNotSetError) + } else { + data[i] = 0x8 + i++ + i = encodeVarintUpsidedown(data, i, uint64(*m.Field)) + } + if len(m.ArrayPositions) > 0 { + for _, num := range m.ArrayPositions { + data[i] = 0x10 + i++ + i = encodeVarintUpsidedown(data, i, uint64(num)) + } + } + if m.XXX_unrecognized != nil { + i += copy(data[i:], m.XXX_unrecognized) + } + return i, nil +} + +func (m *BackIndexRowValue) Marshal() (data []byte, err error) { + size := m.Size() + data = make([]byte, size) + n, err := m.MarshalTo(data) + if err != nil { + return nil, err + } + return data[:n], nil +} + +func (m *BackIndexRowValue) MarshalTo(data []byte) (n int, err error) { + var i int + _ = i + var l int + _ = l + if len(m.TermsEntries) > 0 { + for _, msg := range m.TermsEntries { + data[i] = 0xa + i++ + i = encodeVarintUpsidedown(data, i, uint64(msg.Size())) + n, err := msg.MarshalTo(data[i:]) + if err != nil { + return 0, err + } + i += n + } + } + if len(m.StoredEntries) > 0 { + for _, msg := range m.StoredEntries { + data[i] = 0x12 + i++ + i = encodeVarintUpsidedown(data, i, uint64(msg.Size())) + n, err := msg.MarshalTo(data[i:]) + if err != nil { + return 0, err + } + i += n + } + } + if m.XXX_unrecognized != nil { + i += copy(data[i:], m.XXX_unrecognized) + } + return i, nil +} + +func encodeFixed64Upsidedown(data []byte, offset int, v uint64) int { + data[offset] = uint8(v) + data[offset+1] = uint8(v >> 8) + data[offset+2] = uint8(v >> 16) + data[offset+3] = uint8(v >> 
24) + data[offset+4] = uint8(v >> 32) + data[offset+5] = uint8(v >> 40) + data[offset+6] = uint8(v >> 48) + data[offset+7] = uint8(v >> 56) + return offset + 8 +} +func encodeFixed32Upsidedown(data []byte, offset int, v uint32) int { + data[offset] = uint8(v) + data[offset+1] = uint8(v >> 8) + data[offset+2] = uint8(v >> 16) + data[offset+3] = uint8(v >> 24) + return offset + 4 +} +func encodeVarintUpsidedown(data []byte, offset int, v uint64) int { + for v >= 1<<7 { + data[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + data[offset] = uint8(v) + return offset + 1 +} diff --git a/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.proto b/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.proto new file mode 100644 index 0000000..cf0492a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index/upsidedown/upsidedown.proto @@ -0,0 +1,14 @@ +message BackIndexTermsEntry { + required uint32 field = 1; + repeated string terms = 2; +} + +message BackIndexStoreEntry { + required uint32 field = 1; + repeated uint64 arrayPositions = 2; +} + +message BackIndexRowValue { + repeated BackIndexTermsEntry termsEntries = 1; + repeated BackIndexStoreEntry storedEntries = 2; +} diff --git a/vendor/github.com/blevesearch/bleve/index_alias.go b/vendor/github.com/blevesearch/bleve/index_alias.go new file mode 100644 index 0000000..7a85d72 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index_alias.go @@ -0,0 +1,37 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +// An IndexAlias is a wrapper around one or more +// Index objects. It has two distinct modes of +// operation. +// 1. When it points to a single index, ALL index +// operations are valid and will be passed through +// to the underlying index. +// 2. When it points to more than one index, the only +// valid operation is Search. In this case the +// search will be performed across all the +// underlying indexes and the results merged. +// Calls to Add/Remove/Swap the underlying indexes +// are atomic, so you can safely change the +// underlying Index objects while other components +// are performing operations. +type IndexAlias interface { + Index + + Add(i ...Index) + Remove(i ...Index) + Swap(in, out []Index) +} diff --git a/vendor/github.com/blevesearch/bleve/index_alias_impl.go b/vendor/github.com/blevesearch/bleve/index_alias_impl.go new file mode 100644 index 0000000..5aa57d8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index_alias_impl.go @@ -0,0 +1,614 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bleve + +import ( + "context" + "sync" + "time" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" +) + +type indexAliasImpl struct { + name string + indexes []Index + mutex sync.RWMutex + open bool +} + +// NewIndexAlias creates a new IndexAlias over the provided +// Index objects. +func NewIndexAlias(indexes ...Index) *indexAliasImpl { + return &indexAliasImpl{ + name: "alias", + indexes: indexes, + open: true, + } +} + +// VisitIndexes invokes the visit callback on every +// indexes included in the index alias. +func (i *indexAliasImpl) VisitIndexes(visit func(Index)) { + i.mutex.RLock() + for _, idx := range i.indexes { + visit(idx) + } + i.mutex.RUnlock() +} + +func (i *indexAliasImpl) isAliasToSingleIndex() error { + if len(i.indexes) < 1 { + return ErrorAliasEmpty + } else if len(i.indexes) > 1 { + return ErrorAliasMulti + } + return nil +} + +func (i *indexAliasImpl) Index(id string, data interface{}) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return err + } + + return i.indexes[0].Index(id, data) +} + +func (i *indexAliasImpl) Delete(id string) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return err + } + + return i.indexes[0].Delete(id) +} + +func (i *indexAliasImpl) Batch(b *Batch) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return err + } + + return i.indexes[0].Batch(b) +} + +func (i *indexAliasImpl) Document(id string) (*document.Document, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + err := 
i.isAliasToSingleIndex() + if err != nil { + return nil, err + } + + return i.indexes[0].Document(id) +} + +func (i *indexAliasImpl) DocCount() (uint64, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + rv := uint64(0) + + if !i.open { + return 0, ErrorIndexClosed + } + + for _, index := range i.indexes { + otherCount, err := index.DocCount() + if err == nil { + rv += otherCount + } + // tolerate errors to produce partial counts + } + + return rv, nil +} + +func (i *indexAliasImpl) Search(req *SearchRequest) (*SearchResult, error) { + return i.SearchInContext(context.Background(), req) +} + +func (i *indexAliasImpl) SearchInContext(ctx context.Context, req *SearchRequest) (*SearchResult, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + if len(i.indexes) < 1 { + return nil, ErrorAliasEmpty + } + + // short circuit the simple case + if len(i.indexes) == 1 { + return i.indexes[0].SearchInContext(ctx, req) + } + + return MultiSearch(ctx, req, i.indexes...) 
+} + +func (i *indexAliasImpl) Fields() ([]string, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil, err + } + + return i.indexes[0].Fields() +} + +func (i *indexAliasImpl) FieldDict(field string) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := i.indexes[0].FieldDict(field) + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexAliasImplFieldDict{ + index: i, + fieldDict: fieldDict, + }, nil +} + +func (i *indexAliasImpl) FieldDictRange(field string, startTerm []byte, endTerm []byte) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := i.indexes[0].FieldDictRange(field, startTerm, endTerm) + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexAliasImplFieldDict{ + index: i, + fieldDict: fieldDict, + }, nil +} + +func (i *indexAliasImpl) FieldDictPrefix(field string, termPrefix []byte) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := i.indexes[0].FieldDictPrefix(field, termPrefix) + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexAliasImplFieldDict{ + index: i, + fieldDict: fieldDict, + }, nil +} + +func (i *indexAliasImpl) Close() error { + i.mutex.Lock() + defer i.mutex.Unlock() + + i.open = false + return nil +} + +func (i *indexAliasImpl) Mapping() mapping.IndexMapping { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if 
!i.open { + return nil + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil + } + + return i.indexes[0].Mapping() +} + +func (i *indexAliasImpl) Stats() *IndexStat { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil + } + + return i.indexes[0].Stats() +} + +func (i *indexAliasImpl) StatsMap() map[string]interface{} { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil + } + + return i.indexes[0].StatsMap() +} + +func (i *indexAliasImpl) GetInternal(key []byte) ([]byte, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil, err + } + + return i.indexes[0].GetInternal(key) +} + +func (i *indexAliasImpl) SetInternal(key, val []byte) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return err + } + + return i.indexes[0].SetInternal(key, val) +} + +func (i *indexAliasImpl) DeleteInternal(key []byte) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return err + } + + return i.indexes[0].DeleteInternal(key) +} + +func (i *indexAliasImpl) Advanced() (index.Index, store.KVStore, error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, nil, ErrorIndexClosed + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil, nil, err + } + + return i.indexes[0].Advanced() +} + +func (i *indexAliasImpl) Add(indexes ...Index) { + i.mutex.Lock() + defer i.mutex.Unlock() + + i.indexes = append(i.indexes, indexes...) 
+} + +func (i *indexAliasImpl) removeSingle(index Index) { + for pos, in := range i.indexes { + if in == index { + i.indexes = append(i.indexes[:pos], i.indexes[pos+1:]...) + break + } + } +} + +func (i *indexAliasImpl) Remove(indexes ...Index) { + i.mutex.Lock() + defer i.mutex.Unlock() + + for _, in := range indexes { + i.removeSingle(in) + } +} + +func (i *indexAliasImpl) Swap(in, out []Index) { + i.mutex.Lock() + defer i.mutex.Unlock() + + // add + i.indexes = append(i.indexes, in...) + + // delete + for _, ind := range out { + i.removeSingle(ind) + } +} + +// createChildSearchRequest creates a separate +// request from the original +// For now, avoid data race on req structure. +// TODO disable highlight/field load on child +// requests, and add code to do this only on +// the actual final results. +// Perhaps that part needs to be optional, +// could be slower in remote usages. +func createChildSearchRequest(req *SearchRequest) *SearchRequest { + rv := SearchRequest{ + Query: req.Query, + Size: req.Size + req.From, + From: 0, + Highlight: req.Highlight, + Fields: req.Fields, + Facets: req.Facets, + Explain: req.Explain, + Sort: req.Sort.Copy(), + IncludeLocations: req.IncludeLocations, + Score: req.Score, + SearchAfter: req.SearchAfter, + SearchBefore: req.SearchBefore, + } + return &rv +} + +type asyncSearchResult struct { + Name string + Result *SearchResult + Err error +} + +// MultiSearch executes a SearchRequest across multiple Index objects, +// then merges the results. The indexes must honor any ctx deadline. 
+func MultiSearch(ctx context.Context, req *SearchRequest, indexes ...Index) (*SearchResult, error) { + + searchStart := time.Now() + asyncResults := make(chan *asyncSearchResult, len(indexes)) + + var reverseQueryExecution bool + if req.SearchBefore != nil { + reverseQueryExecution = true + req.Sort.Reverse() + req.SearchAfter = req.SearchBefore + req.SearchBefore = nil + } + + // run search on each index in separate go routine + var waitGroup sync.WaitGroup + + var searchChildIndex = func(in Index, childReq *SearchRequest) { + rv := asyncSearchResult{Name: in.Name()} + rv.Result, rv.Err = in.SearchInContext(ctx, childReq) + asyncResults <- &rv + waitGroup.Done() + } + + waitGroup.Add(len(indexes)) + for _, in := range indexes { + go searchChildIndex(in, createChildSearchRequest(req)) + } + + // on another go routine, close after finished + go func() { + waitGroup.Wait() + close(asyncResults) + }() + + var sr *SearchResult + indexErrors := make(map[string]error) + + for asr := range asyncResults { + if asr.Err == nil { + if sr == nil { + // first result + sr = asr.Result + } else { + // merge with previous + sr.Merge(asr.Result) + } + } else { + indexErrors[asr.Name] = asr.Err + } + } + + // merge just concatenated all the hits + // now lets clean it up + + // handle case where no results were successful + if sr == nil { + sr = &SearchResult{ + Status: &SearchStatus{ + Errors: make(map[string]error), + }, + } + } + + sortFunc := req.SortFunc() + // sort all hits with the requested order + if len(req.Sort) > 0 { + sorter := newSearchHitSorter(req.Sort, sr.Hits) + sortFunc(sorter) + } + + // now skip over the correct From + if req.From > 0 && len(sr.Hits) > req.From { + sr.Hits = sr.Hits[req.From:] + } else if req.From > 0 { + sr.Hits = search.DocumentMatchCollection{} + } + + // now trim to the correct size + if req.Size > 0 && len(sr.Hits) > req.Size { + sr.Hits = sr.Hits[0:req.Size] + } + + // fix up facets + for name, fr := range req.Facets { + 
sr.Facets.Fixup(name, fr.Size) + } + + if reverseQueryExecution { + // reverse the sort back to the original + req.Sort.Reverse() + // resort using the original order + mhs := newSearchHitSorter(req.Sort, sr.Hits) + sortFunc(mhs) + // reset request + req.SearchBefore = req.SearchAfter + req.SearchAfter = nil + } + + // fix up original request + sr.Request = req + searchDuration := time.Since(searchStart) + sr.Took = searchDuration + + // fix up errors + if len(indexErrors) > 0 { + if sr.Status.Errors == nil { + sr.Status.Errors = make(map[string]error) + } + for indexName, indexErr := range indexErrors { + sr.Status.Errors[indexName] = indexErr + sr.Status.Total++ + sr.Status.Failed++ + } + } + + return sr, nil +} + +func (i *indexAliasImpl) NewBatch() *Batch { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil + } + + err := i.isAliasToSingleIndex() + if err != nil { + return nil + } + + return i.indexes[0].NewBatch() +} + +func (i *indexAliasImpl) Name() string { + return i.name +} + +func (i *indexAliasImpl) SetName(name string) { + i.name = name +} + +type indexAliasImplFieldDict struct { + index *indexAliasImpl + fieldDict index.FieldDict +} + +func (f *indexAliasImplFieldDict) Next() (*index.DictEntry, error) { + return f.fieldDict.Next() +} + +func (f *indexAliasImplFieldDict) Close() error { + defer f.index.mutex.RUnlock() + return f.fieldDict.Close() +} diff --git a/vendor/github.com/blevesearch/bleve/index_impl.go b/vendor/github.com/blevesearch/bleve/index_impl.go new file mode 100644 index 0000000..0520fe4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index_impl.go @@ -0,0 +1,924 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +import ( + "context" + "encoding/json" + "fmt" + "os" + "sync" + "sync/atomic" + "time" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/store" + "github.com/blevesearch/bleve/index/upsidedown" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/collector" + "github.com/blevesearch/bleve/search/facet" + "github.com/blevesearch/bleve/search/highlight" +) + +type indexImpl struct { + path string + name string + meta *indexMeta + i index.Index + m mapping.IndexMapping + mutex sync.RWMutex + open bool + stats *IndexStat +} + +const storePath = "store" + +var mappingInternalKey = []byte("_mapping") + +const SearchQueryStartCallbackKey = "_search_query_start_callback_key" +const SearchQueryEndCallbackKey = "_search_query_end_callback_key" + +type SearchQueryStartCallbackFn func(size uint64) error +type SearchQueryEndCallbackFn func(size uint64) error + +func indexStorePath(path string) string { + return path + string(os.PathSeparator) + storePath +} + +func newIndexUsing(path string, mapping mapping.IndexMapping, indexType string, kvstore string, kvconfig map[string]interface{}) (*indexImpl, error) { + // first validate the mapping + err := mapping.Validate() + if err != nil { + return nil, err + } + + if kvconfig == nil { + kvconfig = map[string]interface{}{} + } + + if kvstore == "" { + return nil, fmt.Errorf("bleve not configured for file based 
indexing") + } + + rv := indexImpl{ + path: path, + name: path, + m: mapping, + meta: newIndexMeta(indexType, kvstore, kvconfig), + } + rv.stats = &IndexStat{i: &rv} + // at this point there is hope that we can be successful, so save index meta + if path != "" { + err = rv.meta.Save(path) + if err != nil { + return nil, err + } + kvconfig["create_if_missing"] = true + kvconfig["error_if_exists"] = true + kvconfig["path"] = indexStorePath(path) + } else { + kvconfig["path"] = "" + } + + // open the index + indexTypeConstructor := registry.IndexTypeConstructorByName(rv.meta.IndexType) + if indexTypeConstructor == nil { + return nil, ErrorUnknownIndexType + } + + rv.i, err = indexTypeConstructor(rv.meta.Storage, kvconfig, Config.analysisQueue) + if err != nil { + return nil, err + } + err = rv.i.Open() + if err != nil { + if err == index.ErrorUnknownStorageType { + return nil, ErrorUnknownStorageType + } + return nil, err + } + defer func(rv *indexImpl) { + if !rv.open { + rv.i.Close() + } + }(&rv) + + // now persist the mapping + mappingBytes, err := json.Marshal(mapping) + if err != nil { + return nil, err + } + err = rv.i.SetInternal(mappingInternalKey, mappingBytes) + if err != nil { + return nil, err + } + + // mark the index as open + rv.mutex.Lock() + defer rv.mutex.Unlock() + rv.open = true + indexStats.Register(&rv) + return &rv, nil +} + +func openIndexUsing(path string, runtimeConfig map[string]interface{}) (rv *indexImpl, err error) { + rv = &indexImpl{ + path: path, + name: path, + } + rv.stats = &IndexStat{i: rv} + + rv.meta, err = openIndexMeta(path) + if err != nil { + return nil, err + } + + // backwards compatibility if index type is missing + if rv.meta.IndexType == "" { + rv.meta.IndexType = upsidedown.Name + } + + storeConfig := rv.meta.Config + if storeConfig == nil { + storeConfig = map[string]interface{}{} + } + + storeConfig["path"] = indexStorePath(path) + storeConfig["create_if_missing"] = false + storeConfig["error_if_exists"] = false + for 
rck, rcv := range runtimeConfig { + storeConfig[rck] = rcv + } + + // open the index + indexTypeConstructor := registry.IndexTypeConstructorByName(rv.meta.IndexType) + if indexTypeConstructor == nil { + return nil, ErrorUnknownIndexType + } + + rv.i, err = indexTypeConstructor(rv.meta.Storage, storeConfig, Config.analysisQueue) + if err != nil { + return nil, err + } + err = rv.i.Open() + if err != nil { + if err == index.ErrorUnknownStorageType { + return nil, ErrorUnknownStorageType + } + return nil, err + } + defer func(rv *indexImpl) { + if !rv.open { + rv.i.Close() + } + }(rv) + + // now load the mapping + indexReader, err := rv.i.Reader() + if err != nil { + return nil, err + } + defer func() { + if cerr := indexReader.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + mappingBytes, err := indexReader.GetInternal(mappingInternalKey) + if err != nil { + return nil, err + } + + var im *mapping.IndexMappingImpl + err = json.Unmarshal(mappingBytes, &im) + if err != nil { + return nil, fmt.Errorf("error parsing mapping JSON: %v\nmapping contents:\n%s", err, string(mappingBytes)) + } + + // mark the index as open + rv.mutex.Lock() + defer rv.mutex.Unlock() + rv.open = true + + // validate the mapping + err = im.Validate() + if err != nil { + // note even if the mapping is invalid + // we still return an open usable index + return rv, err + } + + rv.m = im + indexStats.Register(rv) + return rv, err +} + +// Advanced returns implementation internals +// necessary ONLY for advanced usage. +func (i *indexImpl) Advanced() (index.Index, store.KVStore, error) { + s, err := i.i.Advanced() + if err != nil { + return nil, nil, err + } + return i.i, s, nil +} + +// Mapping returns the IndexMapping in use by this +// Index. +func (i *indexImpl) Mapping() mapping.IndexMapping { + return i.m +} + +// Index the object with the specified identifier. +// The IndexMapping for this index will determine +// how the object is indexed. 
+func (i *indexImpl) Index(id string, data interface{}) (err error) { + if id == "" { + return ErrorEmptyID + } + + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + doc := document.NewDocument(id) + err = i.m.MapDocument(doc, data) + if err != nil { + return + } + err = i.i.Update(doc) + return +} + +// IndexAdvanced takes a document.Document object +// skips the mapping and indexes it. +func (i *indexImpl) IndexAdvanced(doc *document.Document) (err error) { + if doc.ID == "" { + return ErrorEmptyID + } + + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err = i.i.Update(doc) + return +} + +// Delete entries for the specified identifier from +// the index. +func (i *indexImpl) Delete(id string) (err error) { + if id == "" { + return ErrorEmptyID + } + + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + err = i.i.Delete(id) + return +} + +// Batch executes multiple Index and Delete +// operations at the same time. There are often +// significant performance benefits when performing +// operations in a batch. +func (i *indexImpl) Batch(b *Batch) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + return i.i.Batch(b.internal) +} + +// Document is used to find the values of all the +// stored fields for a document in the index. These +// stored fields are put back into a Document object +// and returned. 
+func (i *indexImpl) Document(id string) (doc *document.Document, err error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + indexReader, err := i.i.Reader() + if err != nil { + return nil, err + } + defer func() { + if cerr := indexReader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + doc, err = indexReader.Document(id) + if err != nil { + return nil, err + } + return doc, nil +} + +// DocCount returns the number of documents in the +// index. +func (i *indexImpl) DocCount() (count uint64, err error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return 0, ErrorIndexClosed + } + + // open a reader for this search + indexReader, err := i.i.Reader() + if err != nil { + return 0, fmt.Errorf("error opening index reader %v", err) + } + defer func() { + if cerr := indexReader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + count, err = indexReader.DocCount() + return +} + +// Search executes a search request operation. +// Returns a SearchResult object or an error. +func (i *indexImpl) Search(req *SearchRequest) (sr *SearchResult, err error) { + return i.SearchInContext(context.Background(), req) +} + +var documentMatchEmptySize int +var searchContextEmptySize int +var facetResultEmptySize int +var documentEmptySize int + +func init() { + var dm search.DocumentMatch + documentMatchEmptySize = dm.Size() + + var sc search.SearchContext + searchContextEmptySize = sc.Size() + + var fr search.FacetResult + facetResultEmptySize = fr.Size() + + var d document.Document + documentEmptySize = d.Size() +} + +// memNeededForSearch is a helper function that returns an estimate of RAM +// needed to execute a search request. 
+func memNeededForSearch(req *SearchRequest, + searcher search.Searcher, + topnCollector *collector.TopNCollector) uint64 { + + backingSize := req.Size + req.From + 1 + if req.Size+req.From > collector.PreAllocSizeSkipCap { + backingSize = collector.PreAllocSizeSkipCap + 1 + } + numDocMatches := backingSize + searcher.DocumentMatchPoolSize() + + estimate := 0 + + // overhead, size in bytes from collector + estimate += topnCollector.Size() + + // pre-allocing DocumentMatchPool + estimate += searchContextEmptySize + numDocMatches*documentMatchEmptySize + + // searcher overhead + estimate += searcher.Size() + + // overhead from results, lowestMatchOutsideResults + estimate += (numDocMatches + 1) * documentMatchEmptySize + + // additional overhead from SearchResult + estimate += reflectStaticSizeSearchResult + reflectStaticSizeSearchStatus + + // overhead from facet results + if req.Facets != nil { + estimate += len(req.Facets) * facetResultEmptySize + } + + // highlighting, store + if len(req.Fields) > 0 || req.Highlight != nil { + // Size + From => number of hits + estimate += (req.Size + req.From) * documentEmptySize + } + + return uint64(estimate) +} + +// SearchInContext executes a search request operation within the provided +// Context. Returns a SearchResult object or an error. 
+func (i *indexImpl) SearchInContext(ctx context.Context, req *SearchRequest) (sr *SearchResult, err error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + searchStart := time.Now() + + if !i.open { + return nil, ErrorIndexClosed + } + + var reverseQueryExecution bool + if req.SearchBefore != nil { + reverseQueryExecution = true + req.Sort.Reverse() + req.SearchAfter = req.SearchBefore + req.SearchBefore = nil + } + + var coll *collector.TopNCollector + if req.SearchAfter != nil { + coll = collector.NewTopNCollectorAfter(req.Size, req.Sort, req.SearchAfter) + } else { + coll = collector.NewTopNCollector(req.Size, req.From, req.Sort) + } + + // open a reader for this search + indexReader, err := i.i.Reader() + if err != nil { + return nil, fmt.Errorf("error opening index reader %v", err) + } + defer func() { + if cerr := indexReader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + searcher, err := req.Query.Searcher(indexReader, i.m, search.SearcherOptions{ + Explain: req.Explain, + IncludeTermVectors: req.IncludeLocations || req.Highlight != nil, + Score: req.Score, + }) + if err != nil { + return nil, err + } + defer func() { + if serr := searcher.Close(); err == nil && serr != nil { + err = serr + } + }() + + if req.Facets != nil { + facetsBuilder := search.NewFacetsBuilder(indexReader) + for facetName, facetRequest := range req.Facets { + if facetRequest.NumericRanges != nil { + // build numeric range facet + facetBuilder := facet.NewNumericFacetBuilder(facetRequest.Field, facetRequest.Size) + for _, nr := range facetRequest.NumericRanges { + facetBuilder.AddRange(nr.Name, nr.Min, nr.Max) + } + facetsBuilder.Add(facetName, facetBuilder) + } else if facetRequest.DateTimeRanges != nil { + // build date range facet + facetBuilder := facet.NewDateTimeFacetBuilder(facetRequest.Field, facetRequest.Size) + dateTimeParser := i.m.DateTimeParserNamed("") + for _, dr := range facetRequest.DateTimeRanges { + start, end := dr.ParseDates(dateTimeParser) + 
facetBuilder.AddRange(dr.Name, start, end) + } + facetsBuilder.Add(facetName, facetBuilder) + } else { + // build terms facet + facetBuilder := facet.NewTermsFacetBuilder(facetRequest.Field, facetRequest.Size) + facetsBuilder.Add(facetName, facetBuilder) + } + } + coll.SetFacetsBuilder(facetsBuilder) + } + + memNeeded := memNeededForSearch(req, searcher, coll) + if cb := ctx.Value(SearchQueryStartCallbackKey); cb != nil { + if cbF, ok := cb.(SearchQueryStartCallbackFn); ok { + err = cbF(memNeeded) + } + } + if err != nil { + return nil, err + } + + if cb := ctx.Value(SearchQueryEndCallbackKey); cb != nil { + if cbF, ok := cb.(SearchQueryEndCallbackFn); ok { + defer func() { + _ = cbF(memNeeded) + }() + } + } + + err = coll.Collect(ctx, searcher, indexReader) + if err != nil { + return nil, err + } + + hits := coll.Results() + + var highlighter highlight.Highlighter + + if req.Highlight != nil { + // get the right highlighter + highlighter, err = Config.Cache.HighlighterNamed(Config.DefaultHighlighter) + if err != nil { + return nil, err + } + if req.Highlight.Style != nil { + highlighter, err = Config.Cache.HighlighterNamed(*req.Highlight.Style) + if err != nil { + return nil, err + } + } + if highlighter == nil { + return nil, fmt.Errorf("no highlighter named `%s` registered", *req.Highlight.Style) + } + } + + for _, hit := range hits { + if i.name != "" { + hit.Index = i.name + } + err = LoadAndHighlightFields(hit, req, i.name, indexReader, highlighter) + if err != nil { + return nil, err + } + } + + atomic.AddUint64(&i.stats.searches, 1) + searchDuration := time.Since(searchStart) + atomic.AddUint64(&i.stats.searchTime, uint64(searchDuration)) + + if Config.SlowSearchLogThreshold > 0 && + searchDuration > Config.SlowSearchLogThreshold { + logger.Printf("slow search took %s - %v", searchDuration, req) + } + + if reverseQueryExecution { + // reverse the sort back to the original + req.Sort.Reverse() + // resort using the original order + mhs := 
newSearchHitSorter(req.Sort, hits) + req.SortFunc()(mhs) + // reset request + req.SearchBefore = req.SearchAfter + req.SearchAfter = nil + } + + return &SearchResult{ + Status: &SearchStatus{ + Total: 1, + Successful: 1, + }, + Request: req, + Hits: hits, + Total: coll.Total(), + MaxScore: coll.MaxScore(), + Took: searchDuration, + Facets: coll.FacetResults(), + }, nil +} + +func LoadAndHighlightFields(hit *search.DocumentMatch, req *SearchRequest, + indexName string, r index.IndexReader, + highlighter highlight.Highlighter) error { + if len(req.Fields) > 0 || highlighter != nil { + doc, err := r.Document(hit.ID) + if err == nil && doc != nil { + if len(req.Fields) > 0 { + fieldsToLoad := deDuplicate(req.Fields) + for _, f := range fieldsToLoad { + for _, docF := range doc.Fields { + if f == "*" || docF.Name() == f { + var value interface{} + switch docF := docF.(type) { + case *document.TextField: + value = string(docF.Value()) + case *document.NumericField: + num, err := docF.Number() + if err == nil { + value = num + } + case *document.DateTimeField: + datetime, err := docF.DateTime() + if err == nil { + value = datetime.Format(time.RFC3339) + } + case *document.BooleanField: + boolean, err := docF.Boolean() + if err == nil { + value = boolean + } + case *document.GeoPointField: + lon, err := docF.Lon() + if err == nil { + lat, err := docF.Lat() + if err == nil { + value = []float64{lon, lat} + } + } + } + if value != nil { + hit.AddFieldValue(docF.Name(), value) + } + } + } + } + } + if highlighter != nil { + highlightFields := req.Highlight.Fields + if highlightFields == nil { + // add all fields with matches + highlightFields = make([]string, 0, len(hit.Locations)) + for k := range hit.Locations { + highlightFields = append(highlightFields, k) + } + } + for _, hf := range highlightFields { + highlighter.BestFragmentsInField(hit, doc, hf, 1) + } + } + } else if doc == nil { + // unexpected case, a doc ID that was found as a search hit + // was unable to be 
found during document lookup + return ErrorIndexReadInconsistency + } + } + + return nil +} + +// Fields returns the name of all the fields this +// Index has operated on. +func (i *indexImpl) Fields() (fields []string, err error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + indexReader, err := i.i.Reader() + if err != nil { + return nil, err + } + defer func() { + if cerr := indexReader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + fields, err = indexReader.Fields() + if err != nil { + return nil, err + } + return fields, nil +} + +func (i *indexImpl) FieldDict(field string) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + indexReader, err := i.i.Reader() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := indexReader.FieldDict(field) + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexImplFieldDict{ + index: i, + indexReader: indexReader, + fieldDict: fieldDict, + }, nil +} + +func (i *indexImpl) FieldDictRange(field string, startTerm []byte, endTerm []byte) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + indexReader, err := i.i.Reader() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := indexReader.FieldDictRange(field, startTerm, endTerm) + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexImplFieldDict{ + index: i, + indexReader: indexReader, + fieldDict: fieldDict, + }, nil +} + +func (i *indexImpl) FieldDictPrefix(field string, termPrefix []byte) (index.FieldDict, error) { + i.mutex.RLock() + + if !i.open { + i.mutex.RUnlock() + return nil, ErrorIndexClosed + } + + indexReader, err := i.i.Reader() + if err != nil { + i.mutex.RUnlock() + return nil, err + } + + fieldDict, err := indexReader.FieldDictPrefix(field, termPrefix) + if err 
!= nil { + i.mutex.RUnlock() + return nil, err + } + + return &indexImplFieldDict{ + index: i, + indexReader: indexReader, + fieldDict: fieldDict, + }, nil +} + +func (i *indexImpl) Close() error { + i.mutex.Lock() + defer i.mutex.Unlock() + + indexStats.UnRegister(i) + + i.open = false + return i.i.Close() +} + +func (i *indexImpl) Stats() *IndexStat { + return i.stats +} + +func (i *indexImpl) StatsMap() map[string]interface{} { + return i.stats.statsMap() +} + +func (i *indexImpl) GetInternal(key []byte) (val []byte, err error) { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return nil, ErrorIndexClosed + } + + reader, err := i.i.Reader() + if err != nil { + return nil, err + } + defer func() { + if cerr := reader.Close(); err == nil && cerr != nil { + err = cerr + } + }() + + val, err = reader.GetInternal(key) + if err != nil { + return nil, err + } + return val, nil +} + +func (i *indexImpl) SetInternal(key, val []byte) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + return i.i.SetInternal(key, val) +} + +func (i *indexImpl) DeleteInternal(key []byte) error { + i.mutex.RLock() + defer i.mutex.RUnlock() + + if !i.open { + return ErrorIndexClosed + } + + return i.i.DeleteInternal(key) +} + +// NewBatch creates a new empty batch. 
+func (i *indexImpl) NewBatch() *Batch { + return &Batch{ + index: i, + internal: index.NewBatch(), + } +} + +func (i *indexImpl) Name() string { + return i.name +} + +func (i *indexImpl) SetName(name string) { + indexStats.UnRegister(i) + i.name = name + indexStats.Register(i) +} + +type indexImplFieldDict struct { + index *indexImpl + indexReader index.IndexReader + fieldDict index.FieldDict +} + +func (f *indexImplFieldDict) Next() (*index.DictEntry, error) { + return f.fieldDict.Next() +} + +func (f *indexImplFieldDict) Close() error { + defer f.index.mutex.RUnlock() + err := f.fieldDict.Close() + if err != nil { + return err + } + return f.indexReader.Close() +} + +// helper function to remove duplicate entries from slice of strings +func deDuplicate(fields []string) []string { + entries := make(map[string]struct{}) + ret := []string{} + for _, entry := range fields { + if _, exists := entries[entry]; !exists { + entries[entry] = struct{}{} + ret = append(ret, entry) + } + } + return ret +} + +type searchHitSorter struct { + hits search.DocumentMatchCollection + sort search.SortOrder + cachedScoring []bool + cachedDesc []bool +} + +func newSearchHitSorter(sort search.SortOrder, hits search.DocumentMatchCollection) *searchHitSorter { + return &searchHitSorter{ + sort: sort, + hits: hits, + cachedScoring: sort.CacheIsScore(), + cachedDesc: sort.CacheDescending(), + } +} + +func (m *searchHitSorter) Len() int { return len(m.hits) } +func (m *searchHitSorter) Swap(i, j int) { m.hits[i], m.hits[j] = m.hits[j], m.hits[i] } +func (m *searchHitSorter) Less(i, j int) bool { + c := m.sort.Compare(m.cachedScoring, m.cachedDesc, m.hits[i], m.hits[j]) + return c < 0 +} diff --git a/vendor/github.com/blevesearch/bleve/index_meta.go b/vendor/github.com/blevesearch/bleve/index_meta.go new file mode 100644 index 0000000..d814799 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index_meta.go @@ -0,0 +1,97 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +import ( + "encoding/json" + "io/ioutil" + "os" + "path/filepath" + + "github.com/blevesearch/bleve/index/upsidedown" +) + +const metaFilename = "index_meta.json" + +type indexMeta struct { + Storage string `json:"storage"` + IndexType string `json:"index_type"` + Config map[string]interface{} `json:"config,omitempty"` +} + +func newIndexMeta(indexType string, storage string, config map[string]interface{}) *indexMeta { + return &indexMeta{ + IndexType: indexType, + Storage: storage, + Config: config, + } +} + +func openIndexMeta(path string) (*indexMeta, error) { + if _, err := os.Stat(path); os.IsNotExist(err) { + return nil, ErrorIndexPathDoesNotExist + } + indexMetaPath := indexMetaPath(path) + metaBytes, err := ioutil.ReadFile(indexMetaPath) + if err != nil { + return nil, ErrorIndexMetaMissing + } + var im indexMeta + err = json.Unmarshal(metaBytes, &im) + if err != nil { + return nil, ErrorIndexMetaCorrupt + } + if im.IndexType == "" { + im.IndexType = upsidedown.Name + } + return &im, nil +} + +func (i *indexMeta) Save(path string) (err error) { + indexMetaPath := indexMetaPath(path) + // ensure any necessary parent directories exist + err = os.MkdirAll(path, 0700) + if err != nil { + if os.IsExist(err) { + return ErrorIndexPathExists + } + return err + } + metaBytes, err := json.Marshal(i) + if err != nil { + return err + } + indexMetaFile, err := os.OpenFile(indexMetaPath, 
os.O_RDWR|os.O_CREATE|os.O_EXCL, 0666) + if err != nil { + if os.IsExist(err) { + return ErrorIndexPathExists + } + return err + } + defer func() { + if ierr := indexMetaFile.Close(); err == nil && ierr != nil { + err = ierr + } + }() + _, err = indexMetaFile.Write(metaBytes) + if err != nil { + return err + } + return nil +} + +func indexMetaPath(path string) string { + return filepath.Join(path, metaFilename) +} diff --git a/vendor/github.com/blevesearch/bleve/index_stats.go b/vendor/github.com/blevesearch/bleve/index_stats.go new file mode 100644 index 0000000..2d303f6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/index_stats.go @@ -0,0 +1,75 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bleve + +import ( + "encoding/json" + "sync" + "sync/atomic" +) + +type IndexStat struct { + searches uint64 + searchTime uint64 + i *indexImpl +} + +func (is *IndexStat) statsMap() map[string]interface{} { + m := map[string]interface{}{} + m["index"] = is.i.i.StatsMap() + m["searches"] = atomic.LoadUint64(&is.searches) + m["search_time"] = atomic.LoadUint64(&is.searchTime) + return m +} + +func (is *IndexStat) MarshalJSON() ([]byte, error) { + m := is.statsMap() + return json.Marshal(m) +} + +type IndexStats struct { + indexes map[string]*IndexStat + mutex sync.RWMutex +} + +func NewIndexStats() *IndexStats { + return &IndexStats{ + indexes: make(map[string]*IndexStat), + } +} + +func (i *IndexStats) Register(index Index) { + i.mutex.Lock() + defer i.mutex.Unlock() + i.indexes[index.Name()] = index.Stats() +} + +func (i *IndexStats) UnRegister(index Index) { + i.mutex.Lock() + defer i.mutex.Unlock() + delete(i.indexes, index.Name()) +} + +func (i *IndexStats) String() string { + i.mutex.RLock() + defer i.mutex.RUnlock() + bytes, err := json.Marshal(i.indexes) + if err != nil { + return "error marshaling stats" + } + return string(bytes) +} + +var indexStats *IndexStats diff --git a/vendor/github.com/blevesearch/bleve/mapping.go b/vendor/github.com/blevesearch/bleve/mapping.go new file mode 100644 index 0000000..76238dc --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping.go @@ -0,0 +1,65 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package bleve + +import "github.com/blevesearch/bleve/mapping" + +// NewIndexMapping creates a new IndexMapping that will use all the default indexing rules +func NewIndexMapping() *mapping.IndexMappingImpl { + return mapping.NewIndexMapping() +} + +// NewDocumentMapping returns a new document mapping +// with all the default values. +func NewDocumentMapping() *mapping.DocumentMapping { + return mapping.NewDocumentMapping() +} + +// NewDocumentStaticMapping returns a new document +// mapping that will not automatically index parts +// of a document without an explicit mapping. +func NewDocumentStaticMapping() *mapping.DocumentMapping { + return mapping.NewDocumentStaticMapping() +} + +// NewDocumentDisabledMapping returns a new document +// mapping that will not perform any indexing. +func NewDocumentDisabledMapping() *mapping.DocumentMapping { + return mapping.NewDocumentDisabledMapping() +} + +// NewTextFieldMapping returns a default field mapping for text +func NewTextFieldMapping() *mapping.FieldMapping { + return mapping.NewTextFieldMapping() +} + +// NewNumericFieldMapping returns a default field mapping for numbers +func NewNumericFieldMapping() *mapping.FieldMapping { + return mapping.NewNumericFieldMapping() +} + +// NewDateTimeFieldMapping returns a default field mapping for dates +func NewDateTimeFieldMapping() *mapping.FieldMapping { + return mapping.NewDateTimeFieldMapping() +} + +// NewBooleanFieldMapping returns a default field mapping for booleans +func NewBooleanFieldMapping() *mapping.FieldMapping { + return mapping.NewBooleanFieldMapping() +} + +func NewGeoPointFieldMapping() *mapping.FieldMapping { + return mapping.NewGeoPointFieldMapping() +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/analysis.go b/vendor/github.com/blevesearch/bleve/mapping/analysis.go new file mode 100644 index 0000000..03e3cd0 --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/mapping/analysis.go @@ -0,0 +1,99 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mapping + +type customAnalysis struct { + CharFilters map[string]map[string]interface{} `json:"char_filters,omitempty"` + Tokenizers map[string]map[string]interface{} `json:"tokenizers,omitempty"` + TokenMaps map[string]map[string]interface{} `json:"token_maps,omitempty"` + TokenFilters map[string]map[string]interface{} `json:"token_filters,omitempty"` + Analyzers map[string]map[string]interface{} `json:"analyzers,omitempty"` + DateTimeParsers map[string]map[string]interface{} `json:"date_time_parsers,omitempty"` +} + +func (c *customAnalysis) registerAll(i *IndexMappingImpl) error { + for name, config := range c.CharFilters { + _, err := i.cache.DefineCharFilter(name, config) + if err != nil { + return err + } + } + + if len(c.Tokenizers) > 0 { + // put all the names in map tracking work to do + todo := map[string]struct{}{} + for name := range c.Tokenizers { + todo[name] = struct{}{} + } + registered := 1 + errs := []error{} + // as long as we keep making progress, keep going + for len(todo) > 0 && registered > 0 { + registered = 0 + errs = []error{} + for name := range todo { + config := c.Tokenizers[name] + _, err := i.cache.DefineTokenizer(name, config) + if err != nil { + errs = append(errs, err) + } else { + delete(todo, name) + registered++ + } + } + } + + if len(errs) > 0 { + 
return errs[0] + } + } + for name, config := range c.TokenMaps { + _, err := i.cache.DefineTokenMap(name, config) + if err != nil { + return err + } + } + for name, config := range c.TokenFilters { + _, err := i.cache.DefineTokenFilter(name, config) + if err != nil { + return err + } + } + for name, config := range c.Analyzers { + _, err := i.cache.DefineAnalyzer(name, config) + if err != nil { + return err + } + } + for name, config := range c.DateTimeParsers { + _, err := i.cache.DefineDateTimeParser(name, config) + if err != nil { + return err + } + } + return nil +} + +func newCustomAnalysis() *customAnalysis { + rv := customAnalysis{ + CharFilters: make(map[string]map[string]interface{}), + Tokenizers: make(map[string]map[string]interface{}), + TokenMaps: make(map[string]map[string]interface{}), + TokenFilters: make(map[string]map[string]interface{}), + Analyzers: make(map[string]map[string]interface{}), + DateTimeParsers: make(map[string]map[string]interface{}), + } + return &rv +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/document.go b/vendor/github.com/blevesearch/bleve/mapping/document.go new file mode 100644 index 0000000..dd42fab --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping/document.go @@ -0,0 +1,558 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mapping + +import ( + "encoding" + "encoding/json" + "fmt" + "reflect" + "time" + + "github.com/blevesearch/bleve/registry" +) + +// A DocumentMapping describes how a type of document +// should be indexed. +// As documents can be hierarchical, named sub-sections +// of documents are mapped using the same structure in +// the Properties field. +// Each value inside a document can be indexed 0 or more +// ways. These index entries are called fields and +// are stored in the Fields field. +// Entire sections of a document can be ignored or +// excluded by setting Enabled to false. +// If not explicitly mapped, default mapping operations +// are used. To disable this automatic handling, set +// Dynamic to false. +type DocumentMapping struct { + Enabled bool `json:"enabled"` + Dynamic bool `json:"dynamic"` + Properties map[string]*DocumentMapping `json:"properties,omitempty"` + Fields []*FieldMapping `json:"fields,omitempty"` + DefaultAnalyzer string `json:"default_analyzer,omitempty"` + + // StructTagKey overrides "json" when looking for field names in struct tags + StructTagKey string `json:"struct_tag_key,omitempty"` +} + +func (dm *DocumentMapping) Validate(cache *registry.Cache) error { + var err error + if dm.DefaultAnalyzer != "" { + _, err := cache.AnalyzerNamed(dm.DefaultAnalyzer) + if err != nil { + return err + } + } + for _, property := range dm.Properties { + err = property.Validate(cache) + if err != nil { + return err + } + } + for _, field := range dm.Fields { + if field.Analyzer != "" { + _, err = cache.AnalyzerNamed(field.Analyzer) + if err != nil { + return err + } + } + if field.DateFormat != "" { + _, err = cache.DateTimeParserNamed(field.DateFormat) + if err != nil { + return err + } + } + switch field.Type { + case "text", "datetime", "number", "boolean", "geopoint": + default: + return fmt.Errorf("unknown field type: '%s'", field.Type) + } + } + return nil +} + +// analyzerNameForPath attempts to first find the field +// described by 
this path, then returns the analyzer +// configured for that field +func (dm *DocumentMapping) analyzerNameForPath(path string) string { + field := dm.fieldDescribedByPath(path) + if field != nil { + return field.Analyzer + } + return "" +} + +func (dm *DocumentMapping) fieldDescribedByPath(path string) *FieldMapping { + pathElements := decodePath(path) + if len(pathElements) > 1 { + // easy case, there is more than 1 path element remaining + // the next path element must match a property name + // at this level + for propName, subDocMapping := range dm.Properties { + if propName == pathElements[0] { + return subDocMapping.fieldDescribedByPath(encodePath(pathElements[1:])) + } + } + } + + // either the path just had one element + // or it had multiple, but no match for the first element at this level + // look for match with full path + + // first look for property name with empty field + for propName, subDocMapping := range dm.Properties { + if propName == path { + // found property name match, now look at its fields + for _, field := range subDocMapping.Fields { + if field.Name == "" || field.Name == path { + // match + return field + } + } + } + } + // next, walk the properties again, looking for field overriding the name + for propName, subDocMapping := range dm.Properties { + if propName != path { + // property name isn't a match, but field name could override it + for _, field := range subDocMapping.Fields { + if field.Name == path { + return field + } + } + } + } + + return nil +} + +// documentMappingForPath only returns EXACT matches for a sub document +// or for an explicitly mapped field, if you want to find the +// closest document mapping to a field not explicitly mapped +// use closestDocMapping +func (dm *DocumentMapping) documentMappingForPath(path string) *DocumentMapping { + pathElements := decodePath(path) + current := dm +OUTER: + for i, pathElement := range pathElements { + for name, subDocMapping := range current.Properties { + if name == 
pathElement { + current = subDocMapping + continue OUTER + } + } + // no subDocMapping matches this pathElement + // only if this is the last element check for field name + if i == len(pathElements)-1 { + for _, field := range current.Fields { + if field.Name == pathElement { + break + } + } + } + + return nil + } + return current +} + +// closestDocMapping findest the most specific document mapping that matches +// part of the provided path +func (dm *DocumentMapping) closestDocMapping(path string) *DocumentMapping { + pathElements := decodePath(path) + current := dm +OUTER: + for _, pathElement := range pathElements { + for name, subDocMapping := range current.Properties { + if name == pathElement { + current = subDocMapping + continue OUTER + } + } + break + } + return current +} + +// NewDocumentMapping returns a new document mapping +// with all the default values. +func NewDocumentMapping() *DocumentMapping { + return &DocumentMapping{ + Enabled: true, + Dynamic: true, + } +} + +// NewDocumentStaticMapping returns a new document +// mapping that will not automatically index parts +// of a document without an explicit mapping. +func NewDocumentStaticMapping() *DocumentMapping { + return &DocumentMapping{ + Enabled: true, + } +} + +// NewDocumentDisabledMapping returns a new document +// mapping that will not perform any indexing. +func NewDocumentDisabledMapping() *DocumentMapping { + return &DocumentMapping{} +} + +// AddSubDocumentMapping adds the provided DocumentMapping as a sub-mapping +// for the specified named subsection. +func (dm *DocumentMapping) AddSubDocumentMapping(property string, sdm *DocumentMapping) { + if dm.Properties == nil { + dm.Properties = make(map[string]*DocumentMapping) + } + dm.Properties[property] = sdm +} + +// AddFieldMappingsAt adds one or more FieldMappings +// at the named sub-document. If the named sub-document +// doesn't yet exist it is created for you. 
+// This is a convenience function to make most common +// mappings more concise. +// Otherwise, you would: +// subMapping := NewDocumentMapping() +// subMapping.AddFieldMapping(fieldMapping) +// parentMapping.AddSubDocumentMapping(property, subMapping) +func (dm *DocumentMapping) AddFieldMappingsAt(property string, fms ...*FieldMapping) { + if dm.Properties == nil { + dm.Properties = make(map[string]*DocumentMapping) + } + sdm, ok := dm.Properties[property] + if !ok { + sdm = NewDocumentMapping() + } + for _, fm := range fms { + sdm.AddFieldMapping(fm) + } + dm.Properties[property] = sdm +} + +// AddFieldMapping adds the provided FieldMapping for this section +// of the document. +func (dm *DocumentMapping) AddFieldMapping(fm *FieldMapping) { + if dm.Fields == nil { + dm.Fields = make([]*FieldMapping, 0) + } + dm.Fields = append(dm.Fields, fm) +} + +// UnmarshalJSON offers custom unmarshaling with optional strict validation +func (dm *DocumentMapping) UnmarshalJSON(data []byte) error { + var tmp map[string]json.RawMessage + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + + // set defaults for fields which might have been omitted + dm.Enabled = true + dm.Dynamic = true + + var invalidKeys []string + for k, v := range tmp { + switch k { + case "enabled": + err := json.Unmarshal(v, &dm.Enabled) + if err != nil { + return err + } + case "dynamic": + err := json.Unmarshal(v, &dm.Dynamic) + if err != nil { + return err + } + case "default_analyzer": + err := json.Unmarshal(v, &dm.DefaultAnalyzer) + if err != nil { + return err + } + case "properties": + err := json.Unmarshal(v, &dm.Properties) + if err != nil { + return err + } + case "fields": + err := json.Unmarshal(v, &dm.Fields) + if err != nil { + return err + } + case "struct_tag_key": + err := json.Unmarshal(v, &dm.StructTagKey) + if err != nil { + return err + } + default: + invalidKeys = append(invalidKeys, k) + } + } + + if MappingJSONStrict && len(invalidKeys) > 0 { + return 
fmt.Errorf("document mapping contains invalid keys: %v", invalidKeys) + } + + return nil +} + +func (dm *DocumentMapping) defaultAnalyzerName(path []string) string { + current := dm + rv := current.DefaultAnalyzer + for _, pathElement := range path { + var ok bool + current, ok = current.Properties[pathElement] + if !ok { + break + } + if current.DefaultAnalyzer != "" { + rv = current.DefaultAnalyzer + } + } + return rv +} + +func (dm *DocumentMapping) walkDocument(data interface{}, path []string, indexes []uint64, context *walkContext) { + // allow default "json" tag to be overridden + structTagKey := dm.StructTagKey + if structTagKey == "" { + structTagKey = "json" + } + + val := reflect.ValueOf(data) + if !val.IsValid() { + return + } + + typ := val.Type() + switch typ.Kind() { + case reflect.Map: + // FIXME can add support for other map keys in the future + if typ.Key().Kind() == reflect.String { + for _, key := range val.MapKeys() { + fieldName := key.String() + fieldVal := val.MapIndex(key).Interface() + dm.processProperty(fieldVal, append(path, fieldName), indexes, context) + } + } + case reflect.Struct: + for i := 0; i < val.NumField(); i++ { + field := typ.Field(i) + fieldName := field.Name + // anonymous fields of type struct can elide the type name + if field.Anonymous && field.Type.Kind() == reflect.Struct { + fieldName = "" + } + + // if the field has a name under the specified tag, prefer that + tag := field.Tag.Get(structTagKey) + tagFieldName := parseTagName(tag) + if tagFieldName == "-" { + continue + } + // allow tag to set field name to empty, only if anonymous + if field.Tag != "" && (tagFieldName != "" || field.Anonymous) { + fieldName = tagFieldName + } + + if val.Field(i).CanInterface() { + fieldVal := val.Field(i).Interface() + newpath := path + if fieldName != "" { + newpath = append(path, fieldName) + } + dm.processProperty(fieldVal, newpath, indexes, context) + } + } + case reflect.Slice, reflect.Array: + for i := 0; i < val.Len(); i++ { 
+ if val.Index(i).CanInterface() { + fieldVal := val.Index(i).Interface() + dm.processProperty(fieldVal, path, append(indexes, uint64(i)), context) + } + } + case reflect.Ptr: + ptrElem := val.Elem() + if ptrElem.IsValid() && ptrElem.CanInterface() { + dm.processProperty(ptrElem.Interface(), path, indexes, context) + } + case reflect.String: + dm.processProperty(val.String(), path, indexes, context) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + dm.processProperty(float64(val.Int()), path, indexes, context) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + dm.processProperty(float64(val.Uint()), path, indexes, context) + case reflect.Float32, reflect.Float64: + dm.processProperty(float64(val.Float()), path, indexes, context) + case reflect.Bool: + dm.processProperty(val.Bool(), path, indexes, context) + } + +} + +func (dm *DocumentMapping) processProperty(property interface{}, path []string, indexes []uint64, context *walkContext) { + pathString := encodePath(path) + // look to see if there is a mapping for this field + subDocMapping := dm.documentMappingForPath(pathString) + closestDocMapping := dm.closestDocMapping(pathString) + + // check to see if we even need to do further processing + if subDocMapping != nil && !subDocMapping.Enabled { + return + } + + propertyValue := reflect.ValueOf(property) + if !propertyValue.IsValid() { + // cannot do anything with the zero value + return + } + propertyType := propertyValue.Type() + switch propertyType.Kind() { + case reflect.String: + propertyValueString := propertyValue.String() + if subDocMapping != nil { + // index by explicit mapping + for _, fieldMapping := range subDocMapping.Fields { + if fieldMapping.Type == "geopoint" { + fieldMapping.processGeoPoint(property, pathString, path, indexes, context) + } else { + fieldMapping.processString(propertyValueString, pathString, path, indexes, context) + } + } + } else if closestDocMapping.Dynamic { + 
// automatic indexing behavior + + // first see if it can be parsed by the default date parser + dateTimeParser := context.im.DateTimeParserNamed(context.im.DefaultDateTimeParser) + if dateTimeParser != nil { + parsedDateTime, err := dateTimeParser.ParseDateTime(propertyValueString) + if err != nil { + // index as text + fieldMapping := newTextFieldMappingDynamic(context.im) + fieldMapping.processString(propertyValueString, pathString, path, indexes, context) + } else { + // index as datetime + fieldMapping := newDateTimeFieldMappingDynamic(context.im) + fieldMapping.processTime(parsedDateTime, pathString, path, indexes, context) + } + } + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + dm.processProperty(float64(propertyValue.Int()), path, indexes, context) + return + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + dm.processProperty(float64(propertyValue.Uint()), path, indexes, context) + return + case reflect.Float64, reflect.Float32: + propertyValFloat := propertyValue.Float() + if subDocMapping != nil { + // index by explicit mapping + for _, fieldMapping := range subDocMapping.Fields { + fieldMapping.processFloat64(propertyValFloat, pathString, path, indexes, context) + } + } else if closestDocMapping.Dynamic { + // automatic indexing behavior + fieldMapping := newNumericFieldMappingDynamic(context.im) + fieldMapping.processFloat64(propertyValFloat, pathString, path, indexes, context) + } + case reflect.Bool: + propertyValBool := propertyValue.Bool() + if subDocMapping != nil { + // index by explicit mapping + for _, fieldMapping := range subDocMapping.Fields { + fieldMapping.processBoolean(propertyValBool, pathString, path, indexes, context) + } + } else if closestDocMapping.Dynamic { + // automatic indexing behavior + fieldMapping := newBooleanFieldMappingDynamic(context.im) + fieldMapping.processBoolean(propertyValBool, pathString, path, indexes, context) + } + case reflect.Struct: + 
switch property := property.(type) { + case time.Time: + // don't descend into the time struct + if subDocMapping != nil { + // index by explicit mapping + for _, fieldMapping := range subDocMapping.Fields { + fieldMapping.processTime(property, pathString, path, indexes, context) + } + } else if closestDocMapping.Dynamic { + fieldMapping := newDateTimeFieldMappingDynamic(context.im) + fieldMapping.processTime(property, pathString, path, indexes, context) + } + case encoding.TextMarshaler: + txt, err := property.MarshalText() + if err == nil && subDocMapping != nil { + // index by explicit mapping + for _, fieldMapping := range subDocMapping.Fields { + if fieldMapping.Type == "text" { + fieldMapping.processString(string(txt), pathString, path, indexes, context) + } + } + } + dm.walkDocument(property, path, indexes, context) + default: + if subDocMapping != nil { + for _, fieldMapping := range subDocMapping.Fields { + if fieldMapping.Type == "geopoint" { + fieldMapping.processGeoPoint(property, pathString, path, indexes, context) + } + } + } + dm.walkDocument(property, path, indexes, context) + } + case reflect.Map, reflect.Slice: + if subDocMapping != nil { + for _, fieldMapping := range subDocMapping.Fields { + if fieldMapping.Type == "geopoint" { + fieldMapping.processGeoPoint(property, pathString, path, indexes, context) + } + } + } + dm.walkDocument(property, path, indexes, context) + case reflect.Ptr: + if !propertyValue.IsNil() { + switch property := property.(type) { + case encoding.TextMarshaler: + // ONLY process TextMarshaler if there is an explicit mapping + // AND all of the fiels are of type text + // OTHERWISE process field without TextMarshaler + if subDocMapping != nil { + allFieldsText := true + for _, fieldMapping := range subDocMapping.Fields { + if fieldMapping.Type != "text" { + allFieldsText = false + break + } + } + txt, err := property.MarshalText() + if err == nil && allFieldsText { + txtStr := string(txt) + for _, fieldMapping := range 
subDocMapping.Fields { + fieldMapping.processString(txtStr, pathString, path, indexes, context) + } + return + } + } + dm.walkDocument(property, path, indexes, context) + default: + dm.walkDocument(property, path, indexes, context) + } + } + default: + dm.walkDocument(property, path, indexes, context) + } +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/field.go b/vendor/github.com/blevesearch/bleve/mapping/field.go new file mode 100644 index 0000000..278faa1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping/field.go @@ -0,0 +1,343 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mapping + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/geo" +) + +// control the default behavior for dynamic fields (those not explicitly mapped) +var ( + IndexDynamic = true + StoreDynamic = true + DocValuesDynamic = true // TODO revisit default? +) + +// A FieldMapping describes how a specific item +// should be put into the index. +type FieldMapping struct { + Name string `json:"name,omitempty"` + Type string `json:"type,omitempty"` + + // Analyzer specifies the name of the analyzer to use for this field. If + // Analyzer is empty, traverse the DocumentMapping tree toward the root and + // pick the first non-empty DefaultAnalyzer found. 
If there is none, use + // the IndexMapping.DefaultAnalyzer. + Analyzer string `json:"analyzer,omitempty"` + + // Store indicates whether to store field values in the index. Stored + // values can be retrieved from search results using SearchRequest.Fields. + Store bool `json:"store,omitempty"` + Index bool `json:"index,omitempty"` + + // IncludeTermVectors, if true, makes terms occurrences to be recorded for + // this field. It includes the term position within the terms sequence and + // the term offsets in the source document field. Term vectors are required + // to perform phrase queries or terms highlighting in source documents. + IncludeTermVectors bool `json:"include_term_vectors,omitempty"` + IncludeInAll bool `json:"include_in_all,omitempty"` + DateFormat string `json:"date_format,omitempty"` + + // DocValues, if true makes the index uninverting possible for this field + // It is useful for faceting and sorting queries. + DocValues bool `json:"docvalues,omitempty"` +} + +// NewTextFieldMapping returns a default field mapping for text +func NewTextFieldMapping() *FieldMapping { + return &FieldMapping{ + Type: "text", + Store: true, + Index: true, + IncludeTermVectors: true, + IncludeInAll: true, + DocValues: true, + } +} + +func newTextFieldMappingDynamic(im *IndexMappingImpl) *FieldMapping { + rv := NewTextFieldMapping() + rv.Store = im.StoreDynamic + rv.Index = im.IndexDynamic + rv.DocValues = im.DocValuesDynamic + return rv +} + +// NewNumericFieldMapping returns a default field mapping for numbers +func NewNumericFieldMapping() *FieldMapping { + return &FieldMapping{ + Type: "number", + Store: true, + Index: true, + IncludeInAll: true, + DocValues: true, + } +} + +func newNumericFieldMappingDynamic(im *IndexMappingImpl) *FieldMapping { + rv := NewNumericFieldMapping() + rv.Store = im.StoreDynamic + rv.Index = im.IndexDynamic + rv.DocValues = im.DocValuesDynamic + return rv +} + +// NewDateTimeFieldMapping returns a default field mapping for dates +func 
NewDateTimeFieldMapping() *FieldMapping { + return &FieldMapping{ + Type: "datetime", + Store: true, + Index: true, + IncludeInAll: true, + DocValues: true, + } +} + +func newDateTimeFieldMappingDynamic(im *IndexMappingImpl) *FieldMapping { + rv := NewDateTimeFieldMapping() + rv.Store = im.StoreDynamic + rv.Index = im.IndexDynamic + rv.DocValues = im.DocValuesDynamic + return rv +} + +// NewBooleanFieldMapping returns a default field mapping for booleans +func NewBooleanFieldMapping() *FieldMapping { + return &FieldMapping{ + Type: "boolean", + Store: true, + Index: true, + IncludeInAll: true, + DocValues: true, + } +} + +func newBooleanFieldMappingDynamic(im *IndexMappingImpl) *FieldMapping { + rv := NewBooleanFieldMapping() + rv.Store = im.StoreDynamic + rv.Index = im.IndexDynamic + rv.DocValues = im.DocValuesDynamic + return rv +} + +// NewGeoPointFieldMapping returns a default field mapping for geo points +func NewGeoPointFieldMapping() *FieldMapping { + return &FieldMapping{ + Type: "geopoint", + Store: true, + Index: true, + IncludeInAll: true, + DocValues: true, + } +} + +// Options returns the indexing options for this field. 
+func (fm *FieldMapping) Options() document.IndexingOptions { + var rv document.IndexingOptions + if fm.Store { + rv |= document.StoreField + } + if fm.Index { + rv |= document.IndexField + } + if fm.IncludeTermVectors { + rv |= document.IncludeTermVectors + } + if fm.DocValues { + rv |= document.DocValues + } + return rv +} + +func (fm *FieldMapping) processString(propertyValueString string, pathString string, path []string, indexes []uint64, context *walkContext) { + fieldName := getFieldName(pathString, path, fm) + options := fm.Options() + if fm.Type == "text" { + analyzer := fm.analyzerForField(path, context) + field := document.NewTextFieldCustom(fieldName, indexes, []byte(propertyValueString), options, analyzer) + context.doc.AddField(field) + + if !fm.IncludeInAll { + context.excludedFromAll = append(context.excludedFromAll, fieldName) + } + } else if fm.Type == "datetime" { + dateTimeFormat := context.im.DefaultDateTimeParser + if fm.DateFormat != "" { + dateTimeFormat = fm.DateFormat + } + dateTimeParser := context.im.DateTimeParserNamed(dateTimeFormat) + if dateTimeParser != nil { + parsedDateTime, err := dateTimeParser.ParseDateTime(propertyValueString) + if err == nil { + fm.processTime(parsedDateTime, pathString, path, indexes, context) + } + } + } +} + +func (fm *FieldMapping) processFloat64(propertyValFloat float64, pathString string, path []string, indexes []uint64, context *walkContext) { + fieldName := getFieldName(pathString, path, fm) + if fm.Type == "number" { + options := fm.Options() + field := document.NewNumericFieldWithIndexingOptions(fieldName, indexes, propertyValFloat, options) + context.doc.AddField(field) + + if !fm.IncludeInAll { + context.excludedFromAll = append(context.excludedFromAll, fieldName) + } + } +} + +func (fm *FieldMapping) processTime(propertyValueTime time.Time, pathString string, path []string, indexes []uint64, context *walkContext) { + fieldName := getFieldName(pathString, path, fm) + if fm.Type == "datetime" { + 
options := fm.Options() + field, err := document.NewDateTimeFieldWithIndexingOptions(fieldName, indexes, propertyValueTime, options) + if err == nil { + context.doc.AddField(field) + } else { + logger.Printf("could not build date %v", err) + } + + if !fm.IncludeInAll { + context.excludedFromAll = append(context.excludedFromAll, fieldName) + } + } +} + +func (fm *FieldMapping) processBoolean(propertyValueBool bool, pathString string, path []string, indexes []uint64, context *walkContext) { + fieldName := getFieldName(pathString, path, fm) + if fm.Type == "boolean" { + options := fm.Options() + field := document.NewBooleanFieldWithIndexingOptions(fieldName, indexes, propertyValueBool, options) + context.doc.AddField(field) + + if !fm.IncludeInAll { + context.excludedFromAll = append(context.excludedFromAll, fieldName) + } + } +} + +func (fm *FieldMapping) processGeoPoint(propertyMightBeGeoPoint interface{}, pathString string, path []string, indexes []uint64, context *walkContext) { + lon, lat, found := geo.ExtractGeoPoint(propertyMightBeGeoPoint) + if found { + fieldName := getFieldName(pathString, path, fm) + options := fm.Options() + field := document.NewGeoPointFieldWithIndexingOptions(fieldName, indexes, lon, lat, options) + context.doc.AddField(field) + + if !fm.IncludeInAll { + context.excludedFromAll = append(context.excludedFromAll, fieldName) + } + } +} + +func (fm *FieldMapping) analyzerForField(path []string, context *walkContext) *analysis.Analyzer { + analyzerName := fm.Analyzer + if analyzerName == "" { + analyzerName = context.dm.defaultAnalyzerName(path) + if analyzerName == "" { + analyzerName = context.im.DefaultAnalyzer + } + } + return context.im.AnalyzerNamed(analyzerName) +} + +func getFieldName(pathString string, path []string, fieldMapping *FieldMapping) string { + fieldName := pathString + if fieldMapping.Name != "" { + parentName := "" + if len(path) > 1 { + parentName = encodePath(path[:len(path)-1]) + pathSeparator + } + fieldName = 
parentName + fieldMapping.Name + } + return fieldName +} + +// UnmarshalJSON offers custom unmarshaling with optional strict validation +func (fm *FieldMapping) UnmarshalJSON(data []byte) error { + + var tmp map[string]json.RawMessage + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + + var invalidKeys []string + for k, v := range tmp { + switch k { + case "name": + err := json.Unmarshal(v, &fm.Name) + if err != nil { + return err + } + case "type": + err := json.Unmarshal(v, &fm.Type) + if err != nil { + return err + } + case "analyzer": + err := json.Unmarshal(v, &fm.Analyzer) + if err != nil { + return err + } + case "store": + err := json.Unmarshal(v, &fm.Store) + if err != nil { + return err + } + case "index": + err := json.Unmarshal(v, &fm.Index) + if err != nil { + return err + } + case "include_term_vectors": + err := json.Unmarshal(v, &fm.IncludeTermVectors) + if err != nil { + return err + } + case "include_in_all": + err := json.Unmarshal(v, &fm.IncludeInAll) + if err != nil { + return err + } + case "date_format": + err := json.Unmarshal(v, &fm.DateFormat) + if err != nil { + return err + } + case "docvalues": + err := json.Unmarshal(v, &fm.DocValues) + if err != nil { + return err + } + default: + invalidKeys = append(invalidKeys, k) + } + } + + if MappingJSONStrict && len(invalidKeys) > 0 { + return fmt.Errorf("field mapping contains invalid keys: %v", invalidKeys) + } + + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/index.go b/vendor/github.com/blevesearch/bleve/mapping/index.go new file mode 100644 index 0000000..319ba94 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping/index.go @@ -0,0 +1,443 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mapping + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/analysis/analyzer/standard" + "github.com/blevesearch/bleve/analysis/datetime/optional" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/registry" +) + +var MappingJSONStrict = false + +const defaultTypeField = "_type" +const defaultType = "_default" +const defaultField = "_all" +const defaultAnalyzer = standard.Name +const defaultDateTimeParser = optional.Name + +// An IndexMappingImpl controls how objects are placed +// into an index. +// First the type of the object is determined. +// Once the type is know, the appropriate +// DocumentMapping is selected by the type. +// If no mapping was determined for that type, +// a DefaultMapping will be used. 
+type IndexMappingImpl struct { + TypeMapping map[string]*DocumentMapping `json:"types,omitempty"` + DefaultMapping *DocumentMapping `json:"default_mapping"` + TypeField string `json:"type_field"` + DefaultType string `json:"default_type"` + DefaultAnalyzer string `json:"default_analyzer"` + DefaultDateTimeParser string `json:"default_datetime_parser"` + DefaultField string `json:"default_field"` + StoreDynamic bool `json:"store_dynamic"` + IndexDynamic bool `json:"index_dynamic"` + DocValuesDynamic bool `json:"docvalues_dynamic"` + CustomAnalysis *customAnalysis `json:"analysis,omitempty"` + cache *registry.Cache +} + +// AddCustomCharFilter defines a custom char filter for use in this mapping +func (im *IndexMappingImpl) AddCustomCharFilter(name string, config map[string]interface{}) error { + _, err := im.cache.DefineCharFilter(name, config) + if err != nil { + return err + } + im.CustomAnalysis.CharFilters[name] = config + return nil +} + +// AddCustomTokenizer defines a custom tokenizer for use in this mapping +func (im *IndexMappingImpl) AddCustomTokenizer(name string, config map[string]interface{}) error { + _, err := im.cache.DefineTokenizer(name, config) + if err != nil { + return err + } + im.CustomAnalysis.Tokenizers[name] = config + return nil +} + +// AddCustomTokenMap defines a custom token map for use in this mapping +func (im *IndexMappingImpl) AddCustomTokenMap(name string, config map[string]interface{}) error { + _, err := im.cache.DefineTokenMap(name, config) + if err != nil { + return err + } + im.CustomAnalysis.TokenMaps[name] = config + return nil +} + +// AddCustomTokenFilter defines a custom token filter for use in this mapping +func (im *IndexMappingImpl) AddCustomTokenFilter(name string, config map[string]interface{}) error { + _, err := im.cache.DefineTokenFilter(name, config) + if err != nil { + return err + } + im.CustomAnalysis.TokenFilters[name] = config + return nil +} + +// AddCustomAnalyzer defines a custom analyzer for use in this 
mapping. The +// config map must have a "type" string entry to resolve the analyzer +// constructor. The constructor is invoked with the remaining entries and +// returned analyzer is registered in the IndexMapping. +// +// bleve comes with predefined analyzers, like +// github.com/blevesearch/bleve/analysis/analyzer/custom. They are +// available only if their package is imported by client code. To achieve this, +// use their metadata to fill configuration entries: +// +// import ( +// "github.com/blevesearch/bleve/analysis/analyzer/custom" +// "github.com/blevesearch/bleve/analysis/char/html" +// "github.com/blevesearch/bleve/analysis/token/lowercase" +// "github.com/blevesearch/bleve/analysis/tokenizer/unicode" +// ) +// +// m := bleve.NewIndexMapping() +// err := m.AddCustomAnalyzer("html", map[string]interface{}{ +// "type": custom.Name, +// "char_filters": []string{ +// html.Name, +// }, +// "tokenizer": unicode.Name, +// "token_filters": []string{ +// lowercase.Name, +// ... +// }, +// }) +func (im *IndexMappingImpl) AddCustomAnalyzer(name string, config map[string]interface{}) error { + _, err := im.cache.DefineAnalyzer(name, config) + if err != nil { + return err + } + im.CustomAnalysis.Analyzers[name] = config + return nil +} + +// AddCustomDateTimeParser defines a custom date time parser for use in this mapping +func (im *IndexMappingImpl) AddCustomDateTimeParser(name string, config map[string]interface{}) error { + _, err := im.cache.DefineDateTimeParser(name, config) + if err != nil { + return err + } + im.CustomAnalysis.DateTimeParsers[name] = config + return nil +} + +// NewIndexMapping creates a new IndexMapping that will use all the default indexing rules +func NewIndexMapping() *IndexMappingImpl { + return &IndexMappingImpl{ + TypeMapping: make(map[string]*DocumentMapping), + DefaultMapping: NewDocumentMapping(), + TypeField: defaultTypeField, + DefaultType: defaultType, + DefaultAnalyzer: defaultAnalyzer, + DefaultDateTimeParser: 
defaultDateTimeParser, + DefaultField: defaultField, + IndexDynamic: IndexDynamic, + StoreDynamic: StoreDynamic, + DocValuesDynamic: DocValuesDynamic, + CustomAnalysis: newCustomAnalysis(), + cache: registry.NewCache(), + } +} + +// Validate will walk the entire structure ensuring the following +// explicitly named and default analyzers can be built +func (im *IndexMappingImpl) Validate() error { + _, err := im.cache.AnalyzerNamed(im.DefaultAnalyzer) + if err != nil { + return err + } + _, err = im.cache.DateTimeParserNamed(im.DefaultDateTimeParser) + if err != nil { + return err + } + err = im.DefaultMapping.Validate(im.cache) + if err != nil { + return err + } + for _, docMapping := range im.TypeMapping { + err = docMapping.Validate(im.cache) + if err != nil { + return err + } + } + return nil +} + +// AddDocumentMapping sets a custom document mapping for the specified type +func (im *IndexMappingImpl) AddDocumentMapping(doctype string, dm *DocumentMapping) { + im.TypeMapping[doctype] = dm +} + +func (im *IndexMappingImpl) mappingForType(docType string) *DocumentMapping { + docMapping := im.TypeMapping[docType] + if docMapping == nil { + docMapping = im.DefaultMapping + } + return docMapping +} + +// UnmarshalJSON offers custom unmarshaling with optional strict validation +func (im *IndexMappingImpl) UnmarshalJSON(data []byte) error { + + var tmp map[string]json.RawMessage + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + + // set defaults for fields which might have been omitted + im.cache = registry.NewCache() + im.CustomAnalysis = newCustomAnalysis() + im.TypeField = defaultTypeField + im.DefaultType = defaultType + im.DefaultAnalyzer = defaultAnalyzer + im.DefaultDateTimeParser = defaultDateTimeParser + im.DefaultField = defaultField + im.DefaultMapping = NewDocumentMapping() + im.TypeMapping = make(map[string]*DocumentMapping) + im.StoreDynamic = StoreDynamic + im.IndexDynamic = IndexDynamic + im.DocValuesDynamic = DocValuesDynamic + + 
var invalidKeys []string + for k, v := range tmp { + switch k { + case "analysis": + err := json.Unmarshal(v, &im.CustomAnalysis) + if err != nil { + return err + } + case "type_field": + err := json.Unmarshal(v, &im.TypeField) + if err != nil { + return err + } + case "default_type": + err := json.Unmarshal(v, &im.DefaultType) + if err != nil { + return err + } + case "default_analyzer": + err := json.Unmarshal(v, &im.DefaultAnalyzer) + if err != nil { + return err + } + case "default_datetime_parser": + err := json.Unmarshal(v, &im.DefaultDateTimeParser) + if err != nil { + return err + } + case "default_field": + err := json.Unmarshal(v, &im.DefaultField) + if err != nil { + return err + } + case "default_mapping": + err := json.Unmarshal(v, &im.DefaultMapping) + if err != nil { + return err + } + case "types": + err := json.Unmarshal(v, &im.TypeMapping) + if err != nil { + return err + } + case "store_dynamic": + err := json.Unmarshal(v, &im.StoreDynamic) + if err != nil { + return err + } + case "index_dynamic": + err := json.Unmarshal(v, &im.IndexDynamic) + if err != nil { + return err + } + case "docvalues_dynamic": + err := json.Unmarshal(v, &im.DocValuesDynamic) + if err != nil { + return err + } + default: + invalidKeys = append(invalidKeys, k) + } + } + + if MappingJSONStrict && len(invalidKeys) > 0 { + return fmt.Errorf("index mapping contains invalid keys: %v", invalidKeys) + } + + err = im.CustomAnalysis.registerAll(im) + if err != nil { + return err + } + + return nil +} + +func (im *IndexMappingImpl) determineType(data interface{}) string { + // first see if the object implements bleveClassifier + bleveClassifier, ok := data.(bleveClassifier) + if ok { + return bleveClassifier.BleveType() + } + // next see if the object implements Classifier + classifier, ok := data.(Classifier) + if ok { + return classifier.Type() + } + + // now see if we can find a type using the mapping + typ, ok := mustString(lookupPropertyPath(data, im.TypeField)) + if ok { + 
return typ + } + + return im.DefaultType +} + +func (im *IndexMappingImpl) MapDocument(doc *document.Document, data interface{}) error { + docType := im.determineType(data) + docMapping := im.mappingForType(docType) + if docMapping.Enabled { + walkContext := im.newWalkContext(doc, docMapping) + docMapping.walkDocument(data, []string{}, []uint64{}, walkContext) + + // see if the _all field was disabled + allMapping := docMapping.documentMappingForPath("_all") + if allMapping == nil || allMapping.Enabled { + field := document.NewCompositeFieldWithIndexingOptions("_all", true, []string{}, walkContext.excludedFromAll, document.IndexField|document.IncludeTermVectors) + doc.AddField(field) + } + } + + return nil +} + +type walkContext struct { + doc *document.Document + im *IndexMappingImpl + dm *DocumentMapping + excludedFromAll []string +} + +func (im *IndexMappingImpl) newWalkContext(doc *document.Document, dm *DocumentMapping) *walkContext { + return &walkContext{ + doc: doc, + im: im, + dm: dm, + excludedFromAll: []string{"_id"}, + } +} + +// AnalyzerNameForPath attempts to find the best analyzer to use with only a +// field name will walk all the document types, look for field mappings at the +// provided path, if one exists and it has an explicit analyzer that is +// returned. 
+func (im *IndexMappingImpl) AnalyzerNameForPath(path string) string { + // first we look for explicit mapping on the field + for _, docMapping := range im.TypeMapping { + analyzerName := docMapping.analyzerNameForPath(path) + if analyzerName != "" { + return analyzerName + } + } + // now try the default mapping + pathMapping := im.DefaultMapping.documentMappingForPath(path) + if pathMapping != nil { + if len(pathMapping.Fields) > 0 { + if pathMapping.Fields[0].Analyzer != "" { + return pathMapping.Fields[0].Analyzer + } + } + } + + // next we will try default analyzers for the path + pathDecoded := decodePath(path) + for _, docMapping := range im.TypeMapping { + rv := docMapping.defaultAnalyzerName(pathDecoded) + if rv != "" { + return rv + } + } + + return im.DefaultAnalyzer +} + +func (im *IndexMappingImpl) AnalyzerNamed(name string) *analysis.Analyzer { + analyzer, err := im.cache.AnalyzerNamed(name) + if err != nil { + logger.Printf("error using analyzer named: %s", name) + return nil + } + return analyzer +} + +func (im *IndexMappingImpl) DateTimeParserNamed(name string) analysis.DateTimeParser { + if name == "" { + name = im.DefaultDateTimeParser + } + dateTimeParser, err := im.cache.DateTimeParserNamed(name) + if err != nil { + logger.Printf("error using datetime parser named: %s", name) + return nil + } + return dateTimeParser +} + +func (im *IndexMappingImpl) datetimeParserNameForPath(path string) string { + + // first we look for explicit mapping on the field + for _, docMapping := range im.TypeMapping { + pathMapping := docMapping.documentMappingForPath(path) + if pathMapping != nil { + if len(pathMapping.Fields) > 0 { + if pathMapping.Fields[0].Analyzer != "" { + return pathMapping.Fields[0].Analyzer + } + } + } + } + + return im.DefaultDateTimeParser +} + +func (im *IndexMappingImpl) AnalyzeText(analyzerName string, text []byte) (analysis.TokenStream, error) { + analyzer, err := im.cache.AnalyzerNamed(analyzerName) + if err != nil { + return nil, err 
+ } + return analyzer.Analyze(text), nil +} + +// FieldAnalyzer returns the name of the analyzer used on a field. +func (im *IndexMappingImpl) FieldAnalyzer(field string) string { + return im.AnalyzerNameForPath(field) +} + +// wrapper to satisfy new interface + +func (im *IndexMappingImpl) DefaultSearchField() string { + return im.DefaultField +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/mapping.go b/vendor/github.com/blevesearch/bleve/mapping/mapping.go new file mode 100644 index 0000000..4a47281 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping/mapping.go @@ -0,0 +1,58 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mapping + +import ( + "io/ioutil" + "log" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" +) + +// A Classifier is an interface describing any object which knows how to +// identify its own type. Alternatively, if a struct already has a Type +// field or method in conflict, one can use BleveType instead. +type Classifier interface { + Type() string +} + +// A bleveClassifier is an interface describing any object which knows how +// to identify its own type. This is introduced as an alternative to the +// Classifier interface which often has naming conflicts with existing +// structures. 
+type bleveClassifier interface { + BleveType() string +} + +var logger = log.New(ioutil.Discard, "bleve mapping ", log.LstdFlags) + +// SetLog sets the logger used for logging +// by default log messages are sent to ioutil.Discard +func SetLog(l *log.Logger) { + logger = l +} + +type IndexMapping interface { + MapDocument(doc *document.Document, data interface{}) error + Validate() error + + DateTimeParserNamed(name string) analysis.DateTimeParser + + DefaultSearchField() string + + AnalyzerNameForPath(path string) string + AnalyzerNamed(name string) *analysis.Analyzer +} diff --git a/vendor/github.com/blevesearch/bleve/mapping/reflect.go b/vendor/github.com/blevesearch/bleve/mapping/reflect.go new file mode 100644 index 0000000..6500a70 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/mapping/reflect.go @@ -0,0 +1,92 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mapping + +import ( + "reflect" + "strings" +) + +func lookupPropertyPath(data interface{}, path string) interface{} { + pathParts := decodePath(path) + + current := data + for _, part := range pathParts { + current = lookupPropertyPathPart(current, part) + if current == nil { + break + } + } + + return current +} + +func lookupPropertyPathPart(data interface{}, part string) interface{} { + val := reflect.ValueOf(data) + if !val.IsValid() { + return nil + } + typ := val.Type() + switch typ.Kind() { + case reflect.Map: + // FIXME can add support for other map keys in the future + if typ.Key().Kind() == reflect.String { + key := reflect.ValueOf(part) + entry := val.MapIndex(key) + if entry.IsValid() { + return entry.Interface() + } + } + case reflect.Struct: + field := val.FieldByName(part) + if field.IsValid() && field.CanInterface() { + return field.Interface() + } + case reflect.Ptr: + ptrElem := val.Elem() + if ptrElem.IsValid() && ptrElem.CanInterface() { + return lookupPropertyPathPart(ptrElem.Interface(), part) + } + } + return nil +} + +const pathSeparator = "." 
+ +func decodePath(path string) []string { + return strings.Split(path, pathSeparator) +} + +func encodePath(pathElements []string) string { + return strings.Join(pathElements, pathSeparator) +} + +func mustString(data interface{}) (string, bool) { + if data != nil { + str, ok := data.(string) + if ok { + return str, true + } + } + return "", false +} + +// parseTagName extracts the field name from a struct tag +func parseTagName(tag string) string { + if idx := strings.Index(tag, ","); idx != -1 { + return tag[:idx] + } + return tag +} diff --git a/vendor/github.com/blevesearch/bleve/numeric/bin.go b/vendor/github.com/blevesearch/bleve/numeric/bin.go new file mode 100644 index 0000000..368952a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/numeric/bin.go @@ -0,0 +1,43 @@ +package numeric + +var interleaveMagic = []uint64{ + 0x5555555555555555, + 0x3333333333333333, + 0x0F0F0F0F0F0F0F0F, + 0x00FF00FF00FF00FF, + 0x0000FFFF0000FFFF, + 0x00000000FFFFFFFF, + 0xAAAAAAAAAAAAAAAA, +} + +var interleaveShift = []uint{1, 2, 4, 8, 16} + +// Interleave the first 32 bits of each uint64 +// apdated from org.apache.lucene.util.BitUtil +// which was adapted from: +// http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN +func Interleave(v1, v2 uint64) uint64 { + v1 = (v1 | (v1 << interleaveShift[4])) & interleaveMagic[4] + v1 = (v1 | (v1 << interleaveShift[3])) & interleaveMagic[3] + v1 = (v1 | (v1 << interleaveShift[2])) & interleaveMagic[2] + v1 = (v1 | (v1 << interleaveShift[1])) & interleaveMagic[1] + v1 = (v1 | (v1 << interleaveShift[0])) & interleaveMagic[0] + v2 = (v2 | (v2 << interleaveShift[4])) & interleaveMagic[4] + v2 = (v2 | (v2 << interleaveShift[3])) & interleaveMagic[3] + v2 = (v2 | (v2 << interleaveShift[2])) & interleaveMagic[2] + v2 = (v2 | (v2 << interleaveShift[1])) & interleaveMagic[1] + v2 = (v2 | (v2 << interleaveShift[0])) & interleaveMagic[0] + return (v2 << 1) | v1 +} + +// Deinterleave the 32-bit value starting at position 0 +// to 
get the other 32-bit value, shift it by 1 first +func Deinterleave(b uint64) uint64 { + b &= interleaveMagic[0] + b = (b ^ (b >> interleaveShift[0])) & interleaveMagic[1] + b = (b ^ (b >> interleaveShift[1])) & interleaveMagic[2] + b = (b ^ (b >> interleaveShift[2])) & interleaveMagic[3] + b = (b ^ (b >> interleaveShift[3])) & interleaveMagic[4] + b = (b ^ (b >> interleaveShift[4])) & interleaveMagic[5] + return b +} diff --git a/vendor/github.com/blevesearch/bleve/numeric/float.go b/vendor/github.com/blevesearch/bleve/numeric/float.go new file mode 100644 index 0000000..2bb14d7 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/numeric/float.go @@ -0,0 +1,34 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package numeric + +import ( + "math" +) + +func Float64ToInt64(f float64) int64 { + fasint := int64(math.Float64bits(f)) + if fasint < 0 { + fasint = fasint ^ 0x7fffffffffffffff + } + return fasint +} + +func Int64ToFloat64(i int64) float64 { + if i < 0 { + i ^= 0x7fffffffffffffff + } + return math.Float64frombits(uint64(i)) +} diff --git a/vendor/github.com/blevesearch/bleve/numeric/prefix_coded.go b/vendor/github.com/blevesearch/bleve/numeric/prefix_coded.go new file mode 100644 index 0000000..29bd0fc --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/numeric/prefix_coded.go @@ -0,0 +1,111 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package numeric + +import "fmt" + +const ShiftStartInt64 byte = 0x20 + +// PrefixCoded is a byte array encoding of +// 64-bit numeric values shifted by 0-63 bits +type PrefixCoded []byte + +func NewPrefixCodedInt64(in int64, shift uint) (PrefixCoded, error) { + rv, _, err := NewPrefixCodedInt64Prealloc(in, shift, nil) + return rv, err +} + +func NewPrefixCodedInt64Prealloc(in int64, shift uint, prealloc []byte) ( + rv PrefixCoded, preallocRest []byte, err error) { + if shift > 63 { + return nil, prealloc, fmt.Errorf("cannot shift %d, must be between 0 and 63", shift) + } + + nChars := ((63 - shift) / 7) + 1 + + size := int(nChars + 1) + if len(prealloc) >= size { + rv = PrefixCoded(prealloc[0:size]) + preallocRest = prealloc[size:] + } else { + rv = make(PrefixCoded, size) + } + + rv[0] = ShiftStartInt64 + byte(shift) + + sortableBits := int64(uint64(in) ^ 0x8000000000000000) + sortableBits = int64(uint64(sortableBits) >> shift) + for nChars > 0 { + // Store 7 bits per byte for compatibility + // with UTF-8 encoding of terms + rv[nChars] = byte(sortableBits & 0x7f) + nChars-- + sortableBits = int64(uint64(sortableBits) >> 7) + } + + return rv, preallocRest, nil +} + +func MustNewPrefixCodedInt64(in int64, shift uint) PrefixCoded { + rv, err := NewPrefixCodedInt64(in, shift) + if err != nil { + panic(err) + } + return rv +} + +// Shift returns the number of bits shifted +// returns 0 if in uninitialized 
state +func (p PrefixCoded) Shift() (uint, error) { + if len(p) > 0 { + shift := p[0] - ShiftStartInt64 + if shift < 0 || shift < 63 { + return uint(shift), nil + } + } + return 0, fmt.Errorf("invalid prefix coded value") +} + +func (p PrefixCoded) Int64() (int64, error) { + shift, err := p.Shift() + if err != nil { + return 0, err + } + var sortableBits int64 + for _, inbyte := range p[1:] { + sortableBits <<= 7 + sortableBits |= int64(inbyte) + } + return int64(uint64((sortableBits << shift)) ^ 0x8000000000000000), nil +} + +func ValidPrefixCodedTerm(p string) (bool, int) { + return ValidPrefixCodedTermBytes([]byte(p)) +} + +func ValidPrefixCodedTermBytes(p []byte) (bool, int) { + if len(p) > 0 { + if p[0] < ShiftStartInt64 || p[0] > ShiftStartInt64+63 { + return false, 0 + } + shift := p[0] - ShiftStartInt64 + nChars := ((63 - int(shift)) / 7) + 1 + if len(p) != nChars+1 { + return false, 0 + } + return true, int(shift) + } + return false, 0 +} diff --git a/vendor/github.com/blevesearch/bleve/query.go b/vendor/github.com/blevesearch/bleve/query.go new file mode 100644 index 0000000..523db5e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/query.go @@ -0,0 +1,218 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bleve + +import ( + "time" + + "github.com/blevesearch/bleve/search/query" +) + +// NewBoolFieldQuery creates a new Query for boolean fields +func NewBoolFieldQuery(val bool) *query.BoolFieldQuery { + return query.NewBoolFieldQuery(val) +} + +// NewBooleanQuery creates a compound Query composed +// of several other Query objects. +// These other query objects are added using the +// AddMust() AddShould() and AddMustNot() methods. +// Result documents must satisfy ALL of the +// must Queries. +// Result documents must satisfy NONE of the must not +// Queries. +// Result documents that ALSO satisfy any of the should +// Queries will score higher. +func NewBooleanQuery() *query.BooleanQuery { + return query.NewBooleanQuery(nil, nil, nil) +} + +// NewConjunctionQuery creates a new compound Query. +// Result documents must satisfy all of the queries. +func NewConjunctionQuery(conjuncts ...query.Query) *query.ConjunctionQuery { + return query.NewConjunctionQuery(conjuncts) +} + +// NewDateRangeQuery creates a new Query for ranges +// of date values. +// Date strings are parsed using the DateTimeParser configured in the +// top-level config.QueryDateTimeParser +// Either, but not both endpoints can be nil. +func NewDateRangeQuery(start, end time.Time) *query.DateRangeQuery { + return query.NewDateRangeQuery(start, end) +} + +// NewDateRangeInclusiveQuery creates a new Query for ranges +// of date values. +// Date strings are parsed using the DateTimeParser configured in the +// top-level config.QueryDateTimeParser +// Either, but not both endpoints can be nil. +// startInclusive and endInclusive control inclusion of the endpoints. +func NewDateRangeInclusiveQuery(start, end time.Time, startInclusive, endInclusive *bool) *query.DateRangeQuery { + return query.NewDateRangeInclusiveQuery(start, end, startInclusive, endInclusive) +} + +// NewDisjunctionQuery creates a new compound Query. +// Result documents satisfy at least one Query. 
+func NewDisjunctionQuery(disjuncts ...query.Query) *query.DisjunctionQuery { + return query.NewDisjunctionQuery(disjuncts) +} + +// NewDocIDQuery creates a new Query object returning indexed documents among +// the specified set. Combine it with ConjunctionQuery to restrict the scope of +// other queries output. +func NewDocIDQuery(ids []string) *query.DocIDQuery { + return query.NewDocIDQuery(ids) +} + +// NewFuzzyQuery creates a new Query which finds +// documents containing terms within a specific +// fuzziness of the specified term. +// The default fuzziness is 1. +// +// The current implementation uses Levenshtein edit +// distance as the fuzziness metric. +func NewFuzzyQuery(term string) *query.FuzzyQuery { + return query.NewFuzzyQuery(term) +} + +// NewMatchAllQuery creates a Query which will +// match all documents in the index. +func NewMatchAllQuery() *query.MatchAllQuery { + return query.NewMatchAllQuery() +} + +// NewMatchNoneQuery creates a Query which will not +// match any documents in the index. +func NewMatchNoneQuery() *query.MatchNoneQuery { + return query.NewMatchNoneQuery() +} + +// NewMatchPhraseQuery creates a new Query object +// for matching phrases in the index. +// An Analyzer is chosen based on the field. +// Input text is analyzed using this analyzer. +// Token terms resulting from this analysis are +// used to build a search phrase. Result documents +// must match this phrase. Queried field must have been indexed with +// IncludeTermVectors set to true. +func NewMatchPhraseQuery(matchPhrase string) *query.MatchPhraseQuery { + return query.NewMatchPhraseQuery(matchPhrase) +} + +// NewMatchQuery creates a Query for matching text. +// An Analyzer is chosen based on the field. +// Input text is analyzed using this analyzer. +// Token terms resulting from this analysis are +// used to perform term searches. Result documents +// must satisfy at least one of these term searches. 
+func NewMatchQuery(match string) *query.MatchQuery { + return query.NewMatchQuery(match) +} + +// NewNumericRangeQuery creates a new Query for ranges +// of numeric values. +// Either, but not both endpoints can be nil. +// The minimum value is inclusive. +// The maximum value is exclusive. +func NewNumericRangeQuery(min, max *float64) *query.NumericRangeQuery { + return query.NewNumericRangeQuery(min, max) +} + +// NewNumericRangeInclusiveQuery creates a new Query for ranges +// of numeric values. +// Either, but not both endpoints can be nil. +// Control endpoint inclusion with inclusiveMin, inclusiveMax. +func NewNumericRangeInclusiveQuery(min, max *float64, minInclusive, maxInclusive *bool) *query.NumericRangeQuery { + return query.NewNumericRangeInclusiveQuery(min, max, minInclusive, maxInclusive) +} + +// NewTermRangeQuery creates a new Query for ranges +// of text terms. +// Either, but not both endpoints can be "". +// The minimum value is inclusive. +// The maximum value is exclusive. +func NewTermRangeQuery(min, max string) *query.TermRangeQuery { + return query.NewTermRangeQuery(min, max) +} + +// NewTermRangeInclusiveQuery creates a new Query for ranges +// of text terms. +// Either, but not both endpoints can be "". +// Control endpoint inclusion with inclusiveMin, inclusiveMax. +func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive *bool) *query.TermRangeQuery { + return query.NewTermRangeInclusiveQuery(min, max, minInclusive, maxInclusive) +} + +// NewPhraseQuery creates a new Query for finding +// exact term phrases in the index. +// The provided terms must exist in the correct +// order, at the correct index offsets, in the +// specified field. Queried field must have been indexed with +// IncludeTermVectors set to true. 
+func NewPhraseQuery(terms []string, field string) *query.PhraseQuery { + return query.NewPhraseQuery(terms, field) +} + +// NewPrefixQuery creates a new Query which finds +// documents containing terms that start with the +// specified prefix. +func NewPrefixQuery(prefix string) *query.PrefixQuery { + return query.NewPrefixQuery(prefix) +} + +// NewRegexpQuery creates a new Query which finds +// documents containing terms that match the +// specified regular expression. +func NewRegexpQuery(regexp string) *query.RegexpQuery { + return query.NewRegexpQuery(regexp) +} + +// NewQueryStringQuery creates a new Query used for +// finding documents that satisfy a query string. The +// query string is a small query language for humans. +func NewQueryStringQuery(q string) *query.QueryStringQuery { + return query.NewQueryStringQuery(q) +} + +// NewTermQuery creates a new Query for finding an +// exact term match in the index. +func NewTermQuery(term string) *query.TermQuery { + return query.NewTermQuery(term) +} + +// NewWildcardQuery creates a new Query which finds +// documents containing terms that match the +// specified wildcard. In the wildcard pattern '*' +// will match any sequence of 0 or more characters, +// and '?' will match any single character. +func NewWildcardQuery(wildcard string) *query.WildcardQuery { + return query.NewWildcardQuery(wildcard) +} + +// NewGeoBoundingBoxQuery creates a new Query for performing geo bounding +// box searches. The arguments describe the position of the box and documents +// which have an indexed geo point inside the box will be returned. +func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *query.GeoBoundingBoxQuery { + return query.NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat) +} + +// NewGeoDistanceQuery creates a new Query for performing geo distance +// searches. The arguments describe a position and a distance. 
Documents +// which have an indexed geo point which is less than or equal to the provided +// distance from the given position will be returned. +func NewGeoDistanceQuery(lon, lat float64, distance string) *query.GeoDistanceQuery { + return query.NewGeoDistanceQuery(lon, lat, distance) +} diff --git a/vendor/github.com/blevesearch/bleve/registry/analyzer.go b/vendor/github.com/blevesearch/bleve/registry/analyzer.go new file mode 100644 index 0000000..340e349 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/analyzer.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterAnalyzer(name string, constructor AnalyzerConstructor) { + _, exists := analyzers[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate analyzer named '%s'", name)) + } + analyzers[name] = constructor +} + +type AnalyzerConstructor func(config map[string]interface{}, cache *Cache) (*analysis.Analyzer, error) +type AnalyzerRegistry map[string]AnalyzerConstructor + +type AnalyzerCache struct { + *ConcurrentCache +} + +func NewAnalyzerCache() *AnalyzerCache { + return &AnalyzerCache{ + NewConcurrentCache(), + } +} + +func AnalyzerBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := analyzers[name] + if !registered { + return nil, fmt.Errorf("no analyzer with name or type '%s' registered", name) + } + analyzer, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building analyzer: %v", err) + } + return analyzer, nil +} + +func (c *AnalyzerCache) AnalyzerNamed(name string, cache *Cache) (*analysis.Analyzer, error) { + item, err := c.ItemNamed(name, cache, AnalyzerBuild) + if err != nil { + return nil, err + } + return item.(*analysis.Analyzer), nil +} + +func (c *AnalyzerCache) DefineAnalyzer(name string, typ string, config map[string]interface{}, cache *Cache) (*analysis.Analyzer, error) { + item, err := c.DefineItem(name, typ, config, cache, AnalyzerBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("analyzer named '%s' already defined", name) + } + return nil, err + } + return item.(*analysis.Analyzer), nil +} + +func AnalyzerTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range analyzers { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = 
append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/cache.go b/vendor/github.com/blevesearch/bleve/registry/cache.go new file mode 100644 index 0000000..b0ce852 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/cache.go @@ -0,0 +1,87 @@ +// Copyright (c) 2016 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package registry + +import ( + "fmt" + "sync" +) + +var ErrAlreadyDefined = fmt.Errorf("item already defined") + +type CacheBuild func(name string, config map[string]interface{}, cache *Cache) (interface{}, error) + +type ConcurrentCache struct { + mutex sync.RWMutex + data map[string]interface{} +} + +func NewConcurrentCache() *ConcurrentCache { + return &ConcurrentCache{ + data: make(map[string]interface{}), + } +} + +func (c *ConcurrentCache) ItemNamed(name string, cache *Cache, build CacheBuild) (interface{}, error) { + c.mutex.RLock() + item, cached := c.data[name] + if cached { + c.mutex.RUnlock() + return item, nil + } + // give up read lock + c.mutex.RUnlock() + // try to build it + newItem, err := build(name, nil, cache) + if err != nil { + return nil, err + } + // acquire write lock + c.mutex.Lock() + defer c.mutex.Unlock() + // check again because it could have been created while trading locks + item, cached = c.data[name] + if cached { + return item, nil + } + c.data[name] = newItem + return newItem, nil +} + +func (c *ConcurrentCache) 
DefineItem(name string, typ string, config map[string]interface{}, cache *Cache, build CacheBuild) (interface{}, error) { + c.mutex.RLock() + _, cached := c.data[name] + if cached { + c.mutex.RUnlock() + return nil, ErrAlreadyDefined + } + // give up read lock so others lookups can proceed + c.mutex.RUnlock() + // really not there, try to build it + newItem, err := build(typ, config, cache) + if err != nil { + return nil, err + } + // now we've built it, acquire lock + c.mutex.Lock() + defer c.mutex.Unlock() + // check again because it could have been created while trading locks + _, cached = c.data[name] + if cached { + return nil, ErrAlreadyDefined + } + c.data[name] = newItem + return newItem, nil +} diff --git a/vendor/github.com/blevesearch/bleve/registry/char_filter.go b/vendor/github.com/blevesearch/bleve/registry/char_filter.go new file mode 100644 index 0000000..4696713 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/char_filter.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterCharFilter(name string, constructor CharFilterConstructor) { + _, exists := charFilters[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate char filter named '%s'", name)) + } + charFilters[name] = constructor +} + +type CharFilterConstructor func(config map[string]interface{}, cache *Cache) (analysis.CharFilter, error) +type CharFilterRegistry map[string]CharFilterConstructor + +type CharFilterCache struct { + *ConcurrentCache +} + +func NewCharFilterCache() *CharFilterCache { + return &CharFilterCache{ + NewConcurrentCache(), + } +} + +func CharFilterBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := charFilters[name] + if !registered { + return nil, fmt.Errorf("no char filter with name or type '%s' registered", name) + } + charFilter, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building char filter: %v", err) + } + return charFilter, nil +} + +func (c *CharFilterCache) CharFilterNamed(name string, cache *Cache) (analysis.CharFilter, error) { + item, err := c.ItemNamed(name, cache, CharFilterBuild) + if err != nil { + return nil, err + } + return item.(analysis.CharFilter), nil +} + +func (c *CharFilterCache) DefineCharFilter(name string, typ string, config map[string]interface{}, cache *Cache) (analysis.CharFilter, error) { + item, err := c.DefineItem(name, typ, config, cache, CharFilterBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("char filter named '%s' already defined", name) + } + return nil, err + } + return item.(analysis.CharFilter), nil +} + +func CharFilterTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range charFilters { + _, err := cons(emptyConfig, emptyCache) + if err == nil 
{ + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/datetime_parser.go b/vendor/github.com/blevesearch/bleve/registry/datetime_parser.go new file mode 100644 index 0000000..2cd46e5 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/datetime_parser.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterDateTimeParser(name string, constructor DateTimeParserConstructor) { + _, exists := dateTimeParsers[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate date time parser named '%s'", name)) + } + dateTimeParsers[name] = constructor +} + +type DateTimeParserConstructor func(config map[string]interface{}, cache *Cache) (analysis.DateTimeParser, error) +type DateTimeParserRegistry map[string]DateTimeParserConstructor + +type DateTimeParserCache struct { + *ConcurrentCache +} + +func NewDateTimeParserCache() *DateTimeParserCache { + return &DateTimeParserCache{ + NewConcurrentCache(), + } +} + +func DateTimeParserBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := dateTimeParsers[name] + if !registered { + return nil, fmt.Errorf("no date time parser with name or type '%s' registered", name) + } + dateTimeParser, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building date time parser: %v", err) + } + return dateTimeParser, nil +} + +func (c *DateTimeParserCache) DateTimeParserNamed(name string, cache *Cache) (analysis.DateTimeParser, error) { + item, err := c.ItemNamed(name, cache, DateTimeParserBuild) + if err != nil { + return nil, err + } + return item.(analysis.DateTimeParser), nil +} + +func (c *DateTimeParserCache) DefineDateTimeParser(name string, typ string, config map[string]interface{}, cache *Cache) (analysis.DateTimeParser, error) { + item, err := c.DefineItem(name, typ, config, cache, DateTimeParserBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("date time parser named '%s' already defined", name) + } + return nil, err + } + return item.(analysis.DateTimeParser), nil +} + +func DateTimeParserTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types 
[]string + var instances []string + for name, cons := range dateTimeParsers { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/fragment_formatter.go b/vendor/github.com/blevesearch/bleve/registry/fragment_formatter.go new file mode 100644 index 0000000..d0121d9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/fragment_formatter.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/search/highlight" +) + +func RegisterFragmentFormatter(name string, constructor FragmentFormatterConstructor) { + _, exists := fragmentFormatters[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate fragment formatter named '%s'", name)) + } + fragmentFormatters[name] = constructor +} + +type FragmentFormatterConstructor func(config map[string]interface{}, cache *Cache) (highlight.FragmentFormatter, error) +type FragmentFormatterRegistry map[string]FragmentFormatterConstructor + +type FragmentFormatterCache struct { + *ConcurrentCache +} + +func NewFragmentFormatterCache() *FragmentFormatterCache { + return &FragmentFormatterCache{ + NewConcurrentCache(), + } +} + +func FragmentFormatterBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := fragmentFormatters[name] + if !registered { + return nil, fmt.Errorf("no fragment formatter with name or type '%s' registered", name) + } + fragmentFormatter, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building fragment formatter: %v", err) + } + return fragmentFormatter, nil +} + +func (c *FragmentFormatterCache) FragmentFormatterNamed(name string, cache *Cache) (highlight.FragmentFormatter, error) { + item, err := c.ItemNamed(name, cache, FragmentFormatterBuild) + if err != nil { + return nil, err + } + return item.(highlight.FragmentFormatter), nil +} + +func (c *FragmentFormatterCache) DefineFragmentFormatter(name string, typ string, config map[string]interface{}, cache *Cache) (highlight.FragmentFormatter, error) { + item, err := c.DefineItem(name, typ, config, cache, FragmentFormatterBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("fragment formatter named '%s' already defined", name) + } + return nil, err + } + return item.(highlight.FragmentFormatter), nil +} + +func FragmentFormatterTypesAndInstances() 
([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range fragmentFormatters { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/fragmenter.go b/vendor/github.com/blevesearch/bleve/registry/fragmenter.go new file mode 100644 index 0000000..18ab2ac --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/fragmenter.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/search/highlight" +) + +func RegisterFragmenter(name string, constructor FragmenterConstructor) { + _, exists := fragmenters[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate fragmenter named '%s'", name)) + } + fragmenters[name] = constructor +} + +type FragmenterConstructor func(config map[string]interface{}, cache *Cache) (highlight.Fragmenter, error) +type FragmenterRegistry map[string]FragmenterConstructor + +type FragmenterCache struct { + *ConcurrentCache +} + +func NewFragmenterCache() *FragmenterCache { + return &FragmenterCache{ + NewConcurrentCache(), + } +} + +func FragmenterBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := fragmenters[name] + if !registered { + return nil, fmt.Errorf("no fragmenter with name or type '%s' registered", name) + } + fragmenter, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building fragmenter: %v", err) + } + return fragmenter, nil +} + +func (c *FragmenterCache) FragmenterNamed(name string, cache *Cache) (highlight.Fragmenter, error) { + item, err := c.ItemNamed(name, cache, FragmenterBuild) + if err != nil { + return nil, err + } + return item.(highlight.Fragmenter), nil +} + +func (c *FragmenterCache) DefineFragmenter(name string, typ string, config map[string]interface{}, cache *Cache) (highlight.Fragmenter, error) { + item, err := c.DefineItem(name, typ, config, cache, FragmenterBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("fragmenter named '%s' already defined", name) + } + return nil, err + } + return item.(highlight.Fragmenter), nil +} + +func FragmenterTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range fragmenters { + _, err := cons(emptyConfig, emptyCache) + if 
err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/highlighter.go b/vendor/github.com/blevesearch/bleve/registry/highlighter.go new file mode 100644 index 0000000..b84219c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/highlighter.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/search/highlight" +) + +func RegisterHighlighter(name string, constructor HighlighterConstructor) { + _, exists := highlighters[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate highlighter named '%s'", name)) + } + highlighters[name] = constructor +} + +type HighlighterConstructor func(config map[string]interface{}, cache *Cache) (highlight.Highlighter, error) +type HighlighterRegistry map[string]HighlighterConstructor + +type HighlighterCache struct { + *ConcurrentCache +} + +func NewHighlighterCache() *HighlighterCache { + return &HighlighterCache{ + NewConcurrentCache(), + } +} + +func HighlighterBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := highlighters[name] + if !registered { + return nil, fmt.Errorf("no highlighter with name or type '%s' registered", name) + } + highlighter, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building highlighter: %v", err) + } + return highlighter, nil +} + +func (c *HighlighterCache) HighlighterNamed(name string, cache *Cache) (highlight.Highlighter, error) { + item, err := c.ItemNamed(name, cache, HighlighterBuild) + if err != nil { + return nil, err + } + return item.(highlight.Highlighter), nil +} + +func (c *HighlighterCache) DefineHighlighter(name string, typ string, config map[string]interface{}, cache *Cache) (highlight.Highlighter, error) { + item, err := c.DefineItem(name, typ, config, cache, HighlighterBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("highlighter named '%s' already defined", name) + } + return nil, err + } + return item.(highlight.Highlighter), nil +} + +func HighlighterTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range highlighters { + _, err := 
cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/index_type.go b/vendor/github.com/blevesearch/bleve/registry/index_type.go new file mode 100644 index 0000000..4da07c8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/index_type.go @@ -0,0 +1,45 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package registry
+
+import (
+	"fmt"
+
+	"github.com/blevesearch/bleve/index"
+)
+
+// RegisterIndexType registers the constructor for a named index type.
+// It panics on a duplicate name, mirroring the other registries in this package.
+func RegisterIndexType(name string, constructor IndexTypeConstructor) {
+	_, exists := indexTypes[name]
+	if exists {
+		panic(fmt.Errorf("attempted to register duplicate index encoding named '%s'", name))
+	}
+	indexTypes[name] = constructor
+}
+
+type IndexTypeConstructor func(storeName string, storeConfig map[string]interface{}, analysisQueue *index.AnalysisQueue) (index.Index, error)
+type IndexTypeRegistry map[string]IndexTypeConstructor
+
+// IndexTypeConstructorByName returns the registered constructor, or nil if unknown.
+func IndexTypeConstructorByName(name string) IndexTypeConstructor {
+	return indexTypes[name]
+}
+
+// IndexTypesAndInstances lists registered index type names; instances is
+// always empty because an index type cannot be instantiated without a store.
+func IndexTypesAndInstances() ([]string, []string) {
+	var types []string
+	var instances []string
+	for name := range indexTypes { // review fix: was `range stores`, a copy/paste from store.go that reported KV store names as index types
+		types = append(types, name)
+	}
+	return types, instances
+}
diff --git a/vendor/github.com/blevesearch/bleve/registry/registry.go b/vendor/github.com/blevesearch/bleve/registry/registry.go
new file mode 100644
index 0000000..a0ea69c
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/registry/registry.go
@@ -0,0 +1,184 @@
+// Copyright (c) 2014 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/search/highlight" +) + +var stores = make(KVStoreRegistry, 0) +var indexTypes = make(IndexTypeRegistry, 0) + +// highlight +var fragmentFormatters = make(FragmentFormatterRegistry, 0) +var fragmenters = make(FragmenterRegistry, 0) +var highlighters = make(HighlighterRegistry, 0) + +// analysis +var charFilters = make(CharFilterRegistry, 0) +var tokenizers = make(TokenizerRegistry, 0) +var tokenMaps = make(TokenMapRegistry, 0) +var tokenFilters = make(TokenFilterRegistry, 0) +var analyzers = make(AnalyzerRegistry, 0) +var dateTimeParsers = make(DateTimeParserRegistry, 0) + +type Cache struct { + CharFilters *CharFilterCache + Tokenizers *TokenizerCache + TokenMaps *TokenMapCache + TokenFilters *TokenFilterCache + Analyzers *AnalyzerCache + DateTimeParsers *DateTimeParserCache + FragmentFormatters *FragmentFormatterCache + Fragmenters *FragmenterCache + Highlighters *HighlighterCache +} + +func NewCache() *Cache { + return &Cache{ + CharFilters: NewCharFilterCache(), + Tokenizers: NewTokenizerCache(), + TokenMaps: NewTokenMapCache(), + TokenFilters: NewTokenFilterCache(), + Analyzers: NewAnalyzerCache(), + DateTimeParsers: NewDateTimeParserCache(), + FragmentFormatters: NewFragmentFormatterCache(), + Fragmenters: NewFragmenterCache(), + Highlighters: NewHighlighterCache(), + } +} + +func typeFromConfig(config map[string]interface{}) (string, error) { + prop, ok := config["type"] + if !ok { + return "", fmt.Errorf("'type' property is not defined") + } + typ, ok := prop.(string) + if !ok { + return "", fmt.Errorf("'type' property must be a string, not %T", prop) + } + return typ, nil +} + +func (c *Cache) CharFilterNamed(name string) (analysis.CharFilter, error) { + return c.CharFilters.CharFilterNamed(name, c) +} + +func (c *Cache) DefineCharFilter(name string, config map[string]interface{}) (analysis.CharFilter, error) { + typ, err := 
typeFromConfig(config) + if err != nil { + return nil, err + } + return c.CharFilters.DefineCharFilter(name, typ, config, c) +} + +func (c *Cache) TokenizerNamed(name string) (analysis.Tokenizer, error) { + return c.Tokenizers.TokenizerNamed(name, c) +} + +func (c *Cache) DefineTokenizer(name string, config map[string]interface{}) (analysis.Tokenizer, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, fmt.Errorf("cannot resolve '%s' tokenizer type: %s", name, err) + } + return c.Tokenizers.DefineTokenizer(name, typ, config, c) +} + +func (c *Cache) TokenMapNamed(name string) (analysis.TokenMap, error) { + return c.TokenMaps.TokenMapNamed(name, c) +} + +func (c *Cache) DefineTokenMap(name string, config map[string]interface{}) (analysis.TokenMap, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.TokenMaps.DefineTokenMap(name, typ, config, c) +} + +func (c *Cache) TokenFilterNamed(name string) (analysis.TokenFilter, error) { + return c.TokenFilters.TokenFilterNamed(name, c) +} + +func (c *Cache) DefineTokenFilter(name string, config map[string]interface{}) (analysis.TokenFilter, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.TokenFilters.DefineTokenFilter(name, typ, config, c) +} + +func (c *Cache) AnalyzerNamed(name string) (*analysis.Analyzer, error) { + return c.Analyzers.AnalyzerNamed(name, c) +} + +func (c *Cache) DefineAnalyzer(name string, config map[string]interface{}) (*analysis.Analyzer, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.Analyzers.DefineAnalyzer(name, typ, config, c) +} + +func (c *Cache) DateTimeParserNamed(name string) (analysis.DateTimeParser, error) { + return c.DateTimeParsers.DateTimeParserNamed(name, c) +} + +func (c *Cache) DefineDateTimeParser(name string, config map[string]interface{}) (analysis.DateTimeParser, error) { + typ, err := typeFromConfig(config) + if 
err != nil { + return nil, err + } + return c.DateTimeParsers.DefineDateTimeParser(name, typ, config, c) +} + +func (c *Cache) FragmentFormatterNamed(name string) (highlight.FragmentFormatter, error) { + return c.FragmentFormatters.FragmentFormatterNamed(name, c) +} + +func (c *Cache) DefineFragmentFormatter(name string, config map[string]interface{}) (highlight.FragmentFormatter, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.FragmentFormatters.DefineFragmentFormatter(name, typ, config, c) +} + +func (c *Cache) FragmenterNamed(name string) (highlight.Fragmenter, error) { + return c.Fragmenters.FragmenterNamed(name, c) +} + +func (c *Cache) DefineFragmenter(name string, config map[string]interface{}) (highlight.Fragmenter, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.Fragmenters.DefineFragmenter(name, typ, config, c) +} + +func (c *Cache) HighlighterNamed(name string) (highlight.Highlighter, error) { + return c.Highlighters.HighlighterNamed(name, c) +} + +func (c *Cache) DefineHighlighter(name string, config map[string]interface{}) (highlight.Highlighter, error) { + typ, err := typeFromConfig(config) + if err != nil { + return nil, err + } + return c.Highlighters.DefineHighlighter(name, typ, config, c) +} diff --git a/vendor/github.com/blevesearch/bleve/registry/store.go b/vendor/github.com/blevesearch/bleve/registry/store.go new file mode 100644 index 0000000..8318776 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/store.go @@ -0,0 +1,51 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package registry
+
+import (
+	"fmt"
+
+	"github.com/blevesearch/bleve/index/store"
+)
+
+// RegisterKVStore registers the constructor for a named KV store implementation.
+// It panics on a duplicate name, mirroring the other registries in this package.
+func RegisterKVStore(name string, constructor KVStoreConstructor) {
+	_, exists := stores[name]
+	if exists {
+		panic(fmt.Errorf("attempted to register duplicate store named '%s'", name))
+	}
+	stores[name] = constructor
+}
+
+// KVStoreConstructor is used to build a KVStore of a specific type when
+// specified by the index configuration. In addition to meeting the
+// store.KVStore interface, KVStores must also support this constructor.
+// Note that currently the values of config must
+// be able to be marshaled and unmarshaled using the encoding/json library (used
+// when reading/writing the index metadata file).
+type KVStoreConstructor func(mo store.MergeOperator, config map[string]interface{}) (store.KVStore, error)
+type KVStoreRegistry map[string]KVStoreConstructor
+
+// KVStoreConstructorByName returns the registered constructor, or nil if unknown.
+func KVStoreConstructorByName(name string) KVStoreConstructor {
+	return stores[name]
+}
+
+// KVStoreTypesAndInstances lists registered KV store names.
+func KVStoreTypesAndInstances() ([]string, []string) {
+	var types []string
+	var instances []string // always empty: stores are never instantiated here
+	for name := range stores {
+		types = append(types, name)
+	}
+	return types, instances
+}
diff --git a/vendor/github.com/blevesearch/bleve/registry/token_filter.go b/vendor/github.com/blevesearch/bleve/registry/token_filter.go
new file mode 100644
index 0000000..e202e15
--- /dev/null
+++ b/vendor/github.com/blevesearch/bleve/registry/token_filter.go
@@ -0,0 +1,89 @@
+// Copyright (c) 2014 Couchbase, Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterTokenFilter(name string, constructor TokenFilterConstructor) { + _, exists := tokenFilters[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate token filter named '%s'", name)) + } + tokenFilters[name] = constructor +} + +type TokenFilterConstructor func(config map[string]interface{}, cache *Cache) (analysis.TokenFilter, error) +type TokenFilterRegistry map[string]TokenFilterConstructor + +type TokenFilterCache struct { + *ConcurrentCache +} + +func NewTokenFilterCache() *TokenFilterCache { + return &TokenFilterCache{ + NewConcurrentCache(), + } +} + +func TokenFilterBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := tokenFilters[name] + if !registered { + return nil, fmt.Errorf("no token filter with name or type '%s' registered", name) + } + tokenFilter, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building token filter: %v", err) + } + return tokenFilter, nil +} + +func (c *TokenFilterCache) TokenFilterNamed(name string, cache *Cache) (analysis.TokenFilter, error) { + item, err := c.ItemNamed(name, cache, TokenFilterBuild) + if err != nil { + return nil, err + } + return item.(analysis.TokenFilter), nil +} + +func (c *TokenFilterCache) DefineTokenFilter(name string, typ string, config 
map[string]interface{}, cache *Cache) (analysis.TokenFilter, error) { + item, err := c.DefineItem(name, typ, config, cache, TokenFilterBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("token filter named '%s' already defined", name) + } + return nil, err + } + return item.(analysis.TokenFilter), nil +} + +func TokenFilterTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range tokenFilters { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/token_maps.go b/vendor/github.com/blevesearch/bleve/registry/token_maps.go new file mode 100644 index 0000000..66ca08f --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/token_maps.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterTokenMap(name string, constructor TokenMapConstructor) { + _, exists := tokenMaps[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate token map named '%s'", name)) + } + tokenMaps[name] = constructor +} + +type TokenMapConstructor func(config map[string]interface{}, cache *Cache) (analysis.TokenMap, error) +type TokenMapRegistry map[string]TokenMapConstructor + +type TokenMapCache struct { + *ConcurrentCache +} + +func NewTokenMapCache() *TokenMapCache { + return &TokenMapCache{ + NewConcurrentCache(), + } +} + +func TokenMapBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := tokenMaps[name] + if !registered { + return nil, fmt.Errorf("no token map with name or type '%s' registered", name) + } + tokenMap, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building token map: %v", err) + } + return tokenMap, nil +} + +func (c *TokenMapCache) TokenMapNamed(name string, cache *Cache) (analysis.TokenMap, error) { + item, err := c.ItemNamed(name, cache, TokenMapBuild) + if err != nil { + return nil, err + } + return item.(analysis.TokenMap), nil +} + +func (c *TokenMapCache) DefineTokenMap(name string, typ string, config map[string]interface{}, cache *Cache) (analysis.TokenMap, error) { + item, err := c.DefineItem(name, typ, config, cache, TokenMapBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("token map named '%s' already defined", name) + } + return nil, err + } + return item.(analysis.TokenMap), nil +} + +func TokenMapTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range tokenMaps { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = 
append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/registry/tokenizer.go b/vendor/github.com/blevesearch/bleve/registry/tokenizer.go new file mode 100644 index 0000000..cb9af64 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/registry/tokenizer.go @@ -0,0 +1,89 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package registry + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" +) + +func RegisterTokenizer(name string, constructor TokenizerConstructor) { + _, exists := tokenizers[name] + if exists { + panic(fmt.Errorf("attempted to register duplicate tokenizer named '%s'", name)) + } + tokenizers[name] = constructor +} + +type TokenizerConstructor func(config map[string]interface{}, cache *Cache) (analysis.Tokenizer, error) +type TokenizerRegistry map[string]TokenizerConstructor + +type TokenizerCache struct { + *ConcurrentCache +} + +func NewTokenizerCache() *TokenizerCache { + return &TokenizerCache{ + NewConcurrentCache(), + } +} + +func TokenizerBuild(name string, config map[string]interface{}, cache *Cache) (interface{}, error) { + cons, registered := tokenizers[name] + if !registered { + return nil, fmt.Errorf("no tokenizer with name or type '%s' registered", name) + } + tokenizer, err := cons(config, cache) + if err != nil { + return nil, fmt.Errorf("error building tokenizer: %v", err) + } + return tokenizer, nil +} + +func (c 
*TokenizerCache) TokenizerNamed(name string, cache *Cache) (analysis.Tokenizer, error) { + item, err := c.ItemNamed(name, cache, TokenizerBuild) + if err != nil { + return nil, err + } + return item.(analysis.Tokenizer), nil +} + +func (c *TokenizerCache) DefineTokenizer(name string, typ string, config map[string]interface{}, cache *Cache) (analysis.Tokenizer, error) { + item, err := c.DefineItem(name, typ, config, cache, TokenizerBuild) + if err != nil { + if err == ErrAlreadyDefined { + return nil, fmt.Errorf("tokenizer named '%s' already defined", name) + } + return nil, err + } + return item.(analysis.Tokenizer), nil +} + +func TokenizerTypesAndInstances() ([]string, []string) { + emptyConfig := map[string]interface{}{} + emptyCache := NewCache() + var types []string + var instances []string + for name, cons := range tokenizers { + _, err := cons(emptyConfig, emptyCache) + if err == nil { + instances = append(instances, name) + } else { + types = append(types, name) + } + } + return types, instances +} diff --git a/vendor/github.com/blevesearch/bleve/search.go b/vendor/github.com/blevesearch/bleve/search.go new file mode 100644 index 0000000..f674507 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search.go @@ -0,0 +1,631 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package bleve + +import ( + "encoding/json" + "fmt" + "reflect" + "sort" + "time" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/analysis/datetime/optional" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/collector" + "github.com/blevesearch/bleve/search/query" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeSearchResult int +var reflectStaticSizeSearchStatus int + +func init() { + var sr SearchResult + reflectStaticSizeSearchResult = int(reflect.TypeOf(sr).Size()) + var ss SearchStatus + reflectStaticSizeSearchStatus = int(reflect.TypeOf(ss).Size()) +} + +var cache = registry.NewCache() + +const defaultDateTimeParser = optional.Name + +type numericRange struct { + Name string `json:"name,omitempty"` + Min *float64 `json:"min,omitempty"` + Max *float64 `json:"max,omitempty"` +} + +type dateTimeRange struct { + Name string `json:"name,omitempty"` + Start time.Time `json:"start,omitempty"` + End time.Time `json:"end,omitempty"` + startString *string + endString *string +} + +func (dr *dateTimeRange) ParseDates(dateTimeParser analysis.DateTimeParser) (start, end time.Time) { + start = dr.Start + if dr.Start.IsZero() && dr.startString != nil { + s, err := dateTimeParser.ParseDateTime(*dr.startString) + if err == nil { + start = s + } + } + end = dr.End + if dr.End.IsZero() && dr.endString != nil { + e, err := dateTimeParser.ParseDateTime(*dr.endString) + if err == nil { + end = e + } + } + return start, end +} + +func (dr *dateTimeRange) UnmarshalJSON(input []byte) error { + var temp struct { + Name string `json:"name,omitempty"` + Start *string `json:"start,omitempty"` + End *string `json:"end,omitempty"` + } + + err := json.Unmarshal(input, &temp) + if err != nil { + return err + } + + dr.Name = temp.Name + if temp.Start != nil { + dr.startString = temp.Start + } + if temp.End != nil { + dr.endString = 
temp.End + } + + return nil +} + +func (dr *dateTimeRange) MarshalJSON() ([]byte, error) { + rv := map[string]interface{}{ + "name": dr.Name, + "start": dr.Start, + "end": dr.End, + } + if dr.Start.IsZero() && dr.startString != nil { + rv["start"] = dr.startString + } + if dr.End.IsZero() && dr.endString != nil { + rv["end"] = dr.endString + } + return json.Marshal(rv) +} + +// A FacetRequest describes a facet or aggregation +// of the result document set you would like to be +// built. +type FacetRequest struct { + Size int `json:"size"` + Field string `json:"field"` + NumericRanges []*numericRange `json:"numeric_ranges,omitempty"` + DateTimeRanges []*dateTimeRange `json:"date_ranges,omitempty"` +} + +func (fr *FacetRequest) Validate() error { + nrCount := len(fr.NumericRanges) + drCount := len(fr.DateTimeRanges) + if nrCount > 0 && drCount > 0 { + return fmt.Errorf("facet can only conain numeric ranges or date ranges, not both") + } + + if nrCount > 0 { + nrNames := map[string]interface{}{} + for _, nr := range fr.NumericRanges { + if _, ok := nrNames[nr.Name]; ok { + return fmt.Errorf("numeric ranges contains duplicate name '%s'", nr.Name) + } + nrNames[nr.Name] = struct{}{} + if nr.Min == nil && nr.Max == nil { + return fmt.Errorf("numeric range query must specify either min, max or both for range name '%s'", nr.Name) + } + } + + } else { + dateTimeParser, err := cache.DateTimeParserNamed(defaultDateTimeParser) + if err != nil { + return err + } + drNames := map[string]interface{}{} + for _, dr := range fr.DateTimeRanges { + if _, ok := drNames[dr.Name]; ok { + return fmt.Errorf("date ranges contains duplicate name '%s'", dr.Name) + } + drNames[dr.Name] = struct{}{} + start, end := dr.ParseDates(dateTimeParser) + if start.IsZero() && end.IsZero() { + return fmt.Errorf("date range query must specify either start, end or both for range name '%s'", dr.Name) + } + } + } + return nil +} + +// NewFacetRequest creates a facet on the specified +// field that limits the 
number of entries to the +// specified size. +func NewFacetRequest(field string, size int) *FacetRequest { + return &FacetRequest{ + Field: field, + Size: size, + } +} + +// AddDateTimeRange adds a bucket to a field +// containing date values. Documents with a +// date value falling into this range are tabulated +// as part of this bucket/range. +func (fr *FacetRequest) AddDateTimeRange(name string, start, end time.Time) { + if fr.DateTimeRanges == nil { + fr.DateTimeRanges = make([]*dateTimeRange, 0, 1) + } + fr.DateTimeRanges = append(fr.DateTimeRanges, &dateTimeRange{Name: name, Start: start, End: end}) +} + +// AddDateTimeRangeString adds a bucket to a field +// containing date values. +func (fr *FacetRequest) AddDateTimeRangeString(name string, start, end *string) { + if fr.DateTimeRanges == nil { + fr.DateTimeRanges = make([]*dateTimeRange, 0, 1) + } + fr.DateTimeRanges = append(fr.DateTimeRanges, + &dateTimeRange{Name: name, startString: start, endString: end}) +} + +// AddNumericRange adds a bucket to a field +// containing numeric values. Documents with a +// numeric value falling into this range are +// tabulated as part of this bucket/range. +func (fr *FacetRequest) AddNumericRange(name string, min, max *float64) { + if fr.NumericRanges == nil { + fr.NumericRanges = make([]*numericRange, 0, 1) + } + fr.NumericRanges = append(fr.NumericRanges, &numericRange{Name: name, Min: min, Max: max}) +} + +// FacetsRequest groups together all the +// FacetRequest objects for a single query. +type FacetsRequest map[string]*FacetRequest + +func (fr FacetsRequest) Validate() error { + for _, v := range fr { + err := v.Validate() + if err != nil { + return err + } + } + return nil +} + +// HighlightRequest describes how field matches +// should be highlighted. +type HighlightRequest struct { + Style *string `json:"style"` + Fields []string `json:"fields"` +} + +// NewHighlight creates a default +// HighlightRequest. 
+func NewHighlight() *HighlightRequest { + return &HighlightRequest{} +} + +// NewHighlightWithStyle creates a HighlightRequest +// with an alternate style. +func NewHighlightWithStyle(style string) *HighlightRequest { + return &HighlightRequest{ + Style: &style, + } +} + +func (h *HighlightRequest) AddField(field string) { + if h.Fields == nil { + h.Fields = make([]string, 0, 1) + } + h.Fields = append(h.Fields, field) +} + +// A SearchRequest describes all the parameters +// needed to search the index. +// Query is required. +// Size/From describe how much and which part of the +// result set to return. +// Highlight describes optional search result +// highlighting. +// Fields describes a list of field values which +// should be retrieved for result documents, provided they +// were stored while indexing. +// Facets describe the set of facets to be computed. +// Explain triggers inclusion of additional search +// result score explanations. +// Sort describes the desired order for the results to be returned. +// Score controls the kind of scoring performed +// SearchAfter supports deep paging by providing a minimum sort key +// SearchBefore supports deep paging by providing a maximum sort key +// sortFunc specifies the sort implementation to use for sorting results. +// +// A special field named "*" can be used to return all fields. 
+type SearchRequest struct { + Query query.Query `json:"query"` + Size int `json:"size"` + From int `json:"from"` + Highlight *HighlightRequest `json:"highlight"` + Fields []string `json:"fields"` + Facets FacetsRequest `json:"facets"` + Explain bool `json:"explain"` + Sort search.SortOrder `json:"sort"` + IncludeLocations bool `json:"includeLocations"` + Score string `json:"score,omitempty"` + SearchAfter []string `json:"search_after"` + SearchBefore []string `json:"search_before"` + + sortFunc func(sort.Interface) +} + +func (r *SearchRequest) Validate() error { + if srq, ok := r.Query.(query.ValidatableQuery); ok { + err := srq.Validate() + if err != nil { + return err + } + } + + if r.SearchAfter != nil && r.SearchBefore != nil { + return fmt.Errorf("cannot use search after and search before together") + } + + if r.SearchAfter != nil { + if r.From != 0 { + return fmt.Errorf("cannot use search after with from !=0") + } + if len(r.SearchAfter) != len(r.Sort) { + return fmt.Errorf("search after must have same size as sort order") + } + } + if r.SearchBefore != nil { + if r.From != 0 { + return fmt.Errorf("cannot use search before with from !=0") + } + if len(r.SearchBefore) != len(r.Sort) { + return fmt.Errorf("search before must have same size as sort order") + } + } + + return r.Facets.Validate() +} + +// AddFacet adds a FacetRequest to this SearchRequest +func (r *SearchRequest) AddFacet(facetName string, f *FacetRequest) { + if r.Facets == nil { + r.Facets = make(FacetsRequest, 1) + } + r.Facets[facetName] = f +} + +// SortBy changes the request to use the requested sort order +// this form uses the simplified syntax with an array of strings +// each string can either be a field name +// or the magic value _id and _score which refer to the doc id and search score +// any of these values can optionally be prefixed with - to reverse the order +func (r *SearchRequest) SortBy(order []string) { + so := search.ParseSortOrderStrings(order) + r.Sort = so +} + +// 
SortByCustom changes the request to use the requested sort order +func (r *SearchRequest) SortByCustom(order search.SortOrder) { + r.Sort = order +} + +// SetSearchAfter sets the request to skip over hits with a sort +// value less than the provided sort after key +func (r *SearchRequest) SetSearchAfter(after []string) { + r.SearchAfter = after +} + +// SetSearchBefore sets the request to skip over hits with a sort +// value greater than the provided sort before key +func (r *SearchRequest) SetSearchBefore(before []string) { + r.SearchBefore = before +} + +// UnmarshalJSON deserializes a JSON representation of +// a SearchRequest +func (r *SearchRequest) UnmarshalJSON(input []byte) error { + var temp struct { + Q json.RawMessage `json:"query"` + Size *int `json:"size"` + From int `json:"from"` + Highlight *HighlightRequest `json:"highlight"` + Fields []string `json:"fields"` + Facets FacetsRequest `json:"facets"` + Explain bool `json:"explain"` + Sort []json.RawMessage `json:"sort"` + IncludeLocations bool `json:"includeLocations"` + Score string `json:"score"` + SearchAfter []string `json:"search_after"` + SearchBefore []string `json:"search_before"` + } + + err := json.Unmarshal(input, &temp) + if err != nil { + return err + } + + if temp.Size == nil { + r.Size = 10 + } else { + r.Size = *temp.Size + } + if temp.Sort == nil { + r.Sort = search.SortOrder{&search.SortScore{Desc: true}} + } else { + r.Sort, err = search.ParseSortOrderJSON(temp.Sort) + if err != nil { + return err + } + } + r.From = temp.From + r.Explain = temp.Explain + r.Highlight = temp.Highlight + r.Fields = temp.Fields + r.Facets = temp.Facets + r.IncludeLocations = temp.IncludeLocations + r.Score = temp.Score + r.SearchAfter = temp.SearchAfter + r.SearchBefore = temp.SearchBefore + r.Query, err = query.ParseQuery(temp.Q) + if err != nil { + return err + } + + if r.Size < 0 { + r.Size = 10 + } + if r.From < 0 { + r.From = 0 + } + + return nil + +} + +// NewSearchRequest creates a new 
SearchRequest +// for the Query, using default values for all +// other search parameters. +func NewSearchRequest(q query.Query) *SearchRequest { + return NewSearchRequestOptions(q, 10, 0, false) +} + +// NewSearchRequestOptions creates a new SearchRequest +// for the Query, with the requested size, from +// and explanation search parameters. +// By default results are ordered by score, descending. +func NewSearchRequestOptions(q query.Query, size, from int, explain bool) *SearchRequest { + return &SearchRequest{ + Query: q, + Size: size, + From: from, + Explain: explain, + Sort: search.SortOrder{&search.SortScore{Desc: true}}, + } +} + +// IndexErrMap tracks errors with the name of the index where it occurred +type IndexErrMap map[string]error + +// MarshalJSON seralizes the error into a string for JSON consumption +func (iem IndexErrMap) MarshalJSON() ([]byte, error) { + tmp := make(map[string]string, len(iem)) + for k, v := range iem { + tmp[k] = v.Error() + } + return json.Marshal(tmp) +} + +func (iem IndexErrMap) UnmarshalJSON(data []byte) error { + var tmp map[string]string + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + for k, v := range tmp { + iem[k] = fmt.Errorf("%s", v) + } + return nil +} + +// SearchStatus is a secion in the SearchResult reporting how many +// underlying indexes were queried, how many were successful/failed +// and a map of any errors that were encountered +type SearchStatus struct { + Total int `json:"total"` + Failed int `json:"failed"` + Successful int `json:"successful"` + Errors IndexErrMap `json:"errors,omitempty"` +} + +// Merge will merge together multiple SearchStatuses during a MultiSearch +func (ss *SearchStatus) Merge(other *SearchStatus) { + ss.Total += other.Total + ss.Failed += other.Failed + ss.Successful += other.Successful + if len(other.Errors) > 0 { + if ss.Errors == nil { + ss.Errors = make(map[string]error) + } + for otherIndex, otherError := range other.Errors { + ss.Errors[otherIndex] = 
otherError + } + } +} + +// A SearchResult describes the results of executing +// a SearchRequest. +type SearchResult struct { + Status *SearchStatus `json:"status"` + Request *SearchRequest `json:"request"` + Hits search.DocumentMatchCollection `json:"hits"` + Total uint64 `json:"total_hits"` + MaxScore float64 `json:"max_score"` + Took time.Duration `json:"took"` + Facets search.FacetResults `json:"facets"` +} + +func (sr *SearchResult) Size() int { + sizeInBytes := reflectStaticSizeSearchResult + size.SizeOfPtr + + reflectStaticSizeSearchStatus + + for _, entry := range sr.Hits { + if entry != nil { + sizeInBytes += entry.Size() + } + } + + for k, v := range sr.Facets { + sizeInBytes += size.SizeOfString + len(k) + + v.Size() + } + + return sizeInBytes +} + +func (sr *SearchResult) String() string { + rv := "" + if sr.Total > 0 { + if sr.Request.Size > 0 { + rv = fmt.Sprintf("%d matches, showing %d through %d, took %s\n", sr.Total, sr.Request.From+1, sr.Request.From+len(sr.Hits), sr.Took) + for i, hit := range sr.Hits { + rv += fmt.Sprintf("%5d. 
%s (%f)\n", i+sr.Request.From+1, hit.ID, hit.Score) + for fragmentField, fragments := range hit.Fragments { + rv += fmt.Sprintf("\t%s\n", fragmentField) + for _, fragment := range fragments { + rv += fmt.Sprintf("\t\t%s\n", fragment) + } + } + for otherFieldName, otherFieldValue := range hit.Fields { + if _, ok := hit.Fragments[otherFieldName]; !ok { + rv += fmt.Sprintf("\t%s\n", otherFieldName) + rv += fmt.Sprintf("\t\t%v\n", otherFieldValue) + } + } + } + } else { + rv = fmt.Sprintf("%d matches, took %s\n", sr.Total, sr.Took) + } + } else { + rv = "No matches" + } + if len(sr.Facets) > 0 { + rv += fmt.Sprintf("Facets:\n") + for fn, f := range sr.Facets { + rv += fmt.Sprintf("%s(%d)\n", fn, f.Total) + for _, t := range f.Terms { + rv += fmt.Sprintf("\t%s(%d)\n", t.Term, t.Count) + } + if f.Other != 0 { + rv += fmt.Sprintf("\tOther(%d)\n", f.Other) + } + } + } + return rv +} + +// Merge will merge together multiple SearchResults during a MultiSearch +func (sr *SearchResult) Merge(other *SearchResult) { + sr.Status.Merge(other.Status) + sr.Hits = append(sr.Hits, other.Hits...) + sr.Total += other.Total + if other.MaxScore > sr.MaxScore { + sr.MaxScore = other.MaxScore + } + if sr.Facets == nil && len(other.Facets) != 0 { + sr.Facets = other.Facets + return + } + + sr.Facets.Merge(other.Facets) +} + +// MemoryNeededForSearchResult is an exported helper function to determine the RAM +// needed to accommodate the results for a given search request. 
+func MemoryNeededForSearchResult(req *SearchRequest) uint64 { + if req == nil { + return 0 + } + + numDocMatches := req.Size + req.From + if req.Size+req.From > collector.PreAllocSizeSkipCap { + numDocMatches = collector.PreAllocSizeSkipCap + } + + estimate := 0 + + // overhead from the SearchResult structure + var sr SearchResult + estimate += sr.Size() + + var dm search.DocumentMatch + sizeOfDocumentMatch := dm.Size() + + // overhead from results + estimate += numDocMatches * sizeOfDocumentMatch + + // overhead from facet results + if req.Facets != nil { + var fr search.FacetResult + estimate += len(req.Facets) * fr.Size() + } + + // highlighting, store + var d document.Document + if len(req.Fields) > 0 || req.Highlight != nil { + for i := 0; i < (req.Size + req.From); i++ { + estimate += (req.Size + req.From) * d.Size() + } + } + + return uint64(estimate) +} + +// SetSortFunc sets the sort implementation to use when sorting hits. +// +// SearchRequests can specify a custom sort implementation to meet +// their needs. For instance, by specifying a parallel sort +// that uses all available cores. +func (r *SearchRequest) SetSortFunc(s func(sort.Interface)) { + r.sortFunc = s +} + +// SortFunc returns the sort implementation to use when sorting hits. +// Defaults to sort.Sort. +func (r *SearchRequest) SortFunc() func(data sort.Interface) { + if r.sortFunc != nil { + return r.sortFunc + } + + return sort.Sort +} diff --git a/vendor/github.com/blevesearch/bleve/search/collector.go b/vendor/github.com/blevesearch/bleve/search/collector.go new file mode 100644 index 0000000..df3ff9c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/collector.go @@ -0,0 +1,52 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package search + +import ( + "context" + "time" + + "github.com/blevesearch/bleve/index" +) + +type Collector interface { + Collect(ctx context.Context, searcher Searcher, reader index.IndexReader) error + Results() DocumentMatchCollection + Total() uint64 + MaxScore() float64 + Took() time.Duration + SetFacetsBuilder(facetsBuilder *FacetsBuilder) + FacetResults() FacetResults +} + +// DocumentMatchHandler is the type of document match callback +// bleve will invoke during the search. +// Eventually, bleve will indicate the completion of an ongoing search, +// by passing a nil value for the document match callback. +// The application should take a copy of the hit/documentMatch +// if it wish to own it or need prolonged access to it. +type DocumentMatchHandler func(hit *DocumentMatch) error + +type MakeDocumentMatchHandlerKeyType string + +var MakeDocumentMatchHandlerKey = MakeDocumentMatchHandlerKeyType( + "MakeDocumentMatchHandlerKey") + +// MakeDocumentMatchHandler is an optional DocumentMatchHandler +// builder function which the applications can pass to bleve. +// These builder methods gives a DocumentMatchHandler function +// to bleve, which it will invoke on every document matches. 
+type MakeDocumentMatchHandler func(ctx *SearchContext) ( + callback DocumentMatchHandler, loadID bool, err error) diff --git a/vendor/github.com/blevesearch/bleve/search/collector/heap.go b/vendor/github.com/blevesearch/bleve/search/collector/heap.go new file mode 100644 index 0000000..05502d5 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/collector/heap.go @@ -0,0 +1,95 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package collector + +import ( + "container/heap" + + "github.com/blevesearch/bleve/search" +) + +type collectStoreHeap struct { + heap search.DocumentMatchCollection + compare collectorCompare +} + +func newStoreHeap(capacity int, compare collectorCompare) *collectStoreHeap { + rv := &collectStoreHeap{ + heap: make(search.DocumentMatchCollection, 0, capacity), + compare: compare, + } + heap.Init(rv) + return rv +} + +func (c *collectStoreHeap) AddNotExceedingSize(doc *search.DocumentMatch, + size int) *search.DocumentMatch { + c.add(doc) + if c.Len() > size { + return c.removeLast() + } + return nil +} + +func (c *collectStoreHeap) add(doc *search.DocumentMatch) { + heap.Push(c, doc) +} + +func (c *collectStoreHeap) removeLast() *search.DocumentMatch { + return heap.Pop(c).(*search.DocumentMatch) +} + +func (c *collectStoreHeap) Final(skip int, fixup collectorFixup) (search.DocumentMatchCollection, error) { + count := c.Len() + size := count - skip + if size <= 0 { + return 
make(search.DocumentMatchCollection, 0), nil + } + rv := make(search.DocumentMatchCollection, size) + for i := size - 1; i >= 0; i-- { + doc := heap.Pop(c).(*search.DocumentMatch) + rv[i] = doc + err := fixup(doc) + if err != nil { + return nil, err + } + } + return rv, nil +} + +// heap interface implementation + +func (c *collectStoreHeap) Len() int { + return len(c.heap) +} + +func (c *collectStoreHeap) Less(i, j int) bool { + so := c.compare(c.heap[i], c.heap[j]) + return -so < 0 +} + +func (c *collectStoreHeap) Swap(i, j int) { + c.heap[i], c.heap[j] = c.heap[j], c.heap[i] +} + +func (c *collectStoreHeap) Push(x interface{}) { + c.heap = append(c.heap, x.(*search.DocumentMatch)) +} + +func (c *collectStoreHeap) Pop() interface{} { + var rv *search.DocumentMatch + rv, c.heap = c.heap[len(c.heap)-1], c.heap[:len(c.heap)-1] + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/collector/list.go b/vendor/github.com/blevesearch/bleve/search/collector/list.go new file mode 100644 index 0000000..f01d205 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/collector/list.go @@ -0,0 +1,86 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package collector + +import ( + "container/list" + + "github.com/blevesearch/bleve/search" +) + +type collectStoreList struct { + results *list.List + compare collectorCompare +} + +func newStoreList(capacity int, compare collectorCompare) *collectStoreList { + rv := &collectStoreList{ + results: list.New(), + compare: compare, + } + + return rv +} + +func (c *collectStoreList) AddNotExceedingSize(doc *search.DocumentMatch, size int) *search.DocumentMatch { + c.add(doc) + if c.len() > size { + return c.removeLast() + } + return nil +} + +func (c *collectStoreList) add(doc *search.DocumentMatch) { + for e := c.results.Front(); e != nil; e = e.Next() { + curr := e.Value.(*search.DocumentMatch) + if c.compare(doc, curr) >= 0 { + c.results.InsertBefore(doc, e) + return + } + } + // if we got to the end, we still have to add it + c.results.PushBack(doc) +} + +func (c *collectStoreList) removeLast() *search.DocumentMatch { + return c.results.Remove(c.results.Front()).(*search.DocumentMatch) +} + +func (c *collectStoreList) Final(skip int, fixup collectorFixup) (search.DocumentMatchCollection, error) { + if c.results.Len()-skip > 0 { + rv := make(search.DocumentMatchCollection, c.results.Len()-skip) + i := 0 + skipped := 0 + for e := c.results.Back(); e != nil; e = e.Prev() { + if skipped < skip { + skipped++ + continue + } + + rv[i] = e.Value.(*search.DocumentMatch) + err := fixup(rv[i]) + if err != nil { + return nil, err + } + i++ + } + return rv, nil + } + return search.DocumentMatchCollection{}, nil +} + +func (c *collectStoreList) len() int { + return c.results.Len() +} diff --git a/vendor/github.com/blevesearch/bleve/search/collector/slice.go b/vendor/github.com/blevesearch/bleve/search/collector/slice.go new file mode 100644 index 0000000..85fe73c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/collector/slice.go @@ -0,0 +1,77 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package collector + +import "github.com/blevesearch/bleve/search" + +type collectStoreSlice struct { + slice search.DocumentMatchCollection + compare collectorCompare +} + +func newStoreSlice(capacity int, compare collectorCompare) *collectStoreSlice { + rv := &collectStoreSlice{ + slice: make(search.DocumentMatchCollection, 0, capacity), + compare: compare, + } + return rv +} + +func (c *collectStoreSlice) AddNotExceedingSize(doc *search.DocumentMatch, + size int) *search.DocumentMatch { + c.add(doc) + if c.len() > size { + return c.removeLast() + } + return nil +} + +func (c *collectStoreSlice) add(doc *search.DocumentMatch) { + // find where to insert, starting at end (lowest) + i := len(c.slice) + for ; i > 0; i-- { + cmp := c.compare(doc, c.slice[i-1]) + if cmp >= 0 { + break + } + } + // insert at i + c.slice = append(c.slice, nil) + copy(c.slice[i+1:], c.slice[i:]) + c.slice[i] = doc +} + +func (c *collectStoreSlice) removeLast() *search.DocumentMatch { + var rv *search.DocumentMatch + rv, c.slice = c.slice[len(c.slice)-1], c.slice[:len(c.slice)-1] + return rv +} + +func (c *collectStoreSlice) Final(skip int, fixup collectorFixup) (search.DocumentMatchCollection, error) { + for i := skip; i < len(c.slice); i++ { + err := fixup(c.slice[i]) + if err != nil { + return nil, err + } + } + if skip <= len(c.slice) { + return c.slice[skip:], nil + } + return search.DocumentMatchCollection{}, nil +} + +func 
(c *collectStoreSlice) len() int { + return len(c.slice) +} diff --git a/vendor/github.com/blevesearch/bleve/search/collector/topn.go b/vendor/github.com/blevesearch/bleve/search/collector/topn.go new file mode 100644 index 0000000..8d4afb6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/collector/topn.go @@ -0,0 +1,412 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package collector + +import ( + "context" + "reflect" + "strconv" + "time" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTopNCollector int + +func init() { + var coll TopNCollector + reflectStaticSizeTopNCollector = int(reflect.TypeOf(coll).Size()) +} + +type collectorStore interface { + // Add the document, and if the new store size exceeds the provided size + // the last element is removed and returned. If the size has not been + // exceeded, nil is returned. 
+ AddNotExceedingSize(doc *search.DocumentMatch, size int) *search.DocumentMatch + + Final(skip int, fixup collectorFixup) (search.DocumentMatchCollection, error) +} + +// PreAllocSizeSkipCap will cap preallocation to this amount when +// size+skip exceeds this value +var PreAllocSizeSkipCap = 1000 + +type collectorCompare func(i, j *search.DocumentMatch) int + +type collectorFixup func(d *search.DocumentMatch) error + +// TopNCollector collects the top N hits, optionally skipping some results +type TopNCollector struct { + size int + skip int + total uint64 + maxScore float64 + took time.Duration + sort search.SortOrder + results search.DocumentMatchCollection + facetsBuilder *search.FacetsBuilder + + store collectorStore + + needDocIds bool + neededFields []string + cachedScoring []bool + cachedDesc []bool + + lowestMatchOutsideResults *search.DocumentMatch + updateFieldVisitor index.DocumentFieldTermVisitor + dvReader index.DocValueReader + searchAfter *search.DocumentMatch +} + +// CheckDoneEvery controls how frequently we check the context deadline +const CheckDoneEvery = uint64(1024) + +// NewTopNCollector builds a collector to find the top 'size' hits +// skipping over the first 'skip' hits +// ordering hits by the provided sort order +func NewTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector { + return newTopNCollector(size, skip, sort) +} + +// NewTopNCollector builds a collector to find the top 'size' hits +// skipping over the first 'skip' hits +// ordering hits by the provided sort order +func NewTopNCollectorAfter(size int, sort search.SortOrder, after []string) *TopNCollector { + rv := newTopNCollector(size, 0, sort) + rv.searchAfter = &search.DocumentMatch{ + Sort: after, + } + + for pos, ss := range sort { + if ss.RequiresDocID() { + rv.searchAfter.ID = after[pos] + } + if ss.RequiresScoring() { + if score, err := strconv.ParseFloat(after[pos], 64); err == nil { + rv.searchAfter.Score = score + } + } + } + + return rv +} + 
+func newTopNCollector(size int, skip int, sort search.SortOrder) *TopNCollector { + hc := &TopNCollector{size: size, skip: skip, sort: sort} + + // pre-allocate space on the store to avoid reslicing + // unless the size + skip is too large, then cap it + // everything should still work, just reslices as necessary + backingSize := size + skip + 1 + if size+skip > PreAllocSizeSkipCap { + backingSize = PreAllocSizeSkipCap + 1 + } + + if size+skip > 10 { + hc.store = newStoreHeap(backingSize, func(i, j *search.DocumentMatch) int { + return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) + }) + } else { + hc.store = newStoreSlice(backingSize, func(i, j *search.DocumentMatch) int { + return hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, i, j) + }) + } + + // these lookups traverse an interface, so do once up-front + if sort.RequiresDocID() { + hc.needDocIds = true + } + hc.neededFields = sort.RequiredFields() + hc.cachedScoring = sort.CacheIsScore() + hc.cachedDesc = sort.CacheDescending() + + return hc +} + +func (hc *TopNCollector) Size() int { + sizeInBytes := reflectStaticSizeTopNCollector + size.SizeOfPtr + + if hc.facetsBuilder != nil { + sizeInBytes += hc.facetsBuilder.Size() + } + + for _, entry := range hc.neededFields { + sizeInBytes += len(entry) + size.SizeOfString + } + + sizeInBytes += len(hc.cachedScoring) + len(hc.cachedDesc) + + return sizeInBytes +} + +// Collect goes to the index to find the matching documents +func (hc *TopNCollector) Collect(ctx context.Context, searcher search.Searcher, reader index.IndexReader) error { + startTime := time.Now() + var err error + var next *search.DocumentMatch + + // pre-allocate enough space in the DocumentMatchPool + // unless the size + skip is too large, then cap it + // everything should still work, just allocates DocumentMatches on demand + backingSize := hc.size + hc.skip + 1 + if hc.size+hc.skip > PreAllocSizeSkipCap { + backingSize = PreAllocSizeSkipCap + 1 + } + searchContext := 
&search.SearchContext{ + DocumentMatchPool: search.NewDocumentMatchPool(backingSize+searcher.DocumentMatchPoolSize(), len(hc.sort)), + Collector: hc, + IndexReader: reader, + } + + hc.dvReader, err = reader.DocValueReader(hc.neededFields) + if err != nil { + return err + } + + hc.updateFieldVisitor = func(field string, term []byte) { + if hc.facetsBuilder != nil { + hc.facetsBuilder.UpdateVisitor(field, term) + } + hc.sort.UpdateVisitor(field, term) + } + + dmHandlerMaker := MakeTopNDocumentMatchHandler + if cv := ctx.Value(search.MakeDocumentMatchHandlerKey); cv != nil { + dmHandlerMaker = cv.(search.MakeDocumentMatchHandler) + } + // use the application given builder for making the custom document match + // handler and perform callbacks/invocations on the newly made handler. + dmHandler, loadID, err := dmHandlerMaker(searchContext) + if err != nil { + return err + } + + hc.needDocIds = hc.needDocIds || loadID + + select { + case <-ctx.Done(): + return ctx.Err() + default: + next, err = searcher.Next(searchContext) + } + for err == nil && next != nil { + if hc.total%CheckDoneEvery == 0 { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + } + + err = hc.prepareDocumentMatch(searchContext, reader, next) + if err != nil { + break + } + + err = dmHandler(next) + if err != nil { + break + } + + next, err = searcher.Next(searchContext) + } + + // help finalize/flush the results in case + // of custom document match handlers. 
+ err = dmHandler(nil) + if err != nil { + return err + } + + // compute search duration + hc.took = time.Since(startTime) + if err != nil { + return err + } + // finalize actual results + err = hc.finalizeResults(reader) + if err != nil { + return err + } + return nil +} + +var sortByScoreOpt = []string{"_score"} + +func (hc *TopNCollector) prepareDocumentMatch(ctx *search.SearchContext, + reader index.IndexReader, d *search.DocumentMatch) (err error) { + + // visit field terms for features that require it (sort, facets) + if len(hc.neededFields) > 0 { + err = hc.visitFieldTerms(reader, d) + if err != nil { + return err + } + } + + // increment total hits + hc.total++ + d.HitNumber = hc.total + + // update max score + if d.Score > hc.maxScore { + hc.maxScore = d.Score + } + + // see if we need to load ID (at this early stage, for example to sort on it) + if hc.needDocIds { + d.ID, err = reader.ExternalID(d.IndexInternalID) + if err != nil { + return err + } + } + + // compute this hits sort value + if len(hc.sort) == 1 && hc.cachedScoring[0] { + d.Sort = sortByScoreOpt + } else { + hc.sort.Value(d) + } + + return nil +} + +func MakeTopNDocumentMatchHandler( + ctx *search.SearchContext) (search.DocumentMatchHandler, bool, error) { + var hc *TopNCollector + var ok bool + if hc, ok = ctx.Collector.(*TopNCollector); ok { + return func(d *search.DocumentMatch) error { + if d == nil { + return nil + } + + // support search after based pagination, + // if this hit is <= the search after sort key + // we should skip it + if hc.searchAfter != nil { + // exact sort order matches use hit number to break tie + // but we want to allow for exact match, so we pretend + hc.searchAfter.HitNumber = d.HitNumber + if hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, d, hc.searchAfter) <= 0 { + return nil + } + } + + // optimization, we track lowest sorting hit already removed from heap + // with this one comparison, we can avoid all heap operations if + // this hit would have been 
added and then immediately removed + if hc.lowestMatchOutsideResults != nil { + cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, d, + hc.lowestMatchOutsideResults) + if cmp >= 0 { + // this hit can't possibly be in the result set, so avoid heap ops + ctx.DocumentMatchPool.Put(d) + return nil + } + } + + removed := hc.store.AddNotExceedingSize(d, hc.size+hc.skip) + if removed != nil { + if hc.lowestMatchOutsideResults == nil { + hc.lowestMatchOutsideResults = removed + } else { + cmp := hc.sort.Compare(hc.cachedScoring, hc.cachedDesc, + removed, hc.lowestMatchOutsideResults) + if cmp < 0 { + tmp := hc.lowestMatchOutsideResults + hc.lowestMatchOutsideResults = removed + ctx.DocumentMatchPool.Put(tmp) + } + } + } + return nil + }, false, nil + } + return nil, false, nil +} + +// visitFieldTerms is responsible for visiting the field terms of the +// search hit, and passing visited terms to the sort and facet builder +func (hc *TopNCollector) visitFieldTerms(reader index.IndexReader, d *search.DocumentMatch) error { + if hc.facetsBuilder != nil { + hc.facetsBuilder.StartDoc() + } + + err := hc.dvReader.VisitDocValues(d.IndexInternalID, hc.updateFieldVisitor) + if hc.facetsBuilder != nil { + hc.facetsBuilder.EndDoc() + } + + return err +} + +// SetFacetsBuilder registers a facet builder for this collector +func (hc *TopNCollector) SetFacetsBuilder(facetsBuilder *search.FacetsBuilder) { + hc.facetsBuilder = facetsBuilder + hc.neededFields = append(hc.neededFields, hc.facetsBuilder.RequiredFields()...) 
+} + +// finalizeResults starts with the heap containing the final top size+skip +// it now throws away the results to be skipped +// and does final doc id lookup (if necessary) +func (hc *TopNCollector) finalizeResults(r index.IndexReader) error { + var err error + hc.results, err = hc.store.Final(hc.skip, func(doc *search.DocumentMatch) error { + if doc.ID == "" { + // look up the id since we need it for lookup + var err error + doc.ID, err = r.ExternalID(doc.IndexInternalID) + if err != nil { + return err + } + } + doc.Complete(nil) + return nil + }) + + return err +} + +// Results returns the collected hits +func (hc *TopNCollector) Results() search.DocumentMatchCollection { + return hc.results +} + +// Total returns the total number of hits +func (hc *TopNCollector) Total() uint64 { + return hc.total +} + +// MaxScore returns the maximum score seen across all the hits +func (hc *TopNCollector) MaxScore() float64 { + return hc.maxScore +} + +// Took returns the time spent collecting hits +func (hc *TopNCollector) Took() time.Duration { + return hc.took +} + +// FacetResults returns the computed facets results +func (hc *TopNCollector) FacetResults() search.FacetResults { + if hc.facetsBuilder != nil { + return hc.facetsBuilder.Results() + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/explanation.go b/vendor/github.com/blevesearch/bleve/search/explanation.go new file mode 100644 index 0000000..3b81737 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/explanation.go @@ -0,0 +1,55 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package search + +import ( + "encoding/json" + "fmt" + "reflect" + + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeExplanation int + +func init() { + var e Explanation + reflectStaticSizeExplanation = int(reflect.TypeOf(e).Size()) +} + +type Explanation struct { + Value float64 `json:"value"` + Message string `json:"message"` + Children []*Explanation `json:"children,omitempty"` +} + +func (expl *Explanation) String() string { + js, err := json.MarshalIndent(expl, "", " ") + if err != nil { + return fmt.Sprintf("error serializing explanation to json: %v", err) + } + return string(js) +} + +func (expl *Explanation) Size() int { + sizeInBytes := reflectStaticSizeExplanation + size.SizeOfPtr + + len(expl.Message) + + for _, entry := range expl.Children { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} diff --git a/vendor/github.com/blevesearch/bleve/search/facet/benchmark_data.txt b/vendor/github.com/blevesearch/bleve/search/facet/benchmark_data.txt new file mode 100644 index 0000000..b012f78 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/facet/benchmark_data.txt @@ -0,0 +1,2909 @@ +Boiling liquid expanding vapor explosion +From Wikipedia, the free encyclopedia +See also: Boiler explosion and Steam explosion + +Flames subsequent to a flammable liquid BLEVE from a tanker. BLEVEs do not necessarily involve fire. + +This article's tone or style may not reflect the encyclopedic tone used on Wikipedia. See Wikipedia's guide to writing better articles for suggestions. 
(July 2013) +A boiling liquid expanding vapor explosion (BLEVE, /ˈblÉ›viË/ blev-ee) is an explosion caused by the rupture of a vessel containing a pressurized liquid above its boiling point.[1] +Contents [hide] +1 Mechanism +1.1 Water example +1.2 BLEVEs without chemical reactions +2 Fires +3 Incidents +4 Safety measures +5 See also +6 References +7 External links +Mechanism[edit] + +This section needs additional citations for verification. Please help improve this article by adding citations to reliable sources. Unsourced material may be challenged and removed. (July 2013) +There are three characteristics of liquids which are relevant to the discussion of a BLEVE: +If a liquid in a sealed container is boiled, the pressure inside the container increases. As the liquid changes to a gas it expands - this expansion in a vented container would cause the gas and liquid to take up more space. In a sealed container the gas and liquid are not able to take up more space and so the pressure rises. Pressurized vessels containing liquids can reach an equilibrium where the liquid stops boiling and the pressure stops rising. This occurs when no more heat is being added to the system (either because it has reached ambient temperature or has had a heat source removed). +The boiling temperature of a liquid is dependent on pressure - high pressures will yield high boiling temperatures, and low pressures will yield low boiling temperatures. A common simple experiment is to place a cup of water in a vacuum chamber, and then reduce the pressure in the chamber until the water boils. By reducing the pressure the water will boil even at room temperature. This works both ways - if the pressure is increased beyond normal atmospheric pressures, the boiling of hot water could be suppressed far beyond normal temperatures. The cooling system of a modern internal combustion engine is a real-world example. +When a liquid boils it turns into a gas. 
The resulting gas takes up far more space than the liquid did. +Typically, a BLEVE starts with a container of liquid which is held above its normal, atmospheric-pressure boiling temperature. Many substances normally stored as liquids, such as CO2, oxygen, and other similar industrial gases have boiling temperatures, at atmospheric pressure, far below room temperature. In the case of water, a BLEVE could occur if a pressurized chamber of water is heated far beyond the standard 100 °C (212 °F). That container, because the boiling water pressurizes it, is capable of holding liquid water at very high temperatures. +If the pressurized vessel, containing liquid at high temperature (which may be room temperature, depending on the substance) ruptures, the pressure which prevents the liquid from boiling is lost. If the rupture is catastrophic, where the vessel is immediately incapable of holding any pressure at all, then there suddenly exists a large mass of liquid which is at very high temperature and very low pressure. This causes the entire volume of liquid to instantaneously boil, which in turn causes an extremely rapid expansion. Depending on temperatures, pressures and the substance involved, that expansion may be so rapid that it can be classified as an explosion, fully capable of inflicting severe damage on its surroundings. +Water example[edit] +Imagine, for example, a tank of pressurized liquid water held at 204.4 °C (400 °F). This vessel would normally be pressurized to 1.7 MPa (250 psi) above atmospheric ("gauge") pressure. Were the tank containing the water to split open, there would momentarily exist a volume of liquid water which is +at atmospheric pressure, and +204.4 °C (400 °F). +At atmospheric pressure the boiling point of water is 100 °C (212 °F) - liquid water at atmospheric pressure cannot exist at temperatures higher than 100 °C (212 °F). 
It is obvious, then, that 204.4 °C (400 °F) liquid water at atmospheric pressure must immediately flash to gas causing an explosion. +BLEVEs without chemical reactions[edit] +It is important to note that a BLEVE need not be a chemical explosion - nor does there need to be a fire - however if a flammable substance is subject to a BLEVE it may also be subject to intense heating, either from an external source of heat which may have caused the vessel to rupture in the first place or from an internal source of localized heating such as skin friction. This heating can cause a flammable substance to ignite, adding a secondary explosion caused by the primary BLEVE. While blast effects of any BLEVE can be devastating, a flammable substance such as propane can add significantly to the danger. +Bleve explosion.svg +While the term BLEVE is most often used to describe the results of a container of flammable liquid rupturing due to fire, a BLEVE can occur even with a non-flammable substance such as water,[2] liquid nitrogen,[3] liquid helium or other refrigerants or cryogens, and therefore is not usually considered a type of chemical explosion. +Fires[edit] +BLEVEs can be caused by an external fire near the storage vessel causing heating of the contents and pressure build-up. While tanks are often designed to withstand great pressure, constant heating can cause the metal to weaken and eventually fail. If the tank is being heated in an area where there is no liquid, it may rupture faster without the liquid to absorb the heat. Gas containers are usually equipped with relief valves that vent off excess pressure, but the tank can still fail if the pressure is not released quickly enough.[1] Relief valves are sized to release pressure fast enough to prevent the pressure from increasing beyond the strength of the vessel, but not so fast as to be the cause of an explosion. 
An appropriately sized relief valve will allow the liquid inside to boil slowly, maintaining a constant pressure in the vessel until all the liquid has boiled and the vessel empties. +If the substance involved is flammable, it is likely that the resulting cloud of the substance will ignite after the BLEVE has occurred, forming a fireball and possibly a fuel-air explosion, also termed a vapor cloud explosion (VCE). If the materials are toxic, a large area will be contaminated.[4] +Incidents[edit] +The term "BLEVE" was coined by three researchers at Factory Mutual, in the analysis of an accident there in 1957 involving a chemical reactor vessel.[5] +In August 1959 the Kansas City Fire Department suffered its largest ever loss of life in the line of duty, when a 25,000 gallon (95,000 litre) gas tank exploded during a fire on Southwest Boulevard killing five firefighters. This was the first time BLEVE was used to describe a burning fuel tank.[citation needed] +Later incidents included the Cheapside Street Whisky Bond Fire in Glasgow, Scotland in 1960; Feyzin, France in 1966; Crescent City, Illinois in 1970; Kingman, Arizona in 1973; a liquid nitrogen tank rupture[6] at Air Products and Chemicals and Mobay Chemical Company at New Martinsville, West Virginia on January 31, 1978 [1];Texas City, Texas in 1978; Murdock, Illinois in 1983; San Juan Ixhuatepec, Mexico City in 1984; and Toronto, Ontario in 2008. +Safety measures[edit] +[icon] This section requires expansion. (July 2013) +Some fire mitigation measures are listed under liquefied petroleum gas. +See also[edit] +Boiler explosion +Expansion ratio +Explosive boiling or phase explosion +Rapid phase transition +Viareggio train derailment +2008 Toronto explosions +Gas carriers +Los Alfaques Disaster +Lac-Mégantic derailment +References[edit] +^ Jump up to: a b Kletz, Trevor (March 1990). Critical Aspects of Safety and Loss Prevention. London: Butterworth–Heinemann. pp. 43–45. ISBN 0-408-04429-2. 
+Jump up ^ "Temperature Pressure Relief Valves on Water Heaters: test, inspect, replace, repair guide". Inspect-ny.com. Retrieved 2011-07-12. +Jump up ^ Liquid nitrogen BLEVE demo +Jump up ^ "Chemical Process Safety" (PDF). Retrieved 2011-07-12. +Jump up ^ David F. Peterson, BLEVE: Facts, Risk Factors, and Fallacies, Fire Engineering magazine (2002). +Jump up ^ "STATE EX REL. VAPOR CORP. v. NARICK". Supreme Court of Appeals of West Virginia. 1984-07-12. Retrieved 2014-03-16. +External links[edit] + Look up boiling liquid expanding vapor explosion in Wiktionary, the free dictionary. + Wikimedia Commons has media related to BLEVE. +BLEVE Demo on YouTube — video of a controlled BLEVE demo +huge explosions on YouTube — video of propane and isobutane BLEVEs from a train derailment at Murdock, Illinois (3 September 1983) +Propane BLEVE on YouTube — video of BLEVE from the Toronto propane depot fire +Moscow Ring Road Accident on YouTube - Dozens of LPG tank BLEVEs after a road accident in Moscow +Kingman, AZ BLEVE — An account of the 5 July 1973 explosion in Kingman, with photographs +Propane Tank Explosions — Description of circumstances required to cause a propane tank BLEVE. +Analysis of BLEVE Events at DOE Sites - Details physics and mathematics of BLEVEs. +HID - SAFETY REPORT ASSESSMENT GUIDE: Whisky Maturation Warehouses - The liquor is aged in wooden barrels that can suffer BLEVE. 
+Categories: ExplosivesFirefightingFireTypes of fireGas technologiesIndustrial fires and explosions +Navigation menu +Create accountLog inArticleTalkReadEditView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +Català +Deutsch +Español +Français +Italiano +עברית +Nederlands +日本語 +Norsk bokmÃ¥l +Polski +Português +РуÑÑкий +Suomi +Edit links +This page was last modified on 18 November 2014 at 01:35. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki + + +Thermobaric weapon +From Wikipedia, the free encyclopedia + +Blast from a US Navy fuel air explosive used against a decommissioned ship, USS McNulty, 1972. +A thermobaric weapon is a type of explosive that utilizes oxygen from the surrounding air to generate an intense, high-temperature explosion, and in practice the blast wave such a weapon produces is typically significantly longer in duration than a conventional condensed explosive. The fuel-air bomb is one of the most well-known types of thermobaric weapons. +Most conventional explosives consist of a fuel-oxidizer premix (gunpowder, for example, contains 25% fuel and 75% oxidizer), whereas thermobaric weapons are almost 100% fuel, so thermobaric weapons are significantly more energetic than conventional condensed explosives of equal weight. 
Their reliance on atmospheric oxygen makes them unsuitable for use underwater, at high altitude, and in adverse weather. They do, however, cause considerably more destruction when used inside confined environments such as tunnels, caves, and bunkers - partly due to the sustained blast wave, and partly by consuming the available oxygen inside those confined spaces. +There are many different types of thermobaric weapons rounds that can be fitted to hand-held launchers.[1] +Contents [hide] +1 Terminology +2 Mechanism +2.1 Fuel-air explosive +2.1.1 Effect +3 Development history +3.1 Soviet and Russian developments +3.2 US developments +4 History +4.1 Military use +4.2 Non-military use +5 See also +6 References +7 External links +Terminology[edit] +The term thermobaric is derived from the Greek words for "heat" and "pressure": thermobarikos (θεÏμοβαÏικός), from thermos (θεÏμός), hot + baros (βάÏος), weight, pressure + suffix -ikos (-ικός), suffix -ic. +Other terms used for this family of weapons are high-impulse thermobaric weapons (HITs), heat and pressure weapons, vacuum bombs, or fuel-air explosives (FAE or FAX). 
+Mechanism[edit] +In contrast to condensed explosive, where oxidation in a confined region produces a blast front from essentially a point source, a flame front accelerates to a large volume producing pressure fronts both within the mixture of fuel and oxidant and then in the surrounding air.[2] +Thermobaric explosives apply the principles underlying accidental unconfined vapor cloud explosions, which include those from dispersions of flammable dusts and droplets.[3] Previously, such explosions were most often encountered in flour mills and their storage containers, and later in coal mines; but, now, most commonly in discharged oil tankers and refineries, including an incident at Buncefield in the UK in 2005 where the blast wave woke people 150 kilometres (93 mi) from its centre.[4] +A typical weapon consists of a container packed with a fuel substance, in the center of which is a small conventional-explosive "scatter charge". Fuels are chosen on the basis of the exothermicity of their oxidation, ranging from powdered metals, such as aluminium or magnesium, to organic materials, possibly with a self-contained partial oxidant. The most recent development involves the use of nanofuels.[5][6] +A thermobaric bomb's effective yield requires the most appropriate combination of a number of factors; among these are how well the fuel is dispersed, how rapidly it mixes with the surrounding atmosphere, and the initiation of the igniter and its position relative to the container of fuel. In some designs, strong munitions cases allow the blast pressure to be contained long enough for the fuel to be heated up well above its auto-ignition temperature, so that once the container bursts the super-heated fuel will auto-ignite progressively as it comes into contact with atmospheric oxygen.[7][8][9][10][11][12][13][14][15][16][17] +Conventional upper and lower limits of flammability apply to such weapons. 
Close in, blast from the dispersal charge, compressing and heating the surrounding atmosphere, will have some influence on the lower limit. The upper limit has been demonstrated strongly to influence the ignition of fogs above pools of oil.[18] This weakness may be eliminated by designs where the fuel is preheated well above its ignition temperature, so that its cooling during its dispersion still results in a minimal ignition delay on mixing. The continual combustion of the outer layer of fuel molecules as they come into contact with the air, generates additional heat which maintains the temperature of the interior of the fireball, and thus sustains the detonation.[19][20][21] +In confinement, a series of reflective shock waves are generated,[22][23] which maintain the fireball and can extend its duration to between 10 and 50 ms as exothermic recombination reactions occur.[24] Further damage can result as the gases cool and pressure drops sharply, leading to a partial vacuum. This effect has given rise to the misnomer "vacuum bomb". Piston-type afterburning is also believed to occur in such structures, as flame-fronts accelerate through it.[25][26] +Fuel-air explosive[edit] +A fuel-air explosive (FAE) device consists of a container of fuel and two separate explosive charges. After the munition is dropped or fired, the first explosive charge bursts open the container at a predetermined height and disperses the fuel in a cloud that mixes with atmospheric oxygen (the size of the cloud varies with the size of the munition). The cloud of fuel flows around objects and into structures. The second charge then detonates the cloud, creating a massive blast wave. The blast wave destroys unreinforced buildings and equipment and kills and injures people. The antipersonnel effect of the blast wave is more severe in foxholes, on people with body armor, and in enclosed spaces such as caves, buildings, and bunkers. 
+Fuel-air explosives were first developed, and used in Vietnam, by the United States. Soviet scientists, however, quickly developed their own FAE weapons, which were reportedly used against China in the Sino-Soviet border conflict and in Afghanistan. Since then, research and development has continued and currently Russian forces field a wide array of third-generation FAE warheads. +Effect[edit] +A Human Rights Watch report of 1 February 2000[27] quotes a study made by the US Defense Intelligence Agency: +The [blast] kill mechanism against living targets is unique–and unpleasant.... What kills is the pressure wave, and more importantly, the subsequent rarefaction [vacuum], which ruptures the lungs.... If the fuel deflagrates but does not detonate, victims will be severely burned and will probably also inhale the burning fuel. Since the most common FAE fuels, ethylene oxide and propylene oxide, are highly toxic, undetonated FAE should prove as lethal to personnel caught within the cloud as most chemical agents. +According to a U.S. Central Intelligence Agency study,[27] "the effect of an FAE explosion within confined spaces is immense. Those near the ignition point are obliterated. Those at the fringe are likely to suffer many internal, and thus invisible injuries, including burst eardrums and crushed inner ear organs, severe concussions, ruptured lungs and internal organs, and possibly blindness." Another Defense Intelligence Agency document speculates that because the "shock and pressure waves cause minimal damage to brain tissue…it is possible that victims of FAEs are not rendered unconscious by the blast, but instead suffer for several seconds or minutes while they suffocate."[28] +Development history[edit] +Soviet and Russian developments[edit] + +A RPO-A rocket and launcher. 
+The Soviet armed forces extensively developed FAE weapons,[29] such as the RPO-A, and used them in Chechnya.[30] +The Russian armed forces have developed thermobaric ammunition variants for several of their weapons, such as the TGB-7V thermobaric grenade with a lethality radius of 10 metres (33 ft), which can be launched from a RPG-7. The GM-94 is a 43 mm pump-action grenade launcher which is designed mainly to fire thermobaric grenades for close quarters combat. With the grenade weighing 250 grams (8.8 oz) and holding a 160 grams (5.6 oz) explosive mixture, its lethality radius is 3 metres (9.8 ft); however, due to the deliberate "fragmentation-free" design of the grenade, 4 metres (13 ft) is already considered a safe distance.[31] The RPO-A and upgraded RPO-M are infantry-portable RPGs designed to fire thermobaric rockets. The RPO-M, for instance, has a thermobaric warhead with a TNT equivalence of 5.5 kilograms (12 lb) of TNT and destructive capabilities similar to a 152 mm High explosive fragmentation artillery shell.[32][33] The RShG-1 and the RShG-2 are thermobaric variants of the RPG-27 and RPG-26 respectively. 
The RShG-1 is the more powerful variant, with its warhead having a 10 metres (33 ft) lethality radius and producing about the same effect as 6 kg (13 lb) of TNT.[34] The RMG is a further derivative of the RPG-26 that uses a tandem-charge warhead, whereby the precursor HEAT warhead blasts an opening for the main thermobaric charge to enter and detonate inside.[35] The RMG's precursor HEAT warhead can penetrate 300 mm of reinforced concrete or over 100 mm of Rolled homogeneous armour, thus allowing the 105 millimetres (4.1 in) diameter thermobaric warhead to detonate inside.[36] +The other examples include the SACLOS or millimeter wave radar-guided thermobaric variants of the 9M123 Khrizantema, the 9M133F-1 thermobaric warhead variant of the 9M133 Kornet, and the 9M131F thermobaric warhead variant of the 9K115-2 Metis-M, all of which are anti-tank missiles. The Kornet has since been upgraded to the Kornet-EM, and its thermobaric variant has a maximum range of 10 kilometres (6.2 mi) and has the TNT equivalent of 7 kilograms (15 lb) of TNT.[37] The 300 mm 9M55S thermobaric cluster warhead rocket was built to be fired from the BM-30 Smerch MLRS. A dedicated carrier of thermobaric weapons is the purpose-built TOS-1, a 24-tube MLRS designed to fire 220 mm caliber thermobaric rockets. A full salvo from the TOS-1 will cover a rectangle 200x400 metres.[38] The Iskander-M theatre ballistic missile can also carry a 700 kilograms (1,500 lb) thermobaric warhead.[39] + +The fireball blast from the Russian Air Force's FOAB, the largest Thermobaric device to be detonated. +Many Russian Air Force munitions also have thermobaric variants. The 80 mm S-8 rocket has the S-8DM and S-8DF thermobaric variants. The S-8's larger 122 mm brother, the S-13 rocket, has the S-13D and S-13DF thermobaric variants. The S-13DF's warhead weighs only 32 kg (71 lb) but its power is equivalent to 40 kg (88 lb) of TNT. The KAB-500-OD variant of the KAB-500KR has a 250 kg (550 lb) thermobaric warhead. 
The ODAB-500PM and ODAB-500PMV unguided bombs carry a 190 kg (420 lb) fuel-air explosive each. The KAB-1500S GLONASS/GPS guided 1,500 kg (3,300 lb) bomb also has a thermobaric variant. Its fireball will cover over a 150-metre (490 ft) radius and its lethality zone is a 500-metre (1,600 ft) radius.[40] The 9M120 Ataka-V and the 9K114 Shturm ATGMs both have thermobaric variants. +In September 2007 Russia exploded the largest thermobaric weapon ever made. The weapon's yield was reportedly greater than that of the smallest dial-a-yield nuclear weapons at their lowest settings.[41][42] Russia named this particular ordnance the "Father of All Bombs" in response to the United States developed "Massive Ordnance Air Blast" (MOAB) bomb whose backronym is the "Mother of All Bombs", and which previously held the accolade of the most powerful non-nuclear weapon in history.[43] The bomb contains an about 7 tons charge of a liquid fuel such as ethylene oxide, mixed with an energetic nanoparticle such as aluminium, surrounding a high explosive burster[44] that when detonated created an explosion equivalent to 44 metric tons of TNT. +US developments[edit] + +A BLU-72/B bomb on a USAF A-1E taking off from Nakhon Phanom, in September 1968. +Current US FAE munitions include: +BLU-73 FAE I +BLU-95 500-lb (FAE-II) +BLU-96 2,000-lb (FAE-II) +CBU-55 FAE I +CBU-72 FAE I +The XM1060 40-mm grenade is a small-arms thermobaric device, which was delivered to U.S. forces in April 2003.[45] Since the 2003 Invasion of Iraq, the US Marine Corps has introduced a thermobaric 'Novel Explosive' (SMAW-NE) round for the Mk 153 SMAW rocket launcher. One team of Marines reported that they had destroyed a large one-story masonry type building with one round from 100 yards (91 m).[46] +The AGM-114N Hellfire II, first used by U.S. 
forces in 2003 in Iraq, uses a Metal Augmented Charge (MAC) warhead that contains a thermobaric explosive fill using fluoridated aluminium layered between the charge casing and a PBXN-112 explosive mixture. When the PBXN-112 detonates, the aluminium mixture is dispersed and rapidly burns. The resultant sustained high pressure is extremely effective against people and structures.[47] +History[edit] +Military use[edit] + +US Navy BLU-118B being prepared for shipping for use in Afghanistan, 5 March 2002. +The first experiments with thermobaric weapon were conducted in Germany during World War II and were led by Mario Zippermayr. The German bombs used coal dust as fuel and were extensively tested in 1943 and 1944, but did not reach mass production before the war ended. +The TOS-1 system was test fired in Panjshir valley during Soviet war in Afghanistan in the early 1980s.[48] +Unconfirmed reports suggest that Russian military forces used ground delivered thermobaric weapons in the storming of the Russian parliament during the 1993 Russian constitutional crisis and also during the Battle for Grozny (first and second Chechen wars) to attack dug in Chechen fighters. The use of both TOS-1 heavy MLRS and "RPO-A Shmel" shoulder-fired rocket system in the Chechen wars is reported to have occurred.[48][49] +It is theorized that a multitude of hand-held thermobaric weapons were used by the Russian Armed Forces in their efforts to retake the school during the 2004 Beslan school hostage crisis. 
The RPO-A and either the TGB-7V thermobaric rocket from the RPG-7 or rockets from either the RShG-1 or the RShG-2 is claimed to have been used by the Spetsnaz during the initial storming of the school.[50][51][52] At least 3 and as many as 9 RPO-A casings were later found at the positions of the Spetsnaz.[53][54] The Russian Government later admitted to the use of the RPO-A during the crisis.[55] +According to UK Ministry of Defence, British military forces have also used thermobaric weapons in their AGM-114N Hellfire missiles (carried by Apache helicopters and UAVs) against the Taliban in the War in Afghanistan.[56] +The US military also used thermobaric weapons in Afghanistan. On 3 March 2002, a single 2,000 lb (910 kg) laser guided thermobaric bomb was used by the United States Army against cave complexes in which Al-Qaeda and Taliban fighters had taken refuge in the Gardez region of Afghanistan.[57][58] The SMAW-NE was used by the US Marines during the First Battle of Fallujah and Second Battle of Fallujah. 
+Reports by the rebel fighters of the Free Syrian Army claim the Syrian Air Force used such weapons against residential area targets occupied by the rebel fighters, as for instance in the Battle for Aleppo[59] and also in Kafar Batna.[60] A United Nations panel of human rights investigators reported that the Syrian government used thermobaric bombs against the rebellious town of Qusayr in March 2013.[61] +Non-military use[edit] +Thermobaric and fuel-air explosives have been used in guerrilla warfare since the 1983 Beirut barracks bombing in Lebanon, which used a gas-enhanced explosive mechanism, probably propane, butane or acetylene.[62] The explosive used by the bombers in the 1993 World Trade Center bombing incorporated the FAE principle, using three tanks of bottled hydrogen gas to enhance the blast.[63][64] Jemaah Islamiyah bombers used a shock-dispersed solid fuel charge,[65] based on the thermobaric principle,[66] to attack the Sari nightclub in the 2002 Bali bombings.[67] +See also[edit] +Bunker buster +Dust explosion +FOAB +Flame fougasse +MOAB +RPO-A +SMAW +References[edit] +Jump up ^ Algeria Isp (2011-10-18). "Libye – l'Otan utilise une bombe FAE | Politique, Algérie". Algeria ISP. Retrieved 2013-04-23. +Jump up ^ Nettleton, J. Occ. Accidents, 1, 149 (1976). +Jump up ^ Strehlow, 14th. Symp. (Int.) Comb. 1189, Comb. Inst. (1973). +Jump up ^ Health and Safety Environmental Agency, 5th. and final report, 2008. +Jump up ^ See Nanofuel/Oxidizers For Energetic Compositions – John D. Sullivan and Charles N. Kingery (1994) High explosive disseminator for a high explosive air bomb. +Jump up ^ Slavica Terzić, Mirjana Dakić Kolundžija, Milovan Azdejković and Gorgi Minov (2004) Compatibility Of Thermobaric Mixtures Based On Isopropyl Nitrate And Metal Powders. +Jump up ^ Meyer, Rudolf; Josef Köhler and Axel Homburg (2007). Explosives. Weinheim: Wiley-VCH. pp. 312. ISBN 3-527-31656-6. OCLC 165404124. +Jump up ^ Howard C. Hornig (1998) Non-focusing active warhead. 
+Jump up ^ Chris Ludwig (Talley Defense) Verifying Performance of Thermobaric Materials for Small to Medium Caliber Rocket Warheads. +Jump up ^ Martin M.West (1982) Composite high explosives for high energy blast applications. +Jump up ^ Raafat H. Guirguis (2005) Reactively Induced Fragmenting Explosives. +Jump up ^ Michael Dunning, William Andrews and Kevin Jaansalu (2005) The Fragmentation of Metal Cylinders Using Thermobaric Explosives. +Jump up ^ David L. Frost, Fan Zhang, Stephen B. Murray and Susan McCahan Critical Conditions For Ignition Of Metal Particles In A Condensed Explosive. +Jump up ^ The Army Doctrine and Training Bulletin (2001) The Threat from Blast Weapons. +Jump up ^ INTERNATIONAL DEFENCE REVIEW (2004) ENHANCED BLAST AND THERMOBARICS. +Jump up ^ F. Winterberg Conjectured Metastable Super-Explosives formed under High Pressure for Thermonuclear Ignition. +Jump up ^ Zhang, Fan (Medicine Hat, CA) Murray, Stephen Burke (Medicine Hat, CA) Higgins, Andrew (Montreal, CA) (2005) Super compressed detonation method and device to effect such detonation. +Jump up ^ Nettleton, arch. combust.,1,131, (1981). +Jump up ^ Stephen B. Murray Fundamental and Applied Studies of Fuel-Air Detonation. +Jump up ^ John H. Lee (1992) Chemical initiation of detonation in fuel-air explosive clouds. +Jump up ^ Frank E. Lowther (1989) Nuclear-sized explosions without radiation. +Jump up ^ Nettleton, Comb. and Flame, 24,65 (1975). +Jump up ^ Fire Prev. Sci. and Tech. No. 19,4 (1976) +Jump up ^ May L.Chan (2001) Advanced Thermobaric Explosive Compositions. +Jump up ^ New Thermobaric Materials and Weapon Concepts. +Jump up ^ Robert C. Morris (2003) Small Thermobaric Weapons An Unnoticed Threat.[dead link] +^ Jump up to: a b "Backgrounder on Russian Fuel Air Explosives ("Vacuum Bombs") | Human Rights Watch". Hrw.org. 2000-02-01. Retrieved 2013-04-23. 
+Jump up ^ Searching for Traces of "Shmel" in Beslan School, Kommersant, 12 September 2005 
+Jump up ^ David Hambling (2005) "Marines Quiet About Brutal New Weapon" +Jump up ^ John Pike (2001-09-11). "AGM-114N Metal Augmented Charge (MAC) Thermobaric Hellfire". Globalsecurity.org. Retrieved 2011-07-12. +^ Jump up to: a b John Pike. "TOS-1 Buratino 220mm Multiple Rocket Launcher". Globalsecurity.org. Retrieved 2013-04-23. +Jump up ^ "Foreign Military Studies Office Publications - A 'Crushing' Victory: Fuel-Air Explosives and Grozny 2000". Fmso.leavenworth.army.mil. Retrieved 2013-04-23. +Jump up ^ "Russian forces faulted in Beslan school tragedy". Christian Science Monitor. 1 September 2006. Retrieved 14 February 2007. +Jump up ^ Russia: Independent Beslan Investigation Sparks Controversy, The Jamestown Foundation, 29 August 2006 +Jump up ^ Beslan still a raw nerve for Russia, BBC News, 1 September 2006 +Jump up ^ ACHING TO KNOW, Los Angeles Times, 27 August 2005 +Jump up ^ Searching for Traces of “Shmel†in Beslan School, Kommersant, 12 September 2005 +Jump up ^ A Reversal Over Beslan Only Fuels Speculation, The Moscow Times, 21 July 2005 +Jump up ^ "MoD's Controversial Thermobaric Weapons Use in Afghanistan". Armedforces-int.com. 2008-06-23. Retrieved 2013-04-23. +Jump up ^ "US Uses Bunker-Busting 'Thermobaric' Bomb for First Time". Commondreams.org. 2002-03-03. Retrieved 2013-04-23. +Jump up ^ John Pike. "BLU-118/B Thermobaric Weapon Demonstration / Hard Target Defeat Program". Globalsecurity.org. Retrieved 2013-04-23. +Jump up ^ "Syria rebels say Assad using 'mass-killing weapons' in Aleppo". October 10, 2012. Retrieved November 11, 2012. +Jump up ^ "Dropping Thermobaric Bombs on Residential Areas in Syria_ Nov. 5. 2012". First Post. November 11, 2012. Retrieved November 11, 2012. +Jump up ^ Cumming-Bruce, Nick (2013-06-04). "U.N. Panel Reports Increasing Brutality by Both Sides in Syria". The New York Times. +Jump up ^ Richard J. Grunawalt. Hospital Ships In The War On Terror: Sanctuaries or Targets? (PDF), Naval War College Review, Winter 2005, pp. 
110–11. +Jump up ^ Paul Rogers (2000) "Politics in the Next 50 Years: The Changing Nature of International Conflict" +Jump up ^ J. Gilmore Childers, Henry J. DePippo (February 24, 1998). "Senate Judiciary Committee, Subcommittee on Technology, Terrorism, and Government Information hearing on "Foreign Terrorists in America: Five Years After the World Trade Center"". Fas.org. Retrieved 2011-07-12. +Jump up ^ P. Neuwald, H. Reichenbach, A. L. Kuhl (2003). "Shock-Dispersed-Fuel Charges-Combustion in Chambers and Tunnels". +Jump up ^ David Eshel (2006). "Is the world facing Thermobaric Terrorism?".[dead link] +Jump up ^ Wayne Turnbull (2003). "Bali:Preparations". +External links[edit] +Fuel/Air Explosive (FAE) +Thermobaric Explosive (Global Security) +Aspects of thermobaric weaponry (PDF) – Dr. Anna E Wildegger-Gaissmaier, Australian Defence Force Health +Thermobaric warhead for RPG-7 +XM1060 40 mm Thermobaric Grenade (Global Security) +Defense Update: Fuel-Air Explosive Mine Clearing System +Foreign Military Studies Office – A 'Crushing' Victory: Fuel-Air Explosives and Grozny 2000 +Soon to make a comeback in Afghanistan +Russia claims to have tested the most powerful "Vacuum" weapon +Categories: Explosive weaponsAmmunitionThermobaric weaponsAnti-personnel weapons +Navigation menu +Create accountLog inArticleTalkReadEditView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +العربية +БеларуÑÐºÐ°Ñ +БългарÑки +ÄŒeÅ¡tina +Deutsch +Español +Ùارسی +Français +हिनà¥à¤¦à¥€ +Italiano +עברית +LatvieÅ¡u +МакедонÑки +Nederlands +日本語 +Polski +РуÑÑкий +Suomi +Svenska +Türkçe +УкраїнÑька +Tiếng Việt +粵語 +中文 +Edit links +This 
Gunpowder's gravity is 1.70–1.82 (mercury method) or 1.92–2.08 (pycnometer), and it has a pH of 6.0–8.0. 
It is also considered to be an insoluble material.[4] +Gunpowder was, according to prevailing academic consensus, invented in the 9th century in China,[5][6] and the earliest record of a written formula for gunpowder appears in the 11th century Song Dynasty text, Wujing Zongyao.[7] This discovery led to the invention of fireworks and the earliest gunpowder weapons in China. In the centuries following the Chinese discovery, gunpowder weapons began appearing in the Muslim world, Europe, and India. The technology spread from China through the Middle East or Central Asia, and then into Europe.[8] The earliest Western accounts of gunpowder appear in texts written by English philosopher Roger Bacon in the 13th century.[9] +Gunpowder is classified as a low explosive because of its relatively slow decomposition rate and consequently low brisance. Low explosives deflagrate (i.e., burn) at subsonic speeds, whereas high explosives detonate, producing a supersonic wave. Gunpowder's burning rate increases with pressure, so it bursts containers if contained but otherwise just burns in the open. Ignition of the powder packed behind a bullet must generate enough pressure to force it from the muzzle at high speed, but not enough to rupture the gun barrel. Gunpowder thus makes a good propellant, but is less suitable for shattering rock or fortifications. Gunpowder was widely used to fill artillery shells and in mining and civil engineering to blast rock roughly until the second half of the 19th century, when the first high explosives (nitro-explosives) were discovered. Gunpowder is no longer used in modern explosive military warheads, nor is it used as main explosive in mining operations due to its cost relative to that of newer alternatives such as ammonium nitrate/fuel oil (ANFO).[10] Black powder is still used as a delay element in various munitions where its slow-burning properties are valuable. 
Gunpowder was invented in China while Taoists attempted to create a potion of immortality.
+A major problem confronting the study of the early history of gunpowder is ready access to sources close to the events described. Often enough, the first records potentially describing use of gunpowder in warfare were written several centuries after the fact, and may well have been colored by the contemporary experiences of the chronicler.[13] It is also difficult to accurately translate original alchemy texts, especially medieval Chinese texts that try to explain phenomena through metaphor, into modern scientific language with rigidly defined terminology. The translation difficulty has led to errors or loose interpretations bordering on artistic licence.[14][15] Early writings potentially mentioning gunpowder are sometimes marked by a linguistic process where old words acquired new meanings.[16] For instance, the Arabic word naft transitioned from denoting naphtha to denoting gunpowder, and the Chinese word pao evolved from meaning catapult to referring to cannon.[17] According to science and technology historian Bert S. Hall: "It goes without saying, however, that historians bent on special pleading, or simply with axes of their own to grind, can find rich material in these terminological thickets."[18] +China[edit] +Further information: Wujing Zongyao, Four Great Inventions and List of Chinese inventions + +Chinese Ming Dynasty (1368-1644) matchlock firearms +Saltpeter was known to the Chinese by the mid-1st century AD and there is strong evidence of the use of saltpeter and sulfur in various largely medicinal combinations.[19] A Chinese alchemical text dated 492 noted saltpeter burnt with a purple flame, providing a practical and reliable means of distinguishing it from other inorganic salts, thus enabling alchemists to evaluate and compare purification techniques; the earliest Latin accounts of saltpeter purification are dated after 1200.[20] + +Yuan Dynasty bronze hand cannon from 1332 at th (c. 
"Some have heated together sulfur, realgar and saltpeter with honey; smoke and flames result, so that their hands and faces have been burnt, and even the whole house where they were working burned down."[22] The Chinese word for "gunpowder" is Chinese: 火药/火藥; pinyin: huǒ yào, which literally means "Fire Medicine";[23] however this name only came into use some centuries after the mixture's discovery.[24]
The mixture formulas in this book do not contain enough saltpeter to create an explosive however; being limited to at most 50% saltpeter, they produce an incendiary.[29] The Essentials was however written by a Song Dynasty court bureaucrat, and there's little evidence that it had any immediate impact on warfare; there is no mention of gunpowder use in the chronicles of the wars against the Tanguts in the eleventh century, and China was otherwise mostly at peace during this century. The first chronicled use of "fire spears" (or "fire lances") is at the siege of De'an in 1132.[30] + +Formula for gunpowder in 1044 Wujing zongyao part I vol 12 + + +Instruction for fire bomb in Wujing zongyao + + +Fire bomb + + +Fire grenade + + +Proto-cannon from the Ming Dynasty text Huolongjing + + +Land mine from the Ming Dynasty text Huolongjing + + +Fire arrow rocket launcher from the Wujing zongyao +Middle East[edit] +Main articles: Inventions in the Islamic world and Alchemy and chemistry in Islam + +The Sultani Cannon, a very heavy bronze breech-loading cannon of type used by Ottoman Empire in the conquest of Constantinople, in 1453. +The Muslims acquired knowledge of gunpowder some time between 1240 and 1280, by which time the Syrian Hasan al-Rammah had written, in Arabic, recipes for gunpowder, instructions for the purification of saltpeter, and descriptions of gunpowder incendiaries. Gunpowder arrived in the Middle East, possibly through India, from China. 
or "salt from Chinese salt marshes" (namak shūra chīnī, Persian: نمک شوره چيني‎).[38][39]
The state-controlled manufacture of gunpowder by the Ottoman Empire through early supply chains to obtain nitre, sulfur and high-quality charcoal from oaks in Anatolia contributed significantly to its expansion between the 15th and 18th centuries.
From the violence of that salt called saltpeter [together with sulfur and willow charcoal, combined into a powder] so horrible a sound is made by the bursting of a thing so small, no more than a bit of parchment [containing it], that we find [the ear assaulted by a noise] exceeding the roar of strong thunder, and a flash brighter than the most brilliant lightning."[9] In the early 20th century, British artillery officer Henry William Lovett Hime proposed that another work tentatively attributed to Bacon, Epistola de Secretis Operibus Artis et Naturae, et de Nullitate Magiae contained an encrypted formula for gunpowder. This claim has been disputed by historians of science including Lynn Thorndike, John Maxson Stillman and George Sarton and by Bacon's editor Robert Steele, both in terms of authenticity of the work, and with respect to the decryption method.[9] In any case, the formula claimed to have been decrypted (7:5:5 saltpeter:charcoal:sulfur) is not useful for firearms use or even firecrackers, burning slowly and producing mostly smoke.[49][50] + +Cannon forged in 1667 at the Fortín de La Galera, Nueva Esparta, Venezuela. +The Liber Ignium, or Book of Fires, attributed to Marcus Graecus, is a collection of incendiary recipes, including some gunpowder recipes. Partington dates the gunpowder recipes to approximately 1300.[51] One recipe for "flying fire" (ingis volatilis) involves saltpeter, sulfur, and colophonium, which, when inserted into a reed or hollow wood, "flies away suddenly and burns up everything." 
One recipe for "flying fire" (ignis volatilis) involves saltpeter, sulfur, and colophonium, which, when inserted into a reed or hollow wood, "flies away suddenly and burns up everything."
+Gunpowder-making and metal-smelting and casting for shot and cannon was closely held by skilled military tradesmen, who formed guilds that collected dues, tested apprentices, and gave pensions.
The first printed book on either gunpowder or metalworking, it was published posthumously in 1540, with 9 editions over 138 years, and also reprinted by MIT Press in 1966.[54] By the mid-17th century fireworks were used for entertainment on an unprecedented scale in Europe, being popular even at resorts and public gardens.[56] +In 1774 Louis XVI ascended to the throne of France at age 20. After he discovered that France was not self-sufficient in gunpowder, a Gunpowder Administration was established; to head it, the lawyer Antoine Lavoisier was appointed. Although from a bourgeois family, after his degree in law Lavoisier became wealthy from a company set up to collect taxes for the Crown; this allowed him to pursue experimental natural science as a hobby.[57] +Without access to cheap Indian saltpeter (controlled by the British), for hundreds of years France had relied on saltpetermen with royal warrants, the droit de fouille or "right to dig", to seize nitrous-containing soil and demolished walls of barnyards, without compensation to the owners.[58] This caused farmers, the wealthy, or entire villages to bribe the petermen and the associated bureaucracy to leave their buildings alone and the saltpeter uncollected. Lavoisier instituted a crash program to increase saltpeter production, revised (and later eliminated) the droit de fouille, researched best refining and powder manufacturing methods, instituted management and record-keeping, and established pricing that encouraged private investment in works. Although saltpeter from new Prussian-style putrefaction works had not been produced yet (the process taking about 18 months), in only a year France had gunpowder to export. A chief beneficiary of this surplus was the American Revolution. 
After the end of World War I, the majority of the United Kingdom gunpowder manufacturers merged into a single company, "Explosives Trades limited"; and a number of sites were closed down, including those in Ireland.
The Home Office removed gunpowder from its list of Permitted Explosives; and shortly afterwards, on 31 December 1931, the former Curtis & Harvey's Glynneath gunpowder factory at Pontneddfechan, in Wales, closed down, and it was demolished by fire in 1932.[63] + +Gunpowder storing barrels at Martello tower in Point Pleasant Park +The last remaining gunpowder mill at the Royal Gunpowder Factory, Waltham Abbey was damaged by a German parachute mine in 1941 and it never reopened.[64] This was followed by the closure of the gunpowder section at the Royal Ordnance Factory, ROF Chorley, the section was closed and demolished at the end of World War II; and ICI Nobel's Roslin gunpowder factory, which closed in 1954.[64][65] +This left the sole United Kingdom gunpowder factory at ICI Nobel's Ardeer site in Scotland; it too closed in October 1976.[64] Since then gunpowder has been imported into the United Kingdom. In the late 1970s/early 1980s gunpowder was bought from eastern Europe, particularly from what was then the German Democratic Republic and former Yugoslavia. +India[edit] + +In the year 1780 the British began to annex the territories of the Sultanate of Mysore, during the Second Anglo-Mysore War. The British battalion was defeated during the Battle of Guntur, by the forces of Hyder Ali, who effectively utilized Mysorean rockets and Rocket artillery against the closely massed British forces. + +Mughal Emperor Shah Jahan, hunting deer using a Matchlock as the sun sets in the horizon. 
+Gunpowder and gunpowder weapons were transmitted to India through the Mongol invasions of India.[66][67] The Mongols were defeated by Alauddin Khilji of the Delhi Sultanate, and some of the Mongol soldiers remained in northern India after their conversion to Islam.[67] It was written in the Tarikh-i Firishta (1606–1607) that Nasir ud din Mahmud the ruler of the Delhi Sultanate presented the envoy of the Mongol ruler Hulegu Khan with a dazzling pyrotechnics display upon his arrival in Delhi in 1258 AD. Nasir ud din Mahmud tried to express his strength as a ruler and tried to ward off any Mongol attempt similar to the Siege of Baghdad (1258).[68] Firearms known as top-o-tufak also existed in many Muslim kingdoms in India by as early as 1366 AD.[68] From then on the employment of gunpowder warfare in India was prevalent, with events such as the "Siege of Belgaum" in 1473 by Sultan Muhammad Shah Bahmani.[69] +The shipwrecked Ottoman Admiral Seydi Ali Reis is known to have introduced the earliest type of Matchlock weapons, which the Ottomans used against the Portuguese during the Siege of Diu (1531). After that, a diverse variety of firearms; large guns in particular, became visible in Tanjore, Dacca, Bijapur, and Murshidabad.[70] Guns made of bronze were recovered from Calicut (1504)- the former capital of the Zamorins[71] +The Mughal Emperor Akbar mass-produced matchlocks for the Mughal Army. Akbar is personally known to have shot a leading Rajput commander during the Siege of Chittorgarh.[72] The Mughals began to use Bamboo rockets (mainly for signalling) and employ Sappers: special units that undermined heavy stone fortifications to plant gunpowder charges. +The Mughal Emperor Shah Jahan is known to have introduced much more advanced Matchlocks, their designs were a combination of Ottoman and Mughal designs. 
Shah Jahan also countered the British and other Europeans in his province of Gujarāt, which supplied Europe with saltpeter for use in gunpowder warfare during the 17th century.[73] Bengal and Mālwa participated in saltpeter production.[73] The Dutch, French, Portuguese, and English used Chhapra as a center of saltpeter refining.[73] +Ever since the founding of the Sultanate of Mysore by Hyder Ali, French military officers were employed to train the Mysore Army. Hyder Ali and his son Tipu Sultan were the first to introduce modern Cannons and Muskets, their army was also the first in India to have official uniforms. During the Second Anglo-Mysore War Hyder Ali and his son Tipu Sultan unleashed the Mysorean rockets at their British opponents effectively defeating them on various occasions. The Mysorean rockets inspired the development of the Congreve rocket, which the British widely utilized during the Napoleonic Wars and the War of 1812.[74] +Indonesia[edit] +The Javanese Majapahit Empire was arguably able to encompass much of modern day Indonesia due to its unique mastery of bronze smithing and use of a central arsenal fed by a large number of cottage industries within the immediate region. Documentary and archeological evidence indicate that Arab or Indian traders introduced gunpowder, gonnes, muskets, blunderbusses, and cannons to the Javanese, Acehnese, and Batak via long established commercial trade routes around the early to mid 14th century CE.[75] Portuguese and Spanish invaders were unpleasantly surprised and occasionally even outgunned on occasion.[76] The resurgent Singhasari Empire overtook Sriwijaya and later emerged as the Majapahit whose warfare featured the use of fire-arms and cannonade.[77] Circa 1540 CE the Javanese, always alert for new weapons, found the newly arrived Portuguese weaponry superior to that of the locally made variants. 
Javanese bronze breech-loaded swivel-guns, known as meriam, or erroneously as lantaka, was used widely by the Majapahit navy as well as by pirates and rival lords. The demise of the Majapahit empire and the dispersal of disaffected skilled bronze cannon-smiths to Brunei, modern Sumatra, Malaysia and the Philippines lead to widespread use, especially in the Makassar Strait. +Saltpeter harvesting was recorded by Dutch and German travelers as being common in even the smallest villages and was collected from the decomposition process of large dung hills specifically piled for the purpose. The Dutch punishment for possession of non-permitted gunpowder appears to have been amputation.[78] Ownership and manufacture of gunpowder was later prohibited by the colonial Dutch occupiers.[75] According to a colonel McKenzie quoted in Sir Thomas Stamford Raffles, The History of Java (1817), the purest sulfur was supplied from a crater from a mountain near the straits of Bali.[77] +Manufacturing technology[edit] + +Edge-runner mill in a restored mill, at Eleutherian Mills +For the most powerful black powder meal, a wood charcoal is used. The best wood for the purpose is Pacific willow,[79] but others such as alder or buckthorn can be used. In Great Britain between the 15th to 19th centuries charcoal from alder buckthorn was greatly prized for gunpowder manufacture; cottonwood was used by the American Confederate States.[80] The ingredients are reduced in particle size and mixed as intimately as possible. Originally this was with a mortar-and-pestle or a similarly operating stamping-mill, using copper, bronze or other non-sparking materials, until supplanted by the rotating ball mill principle with non-sparking bronze or lead. 
Historically, a marble or limestone edge runner mill, running on a limestone bed was used in Great Britain; however, by the mid 19th century AD this had changed to either an iron shod stone wheel or a cast iron wheel running on an iron bed.[81] The mix was dampened with alcohol or water during grinding to prevent accidental ignition. This also helps the extremely soluble saltpeter mix into the microscopic nooks and crannies of the very high surface-area charcoal. +Around the late 14th century AD, European powdermakers first began adding liquid during grinding to improve mixing, reduce dust, and with it the risk of explosion.[82] The powder-makers would then shape the resulting paste of dampened gunpowder, known as mill cake, into corns, or grains, to dry. Not only did corned powder keep better because of its reduced surface area, gunners also found that it was more powerful and easier to load into guns. Before long, powder-makers standardized the process by forcing mill cake through sieves instead of corning powder by hand. +The improvement was based on reducing the surface area of a higher density composition. At the beginning of the 19th century, makers increased density further by static pressing. They shoveled damp mill cake into a two-foot square box, placed this beneath a screw press and reduced it to 1/2 its volume. "Presscake" had the hardness of slate. They broke the dried slabs with hammers or rollers, and sorted the granules with sieves into different grades. In the United States, Irenee du Pont, who had learned the trade from Lavoisier, tumbled the dried grains in rotating barrels to round the edges and increase durability during shipping and handling. (Sharp grains rounded off in transport, producing fine "meal dust" that changed the burning properties.) +Another advance was the manufacture of kiln charcoal by distilling wood in heated iron retorts instead of burning it in earthen pits. 
Controlling the temperature influenced the power and consistency of the finished gunpowder. In 1863, in response to high prices for Indian saltpeter, DuPont chemists developed a process using potash or mined potassium chloride to convert plentiful Chilean sodium nitrate to potassium nitrate.[83] +During the 18th century gunpowder factories became increasingly dependent on mechanical energy.[84] Despite mechanization, production difficulties related to humidity control, especially during the pressing, were still present in the late 19th century. A paper from 1885 laments that "Gunpowder is such a nervous and sensitive spirit, that in almost every process of manufacture it changes under our hands as the weather changes." Pressing times to the desired density could vary by factor of three depending on the atmospheric humidity.[85] +Composition and characteristics[edit] +The term black powder was coined in the late 19th century, primarily in the United States, to distinguish prior gunpowder formulations from the new smokeless powders and semi-smokeless powders, in cases where these are not referred to as cordite. Semi-smokeless powders featured bulk volume properties that approximated black powder, but had significantly reduced amounts of smoke and combustion products. Smokeless powder has different burning properties (pressure vs. time) and can generate higher pressures and work per gram. This can rupture older weapons designed for black powder. Smokeless powders ranged in color from brownish tan to yellow to white. Most of the bulk semi-smokeless powders ceased to be manufactured in the 1920s.[86][87][88] +Black powder is a granular mixture of +a nitrate, typically potassium nitrate (KNO3), which supplies oxygen for the reaction; +charcoal, which provides carbon and other fuel for the reaction, simplified as carbon (C); +sulfur (S), which, while also serving as a fuel, lowers the temperature required to ignite the mixture, thereby increasing the rate of combustion. 
+Potassium nitrate is the most important ingredient in terms of both bulk and function because the combustion process releases oxygen from the potassium nitrate, promoting the rapid burning of the other ingredients.[89] To reduce the likelihood of accidental ignition by static electricity, the granules of modern black powder are typically coated with graphite, which prevents the build-up of electrostatic charge. +Charcoal does not consist of pure carbon; rather, it consists of partially pyrolyzed cellulose, in which the wood is not completely decomposed. Carbon differs from charcoal. Whereas charcoal's autoignition temperature is relatively low, carbon's is much greater. Thus, a black powder composition containing pure carbon would burn similarly to a match head, at best.[90] +The current standard composition for the black powders that are manufactured by pyrotechnicians was adopted as long ago as 1780. Proportions by weight are 75% potassium nitrate (known as saltpeter or saltpetre), 15% softwood charcoal, and 10% sulfur.[81] These ratios have varied over the centuries and by country, and can be altered somewhat depending on the purpose of the powder. For instance, power grades of black powder, unsuitable for use in firearms but adequate for blasting rock in quarrying operations, are called blasting powder rather than gunpowder with standard proportions of 70% nitrate, 14% charcoal, and 16% sulfur; blasting powder may be made with the cheaper sodium nitrate substituted for potassium nitrate and proportions may be as low as 40% nitrate, 30% charcoal, and 30% sulfur.[91] In 1857, Lamont DuPont solved the main problem of using cheaper sodium nitrate formulations when he patented DuPont "B" Blasting powder. After manufacturing grains from press-cake in the usual way, his process tumbled the powder with graphite dust for 12 hours. 
This formed a graphite coating on each grain that reduced its ability to absorb moisture.[92] +French war powder in 1879 used the ratio 75% saltpeter, 12.5% charcoal, 12.5% sulfur. English war powder in 1879 used the ratio 75% saltpeter, 15% charcoal, 10% sulfur.[93] The British Congreve rockets used 62.4% saltpeter, 23.2% charcoal and 14.4% sulfur, but the British Mark VII gunpowder was changed to 65% saltpeter, 20% charcoal and 15% sulfur.[94] The explanation for the wide variety in formulation relates to usage. Powder used for rocketry can use a slower burn rate since it accelerates the projectile for a much longer time—whereas powders for weapons such as flintlocks, cap-locks, or matchlocks need a higher burn rate to accelerate the projectile in a much shorter distance. Cannons usually used lower burn rate powders, because most would burst with higher burn rate powders. +Serpentine[edit] +The original dry-compounded powder used in fifteenth-century Europe was known as "Serpentine", either a reference to Satan[95] or to a common artillery piece that used it.[96] The ingredients were ground together with a mortar and pestle, perhaps for 24 hours,[96] resulting in a fine flour. Vibration during transportation could cause the components to separate again, requiring remixing in the field. Also if the quality of the saltpeter was low (for instance if it was contaminated with highly hygroscopic calcium nitrate), or if the powder was simply old (due to the mildly hygroscopic nature of potassium nitrate), in humid weather it would need to be re-dried. The dust from "repairing" powder in the field was a major hazard. +Loading cannons or bombards before the powder-making advances of the Renaissance was a skilled art. Fine powder loaded haphazardly or too tightly would burn incompletely or too slowly. 
Typically, the breech-loading powder chamber in the rear of the piece was filled only about half full, the serpentine powder neither too compressed nor too loose, a wooden bung pounded in to seal the chamber from the barrel when assembled, and the projectile placed on. A carefully determined empty space was necessary for the charge to burn effectively. When the cannon was fired through the touchhole, turbulence from the initial surface combustion caused the rest of the powder to be rapidly exposed to the flame.[96] +The advent of much more powerful and easy to use corned powder changed this procedure, but serpentine was used with older guns into the seventeenth century.[97] +Corning[edit] +For gunpowder to explode effectively, the combustible ingredients must be reduced to the smallest possible particle sizes, and mixed as thoroughly as possible. Once mixed, however, for better results in a gun, makers discovered that the final product should be in the form of individual, dense, grains that spread the fire quickly from grain to grain, much as straw or twigs catch fire more quickly than a pile of sawdust. +Primarily for safety reasons, size reduction and mixing is done while the ingredients are damp, usually with water. After 1800, instead of forming grains by hand or with sieves, the damp mill-cake was pressed in molds to increase its density and extract the liquid, forming press-cake. The pressing took varying amounts of time, depending on conditions such as atmospheric humidity. 
The hard, dense product was broken again into tiny pieces, which were separated with sieves to produce a uniform product for each purpose: coarse powders for cannons, finer grained powders for muskets, and the finest for small hand guns and priming.[97] Inappropriately fine-grained powder often caused cannons to burst before the projectile could move down the barrel, due to the high initial spike in pressure.[98] Mammoth powder with large grains made for Rodman's 15-inch cannon reduced the pressure to only 20 percent as high as ordinary cannon powder would have produced.[99] +In the mid-nineteenth century, measurements were made determining that the burning rate within a grain of black powder (or a tightly packed mass) is about 0.20 fps, while the rate of ignition propagation from grain to grain is around 30 fps, over two orders of magnitude faster.[97] +Modern types[edit] +Modern corning first compresses the fine black powder meal into blocks with a fixed density (1.7 g/cm³).[100] In the United States, gunpowder grains were designated F (for fine) or C (for coarse). Grain diameter decreased with a larger number of Fs and increased with a larger number of Cs, ranging from about 2 mm for 7F to 15 mm for 7C. Even larger grains were produced for artillery bore diameters greater than about 17 cm (6.7 in). 
The standard DuPont Mammoth powder developed by Thomas Rodman and Lammot du Pont for use during the American Civil War had grains averaging 0.6 inches diameter, with edges rounded in a glazing barrel.[99] Other versions had grains the size of golf and tennis balls for use in 20-inch (50-cm) Rodman guns.[101] In 1875 DuPont introduced Hexagonal powder for large artillery, which was pressed using shaped plates with a small center core—about 1.5 inches diameter, like a wagon wheel nut, the center hole widened as the grain burned.[102] By 1882 German makers also produced hexagonal grained powders of a similar size for artillery.[102] +By the late 19th century manufacturing focused on standard grades of black powder from Fg used in large bore rifles and shotguns, through FFg (medium and small-bore arms such as muskets and fusils), FFFg (small-bore rifles and pistols), and FFFFg (extreme small bore, short pistols and most commonly for priming flintlocks).[103] A coarser grade for use in military artillery blanks was designated A-1. These grades were sorted on a system of screens with oversize retained on a mesh of 6 wires per inch, A-1 retained on 10 wires per inch, Fg retained on 14, FFg on 24, FFFg on 46, and FFFFg on 60. Fines designated FFFFFg were usually reprocessed to minimize explosive dust hazards.[104] In the United Kingdom, the main service gunpowders were classified RFG (rifle grained fine) with diameter of one or two millimeters and RLG (rifle grained large) for grain diameters between two and six millimeters.[101] Gunpowder grains can alternatively be categorized by mesh size: the BSS sieve mesh size, being the smallest mesh size, which retains no grains. Recognized grain sizes are Gunpowder G 7, G 20, G 40, and G 90. +Owing to the large market of antique and replica black-powder firearms in the US, modern gunpowder substitutes like Pyrodex, Triple Seven and Black Mag3[105] pellets have been developed since the 1970s. 
These products, which should not be confused with smokeless powders, aim to produce less fouling (solid residue), while maintaining the traditional volumetric measurement system for charges. Claims of less corrosiveness of these products have been controversial, however. New cleaning products for black-powder guns have also been developed for this market.[103] +Other types of gunpowder[edit] +Besides black powder, there are other historically important types of gunpowder. "Brown gunpowder" is cited as composed of 79% nitre, 3% sulfur, and 18% charcoal per 100 of dry powder, with about 2% moisture. Prismatic Brown Powder is a large-grained product the Rottweil Company introduced in 1884 in Germany, which was adopted by the British Royal Navy shortly thereafter. The French navy adopted a fine, 3.1 millimeter, not prismatic grained product called Slow Burning Cocoa (SBC) or "cocoa powder". These brown powders reduced burning rate even further by using as little as 2 percent sulfur and using charcoal made from rye straw that had not been completely charred, hence the brown color.[102] +Lesmok powder was a product developed by DuPont in 1911,[106] one of several semi-smokeless products in the industry containing a mixture of black and nitrocellulose powder. It was sold to Winchester and others primarily for .22 and .32 small calibers. Its advantage was that it was believed at the time to be less corrosive than smokeless powders then in use. It was not understood in the U.S. until the 1920s that the actual source of corrosion was the potassium chloride residue from potassium chlorate sensitized primers. The bulkier black powder fouling better disperses primer residue. 
Failure to mitigate primer corrosion by dispersion caused the false impression that nitrocellulose-based powder caused corrosion.[107] Lesmok had some of the bulk of black powder for dispersing primer residue, but somewhat less total bulk than straight black powder, thus requiring less frequent bore cleaning.[108] It was last sold by Winchester in 1947. +Sulfur-free gunpowder[edit] + +Burst barrel of a muzzle loader pistol replica, which was loaded with nitrocellulose powder instead of black powder and couldn't withstand the higher pressures of the modern propellant +The development of smokeless powders, such as cordite, in the late 19th century created the need for a spark-sensitive priming charge, such as gunpowder. However, the sulfur content of traditional gunpowders caused corrosion problems with Cordite Mk I and this led to the introduction of a range of sulfur-free gunpowders, of varying grain sizes.[64] They typically contain 70.5 parts of saltpeter and 29.5 parts of charcoal.[64] Like black powder, they were produced in different grain sizes. In the United Kingdom, the finest grain was known as sulfur-free mealed powder (SMP). Coarser grains were numbered as sulfur-free gunpowder (SFG n): 'SFG 12', 'SFG 20', 'SFG 40' and 'SFG 90', for example; where the number represents the smallest BSS sieve mesh size, which retained no grains. +Sulfur's main role in gunpowder is to decrease the ignition temperature. A sample reaction for sulfur-free gunpowder would be +6 KNO3 + C7H4O → 3 K2CO3 + 4 CO2 + 2 H2O + 3 N2 +Combustion characteristics[edit] +A simple, commonly cited, chemical equation for the combustion of black powder is +2 KNO3 + S + 3 C → K2S + N2 + 3 CO2. +A balanced, but still simplified, equation is[109] +10 KNO3 + 3 S + 8 C → 2 K2CO3 + 3 K2SO4 + 6 CO2 + 5 N2. +Although charcoal's chemical formula varies, it can be best summed up by its empirical formula: C7H4O. 
+Therefore, an even more accurate equation of the decomposition of regular black powder with the use of sulfur can be described as: +6 KNO3 + C7H4O + 2 S → K2CO3 + K2SO4 + K2S + 4 CO2 + 2 CO + 2 H2O + 3 N2 +Black powder without the use of sulfur: +10 KNO3 + 2 C7H4O → 5 K2CO3 + 4 CO2 + 5 CO + 4 H2O + 5 N2 +The burning of gunpowder does not take place as a single reaction, however, and the byproducts are not easily predicted. One study's results showed that it produced (in order of descending quantities) 55.91% solid products: potassium carbonate, potassium sulfate, potassium sulfide, sulfur, potassium nitrate, potassium thiocyanate, carbon, ammonium carbonate and 42.98% gaseous products: carbon dioxide, nitrogen, carbon monoxide, hydrogen sulfide, hydrogen, methane, 1.11% water. +Black powder made with less-expensive and more plentiful sodium nitrate (in appropriate proportions) works just as well but is more hygroscopic than powders made from Potassium nitrate—popularly known as saltpeter. Because corned black powder grains made with saltpeter are less affected by moisture in the air, they can be stored unsealed without degradation by humidity. Muzzleloaders have been known to fire after hanging on a wall for decades in a loaded state, provided they remained dry. By contrast, black powder made with sodium nitrate must be kept sealed to remain stable. +Gunpowder contains 3 megajoules per kilogram, and contains its own oxidant. For comparison, the energy density of TNT is 4.7 megajoules per kilogram, and the energy density of gasoline is 47.2 megajoules per kilogram. Gunpowder is a low explosive and as such it does not detonate; rather it deflagrates. Since it contains its own oxidizer and additionally burns faster under pressure, its combustion is capable of rupturing containers such as shell, grenade, or improvised "pipe bomb" or "pressure cooker" casings, forming shrapnel. 
+Advantages[edit] +In quarrying, high explosives are generally preferred for shattering rock. However, because of its low brisance, black powder causes fewer fractures and results in more usable stone compared to other explosives, making black powder useful for blasting monumental stone such as granite and marble. Black powder is well suited for blank rounds, signal flares, burst charges, and rescue-line launches. Black powder is also used in fireworks for lifting shells, in rockets as fuel, and in certain special effects. +Disadvantages[edit] +Black powder has a low energy density compared to modern "smokeless" powders, and thus to achieve high energy loadings, large amounts of black powder are needed with heavy projectiles. Black powder also produces thick smoke as a byproduct, which in military applications may give a soldier's location away to an enemy observer and may also impair aiming for additional shots. +Combustion converts less than half the mass of black powder to gas. The rest ends up as a thick layer of soot inside the barrel. In addition to being a nuisance, the residue from burnt black powder is hygroscopic and with the addition of moisture absorbed from the air, this residue forms a caustic substance. The soot contains potassium oxide or sodium oxide that turns into potassium hydroxide, or sodium hydroxide, which corrodes wrought iron or steel gun barrels. Black powder arms must be well cleaned both inside and out to remove the residue. The matchlock musket or pistol (an early gun ignition system), as well as the flintlock would often be unusable in wet weather, due to powder in the pan being exposed and dampened. Because of this unreliability, soldiers carrying muskets, known as musketeers, were armed with additional weapons such as swords or pikes. The bayonet was developed to allow the musket to be used as a pike, thus eliminating the need for the soldier to carry a secondary weapon. 
+Transportation[edit] +The United Nations Model Regulations on the Transportation of Dangerous Goods and national transportation authorities, such as United States Department of Transportation, have classified gunpowder (black powder) as a Group A: Primary explosive substance for shipment because it ignites so easily. Complete manufactured devices containing black powder are usually classified as Group D: Secondary detonating substance, or black powder, or article containing secondary detonating substance, such as firework, class D model rocket engine, etc., for shipment because they are harder to ignite than loose powder. As explosives, they all fall into the category of Class 1. +Other uses[edit] +Besides its use as an explosive, gunpowder has been occasionally employed for other purposes; after the Battle of Aspern-Essling (1809), the surgeon of the Napoleonic Army Larrey combated the lack of food for the wounded under his care by preparing a bouillon of horse meat seasoned with gunpowder for lack of salt.[110][111] It was also used for sterilizing on ships when there was no alcohol. +Jack Tars (British sailors) used gunpowder to create tattoos when ink wasn't available, by pricking the skin and rubbing the powder into the wound in a method known as traumatic tattooing.[112] +Christiaan Huygens experimented with gunpowder in 1673 in an early attempt to build an internal combustion engine, but he did not succeed. Modern attempts to recreate his invention were similarly unsuccessful. +Fireworks use gunpowder as lifting and burst charges, although sometimes other more powerful compositions are added to the burst charge to improve performance in small shells or provide a louder report. Most modern firecrackers no longer contain black powder. 
+Beginning in the 1930s, gunpowder or smokeless powder was used in rivet guns, stun guns for animals, cable splicers and other industrial construction tools.[113] The "stud gun" drove nails or screws into solid concrete, a function not possible with hydraulic tools. See Powder-actuated tool. Shotguns have been used to eliminate persistent material rings in operating rotary kilns (such as those for cement, lime, phosphate, etc.) and clinker in operating furnaces, and commercial tools make the method more reliable.[114] +Near London in 1853, Captain Shrapnel demonstrated a method for crushing gold-bearing ores by firing them from a cannon into an iron chamber, and "much satisfaction was expressed by all present". He hoped it would be useful on the goldfields of California and Australia. Nothing came of the invention, as continuously-operating crushing machines that achieved more reliable comminution were already coming into use.[115] +See also[edit] +Ballistics +Black powder substitute +Faversham explosives industry +Bulk loaded liquid propellants +Gunpowder magazine +Gunpowder Plot +Berthold Schwarz +Gunpowder warfare +History of gunpowder +Technology of the Song Dynasty +References[edit] +Jump up ^ http://www.merriam-webster.com/dictionary/gunpowder +Jump up ^ Jai Prakash Agrawal (2010). High Energy Materials: Propellants, Explosives and Pyrotechnics. Wiley-VCH. p. 69. ISBN 978-3-527-32610-5. +Jump up ^ David Cressy, Saltpeter: The Mother of Gunpowder (Oxford University Press, 2013) +Jump up ^ Owen Compliance Services. "Black Powder". Material Safety Data Sheet. Retrieved 31 August 2014. +Jump up ^ http://www.history.com/shows/ancient-discoveries/articles/who-built-it-first-2 +Jump up ^ http://chemistry.about.com/od/historyofchemistry/a/gunpowder.htm +Jump up ^ Chase 2003:31 : "the earliest surviving formulas for gunpowder can be found in the Wujing zongyao, a military work from around 1040" +^ Jump up to: a b c Buchanan 2006, p. 
2 "With its ninth century AD origins in China, the knowledge of gunpowder emerged from the search by alchemists for the secrets of life, to filter through the channels of Middle Eastern culture, and take root in Europe with consequences that form the context of the studies in this volume." +^ Jump up to: a b c Joseph Needham; Gwei-Djen Lu; Ling Wang (1987). Science and civilisation in China, Volume 5, Part 7. Cambridge University Press. pp. 48–50. ISBN 978-0-521-30358-3. +Jump up ^ Hazel Rossotti (2002). Fire: Servant, Scourge, and Enigma. Courier Dover Publications. pp. 132–137. ISBN 978-0-486-42261-9. +Jump up ^ Jack Kelly Gunpowder: Alchemy, Bombards, and Pyrotechnics: The History of the Explosive that Changed the World, Perseus Books Group: 2005, ISBN 0-465-03722-4, ISBN 978-0-465-03722-3: 272 pages +Jump up ^ St. C. Easton: "Roger Bacon and his Search for a Universal Science", Oxford (1962) +^ Jump up to: a b Gábor Ágoston (2005). Guns for the sultan: military power and the weapons industry in the Ottoman Empire. Cambridge University Press. p. 15. ISBN 978-0-521-84313-3. +Jump up ^ Ingham-Brown, George (1989) The Big Bang: A History of Explosives, Sutton Publishers, ISBN 0-7509-1878-0, ISBN 978-0-7509-1878-7, page vi +Jump up ^ Kelly, Jack (2005) Gunpowder: Alchemy, Bombards, and Pyrotechnics: The History of the Explosive that Changed the World, Perseus Books Group, ISBN 0-465-03722-4, ISBN 978-0-465-03722-3, page 22 +Jump up ^ Bert S. Hall, "Introduction, 1999" pp. xvi–xvii to the reprinting of James Riddick Partington (1960). A history of Greek fire and gunpowder. JHU Press. ISBN 978-0-8018-5954-0. +^ Jump up to: a b Peter Purton (2009). A History of the Late Medieval Siege, 1200–1500. Boydell & Brewer. pp. 108–109. ISBN 978-1-84383-449-6. +Jump up ^ Bert S. Hall, "Introduction, 1999" p. xvii to the reprinting of James Riddick Partington (1960). A history of Greek fire and gunpowder. JHU Press. ISBN 978-0-8018-5954-0. +Jump up ^ Buchanan. 
"Editor's Introduction: Setting the Context", in Buchanan 2006. +^ Jump up to: a b Chase 2003:31–32 +Jump up ^ Lorge, Peter A. (2008). The Asian military revolution, 1300-2000 : from gunpowder to the bomb (1. publ. ed.). Cambridge: Cambridge University Press. p. 32. ISBN 978-0-521-60954-8. +Jump up ^ Kelly 2004:4 +Jump up ^ The Big Book of Trivia Fun, Kidsbooks, 2004 +Jump up ^ Peter Allan Lorge (2008), The Asian military revolution: from gunpowder to the bomb, Cambridge University Press, p. 18, ISBN 978-0-521-60954-8 +Jump up ^ Needham 1986, p. 7 "Without doubt it was in the previous century, around +850, that the early alchemical experiments on the constituents of gunpowder, with its self-contained oxygen, reached their climax in the appearance of the mixture itself." +Jump up ^ Chase 2003:1 "The earliest known formula for gunpowder can be found in a Chinese work dating probably from the 800s. The Chinese wasted little time in applying it to warfare, and they produced a variety of gunpowder weapons, including flamethrowers, rockets, bombs, and land mines, before inventing firearms." +Jump up ^ Chase 2003:1 +Jump up ^ Delgado, James (February 2003). "Relics of the Kamikaze". Archaeology (Archaeological Institute of America) 56 (1). +Jump up ^ Chase 2003:31 +Jump up ^ Peter Allan Lorge (2008), The Asian military revolution: from gunpowder to the bomb, Cambridge University Press, pp. 33–34, ISBN 978-0-521-60954-8 +Jump up ^ Kelly 2004:22 'Around year 1240, Arabs acquired knowledge of saltpeter ("Chinese snow") from the East, perhaps through India. They knew of gunpowder soon afterward. They also learned about fireworks ("Chinese flowers") and rockets ("Chinese arrows"). Arab warriors had acquired fire lances before year 1280. Around that same year, a Syrian named Hasan al-Rammah wrote a book that, as he put it, "treats of machines of fire to be used for amusement or for useful purposes."
He talked of rockets, fireworks, fire lances, and other incendiaries, using terms that suggested he derived his knowledge from Chinese sources. He gave instructions for the purification of saltpeter and recipes for making different types of gunpowder.' +^ Jump up to: a b c d Hassan, Ahmad Y. "Transfer of Islamic Technology to the West: Part III". History of Science and Technology in Islam. +Jump up ^ Peter Watson (2006). Ideas: A History of Thought and Invention, from Fire to Freud. HarperCollins. p. 304. ISBN 978-0-06-093564-1. The first use of a metal tube in this context was made around 1280 in the wars between the Song and the Mongols, where a new term, chong, was invented to describe the new horror...Like paper, it reached the West via the Muslims, in this case the writings of the Andalusian botanist Ibn al-Baytar, who died in Damascus in 1248. The Arabic term for saltpetre is 'Chinese snow' while the Persian usage is 'Chinese salt'.28 +Jump up ^ Cathal J. Nolan (2006). The age of wars of religion, 1000–1650: an encyclopedia of global warfare and civilization. Volume 1 of Greenwood encyclopedias of modern world wars. Greenwood Publishing Group. p. 365. ISBN 0-313-33733-0. Retrieved 2011-11-28. In either case, there is linguistic evidence of Chinese origins of the technology: in Damascus, Arabs called the saltpeter used in making gunpowder " Chinese snow," while in Iran it was called "Chinese salt." Whatever the migratory route +Jump up ^ Oliver Frederick Gillilan Hogg (1970). Artillery: its origin, heyday, and decline. Archon Books. p. 123. The Chinese were certainly acquainted with saltpetre, the essential ingredient of gunpowder. They called it Chinese Snow and employed it early in the Christian era in the manufacture of fireworks and rockets. +Jump up ^ Oliver Frederick Gillilan Hogg (1963). English artillery, 1326–1716: being the history of artillery in this country prior to the formation of the Royal Regiment of Artillery. Royal Artillery Institution. p. 
42. The Chinese were certainly acquainted with saltpetre, the essential ingredient of gunpowder. They called it Chinese Snow and employed it early in the Christian era in the manufacture of fireworks and rockets. +Jump up ^ Oliver Frederick Gillilan Hogg (1993). Clubs to cannon: warfare and weapons before the introduction of gunpowder (reprint ed.). Barnes & Noble Books. p. 216. ISBN 1-56619-364-8. Retrieved 2011-11-28. The Chinese were certainly acquainted with saltpetre, the essential ingredient of gunpowder. They called it Chinese snow and used it early in the Christian era in the manufacture of fireworks and rockets. +Jump up ^ Partington, J. R. (1960). A History of Greek Fire and Gunpowder (illustrated, reprint ed.). JHU Press. p. 335. ISBN 0801859549. Retrieved 2014-11-21. +Jump up ^ Needham, Joseph; Yu, Ping-Yu (1980). Needham, Joseph, ed. Science and Civilisation in China: Volume 5, Chemistry and Chemical Technology, Part 4, Spagyrical Discovery and Invention: Apparatus, Theories and Gifts. Volume 5 (Issue 4 of Science and Civilisation in China). Contributors Joseph Needham, Lu Gwei-Djen, Nathan Sivin (illustrated, reprint ed.). Cambridge University Press. p. 194. ISBN 052108573X. Retrieved 2014-11-21. +Jump up ^ Khan 1996 +^ Jump up to: a b Khan 2004:6 +Jump up ^ Ancient Discoveries, Episode 12: Machines of the East, History Channel, 2007 (Part 4 and Part 5) +Jump up ^ Nelson, Cameron Rubaloff (2010-07). Manufacture and transportation of gunpowder in the Ottoman Empire: 1400-1800 M.A. Thesis. +Jump up ^ William H. McNeill (1992). The Rise of the West: A History of the Human Community. University of Chicago Press. p. 492. ISBN 0-226-56141-0. Retrieved 29 July 2011. +Jump up ^ Michael Kohn (2006), Dateline Mongolia: An American Journalist in Nomad's Land, RDR Books, p. 28, ISBN 1-57143-155-1, retrieved 29 July 2011 +Jump up ^ Robert Cowley (1993). Robert Cowley, ed. Experience of War (reprint ed.). Random House Inc. p. 86. ISBN 0-440-50553-4. 
Retrieved 29 July 2011. +Jump up ^ Kenneth Warren Chase (2003). Firearms: a global history to 1700 (illustrated ed.). Cambridge University Press. p. 58. ISBN 0-521-82274-2. Retrieved 29 July 2011. +Jump up ^ C. F. Temler, Historische Abhandlungen der Koniglichen Gesellschaft der Wissenschaften zu Kopenhagen ... ubersetzt ... von V. A. Heinze, Kiel, Dresden and Leipzig, 1782, i, 168, as cited in Partington, p. 228, footnote 6. +Jump up ^ Joseph Needham; Gwei-Djen Lu; Ling Wang (1987). Science and civilisation in China, Volume 5, Part 7. Cambridge University Press. p. 358. ISBN 978-0-521-30358-3. +Jump up ^ Bert S. Hall, "Introduction, 1999" p. xxiv to the reprinting of James Riddick Partington (1960). A history of Greek fire and gunpowder. JHU Press. ISBN 978-0-8018-5954-0. +Jump up ^ Partington 1960:60 +^ Jump up to: a b Partington 1960:48–49, 54 +^ Jump up to: a b Partington 1960:82–83 +^ Jump up to: a b c d Kelly 2004, p.61 +Jump up ^ Molerus, Otto. "History of Civilization in the Western Hemisphere from the Point of View of Particulate Technology, Part 2," Advanced Powder Technology 7 (1996): 161-66 +Jump up ^ Microsoft Encarta Online Encyclopedia 2007 Archived 31 October 2009. +Jump up ^ In 1777 Lavoisier named oxygen, which had earlier been isolated by Priestley; the realization that saltpeter contained this substance was fundamental to understanding gunpowder. +^ Jump up to: a b Kelly 2004, p.164 +Jump up ^ Metzner, Paul (1998), Crescendo of the Virtuoso: Spectacle, Skill, and Self-Promotion in Paris during the Age of Revolution, University of California Press +^ Jump up to: a b c d Cocroft 2000, "Success to the Black Art!". Chapter 1 +Jump up ^ Ross, Charles. The Custom of the Castle: From Malory to Macbeth. Berkeley: University of California Press, c1997. 
[1] pages 131-130 +Jump up ^ The Noble-Abel Equation of State: Thermodynamic Derivations for Ballistics Modelling +Jump up ^ Pritchard, Tom; Evans, Jack; Johnson, Sydney (1985), The Old Gunpowder Factory at Glynneath, Merthyr Tydfil: Merthyr Tydfil & District Naturalists' Society +^ Jump up to: a b c d e Cocroft 2000, "The demise of gunpowder". Chapter 4 +Jump up ^ MacDougall, Ian (2000). 'Oh, ye had to be careful' : personal recollections by Roslin gunpowder mill and bomb factory workers. East Linton, Scotland: Tuckwell Press in association with the European Ethnological Research Centre and the Scottish Working People's History Trust. ISBN 1-86232-126-4. +Jump up ^ Iqtidar Alam Khan (2004). Gunpowder And Firearms: Warfare In Medieval India. Oxford University Press. ISBN 978-0-19-566526-0. +^ Jump up to: a b Iqtidar Alam Khan (25 April 2008). Historical Dictionary of Medieval India. Scarecrow Press. p. 157. ISBN 978-0-8108-5503-8. +^ Jump up to: a b Khan 2004:9–10 +Jump up ^ Khan 2004:10 +Jump up ^ Partington (Johns Hopkins University Press edition, 1999), 225 +Jump up ^ Partington (Johns Hopkins University Press edition, 1999), 226 +Jump up ^ http://www.youtube.com/watch?v=DTfEDaWMj4o +^ Jump up to: a b c "India." Encyclopædia Britannica. Encyclopedia Britannica 2008 Ultimate Reference Suite. Chicago: Encyclopedia Britannica, 2008. +Jump up ^ "rocket and missile system." Encyclopædia Britannica. Encyclopædia Britannica 2008 Ultimate Reference Suite. Chicago: Encyclopædia Britannica, 2008. +^ Jump up to: a b Dipanegara, P. B. R. Carey, Babad Dipanagara: an account of the outbreak of the Java war, 1825-30 : the Surakarta court version of the Babad Dipanagara with translations into English and Indonesian volume 9: Council of the M.B.R.A.S. by Art Printing Works: 1981. +Jump up ^ Atsushi, Ota (2006). Changes of regime and social dynamics in West Java : society, state, and the outer world of Banten, 1750-1830. Leiden: Brill. ISBN 90-04-15091-9. 
+^ Jump up to: a b Thomas Stamford Raffles, The History of Java, Oxford University Press, 1965 (originally published in 1817), ISBN 0-19-580347-7 +Jump up ^ Raffles, Thomas Stamford (1978). The History of Java ([Repr.]. ed.). Kuala Lumpur: Oxford University Press. ISBN 0-19-580347-7. +Jump up ^ US Department of Agriculture (1917). Department Bulleting No. 316: Willows: Their growth, use, and importance. The Department. p. 31. +Jump up ^ Kelly 2004, p.200 +^ Jump up to: a b Earl 1978, Chapter 2: The Development of Gunpowder +Jump up ^ Kelly 2004:60–63 +Jump up ^ Kelly 2004, p.199 +Jump up ^ Frangsmyr, Tore, J. L. Heilbron, and Robin E. Rider, editors The Quantifying Spirit in the Eighteenth Century. Berkeley: University of California Press, c1990. http://ark.cdlib.org/ark:/13030/ft6d5nb455/ p. 292. +Jump up ^ C.E. Munroe (1885) "Notes on the literature of explosives no. VIII", Proceedings of the US Naval Institute, no. XI, p. 285 +Jump up ^ The History of the 10.4×38 Swiss Cartridge +Jump up ^ Blackpowder to Pyrodex and Beyond by Randy Wakeman at Chuck Hawks +Jump up ^ The History and Art of Shotshells by Jon Farrar, Nebraskaland Magazine +Jump up ^ Buchanan. "Editor's Introduction: Setting the Context", in Buchanan 2006, p. 4. +Jump up ^ Black Powder Recipes, Ulrich Bretscher +Jump up ^ Julian S. Hatcher, Hatcher's Notebook, Military Service Publishing Company, 1947. Chapter XIII Notes on Gunpowder, pages 300-305. +Jump up ^ Kelly 2004, p.218 +Jump up ^ Book title Workshop Receipts Publisher William Clowes and Son limited Author Ernest Spon. Date 1 August 1873. +Jump up ^ GunpowderTranslation. Academic. Retrieved 2014-08-31. +Jump up ^ Cathal J. Nolan (2006), The age of wars of religion, 1000-1650: an encyclopedia of global warfare and civilization, Greenwood Publishing Group, p. 365, ISBN 978-0-313-33733-8 +^ Jump up to: a b c Kelly 2004, p58 +^ Jump up to: a b c John Francis Guilmartin (2003). 
Gunpowder & galleys: changing technology & Mediterranean warfare at sea in the 16th century. Conway Maritime Press. pp. 109–110 and 298–300. ISBN 0851779514. +Jump up ^ T.J. Rodman (1861), Reports of experiments on the properties of metals for cannon and the qualities of cannon powder, p. 270 +^ Jump up to: a b Kelly 2004, p.195 +Jump up ^ Tenney L. Davis (1943). The Chemistry of Powder and Explosives (PDF). p. 139. +^ Jump up to: a b Brown, G.I. (1998) The Big Bang: A history of Explosives Sutton Publishing pp.22&32 ISBN 0-7509-1878-0 +^ Jump up to: a b c Kelly 2004, p.224 +^ Jump up to: a b Rodney James (2011). The ABCs of Reloading: The Definitive Guide for Novice to Expert (9 ed.). Krause Publications. pp. 53–59. ISBN 978-1-4402-1396-0. +Jump up ^ Sharpe, Philip B. (1953) Complete Guide to Handloading Funk & Wagnalls p.137 +Jump up ^ Wakeman, Randy. "Blackpowder to Pyrodex and Beyond". Retrieved 31 August 2014. +Jump up ^ "LESMOK POWDER". +Jump up ^ Julian S. Hatcher, Hatcher's Notebook, Stackpole Books, 1962. Chapter XIV, Gun Corrosion and Ammunition Developments, pages 346-349. +Jump up ^ Wakeman, Randy. "Blackpowder to Pyrodex and Beyond". +Jump up ^ Flash! Bang! Whiz!, University of Denver +Jump up ^ Parker, Harold T. (1983). Three Napoleonic battles. (Repr., Durham, 1944. ed.). Durham, NC: Duke Univ. Pr. p. 83. ISBN 0-8223-0547-X. +Jump up ^ Larrey is quoted in French at Dr Béraud, Études Hygiéniques de la chair de cheval comme aliment, Musée des Familles (1841-42). +Jump up ^ Rediker, Marcus (1989). Between the devil and the deep blue sea : merchant seamen, pirates, and the Anglo-American maritime world, 1700-1750 (1st pbk. ed. ed.). Cambridge: Cambridge University Press. p. 12. ISBN 9780521379830. +Jump up ^ "Gunpowder Now Used To Drive Rivets And Splice Cables", April 1932, Popular Science +Jump up ^ "MasterBlaster System". Remington Products. +Jump up ^ Mining Journal 22 January 1853, p. 61 +Benton, Captain James G. (1862). 
A Course of Instruction in Ordnance and Gunnery (2 ed.). West Point, New York: Thomas Publications. ISBN 1-57747-079-6.. +Brown, G. I. (1998). The Big Bang: A History of Explosives. Sutton Publishing. ISBN 0-7509-1878-0.. +Buchanan, Brenda J., ed. (2006). Gunpowder, Explosives and the State: A Technological History. Aldershot: Ashgate. ISBN 0-7546-5259-9.. +Chase, Kenneth (2003). Firearms: A Global History to 1700. Cambridge University Press. ISBN 0-521-82274-2.. +Cocroft, Wayne (2000). Dangerous Energy: The archaeology of gunpowder and military explosives manufacture. Swindon: English Heritage. ISBN 1-85074-718-0.. +Crosby, Alfred W. (2002). Throwing Fire: Projectile Technology Through History. Cambridge University Press. ISBN 0-521-79158-8.. +Earl, Brian (1978). Cornish Explosives. Cornwall: The Trevithick Society. ISBN 0-904040-13-5.. +al-Hassan, Ahmad Y.. "History of Science and Technology in Islam". |chapter= ignored (help). +Johnson, Norman Gardner. "explosive". Encyclopædia Britannica. Chicago: Encyclopædia Britannica Online.. +Kelly, Jack (2004). Gunpowder: Alchemy, Bombards, & Pyrotechnics: The History of the Explosive that Changed the World. Basic Books. ISBN 0-465-03718-6.. +Khan, Iqtidar Alam (1996). "Coming of Gunpowder to the Islamic World and North India: Spotlight on the Role of the Mongols". Journal of Asian History 30: 41–5.. +Khan, Iqtidar Alam (2004). "Gunpowder and Firearms: Warfare in Medieval India". Oxford University Press. doi:10.1086/ahr.111.3.817.. +Needham, Joseph (1986). "Science & Civilisation in China". V:7: The Gunpowder Epic. Cambridge University Press. ISBN 0-521-30358-3.. +Norris, John (2003). Early Gunpowder Artillery: 1300-1600. Marlborough: The Crowood Press. ISBN 9781861266156.. +Partington, J.R. (1960). A History of Greek Fire and Gunpowder. Cambridge, UK: W. Heffer & Sons.. +Partington, James Riddick; Hall, Bert S. (1999). A History of Greek Fire and Gunpowder. Baltimore: Johns Hopkins University Press. 
doi:10.1353/tech.2000.0031. ISBN 0-8018-5954-9. +Urbanski, Tadeusz (1967). "Chemistry and Technology of Explosives" III. New York: Pergamon Press.. +External links[edit] + Wikimedia Commons has media related to Gunpowder. + Look up gunpowder in Wiktionary, the free dictionary. +Gun and Gunpowder +The Origins of Gunpowder +Cannons and Gunpowder +Oare Gunpowder Works, Kent, UK +Royal Gunpowder Mills +The DuPont Company on the Brandywine A digital exhibit produced by the Hagley Library that covers the founding and early history of the DuPont Company powder yards in Delaware +"Ulrich Bretschler's Gunpowder Chemistry page". +Video Demonstration of the Medieval Siege Society's Guns, Including showing ignition of gunpowder +Black Powder Recipes +"Dr. Sasse's investigations (and others) found via search at US DTIC.MIL These contain scientific studies of BP properties and details of measurement techniques.". +Categories: GunpowderChinese inventionsExplosivesFirearm propellantsPyrotechnic compositionsRocket fuelsSolid fuels +Navigation menu +Create accountLog inArticleTalkReadEditView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +Afrikaans +العربية +Aragonés +Asturianu +Azərbaycanca +Башҡортса +Беларуская +Беларуская (тарашкевіца)‎ +Български +Bosanski +Brezhoneg +Буряад +Català +Чӑвашла +Čeština +Corsu +Cymraeg +Dansk +Deutsch +Eesti +Ελληνικά +Español +Esperanto +Euskara +فارسی +Français +Gaeilge +Galego +贛語 +Хальмг +한국어 +हिन्दी +Hrvatski +Ilokano +Bahasa Indonesia +Íslenska +Italiano +עברית +Kapampangan +Kiswahili +Kurdî +Latina +Latviešu +Lietuvių +Limburgs +Magyar +Македонски +മലയാളം +مصرى +Монгол
+Nederlands +नेपाली +नेपाल भाषा +日本語 +Нохчийн +Norsk bokmål +Norsk nynorsk +Occitan +Oʻzbekcha +پنجابی +Polski +Português +Română +Runa Simi +Русский +Саха тыла +Scots +Shqip +Sicilianu +Simple English +Slovenčina +Slovenščina +کوردی +Српски / srpski +Srpskohrvatski / српскохрватски +Suomi +Svenska +Tagalog +தமிழ் +Татарча/tatarça +ไทย +Türkçe +Українська +اردو +Tiếng Việt +Võro +Winaray +ייִדיש +粵語 +Žemaitėška +中文 +Edit links +This page was last modified on 28 November 2014 at 05:37. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki + + +Smokeless powder +From Wikipedia, the free encyclopedia + +Finnish smokeless powder +Smokeless powder is the name given to a number of propellants used in firearms and artillery that produce negligible smoke when fired, unlike the black powder they replaced. The term is unique to the United States and is generally not used in other English-speaking countries, which initially used proprietary names such as "Ballistite" and "Cordite" but gradually shifted to "propellant" as the generic term. +The basis of the term smokeless is that the combustion products are mainly gaseous, compared to around 55% solid products (mostly potassium carbonate, potassium sulfate, and potassium sulfide) for black powder.[1] Despite its name, smokeless powder is not completely smoke-free;[2] while there may be little noticeable smoke from small-arms ammunition, smoke from artillery fire can be substantial.
This article focuses on nitrocellulose formulations, but the term smokeless powder was also used to describe various picrate mixtures with nitrate, chlorate, or dichromate oxidizers during the late 19th century, before the advantages of nitrocellulose became evident.[3] +Since the 14th century[4] gunpowder was not actually a physical "powder," and smokeless powder can only be produced as a pelletized or extruded granular material. Smokeless powder allowed the development of modern semi- and fully automatic firearms and lighter breeches and barrels for artillery. Burnt black powder leaves a thick, heavy fouling that is hygroscopic and causes rusting of the barrel. The fouling left by smokeless powder exhibits none of these properties (though some primer compounds can leave hygroscopic salts that have a similar effect; non-corrosive primer compounds were introduced in the 1920s[5][6]). This makes an autoloading firearm with many moving parts feasible (which would otherwise jam or seize under heavy black powder fouling). +Smokeless powders are classified as, typically, division 1.3 explosives under the UN Recommendations on the transportation of Dangerous goods – Model Regulations, regional regulations (such as ADR) and national regulations (such the United States' ATF). However, they are used as solid propellants; in normal use, they undergo deflagration rather than detonation. +Contents [hide] +1 Background +2 Nitroglycerine and guncotton +3 Propellant improvements +4 Chemical formulations +5 Instability and stabilization +6 Physical variations +7 Smokeless propellant components +8 Manufacturing +9 Flashless propellant +10 See also +11 References +11.1 Notes +11.2 Sources +12 External links +Background[edit] +Military commanders had been complaining since the Napoleonic Wars about the problems of giving orders on a battlefield obscured by the smoke of firing. 
Verbal commands could not be heard above the noise of the guns, and visual signals could not be seen through the thick smoke from the gunpowder used by the guns. Unless there was a strong wind, after a few shots, soldiers using black powder ammunition would have their view obscured by a huge cloud of smoke. Snipers or other concealed shooters were given away by a cloud of smoke over the firing position. Black powder is also corrosive, making cleaning mandatory after every use. Likewise, black powder's tendency to produce severe fouling caused actions to jam and often made reloading difficult. +Nitroglycerine and guncotton[edit] +Nitroglycerine was synthesized by the Italian chemist Ascanio Sobrero in 1847.[7] It was subsequently developed and manufactured by Alfred Nobel as an industrial explosive, but even then it was unsuitable as a propellant: despite its energetic and smokeless qualities, it detonates instead of deflagrating smoothly, making it more amenable to shattering a gun than propelling a projectile out of it. Nitroglycerine per se is also highly unstable, making it unfit to be carried in battlefield conditions. +A major step forward was the discovery of guncotton, a nitrocellulose-based material, by Swiss chemist Christian Friedrich Schönbein in 1846. He promoted its use as a blasting explosive[8] and sold manufacturing rights to the Austrian Empire. Guncotton was more powerful than gunpowder, but at the same time was once again somewhat more unstable. John Taylor obtained an English patent for guncotton; and John Hall & Sons began manufacture in Faversham. +English interest languished after an explosion destroyed the Faversham factory in 1847. Austrian Baron Wilhelm Lenk von Wolfsberg built two guncotton plants producing artillery propellent, but it too was dangerous under field conditions, and guns that could fire thousands of rounds using gunpowder would reach their service life after only a few hundred shots with the more powerful guncotton. 
Small arms could not withstand the pressures generated by guncotton at all. +After one of the Austrian factories blew up in 1862, Thomas Prentice & Company began manufacturing guncotton in Stowmarket in 1863; and British War Office chemist Sir Frederick Abel began thorough research at Waltham Abbey Royal Gunpowder Mills leading to a manufacturing process that eliminated the impurities in nitrocellulose making it safer to produce and a stable product safer to handle. Abel patented this process in 1865, when the second Austrian guncotton factory exploded. After the Stowmarket factory exploded in 1871, Waltham Abbey began production of guncotton for torpedo and mine warheads.[9] +Propellant improvements[edit] +In 1863, Prussian artillery captain Johann F. E. Schultze patented a small arms propellent of nitrated hardwood impregnated with saltpetre or barium nitrate. Prentice received an 1866 patent for a sporting powder of nitrated paper manufactured at Stowmarket, but ballistic uniformity suffered as the paper absorbed atmospheric moisture. In 1871, Frederick Volkmann received an Austrian patent for a colloided version of Schultze powder called Collodin, which he manufactured near Vienna for use in sporting firearms. Austrian patents were not published at the time, and the Austrian Empire considered the operation a violation of the government monopoly on explosives manufacture and closed the Volkmann factory in 1875.[9] In 1882, the Explosives Company at Stowmarket patented an improved formulation of nitrated cotton gelatinised by ether-alcohol with nitrates of potassium and barium. These propellants were suitable for shotguns but not rifles.[10] + +Poudre B single-base smokeless powder flakes +In 1884, Paul Vieille invented a smokeless powder called Poudre B (short for poudre blanche—white powder, as distinguished from black powder)[11] made from 68.2% insoluble nitrocellulose, 29.8% soluble nitrocellusose gelatinized with ether and 2% paraffin. 
This was adopted for the Lebel rifle.[12] It was passed through rollers to form paper thin sheets, which were cut into flakes of the desired size.[11] The resulting propellant, today known as pyrocellulose, contains somewhat less nitrogen than guncotton and is less volatile. A particularly good feature of the propellant is that it will not detonate unless it is compressed, making it very safe to handle under normal conditions. +Vieille's powder revolutionized the effectiveness of small guns, because it gave off almost no smoke and was three times more powerful than black powder. Higher muzzle velocity meant a flatter trajectory and less wind drift and bullet drop, making 1000 meter shots practicable. Since less powder was needed to propel a bullet, the cartridge could be made smaller and lighter. This allowed troops to carry more ammunition for the same weight. Also, it would burn even when wet. Black powder ammunition had to be kept dry and was almost always stored and transported in watertight cartridges. +Other European countries swiftly followed and started using their own versions of Poudre B, the first being Germany and Austria, which introduced new weapons in 1888. Subsequently Poudre B was modified several times with various compounds being added and removed. Krupp began adding diphenylamine as a stabilizer in 1888.[9] +Meanwhile, in 1887, Alfred Nobel obtained an English patent for a smokeless gunpowder he called Ballistite. In this propellant the fibrous structure of cotton (nitro-cellulose) was destroyed by a nitro-glycerine solution instead of a solvent.[13] In England in 1889, a similar powder was patented by Hiram Maxim, and in the USA in 1890 by Hudson Maxim.[14] Ballistite was patented in the United States in 1891. +The Germans adopted ballistite for naval use in 1898, calling it WPC/98. The Italians adopted it as filite, in cord instead of flake form, but realising its drawbacks changed to a formulation with nitroglycerine they called solenite. 
In 1891 the Russians tasked the chemist Mendeleef with finding a suitable propellant, he created nitrocellulose gelatinised by ether-alcohol, which produced more nitrogen and more uniform colloidal structure than the French use of nitro-cottons in Poudre B. He called it pyro-collodion.[13] +Britain conducted trials on all the various types of propellant brought to their attention, but were dissatisfied with them all and sought something superior to all existing types. In 1889, Sir Frederick Abel, James Dewar and Dr W Kellner patented (Nos 5614 and 11,664 in the names of Abel and Dewar) a new formulation that was manufactured at the Royal Gunpowder Factory at Waltham Abbey. It entered British service in 1891 as Cordite Mark 1. Its main composition was 58% Nitro-glycerine, 37% Guncotton and 3% mineral jelly. A modified version, Cordite MD, entered service in 1901, this increased guncotton to 65% and reduced nitro-glycerine to 30%, this change reduced the combustion temperature and hence erosion and barrel wear. Cordite's advantages over gunpowder were reduced maximum pressure in the chamber (hence lighter breeches, etc.) but longer high pressure. Cordite could be made in any desired shape or size.[15] The creation of cordite led to a lengthy court battle between Nobel, Maxim, and another inventor over alleged British patent infringement. +The Anglo-American Explosives Company began manufacturing its shotgun powder in Oakland, New Jersey in 1890. DuPont began producing guncotton at Carneys Point Township, New Jersey in 1891.[3] Charles E. Munroe of the Naval Torpedo Station in Newport, Rhode Island patented a formulation of guncotton colloided with nitrobenzene, called Indurite, in 1891.[16] Several United States firms began producing smokeless powder when Winchester Repeating Arms Company started loading sporting cartridges with Explosives Company powder in 1893. 
California Powder Works began producing a mixture of nitroglycerine and nitrocellulose with ammonium picrate as Peyton Powder, Leonard Smokeless Powder Company began producing nitroglycerine-nitrocellulose Ruby powders, Laflin & Rand negotiated a license to produce Ballistite, and DuPont started producing smokeless shotgun powder. The United States Army evaluated 25 varieties of smokeless powder and selected Ruby and Peyton Powders as the most suitable for use in the Krag-Jørgensen service rifle. Ruby was preferred, because tin-plating was required to protect brass cartridge cases from picric acid in the Peyton Powder. Rather than paying the required royalties for Ballistite, Laflin & Rand financed Leonard's reorganization as the American Smokeless Powder Company. United States Army Lieutenant Whistler assisted American Smokeless Powder Company factory superintendent Aspinwall in formulating an improved powder named W.A. for their efforts. W.A. smokeless powder was the standard for United States military service rifles from 1897 until 1908.[3] +In 1897, United States Navy Lieutenant John Bernadou patented a nitrocellulose powder colloided with ether-alcohol.[16] The Navy licensed or sold patents for this formulation to DuPont and the California Powder Works while retaining manufacturing rights for the Naval Powder Factory, Indian Head, Maryland constructed in 1900. 
The United States Army adopted the Navy single-base formulation in 1908 and began manufacture at Picatinny Arsenal.[3] By that time Laflin & Rand had taken over the American Powder Company to protect their investment, and Laflin & Rand had been purchased by DuPont in 1902.[17] Upon securing a 99-year lease of the Explosives Company in 1903, DuPont enjoyed use of all significant smokeless powder patents in the United States, and was able to optimize production of smokeless powder.[3] When government anti-trust action forced divestiture in 1912, DuPont retained the nitrocellulose smokeless powder formulations used by the United States military and released the double-base formulations used in sporting ammunition to the reorganized Hercules Powder Company. These newer propellants were more stable and thus safer to handle than Poudre B, and also more powerful. +Chemical formulations[edit] +"Double base" redirects here. For the musical instrument, see double bass. +Currently, propellants using nitrocellulose (detonation velocity 7,300 m/s (23,950 ft/s)) (typically an ether-alcohol colloid of nitrocellulose) as the sole explosive propellant ingredient are described as single-base powder.[18] +Propellants mixtures containing nitrocellulose and nitroglycerin (detonation velocity 7,700 m/s (25,260 ft/s)) as explosive propellant ingredients are known as double-base powder.[19] +During the 1930s triple-base propellant containing nitrocellulose, nitroglycerin, and a substantial quantity of nitroguanidine (detonation velocity 8,200 m/s (26,900 ft/s)) as explosive propellant ingredients was developed. These propellant mixtures have reduced flash and flame temperature without sacrificing chamber pressure compared to single and double base propellants, albeit at the cost of more smoke. +In practice, triple base propellants are reserved mainly for large caliber ammunition such as used in (naval) artillery and tank guns. During World War II it had some use by British artillery. 
After that war it became the standard propellant in all British large caliber ammunition designs except small-arms. Most western nations, except the United States, followed a similar path. +In the late 20th century new propellant formulations started to appear. These are based on nitroguanidine and high explosives of the RDX (detonation velocity 8,750 m/s (28,710 ft/s)) type. +Instability and stabilization[edit] +Nitrocellulose deteriorates with time, yielding acidic byproducts. Those byproducts catalyze the further deterioration, increasing its rate. The released heat, in case of bulk storage of the powder, or too large blocks of solid propellant, can cause self-ignition of the material. Single-base nitrocellulose propellants are hygroscopic and most susceptible to degradation; double-base and triple-base propellants tend to deteriorate more slowly. To neutralize the decomposition products, which could otherwise cause corrosion of metals of the cartridges and gun barrels, calcium carbonate is added to some formulations. +To prevent buildup of the deterioration products, stabilizers are added. Diphenylamine is one of the most common stabilizers used. Nitrated analogs of diphenylamine formed in the process of stabilizing decomposing powder are sometimes used as stabilizers themselves.[20][21] The stabilizers are added in the amount of 0.5–2% of the total amount of the formulation; higher amounts tend to degrade its ballistic properties. The amount of the stabilizer is depleted with time. Propellants in storage should be periodically tested for the amount of stabilizer remaining, as its depletion may lead to auto-ignition of the propellant. +Physical variations[edit] + +Ammunition handloading powders +Smokeless powder may be corned into small spherical balls or extruded into cylinders or strips with many cross-sectional shapes (strips with various rectangular proportions, single or multi-hole cylinders, slotted cylinders) using solvents such as ether. 
These extrusions can be cut into short ('flakes') or long pieces ('cords' many inches long). Cannon powder has the largest pieces. +The properties of the propellant are greatly influenced by the size and shape of its pieces. The specific surface area of the propellant influences the speed of burning, and the size and shape of the particles determine the specific surface area. By manipulation of the shape it is possible to influence the burning rate and hence the rate at which pressure builds during combustion. Smokeless powder burns only on the surfaces of the pieces. Larger pieces burn more slowly, and the burn rate is further controlled by flame-deterrent coatings that retard burning slightly. The intent is to regulate the burn rate so that a more or less constant pressure is exerted on the propelled projectile as long as it is in the barrel so as to obtain the highest velocity. The perforations stabilize the burn rate because as the outside burns inward (thus shrinking the burning surface area) the inside is burning outward (thus increasing the burning surface area, but faster, so as to fill up the increasing volume of barrel presented by the departing projectile).[22] Fast-burning pistol powders are made by extruding shapes with more area such as flakes or by flattening the spherical granules. Drying is usually performed under a vacuum. The solvents are condensed and recycled. The granules are also coated with graphite to prevent static electricity sparks from causing undesired ignitions.[23] +Faster-burning propellants generate higher temperatures and higher pressures, however they also increase wear on gun barrels. 
+Smokeless propellant components[edit] +The propellant formulations may contain various energetic and auxiliary components: +Propellants: +Nitrocellulose, an energetic component of most smokeless propellants[24] +Nitroglycerin, an energetic component of double-base and triple-base formulations[24] +Nitroguanidine, a component of triple-base formulations[24] +D1NA (bis-nitroxyethylnitramine)[25] +Fivonite (tetramethylolcyclopentanone)[25] +DGN (di-ethylene glycol dinitrate)[26] +Acetyl cellulose[27] +Deterrents, (or moderants), to slow the burning rate +Centralites (symmetrical diphenyl urea—primarily diethyl or dimethyl)[28][29] +Dibutyl phthalate[24][29] +Dinitrotoluene (toxic, carcinogenic, and obsolete)[24][30] +Akardite (asymmetrical diphenyl urea)[26] +ortho-tolyl urethane[31] +Polyester adipate +Camphor (obsolete)[29] +Stabilizers, to prevent or slow down self-decomposition[32] +Diphenylamine[33] +Petroleum jelly[34] +Calcium carbonate[24] +Magnesium oxide[26] +Sodium bicarbonate[27] +beta-naphthol methyl ether[31] +Amyl alcohol (obsolete)[35] +Aniline (obsolete)[36] +Decoppering additives, to hinder the buildup of copper residues from the gun barrel rifling +Tin metal and compounds (e.g., tin dioxide)[24][37] +Bismuth metal and compounds (e.g., bismuth trioxide, bismuth subcarbonate, bismuth nitrate, bismuth antimonide); the bismuth compounds are favored as copper dissolves in molten bismuth, forming brittle and easily removable alloy +Lead foil and lead compounds, phased out due to toxicity[25] +Flash reducers, to reduce the brightness of the muzzle flash (all have a disadvantage: the production of smoke)[38] +Potassium chloride[39] +Potassium nitrate +Potassium sulfate[24][37] +Potassium hydrogen tartarate (a byproduct of wine production formerly used by French artillery)[39] +Wear reduction additives, to lower the wear of the gun barrel liners[40] +Wax +Talc +Titanium dioxide +Polyurethane jackets over the powder bags, in large guns +Other additives 
+Ethyl acetate, a solvent for manufacture of spherical powder[34] +Rosin, a surfactant to hold the grain shape of spherical powder +Graphite, a lubricant to cover the grains and prevent them from sticking together, and to dissipate static electricity[23] +Manufacturing[edit] +This section describes procedures used in the United States. See Cordite for alternative procedures formerly used in the United Kingdom. +The United States Navy manufactured single-base tubular powder for naval artillery at Indian Head, Maryland, beginning in 1900. Similar procedures were used for United States Army production at Picatinny Arsenal beginning in 1907[18] and for manufacture of smaller grained Improved Military Rifle (IMR) powders after 1914. Short-fiber cotton linter was boiled in a solution of sodium hydroxide to remove vegetable waxes, and then dried before conversion to nitrocellulose by mixing with concentrated nitric and sulfuric acids. Nitrocellulose still resembles fibrous cotton at this point in the manufacturing process, and was typically identified as pyrocellulose because it would spontaneously ignite in air until unreacted acid was removed. The term guncotton was also used; although some references identify guncotton as a more extensively nitrated and refined product used in torpedo and mine warheads prior to use of TNT.[41] +Unreacted acid was removed from pyrocellulose pulp by a multistage draining and water washing process similar to that used in paper mills during production of chemical woodpulp. Pressurized alcohol removed remaining water from drained pyrocellulose prior to mixing with ether and diphenylamine. The mixture was then fed through a press extruding a long turbular cord form to be cut into grains of the desired length.[42] +Alcohol and ether were then evaporated from "green" powder grains to a remaining solvent concentration between 3 percent for rifle powders and 7 percent for large artillery powder grains. 
Burning rate is inversely proportional to solvent concentration. Grains were coated with electrically conductive graphite to minimize generation of static electricity during subsequent blending. "Lots" containing more than ten tonnes of powder grains were mixed through a tower arrangement of blending hoppers to minimize ballistic differences. Each blended lot was then subjected to testing to determine the correct loading charge for the desired performance.[43][44] +Military quantities of old smokeless powder were sometimes reworked into new lots of propellants.[45] Through the 1920s Dr. Fred Olsen worked at Picatinny Arsenal experimenting with ways to salvage tons of single-base cannon powder manufactured for World War I. Dr. Olsen was employed by Western Cartridge Company in 1929 and developed a process for manufacturing spherical smokeless powder by 1933.[46] Reworked powder or washed pyrocellulose can be dissolved in ethyl acetate containing small quantities of desired stabilizers and other additives. The resultant syrup, combined with water and surfactants, can be heated and agitated in a pressurized container until the syrup forms an emulsion of small spherical globules of the desired size. Ethyl acetate distills off as pressure is slowly reduced to leave small spheres of nitrocellulose and additives. The spheres can be subsequently modified by adding nitroglycerine to increase energy, flattening between rollers to a uniform minimum dimension, coating with phthalate deterrents to retard ignition, and/or glazing with graphite to improve flow characteristics during blending.[47][48] +Modern smokeless powder is produced in the United States by St. Marks Powder, Inc. owned by General Dynamics.[49] +Flashless propellant[edit] +Muzzle flash is the light emitted in the vicinity of the muzzle by the hot propellant gases and the chemical reactions that follow as the gases mix with the surrounding air. 
Before projectiles exit a slight pre-flash may occur from gases leaking past the projectiles. Following muzzle exit the heat of gases is usually sufficient to emit visible radiation – the primary flash. The gases expand but as they pass through the Mach disc they are re-compressed to produce an intermediate flash. Hot combustible gases (e.g. hydrogen and carbon-monoxide) may follow when they mix with oxygen in the surrounding air to produce the secondary flash, the brightest. The secondary flash does not usually occur with small-arms.[50] +Nitrocellulose contains insufficient oxygen to completely oxidize its carbon and hydrogen. The oxygen deficit is increased by addition of graphite and organic stabilizers. Products of combustion within the gun barrel include flammable gasses like hydrogen and carbon monoxide. At high temperature, these flammable gasses will ignite when turbulently mixed with atmospheric oxygen beyond the muzzle of the gun. During night engagements the flash produced by ignition can reveal the location of the gun to enemy forces[51] and cause temporary night-blindness among the gun crew by photo-bleaching visual purple.[52] +Flash suppressors are commonly used on small arms to reduce the flash signature, but this approach is not practical for artillery. Artillery muzzle flash up to 150 feet (46 m) from the muzzle has been observed, and can be reflected off clouds and be visible for distances up to 30 miles (48 km).[51] For artillery the most effective method is a propellant that produces a large proportion of inert nitrogen at relatively low temperatures that dilutes the combustible gases. 
Triple based propellants are used for this because of the nitrogen in the nitroguanidine.[53]
The Chemistry of Powder & Explosives (1943) pages 296-297 +Jump up ^ "Laflin & Rand Powder Company". DuPont. Retrieved 2012-02-24. +^ Jump up to: a b Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p.297 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p.298 +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) p.28 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p. 310 +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) pp.41–43 +^ Jump up to: a b Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p.306 +^ Jump up to: a b c d e f g h Campbell, John Naval Weapons of World War Two (1985) p. 5 +^ Jump up to: a b c Campbell, John Naval Weapons of World War Two (1985) p. 104 +^ Jump up to: a b c Campbell, John Naval Weapons of World War Two (1985) p. 221 +^ Jump up to: a b Campbell, John Naval Weapons of World War Two (1985) p. 318 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 317–320 +^ Jump up to: a b c Davis, William C., Jr. Handloading National Rifle Association of America (1981) p.30 +Jump up ^ Davis, William C., Jr. Handloading National Rifle Association of America (1981) p.31 +^ Jump up to: a b Campbell, John Naval Weapons of World War Two (1985) p. 174 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 307–311 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p. 302 +^ Jump up to: a b Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p. 296 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p. 307 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) p. 308 +^ Jump up to: a b Davis, William C., Jr. Handloading National Rifle Association of America (1981) p.32 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 322–327 +^ Jump up to: a b c Davis, Tenny L. 
The Chemistry of Powder & Explosives (1943) pages 323–327 +Jump up ^ "USA 16"/50 (40.6 cm) Mark 7". NavWeaps. 2008-11-03. Retrieved 2008-12-05. +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) pages 28–31 +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) pages 31–35 +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) pages 35–41 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 293 & 306 +Jump up ^ Fairfield, A. P., CDR USN Naval Ordnance Lord Baltimore Press (1921) p.39 +Jump up ^ Matunas, E. A. Winchester-Western Ball Powder Loading Data Olin Corporation (1978) p.3 +Jump up ^ Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 328–330 +Jump up ^ Wolfe, Dave Propellant Profiles Volume 1 Wolfe Publishing Company (1982) pages 136–137 +Jump up ^ General Dynamics Commercial Powder Applications. +Jump up ^ Moss G. M., Leeming D. W., Farrar C. L. Military Ballisitcs (1969) pages 55–56 +^ Jump up to: a b Davis, Tenny L. The Chemistry of Powder & Explosives (1943) pages 322–323 +Jump up ^ Milner p.68 +Jump up ^ Moss G. M., Leeming D. W., Farrar C. L. Military Ballisitcs (1969) pages 59–60 +Sources[edit] +Campbell, John (1985). Naval Weapons of World War Two. Naval Institute Press. ISBN 0-87021-459-4. +Davis, Tenney L. (1943). The Chemistry of Powder & Explosives (Angriff Press [1992] ed.). John Wiley & Sons Inc. ISBN 0-913022-00-4. +Davis, William C., Jr. (1981). Handloading. National Rifle Association of America. ISBN 0-935998-34-9. +Fairfield, A. P., CDR USN (1921). Naval Ordnance. Lord Baltimore Press. +Hatcher, Julian S. and Barr, Al (1951). Handloading. Hennage Lithograph Company. +Matunas, E. A. (1978). Winchester-Western Ball Powder Loading Data. Olin Corporation. +Milner, Marc (1985). North Atlantic Run. Naval Institute Press. ISBN 0-87021-450-0. +Wolfe, Dave (1982). Propellant Profiles Volume 1. Wolfe Publishing Company. 
ISBN 0-935632-10-7. +External links[edit] +The Manufacture of Smokeless Powders and their Forensic Analysis: A Brief Review – Robert M. Heramb, Bruce R. McCord +Hudson Maxim papers (1851-1925) at Hagley Museum and Library. Collection includes material relating to Maxim's patent on the process of making smokeless powder. +Categories: CorditeExplosivesFirearm propellantsSolid fuels +Navigation menu +Create accountLog inArticleTalkReadEditView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +العربية +БългарÑки +Dansk +Deutsch +Español +Ùارسی +Français +Bahasa Indonesia +Ãslenska +Italiano +עברית +Nederlands +日本語 +Polski +Português +РуÑÑкий +Svenska +தமிழ௠+中文 +Edit links +This page was last modified on 25 July 2014 at 22:33. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki + + +Deflagration +From Wikipedia, the free encyclopedia + +[hide]This article has multiple issues. Please help improve it or discuss these issues on the talk page. +This article needs additional citations for verification. (April 2011) +This article may be too technical for most readers to understand. (December 2013) + +A log in a fireplace. 
+Deflagration [1] (Lat: de + flagrare, "to burn down") is a term describing subsonic combustion propagating through heat transfer; hot burning material heats the next layer of cold material and ignites it. Most "fire" found in daily life, from flames to explosions, is deflagration. Deflagration is different from detonation, which is supersonic and propagates through shock. +Contents [hide] +1 Applications +2 Oil/wax fire and water +3 Flame physics +4 Damaging deflagration events +5 See also +6 References +Applications[edit] +In engineering applications, deflagrations are easier to control than detonations. Consequently, they are better suited when the goal is to move an object (a bullet in a gun, or a piston in an internal combustion engine) with the force of the expanding gas. Typical examples of deflagrations are the combustion of a gas-air mixture in a gas stove or a fuel-air mixture in an internal combustion engine, and the rapid burning of gunpowder in a firearm or of pyrotechnic mixtures in fireworks. Deflagration systems and products can also be used in mining, demolition and stone quarrying via gas pressure blasting as a beneficial alternative to high explosives. +Oil/wax fire and water[edit] +Adding water to a burning hydrocarbon such as oil or wax produces a deflagration. The water boils rapidly and ejects the burning material as a fine spray of droplets. A deflagration then occurs as the fine mist of oil ignites and burns extremely rapidly. These are particularly common in chip pan fires, which are responsible for one in five household fires in Britain.[2] +Flame physics[edit] +The underlying flame physics can be understood with the help of an idealized model consisting of a uniform one-dimensional tube of unburnt and burned gaseous fuel, separated by a thin transitional region of width \delta\; in which the burning occurs. The burning region is commonly referred to as the flame or flame front. 
In equilibrium, thermal diffusion across the flame front is balanced by the heat supplied by burning. +There are two characteristic timescales which are important here. The first is the thermal diffusion timescale \tau_d\;, which is approximately equal to +\tau_d \simeq \delta^2 / \kappa, +where \kappa \; is the thermal diffusivity. The second is the burning timescale \tau_b that strongly decreases with temperature, typically as +\tau_b\propto \exp[\Delta U/(k_B T_f)], +where \Delta U\; is the activation barrier for the burning reaction and T_f\; is the temperature developed as the result of burning; the value of this so-called "flame temperature" can be determined from the laws of thermodynamics. +For a stationary moving deflagration front, these two timescales must be equal: the heat generated by burning is equal to the heat carried away by heat transfer. This makes it possible to calculate the characteristic width \delta\; of the flame front: +\tau_b = \tau_d\;, +thus + \delta \simeq \sqrt {\kappa \tau_b} . +Now, the thermal flame front propagates at a characteristic speed S_l\;, which is simply equal to the flame width divided by the burn time: +S_l \simeq \delta / \tau_b \simeq \sqrt {\kappa / \tau_b} . +This simplified model neglects the change of temperature and thus the burning rate across the deflagration front. This model also neglects the possible influence of turbulence. As a result, this derivation gives only the laminar flame speed -- hence the designation S_l\;. +Damaging deflagration events[edit] +Damage to buildings, equipment and people can result from a large-scale, short-duration deflagration. The potential damage is primarily a function of the total amount of fuel burned in the event (total energy available), the maximum flame velocity that is achieved, and the manner in which the expansion of the combustion gases is contained. 
+In free-air deflagrations, there is a continuous variation in deflagration effects relative to the maximum flame velocity. When flame velocities are low, the effect of a deflagration is to release heat. Some authors use the term flash fire to describe these low-speed deflagrations. At flame velocities near the speed of sound, the energy released is in the form of pressure and the results resemble a detonation. Between these extremes both heat and pressure are released. +When a low-speed deflagration occurs within a closed vessel or structure, pressure effects can produce damage due to expansion of gases as a secondary effect. The heat released by the deflagration causes the combustion gases and excess air to expand thermally. The net result is that the volume of the vessel or structure must expand to accommodate the hot combustion gases, or the vessel must be strong enough to withstand the additional internal pressure, or it fails, allowing the gases to escape. The risks of deflagration inside waste storage drums is a growing concern in storage facilities. +See also[edit] + Look up deflagration in Wiktionary, the free dictionary. +Pressure piling +References[edit] +Jump up ^ "Glossary D-H". Hutchisonrodway.co.nz. Retrieved 2013-12-29. 
+Jump up ^ UK Fire Service advice on chip pan fires +Categories: Explosives +Navigation menu +Create accountLog inArticleTalkReadEditView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +Català +ÄŒeÅ¡tina +Deutsch +Español +Français +Italiano +Lietuvių +Nederlands +Norsk bokmÃ¥l +Polski +Português +РуÑÑкий +СрпÑки / srpski +Svenska +Edit links +This page was last modified on 2 October 2014 at 16:44. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki + + +United Kingdom +From Wikipedia, the free encyclopedia +This article is about the sovereign state. For the island, see Great Britain. For other uses, see United Kingdom (disambiguation) and UK (disambiguation). +Page semi-protected +United Kingdom of Great +Britain and Northern Ireland[show] + +A flag featuring both cross and saltire in red, white and blue Coat of arms containing shield and crown in centre, flanked by lion and unicorn +Flag Royal coat of arms[nb 1] +Anthem: "God Save the Queen"[nb 2] +MENU0:00 +Two islands to the north-west of continental Europe. Highlighted are the larger island and the north-eastern fifth of the smaller island to the west. 
+Location of the United Kingdom (dark green) +– in Europe (green & dark grey) +– in the European Union (green) +Capital +and largest city London +51°30′N 0°7′W +Official language +and national language English +Recognised regional +languages Cornish, Irish, Scots, Scottish Gaelic, Ulster-Scots, Welsh[nb 3] +Ethnic groups (2011) 87.1% White +7.0% Asian +3.0% Black +2.0% Mixed +0.9% Other +Demonym British, Briton +Government Unitary parliamentary constitutional monarchy + - Monarch Elizabeth II + - Prime Minister David Cameron +Legislature Parliament + - Upper house House of Lords + - Lower house House of Commons +Formation + - Acts of Union 1707 1 May 1707 + - Acts of Union 1800 1 January 1801 + - Irish Free State Constitution Act 5 December 1922 +Area + - Total 243,610 km2 (80th) +94,060 sq mi + - Water (%) 1.34 +Population + - 2013 estimate 64,100,000[3] (22nd) + - 2011 census 63,181,775[4] (22nd) + - Density 255.6/km2 (51st) +661.9/sq mi +GDP (PPP) 2014 estimate + - Total $2.435 trillion[5] (10th) + - Per capita $37,744[5] (27th) +GDP (nominal) 2014 estimate + - Total $2.848 trillion[5] (6th) + - Per capita $44,141[5] (22nd) +Gini (2012) positive decrease 32.8[6] +medium · 33rd +HDI (2013) Steady 0.892[7] +very high · 14th +Currency Pound sterling (GBP) +Time zone GMT (UTC​) + - Summer (DST) BST (UTC+1) +Date format dd/mm/yyyy (AD) +Drives on the left +Calling code +44 +ISO 3166 code GB +Internet TLD .uk +The United Kingdom of Great Britain and Northern Ireland Listeni/É¡reɪt ˈbrɪt(É™)n É™nd ˈnÉ”Ëð(É™)n ˈʌɪəlÉ™nd/, commonly known as the United Kingdom (UK) or Britain, is a sovereign state in Europe. Lying off the north-western coast of the European mainland, the country includes the island of Great Britain (a term also applied loosely to refer to the whole country),[8] the north-eastern part of the island of Ireland, and many smaller islands. 
Northern Ireland is the only part of the UK that shares a land border with another state: the Republic of Ireland.[nb 4] Apart from this land border, the UK is surrounded by the Atlantic Ocean, with the North Sea in the east and the English Channel in the south. The Irish Sea lies between Great Britain and Ireland. The UK has an area of 243,610 square kilometres (94,060 sq mi), making it the 78th-largest sovereign state in the world and the 11th-largest in Europe. +The United Kingdom is the 22nd-most populous country, with an estimated 64.1 million inhabitants.[3] It is a constitutional monarchy with a parliamentary system of governance.[9][10] Its capital city is London, an important global city and financial centre with the fourth-largest urban area in Europe.[11] The current monarch—since 6 February 1952—is Queen Elizabeth II. The UK consists of four countries: England, Scotland, Wales, and Northern Ireland.[12] The latter three have devolved administrations,[13] each with varying powers,[14][15] based in their capitals, Edinburgh, Cardiff, and Belfast, respectively. Guernsey, Jersey, and the Isle of Man are not part of the United Kingdom, being Crown dependencies with the British Government responsible for defence and international representation.[16] The UK has fourteen Overseas Territories,[17] including the disputed Falkland Islands, Gibraltar, and Indian Ocean Territory. +The relationships among the countries of the United Kingdom have changed over time. Wales was annexed by the Kingdom of England under the Acts of Union of 1536 and 1543. A treaty between England and Scotland resulted in a unified Kingdom of Great Britain in 1707, which in 1801, merged with the Kingdom of Ireland to form the United Kingdom of Great Britain and Ireland. 
In 1922, five-sixths of Ireland seceded from the country, leaving the present formulation of the United Kingdom of Great Britain and Northern Ireland.[nb 5] British Overseas Territories, formerly colonies, are the remnants of the British Empire which, at its height in the late 19th and early 20th centuries, encompassed almost a quarter of the world's land mass and was the largest empire in history. British influence can be observed in the language, culture, and legal systems of many of its former colonies. +The United Kingdom is a developed country and has the world's sixth-largest economy by nominal GDP and tenth-largest by purchasing power parity. The country is considered to have a high-income economy and is categorised as very high in the Human Development Index, currently ranking 14th in the world. It was the world's first industrialised country and the world's foremost power during the 19th and early 20th centuries.[18][19] The UK remains a great power with considerable economic, cultural, military, scientific, and political influence internationally.[20][21] It is a recognised nuclear weapons state and its military expenditure ranks fifth or sixth in the world.[22][23] The UK has been a permanent member of the United Nations Security Council since its first session in 1946. It has been a member state of the European Union (EU) and its predecessor, the European Economic Community (EEC), since 1973; it is also a member of the Commonwealth of Nations, the Council of Europe, the G7, the G8, the G20, NATO, the Organisation for Economic Co-operation and Development (OECD), and the World Trade Organization (WTO). 
+Contents [hide] +1 Etymology and terminology +2 History +2.1 Before 1707 +2.2 Since the Acts of Union of 1707 +3 Geography +3.1 Climate +3.2 Administrative divisions +4 Dependencies +5 Politics +5.1 Government +5.2 Devolved administrations +5.3 Law and criminal justice +5.4 Foreign relations +5.5 Military +6 Economy +6.1 Science and technology +6.2 Transport +6.3 Energy +7 Demographics +7.1 Ethnic groups +7.2 Languages +7.3 Religion +7.4 Migration +7.5 Education +7.6 Healthcare +8 Culture +8.1 Literature +8.2 Music +8.3 Visual art +8.4 Cinema +8.5 Media +8.6 Philosophy +8.7 Sport +8.8 Symbols +9 See also +10 Notes +11 References +12 Further reading +13 External links +Etymology and terminology +See also: Britain (placename) and Terminology of the British Isles +The 1707 Acts of Union declared that the kingdoms of England and Scotland were "United into One Kingdom by the Name of Great Britain", though the new state is also referred to in the Acts as the "Kingdom of Great Britain", "United Kingdom of Great Britain" and "United Kingdom".[24][25][nb 6] However, the term "united kingdom" is only found in informal use during the 18th century and the country was only occasionally referred to as he "United Kingdom of Great Britain".[26] The Acts of Union 1800 united the Kingdom of Great Britain and the Kingdom of Ireland in 1801, forming the United Kingdom of Great Britain and Ireland. 
The name "United Kingdom of Great Britain and Northern Ireland" was adopted following the independence of the Irish Free State, and the partition of Ireland, in 1922, which left Northern Ireland as the only part of the island of Ireland within the UK.[27] +Although the United Kingdom, as a sovereign state, is a country, England, Scotland, Wales, and to a lesser degree, Northern Ireland, are also regarded as countries, though they are not sovereign states.[28][29] Scotland, Wales and Northern Ireland have devolved self-government.[30][31] The British Prime Minister's website has used the phrase "countries within a country" to describe the United Kingdom.[12] Some statistical summaries, such as those for the twelve NUTS 1 regions of the UK, also refer to Scotland, Wales and Northern Ireland as "regions".[32][33] Northern Ireland is also referred to as a "province".[28][34] With regard to Northern Ireland, the descriptive name used "can be controversial, with the choice often revealing one's political preferences."[35] +The term Britain is often used as synonym for the United Kingdom. The term Great Britain, by contrast, refers conventionally to the island of Great Britain, or politically to England, Scotland and Wales in combination.[36][37][38] However, it is sometimes used as a loose synonym for the United Kingdom as a whole.[39][40] GB and GBR are the standard country codes for the United Kingdom (see ISO 3166-2 and ISO 3166-1 alpha-3) and are consequently used by international organisations to refer to the United Kingdom. Additionally, the United Kingdom's Olympic team competes under the name "Great Britain" or "Team GB".[41][42] +The adjective British is commonly used to refer to matters relating to the United Kingdom. 
The term has no definite legal connotation, but is used in law to refer to UK citizenship and matters to do with nationality.[43] People of the United Kingdom use a number of different terms to describe their national identity and may identify themselves as being British; or as being English, Scottish, Welsh, Northern Irish, or Irish;[44] or as being both.[45] +In 2006, a new design of British passport was introduced. Its first page shows the long form name of the state in English, Welsh and Scottish Gaelic.[46] In Welsh, the long form name of the state is "Teyrnas Unedig Prydain Fawr a Gogledd Iwerddon" with "Teyrnas Unedig" being used as a short form name on government websites.[47] In Scottish Gaelic, the long form is "Rìoghachd Aonaichte Bhreatainn is Èireann a Tuath" and the short form "Rìoghachd Aonaichte". +History +See also: History of the British Isles +Before 1707 + +Stonehenge, in Wiltshire, was erected around 2500 BC. +Main articles: History of England, History of Wales, History of Scotland, History of Ireland and History of the formation of the United Kingdom +Settlement by anatomically modern humans of what was to become the United Kingdom occurred in waves beginning by about 30,000 years ago.[48] By the end of the region's prehistoric period, the population is thought to have belonged, in the main, to a culture termed Insular Celtic, comprising Brythonic Britain and Gaelic Ireland.[49] The Roman conquest, beginning in 43 AD, and the 400-year rule of southern Britain, was followed by an invasion by Germanic Anglo-Saxon settlers, reducing the Brythonic area mainly to what was to become Wales and the historic Kingdom of Strathclyde.[50] Most of the region settled by the Anglo-Saxons became unified as the Kingdom of England in the 10th century.[51] Meanwhile, Gaelic-speakers in north west Britain (with connections to the north-east of Ireland and traditionally supposed to have migrated from there in the 5th century)[52][53] united with the Picts to 
create the Kingdom of Scotland in the 9th century.[54] +In 1066, the Normans invaded England from France and after its conquest, seized large parts of Wales, conquered much of Ireland and were invited to settle in Scotland, bringing to each country feudalism on the Northern French model and Norman-French culture.[55] The Norman elites greatly influenced, but eventually assimilated with, each of the local cultures.[56] Subsequent medieval English kings completed the conquest of Wales and made an unsuccessful attempt to annex Scotland. Thereafter, Scotland maintained its independence, albeit in near-constant conflict with England. The English monarchs, through inheritance of substantial territories in France and claims to the French crown, were also heavily involved in conflicts in France, most notably the Hundred Years War, while the Kings of Scots were in an alliance with the French during this period.[57] + +The Bayeux Tapestry depicts the Battle of Hastings and the events leading to it. 
+The early modern period saw religious conflict resulting from the Reformation and the introduction of Protestant state churches in each country.[58] Wales was fully incorporated into the Kingdom of England,[59] and Ireland was constituted as a kingdom in personal union with the English crown.[60] In what was to become Northern Ireland, the lands of the independent Catholic Gaelic nobility were confiscated and given to Protestant settlers from England and Scotland.[61] +In 1603, the kingdoms of England, Scotland and Ireland were united in a personal union when James VI, King of Scots, inherited the crowns of England and Ireland and moved his court from Edinburgh to London; each country nevertheless remained a separate political entity and retained its separate political, legal, and religious institutions.[62][63] +In the mid-17th century, all three kingdoms were involved in a series of connected wars (including the English Civil War) which led to the temporary overthrow of the monarchy and the establishment of the short-lived unitary republic of the Commonwealth of England, Scotland and Ireland.[64][65] +Although the monarchy was restored, it ensured (with the Glorious Revolution of 1688) that, unlike much of the rest of Europe, royal absolutism would not prevail, and a professed Catholic could never accede to the throne. The British constitution would develop on the basis of constitutional monarchy and the parliamentary system.[66] During this period, particularly in England, the development of naval power (and the interest in voyages of discovery) led to the acquisition and settlement of overseas colonies, particularly in North America.[67][68] +Since the Acts of Union of 1707 +Main article: History of the United Kingdom + +The Treaty of Union led to a single united kingdom encompassing all Great Britain. 
+On 1 May 1707, the united kingdom of Great Britain came into being, the result of Acts of Union being passed by the parliaments of England and Scotland to ratify the 1706 Treaty of Union and so unite the two kingdoms.[69][70][71] +In the 18th century, cabinet government developed under Robert Walpole, in practice the first prime minister (1721–1742). A series of Jacobite Uprisings sought to remove the Protestant House of Hanover from the British throne and restore the Catholic House of Stuart. The Jacobites were finally defeated at the Battle of Culloden in 1746, after which the Scottish Highlanders were brutally suppressed. The British colonies in North America that broke away from Britain in the American War of Independence became the United States of America in 1782. British imperial ambition turned elsewhere, particularly to India.[72] +During the 18th century, Britain was involved in the Atlantic slave trade. British ships transported an estimated 2 million slaves from Africa to the West Indies before banning the trade in 1807.[73] The term 'United Kingdom' became official in 1801 when the parliaments of Britain and Ireland each passed an Act of Union, uniting the two kingdoms and creating the United Kingdom of Great Britain and Ireland.[74] +In the early 19th century, the British-led Industrial Revolution began to transform the country. It slowly led to a shift in political power away from the old Tory and Whig landowning classes towards the new industrialists. An alliance of merchants and industrialists with the Whigs would lead to a new party, the Liberals, with an ideology of free trade and laissez-faire. In 1832 Parliament passed the Great Reform Act, which began the transfer of political power from the aristocracy to the middle classes. In the countryside, enclosure of the land was driving small farmers out. Towns and cities began to swell with a new urban working class. 
Few ordinary workers had the vote, and they created their own organisations in the form of trade unions. +Painting of a bloody battle. Horses and infantry fight or lie on grass. +The Battle of Waterloo marked the end of the Napoleonic Wars and the start of Pax Britannica. +After the defeat of France in the Revolutionary and Napoleonic Wars (1792–1815), the UK emerged as the principal naval and imperial power of the 19th century (with London the largest city in the world from about 1830).[75] Unchallenged at sea, British dominance was later described as Pax Britannica.[76][77] By the time of the Great Exhibition of 1851, Britain was described as the "workshop of the world".[78] The British Empire was expanded to include India, large parts of Africa and many other territories throughout the world. Alongside the formal control it exerted over its own colonies, British dominance of much of world trade meant that it effectively controlled the economies of many countries, such as China, Argentina and Siam.[79][80] Domestically, political attitudes favoured free trade and laissez-faire policies and a gradual widening of the voting franchise. During the century, the population increased at a dramatic rate, accompanied by rapid urbanisation, causing significant social and economic stresses.[81] After 1875, the UK's industrial monopoly was challenged by Germany and the USA. To seek new markets and sources of raw materials, the Conservative Party under Disraeli launched a period of imperialist expansion in Egypt, South Africa and elsewhere. Canada, Australia and New Zealand became self-governing dominions.[82] +Social reform and home rule for Ireland were important domestic issues after 1900. The Labour Party emerged from an alliance of trade unions and small Socialist groups in 1900, and suffragettes campaigned for women's right to vote before 1914. +Black-and-white photo of two dozen men in military uniforms and metal helmets sitting or standing in a muddy trench. 
+Infantry of the Royal Irish Rifles during the Battle of the Somme. More than 885,000 British soldiers died on the battlefields of World War I. +The UK fought with France, Russia and (after 1917) the US, against Germany and its allies in World War I (1914–18).[83] The UK armed forces were engaged across much of the British Empire and in several regions of Europe, particularly on the Western front.[84] The high fatalities of trench warfare caused the loss of much of a generation of men, with lasting social effects in the nation and a great disruption in the social order. +After the war, the UK received the League of Nations mandate over a number of former German and Ottoman colonies. The British Empire reached its greatest extent, covering a fifth of the world's land surface and a quarter of its population.[85] However, the UK had suffered 2.5 million casualties and finished the war with a huge national debt.[84] The rise of Irish Nationalism and disputes within Ireland over the terms of Irish Home Rule led eventually to the partition of the island in 1921,[86] and the Irish Free State became independent with Dominion status in 1922. Northern Ireland remained part of the United Kingdom.[87] A wave of strikes in the mid-1920s culminated in the UK General Strike of 1926. The UK had still not recovered from the effects of the war when the Great Depression (1929–32) occurred. This led to considerable unemployment and hardship in the old industrial areas, as well as political and social unrest in the 1930s. A coalition government was formed in 1931.[88] +The UK entered World War II by declaring war on Germany in 1939, after it had invaded Poland and Czechoslovakia. In 1940, Winston Churchill became prime minister and head of a coalition government. Despite the defeat of its European allies in the first year of the war, the UK continued the fight alone against Germany. 
In 1940, the RAF defeated the German Luftwaffe in a struggle for control of the skies in the Battle of Britain. The UK suffered heavy bombing during the Blitz. There were also eventual hard-fought victories in the Battle of the Atlantic, the North Africa campaign and Burma campaign. UK forces played an important role in the Normandy landings of 1944, achieved with its ally the US. After Germany's defeat, the UK was one of the Big Three powers who met to plan the post-war world; it was an original signatory to the Declaration of the United Nations. The UK became one of the five permanent members of the United Nations Security Council. However, the war left the UK severely weakened and depending financially on Marshall Aid and loans from the United States.[89] +Map of the world. Canada, the eastern United States, countries in east Africa, India, most of Australasia and some other countries are highlighted in pink. +Territories that were at one time part of the British Empire. Current British Overseas Territories are underlined in red. +In the immediate post-war years, the Labour government initiated a radical programme of reforms, which had a significant effect on British society in the following decades.[90] Major industries and public utilities were nationalised, a Welfare State was established, and a comprehensive, publicly funded healthcare system, the National Health Service, was created.[91] The rise of nationalism in the colonies coincided with Britain's now much-diminished economic position, so that a policy of decolonisation was unavoidable. Independence was granted to India and Pakistan in 1947.[92] Over the next three decades, most colonies of the British Empire gained their independence. 
Many became members of the Commonwealth of Nations.[93] +Although the UK was the third country to develop a nuclear weapons arsenal (with its first atomic bomb test in 1952), the new post-war limits of Britain's international role were illustrated by the Suez Crisis of 1956. The international spread of the English language ensured the continuing international influence of its literature and culture. From the 1960s onward, its popular culture was also influential abroad. As a result of a shortage of workers in the 1950s, the UK government encouraged immigration from Commonwealth countries. In the following decades, the UK became a multi-ethnic society.[94] Despite rising living standards in the late 1950s and 1960s, the UK's economic performance was not as successful as many of its competitors, such as West Germany and Japan. In 1973, the UK joined the European Economic Community (EEC), and when the EEC became the European Union (EU) in 1992, it was one of the 12 founding members. + +After two French vetoes, in 1961 and 1967, the UK joined the European Economic Community in 1973. In 1975, 67% of Britons voted in a referendum to remain in the Community. +From the late 1960s, Northern Ireland suffered communal and paramilitary violence (sometimes affecting other parts of the UK) conventionally known as the Troubles. It is usually considered to have ended with the Belfast "Good Friday" Agreement of 1998.[95][96][97] +Following a period of widespread economic slowdown and industrial strife in the 1970s, the Conservative Government of the 1980s initiated a radical policy of monetarism, deregulation, particularly of the financial sector (for example, Big Bang in 1986) and labour markets, the sale of state-owned companies (privatisation), and the withdrawal of subsidies to others.[98] This resulted in high unemployment and social unrest, but ultimately also economic growth, particularly in the services sector. 
From 1984, the economy was helped by the inflow of substantial North Sea oil revenues.[99] +Around the end of the 20th century there were major changes to the governance of the UK with the establishment of devolved administrations for Scotland, Wales and Northern Ireland.[13][100] The statutory incorporation followed acceptance of the European Convention on Human Rights. The UK is still a key global player diplomatically and militarily. It plays leading roles in the EU, UN and NATO. However, controversy surrounds some of Britain's overseas military deployments, particularly in Afghanistan and Iraq.[101] +The 2008 global financial crisis severely affected the UK economy. The coalition government of 2010 introduced austerity measures intended to tackle the substantial public deficits which resulted.[102] In 2014 the Scottish Government held a referendum on Scottish independence, with the majority of voters rejecting the independence proposal and opting to remain within the United Kingdom.[103] +Geography +Main article: Geography of the United Kingdom +Map of United Kingdom showing hilly regions to north and west, and flattest region in the south-east. +The topography of the UK +The total area of the United Kingdom is approximately 243,610 square kilometres (94,060 sq mi). The country occupies the major part of the British Isles[104] archipelago and includes the island of Great Britain, the northeastern one-sixth of the island of Ireland and some smaller surrounding islands. It lies between the North Atlantic Ocean and the North Sea with the south-east coast coming within 22 miles (35 km) of the coast of northern France, from which it is separated by the English Channel.[105] In 1993 10% of the UK was forested, 46% used for pastures and 25% cultivated for agriculture.[106] The Royal Greenwich Observatory in London is the defining point of the Prime Meridian.[107] +The United Kingdom lies between latitudes 49° to 61° N, and longitudes 9° W to 2° E. 
Northern Ireland shares a 224-mile (360 km) land boundary with the Republic of Ireland.[105] The coastline of Great Britain is 11,073 miles (17,820 km) long.[108] It is connected to continental Europe by the Channel Tunnel, which at 31 miles (50 km) (24 miles (38 km) underwater) is the longest underwater tunnel in the world.[109] +England accounts for just over half of the total area of the UK, covering 130,395 square kilometres (50,350 sq mi).[110] Most of the country consists of lowland terrain,[106] with mountainous terrain north-west of the Tees-Exe line; including the Cumbrian Mountains of the Lake District, the Pennines and limestone hills of the Peak District, Exmoor and Dartmoor. The main rivers and estuaries are the Thames, Severn and the Humber. England's highest mountain is Scafell Pike (978 metres (3,209 ft)) in the Lake District. Its principal rivers are the Severn, Thames, Humber, Tees, Tyne, Tweed, Avon, Exe and Mersey.[106] +Scotland accounts for just under a third of the total area of the UK, covering 78,772 square kilometres (30,410 sq mi)[111] and including nearly eight hundred islands,[112] predominantly west and north of the mainland; notably the Hebrides, Orkney Islands and Shetland Islands. The topography of Scotland is distinguished by the Highland Boundary Fault – a geological rock fracture – which traverses Scotland from Arran in the west to Stonehaven in the east.[113] The faultline separates two distinctively different regions; namely the Highlands to the north and west and the lowlands to the south and east. 
The more rugged Highland region contains the majority of Scotland's mountainous land, including Ben Nevis which at 1,343 metres (4,406 ft) is the highest point in the British Isles.[114] Lowland areas – especially the narrow waist of land between the Firth of Clyde and the Firth of Forth known as the Central Belt – are flatter and home to most of the population including Glasgow, Scotland's largest city, and Edinburgh, its capital and political centre. +A view of Ben Nevis in the distance, fronted by rolling plains +Ben Nevis, in Scotland, is the highest point in the British Isles +Wales accounts for less than a tenth of the total area of the UK, covering 20,779 square kilometres (8,020 sq mi).[115] Wales is mostly mountainous, though South Wales is less mountainous than North and mid Wales. The main population and industrial areas are in South Wales, consisting of the coastal cities of Cardiff, Swansea and Newport, and the South Wales Valleys to their north. The highest mountains in Wales are in Snowdonia and include Snowdon (Welsh: Yr Wyddfa) which, at 1,085 metres (3,560 ft), is the highest peak in Wales.[106] The 14, or possibly 15, Welsh mountains over 3,000 feet (914 m) high are known collectively as the Welsh 3000s. Wales has over 2,704 kilometres (1,680 miles) of coastline.[116] Several islands lie off the Welsh mainland, the largest of which is Anglesey (Ynys Môn) in the northwest. +Northern Ireland, separated from Great Britain by the Irish Sea and North Channel, has an area of 14,160 square kilometres (5,470 sq mi) and is mostly hilly. 
It includes Lough Neagh which, at 388 square kilometres (150 sq mi), is the largest lake in the British Isles by area.[117] The highest peak in Northern Ireland is Slieve Donard in the Mourne Mountains at 852 metres (2,795 ft).[106] +Climate +Main article: Climate of the United Kingdom +The United Kingdom has a temperate climate, with plentiful rainfall all year round.[105] The temperature varies with the seasons, seldom dropping below −11 °C (12 °F) or rising above 35 °C (95 °F).[118] The prevailing wind is from the south-west and bears frequent spells of mild and wet weather from the Atlantic Ocean,[105] although the eastern parts are mostly sheltered from this wind; since the majority of the rain falls over the western regions, the eastern parts are therefore the driest. Atlantic currents, warmed by the Gulf Stream, bring mild winters; especially in the west where winters are wet and even more so over high ground. Summers are warmest in the south-east of England, being closest to the European mainland, and coolest in the north. Heavy snowfall can occur in winter and early spring on high ground, and occasionally settles to great depth away from the hills. +Administrative divisions +Main article: Administrative geography of the United Kingdom +Each country of the United Kingdom has its own system of administrative and geographic demarcation, whose origins often pre-date the formation of the United Kingdom. Thus there is "no common stratum of administrative unit encompassing the United Kingdom".[119] Until the 19th century there was little change to those arrangements, but there has since been a constant evolution of role and function.[120] Change did not occur in a uniform manner and the devolution of power over local government to Scotland, Wales and Northern Ireland means that future changes are unlikely to be uniform either. +The organisation of local government in England is complex, with the distribution of functions varying according to local arrangements. 
Legislation concerning local government in England is the responsibility of the UK parliament and the Government of the United Kingdom, as England has no devolved parliament. The upper-tier subdivisions of England are the nine Government office regions or European Union government office regions.[121] One region, Greater London, has had a directly elected assembly and mayor since 2000 following popular support for the proposal in a referendum.[122] It was intended that other regions would also be given their own elected regional assemblies, but a proposed assembly in the North East region was rejected by a referendum in 2004.[123] Below the regional tier, some parts of England have county councils and district councils and others have unitary authorities; while London consists of 32 London boroughs and the City of London. Councillors are elected by the first-past-the-post system in single-member wards or by the multi-member plurality system in multi-member wards.[124] +For local government purposes, Scotland is divided into 32 council areas, with wide variation in both size and population. The cities of Glasgow, Edinburgh, Aberdeen and Dundee are separate council areas, as is the Highland Council which includes a third of Scotland's area but only just over 200,000 people. Local councils are made up of elected councillors, of whom there are currently 1,222;[125] they are paid a part-time salary. Elections are conducted by single transferable vote in multi-member wards that elect either three or four councillors. Each council elects a Provost, or Convenor, to chair meetings of the council and to act as a figurehead for the area. Councillors are subject to a code of conduct enforced by the Standards Commission for Scotland.[126] The representative association of Scotland's local authorities is the Convention of Scottish Local Authorities (COSLA).[127] +Local government in Wales consists of 22 unitary authorities. 
These include the cities of Cardiff, Swansea and Newport which are unitary authorities in their own right.[128] Elections are held every four years under the first-past-the-post system.[129] The most recent elections were held in May 2012, except for the Isle of Anglesey. The Welsh Local Government Association represents the interests of local authorities in Wales.[130] +Local government in Northern Ireland has since 1973 been organised into 26 district councils, each elected by single transferable vote. Their powers are limited to services such as collecting waste, controlling dogs and maintaining parks and cemeteries.[131] On 13 March 2008 the executive agreed on proposals to create 11 new councils and replace the present system.[132] The next local elections were postponed until 2016 to facilitate this.[133] +Dependencies + +A view of the Caribbean Sea from the Cayman Islands, one of the world's foremost international financial centres[134] and tourist destinations.[135] +Main articles: British Overseas Territories, Crown dependencies and British Islands +The United Kingdom has sovereignty over seventeen territories which do not form part of the United Kingdom itself: fourteen British Overseas Territories[136] and three Crown dependencies.[137] +The fourteen British Overseas Territories are: Anguilla; Bermuda; the British Antarctic Territory; the British Indian Ocean Territory; the British Virgin Islands; the Cayman Islands; the Falkland Islands; Gibraltar; Montserrat; Saint Helena, Ascension and Tristan da Cunha; the Turks and Caicos Islands; the Pitcairn Islands; South Georgia and the South Sandwich Islands; and Sovereign Base Areas on Cyprus.[138] British claims in Antarctica are not universally recognised.[139] Collectively Britain's overseas territories encompass an approximate land area of 1,727,570 square kilometres (667,018 sq mi) and a population of approximately 260,000 people.[140] They are the remnants of the British Empire and several have 
specifically voted to remain British territories (Bermuda in 1995, Gibraltar in 2002 and the Falkland Islands in 2013).[141] +The Crown dependencies are possessions of the Crown, as opposed to overseas territories of the UK.[142] They comprise three independently administered jurisdictions: the Channel Islands of Jersey and Guernsey in the English Channel, and the Isle of Man in the Irish Sea. By mutual agreement, the British Government manages the islands' foreign affairs and defence and the UK Parliament has the authority to legislate on their behalf. However, internationally, they are regarded as "territories for which the United Kingdom is responsible".[143] The power to pass legislation affecting the islands ultimately rests with their own respective legislative assemblies, with the assent of the Crown (Privy Council or, in the case of the Isle of Man, in certain circumstances the Lieutenant-Governor).[144] Since 2005 each Crown dependency has had a Chief Minister as its head of government.[145] +Politics +Main articles: Politics of the United Kingdom, Monarchy of the United Kingdom and Elections in the United Kingdom +Elderly lady with a yellow hat and grey hair is smiling in outdoor setting. +Elizabeth II, Queen of the United Kingdom and the other Commonwealth realms +The United Kingdom is a unitary state under a constitutional monarchy. Queen Elizabeth II is the head of state of the UK as well as monarch of fifteen other independent Commonwealth countries. The monarch has "the right to be consulted, the right to encourage, and the right to warn".[146] The United Kingdom is one of only four countries in the world to have an uncodified constitution.[147][nb 7] The Constitution of the United Kingdom thus consists mostly of a collection of disparate written sources, including statutes, judge-made case law and international treaties, together with constitutional conventions. 
As there is no technical difference between ordinary statutes and "constitutional law", the UK Parliament can perform "constitutional reform" simply by passing Acts of Parliament, and thus has the political power to change or abolish almost any written or unwritten element of the constitution. However, no Parliament can pass laws that future Parliaments cannot change.[148] +Government +Main article: Government of the United Kingdom +The UK has a parliamentary government based on the Westminster system that has been emulated around the world: a legacy of the British Empire. The parliament of the United Kingdom that meets in the Palace of Westminster has two houses; an elected House of Commons and an appointed House of Lords. All bills passed are given Royal Assent before becoming law. +The position of prime minister,[nb 8] the UK's head of government,[149] belongs to the person most likely to command the confidence of the House of Commons; this individual is typically the leader of the political party or coalition of parties that holds the largest number of seats in that chamber. The prime minister chooses a cabinet and they are formally appointed by the monarch to form Her Majesty's Government. By convention, the Queen respects the prime minister's decisions of government.[150] +Large sand-coloured building of Gothic design beside brown river and road bridge. The building has several large towers, including large clock-tower. +The Palace of Westminster, seat of both houses of the Parliament of the United Kingdom +The cabinet is traditionally drawn from members of a prime minister's party or coalition and mostly from the House of Commons but always from both legislative houses, the cabinet being responsible to both. Executive power is exercised by the prime minister and cabinet, all of whom are sworn into the Privy Council of the United Kingdom, and become Ministers of the Crown. 
The current Prime Minister is David Cameron, who has been in office since 11 May 2010.[151] Cameron is the leader of the Conservative Party and heads a coalition with the Liberal Democrats. For elections to the House of Commons, the UK is currently divided into 650 constituencies,[152] each electing a single member of parliament (MP) by simple plurality. General elections are called by the monarch when the prime minister so advises. The Parliament Acts 1911 and 1949 require that a new election must be called no later than five years after the previous general election.[153] +The UK's three major political parties are the Conservative Party (Tories), the Labour Party and the Liberal Democrats, representing the British traditions of conservatism, socialism and social liberalism, respectively. During the 2010 general election these three parties won 622 out of 650 seats available in the House of Commons.[154][155] Most of the remaining seats were won by parties that contest elections only in one part of the UK: the Scottish National Party (Scotland only); Plaid Cymru (Wales only); and the Alliance Party, Democratic Unionist Party, Social Democratic and Labour Party and Sinn Féin (Northern Ireland only[nb 9]). In accordance with party policy, no elected Sinn Féin members of parliament have ever attended the House of Commons to speak on behalf of their constituents because of the requirement to take an oath of allegiance to the monarch. +Devolved administrations +Main articles: Devolution in the United Kingdom, Northern Ireland Executive, Scottish Government and Welsh Government +Modern one-story building with grass on roof and large sculpted grass area in front. Behind are residential buildings in a mixture of styles. +The Scottish Parliament Building in Holyrood is the seat of the Scottish Parliament. 
+Scotland, Wales and Northern Ireland each have their own government or executive, led by a First Minister (or, in the case of Northern Ireland, a diarchal First Minister and deputy First Minister), and a devolved unicameral legislature. England, the largest country of the United Kingdom, has no such devolved executive or legislature and is administered and legislated for directly by the UK government and parliament on all issues. This situation has given rise to the so-called West Lothian question which concerns the fact that members of parliament from Scotland, Wales and Northern Ireland can vote, sometimes decisively,[156] on matters that only affect England.[157] The McKay Commission reported on this matter in March 2013 recommending that laws affecting only England should need support from a majority of English members of parliament.[158] +The Scottish Government and Parliament have wide-ranging powers over any matter that has not been specifically reserved to the UK parliament, including education, healthcare, Scots law and local government.[159] At the 2011 elections the Scottish National Party won re-election and achieved an overall majority in the Scottish parliament, with its leader, Alex Salmond, as First Minister of Scotland.[160][161] In 2012, the UK and Scottish governments signed the Edinburgh Agreement setting out the terms for a referendum on Scottish independence in 2014, which was defeated 55% to 45%. +The Welsh Government and the National Assembly for Wales have more limited powers than those devolved to Scotland.[162] The Assembly is able to legislate on devolved matters through Acts of the Assembly, which require no prior consent from Westminster. The 2011 elections resulted in a minority Labour administration led by Carwyn Jones.[163] +The Northern Ireland Executive and Assembly have powers similar to those devolved to Scotland. The Executive is led by a diarchy representing unionist and nationalist members of the Assembly. 
Currently, Peter Robinson (Democratic Unionist Party) and Martin McGuinness (Sinn Féin) are First Minister and deputy First Minister respectively.[164] Devolution to Northern Ireland is contingent on participation by the Northern Ireland administration in the North-South Ministerial Council, where the Northern Ireland Executive cooperates and develops joint and shared policies with the Government of Ireland. The British and Irish governments co-operate on non-devolved matters affecting Northern Ireland through the British–Irish Intergovernmental Conference, which assumes the responsibilities of the Northern Ireland administration in the event of its non-operation. +The UK does not have a codified constitution and constitutional matters are not among the powers devolved to Scotland, Wales or Northern Ireland. Under the doctrine of parliamentary sovereignty, the UK Parliament could, in theory, therefore, abolish the Scottish Parliament, Welsh Assembly or Northern Ireland Assembly.[165][166] Indeed, in 1972, the UK Parliament unilaterally prorogued the Parliament of Northern Ireland, setting a precedent relevant to contemporary devolved institutions.[167] In practice, it would be politically difficult for the UK Parliament to abolish devolution to the Scottish Parliament and the Welsh Assembly, given the political entrenchment created by referendum decisions.[168] The political constraints placed upon the UK Parliament's power to interfere with devolution in Northern Ireland are even greater than in relation to Scotland and Wales, given that devolution in Northern Ireland rests upon an international agreement with the Government of Ireland.[169] +Law and criminal justice +Main article: Law of the United Kingdom + +The Royal Courts of Justice of England and Wales +The United Kingdom does not have a single legal system, as Article 19 of the 1706 Treaty of Union provided for the continuation of Scotland's separate legal system.[170] Today the UK has three distinct 
systems of law: English law, Northern Ireland law and Scots law. A new Supreme Court of the United Kingdom came into being in October 2009 to replace the Appellate Committee of the House of Lords.[171][172] The Judicial Committee of the Privy Council, including the same members as the Supreme Court, is the highest court of appeal for several independent Commonwealth countries, the British Overseas Territories and the Crown Dependencies.[173] +Both English law, which applies in England and Wales, and Northern Ireland law are based on common-law principles.[174] The essence of common law is that, subject to statute, the law is developed by judges in courts, applying statute, precedent and common sense to the facts before them to give explanatory judgements of the relevant legal principles, which are reported and binding in future similar cases (stare decisis).[175] The courts of England and Wales are headed by the Senior Courts of England and Wales, consisting of the Court of Appeal, the High Court of Justice (for civil cases) and the Crown Court (for criminal cases). The Supreme Court is the highest court in the land for both criminal and civil appeal cases in England, Wales and Northern Ireland and any decision it makes is binding on every other court in the same jurisdiction, often having a persuasive effect in other jurisdictions.[176] + +The High Court of Justiciary – the supreme criminal court of Scotland. +Scots law is a hybrid system based on both common-law and civil-law principles. 
The chief courts are the Court of Session, for civil cases,[177] and the High Court of Justiciary, for criminal cases.[178] The Supreme Court of the United Kingdom serves as the highest court of appeal for civil cases under Scots law.[179] Sheriff courts deal with most civil and criminal cases including conducting criminal trials with a jury, known as sheriff solemn court, or with a sheriff and no jury, known as sheriff summary court.[180] The Scots legal system is unique in having three possible verdicts for a criminal trial: "guilty", "not guilty" and "not proven". Both "not guilty" and "not proven" result in an acquittal.[181] +Crime in England and Wales increased in the period between 1981 and 1995, though since that peak there has been an overall fall of 48% in crime from 1995 to 2007/08,[182] according to crime statistics. The prison population of England and Wales has almost doubled over the same period, to over 80,000, giving England and Wales the highest rate of incarceration in Western Europe at 147 per 100,000.[183] Her Majesty's Prison Service, which reports to the Ministry of Justice, manages most of the prisons within England and Wales. Crime in Scotland fell to its lowest recorded level for 32 years in 2009/10, falling by ten per cent.[184] At the same time Scotland's prison population, at over 8,000,[185] is at record levels and well above design capacity.[186] The Scottish Prison Service, which reports to the Cabinet Secretary for Justice, manages Scotland's prisons. +Foreign relations +Main article: Foreign relations of the United Kingdom + +The Prime Minister of the United Kingdom, David Cameron, and the President of the United States, Barack Obama, during the 2010 G-20 Toronto summit. +The UK is a permanent member of the United Nations Security Council, a member of NATO, the Commonwealth of Nations, G7, G8, G20, the OECD, the WTO, the Council of Europe, the OSCE, and is a member state of the European Union.
The UK is said to have a "Special Relationship" with the United States and a close partnership with France—the "Entente cordiale"—and shares nuclear weapons technology with both countries.[187][188] The UK is also closely linked with the Republic of Ireland; the two countries share a Common Travel Area and co-operate through the British-Irish Intergovernmental Conference and the British-Irish Council. Britain's global presence and influence is further amplified through its trading relations, foreign investments, official development assistance and military engagements.[189] +Military + +Troopers of the Blues and Royals during the 2007 Trooping the Colour ceremony +Main article: British Armed Forces +The armed forces of the United Kingdom—officially, Her Majesty's Armed Forces—consist of three professional service branches: the Royal Navy and Royal Marines (forming the Naval Service), the British Army and the Royal Air Force.[190] The forces are managed by the Ministry of Defence and controlled by the Defence Council, chaired by the Secretary of State for Defence. The Commander-in-Chief is the British monarch, Elizabeth II, to whom members of the forces swear an oath of allegiance.[191] The Armed Forces are charged with protecting the UK and its overseas territories, promoting the UK's global security interests and supporting international peacekeeping efforts. They are active and regular participants in NATO, including the Allied Rapid Reaction Corps, as well as the Five Power Defence Arrangements, RIMPAC and other worldwide coalition operations. Overseas garrisons and facilities are maintained in Ascension Island, Belize, Brunei, Canada, Cyprus, Diego Garcia, the Falkland Islands, Germany, Gibraltar, Kenya and Qatar.[192] +The British armed forces played a key role in establishing the British Empire as the dominant world power in the 18th, 19th and early 20th centuries. 
Throughout its unique history the British forces have seen action in a number of major wars, such as the Seven Years' War, the Napoleonic Wars, the Crimean War, World War I and World War II—as well as many colonial conflicts. By emerging victorious from such conflicts, Britain has often been able to decisively influence world events. Since the end of the British Empire, the UK has nonetheless remained a major military power. Following the end of the Cold War, defence policy has a stated assumption that "the most demanding operations" will be undertaken as part of a coalition.[193] Setting aside the intervention in Sierra Leone, recent UK military operations in Bosnia, Kosovo, Afghanistan, Iraq and, most recently, Libya, have followed this approach. The last time the British military fought alone was the Falklands War of 1982. +According to various sources, including the Stockholm International Peace Research Institute and the International Institute for Strategic Studies, the United Kingdom has the fifth- or sixth-highest military expenditure in the world. Total defence spending currently accounts for around 2.4% of total national GDP.[22][23] +Economy +Main article: Economy of the United Kingdom + +The Bank of England – the central bank of the United Kingdom +The UK has a partially regulated market economy.[194] Based on market exchange rates the UK is today the sixth-largest economy in the world and the third-largest in Europe after Germany and France, having fallen behind France for the first time in over a decade in 2008.[195] HM Treasury, led by the Chancellor of the Exchequer, is responsible for developing and executing the British government's public finance policy and economic policy. The Bank of England is the UK's central bank and is responsible for issuing notes and coins in the nation's currency, the pound sterling. 
Banks in Scotland and Northern Ireland retain the right to issue their own notes, subject to retaining enough Bank of England notes in reserve to cover their issue. Pound sterling is the world's third-largest reserve currency (after the US Dollar and the Euro).[196] Since 1997 the Bank of England's Monetary Policy Committee, headed by the Governor of the Bank of England, has been responsible for setting interest rates at the level necessary to achieve the overall inflation target for the economy that is set by the Chancellor each year.[197] +The UK service sector makes up around 73% of GDP.[198] London is one of the three "command centres" of the global economy (alongside New York City and Tokyo),[199] it is the world's largest financial centre alongside New York,[200][201][202] and it has the largest city GDP in Europe.[203] Edinburgh is also one of the largest financial centres in Europe.[204] Tourism is very important to the British economy and, with over 27 million tourists arriving in 2004, the United Kingdom is ranked as the sixth major tourist destination in the world and London has the most international visitors of any city in the world.[205][206] The creative industries accounted for 7% GVA in 2005 and grew at an average of 6% per annum between 1997 and 2005.[207] + +The Airbus A350 has its wings and engines manufactured in the UK. +The Industrial Revolution started in the UK with an initial concentration on the textile industry,[208] followed by other heavy industries such as shipbuilding, coal mining and steelmaking.[209][210] +The empire was exploited as an overseas market for British products, allowing the UK to dominate international trade in the 19th century. As other nations industrialised, coupled with economic decline after two world wars, the United Kingdom began to lose its competitive advantage and heavy industry declined, by degrees, throughout the 20th century. 
Manufacturing remains a significant part of the economy but accounted for only 16.7% of national output in 2003.[211] +The automotive industry is a significant part of the UK manufacturing sector and employs over 800,000 people, with a turnover of some £52 billion, generating £26.6 billion of exports.[212] +The aerospace industry of the UK is the second- or third-largest national aerospace industry in the world depending upon the method of measurement and has an annual turnover of around £20 billion. The wings for the Airbus A380 and the A350 XWB are designed and manufactured at Airbus UK's world-leading Broughton facility, whilst over a quarter of the value of the Boeing 787 comes from UK manufacturers including Eaton (fuel subsystem pumps), Messier-Bugatti-Dowty (the landing gear) and Rolls-Royce (the engines). Other key names include GKN Aerospace – an expert in metallic and composite aerostructures that's involved in almost every civil and military fixed and rotary wing aircraft in production and development today.[213][214][215][216] +BAE Systems - plays a critical role on some of the world's biggest defence aerospace projects. The company makes large sections of the Typhoon Eurofighter at its sub-assembly plant in Salmesbury and assembles the aircraft for the RAF at its Warton Plant, near Preston. It is also a principal subcontractor on the F35 Joint Strike Fighter - the world's largest single defence project - for which it designs and manufactures a range of components including the aft fuselage, vertical and horizontal tail and wing tips and fuel system. As well as this it manufactures the Hawk, the world's most successful jet training aircraft.[216] Airbus UK also manufactures the wings for the A400m military transporter. Rolls-Royce, is the world's second-largest aero-engine manufacturer. Its engines power more than 30 types of commercial aircraft and it has more than 30,000 engines currently in service across both the civil and defence sectors. 
Agusta Westland designs and manufactures complete helicopters in the UK.[216] +The UK space industry is growing very fast. Worth £9.1bn in 2011 and employing 29,000 people, it is growing at a rate of some 7.5 per cent annually, according to its umbrella organisation, the UK Space Agency. Government strategy is for the space industry to be a £40bn business for the UK by 2030, capturing a 10 per cent share of the $250bn world market for commercial space technology.[216] On 16 July 2013, the British government pledged £60m to the Skylon project: this investment will provide support at a "crucial stage" to allow a full-scale prototype of the SABRE engine to be built. +The pharmaceutical industry plays an important role in the UK economy and the country has the third-highest share of global pharmaceutical R&D expenditures (after the United States and Japan).[217][218] +Agriculture is intensive, highly mechanised and efficient by European standards, producing about 60% of food needs with less than 1.6% of the labour force (535,000 workers).[219] Around two-thirds of production is devoted to livestock, one-third to arable crops. Farmers are subsidised by the EU's Common Agricultural Policy. The UK retains a significant, though much reduced fishing industry. It is also rich in a number of natural resources including coal, petroleum, natural gas, tin, limestone, iron ore, salt, clay, chalk, gypsum, lead, silica and an abundance of arable land. 
+ +The City of London is the world's largest financial centre alongside New York[200][201][202] +In the final quarter of 2008 the UK economy officially entered recession for the first time since 1991.[220] Unemployment increased from 5.2% in May 2008 to 7.6% in May 2009 and by January 2012 the unemployment rate among 18 to 24-year-olds had risen from 11.9% to 22.5%, the highest since current records began in 1992.[221][222] Total UK government debt rose from 44.4% of GDP in 2007 to 82.9% of GDP in 2011.[223] In February 2013, the UK lost its top AAA credit rating for the first time since 1978.[224] +Inflation-adjusted wages in the UK fell by 3.2% between the third quarter of 2010 and the third quarter of 2012.[225] Since the 1980s, economic inequality has grown faster in the UK than in any other developed country.[226] +The poverty line in the UK is commonly defined as being 60% of the median household income.[nb 10] In 2007–2008 13.5 million people, or 22% of the population, lived below this line. This is a higher level of relative poverty than all but four other EU members.[227] In the same year 4.0 million children, 31% of the total, lived in households below the poverty line after housing costs were taken into account. 
This is a decrease of 400,000 children since 1998–1999.[228] The UK imports 40% of its food supplies.[229] The Office for National Statistics has estimated that in 2011, 14 million people were at risk of poverty or social exclusion, and that one person in 20 (5.1%) was now experiencing "severe material deprivation,"[230] up from 3 million people in 1977.[231][232] +Science and technology +Main article: Science and technology in the United Kingdom + +Charles Darwin (1809–82), whose theory of evolution by natural selection is the foundation of modern biological sciences +England and Scotland were leading centres of the Scientific Revolution from the 17th century[233] and the United Kingdom led the Industrial Revolution from the 18th century,[208] and has continued to produce scientists and engineers credited with important advances.[234] Major theorists from the 17th and 18th centuries include Isaac Newton, whose laws of motion and illumination of gravity have been seen as a keystone of modern science;[235] from the 19th century Charles Darwin, whose theory of evolution by natural selection was fundamental to the development of modern biology, and James Clerk Maxwell, who formulated classical electromagnetic theory; and more recently Stephen Hawking, who has advanced major theories in the fields of cosmology, quantum gravity and the investigation of black holes.[236] Major scientific discoveries from the 18th century include hydrogen by Henry Cavendish;[237] from the 20th century penicillin by Alexander Fleming,[238] and the structure of DNA, by Francis Crick and others.[239] Major engineering projects and applications by people from the UK in the 18th century include the steam locomotive, developed by Richard Trevithick and Andrew Vivian;[240] from the 19th century the electric motor by Michael Faraday, the incandescent light bulb by Joseph Swan,[241] and the first practical telephone, patented by Alexander Graham Bell;[242] and in the 20th century the world's first
working television system by John Logie Baird and others,[243] the jet engine by Frank Whittle, the basis of the modern computer by Alan Turing, and the World Wide Web by Tim Berners-Lee.[244] +Scientific research and development remains important in British universities, with many establishing science parks to facilitate production and co-operation with industry.[245] Between 2004 and 2008 the UK produced 7% of the world's scientific research papers and had an 8% share of scientific citations, the third and second highest in the world (after the United States and China, and the United States, respectively).[246] Scientific journals produced in the UK include Nature, the British Medical Journal and The Lancet.[247] +Transport +Main article: Transport in the United Kingdom + +Heathrow Terminal 5 building. London Heathrow Airport has the most international passenger traffic of any airport in the world.[248][249] +A radial road network totals 29,145 miles (46,904 km) of main roads, 2,173 miles (3,497 km) of motorways and 213,750 miles (344,000 km) of paved roads.[105] In 2009 there were a total of 34 million licensed vehicles in Great Britain.[250] +The UK has a railway network of 10,072 miles (16,209 km) in Great Britain and 189 miles (304 km) in Northern Ireland. Railways in Northern Ireland are operated by NI Railways, a subsidiary of state-owned Translink. In Great Britain, the British Rail network was privatised between 1994 and 1997. Network Rail owns and manages most of the fixed assets (tracks, signals etc.). About 20 privately owned (and foreign state-owned railways including: Deutsche Bahn; SNCF and Nederlandse Spoorwegen) Train Operating Companies (including state-owned East Coast), operate passenger trains and carry over 18,000 passenger trains daily. 
There are also some 1,000 freight trains in daily operation.[105] The UK government is to spend £30 billion on a new high-speed railway line, HS2, to be operational by 2025.[251] Crossrail, under construction in London, is Europe's largest construction project with a £15 billion projected cost.[252][253] +In the year from October 2009 to September 2010 UK airports handled a total of 211.4 million passengers.[254] In that period the three largest airports were London Heathrow Airport (65.6 million passengers), Gatwick Airport (31.5 million passengers) and London Stansted Airport (18.9 million passengers).[254] London Heathrow Airport, located 15 miles (24 km) west of the capital, has the most international passenger traffic of any airport in the world[248][249] and is the hub for the UK flag carrier British Airways, as well as for BMI and Virgin Atlantic.[255] +Energy +Main article: Energy in the United Kingdom + +An oil platform in the North Sea +In 2006, the UK was the world's ninth-largest consumer of energy and the 15th-largest producer.[256] The UK is home to a number of large energy companies, including two of the six oil and gas "supermajors" – BP and Royal Dutch Shell – and BG Group.[257][258] In 2011, 40% of the UK's electricity was produced by gas, 30% by coal, 19% by nuclear power and 4.2% by wind, hydro, biofuels and wastes.[259] +In 2009, the UK produced 1.5 million barrels per day (bbl/d) of oil and consumed 1.7 million bbl/d.[260] Production is now in decline and the UK has been a net importer of oil since 2005.[260] In 2010 the UK had around 3.1 billion barrels of proven crude oil reserves, the largest of any EU member state.[260] In 2009, 66.5% of the UK's oil supply was imported.[261] +In 2009, the UK was the 13th-largest producer of natural gas in the world and the largest producer in the EU.[262] Production is now in decline and the UK has been a net importer of natural gas since 2004.[262] In 2009, half of British gas was supplied from imports
and this is expected to increase to at least 75% by 2015, as domestic reserves are depleted.[259] +Coal production played a key role in the UK economy in the 19th and 20th centuries. In the mid-1970s, 130 million tonnes of coal was being produced annually, not falling below 100 million tonnes until the early 1980s. During the 1980s and 1990s the industry was scaled back considerably. In 2011, the UK produced 18.3 million tonnes of coal.[263] In 2005 it had proven recoverable coal reserves of 171 million tons.[263] The UK Coal Authority has stated there is a potential to produce between 7 billion tonnes and 16 billion tonnes of coal through underground coal gasification (UCG) or 'fracking',[264] and that, based on current UK coal consumption, such reserves could last between 200 and 400 years.[265] However, environmental and social concerns have been raised over chemicals getting into the water table and minor earthquakes damaging homes.[266][267] +In the late 1990s, nuclear power plants contributed around 25% of total annual electricity generation in the UK, but this has gradually declined as old plants have been shut down and ageing-related problems affect plant availability. In 2012, the UK had 16 reactors normally generating about 19% of its electricity. All but one of the reactors will be retired by 2023. Unlike Germany and Japan, the UK intends to build a new generation of nuclear plants from about 2018.[259] +Demographics +Main article: Demographics of the United Kingdom + +Map of population density in the UK as at the 2011 census. 
+A census is taken simultaneously in all parts of the UK every ten years.[268] The Office for National Statistics is responsible for collecting data for England and Wales, the General Register Office for Scotland and the Northern Ireland Statistics and Research Agency each being responsible for censuses in their respective countries.[269] In the 2011 census the total population of the United Kingdom was 63,181,775.[270] It is the third-largest in the European Union, the fifth-largest in the Commonwealth and the 21st-largest in the world. 2010 was the third successive year in which natural change contributed more to population growth than net long-term international migration.[271][271] Between 2001 and 2011 the population increased by an average annual rate of approximately 0.7 per cent.[270] This compares to 0.3 per cent per year in the period 1991 to 2001 and 0.2 per cent in the decade 1981 to 1991.[271] The 2011 census also confirmed that the proportion of the population aged 0–14 has nearly halved (31 per cent in 1911 compared to 18 in 2011) and the proportion of older people aged 65 and over has more than trebled (from 5 to 16 per cent).[270] It has been estimated that the number of people aged 100 or over will rise steeply to reach over 626,000 by 2080.[272] +England's population in 2011 was found to be 53 million.[273] It is one of the most densely populated countries in the world, with 383 people resident per square kilometre in mid-2003,[274] with a particular concentration in London and the south-east.[275] The 2011 census put Scotland's population at 5.3 million,[276] Wales at 3.06 million and Northern Ireland at 1.81 million.[273] In percentage terms England has had the fastest growing population of any country of the UK in the period from 2001 to 2011, with an increase of 7.9%. 
+In 2012 the average total fertility rate (TFR) across the UK was 1.92 children per woman.[277] While a rising birth rate is contributing to current population growth, it remains considerably below the 'baby boom' peak of 2.95 children per woman in 1964,[278] below the replacement rate of 2.1, but higher than the 2001 record low of 1.63.[277] In 2012, Scotland had the lowest TFR at only 1.67, followed by Wales at 1.88, England at 1.94, and Northern Ireland at 2.03.[277] In 2011, 47.3% of births in the UK were to unmarried women.[279] A government figure estimated that there are 3.6 million homosexual people in Britain comprising 6 per cent of the population.[280] +view talk edit +view talk edit +Largest urban areas of the United Kingdom +United Kingdom 2011 census Built-up areas[281][282][283] +Rank Urban area Pop. Principal settlement Rank Urban area Pop. Principal settlement +Greater London Urban Area +Greater London Urban Area +Greater Manchester Urban Area +Greater Manchester Urban Area +1 Greater London Urban Area 9,787,426 London 11 Bristol Urban Area 617,280 Bristol West Midlands Urban Area +West Midlands Urban Area +West Yorkshire Urban Area +West Yorkshire Urban Area +2 Greater Manchester Urban Area 2,553,379 Manchester 12 Belfast Metropolitan Urban Area 579,236 Belfast +3 West Midlands Urban Area 2,440,986 Birmingham 13 Leicester Urban Area 508,916 Leicester +4 West Yorkshire Urban Area 1,777,934 Leeds 14 Edinburgh 488,610 Edinburgh +5 Greater Glasgow 976,970 Glasgow 15 Brighton/Worthing/Littlehampton 474,485 Brighton +6 Liverpool Urban Area 864,122 Liverpool 16 South East Dorset conurbation 466,266 Bournemouth +7 South Hampshire 855,569 Southampton 17 Cardiff Urban Area 390,214 Cardiff +8 Tyneside 774,891 Newcastle 18 Teesside 376,633 Middlesbrough +9 Nottingham Urban Area 729,977 Nottingham 19 The Potteries Urban Area 372,775 Stoke-on-Trent +10 Sheffield Urban Area 685,368 Sheffield 20 Coventry and Bedworth Urban Area 359,262 Coventry + +Ethnic groups + 
+Map showing the percentage of the population who are not white according to the 2011 census. +Ethnic group 2011 +population 2011 +% +White 55,010,359 87.1 +White: Irish Traveller 63,193 0.1 +Asian or Asian British: Indian 1,451,862 +2.3 +Asian or Asian British: Pakistani 1,173,892 +1.9 +Asian or Asian British: Bangladeshi 451,529 +0.7 +Asian or Asian British: Chinese 433,150 +0.7 +Asian or Asian British: Asian Other 861,815 +1.4 +Asian or Asian British: Total 4,373,339 +7.0 +Black or Black British 1,904,684 +3.0 +British Mixed 1,250,229 +2.0 +Other: Total 580,374 +0.9 +Total[284] 63,182,178 +100 +Historically, indigenous British people were thought to be descended from the various ethnic groups that settled there before the 11th century: the Celts, Romans, Anglo-Saxons, Norse and the Normans. Welsh people could be the oldest ethnic group in the UK.[285] A 2006 genetic study shows that more than 50 per cent of England's gene pool contains Germanic Y chromosomes.[286] Another 2005 genetic analysis indicates that "about 75 per cent of the traceable ancestors of the modern British population had arrived in the British isles by about 6,200 years ago, at the start of the British Neolithic or Stone Age", and that the British broadly share a common ancestry with the Basque people.[287][288][289] +The UK has a history of small-scale non-white immigration, with Liverpool having the oldest Black population in the country dating back to at least the 1730s during the period of the African slave trade,[290] and the oldest Chinese community in Europe, dating to the arrival of Chinese seamen in the 19th century.[291] In 1950 there were probably fewer than 20,000 non-white residents in Britain, almost all born overseas.[292] +Since 1948 substantial immigration from Africa, the Caribbean and South Asia has been a legacy of ties forged by the British Empire. 
Migration from new EU member states in Central and Eastern Europe since 2004 has resulted in growth in these population groups but, as of 2008, the trend is reversing. Many of these migrants are returning to their home countries, leaving the size of these groups unknown.[293] In 2011, 87.1% of the population identified themselves as White, meaning 12.9% of the UK population identify themselves as being of a mixed or ethnic minority background. +Ethnic diversity varies significantly across the UK. 30.4% of London's population and 37.4% of Leicester's was estimated to be non-white in 2005,[294][295] whereas less than 5% of the populations of North East England, Wales and the South West were from ethnic minorities, according to the 2001 census.[296] In 2011, 26.5% of primary and 22.2% of secondary pupils at state schools in England were members of an ethnic minority.[297] +The non-white British population of England and Wales increased by 38% from 6.6 million in 2001 to 9.1 million in 2009.[298] The fastest-growing group was the mixed-ethnicity population, which doubled from 672,000 in 2001 to 986,600 in 2009. Also in the same period, a decrease of 36,000 white British people was recorded.[299] +Languages +Main article: Languages of the United Kingdom + +The English-speaking world. Countries in dark blue have a majority of native speakers; countries where English is an official but not a majority language are shaded in light blue.
English is one of the official languages of the European Union[300] and the United Nations[301] +The UK's de facto official language is English.[302][303] It is estimated that 95% of the UK's population are monolingual English speakers.[304] 5.5% of the population are estimated to speak languages brought to the UK as a result of relatively recent immigration.[304] South Asian languages, including Bengali, Tamil, Punjabi, Hindi and Gujarati, are the largest grouping and are spoken by 2.7% of the UK population.[304] According to the 2011 census, Polish has become the second-largest language spoken in England and has 546,000 speakers.[305] +Four Celtic languages are spoken in the UK: Welsh; Irish; Scottish Gaelic; and Cornish. All are recognised as regional or minority languages, subject to specific measures of protection and promotion under the European Charter for Regional or Minority Languages[2][306] and the Framework Convention for the Protection of National Minorities.[307] In the 2001 Census over a fifth (21%) of the population of Wales said they could speak Welsh,[308] an increase from the 1991 Census (18%).[309] In addition it is estimated that about 200,000 Welsh speakers live in England.[310] In the same census in Northern Ireland 167,487 people (10.4%) stated that they had "some knowledge of Irish" (see Irish language in Northern Ireland), almost exclusively in the nationalist (mainly Catholic) population. 
Over 92,000 people in Scotland (just under 2% of the population) had some Gaelic language ability, including 72% of those living in the Outer Hebrides.[311] The number of schoolchildren being taught through Welsh, Scottish Gaelic and Irish is increasing.[312] Among emigrant-descended populations some Scottish Gaelic is still spoken in Canada (principally Nova Scotia and Cape Breton Island),[313] and Welsh in Patagonia, Argentina.[314] +Scots, a language descended from early northern Middle English, has limited recognition alongside its regional variant, Ulster Scots in Northern Ireland, without specific commitments to protection and promotion.[2][315] +It is compulsory for pupils to study a second language up to the age of 14 in England,[316] and up to age 16 in Scotland. French and German are the two most commonly taught second languages in England and Scotland. All pupils in Wales are taught Welsh as a second language up to age 16, or are taught in Welsh.[317] +Religion +Main article: Religion in the United Kingdom + +Westminster Abbey is used for the coronation of British monarchs +Forms of Christianity have dominated religious life in what is now the United Kingdom for over 1,400 years.[318] Although a majority of citizens still identify with Christianity in many surveys, regular church attendance has fallen dramatically since the middle of the 20th century,[319] while immigration and demographic change have contributed to the growth of other faiths, most notably Islam.[320] This has led some commentators to variously describe the UK as a multi-faith,[321] secularised,[322] or post-Christian society.[323] +In the 2001 census 71.6% of all respondents indicated that they were Christians, with the next largest faiths (by number of adherents) being Islam (2.8%), Hinduism (1.0%), Sikhism (0.6%), Judaism (0.5%), Buddhism (0.3%) and all other religions (0.3%).[324] 15% of respondents stated that they had no religion, with a further 7% not stating a religious 
preference.[325] A Tearfund survey in 2007 showed only one in ten Britons actually attend church weekly.[326] Between the 2001 and 2011 census there was a decrease in the amount of people who identified as Christian by 12%, whilst the percentage of those reporting no religious affiliation doubled. This contrasted with growth in the other main religious group categories, with the number of Muslims increasing by the most substantial margin to a total of about 5%.[327] +The Church of England is the established church in England.[328] It retains a representation in the UK Parliament and the British monarch is its Supreme Governor.[329] In Scotland the Presbyterian Church of Scotland is recognised as the national church. It is not subject to state control, and the British monarch is an ordinary member, required to swear an oath to "maintain and preserve the Protestant Religion and Presbyterian Church Government" upon his or her accession.[330][331] The (Anglican) Church in Wales was disestablished in 1920 and, as the (Anglican) Church of Ireland was disestablished in 1870 before the partition of Ireland, there is no established church in Northern Ireland.[332] Although there are no UK-wide data in the 2001 census on adherence to individual Christian denominations, it has been estimated that 62% of Christians are Anglican, 13.5% Catholic, 6% Presbyterian, 3.4% Methodist with small numbers of other Protestant denominations such as Open Brethren, and Orthodox churches.[333] +Migration +Main article: Immigration to the United Kingdom since 1922 +See also: Foreign-born population of the United Kingdom + +Estimated foreign-born population by country of birth, April 2007 – March 2008 +The United Kingdom has experienced successive waves of migration. 
The Great Famine in Ireland, then part of the United Kingdom, resulted in perhaps a million people migrating to Great Britain.[334] Unable to return to Poland at the end of World War II, over 120,000 Polish veterans remained in the UK permanently.[335] After World War II, there was significant immigration from the colonies and newly independent former colonies, partly as a legacy of empire and partly driven by labour shortages. Many of these migrants came from the Caribbean and the Indian subcontinent.[336] The British Asian population has increased from 2.2 million in 2001 to over 4.2 million in 2011.[337] +One of the more recent trends in migration has been the arrival of workers from the new EU member states in Eastern Europe. In 2010, there were 7.0 million foreign-born residents in the UK, corresponding to 11.3% of the total population. Of these, 4.76 million (7.7%) were born outside the EU and 2.24 million (3.6%) were born in another EU Member State.[338] The proportion of foreign-born people in the UK remains slightly below that of many other European countries.[339] However, immigration is now contributing to a rising population[340] with arrivals and UK-born children of migrants accounting for about half of the population increase between 1991 and 2001. 
Analysis of Office for National Statistics (ONS) data shows that a net total of 2.3 million migrants moved to the UK in the 15 years from 1991 to 2006.[341][342] In 2008 it was predicted that migration would add 7 million to the UK population by 2031,[343] though these figures are disputed.[344] The ONS reported that net migration rose from 2009 to 2010 by 21 per cent to 239,000.[345] In 2011 the net increase was 251,000: immigration was 589,000, while the number of people emigrating (for more than 12 months) was 338,000.[346][347] +195,046 foreign nationals became British citizens in 2010,[348] compared to 54,902 in 1999.[348][349] A record 241,192 people were granted permanent settlement rights in 2010, of whom 51 per cent were from Asia and 27 per cent from Africa.[350] 25.5 per cent of babies born in England and Wales in 2011 were born to mothers born outside the UK, according to official statistics released in 2012.[351] +Citizens of the European Union, including those of the UK, have the right to live and work in any EU member state.[352] The UK applied temporary restrictions to citizens of Romania and Bulgaria, which joined the EU in January 2007.[353] Research conducted by the Migration Policy Institute for the Equality and Human Rights Commission suggests that, between May 2004 and September 2009, 1.5 million workers migrated from the new EU member states to the UK, two-thirds of them Polish, but that many subsequently returned home, resulting in a net increase in the number of nationals of the new member states in the UK of some 700,000 over that period.[354][355] The late-2000s recession in the UK reduced the economic incentive for Poles to migrate to the UK,[356] the migration becoming temporary and circular.[357] In 2009, for the first time since enlargement, more nationals of the eight central and eastern European states that had joined the EU in 2004 left the UK than arrived.[358] In 2011, citizens of the new EU member states made up 13% of the 
immigrants entering the country.[346] + +Estimated number of British citizens living overseas by country, 2006 +The UK government has introduced a points-based immigration system for immigration from outside the European Economic Area to replace former schemes, including the Scottish Government's Fresh Talent Initiative.[359] In June 2010 the UK government introduced a temporary limit of 24,000 on immigration from outside the EU, aiming to discourage applications before a permanent cap was imposed in April 2011.[360] The cap has caused tension within the coalition: business secretary Vince Cable has argued that it is harming British businesses.[361] +Emigration was an important feature of British society in the 19th century. Between 1815 and 1930 around 11.4 million people emigrated from Britain and 7.3 million from Ireland. Estimates show that by the end of the 20th century some 300 million people of British and Irish descent were permanently settled around the globe.[362] Today, at least 5.5 million UK-born people live abroad,[363][364][365] mainly in Australia, Spain, the United States and Canada.[363][366] +Education +Main article: Education in the United Kingdom +See also: Education in England, Education in Northern Ireland, Education in Scotland and Education in Wales + +King's College, part of the University of Cambridge, which was founded in 1209 +Education in the United Kingdom is a devolved matter, with each country having a separate education system. +Whilst education in England is the responsibility of the Secretary of State for Education, the day-to-day administration and funding of state schools is the responsibility of local authorities.[367] Universally free of charge state education was introduced piecemeal between 1870 and 1944.[368][369] Education is now mandatory from ages five to sixteen (15 if born in late July or August). 
In 2011, the Trends in International Mathematics and Science Study (TIMSS) rated 13–14-year-old pupils in England and Wales 10th in the world for maths and 9th for science.[370] The majority of children are educated in state-sector schools, a small proportion of which select on the grounds of academic ability. Two of the top ten performing schools in terms of GCSE results in 2006 were state-run grammar schools. Over half of students at the leading universities of Cambridge and Oxford had attended state schools.[371] Despite a fall in actual numbers the proportion of children in England attending private schools has risen to over 7%.[372] In 2010, more than 45% of places at the University of Oxford and 40% at the University of Cambridge were taken by students from private schools, even though they educate just 7% of the population.[373] England has the two oldest universities in the English-speaking world, the Universities of Oxford and Cambridge (jointly known as "Oxbridge"), with a history of over eight centuries. The United Kingdom has 9 universities featured in the Times Higher Education top 100 rankings, making it second to the United States in terms of representation.[374] + +Queen's University Belfast, built in 1849[375] +Education in Scotland is the responsibility of the Cabinet Secretary for Education and Lifelong Learning, with day-to-day administration and funding of state schools the responsibility of Local Authorities. Two non-departmental public bodies have key roles in Scottish education. 
The Scottish Qualifications Authority is responsible for the development, accreditation, assessment and certification of qualifications other than degrees which are delivered at secondary schools, post-secondary colleges of further education and other centres.[376] The Learning and Teaching Scotland provides advice, resources and staff development to education professionals.[377] Scotland first legislated for compulsory education in 1496.[378] The proportion of children in Scotland attending private schools is just over 4%, and it has been rising slowly in recent years.[379] Scottish students who attend Scottish universities pay neither tuition fees nor graduate endowment charges, as fees were abolished in 2001 and the graduate endowment scheme was abolished in 2008.[380] +The Welsh Government has responsibility for education in Wales. A significant number of Welsh students are taught either wholly or largely in the Welsh language; lessons in Welsh are compulsory for all until the age of 16.[381] There are plans to increase the provision of Welsh-medium schools as part of the policy of creating a fully bilingual Wales. +Education in Northern Ireland is the responsibility of the Minister of Education and the Minister for Employment and Learning, although responsibility at a local level is administered by five education and library boards covering different geographical areas. 
The Council for the Curriculum, Examinations & Assessment (CCEA) is the body responsible for advising the government on what should be taught in Northern Ireland's schools, monitoring standards and awarding qualifications.[382] +A government commission's report in 2014 found that privately educated people comprise 7% of the general population of the UK but much larger percentages of the top professions, the most extreme case quoted being 71% of senior judges.[383][384] +Healthcare +Main article: Healthcare in the United Kingdom + +The Royal Aberdeen Children's Hospital, an NHS Scotland specialist children's hospital +Healthcare in the United Kingdom is a devolved matter and each country has its own system of private and publicly funded health care, together with alternative, holistic and complementary treatments. Public healthcare is provided to all UK permanent residents and is mostly free at the point of need, being paid for from general taxation. The World Health Organization, in 2000, ranked the provision of healthcare in the United Kingdom as fifteenth best in Europe and eighteenth in the world.[385][386] +Regulatory bodies are organised on a UK-wide basis such as the General Medical Council, the Nursing and Midwifery Council and non-governmental-based, such as the Royal Colleges. However, political and operational responsibility for healthcare lies with four national executives; healthcare in England is the responsibility of the UK Government; healthcare in Northern Ireland is the responsibility of the Northern Ireland Executive; healthcare in Scotland is the responsibility of the Scottish Government; and healthcare in Wales is the responsibility of the Welsh Assembly Government. 
Each National Health Service has different policies and priorities, resulting in contrasts.[387][388] +Since 1979 expenditure on healthcare has been increased significantly to bring it closer to the European Union average.[389] The UK spends around 8.4 per cent of its gross domestic product on healthcare, which is 0.5 percentage points below the Organisation for Economic Co-operation and Development average and about one percentage point below the average of the European Union.[390] +Culture +Main article: Culture of the United Kingdom +The culture of the United Kingdom has been influenced by many factors including: the nation's island status; its history as a western liberal democracy and a major power; as well as being a political union of four countries with each preserving elements of distinctive traditions, customs and symbolism. As a result of the British Empire, British influence can be observed in the language, culture and legal systems of many of its former colonies including Australia, Canada, India, Ireland, New Zealand, South Africa and the United States. The substantial cultural influence of the United Kingdom has led it to be described as a "cultural superpower."[391][392] +Literature +Main article: British literature + +The Chandos portrait, believed to depict William Shakespeare +'British literature' refers to literature associated with the United Kingdom, the Isle of Man and the Channel Islands. Most British literature is in the English language. In 2005, some 206,000 books were published in the United Kingdom and in 2006 it was the largest publisher of books in the world.[393] +The English playwright and poet William Shakespeare is widely regarded as the greatest dramatist of all time,[394][395][396] and his contemporaries Christopher Marlowe and Ben Jonson have also been held in continuous high esteem. 
More recently the playwrights Alan Ayckbourn, Harold Pinter, Michael Frayn, Tom Stoppard and David Edgar have combined elements of surrealism, realism and radicalism. +Notable pre-modern and early-modern English writers include Geoffrey Chaucer (14th century), Thomas Malory (15th century), Sir Thomas More (16th century), John Bunyan (17th century) and John Milton (17th century). In the 18th century Daniel Defoe (author of Robinson Crusoe) and Samuel Richardson were pioneers of the modern novel. In the 19th century there followed further innovation by Jane Austen, the gothic novelist Mary Shelley, the children's writer Lewis Carroll, the Brontë sisters, the social campaigner Charles Dickens, the naturalist Thomas Hardy, the realist George Eliot, the visionary poet William Blake and romantic poet William Wordsworth. 20th-century English writers include the science-fiction novelist H. G. Wells; the writers of children's classics Rudyard Kipling, A. A. Milne (the creator of Winnie-the-Pooh), Roald Dahl and Enid Blyton; the controversial D. H. Lawrence; the modernist Virginia Woolf; the satirist Evelyn Waugh; the prophetic novelist George Orwell; the popular novelists W. Somerset Maugham and Graham Greene; the crime writer Agatha Christie (the best-selling novelist of all time);[397] Ian Fleming (the creator of James Bond); the poets T.S. Eliot, Philip Larkin and Ted Hughes; the fantasy writers J. R. R. Tolkien, C. S. Lewis and J. K. Rowling; the graphic novelist Alan Moore, whose novel Watchmen is often cited by critics as comic's greatest series and graphic novel[398] and one of the best-selling graphic novels ever published.[399] + +A photograph of Victorian era novelist Charles Dickens +Scotland's contributions include the detective writer Arthur Conan Doyle (the creator of Sherlock Holmes), romantic literature by Sir Walter Scott, the children's writer J. M. Barrie, the epic adventures of Robert Louis Stevenson and the celebrated poet Robert Burns. 
More recently the modernist and nationalist Hugh MacDiarmid and Neil M. Gunn contributed to the Scottish Renaissance. A more grim outlook is found in Ian Rankin's stories and the psychological horror-comedy of Iain Banks. Scotland's capital, Edinburgh, was UNESCO's first worldwide City of Literature.[400] +Britain's oldest known poem, Y Gododdin, was composed in Yr Hen Ogledd (The Old North), most likely in the late 6th century. It was written in Cumbric or Old Welsh and contains the earliest known reference to King Arthur.[401] From around the seventh century, the connection between Wales and the Old North was lost, and the focus of Welsh-language culture shifted to Wales, where Arthurian legend was further developed by Geoffrey of Monmouth.[402] Wales's most celebrated medieval poet, Dafydd ap Gwilym (fl.1320–1370), composed poetry on themes including nature, religion and especially love. He is widely regarded as one of the greatest European poets of his age.[403] Until the late 19th century the majority of Welsh literature was in Welsh and much of the prose was religious in character. Daniel Owen is credited as the first Welsh-language novelist, publishing Rhys Lewis in 1885. The best-known of the Anglo-Welsh poets are both Thomases. Dylan Thomas became famous on both sides of the Atlantic in the mid-20th century. He is remembered for his poetry – his "Do not go gentle into that good night; Rage, rage against the dying of the light." is one of the most quoted couplets of English language verse – and for his 'play for voices', Under Milk Wood. The influential Church in Wales 'poet-priest' and Welsh nationalist R. S. Thomas was nominated for the Nobel Prize in Literature in 1996. Leading Welsh novelists of the twentieth century include Richard Llewellyn and Kate Roberts.[404][405] +Authors of other nationalities, particularly from Commonwealth countries, the Republic of Ireland and the United States, have lived and worked in the UK. 
Significant examples through the centuries include Jonathan Swift, Oscar Wilde, Bram Stoker, George Bernard Shaw, Joseph Conrad, T.S. Eliot, Ezra Pound and more recently British authors born abroad such as Kazuo Ishiguro and Sir Salman Rushdie.[406][407] +Music +Main article: Music of the United Kingdom +See also: British rock + +The Beatles are the most commercially successful and critically acclaimed band in the history of music, selling over a billion records internationally.[408][409][410] +Various styles of music are popular in the UK from the indigenous folk music of England, Wales, Scotland and Northern Ireland to heavy metal. Notable composers of classical music from the United Kingdom and the countries that preceded it include William Byrd, Henry Purcell, Sir Edward Elgar, Gustav Holst, Sir Arthur Sullivan (most famous for working with the librettist Sir W. S. Gilbert), Ralph Vaughan Williams and Benjamin Britten, pioneer of modern British opera. Sir Peter Maxwell Davies is one of the foremost living composers and current Master of the Queen's Music. The UK is also home to world-renowned symphonic orchestras and choruses such as the BBC Symphony Orchestra and the London Symphony Chorus. Notable conductors include Sir Simon Rattle, John Barbirolli and Sir Malcolm Sargent. Some of the notable film score composers include John Barry, Clint Mansell, Mike Oldfield, John Powell, Craig Armstrong, David Arnold, John Murphy, Monty Norman and Harry Gregson-Williams. 
George Frideric Handel, although born German, was a naturalised British citizen[411] and some of his best works, such as Messiah, were written in the English language.[412] Andrew Lloyd Webber has achieved enormous worldwide commercial success and is a prolific composer of musical theatre, works which have dominated London's West End for a number of years and have travelled to Broadway in New York.[413] +The Beatles have international sales of over one billion units and are the biggest-selling and most influential band in the history of popular music.[408][409][410][414] Other prominent British contributors to have influenced popular music over the last 50 years include; The Rolling Stones, Led Zeppelin, Pink Floyd, Queen, the Bee Gees, and Elton John, all of whom have world wide record sales of 200 million or more.[415][416][417][418][419][420] The Brit Awards are the BPI's annual music awards, and some of the British recipients of the Outstanding Contribution to Music award include; The Who, David Bowie, Eric Clapton, Rod Stewart and The Police.[421] More recent UK music acts that have had international success include Coldplay, Radiohead, Oasis, Spice Girls, Robbie Williams, Amy Winehouse and Adele.[422] +A number of UK cities are known for their music. Acts from Liverpool have had more UK chart number one hit singles per capita (54) than any other city worldwide.[423] Glasgow's contribution to music was recognised in 2008 when it was named a UNESCO City of Music, one of only three cities in the world to have this honour.[424] +Visual art +Main article: Art of the United Kingdom + +J. M. W. Turner self-portrait, oil on canvas, c. 1799 +The history of British visual art forms part of western art history. Major British artists include: the Romantics William Blake, John Constable, Samuel Palmer and J.M.W. Turner; the portrait painters Sir Joshua Reynolds and Lucian Freud; the landscape artists Thomas Gainsborough and L. S. 
Lowry; the pioneer of the Arts and Crafts Movement William Morris; the figurative painter Francis Bacon; the Pop artists Peter Blake, Richard Hamilton and David Hockney; the collaborative duo Gilbert and George; the abstract artist Howard Hodgkin; and the sculptors Antony Gormley, Anish Kapoor and Henry Moore. During the late 1980s and 1990s the Saatchi Gallery in London helped to bring to public attention a group of multi-genre artists who would become known as the "Young British Artists": Damien Hirst, Chris Ofili, Rachel Whiteread, Tracey Emin, Mark Wallinger, Steve McQueen, Sam Taylor-Wood and the Chapman Brothers are among the better-known members of this loosely affiliated movement. +The Royal Academy in London is a key organisation for the promotion of the visual arts in the United Kingdom. Major schools of art in the UK include: the six-school University of the Arts London, which includes the Central Saint Martins College of Art and Design and Chelsea College of Art and Design; Goldsmiths, University of London; the Slade School of Fine Art (part of University College London); the Glasgow School of Art; the Royal College of Art; and The Ruskin School of Drawing and Fine Art (part of the University of Oxford). The Courtauld Institute of Art is a leading centre for the teaching of the history of art. Important art galleries in the United Kingdom include the National Gallery, National Portrait Gallery, Tate Britain and Tate Modern (the most-visited modern art gallery in the world, with around 4.7 million visitors per year).[425] +Cinema +Main article: Cinema of the United Kingdom + +Film director Alfred Hitchcock +The United Kingdom has had a considerable influence on the history of the cinema. 
The British directors Alfred Hitchcock, whose film Vertigo is considered by some critics as the best film of all time,[426] and David Lean are among the most critically acclaimed of all time.[427] Other important directors include Charlie Chaplin,[428] Michael Powell,[429] Carol Reed[430] and Ridley Scott.[431] Many British actors have achieved international fame and critical success, including: Julie Andrews,[432] Richard Burton,[433] Michael Caine,[434] Charlie Chaplin,[435] Sean Connery,[436] Vivien Leigh,[437] David Niven,[438] Laurence Olivier,[439] Peter Sellers,[440] Kate Winslet,[441] and Daniel Day-Lewis, the only person to win an Oscar in the best actor category three times.[442] Some of the most commercially successful films of all time have been produced in the United Kingdom, including the two highest-grossing film franchises (Harry Potter and James Bond).[443] Ealing Studios has a claim to being the oldest continuously working film studio in the world.[444] +Despite a history of important and successful productions, the industry has often been characterised by a debate about its identity and the level of American and European influence. British producers are active in international co-productions and British actors, directors and crew feature regularly in American films. Many successful Hollywood films have been based on British people, stories or events, including Titanic, The Lord of the Rings and Pirates of the Caribbean. 
+In 2009, British films grossed around $2 billion worldwide and achieved a market share of around 7% globally and 17% in the United Kingdom.[445] UK box-office takings totalled £944 million in 2009, with around 173 million admissions.[445] The British Film Institute has produced a poll ranking of what it considers to be the 100 greatest British films of all time, the BFI Top 100 British films.[446] The annual British Academy Film Awards, hosted by the British Academy of Film and Television Arts, are the British equivalent of the Oscars.[447] +Media +Main article: Media of the United Kingdom + +Broadcasting House in London, headquarters of the BBC, the oldest and largest broadcaster in the world.[448][449][450] +The BBC, founded in 1922, is the UK's publicly funded radio, television and Internet broadcasting corporation, and is the oldest and largest broadcaster in the world.[448][449][450] It operates numerous television and radio stations in the UK and abroad and its domestic services are funded by the television licence.[451][452] Other major players in the UK media include ITV plc, which operates 11 of the 15 regional television broadcasters that make up the ITV Network,[453] and News Corporation, which owns a number of national newspapers through News International such as the most popular tabloid The Sun and the longest-established daily "broadsheet" The Times,[454] as well as holding a large stake in satellite broadcaster British Sky Broadcasting.[455] London dominates the media sector in the UK: national newspapers and television and radio are largely based there, although Manchester is also a significant national media centre. 
Edinburgh and Glasgow, and Cardiff, are important centres of newspaper and broadcasting production in Scotland and Wales respectively.[456] The UK publishing sector, including books, directories and databases, journals, magazines and business media, newspapers and news agencies, has a combined turnover of around £20 billion and employs around 167,000 people.[457] +In 2009, it was estimated that individuals viewed a mean of 3.75 hours of television per day and 2.81 hours of radio. In that year the main BBC public service broadcasting channels accounted for an estimated 28.4% of all television viewing; the three main independent channels accounted for 29.5% and the increasingly important other satellite and digital channels for the remaining 42.1%.[458] Sales of newspapers have fallen since the 1970s and in 2009 42% of people reported reading a daily national newspaper.[459] In 2010 82.5% of the UK population were Internet users, the highest proportion amongst the 20 countries with the largest total number of users in that year.[460] +Philosophy +Main article: British philosophy +The United Kingdom is famous for the tradition of 'British Empiricism', a branch of the philosophy of knowledge that states that only knowledge verified by experience is valid, and 'Scottish Philosophy', sometimes referred to as the 'Scottish School of Common Sense'.[461] The most famous philosophers of British Empiricism are John Locke, George Berkeley and David Hume; while Dugald Stewart, Thomas Reid and William Hamilton were major exponents of the Scottish "common sense" school. Two Britons are also notable for a theory of moral philosophy utilitarianism, first used by Jeremy Bentham and later by John Stuart Mill in his short work Utilitarianism.[462][463] Other eminent philosophers from the UK and the unions and countries that preceded it include Duns Scotus, John Lilburne, Mary Wollstonecraft, Sir Francis Bacon, Adam Smith, Thomas Hobbes, William of Ockham, Bertrand Russell and A.J. 
"Freddie" Ayer. Foreign-born philosophers who settled in the UK include Isaiah Berlin, Karl Marx, Karl Popper and Ludwig Wittgenstein. +Sport +Main article: Sport in the United Kingdom + +Wembley Stadium, London, home of the England national football team, is one of the most expensive stadia ever built.[464] +Major sports, including association football, tennis, rugby union, rugby league, golf, boxing, rowing and cricket, originated or were substantially developed in the UK and the states that preceded it. With the rules and codes of many modern sports invented and codified in late 19th-century Victorian Britain, in 2012, the President of the IOC, Jacques Rogge, stated; "This great, sports-loving country is widely recognized as the birthplace of modern sport. It was here that the concepts of sportsmanship and fair play were first codified into clear rules and regulations. It was here that sport was included as an educational tool in the school curriculum".[465][466] +In most international competitions, separate teams represent England, Scotland and Wales. Northern Ireland and the Republic of Ireland usually field a single team representing all of Ireland, with notable exceptions being association football and the Commonwealth Games. In sporting contexts, the English, Scottish, Welsh and Irish / Northern Irish teams are often referred to collectively as the Home Nations. There are some sports in which a single team represents the whole of United Kingdom, including the Olympics, where the UK is represented by the Great Britain team. The 1908, 1948 and 2012 Summer Olympics were held in London, making it the first city to host the games three times. Britain has participated in every modern Olympic Games to date and is third in the medal count. +A 2003 poll found that football is the most popular sport in the United Kingdom.[467] Each of the Home Nations has its own football association, national team and league system. 
The English top division, the Premier League, is the most watched football league in the world.[468] The first-ever international football match was contested by England and Scotland on 30 November 1872.[469] England, Scotland, Wales and Northern Ireland compete as separate countries in international competitions.[470] A Great Britain Olympic football team was assembled for the first time to compete in the London 2012 Olympic Games. However, the Scottish, Welsh and Northern Irish football associations declined to participate, fearing that it would undermine their independent status – a fear confirmed by FIFA president Sepp Blatter.[471] + +The Millennium Stadium, Cardiff, opened for the 1999 Rugby World Cup. +Cricket was invented in England. The England cricket team, controlled by the England and Wales Cricket Board,[472] is the only national team in the UK with Test status. Team members are drawn from the main county sides, and include both English and Welsh players. Cricket is distinct from football and rugby where Wales and England field separate national teams, although Wales had fielded its own team in the past. Irish and Scottish players have played for England because neither Scotland nor Ireland have Test status and have only recently started to play in One Day Internationals.[473][474] Scotland, England (and Wales), and Ireland (including Northern Ireland) have competed at the Cricket World Cup, with England reaching the finals on three occasions. There is a professional league championship in which clubs representing 17 English counties and 1 Welsh county compete.[475] +Rugby league is a popular sport in some regions of the UK. 
It originated in Huddersfield and is generally played in Northern England.[476] A single 'Great Britain Lions' team had competed in the Rugby League World Cup and Test match games, but this changed in 2008 when England, Scotland and Ireland competed as separate nations.[477] Great Britain is still being retained as the full national team for Ashes tours against Australia, New Zealand and France. Super League is the highest level of professional rugby league in the UK and Europe. It consists of 11 teams from Northern England, 1 from London, 1 from Wales and 1 from France. +In rugby union, England, Scotland, Wales, Ireland, France and Italy compete in the Six Nations Championship; the premier international tournament in the northern hemisphere. Sport governing bodies in England, Scotland, Wales and Ireland organise and regulate the game separately.[478] If any of the British teams or the Irish team beat the other three in a tournament, then it is awarded the Triple Crown.[479] + +The Wimbledon Championships, a Grand Slam tennis tournament, is held in Wimbledon, London every June or July. +Thoroughbred racing, which originated under Charles II of England as the "sport of kings", is popular throughout the UK with world-famous races including the Grand National, the Epsom Derby, Royal Ascot and the Cheltenham National Hunt Festival (including the Cheltenham Gold Cup). The UK has proved successful in the international sporting arena in rowing. +The UK is closely associated with motorsport. Many teams and drivers in Formula One (F1) are based in the UK, and the country has won more drivers' and constructors' titles than any other. The UK hosted the very first F1 Grand Prix in 1950 at Silverstone, the current location of the British Grand Prix held each year in July. The country also hosts legs of the Grand Prix motorcycle racing, World Rally Championship and FIA World Endurance Championship. 
The premier national auto racing event is the British Touring Car Championship (BTCC). Motorcycle road racing has a long tradition with races such as the Isle of Man TT and the North West 200. +Golf is the sixth-most popular sport, by participation, in the UK. Although The Royal and Ancient Golf Club of St Andrews in Scotland is the sport's home course,[480] the world's oldest golf course is actually Musselburgh Links' Old Golf Course.[481] +Snooker is one of the UK's popular sporting exports, with the world championships held annually in Sheffield.[482] The modern game of lawn tennis first originated in the city of Birmingham between 1859 and 1865.[483] The Championships, Wimbledon are international tennis events held in Wimbledon in south London every summer and are regarded as the most prestigious event of the global tennis calendar. In Northern Ireland Gaelic football and hurling are popular team sports, both in terms of participation and spectating, and Irish expatriates in the UK and the US also play them.[484] Shinty (or camanachd) is popular in the Scottish Highlands.[485] +Symbols +Main article: Symbols of the United Kingdom, the Channel Islands and the Isle of Man + +The Statue of Britannia in Plymouth. Britannia is a national personification of the UK. +The flag of the United Kingdom is the Union Flag (also referred to as the Union Jack). It was created in 1606 by the superimposition of the Flag of England on the Flag of Scotland and updated in 1801 with the addition of Saint Patrick's Flag. Wales is not represented in the Union Flag, as Wales had been conquered and annexed to England prior to the formation of the United Kingdom. The possibility of redesigning the Union Flag to include representation of Wales has not been completely ruled out.[486] The national anthem of the United Kingdom is "God Save the King", with "King" replaced with "Queen" in the lyrics whenever the monarch is a woman. 
+Britannia is a national personification of the United Kingdom, originating from Roman Britain.[487] Britannia is symbolised as a young woman with brown or golden hair, wearing a Corinthian helmet and white robes. She holds Poseidon's three-pronged trident and a shield, bearing the Union Flag. Sometimes she is depicted as riding on the back of a lion. Since the height of the British Empire in the late 19th century, Britannia has often been associated with British maritime dominance, as in the patriotic song "Rule, Britannia!". Up until 2008, the lion symbol was depicted behind Britannia on the British fifty pence coin and on the back of the British ten pence coin. It is also used as a symbol on the non-ceremonial flag of the British Army. The bulldog is sometimes used as a symbol of the United Kingdom and has been associated with Winston Churchill's defiance of Nazi Germany.[488] +See also +Outline of the United Kingdom + United Kingdom – Wikipedia book +Walking in the United Kingdom +Flag of the United Kingdom.svgUnited Kingdom portal Flag of Europe.svgEuropean Union portal Europe green light.pngEurope portal +Notes +Jump up ^ The Royal coat of arms used in Scotland: + Royal Coat of Arms of the United Kingdom (Scotland).svg +Jump up ^ There is no authorised version of the national anthem as the words are a matter of tradition; only the first verse is usually sung.[1] No law was passed making "God Save the Queen" the official anthem. In the English tradition, such laws are not necessary; proclamation and usage are sufficient to make it the national anthem. "God Save the Queen" also serves as the Royal anthem for several other countries, namely certain Commonwealth realms. +Jump up ^ Under the Council of Europe's European Charter for Regional or Minority Languages, Scots, Ulster-Scots, Welsh, Cornish, Irish and Scottish Gaelic, are officially recognised as regional or minority languages by the British government for the purposes of the Charter. 
See also Languages of the United Kingdom.[2] +Jump up ^ Although Northern Ireland is the only part of the UK that shares a land border with another state, two of its Overseas Territories also share land borders with other states. Gibraltar shares a border with Spain, while the Sovereign Base Areas of Akrotiri and Dhekelia share borders with the Republic of Cyprus, Turkish Republic of Northern Cyprus and UN buffer zone separating the two Cypriot polities. +Jump up ^ The Anglo-Irish Treaty was signed on 6 December 1921 to resolve the Irish War of Independence. Effective one year later, it established the Irish Free State as a separate dominion within the Commonwealth. The UK's current name was adopted in 1927 to reflect the change. +Jump up ^ Compare to section 1 of both of the 1800 Acts of Union which reads: the Kingdoms of Great Britain and Ireland shall...be united into one Kingdom, by the Name of "The United Kingdom of Great Britain and Ireland" +Jump up ^ New Zealand, Israel and San Marino are the other countries with uncodified constitutions. +Jump up ^ Since the early twentieth century the prime minister has held the office of First Lord of the Treasury, and in recent decades has also held the office of Minister for the Civil Service. +Jump up ^ Sinn Féin, an Irish republican party, also contests elections in the Republic of Ireland. +Jump up ^ In 2007–2008, this was calculated to be £115 per week for single adults with no dependent children; £199 per week for couples with no dependent children; £195 per week for single adults with two dependent children under 14; and £279 per week for couples with two dependent children under 14. +References +Jump up ^ National Anthem, British Monarchy official website. Retrieved 16 November 2013. +^ Jump up to: a b c "List of declarations made with respect to treaty No. 148". Council of Europe. Retrieved 12 December 2013. 
+^ Jump up to: a b "Population Estimates for UK, England and Wales, Scotland and Northern Ireland, Mid-2013". Office for National Statistics. Retrieved 26 June 2014. +Jump up ^ "2011 UK censuses". Office for National Statistics. Retrieved 17 December 2012. +^ Jump up to: a b c d "United Kingdom". International Monetary Fund. Retrieved 1 November 2014. +Jump up ^ "Gini coefficient of equivalised disposable income (source: SILC)". Eurostat Data Explorer. Retrieved 13 August 2013. +Jump up ^ "2014 Human Development Report". 14 March 2013. pp. 22–25. Retrieved 27 July 2014. +Jump up ^ "Definition of Great Britain in English". Oxford University Press. Retrieved 29 October 2014. Great Britain is the name for the island that comprises England, Scotland, and Wales, although the term is also used loosely to refer to the United Kingdom. +Jump up ^ The British Monarchy, What is constitutional monarchy?. Retrieved 17 July 2013 +Jump up ^ CIA, The World Factbook. Retrieved 17 July 2013 +Jump up ^ "The World Factbook". Central Intelligence Agency. 1 February 2014. Retrieved 23 February 2014. +^ Jump up to: a b "Countries within a country". Prime Minister's Office. 10 January 2003. +^ Jump up to: a b "Devolution of powers to Scotland, Wales, and Northern Ireland". United Kingdom Government. Retrieved 17 April 2013. In a similar way to how the government is formed from members from the two Houses of Parliament, members of the devolved legislatures nominate ministers from among themselves to comprise an executive, known as the devolved administrations... +Jump up ^ "Fall in UK university students". BBC News. 29 January 2009. +Jump up ^ "Country Overviews: United Kingdom". Transport Research Knowledge Centre. Retrieved 28 March 2010. +Jump up ^ "Key facts about the United Kingdom". Directgov. Retrieved 3 May 2011. The full title of this country is 'the United Kingdom of Great Britain and Northern Ireland'. 'The UK' is made up of England, Scotland, Wales and Northern Ireland. 
'Britain' is used informally, usually meaning the United Kingdom. 'Great Britain' is made up of England, Scotland and Wales. The Channel Islands and the Isle of Man are not part of the UK.[dead link] +Jump up ^ "Working with Overseas Territories". Foreign and Commonwealth Office. Retrieved 3 May 2011. +Jump up ^ Mathias, P. (2001). The First Industrial Nation: the Economic History of Britain, 1700–1914. London: Routledge. ISBN 0-415-26672-6. +Jump up ^ Ferguson, Niall (2004). Empire: The rise and demise of the British world order and the lessons for global power. New York: Basic Books. ISBN 0-465-02328-2. +Jump up ^ Sheridan, Greg (15 May 2010). "Cameron has chance to make UK great again". The Australian (Sydney). Retrieved 23 May 2011. +Jump up ^ Dugan, Emily (18 November 2012). "Britain is now most powerful nation on earth". The Independent (London). Retrieved 18 November 2012. +^ Jump up to: a b "The 15 countries with the highest military expenditure in 2013 (table)" (PDF). Stockholm International Peace Research Institute. Retrieved 4 May 2014. +^ Jump up to: a b The Military Balance 2014: Top 15 Defence Budgets 2013 (IISS) +Jump up ^ "Treaty of Union, 1706". Scots History Online. Retrieved 23 August 2011. +Jump up ^ Barnett, Hilaire; Jago, Robert (2011). Constitutional & Administrative Law (8th ed.). Abingdon: Routledge. p. 165. ISBN 978-0-415-56301-7. +Jump up ^ Gascoigne, Bamber. "History of Great Britain (from 1707)". History World. Retrieved 18 July 2011. +Jump up ^ Cottrell, P. (2008). The Irish Civil War 1922–23. p. 85. ISBN 1-84603-270-9. +^ Jump up to: a b S. Dunn; H. 
Dawson (2000), An Alphabetical Listing of Word, Name and Place in Northern Ireland and the Living Language of Conflict, Lampeter: Edwin Mellen Press, One specific problem - in both general and particular senses - is to know what to call Northern Ireland itself: in the general sense, it is not a country, or a province, or a state - although some refer to it contemptuously as a statelet: the least controversial word appears to be jurisdiction, but this might change. +Jump up ^ "Changes in the list of subdivision names and code elements". ISO 3166-2. International Organization for Standardization. 15 December 2011. Retrieved 28 May 2012. +Jump up ^ Population Trends, Issues 75–82, p.38, 1994, UK Office of Population Censuses and Surveys +Jump up ^ Life in the United Kingdom: a journey to citizenship, p. 7, United Kingdom Home Office, 2007, ISBN 978-0-11-341313-3. +Jump up ^ "Statistical bulletin: Regional Labour Market Statistics". Retrieved 5 March 2014. +Jump up ^ "13.4% Fall In Earnings Value During Recession". Retrieved 5 March 2014. +Jump up ^ Murphy, Dervla (1979). A Place Apart. London: Penguin. ISBN 978-0-14-005030-1. +Jump up ^ Whyte, John; FitzGerald, Garret (1991). Interpreting Northern Ireland. Oxford: Clarendon Press. ISBN 978-0-19-827380-6. +Jump up ^ "Guardian Unlimited Style Guide". London: Guardian News and Media Limited. 19 December 2008. Retrieved 23 August 2011. +Jump up ^ "BBC style guide (Great Britain)". BBC News. 19 August 2002. Retrieved 23 August 2011. +Jump up ^ "Key facts about the United Kingdom". Government, citizens and rights. HM Government. Retrieved 24 August 2011.[dead link] +Jump up ^ "Merriam-Webster Dictionary Online Definition of ''Great Britain''". Merriam Webster. 31 August 2012. Retrieved 9 April 2013. +Jump up ^ New Oxford American Dictionary: "Great Britain: England, Wales, and Scotland considered as a unit. The name is also often used loosely to refer to the United Kingdom." +Jump up ^ "Great Britain". 
International Olympic Committee. Retrieved 10 May 2011. +Jump up ^ "Team GB – Our Greatest Team". British Olympic Association. Retrieved 10 May 2011.[dead link] +Jump up ^ Bradley, Anthony Wilfred; Ewing, Keith D. (2007). Constitutional and administrative law 1 (14th ed.). Harlow: Pearson Longman. p. 36. ISBN 978-1-4058-1207-8. +Jump up ^ "Which of these best describes the way you think of yourself?". Northern Ireland Life and Times Survey 2010. ARK – Access Research Knowledge. 2010. Retrieved 1 July 2010. +Jump up ^ Schrijver, Frans (2006). Regionalism after regionalisation: Spain, France and the United Kingdom. Amsterdam University Press. pp. 275–277. ISBN 978-90-5629-428-1. +Jump up ^ Jack, Ian (11 December 2010). "Why I'm saddened by Scotland going Gaelic". The Guardian (London). +Jump up ^ Ffeithiau allweddol am y Deyrnas Unedig : Directgov – Llywodraeth, dinasyddion a hawliau[dead link] +Jump up ^ "Ancient skeleton was 'even older'". BBC News. 30 October 2007. Retrieved 27 April 2011. +Jump up ^ Koch, John T. (2006). Celtic culture: A historical encyclopedia. Santa Barbara, CA: ABC-CLIO. p. 973. ISBN 978-1-85109-440-0. +Jump up ^ Davies, John; Jenkins, Nigel; Baines, Menna; Lynch, Peredur I., eds. (2008). The Welsh Academy Encyclopaedia of Wales. Cardiff: University of Wales Press. p. 915. ISBN 978-0-7083-1953-6. +Jump up ^ "Short Athelstan biography". BBC History. Retrieved 9 April 2013. +Jump up ^ Mackie, J.D. (1991). A History of Scotland. London: Penguin. pp. 18–19. ISBN 978-0-14-013649-4. +Jump up ^ Campbell, Ewan (1999). Saints and Sea-kings: The First Kingdom of the Scots. Edinburgh: Canongate. pp. 8–15. ISBN 0-86241-874-7. +Jump up ^ Haigh, Christopher (1990). The Cambridge Historical Encyclopedia of Great Britain and Ireland. Cambridge University Press. p. 30. ISBN 978-0-521-39552-6. +Jump up ^ Ganshof, F.L. (1996). Feudalism. University of Toronto. p. 165. ISBN 978-0-8020-7158-3. +Jump up ^ Chibnall, Marjorie (1999). 
The debate on the Norman Conquest. Manchester University Press. pp. 115–122. ISBN 978-0-7190-4913-2. +Jump up ^ Keen, Maurice. "The Hundred Years War". BBC History. +Jump up ^ The Reformation in England and Scotland and Ireland: The Reformation Period & Ireland under Elizabth I, Encyclopædia Britannica Online. +Jump up ^ "British History in Depth – Wales under the Tudors". BBC History. 5 November 2009. Retrieved 21 September 2010. +Jump up ^ Nicholls, Mark (1999). A history of the modern British Isles, 1529–1603: The two kingdoms. Oxford: Blackwell. pp. 171–172. ISBN 978-0-631-19334-0. +Jump up ^ Canny, Nicholas P. (2003). Making Ireland British, 1580–1650. Oxford University Press. pp. 189–200. ISBN 978-0-19-925905-2. +Jump up ^ Ross, D. (2002). Chronology of Scottish History. Glasgow: Geddes & Grosset. p. 56. ISBN 1-85534-380-0 +Jump up ^ Hearn, J. (2002). Claiming Scotland: National Identity and Liberal Culture. Edinburgh University Press. p. 104. ISBN 1-902930-16-9 +Jump up ^ "English Civil Wars". Encyclopaedia Britannica. Retrieved 28 April 2013. +Jump up ^ "Scotland and the Commonwealth: 1651–1660". Archontology.org. 14 March 2010. Retrieved 20 April 2010. +Jump up ^ Lodge, Richard (2007) [1910]. The History of England – From the Restoration to the Death of William III (1660–1702). Read Books. p. 8. ISBN 978-1-4067-0897-4. +Jump up ^ "Tudor Period and the Birth of a Regular Navy". Royal Navy History. Institute of Naval History. Retrieved 24 December 2010.[dead link] +Jump up ^ Canny, Nicholas (1998). The Origins of Empire, The Oxford History of the British Empire Volume I. Oxford University Press. ISBN 0-19-924676-9. +Jump up ^ "Articles of Union with Scotland 1707". UK Parliament. Retrieved 19 October 2008. +Jump up ^ "Acts of Union 1707". UK Parliament. Retrieved 6 January 2011. +Jump up ^ "Treaty (act) of Union 1706". Scottish History online. Retrieved 3 February 2011. +Jump up ^ Library of Congress, The Impact of the American Revolution Abroad, p. 73. 
+Jump up ^ Loosemore, Jo (2007). Sailing against slavery. BBC Devon. 2007. +Jump up ^ "The Act of Union". Act of Union Virtual Library. Retrieved 15 May 2006. +Jump up ^ Tellier, L.-N. (2009). Urban World History: an Economic and Geographical Perspective. Quebec: PUQ. p. 463. ISBN 2-7605-1588-5. +Jump up ^ Sondhaus, L. (2004). Navies in Modern World History. London: Reaktion Books. p. 9. ISBN 1-86189-202-0. +Jump up ^ Porter, Andrew (1998). The Nineteenth Century, The Oxford History of the British Empire Volume III. Oxford University Press. p. 332. ISBN 0-19-924678-5. +Jump up ^ "The Workshop of the World". BBC History. Retrieved 28 April 2013. +Jump up ^ Porter, Andrew (1998). The Nineteenth Century, The Oxford History of the British Empire Volume III. Oxford University Press. p. 8. ISBN 0-19-924678-5. +Jump up ^ Marshall, P.J. (1996). The Cambridge Illustrated History of the British Empire. Cambridge University Press. pp. 156–57. ISBN 0-521-00254-0. +Jump up ^ Tompson, Richard S. (2003). Great Britain: a reference guide from the Renaissance to the present. New York: Facts on File. p. 63. ISBN 978-0-8160-4474-0. +Jump up ^ Hosch, William L. (2009). World War I: People, Politics, and Power. America at War. New York: Britannica Educational Publishing. p. 21. ISBN 978-1-61530-048-8. +Jump up ^ Turner, John (1988). Britain and the First World War. London: Unwin Hyman. pp. 22–35. ISBN 978-0-04-445109-9. +^ Jump up to: a b Westwell, I.; Cove, D. (eds) (2002). History of World War I, Volume 3. London: Marshall Cavendish. pp. 698 and 705. ISBN 0-7614-7231-2. +Jump up ^ Turner, J. (1988). Britain and the First World War. Abingdon: Routledge. p. 41. ISBN 0-04-445109-1. +Jump up ^ SR&O 1921, No. 533 of 3 May 1921. +Jump up ^ "The Anglo-Irish Treaty, 6 December 1921". CAIN. Retrieved 15 May 2006. +Jump up ^ Rubinstein, W. D. (2004). Capitalism, Culture, and Decline in Britain, 1750–1990. Abingdon: Routledge. p. 11. ISBN 0-415-03719-0. 
+Jump up ^ "Britain to make its final payment on World War II loan from U.S.". The New York Times. 28 December 2006. Retrieved 25 August 2011. +Jump up ^ Francis, Martin (1997). Ideas and policies under Labour, 1945–1951: Building a new Britain. Manchester University Press. pp. 225–233. ISBN 978-0-7190-4833-3. +Jump up ^ Lee, Stephen J. (1996). Aspects of British political history, 1914–1995. London; New York: Routledge. pp. 173–199. ISBN 978-0-415-13103-2. +Jump up ^ Larres, Klaus (2009). A companion to Europe since 1945. Chichester: Wiley-Blackwell. p. 118. ISBN 978-1-4051-0612-2. +Jump up ^ "Country List". Commonwealth Secretariat. 19 March 2009. Retrieved 11 September 2012.[dead link] +Jump up ^ Julios, Christina (2008). Contemporary British identity: English language, migrants, and public discourse. Studies in migration and diaspora. Aldershot: Ashgate. p. 84. ISBN 978-0-7546-7158-9. +Jump up ^ Aughey, Arthur (2005). The Politics of Northern Ireland: Beyond the Belfast Agreement. London: Routledge. p. 7. ISBN 978-0-415-32788-6. +Jump up ^ "The troubles were over, but the killing continued. Some of the heirs to Ireland's violent traditions refused to give up their inheritance." Holland, Jack (1999). Hope against History: The Course of Conflict in Northern Ireland. New York: Henry Holt. p. 221. ISBN 978-0-8050-6087-4. +Jump up ^ Elliot, Marianne (2007). The Long Road to Peace in Northern Ireland: Peace Lectures from the Institute of Irish Studies at Liverpool University. University of Liverpool Institute of Irish Studies, Liverpool University Press. p. 2. ISBN 1-84631-065-2. +Jump up ^ Dorey, Peter (1995). British politics since 1945. Making contemporary Britain. Oxford: Blackwell. pp. 164–223. ISBN 978-0-631-19075-2. +Jump up ^ Griffiths, Alan; Wall, Stuart (2007). Applied Economics (11th ed.). Harlow: Financial Times Press. p. 6. ISBN 978-0-273-70822-3. Retrieved 26 December 2010. +Jump up ^ Keating, Michael (1 January 1998). 
"Reforging the Union: Devolution and Constitutional Change in the United Kingdom". Publius: the Journal of Federalism 28 (1): 217. doi:10.1093/oxfordjournals.pubjof.a029948. Retrieved 4 February 2009. +Jump up ^ Jackson, Mike (3 April 2011). "Military action alone will not save Libya". Financial Times (London). +Jump up ^ "United Kingdom country profile". BBC. 24 January 2013. Retrieved 9 April 2013. +Jump up ^ "Scotland to hold independence poll in 2014 – Salmond". BBC News. 10 January 2012. Retrieved 10 January 2012. +Jump up ^ Oxford English Dictionary: "British Isles: a geographical term for the islands comprising Great Britain and Ireland with all their offshore islands including the Isle of Man and the Channel Islands." +^ Jump up to: a b c d e f "United Kingdom". The World Factbook. Central Intelligence Agency. Retrieved 23 September 2008. +^ Jump up to: a b c d e Latimer Clarke Corporation Pty Ltd. "United Kingdom – Atlapedia Online". Atlapedia.com. Retrieved 26 October 2010. +Jump up ^ ROG Learing Team (23 August 2002). "The Prime Meridian at Greenwich". Royal Museums Greenwich. Royal Museums Greenwich. Retrieved 11 September 2012. +Jump up ^ Neal, Clare. "How long is the UK coastline?". British Cartographic Society. Retrieved 26 October 2010. +Jump up ^ "The Channel Tunnel". Eurotunnel. Retrieved 29 November 2010.[dead link] +Jump up ^ "England – Profile". BBC News. 11 February 2010. +Jump up ^ "Scotland Facts". Scotland Online Gateway. Archived from the original on 21 June 2008. Retrieved 16 July 2008. +Jump up ^ Winter, Jon (19 May 2001). "The complete guide to Scottish Islands". The Independent (London). +Jump up ^ "Overview of Highland Boundary Fault". Gazetteer for Scotland. University of Edinburgh. Retrieved 27 December 2010. +Jump up ^ "Ben Nevis Weather". Ben Nevis Weather. Retrieved 26 October 2008. +Jump up ^ "Profile: Wales". BBC News. 9 June 2010. Retrieved 7 November 2010. +Jump up ^ Giles Darkes (26 April 2014). 
"How long is the UK coastline?". The British Cartographic Society. +Jump up ^ "Geography of Northern Ireland". University of Ulster. Retrieved 22 May 2006. +Jump up ^ "UK climate summaries". Met Office. Retrieved 1 May 2011. +Jump up ^ United Nations Economic and Social Council (August 2007). "Ninth UN Conference on the standardization of Geographical Names". UN Statistics Division. Archived from the original on 1 December 2009. Retrieved 21 October 2008. +Jump up ^ Barlow, I.M. (1991). Metropolitan Government. London: Routledge. ISBN 978-0-415-02099-2. +Jump up ^ "Welcome to the national site of the Government Office Network". Government Offices. Archived from the original on 15 June 2009. Retrieved 3 July 2008. +Jump up ^ "A short history of London government". Greater London Authority. Archived from the original on 21 April 2008. Retrieved 4 October 2008. +Jump up ^ Sherman, Jill; Norfolk, Andrew (5 November 2004). "Prescott's dream in tatters as North East rejects assembly". The Times (London). Retrieved 15 February 2008. The Government is now expected to tear up its twelve-year-old plan to create eight or nine regional assemblies in England to mirror devolution in Scotland and Wales. (subscription required) +Jump up ^ "Local Authority Elections". Local Government Association. Retrieved 3 October 2008.[dead link] +Jump up ^ "STV in Scotland: Local Government Elections 2007". Political Studies Association. Archived from the original on 20 March 2011. Retrieved 2 August 2008. +Jump up ^ Ethical Standards in Public Life framework: "Ethical Standards in Public Life". The Scottish Government. Retrieved 3 October 2008. +Jump up ^ "Who we are". Convention of Scottish Local Authorities. Retrieved 5 July 2011. +Jump up ^ "Local Authorities". The Welsh Assembly Government. Retrieved 31 July 2008. +Jump up ^ "Local government elections in Wales". The Electoral Commission. 2008. Retrieved 8 April 2011. +Jump up ^ "Welsh Local Government Association". 
Welsh Local Government Association. Retrieved 20 March 2008. +Jump up ^ Devenport, Mark (18 November 2005). "NI local government set for shake-up". BBC News. Retrieved 15 November 2008. +Jump up ^ "Foster announces the future shape of local government" (Press release). Northern Ireland Executive. 13 March 2008. Retrieved 20 October 2008. +Jump up ^ "Local Government elections to be aligned with review of public administration" (Press release). Northern Ireland Office. 25 April 2008. Retrieved 2 August 2008.[dead link] +Jump up ^ "CIBC PWM Global – Introduction to The Cayman Islands". Cibc.com. 11 July 2012. Retrieved 17 August 2012. +Jump up ^ Rappeport, Laurie. "Cayman Islands Tourism". Washington DC: USA Today Travel Tips. Retrieved 9 April 2013. +Jump up ^ "Working with Overseas Territories". Foreign & Commonwealth Office. 6 October 2010. Retrieved 5 November 2010. +Jump up ^ http://www.justice.gov.uk/downloads/about/moj/our-responsibilities/Background_Briefing_on_the_Crown_Dependencies2.pdf +Jump up ^ "Overseas Territories". Foreign & Commonwealth Office. Retrieved 6 September 2010. +Jump up ^ "The World Factbook". CIA. Retrieved 26 December 2010. +Jump up ^ "Country profiles". Foreign & Commonwealth Office. 21 February 2008. Retrieved 6 September 2010.[dead link] +Jump up ^ Davison, Phil (18 August 1995). "Bermudians vote to stay British". The Independent (London). Retrieved 11 September 2012. +Jump up ^ The Committee Office, House of Commons. "House of Commons – Crown Dependencies – Justice Committee". Publications.parliament.uk. Retrieved 7 November 2010. +Jump up ^ Fact sheet on the UK's relationship with the Crown Dependencies – gov.uk, Ministry of Justice. Retrieved 25 August 2014. +Jump up ^ "Profile of Jersey". States of Jersey. Retrieved 31 July 2008. 
The legislature passes primary legislation, which requires approval by The Queen in Council, and enacts subordinate legislation in many areas without any requirement for Royal Sanction and under powers conferred by primary legislation. +Jump up ^ "Chief Minister to meet Channel Islands counterparts – Isle of Man Public Services" (Press release). Isle of Man Government. 29 May 2012. Retrieved 9 April 2013.[dead link] +Jump up ^ Bagehot, Walter (1867). The English Constitution. London: Chapman and Hall. p. 103. +Jump up ^ Carter, Sarah. "A Guide To the UK Legal System". University of Kent at Canterbury. Retrieved 16 May 2006. +Jump up ^ "Parliamentary sovereignty". UK Parliament. n.d. Archived from the original on 27 May 2012. +Jump up ^ "The Government, Prime Minister and Cabinet". Public services all in one place. Directgov. Retrieved 12 February 2010. +Jump up ^ "Brown is UK's new prime minister". BBC News. 27 June 2007. Retrieved 23 January 2008. +Jump up ^ "David Cameron is UK's new prime minister". BBC News. 11 May 2010. Retrieved 11 May 2010. +Jump up ^ November 2010 "Elections and voting". UK Parliament. Archived from the original on 14 November 2010. Retrieved 14 November 2010. +Jump up ^ November 2010 "The Parliament Acts". UK Parliament. Archived from the original on 14 November 2010. +Jump up ^ "United Kingdom". European Election Database. Norwegian Social Science Data Services. Retrieved 3 July 2010. +Jump up ^ Wainwright, Martin (28 May 2010). "Thirsk and Malton: Conservatives take final seat in parliament". The Guardian (London). Retrieved 3 July 2010. +Jump up ^ "Scots MPs attacked over fees vote". BBC News. 27 January 2004. Retrieved 21 October 2008. +Jump up ^ Taylor, Brian (1 June 1998). "Talking Politics: The West Lothian Question". BBC News. Retrieved 21 October 2008. +Jump up ^ "England-only laws 'need majority from English MPs'". BBC News. 25 March 2013. Retrieved 28 April 2013. +Jump up ^ "Scotland's Parliament – powers and structures". 
BBC News. 8 April 1999. Retrieved 21 October 2008. +Jump up ^ "Salmond elected as first minister". BBC News. 16 May 2007. Retrieved 21 October 2008. +Jump up ^ "Scottish election: SNP wins election". BBC News. 6 May 2011. +Jump up ^ "Structure and powers of the Assembly". BBC News. 9 April 1999. Retrieved 21 October 2008. +Jump up ^ "Carwyn Jones clinches leadership in Wales". WalesOnline (Media Wales). 1 December 2009. Retrieved 1 December 2009. +Jump up ^ "Devolved Government – Ministers and their departments". Northern Ireland Executive. Archived from the original on 22 August 2007. +Jump up ^ Burrows, N. (1999). "Unfinished Business: The Scotland Act 1998". The Modern Law Review 62 (2): 241–60 [p. 249]. doi:10.1111/1468-2230.00203. The UK Parliament is sovereign and the Scottish Parliament is subordinate. The White Paper had indicated that this was to be the approach taken in the legislation. The Scottish Parliament is not to be seen as a reflection of the settled will of the people of Scotland or of popular sovereignty but as a reflection of its subordination to a higher legal authority. Following the logic of this argument, the power of the Scottish Parliament to legislate can be withdrawn or overridden... +Jump up ^ Elliot, M. (2004). "United Kingdom: Parliamentary sovereignty under pressure". International Journal of Constitutional Law 2 (3): 545–627 [pp. 553–554]. doi:10.1093/icon/2.3.545. Notwithstanding substantial differences among the schemes, an important common factor is that the U.K. Parliament has not renounced legislative sovereignty in relation to the three nations concerned. For example, the Scottish Parliament is empowered to enact primary legislation on all matters, save those in relation to which competence is explicitly denied ... but this power to legislate on what may be termed "devolved matters" is concurrent with the Westminster Parliament's general power to legislate for Scotland on any matter at all, including devolved matters ... 
In theory, therefore, Westminster may legislate on Scottish devolved matters whenever it chooses... +Jump up ^ Walker, G. (2010). "Scotland, Northern Ireland, and Devolution, 1945–1979". Journal of British Studies 39 (1): 124 & 133. doi:10.1086/644536. +Jump up ^ Gamble, A. "The Constitutional Revolution in the United Kingdom". Publius 36 (1): 19–35 [p. 29]. doi:10.1093/publius/pjj011. The British parliament has the power to abolish the Scottish parliament and the Welsh assembly by a simple majority vote in both houses, but since both were sanctioned by referenda, it would be politically difficult to abolish them without the sanction of a further vote by the people. In this way several of the constitutional measures introduced by the Blair government appear to be entrenched and not subject to a simple exercise of parliamentary sovereignty at Westminster. +Jump up ^ Meehan, E. (1999). "The Belfast Agreement—Its Distinctiveness and Points of Cross-Fertilization in the UK's Devolution Programme". Parliamentary Affairs 52 (1): 19–31 [p. 23]. doi:10.1093/pa/52.1.19. [T]he distinctive involvement of two governments in the Northern Irish problem means that Northern Ireland's new arrangements rest upon an intergovernmental agreement. If this can be equated with a treaty, it could be argued that the forthcoming distribution of power between Westminster and Belfast has similarities with divisions specified in the written constitutions of federal states... Although the Agreement makes the general proviso that Westminster's 'powers to make legislation for Northern Ireland' remains 'unaffected', without an explicit categorical reference to reserved matters, it may be more difficult than in Scotland or Wales for devolved powers to be repatriated. The retraction of devolved powers would not merely entail consultation in Northern Ireland backed implicitly by the absolute power of parliamentary sovereignty but also the renegotiation of an intergovernmental agreement. 
+Jump up ^ "The Treaty (act) of the Union of Parliament 1706". Scottish History Online. Retrieved 5 October 2008. +Jump up ^ "UK Supreme Court judges sworn in". BBC News. 1 October 2009. +Jump up ^ "Constitutional reform: A Supreme Court for the United Kingdom". Department for Constitutional Affairs. July 2003. Retrieved 13 May 2013. +Jump up ^ "Role of the JCPC". Judicial Committee of the Privy Council. Retrieved 28 April 2013. +Jump up ^ Bainham, Andrew (1998). The international survey of family law: 1996. The Hague: Martinus Nijhoff. p. 298. ISBN 978-90-411-0573-8. +Jump up ^ Adeleye, Gabriel; Acquah-Dadzie, Kofi; Sienkewicz, Thomas; McDonough, James (1999). World dictionary of foreign expressions. Wauconda, IL: Bolchazy-Carducci. p. 371. ISBN 978-0-86516-423-9. +Jump up ^ "The Australian courts and comparative law". Australian Law Postgraduate Network. Retrieved 28 December 2010. +Jump up ^ "Court of Session – Introduction". Scottish Courts. Retrieved 5 October 2008.[dead link] +Jump up ^ "High Court of Justiciary – Introduction". Scottish Courts. Retrieved 5 October 2008.[dead link] +Jump up ^ "House of Lords – Practice Directions on Permission to Appeal". UK Parliament. Retrieved 22 June 2009. +Jump up ^ "Introduction". Scottish Courts. Retrieved 5 October 2008.[dead link] +Jump up ^ Samuel Bray (2005). "Not proven: introducing a third verdict". The University of Chicago Law Review 72 (4): 1299. Retrieved 30 November 2013. +Jump up ^ "Police-recorded crime down by 9%". BBC News. 17 July 2008. Retrieved 21 October 2008. +Jump up ^ "New record high prison population". BBC News. 8 February 2008. Retrieved 21 October 2008. +Jump up ^ "Crime falls to 32 year low" (Press release). Scottish Government. 7 September 2010. Retrieved 21 April 2011. +Jump up ^ "Prisoner Population at Friday 22 August 2008". Scottish Prison Service. Retrieved 28 August 2008. +Jump up ^ "Scots jail numbers at record high". BBC News. 29 August 2008. Retrieved 21 October 2008. 
+Jump up ^ Swaine, Jon (13 January 2009). "Barack Obama presidency will strengthen special relationship, says Gordon Brown". The Daily Telegraph (London). Retrieved 3 May 2011. +Jump up ^ Kirchner, E. J.; Sperling, J. (2007). Global Security Governance: Competing Perceptions of Security in the 21st Century. London: Taylor & Francis. p. 100. ISBN 0-415-39162-8 +Jump up ^ The Committee Office, House of Commons (19 February 2009). "DFID's expenditure on development assistance". UK Parliament. Retrieved 28 April 2013. +Jump up ^ "Ministry of Defence". Ministry of Defence. Retrieved 21 February 2012. +Jump up ^ "Speaker addresses Her Majesty Queen Elizabeth II". UK Parliament. 30 March 2012. Retrieved 28 April 2013. +Jump up ^ "House of Commons Hansard". UK Parliament. Retrieved 23 October 2008. +Jump up ^ UK 2005: The Official Yearbook of the United Kingdom of Great Britain and Northern Ireland. Office for National Statistics. p. 89. +Jump up ^ "Principles for Economic Regulation". Department for Business, Innovation & Skills. April 2011. Retrieved 1 May 2011. +Jump up ^ "United Kingdom". International Monetary Fund. Retrieved 1 October 2009. +Jump up ^ Chavez-Dreyfuss, Gertrude (1 April 2008). "Global reserves, dollar share up at end of 2007-IMF". Reuters. Retrieved 21 December 2009. +Jump up ^ "More About the Bank". Bank of England. n.d. Archived from the original on 12 March 2008. +Jump up ^ "Index of Services (experimental)". Office for National Statistics. 7 May 2006. Archived from the original on 7 May 2006. +Jump up ^ Sassen, Saskia (2001). The Global City: New York, London, Tokyo (2nd ed.). Princeton University Press. ISBN 0-691-07866-1. +^ Jump up to: a b "Global Financial Centres 7". Z/Yen. 2010. Retrieved 21 April 2010. +^ Jump up to: a b "Worldwide Centres of Commerce Index 2008". Mastercard. Retrieved 5 July 2011. +^ Jump up to: a b Zumbrun, Joshua (15 July 2008). "World's Most Economically Powerful Cities". Forbes (New York). 
Archived from the original on 19 May 2011. Retrieved 3 October 2010. +Jump up ^ "Global city GDP rankings 2008–2025". PricewaterhouseCoopers. Archived from the original on 19 May 2011. Retrieved 16 November 2010. +Jump up ^ Lazarowicz, Mark (Labour MP) (30 April 2003). "Financial Services Industry". UK Parliament. Retrieved 17 October 2008. +Jump up ^ International Tourism Receipts[dead link]. UNWTO Tourism Highlights, Edition 2005. page 12. World Tourism Organisation. Retrieved 24 May 2006. +Jump up ^ Bremner, Caroline (10 January 2010). "Euromonitor International's Top City Destination Ranking". Euromonitor International. Archived from the original on 19 May 2011. Retrieved 31 May 2011. +Jump up ^ "From the Margins to the Mainstream – Government unveils new action plan for the creative industries". DCMS. 9 March 2007. Retrieved 9 March 2007.[dead link] +^ Jump up to: a b "European Countries – United Kingdom". Europa (web portal). Retrieved 15 December 2010. +Jump up ^ Harrington, James W.; Warf, Barney (1995). Industrial location: Principles, practices, and policy. London: Routledge. p. 121. ISBN 978-0-415-10479-1. +Jump up ^ Spielvogel, Jackson J. (2008). Western Civilization: Alternative Volume: Since 1300. Belmont, CA: Thomson Wadsworth. ISBN 978-0-495-55528-5. +Jump up ^ Hewitt, Patricia (15 July 2004). "TUC Manufacturing Conference". Department of Trade and Industry. Retrieved 16 May 2006. +Jump up ^ "Industry topics". Society of Motor Manufacturers and Traders. 2011. Retrieved 5 July 2011. +Jump up ^ Robertson, David (9 January 2009). "The Aerospace industry has thousands of jobs in peril". The Times (London). Retrieved 9 June 2011. (subscription required) +Jump up ^ "Facts & Figures – 2009". Aerospace & Defence Association of Europe. Retrieved 9 June 2011.[dead link] +Jump up ^ "UK Aerospace Industry Survey – 2010". ADS Group. Retrieved 9 June 2011. 
+^ Jump up to: a b c d http://www.theengineer.co.uk/aerospace/in-depth/reasons-to-be-cheerful-about-the-uk-aerospace-sector/1017274.article +Jump up ^ "The Pharmaceutical sector in the UK". Department for Business, Innovation & Skills. Retrieved 9 June 2011. +Jump up ^ "Ministerial Industry Strategy Group – Pharmaceutical Industry: Competitiveness and Performance Indicators". Department of Health. Retrieved 9 June 2011.[dead link] +Jump up ^ [1][dead link] +Jump up ^ "UK in recession as economy slides". BBC News. 23 January 2009. Retrieved 23 January 2009. +Jump up ^ "UK youth unemployment at its highest in two decades: 22.5%". MercoPress. 15 April 2012. +Jump up ^ Groom, Brian (19 January 2011). "UK youth unemployment reaches record". Financial Times (London). +Jump up ^ "Release: EU Government Debt and Deficit returns". Office for National Statistics. March 2012. Retrieved 17 August 2012. +Jump up ^ "UK loses top AAA credit rating for first time since 1978". BBC News. 23 February 2013. Retrieved 23 February 2013. +Jump up ^ "Britain sees real wages fall 3.2%". Daily Express (London). 2 March 2013. +Jump up ^ Beckford, Martin (5 December 2011). "Gap between rich and poor growing fastest in Britain". The Daily Telegraph (London). +Jump up ^ "United Kingdom: Numbers in low income". The Poverty Site. Retrieved 25 September 2009. +Jump up ^ "United Kingdom: Children in low income households". The Poverty Site. Retrieved 25 September 2009. +Jump up ^ "Warning of food price hike crisis". BBC News. 4 April 2009. +Jump up ^ Andrews, J. (16 January 2013). "How poor is Britain now". Yahoo! Finance UK +Jump up ^ Glynn, S.; Booth, A. (1996). Modern Britain: An Economic and Social History. London: Routledge. +Jump up ^ "Report highlights 'bleak' poverty levels in the UK" Phys.org, 29 March 2013 +Jump up ^ Gascoin, J. "A reappraisal of the role of the universities in the Scientific Revolution", in Lindberg, David C. 
and Westman, Robert S., eds (1990), Reappraisals of the Scientific Revolution. Cambridge University Press. p. 248. ISBN 0-521-34804-8. +Jump up ^ Reynolds, E.E.; Brasher, N.H. (1966). Britain in the Twentieth Century, 1900–1964. Cambridge University Press. p. 336. OCLC 474197910 +Jump up ^ Burtt, E.A. (2003) [1924].The Metaphysical Foundations of Modern Science. Mineola, NY: Courier Dover. p. 207. ISBN 0-486-42551-7. +Jump up ^ Hatt, C. (2006). Scientists and Their Discoveries. London: Evans Brothers. pp. 16, 30 and 46. ISBN 0-237-53195-X. +Jump up ^ Jungnickel, C.; McCormmach, R. (1996). Cavendish. American Philosophical Society. ISBN 0-87169-220-1. +Jump up ^ "The Nobel Prize in Physiology or Medicine 1945: Sir Alexander Fleming, Ernst B. Chain, Sir Howard Florey". The Nobel Foundation. Archived from the original on 21 June 2011. +Jump up ^ Hatt, C. (2006). Scientists and Their Discoveries. London: Evans Brothers. p. 56. ISBN 0-237-53195-X. +Jump up ^ James, I. (2010). Remarkable Engineers: From Riquet to Shannon. Cambridge University Press. pp. 33–6. ISBN 0-521-73165-8. +Jump up ^ Bova, Ben (2002) [1932]. The Story of Light. Naperville, IL: Sourcebooks. p. 238. ISBN 978-1-4022-0009-0. +Jump up ^ "Alexander Graham Bell (1847–1922)". Scottish Science Hall of Fame. Archived from the original on 21 June 2011. +Jump up ^ "John Logie Baird (1888–1946)". BBC History. Archived from the original on 21 June 2011. +Jump up ^ Cole, Jeffrey (2011). Ethnic Groups of Europe: An Encyclopedia. Santa Barbara, CA: ABC-CLIO. p. 121. ISBN 1-59884-302-8. +Jump up ^ Castells, M.; Hall, P.; Hall, P.G. (2004). Technopoles of the World: the Making of Twenty-First-Century Industrial Complexes. London: Routledge. pp. 98–100. ISBN 0-415-10015-1. +Jump up ^ "Knowledge, networks and nations: scientific collaborations in the twenty-first century". Royal Society. 2011. Archived from the original on 22 June 2011. +Jump up ^ McCook, Alison. "Is peer review broken?". 
Reprinted from the Scientist 20(2) 26, 2006. Archived from the original on 21 June 2011. +^ Jump up to: a b "Heathrow 'needs a third runway'". BBC News. 25 June 2008. Retrieved 17 October 2008. +^ Jump up to: a b "Statistics: Top 30 World airports" (Press release). Airports Council International. July 2008. Retrieved 15 October 2008. +Jump up ^ "Transport Statistics Great Britain: 2010". Department for Transport. Archived from the original on 16 December 2010. +Jump up ^ "Major new rail lines considered". BBC News. 21 June 2008. Archived from the original on 9 October 2010. +Jump up ^ "Crossrail's giant tunnelling machines unveiled". BBC News. 2 January 2012. +Jump up ^ Leftly, Mark (29 August 2010). "Crossrail delayed to save £1bn". The Independent on Sunday (London). +^ Jump up to: a b "Size of Reporting Airports October 2009 – September 2010". Civil Aviation Authority. Retrieved 5 December 2010. +Jump up ^ "BMI being taken over by Lufthansa". BBC News. 29 October 2008. Retrieved 23 December 2009. +Jump up ^ "United Kingdom Energy Profile". U.S. Energy Information Administration. Retrieved 4 November 2010. +Jump up ^ Mason, Rowena (24 October 2009). "Let the battle begin over black gold". The Daily Telegraph (London). Retrieved 26 November 2010. +Jump up ^ Heath, Michael (26 November 2010). "RBA Says Currency Containing Prices, Rate Level 'Appropriate' in Near Term". Bloomberg (New York). Retrieved 26 November 2010. +^ Jump up to: a b c "Nuclear Power in the United Kingdom". World Nuclear Association. April 2013. Retrieved 9 April 2013. +^ Jump up to: a b c "United Kingdom – Oil". U.S. Energy Information Administration. Retrieved 4 November 2010.[dead link] +Jump up ^ "Diminishing domestic reserves, escalating imports". EDF Energy. Retrieved 9 April 2013. +^ Jump up to: a b "United Kingdom – Natural Gas". U.S. Energy Information Administration. Retrieved 4 November 2010.[dead link] +^ Jump up to: a b "United Kingdom – Quick Facts Energy Overview". U.S. 
Energy Information Administration. Retrieved 4 November 2010.[dead link] +Jump up ^ The Coal Authority (10 April 2006). "Coal Reserves in the United Kingdom". The Coal Authority. Archived from the original on 4 January 2009. Retrieved 5 July 2011. +Jump up ^ "England Expert predicts 'coal revolution'". BBC News. 16 October 2007. Retrieved 23 September 2008. +Jump up ^ Watts, Susan (20 March 2012). "Fracking: Concerns over gas extraction regulations". BBC News. Retrieved 9 April 2013. +Jump up ^ "Quit fracking aboot". Friends of the Earth Scotland. Retrieved 9 April 2013. +Jump up ^ "Census Geography". Office for National Statistics. 30 October 2007. Archived from the original on 4 June 2011. Retrieved 14 April 2012. +Jump up ^ "Welcome to the 2011 Census for England and Wales". Office for National Statistics. n.d. Retrieved 11 October 2008. +^ Jump up to: a b c "2011 Census: Population Estimates for the United Kingdom". Office for National Statistics. 27 March 2011. Retrieved 18 December 2012. +^ Jump up to: a b c "Annual Mid-year Population Estimates, 2010". Office for National Statistics. 2011. Retrieved 14 April 2012. +Jump up ^ Batty, David (30 December 2010). "One in six people in the UK today will live to 100, study says". The Guardian (London). +^ Jump up to: a b "2011 UK censuses". Office for National Statistics. Retrieved 18 December 2012. +Jump up ^ "Population: UK population grows to 59.6 million" (Press release). Office for National Statistics. 24 June 2004. Archived from the original on 22 July 2004. Retrieved 14 April 2012. +Jump up ^ Khan, Urmee (16 September 2008). "England is most crowded country in Europe". The Daily Telegraph (London). Retrieved 5 September 2009. +Jump up ^ Carrell, Severin (17 December 2012). "Scotland's population at record high". The Guardian. London. Retrieved 18 December 2012. +^ Jump up to: a b c "Vital Statistics: Population and Health Reference Tables (February 2014 Update): Annual Time Series Data". ONS. 
Retrieved 27 April 2014. +Jump up ^ Boseley, Sarah (14 July 2008). "The question: What's behind the baby boom?". The Guardian (London). p. 3. Retrieved 28 August 2009. +Jump up ^ Tables, Graphs and Maps Interface (TGM) table. Eurostat (26 February 2013). Retrieved 12 July 2013. +Jump up ^ Campbell, Denis (11 December 2005). "3.6m people in Britain are gay – official". The Observer (London). Retrieved 28 April 2013. +Jump up ^ "2011 Census - Built-up areas". ONS. Retrieved 1 July 2013. +Jump up ^ Mid-2012 Population Estimates for Settlements and Localities in Scotland General Register Office for Scotland +Jump up ^ "Belfast Metropolitan Urban Area NISRA 2005". Retrieved 28 April 2013. +Jump up ^ 2011 Census: KS201UK Ethnic group, local authorities in the United Kingdom, Accessed 21 February 2014 +Jump up ^ "Welsh people could be most ancient in UK, DNA suggests". BBC News. 19 June 2012. Retrieved 28 April 2013. +Jump up ^ Thomas, Mark G. et al. "Evidence for a segregated social structure in early Anglo-Saxon England". Proceedings of the Royal Society B: Biological Sciences 273(1601): 2651–2657. +Jump up ^ Owen, James (19 July 2005). "Review of 'The Tribes of Britain'". National Geographic (Washington DC). +Jump up ^ Oppenheimer, Stephen (October 2006). "Myths of British ancestry" at the Wayback Machine (archived 26 September 2006). Prospect (London). Retrieved 5 November 2010. +Jump up ^ Henderson, Mark (23 October 2009). "Scientist – Griffin hijacked my work to make race claim about 'British aborigines'". The Times (London). Retrieved 26 October 2009. (subscription required) +Jump up ^ Costello, Ray (2001). Black Liverpool: The Early History of Britain's Oldest Black Community 1730–1918. Liverpool: Picton Press. ISBN 1-873245-07-6. +Jump up ^ "Culture and Ethnicity Differences in Liverpool – Chinese Community". Chambré Hardman Trust. Retrieved 26 October 2009. +Jump up ^ Coleman, David; Compton, Paul; Salt, John (2002). 
"The demographic characteristics of immigrant populations", Council of Europe, p.505. ISBN 92-871-4974-7. +Jump up ^ Mason, Chris (30 April 2008). "'Why I left UK to return to Poland'". BBC News. +Jump up ^ "Resident population estimates by ethnic group (percentages): London". Office for National Statistics. Retrieved 23 April 2008. +Jump up ^ "Resident population estimates by ethnic group (percentages): Leicester". Office for National Statistics. Retrieved 23 April 2008. +Jump up ^ "Census 2001 – Ethnicity and religion in England and Wales". Office for National Statistics. Retrieved 23 April 2008. +Jump up ^ Loveys, Kate (22 June 2011). "One in four primary school pupils are from an ethnic minority and almost a million schoolchildren do not speak English as their first language". Daily Mail (London). Retrieved 28 June 2011. +Jump up ^ Rogers, Simon (19 May 2011). "Non-white British population reaches 9.1 million". The Guardian (London). +Jump up ^ Wallop, Harry (18 May 2011). "Population growth of last decade driven by non-white British". The Daily Telegraph (London). +Jump up ^ "Official EU languages". European Commission. 8 May 2009. Retrieved 16 October 2009. +Jump up ^ "Language Courses in New York". United Nations. 2006. Retrieved 29 November 2010. +Jump up ^ "English language – Government, citizens and rights". Directgov. Retrieved 23 August 2011. +Jump up ^ "Commonwealth Secretariat – UK". Commonwealth Secretariat. Retrieved 23 August 2011. +^ Jump up to: a b c "Languages across Europe: United Kingdom". BBC. Retrieved 4 February 2013. +Jump up ^ Booth, Robert (30 January 2013). "Polish becomes England's second language". The Guardian (London). Retrieved 4 February 2013. 
+Jump up ^ European Charter for Regional or Minority Languages, Strasbourg, 5.XI.1992 - http://conventions.coe.int/treaty/en/Treaties/Html/148.htm +Jump up ^ Framework Convention for the Protection of National Minorities, Strasbourg, 1.II.1995 - http://conventions.coe.int/Treaty/en/Treaties/Html/157.htm +Jump up ^ National Statistics Online – Welsh Language[dead link]. National Statistics Office. +Jump up ^ "Differences in estimates of Welsh Language Skills". Office for National Statistics. Archived from the original on 12 January 2010. Retrieved 30 December 2008. +Jump up ^ Wynn Thomas, Peter (March 2007). "Welsh today". Voices. BBC. Retrieved 5 July 2011. +Jump up ^ "Scotland's Census 2001 – Gaelic Report". General Register Office for Scotland. Retrieved 28 April 2013. +Jump up ^ "Local UK languages 'taking off'". BBC News. 12 February 2009. +Jump up ^ Edwards, John R. (2010). Minority languages and group identity: cases and categories. John Benjamins. pp. 150–158. ISBN 978-90-272-1866-7. Retrieved 12 March 2011. +Jump up ^ Koch, John T. (2006). Celtic culture: a historical encyclopedia. ABC-CLIO. p. 696. ISBN 978-1-85109-440-0. +Jump up ^ "Language Data – Scots". European Bureau for Lesser-Used Languages. Archived from the original on 23 June 2007. Retrieved 2 November 2008. +Jump up ^ "Fall in compulsory language lessons". BBC News. 4 November 2004. +Jump up ^ "The School Gate for parents in Wales". BBC. Retrieved 28 April 2013. +Jump up ^ Cannon, John, ed. (2nd edn., 2009). A Dictionary of British History. Oxford University Press. p. 144. ISBN 0-19-955037-9. +Jump up ^ Field, Clive D. (November 2009). "British religion in numbers"[dead link]. BRIN Discussion Series on Religious Statistics, Discussion Paper 001. Retrieved 3 June 2011. +Jump up ^ Yilmaz, Ihsan (2005). Muslim Laws, Politics and Society in Modern Nation States: Dynamic Legal Pluralisms in England, Turkey, and Pakistan. Aldershot: Ashgate Publishing. pp. 55–6. ISBN 0-7546-4389-1. 
+Jump up ^ Brown, Callum G. (2006). Religion and Society in Twentieth-Century Britain. Harlow: Pearson Education. p. 291. ISBN 0-582-47289-X. +Jump up ^ Norris, Pippa; Inglehart, Ronald (2004). Sacred and Secular: Religion and Politics Worldwide. Cambridge University Press. p. 84. ISBN 0-521-83984-X. +Jump up ^ Fergusson, David (2004). Church, State and Civil Society. Cambridge University Press. p. 94. ISBN 0-521-52959-X. +Jump up ^ "UK Census 2001". National Office for Statistics. Archived from the original on 12 March 2007. Retrieved 22 April 2007. +Jump up ^ "Religious Populations". Office for National Statistics. 11 October 2004. Archived from the original on 6 June 2011. +Jump up ^ "United Kingdom: New Report Finds Only One in 10 Attend Church". News.adventist.org. 4 April 2007. Retrieved 12 September 2010. +Jump up ^ Philby, Charlotte (12 December 2012). "Less religious and more ethnically diverse: Census reveals a picture of Britain today". The Independent (London). +Jump up ^ The History of the Church of England. The Church of England. Retrieved 23 November 2008. +Jump up ^ "Queen and Church of England". British Monarchy Media Centre. Archived from the original on 8 October 2006. Retrieved 5 June 2010. +Jump up ^ "Queen and the Church". The British Monarchy (Official Website). Archived from the original on 7 June 2011. +Jump up ^ "How we are organised". Church of Scotland. Archived from the original on 7 June 2011. +Jump up ^ Weller, Paul (2005). Time for a Change: Reconfiguring Religion, State, and Society. London: Continuum. pp. 79–80. ISBN 0567084876. +Jump up ^ Peach, Ceri, "United Kingdom, a major transformation of the religious landscape", in H. Knippenberg. ed. (2005). The Changing Religious Landscape of Europe. Amsterdam: Het Spinhuis. pp. 44–58. ISBN 90-5589-248-3. +Jump up ^ Richards, Eric (2004). Britannia's children: Emigration from England, Scotland, Wales and Ireland since 1600. London: Hambledon, p. 143. ISBN 978-1-85285-441-6. 
+Jump up ^ Gibney, Matthew J.; Hansen, Randall (2005). Immigration and asylum: from 1900 to the present, ABC-CLIO, p. 630. ISBN 1-57607-796-9 +Jump up ^ "Short history of immigration". BBC. 2005. Retrieved 28 August 2010. +Jump up ^ Rogers, Simon (11 December 2012). "Census 2011 mapped and charted: England & Wales in religion, immigration and race". London: Guardian. Retrieved 11 December 2012. +Jump up ^ 6.5% of the EU population are foreigners and 9.4% are born abroad, Eurostat, Katya Vasileva, 34/2011. +Jump up ^ Muenz, Rainer (June 2006). "Europe: Population and Migration in 2005". Migration Policy Institute. Retrieved 2 April 2007. +Jump up ^ "Immigration and births to non-British mothers pushes British population to record high". London Evening Standard. 21 August 2008. +Jump up ^ Doughty, Steve; Slack, James (3 June 2008). "Third World migrants behind our 2.3m population boom". Daily Mail (London). +Jump up ^ Bentham, Martin (20 October 2008). "Tories call for tougher control of immigration". London Evening Standard. +Jump up ^ "Minister rejects migrant cap plan". BBC News. 8 September 2008. Retrieved 26 April 2011. +Jump up ^ Johnston, Philip (5 January 2007). "Immigration 'far higher' than figures say". The Daily Telegraph (London). Retrieved 20 April 2007. +Jump up ^ Travis, Alan (25 August 2011). "UK net migration rises 21%". The Guardian (London). +^ Jump up to: a b "Migration Statistics Quarterly Report May 2012". Office for National Statistics. 24 May 2012. +Jump up ^ "Migration to UK more than double government target". BBC News. 24 May 2012. +^ Jump up to: a b "Citizenship". Home Office. August 2011. Retrieved 24 October 2011.[dead link] +Jump up ^ Bamber, David (20 December 2000). "Migrant squad to operate in France". The Daily Telegraph (London). +Jump up ^ "Settlement". Home Office. August 2011. Retrieved 24 October 2011.[dead link] +Jump up ^ "Births in England and Wales by parents' country of birth, 2011". Office for National Statistics. 
30 August 2012. Retrieved 28 April 2013. +Jump up ^ "Right of Union citizens and their family members to move and reside freely within the territory of the Member States". European Commission. Retrieved 28 April 2013. +Jump up ^ Doward, Jamie; Temko, Ned (23 September 2007). "Home Office shuts the door on Bulgaria and Romania". The Observer (London). p. 2. Retrieved 23 August 2008. +Jump up ^ Sumption, Madeleine; Somerville, Will (January 2010). The UK's new Europeans: Progress and challenges five years after accession. Policy Report (London: Equality and Human Rights Commission). p. 13. ISBN 978-1-84206-252-4. Retrieved 19 January 2010. +Jump up ^ Doward, Jamie; Rogers, Sam (17 January 2010). "Young, self-reliant, educated: portrait of UK's eastern European migrants". The Observer (London). Retrieved 19 January 2010. +Jump up ^ Hopkirk, Elizabeth (20 October 2008). "Packing up for home: Poles hit by UK's economic downturn". London Evening Standard. +Jump up ^ "Migrants to UK 'returning home'". BBC News. 8 September 2009. Retrieved 8 September 2009. +Jump up ^ "UK sees shift in migration trend". BBC News. 27 May 2010. Retrieved 28 May 2010. +Jump up ^ "Fresh Talent: Working in Scotland". London: UK Border Agency. Retrieved 30 October 2010. +Jump up ^ Boxell, James (28 June 2010). "Tories begin consultation on cap for migrants". Financial Times (London). Retrieved 17 September 2010. +Jump up ^ "Vince Cable: Migrant cap is hurting economy". The Guardian (London). Press Association. 17 September 2010. Retrieved 17 September 2010. +Jump up ^ Richards (2004), pp. 6–7. +^ Jump up to: a b Sriskandarajah, Dhananjayan; Drew, Catherine (11 December 2006). "Brits Abroad: Mapping the scale and nature of British emigration". Institute for Public Policy Research. Retrieved 20 January 2007. +Jump up ^ "Brits Abroad: world overview". BBC. n.d. Retrieved 20 April 2007. +Jump up ^ Casciani, Dominic (11 December 2006). "5.5 m Britons 'opt to live abroad'". BBC News. 
Retrieved 20 April 2007. +Jump up ^ "Brits Abroad: Country-by-country". BBC News. 11 December 2006. +Jump up ^ "Local Authorities". Department for Children, Schools and Families. Retrieved 21 December 2008. +Jump up ^ Gordon, J.C.B. (1981). Verbal Deficit: A Critique. London: Croom Helm. p. 44 note 18. ISBN 978-0-85664-990-5. +Jump up ^ Section 8 ('Duty of local education authorities to secure provision of primary and secondary schools'), Sections 35–40 ('Compulsory attendance at Primary and Secondary Schools') and Section 61 ('Prohibition of fees in schools maintained by local education authorities ...'), Education Act 1944. +Jump up ^ "England's pupils in global top 10". BBC News. 10 December 2008. +Jump up ^ "More state pupils in universities". BBC News. 19 July 2007. +Jump up ^ MacLeod, Donald (9 November 2007). "Private school pupil numbers in decline". The Guardian (London). Retrieved 31 March 2010. +Jump up ^ Frankel, Hannah (3 September 2010). "Is Oxbridge still a preserve of the posh?". TES (London). Retrieved 9 April 2013. +Jump up ^ "World's top 100 universities 2013: their reputations ranked by Times Higher Education". The Guardian (London). 2013. Retrieved 23 October 2014. +Jump up ^ Davenport, F.; Beech, C.; Downs, T.; Hannigan, D. (2006). Ireland. Lonely Planet, 7th edn. ISBN 1-74059-968-3. p. 564. +Jump up ^ "About SQA". Scottish Qualifications Authority. 10 April 2013. Retrieved 28 April 2013. +Jump up ^ "About Learning and Teaching Scotland". Learning and Teaching Scotland. Retrieved 28 April 2013. +Jump up ^ "Brain drain in reverse". Scotland Online Gateway. July 2002. Archived from the original on 4 December 2007. +Jump up ^ "Increase in private school intake". BBC News. 17 April 2007. +Jump up ^ "MSPs vote to scrap endowment fee". BBC News. 28 February 2008. +Jump up ^ What will your child learn?[dead link] The Welsh Assembly Government. Retrieved 22 January 2010. +Jump up ^ CCEA. "About Us – What we do". 
Council for the Curriculum Examinations & Assessment. Retrieved 28 April 2013. +Jump up ^ Elitist Britain?, Social Mobility and Child Poverty Commission, 28 August 2014 +Jump up ^ Arnett, George (28 August 2014). "Elitism in Britain - breakdown by profession". The Guardian: Datablog. +Jump up ^ Haden, Angela; Campanini, Barbara, eds. (2000). The world health report 2000 – Health systems: improving performance. Geneva: World Health Organisation. ISBN 92-4-156198-X. Retrieved 5 July 2011. +Jump up ^ World Health Organization. "Measuring overall health system performance for 191 countries". New York University. Retrieved 5 July 2011. +Jump up ^ "'Huge contrasts' in devolved NHS". BBC News. 28 August 2008. +Jump up ^ Triggle, Nick (2 January 2008). "NHS now four different systems". BBC News. +Jump up ^ Fisher, Peter. "The NHS from Thatcher to Blair". NHS Consultants Association (International Association of Health Policy). The Budget ... was even more generous to the NHS than had been expected amounting to an annual rise of 7.4% above the rate of inflation for the next 5 years. This would take us to 9.4% of GDP spent on health ie around EU average. +Jump up ^ "OECD Health Data 2009 – How Does the United Kingdom Compare". Paris: Organisation for Economic Co-operation and Development. Retrieved 28 April 2013.[dead link] +Jump up ^ "The cultural superpower: British cultural projection abroad". Journal of the British Politics Society, Norway. Volume 6. No. 1. Winter 2011 +Jump up ^ Sheridan, Greg (15 May 2010). "Cameron has chance to make UK great again". The Australian (Sydney). Retrieved 20 May 2012. +Jump up ^ Goldfarb, Jeffrey (10 May 2006). "Bookish Britain overtakes America as top publisher". RedOrbit (Texas). Reuters. +Jump up ^ "William Shakespeare (English author)". Britannica Online encyclopedia. Retrieved 26 February 2006. +Jump up ^ MSN Encarta Encyclopedia article on Shakespeare. Archived from the original on 9 February 2006. Retrieved 26 February 2006. 
+Jump up ^ William Shakespeare. Columbia Electronic Encyclopedia. Retrieved 26 February 2006. +Jump up ^ "Mystery of Christie's success is solved". The Daily Telegraph (London). 19 December 2005. Retrieved 14 November 2010. +Jump up ^ "All-Time Essential Comics". IGN. Retrieved 15 August 2013. +Jump up ^ Johnston, Rich."Before Watchmen To Double Up For Hardcover Collections". Bleeding Cool. 10 December 2012. Retrieved 15 August 2013. +Jump up ^ "Edinburgh, UK appointed first UNESCO City of Literature". Unesco. 2004. Retrieved 28 April 2013.[dead link] +Jump up ^ "Early Welsh poetry". BBC Wales. Retrieved 29 December 2010. +Jump up ^ Lang, Andrew (2003) [1913]. History of English Literature from Beowulf to Swinburne. Holicong, PA: Wildside Press. p. 42. ISBN 978-0-8095-3229-2. +Jump up ^ "Dafydd ap Gwilym". Academi website. Academi. 2011. Retrieved 3 January 2011. Dafydd ap Gwilym is widely regarded as one of the greatest Welsh poets of all time, and amongst the leading European poets of the Middle Ages. +Jump up ^ True birthplace of Wales's literary hero. BBC News. Retrieved 28 April 2012 +Jump up ^ Kate Roberts: Biography at the Wayback Machine. BBC Wales. Retrieved 28 April 2012 +Jump up ^ Swift, Jonathan; Fox, Christopher (1995). Gulliver's travels: complete, authoritative text with biographical and historical contexts, critical history, and essays from five contemporary critical perspectives. Basingstoke: Macmillan. p. 10. ISBN 978-0-333-63438-7. +Jump up ^ "Bram Stoker." (PDF). The New York Times. 23 April 1912. Retrieved 1 January 2011. +^ Jump up to: a b "1960–1969". EMI Group. Retrieved 31 May 2008. +^ Jump up to: a b "Paul At Fifty". Time (New York). 8 June 1992. +^ Jump up to: a b Most Successful Group The Guinness Book of Records 1999, p. 230. Retrieved 19 March 2011. +Jump up ^ "British Citizen by Act of Parliament: George Frideric Handel". UK Parliament. 20 July 2009. Retrieved 11 September 2009.[dead link] +Jump up ^ Andrews, John (14 April 2006). 
"Handel all'inglese". Playbill (New York). Retrieved 11 September 2009. +Jump up ^ Citron, Stephen (2001). Sondheim and Lloyd-Webber: The new musical. London: Chatto & Windus. ISBN 978-1-85619-273-6. +Jump up ^ "Beatles a big hit with downloads". Belfast Telegraph. 25 November 2010. Retrieved 16 May 2011. +Jump up ^ "British rock legends get their own music title for PlayStation3 and PlayStation2" (Press release). EMI. 2 February 2009. +Jump up ^ Khan, Urmee (17 July 2008). "Sir Elton John honoured in Ben and Jerry ice cream". The Daily Telegraph (London). +Jump up ^ Alleyne, Richard (19 April 2008). "Rock group Led Zeppelin to reunite". The Daily Telegraph (London). Retrieved 31 March 2010. +Jump up ^ Fresco, Adam (11 July 2006). "Pink Floyd founder Syd Barrett dies at home". The Times (London). Retrieved 31 March 2010. (subscription required) +Jump up ^ Holton, Kate (17 January 2008). "Rolling Stones sign Universal album deal". Reuters. Retrieved 26 October 2008. +Jump up ^ Walker, Tim (12 May 2008). "Jive talkin': Why Robin Gibb wants more respect for the Bee Gees". The Independent (London). Retrieved 26 October 2008. +Jump up ^ "Brit awards winners list 2012: every winner since 1977". The Guardian (London). Retrieved 28 February 2012. +Jump up ^ Corner, Lewis (16 February 2012). "Adele, Coldplay biggest-selling UK artists worldwide in 2011". Digital Spy. Retrieved 22 March 2012. +Jump up ^ Hughes, Mark (14 January 2008). "A tale of two cities of culture: Liverpool vs Stavanger". The Independent (London). Retrieved 2 August 2009. +Jump up ^ "Glasgow gets city of music honour". BBC News. 20 August 2008. Retrieved 2 August 2009. +Jump up ^ Bayley, Stephen (24 April 2010). "The startling success of Tate Modern". The Times (London). Retrieved 19 January 2011. (subscription required) +Jump up ^ "Vertigo is named 'greatest film of all time'". BBC News. 2 August 2012. Retrieved 18 August 2012. +Jump up ^ "The Directors' Top Ten Directors". British Film Institute. 
Archived from the original on 27 May 2012. +Jump up ^ "Chaplin, Charles (1889–1977)". British Film Institute. Retrieved 25 January 2011. +Jump up ^ "Powell, Michael (1905–1990)". British Film Institute. Retrieved 25 January 2011. +Jump up ^ "Reed, Carol (1906–1976)". British Film Institute. Retrieved 25 January 2011. +Jump up ^ "Scott, Sir Ridley (1937–)". British Film Institute. Retrieved 25 January 2011. +Jump up ^ "Andrews, Julie (1935–)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Burton, Richard (1925–1984)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Caine, Michael (1933–)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Chaplin, Charles (1889–1977)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Connery, Sean (1930–)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Leigh, Vivien (1913–1967)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Niven, David (1910–1983)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Olivier, Laurence (1907–1989)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Sellers, Peter (1925–1980)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Winslet, Kate (1975–)". British Film Institute. Retrieved 11 December 2010. +Jump up ^ "Daniel Day-Lewis makes Oscar history with third award"'. BBC News. Retrieved 15 August 2013 +Jump up ^ "Harry Potter becomes highest-grossing film franchise". The Guardian (London). 11 September 2007. Retrieved 2 November 2010. +Jump up ^ "History of Ealing Studios". Ealing Studios. Retrieved 5 June 2010. +^ Jump up to: a b "UK film – the vital statistics". UK Film Council. Retrieved 22 October 2010.[dead link] +Jump up ^ "The BFI 100". British Film Institute. 6 September 2006. Archived from the original on 1 April 2011. +Jump up ^ "Baftas fuel Oscars race". BBC News. 26 February 2001. Retrieved 14 February 2011. 
+^ Jump up to: a b "BBC: World's largest broadcaster & Most trusted media brand". Media Newsline. Archived from the original on 5 October 2010. Retrieved 23 September 2010. +^ Jump up to: a b "Digital licence". Prospect. Retrieved 23 September 2010. +^ Jump up to: a b "About the BBC – What is the BBC". BBC Online. Retrieved 23 September 2010. +Jump up ^ Newswire7 (13 August 2009). "BBC: World's largest broadcaster & Most trusted media brand". Media Newsline. Archived from the original on 17 June 2011. +Jump up ^ "TV Licence Fee: facts & figures". BBC Press Office. April 2010. Archived from the original on 17 June 2011. +Jump up ^ "Publications & Policies: The History of ITV". ITV.com. Archived from the original on 17 June 2011. +Jump up ^ "Publishing". News Corporation. Archived from the original on 17 June 2011. +Jump up ^ "Direct Broadcast Satellite Television". News Corporation. Archived from the original on 17 June 2011. +Jump up ^ William, D. (2010). UK Cities: A Look at Life and Major Cities in England, Scotland, Wales and Northern Ireland. Eastbourne: Gardners Books. ISBN 978-9987-16-021-1, pp. 22, 46, 109 and 145. +Jump up ^ "Publishing". Department of Culture, Media and Sport. Archived from the original on 17 June 2011. +Jump up ^ Ofcom "Communication Market Report 2010", 19 August 2010, pp. 97, 164 and 191 +Jump up ^ "Social Trends: Lifestyles and social participation". Office for National Statistics. 16 February 2010. Archived from the original on 17 June 2011. +Jump up ^ "Top 20 countries with the highest number of Internet users". Internet World Stats. Archived from the original on 17 June 2011. +Jump up ^ Fieser, James, ed. (2000). A bibliography of Scottish common sense philosophy: Sources and origins. Bristol: Thoemmes Press. Retrieved 17 December 2010. +Jump up ^ Palmer, Michael (1999). Moral Problems in Medicine: A Practical Coursebook. Cambridge: Lutterworth Press. p. 66. ISBN 978-0-7188-2978-0. +Jump up ^ Scarre, Geoffrey (1995). Utilitarianism. 
London: Routledge. p. 82. ISBN 978-0-415-12197-2. +Jump up ^ Gysin, Christian (9 March 2007). "Wembley kick-off: Stadium is ready and England play first game in fortnight". Daily Mail (London). Retrieved 19 March 2007. +Jump up ^ "Opening ceremony of the games of the XXX Olympiad". Olympic.org. Retrieved 30 November 2013 +Jump up ^ "Unparalleled Sporting History" . Reuters. Retrieved 30 November 2013 +Jump up ^ "Rugby Union 'Britain's Second Most Popular Sport'". Ipsos-Mori. 22 December 2003. Retrieved 28 April 2013. +Jump up ^ Ebner, Sarah (2 July 2013). "History and time are key to power of football, says Premier League chief". The Times (London). Retrieved 30 November 2013. +Jump up ^ Mitchell, Paul (November 2005). "The first international football match". BBC Sport Scotland. Retrieved 15 December 2013. +Jump up ^ "Why is there no GB Olympics football team?". BBC Sport. 5 August 2008. Retrieved 31 December 2010. +Jump up ^ "Blatter against British 2012 team". BBC News. 9 March 2008. Retrieved 2 April 2008. +Jump up ^ "About ECB". England and Wales Cricket Board. n.d. Retrieved 28 April 2013. +Jump up ^ McLaughlin, Martyn (4 August 2009). "Howzat happen? England fields a Gaelic-speaking Scotsman in Ashes". The Scotsman (Edinburgh). Retrieved 30 December 2010. +Jump up ^ "Uncapped Joyce wins Ashes call up". BBC Sport. 15 November 2006. Retrieved 30 December 2010. +Jump up ^ "Glamorgan". BBC South East Wales. August 2009. Retrieved 30 December 2010. +Jump up ^ Ardener, Shirley (2007). Professional identities: policy and practice in business and bureaucracy. New York: Berghahn. p. 27. ISBN 978-1-84545-054-0. +Jump up ^ "Official Website of Rugby League World Cup 2008". Archived from the original on 16 October 2007. +Jump up ^ Louw, Jaco; Nesbit, Derrick (2008). The Girlfriends Guide to Rugby. Johannesburg: South Publishers. ISBN 978-0-620-39541-0. +Jump up ^ "Triple Crown". RBS 6 Nations. Retrieved 6 March 2011. +Jump up ^ "Tracking the Field". Ipsos MORI. 
Archived from the original on 5 February 2009. Retrieved 17 October 2008. +Jump up ^ "Links plays into the record books". BBC News. 17 March 2009. +Jump up ^ Chowdhury, Saj (22 January 2007). "China in Ding's hands". BBC Sport. Retrieved 2 January 2011. +Jump up ^ "Lawn Tennis and Major T.Gem". The Birmingham Civic Society. Archived from the original on 18 August 2011. Retrieved 31 December 2010. +Jump up ^ Gould, Joe (10 April 2007). "The ancient Irish sport of hurling catches on in America". Columbia News Service (Columbia Journalism School). Retrieved 17 May 2011. +Jump up ^ "Shinty". Scottishsport.co.uk. Retrieved 28 April 2013. +Jump up ^ "Welsh dragon call for Union flag". BBC News. 27 November 2007. Retrieved 17 October 2008. +Jump up ^ "Britannia on British Coins". Chard. Retrieved 25 June 2006. +Jump up ^ Baker, Steve (2001). Picturing the Beast. University of Illinois Press. p. 52. ISBN 0-252-07030-5. +Further reading +Hitchens, Peter (2000). The Abolition of Britain: from Winston Churchill to Princess Diana. Second ed. San Francisco, Calif.: Encounter Books. xi, 332 p. ISBN 1-893554-18-X. +Lambert, Richard S. (1964). The Great Heritage: a History of Britain for Canadians. House of Grant, 1964 (and earlier editions and/or printings). 
+External links +Find more about +United Kingdom +at Wikipedia's sister projects +Search Wiktionary Definitions from Wiktionary +Search Commons Media from Commons +Search Wikinews News stories from Wikinews +Search Wikiquote Quotations from Wikiquote +Search Wikisource Source texts from Wikisource +Search Wikibooks Textbooks from Wikibooks +Search Wikivoyage Travel guide from Wikivoyage +Search Wikiversity Learning resources from Wikiversity +Government +Official website of HM Government +Official website of the British Monarchy +Official Yearbook of the United Kingdom statistics +The official site of the British Prime Minister's Office +General information +United Kingdom from the BBC News +United Kingdom entry at The World Factbook +United Kingdom from UCB Libraries GovPubs +United Kingdom at DMOZ +United Kingdom Encyclopædia Britannica entry +United Kingdom from the OECD +United Kingdom at the EU + Wikimedia Atlas of United Kingdom + Geographic data related to United Kingdom at OpenStreetMap +Key Development Forecasts for the United Kingdom from International Futures +Travel +Official tourist guide to Britain +[hide] v t e +United Kingdom topics +History +Chronology +Formation Georgian era Victorian era Edwardian era World War I Interwar World War II UK since 1945 (Postwar Britain) +By topic +Economic Empire Maritime Military +Geography +Administrative +Countries of the United Kingdom Crown dependencies Overseas territories City status Towns Former colonies +Physical +British Isles terminology Great Britain Geology Northern Ireland Lakes and lochs Mountains Rivers Volcanoes +Resources +Energy/Renewable energy Biodiesel Coal Geothermal Hydraulic frac. 
Hydroelectricity Marine North Sea oil Solar Wind Food Agriculture Fishing English Scottish Hunting Materials Flora Forestry Mining +Politics +Constitution Courts Elections Foreign relations Judiciary Law Law enforcement Legislation Monarchy monarchs Nationality Parliament House of Commons House of Lords Political parties +Government +Cabinet list Civil service Departments Prime Minister list +Military +Royal Navy Army Royal Air Force Weapons of mass destruction +Economy +Banks Bank of England Budget Economic geography Pound (currency) Stock Exchange Taxation Telecommunications Tourism Transport +Society +Affordability of housing Crime Demography Drug policy Education Ethnic groups Health care Immigration Languages Poverty Food banks Prostitution Public holidays Social care Social structure +Culture +Art Cinema Cuisine Identity Literature Media television Music Religion Sport Symbols Theatre +[show] +Countries of the United Kingdom +Outline Index +Book Category Portal WikiProject +[show] +Gnome-globe.svg Geographic locale +[show] v t e +Member states of the European Union +[show] +International organisations +[show] v t e +English-speaking world +[show] v t e +National personifications +Coordinates: 55°N 3°W +Categories: United KingdomBritish IslandsConstitutional monarchiesCountries in EuropeEnglish-speaking countries and territoriesG20 nationsG7 nationsG8 nationsIsland countriesLiberal democraciesMember states of NATOMember states of the Commonwealth of NationsMember states of the Council of EuropeMember states of the European UnionMember states of the Union for the MediterraneanMember states of the United NationsNorthern EuropeWestern Europe +Navigation menu +Create accountLog inArticleTalkReadView sourceView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file 
+Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +ÐдыгÑÐ±Ð·Ñ +Afrikaans +Akan +Alemannisch +አማርኛ +Ænglisc +ÐÒ§Ñшәа +العربية +Aragonés +ÜܪܡÜÜ +Armãneashti +Arpetan +Asturianu +Avañe'ẽ +Ðвар +AzÉ™rbaycanca +বাংলা +Bahasa Banjar +Bân-lâm-gú +БашҡортÑа +БеларуÑÐºÐ°Ñ +БеларуÑÐºÐ°Ñ (тарашкевіца)‎ +भोजपà¥à¤°à¥€ +Bikol Central +Bislama +БългарÑки +Boarisch +བོད་ཡིག +Bosanski +Brezhoneg +БурÑад +Català +Чӑвашла +Cebuano +ÄŒeÅ¡tina +Chavacano de Zamboanga +ChiShona +Corsu +Cymraeg +Dansk +Deutsch +Þ‹Þ¨ÞˆÞ¬Þ€Þ¨Þ„Þ¦ÞÞ° +Diné bizaad +Dolnoserbski +ཇོང་འ+Eesti +Ελληνικά +Emiliàn e rumagnòl +Español +Esperanto +Estremeñu +Euskara +Ùارسی +Fiji Hindi +Føroyskt +Français +Frysk +Furlan +Gaeilge +Gaelg +Gagauz +Gàidhlig +Galego +贛語 +ગà«àªœàª°àª¾àª¤à«€ +客家語/Hak-kâ-ngî +Хальмг +한국어 +Hausa +Hawaii +Õ€Õ¡ÕµÕ¥Ö€Õ¥Õ¶ +हिनà¥à¤¦à¥€ +Hornjoserbsce +Hrvatski +Ido +Igbo +Ilokano +বিষà§à¦£à§à¦ªà§à¦°à¦¿à¦¯à¦¼à¦¾ মণিপà§à¦°à§€ +Bahasa Indonesia +Interlingua +Interlingue +Ирон +IsiZulu +Ãslenska +Italiano +עברית +Basa Jawa +Kalaallisut +ಕನà³à²¨à²¡ +Kapampangan +Къарачай-малкъар +ქáƒáƒ áƒ—ული +Kaszëbsczi +Қазақша +Kernowek +Kinyarwanda +Kiswahili +Коми +Kongo +Kreyòl ayisyen +Kurdî +Кыргызча +Кырык мары +Ladino +Лезги +ລາວ +Latgaļu +Latina +LatvieÅ¡u +Lëtzebuergesch +Lietuvių +Ligure +Limburgs +Lingála +Lojban +Lumbaart +Magyar +МакедонÑки +Malagasy +മലയാളം +Malti +MÄori +मराठी +მáƒáƒ áƒ’áƒáƒšáƒ£áƒ áƒ˜ +مصرى +مازÙرونی +Bahasa Melayu +Mìng-dĕ̤ng-ngṳ̄ +Mirandés +Монгол +မြန်မာဘာသာ +NÄhuatl +Dorerin Naoero +Nederlands +Nedersaksies +नेपाली +नेपाल भाषा +日本語 +Napulitano +Ðохчийн +Nordfriisk +Norfuk / Pitkern +Norsk bokmÃ¥l +Norsk nynorsk +Nouormand +Novial +Occitan +Олык марий +ଓଡ଼ିଆ +Oromoo +OÊ»zbekcha +ਪੰਜਾਬੀ +Pangasinan +پنجابی +Papiamentu +پښتو +Перем Коми +ភាសាážáŸ’មែរ +Picard +Piemontèis +Tok Pisin +Plattdüütsch +Polski +Ποντιακά +Português +Qırımtatarca +Reo tahiti +Ripoarisch +Română +Romani 
+Rumantsch +Runa Simi +РуÑиньÑкый +РуÑÑкий +Саха тыла +Sámegiella +संसà¥à¤•à¥ƒà¤¤à¤®à¥ +Sardu +Scots +Seeltersk +Shqip +Sicilianu +සිංහල +Simple English +SiSwati +SlovenÄina +SlovenÅ¡Äina +СловѣньÑкъ / ⰔⰎⰑⰂⰡâ°â° â°”â°â°Ÿ +Åšlůnski +Soomaaliga +کوردی +Sranantongo +СрпÑки / srpski +Srpskohrvatski / ÑрпÑкохрватÑки +Basa Sunda +Suomi +Svenska +Tagalog +தமிழ௠+Taqbaylit +Tarandíne +Татарча/tatarça +తెలà±à°—à± +Tetun +ไทย +Тоҷикӣ +á£áŽ³áŽ© +Tsetsêhestâhese +Türkçe +Twi +Удмурт +ᨅᨔ ᨕᨘá¨á¨— +УкраїнÑька +اردو +ئۇيغۇرچە / Uyghurche +Vahcuengh +Vèneto +Vepsän kel’ +Tiếng Việt +Volapük +Võro +Walon +文言 +West-Vlams +Winaray +Wolof +å´è¯­ +ייִדיש +Yorùbá +粵語 +Zazaki +Zeêuws +ŽemaitÄ—Å¡ka +中文 +Edit links +This page was last modified on 22 November 2014 at 11:19. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki + + +World Trade Organization +From Wikipedia, the free encyclopedia +"WTO" redirects here. For other uses, see WTO (disambiguation). 
+World Trade Organization (English) +Organisation mondiale du commerce (French) +Organización Mundial del Comercio (Spanish) +World Trade Organization (logo and wordmark).svg +Official logo of WTO +WTO members and observers.svg + Members + Members, dually represented by the EU + Observers + Non-members +Abbreviation WTO +Formation 1 January 1995; 19 years ago +Type International trade organization +Purpose Liberalize international trade +Headquarters Centre William Rappard, Geneva, Switzerland +Coordinates 46.12°N 6.09°ECoordinates: 46.12°N 6.09°E +Region served Worldwide +Membership 160 member states[1] +Official language English, French, Spanish[2] +Director-General Roberto Azevêdo +Budget 196 million Swiss francs (approx. 209 million US$) in 2011.[3] +Staff 640[4] +Website www.wto.org +The World Trade Organization (WTO) is an organization that intends to supervise and liberalize international trade. The organization officially commenced on 1 January 1995 under the Marrakech Agreement, replacing the General Agreement on Tariffs and Trade (GATT), which commenced in 1948.[5] The organization deals with regulation of trade between participating countries by providing a framework for negotiating and formalizing trade agreements and a dispute resolution process aimed at enforcing participants' adherence to WTO agreements, which are signed by representatives of member governments[6]:fol.9–10 and ratified by their parliaments.[7] Most of the issues that the WTO focuses on derive from previous trade negotiations, especially from the Uruguay Round (1986–1994). +The organization is attempting to complete negotiations on the Doha Development Round, which was launched in 2001 with an explicit focus on addressing the needs of developing countries. 
As of June 2012, the future of the Doha Round remained uncertain: the work programme lists 21 subjects in which the original deadline of 1 January 2005 was missed, and the round is still incomplete.[8] The conflict between free trade on industrial goods and services but retention of protectionism on farm subsidies to domestic agricultural sector (requested by developed countries) and the substantiation of the international liberalization of fair trade on agricultural products (requested by developing countries) remain the major obstacles. These points of contention have hindered any progress to launch new WTO negotiations beyond the Doha Development Round. As a result of this impasse, there has been an increasing number of bilateral free trade agreements signed.[9] As of July 2012, there were various negotiation groups in the WTO system for the current agricultural trade negotiation which is in the condition of stalemate.[10] +WTO's current Director-General is Roberto Azevêdo,[11][12] who leads a staff of over 600 people in Geneva, Switzerland.[13] A trade facilitation agreement known as the Bali Package was reached by all members on 7 December 2013, the first comprehensive agreement in the organization's history.[14][15] +Contents [hide] +1 History +1.1 GATT rounds of negotiations +1.1.1 From Geneva to Tokyo +1.1.2 Uruguay Round +1.2 Ministerial conferences +1.3 Doha Round (Doha Agenda) +2 Functions +3 Principles of the trading system +4 Organizational structure +5 Decision-making +6 Dispute settlement +7 Accession and membership +7.1 Accession process +7.2 Members and observers +8 Agreements +9 Office of director-general +9.1 List of directors-general +10 See also +11 Notes and references +12 External links +History + +The economists Harry White (left) and John Maynard Keynes at the Bretton Woods Conference. 
Both had been strong advocates of a central-controlled international trade environment and recommended the establishment of three institutions: the IMF (for fiscal and monetary issues); the World Bank (for financial and structural issues); and the ITO (for international economic cooperation).[16] +The WTO's predecessor, the General Agreement on Tariffs and Trade (GATT), was established after World War II in the wake of other new multilateral institutions dedicated to international economic cooperation – notably the Bretton Woods institutions known as the World Bank and the International Monetary Fund. A comparable international institution for trade, named the International Trade Organization was successfully negotiated. The ITO was to be a United Nations specialized agency and would address not only trade barriers but other issues indirectly related to trade, including employment, investment, restrictive business practices, and commodity agreements. But the ITO treaty was not approved by the U.S. and a few other signatories and never went into effect.[17][18][19] +In the absence of an international organization for trade, the GATT would over the years "transform itself" into a de facto international organization.[20] +GATT rounds of negotiations +See also: General Agreement on Tariffs and Trade +The GATT was the only multilateral instrument governing international trade from 1946 until the WTO was established on 1 January 1995.[21] Despite attempts in the mid-1950s and 1960s to create some form of institutional mechanism for international trade, the GATT continued to operate for almost half a century as a semi-institutionalized multilateral treaty regime on a provisional basis.[22] +From Geneva to Tokyo +Seven rounds of negotiations occurred under GATT. The first real GATT trade rounds concentrated on further reducing tariffs. Then, the Kennedy Round in the mid-sixties brought about a GATT anti-dumping Agreement and a section on development. 
The Tokyo Round during the seventies was the first major attempt to tackle trade barriers that do not take the form of tariffs, and to improve the system, adopting a series of agreements on non-tariff barriers, which in some cases interpreted existing GATT rules, and in others broke entirely new ground. Because these plurilateral agreements were not accepted by the full GATT membership, they were often informally called "codes". Several of these codes were amended in the Uruguay Round, and turned into multilateral commitments accepted by all WTO members. Only four remained plurilateral (those on government procurement, bovine meat, civil aircraft and dairy products), but in 1997 WTO members agreed to terminate the bovine meat and dairy agreements, leaving only two.[21] +Uruguay Round +Main article: Uruguay Round + +During the Doha Round, the US government blamed Brazil and India for being inflexible and the EU for impeding agricultural imports.[23] The then-President of Brazil, Luiz Inácio Lula da Silva (above right), responded to the criticisms by arguing that progress would only be achieved if the richest countries (especially the US and countries in the EU) made deeper cuts in agricultural subsidies and further opened their markets for agricultural goods.[24] +Well before GATT's 40th anniversary, its members concluded that the GATT system was straining to adapt to a new globalizing world economy.[25][26] In response to the problems identified in the 1982 Ministerial Declaration (structural deficiencies, spill-over impacts of certain countries' policies on world trade GATT could not manage etc.), the eighth GATT round – known as the Uruguay Round – was launched in September 1986, in Punta del Este, Uruguay.[25] +It was the biggest negotiating mandate on trade ever agreed: the talks were going to extend the trading system into several new areas, notably trade in services and intellectual property, and to reform trade in the sensitive sectors of agriculture and 
textiles; all the original GATT articles were up for review.[26] The Final Act concluding the Uruguay Round and officially establishing the WTO regime was signed 15 April 1994, during the ministerial meeting at Marrakesh, Morocco, and hence is known as the Marrakesh Agreement.[27] +The GATT still exists as the WTO's umbrella treaty for trade in goods, updated as a result of the Uruguay Round negotiations (a distinction is made between GATT 1994, the updated parts of GATT, and GATT 1947, the original agreement which is still the heart of GATT 1994).[25] GATT 1994 is not however the only legally binding agreement included via the Final Act at Marrakesh; a long list of about 60 agreements, annexes, decisions and understandings was adopted. The agreements fall into a structure with six main parts: +The Agreement Establishing the WTO +Goods and investment – the Multilateral Agreements on Trade in Goods including the GATT 1994 and the Trade Related Investment Measures (TRIMS) +Services — the General Agreement on Trade in Services +Intellectual property – the Agreement on Trade-Related Aspects of Intellectual Property Rights (TRIPS) +Dispute settlement (DSU) +Reviews of governments' trade policies (TPRM)[28] +In terms of the WTO's principle relating to tariff "ceiling-binding" (No. 3), the Uruguay Round has been successful in increasing binding commitments by both developed and developing countries, as may be seen in the percentages of tariffs bound before and after the 1986–1994 talks.[29] +Ministerial conferences + +The World Trade Organization Ministerial Conference of 1998, in the Palace of Nations (Geneva, Switzerland). +The highest decision-making body of the WTO is the Ministerial Conference, which usually meets every two years. It brings together all members of the WTO, all of which are countries or customs unions. The Ministerial Conference can take decisions on all matters under any of the multilateral trade agreements. 
The inaugural ministerial conference was held in Singapore in 1996. Disagreements between largely developed and developing economies emerged during this conference over four issues initiated by this conference, which led to them being collectively referred to as the "Singapore issues". The second ministerial conference was held in Geneva in Switzerland. The third conference in Seattle, Washington ended in failure, with massive demonstrations and police and National Guard crowd-control efforts drawing worldwide attention. The fourth ministerial conference was held in Doha in the Persian Gulf nation of Qatar. The Doha Development Round was launched at the conference. The conference also approved the joining of China, which became the 143rd member to join. The fifth ministerial conference was held in Cancún, Mexico, aiming at forging agreement on the Doha round. An alliance of 22 southern states, the G20 developing nations (led by India, China,[30] Brazil, ASEAN led by the Philippines), resisted demands from the North for agreements on the so-called "Singapore issues" and called for an end to agricultural subsidies within the EU and the US. The talks broke down without progress. +The sixth WTO ministerial conference was held in Hong Kong from 13–18 December 2005. It was considered vital if the four-year-old Doha Development Round negotiations were to move forward sufficiently to conclude the round in 2006. In this meeting, countries agreed to phase out all their agricultural export subsidies by the end of 2013, and terminate any cotton export subsidies by the end of 2006. Further concessions to developing countries included an agreement to introduce duty-free, tariff-free access for goods from the Least Developed Countries, following the Everything but Arms initiative of the European Union — but with up to 3% of tariff lines exempted. Other major issues were left for further negotiation to be completed by the end of 2010. 
The WTO General Council, on 26 May 2009, agreed to hold a seventh WTO ministerial conference session in Geneva from 30 November-3 December 2009. A statement by chairman Amb. Mario Matus acknowledged that the prime purpose was to remedy a breach of protocol requiring two-yearly "regular" meetings, which had lapsed with the Doha Round failure in 2005, and that the "scaled-down" meeting would not be a negotiating session, but "emphasis will be on transparency and open discussion rather than on small group processes and informal negotiating structures". The general theme for discussion was "The WTO, the Multilateral Trading System and the Current Global Economic Environment"[31] +Doha Round (Doha Agenda) +Main article: Doha Development Round + +The Doha Development Round started in 2001 is at an impasse. +The WTO launched the current round of negotiations, the Doha Development Round, at the fourth ministerial conference in Doha, Qatar in November 2001. This was to be an ambitious effort to make globalization more inclusive and help the world's poor, particularly by slashing barriers and subsidies in farming.[32] The initial agenda comprised both further trade liberalization and new rule-making, underpinned by commitments to strengthen substantial assistance to developing countries.[33] +The negotiations have been highly contentious. Disagreements still continue over several key areas including agriculture subsidies, which emerged as critical in July 2006.[34] According to a European Union statement, "The 2008 Ministerial meeting broke down over a disagreement between exporters of agricultural bulk commodities and countries with large numbers of subsistence farmers on the precise terms of a 'special safeguard measure' to protect farmers from surges in imports."[35] The position of the European Commission is that "The successful conclusion of the Doha negotiations would confirm the central role of multilateral liberalisation and rule-making. 
It would confirm the WTO as a powerful shield against protectionist backsliding."[33] An impasse remains and, as of August 2013, agreement has not been reached, despite intense negotiations at several ministerial conferences and at other sessions. On 27 March 2013, the chairman of agriculture talks announced "a proposal to loosen price support disciplines for developing countries’ public stocks and domestic food aid." He added: “...we are not yet close to agreement—in fact, the substantive discussion of the proposal is only beginning.â€[36] +[show]v · t · eGATT and WTO trade rounds[37] +Functions +Among the various functions of the WTO, these are regarded by analysts as the most important: +It oversees the implementation, administration and operation of the covered agreements.[38][39] +It provides a forum for negotiations and for settling disputes.[40][41] +Additionally, it is the WTO's duty to review and propagate the national trade policies, and to ensure the coherence and transparency of trade policies through surveillance in global economic policy-making.[39][41] Another priority of the WTO is the assistance of developing, least-developed and low-income countries in transition to adjust to WTO rules and disciplines through technical cooperation and training.[42] +(i) The WTO shall facilitate the implementation, administration and operation and further the objec­tives of this Agreement and of the Multilateral Trade Agreements, and shall also provide the frame work for the implementation, administration and operation of the multilateral Trade Agreements. +(ii) The WTO shall provide the forum for negotiations among its members concerning their multilateral trade relations in matters dealt with under the Agreement in the Annexes to this Agreement. +(iii) The WTO shall administer the Understanding on Rules and Procedures Governing the Settlement of Disputes. +(iv) The WTO shall administer Trade Policy Review Mechanism. 
(v) With a view to achieving greater coherence in global economic policy making, the WTO shall cooperate, as appropriate, with the International Monetary Fund (IMF) and with the International Bank for Reconstruction and Development (IBRD) and its affiliated agencies.[43]
a WTO member has to grant the most favorable conditions under which it allows trade in a certain product type to all other WTO members.[45] "Grant someone a special favour and you have to do the same for all other WTO members."[29] National treatment means that imported goods should be treated no less favorably than domestically produced goods (at least after the foreign goods have entered the market) and was introduced to tackle non-tariff barriers to trade (e.g. technical standards, security standards et al. discriminating against imported goods).[45] +Reciprocity. It reflects both a desire to limit the scope of free-riding that may arise because of the MFN rule, and a desire to obtain better access to foreign markets. A related point is that for a nation to negotiate, it is necessary that the gain from doing so be greater than the gain available from unilateral liberalization; reciprocal concessions intend to ensure that such gains will materialise.[46] +Binding and enforceable commitments. The tariff commitments made by WTO members in a multilateral trade negotiation and on accession are enumerated in a schedule (list) of concessions. These schedules establish "ceiling bindings": a country can change its bindings, but only after negotiating with its trading partners, which could mean compensating them for loss of trade. If satisfaction is not obtained, the complaining country may invoke the WTO dispute settlement procedures.[29][46] +Transparency. The WTO members are required to publish their trade regulations, to maintain institutions allowing for the review of administrative decisions affecting trade, to respond to requests for information by other members, and to notify changes in trade policies to the WTO. 
These internal transparency requirements are supplemented and facilitated by periodic country-specific reports (trade policy reviews) through the Trade Policy Review Mechanism (TPRM).[47] The WTO system tries also to improve predictability and stability, discouraging the use of quotas and other measures used to set limits on quantities of imports.[29] +Safety valves. In specific circumstances, governments are able to restrict trade. The WTO's agreements permit members to take measures to protect not only the environment but also public health, animal health and plant health.[48] +There are three types of provision in this direction: +articles allowing for the use of trade measures to attain non-economic objectives; +articles aimed at ensuring "fair competition"; members must not use environmental protection measures as a means of disguising protectionist policies.[48] +provisions permitting intervention in trade for economic reasons.[47] +Exceptions to the MFN principle also allow for preferential treatment of developing countries, regional free trade areas and customs unions.[6]:fol.93 +Organizational structure +The General Council has the following subsidiary bodies which oversee committees in different areas: +Council for Trade in Goods +There are 11 committees under the jurisdiction of the Goods Council each with a specific task. All members of the WTO participate in the committees. The Textiles Monitoring Body is separate from the other committees but still under the jurisdiction of Goods Council. The body has its own chairman and only 10 members. 
The body also has several groups relating to textiles.[49] +Council for Trade-Related Aspects of Intellectual Property Rights +Information on intellectual property in the WTO, news and official records of the activities of the TRIPS Council, and details of the WTO's work with other international organizations in the field.[50] +Council for Trade in Services +The Council for Trade in Services operates under the guidance of the General Council and is responsible for overseeing the functioning of the General Agreement on Trade in Services (GATS). It is open to all WTO members, and can create subsidiary bodies as required.[51] +Trade Negotiations Committee +The Trade Negotiations Committee (TNC) is the committee that deals with the current trade talks round. The chair is WTO's director-general. As of June 2012 the committee was tasked with the Doha Development Round.[52] +The Service Council has three subsidiary bodies: financial services, domestic regulations, GATS rules and specific commitments.[49] The council has several different committees, working groups, and working parties.[53] There are committees on the following: Trade and Environment; Trade and Development (Subcommittee on Least-Developed Countries); Regional Trade Agreements; Balance of Payments Restrictions; and Budget, Finance and Administration. There are working parties on the following: Accession. There are working groups on the following: Trade, debt and finance; and Trade and technology transfer. 
+Decision-making +The WTO describes itself as "a rules-based, member-driven organization — all decisions are made by the member governments, and the rules are the outcome of negotiations among members".[54] The WTO Agreement foresees votes where consensus cannot be reached, but the practice of consensus dominates the process of decision-making.[55] +Richard Harold Steinberg (2002) argues that although the WTO's consensus governance model provides law-based initial bargaining, trading rounds close through power-based bargaining favouring Europe and the U.S., and may not lead to Pareto improvement.[56] +Dispute settlement +Main article: Dispute settlement in the WTO +In 1994, the WTO members agreed on the Understanding on Rules and Procedures Governing the Settlement of Disputes (DSU) annexed to the "Final Act" signed in Marrakesh in 1994.[57] Dispute settlement is regarded by the WTO as the central pillar of the multilateral trading system, and as a "unique contribution to the stability of the global economy".[58] WTO members have agreed that, if they believe fellow-members are violating trade rules, they will use the multilateral system of settling disputes instead of taking action unilaterally.[59] +The operation of the WTO dispute settlement process involves the DSB panels, the Appellate Body, the WTO Secretariat, arbitrators, independent experts and several specialized institutions.[60] Bodies involved in the dispute settlement process, World Trade Organization. +Accession and membership +Main article: World Trade Organization accession and membership +The process of becoming a WTO member is unique to each applicant country, and the terms of accession are dependent upon the country's stage of economic development and current trade regime.[61] The process takes about five years, on average, but it can last more if the country is less than fully committed to the process or if political issues interfere. 
The shortest accession negotiation was that of the Kyrgyz Republic, while the longest was that of Russia, which, having first applied to join GATT in 1993, was approved for membership in December 2011 and became a WTO member on 22 August 2012.[62] The second longest was that of Vanuatu, whose Working Party on the Accession of Vanuatu was established on 11 July 1995. After a final meeting of the Working Party in October 2001, Vanuatu requested more time to consider its accession terms. In 2008, it indicated its interest to resume and conclude its WTO accession. The Working Party on the Accession of Vanuatu was reconvened informally on 4 April 2011 to discuss Vanuatu's future WTO membership. The re-convened Working Party completed its mandate on 2 May 2011. The General Council formally approved the Accession Package of Vanuatu on 26 October 2011. On 24 August 2012, the WTO welcomed Vanuatu as its 157th member.[63] An offer of accession is only given once consensus is reached among interested parties.[64] +Accession process + +WTO accession progress: + Members (including dual-representation with the European Union) + Draft Working Party Report or Factual Summary adopted + Goods and/or Services offers submitted + Memorandum on Foreign Trade Regime (FTR) submitted + Observer, negotiations to start later or no Memorandum on FTR submitted + Frozen procedures or no negotiations in the last 3 years + No official interaction with the WTO +A country wishing to accede to the WTO submits an application to the General Council, and has to describe all aspects of its trade and economic policies that have a bearing on WTO agreements.[65] The application is submitted to the WTO in a memorandum which is examined by a working party open to all interested WTO Members.[66] +After all necessary background information has been acquired, the working party focuses on issues of discrepancy between the WTO rules and the applicant's international and domestic trade policies and laws. 
The working party determines the terms and conditions of entry into the WTO for the applicant nation, and may consider transitional periods to allow countries some leeway in complying with the WTO rules.[61] +The final phase of accession involves bilateral negotiations between the applicant nation and other working party members regarding the concessions and commitments on tariff levels and market access for goods and services. The new member's commitments are to apply equally to all WTO members under normal non-discrimination rules, even though they are negotiated bilaterally.[65] +When the bilateral talks conclude, the working party sends to the general council or ministerial conference an accession package, which includes a summary of all the working party meetings, the Protocol of Accession (a draft membership treaty), and lists ("schedules") of the member-to-be's commitments. Once the general council or ministerial conference approves of the terms of accession, the applicant's parliament must ratify the Protocol of Accession before it can become a member.[67] Some countries may have faced tougher and a much longer accession process due to challenges during negotiations with other WTO members, such as Vietnam, whose negotiations took more than 11 years before it became official member in January 2007.[68] +Members and observers +The WTO has 160 members and 24 observer governments.[69] In addition to states, the European Union is a member. WTO members do not have to be full sovereign nation-members. Instead, they must be a customs territory with full autonomy in the conduct of their external commercial relations. Thus Hong Kong has been a member since 1995 (as "Hong Kong, China" since 1997) predating the People's Republic of China, which joined in 2001 after 15 years of negotiations. 
The Republic of China (Taiwan) acceded to the WTO in 2002 as "Separate Customs Territory of Taiwan, Penghu, Kinmen and Matsu" (Chinese Taipei) despite its disputed status.[70] The WTO Secretariat omits the official titles (such as Counselor, First Secretary, Second Secretary and Third Secretary) of the members of Chinese Taipei's Permanent Mission to the WTO, except for the titles of the Permanent Representative and the Deputy Permanent Representative.[71] +As of 2007, WTO member states represented 96.4% of global trade and 96.7% of global GDP.[72] Iran, followed by Algeria, are the economies with the largest GDP and trade outside the WTO, using 2005 data.[73][74] With the exception of the Holy See, observers must start accession negotiations within five years of becoming observers. A number of international intergovernmental organizations have also been granted observer status to WTO bodies.[75] 14 UN member states have no official affiliation with the WTO. +Agreements +Further information: Uruguay Round +The WTO oversees about 60 different agreements which have the status of international legal texts. Member countries must sign and ratify all WTO agreements on accession.[76] A discussion of some of the most important agreements follows. The Agreement on Agriculture came into effect with the establishment of the WTO at the beginning of 1995. The AoA has three central concepts, or "pillars": domestic support, market access and export subsidies. The General Agreement on Trade in Services was created to extend the multilateral trading system to service sector, in the same way as the General Agreement on Tariffs and Trade (GATT) provided such a system for merchandise trade. The agreement entered into force in January 1995. The Agreement on Trade-Related Aspects of Intellectual Property Rights sets down minimum standards for many forms of intellectual property (IP) regulation. 
It was negotiated at the end of the Uruguay Round of the General Agreement on Tariffs and Trade (GATT) in 1994.[77] +The Agreement on the Application of Sanitary and Phytosanitary Measures—also known as the SPS Agreement—was negotiated during the Uruguay Round of GATT, and entered into force with the establishment of the WTO at the beginning of 1995. Under the SPS agreement, the WTO sets constraints on members' policies relating to food safety (bacterial contaminants, pesticides, inspection and labelling) as well as animal and plant health (imported pests and diseases). The Agreement on Technical Barriers to Trade is an international treaty of the World Trade Organization. It was negotiated during the Uruguay Round of the General Agreement on Tariffs and Trade, and entered into force with the establishment of the WTO at the end of 1994. The object ensures that technical negotiations and standards, as well as testing and certification procedures, do not create unnecessary obstacles to trade".[78] The Agreement on Customs Valuation, formally known as the Agreement on Implementation of Article VII of GATT, prescribes methods of customs valuation that Members are to follow. Chiefly, it adopts the "transaction value" approach. +In December 2013, the biggest agreement within the WTO was signed and known as the Bali Package.[79] +Office of director-general + +The headquarters of the World Trade Organization, in Geneva, Switzerland. +The procedures for the appointment of the WTO director-general were published in January 2003.[80] Additionally, there are four deputy directors-general. 
As of 1 October 2013, under director-general Roberto Azevêdo, the four deputy directors-general are Yi Xiaozhun of China, Karl-Ernst Brauner of Germany, Yonov Frederick Agah of Nigeria and David Shark of the United States.[81] +List of directors-general +Source: Official website[82] +Brazil Roberto Azevedo, 2013– +France Pascal Lamy, 2005–2013 +Thailand Supachai Panitchpakdi, 2002–2005 +New Zealand Mike Moore, 1999–2002 +Italy Renato Ruggiero, 1995–1999 +Republic of Ireland Peter Sutherland, 1995 +(Heads of the precursor organization, GATT): +Republic of Ireland Peter Sutherland, 1993–1995 +Switzerland Arthur Dunkel, 1980–1993 +Switzerland Olivier Long, 1968–1980 +United Kingdom Eric Wyndham White, 1948–1968 +See also +Agreement on Trade Related Investment Measures (TRIMS) +Agreement on Trade-Related Aspects of Intellectual Property Rights (TRIPS) +Aide-mémoire non-paper +Anti-globalization movement +Criticism of the World Trade Organization +Foreign Affiliate Trade Statistics +Global administrative law +Globality +Information Technology Agreement +International Trade Centre +Labour Standards in the World Trade Organisation +List of member states of the World Trade Organization +North American Free Trade Agreement (NAFTA) +Subsidy +Swiss Formula +Trade bloc +Washington Consensus +World Trade Report +World Trade Organization Ministerial Conference of 1999 protest activity +China and the World Trade Organization +Notes and references +Jump up ^ Members and Observers at WTO official website +Jump up ^ Languages, Documentation and Information Management Division at WTO official site +Jump up ^ "WTO Secretariat budget for 2011". WTO official site. Retrieved 25 August 2008. +Jump up ^ Understanding the WTO: What We Stand For_ Fact File +Jump up ^ World Trade Organization - UNDERSTANDING THE WTO: BASICS +^ Jump up to: a b Understanding the WTO Handbook at WTO official website. (Note that the document's printed folio numbers do not match the pdf page numbers.) 
+Jump up ^ Malanczuk, P. (1999). "International Organisations and Space Law: World Trade Organization". Encyclopaedia Britannica 442. p. 305. Bibcode:1999ESASP.442..305M. +Jump up ^ Understanding the WTO: The Doha Agenda +Jump up ^ The Challenges to the World Trade Organization: It’s All About Legitimacy THE BROOKINGS INSTITUTION, Policy Paper 2011-04 +Jump up ^ GROUPS IN THE WTO Updated 1 July 2013 +Jump up ^ Bourcier, Nicolas (21 May 2013). "Roberto Azevedo's WTO appointment gives Brazil a seat at the top table". Guardian Weekly. Retrieved 2 September 2013. +Jump up ^ "Roberto Azevêdo takes over". WTO official website. 1 September 2013. Retrieved 2 September 2013. +Jump up ^ "Overview of the WTO Secretariat". WTO official website. Retrieved 2 September 2013. +Jump up ^ Ninth WTO Ministerial Conference | WTO - MC9 +Jump up ^ BBC News - WTO agrees global trade deal worth $1tn +Jump up ^ A.E. Eckes Jr., US Trade History, 73 +* A. Smithies, Reflections on the Work of Keynes, 578–601 +* N. Warren, Internet and Globalization, 193 +Jump up ^ P. van den Bossche, The Law and Policy of the World Trade Organization, 80 +Jump up ^ Palmeter-Mavroidis, Dispute Settlement, 2 +Jump up ^ Fergusson, Ian F. (9 May 2007). "The World Trade Organization: Background and Issues" (PDF). Congressional Research Service. p. 4. Retrieved 15 August 2008. +Jump up ^ It was contemplated that the GATT would be applied for several years until the ITO came into force. However, since the ITO was never brought into being, the GATT gradually became the focus for international governmental cooperation on trade matters with economist Nicholas Halford overseeing the implementation of GATT in members policies. (P. van den Bossche, The Law and Policy of the World Trade Organization, 81; J.H. Jackson, Managing the Trading System, 134). +^ Jump up to: a b The GATT Years: from Havana to Marrakesh, WTO official site +Jump up ^ Footer, M. E. Analysis of the World Trade Organization, 17 +Jump up ^ B.S. 
Klapper, With a "Short Window" +Jump up ^ Lula, Time to Get Serious about Agricultural Subsidies +^ Jump up to: a b c P. Gallagher, The First Ten Years of the WTO, 4 +^ Jump up to: a b The Uruguay Round, WTO official site +Jump up ^ "Legal texts – Marrakesh agreement". WTO. Retrieved 30 May 2010. +Jump up ^ Overview: a Navigational Guide, WTO official site. For the complete list of "The Uruguay Round Agreements", see WTO legal texts, WTO official site, and Uruguay Round Agreements, Understandings, Decisions and Declarations, WorldTradeLaw.net +^ Jump up to: a b c d Principles of the Trading System, WTO official site +Jump up ^ "Five Years of China WTO Membership. EU and US Perspectives about China's Compliance with Transparency Commitments and the Transitional Review Mechanism". Papers.ssrn.com. Retrieved 30 May 2010. +Jump up ^ WTO to hold 7th Ministerial Conference on 30 November-2 December 2009 WTO official website +Jump up ^ "In the twilight of Doha". The Economist (The Economist): 65. 27 July 2006. +^ Jump up to: a b European Commission The Doha Round +Jump up ^ Fergusson, Ian F. (18 January 2008). "World Trade Organization Negotiations: The Doha Development Agenda" (PDF). Congressional Research Service. Retrieved 13 April 2012. Page 9 (folio CRS-6) +Jump up ^ WTO trade negotiations: Doha Development Agenda Europa press release, 31 October 2011 +Jump up ^ "Members start negotiating proposal on poor countries’ food stockholding". WTO official website. 27 March 2013. Retrieved 2 September 2013. +Jump up ^ a)The GATT years: from Havana to Marrakesh, World Trade Organization +b)Timeline: World Trade Organization – A chronology of key events, BBC News +c)Brakman-Garretsen-Marrewijk-Witteloostuijn, Nations and Firms in the Global Economy, Chapter 10: Trade and Capital Restriction +Jump up ^ Functions of the WTO, IISD +^ Jump up to: a b Main Functions, WTO official site +^ Jump up to: a b A Bredimas, International Economic Law, II, 17 +^ Jump up to: a b C. 
Deere, Decision-making in the WTO: Medieval or Up-to-Date? +Jump up ^ WTO Assistance for Developing Countries[dead link], WTO official site +Jump up ^ Sinha, Aparijita. [1]. "What are the functions and objectives of the WTO?". Retrieved on 13 April, 2014. +Jump up ^ Economic research and analysis, WTO official site +^ Jump up to: a b c B. Hoekman, The WTO: Functions and Basic Principles, 42 +^ Jump up to: a b B. Hoekman, The WTO: Functions and Basic Principles, 43 +^ Jump up to: a b B. Hoekman, The WTO: Functions and Basic Principles, 44 +^ Jump up to: a b Understanding the WTO: What we stand for +^ Jump up to: a b "Fourth level: down to the nitty-gritty". WTO official site. Retrieved 18 August 2008. +Jump up ^ "Intellectual property – overview of TRIPS Agreement". Wto.org. 15 April 1994. Retrieved 30 May 2010. +Jump up ^ "The Services Council, its Committees and other subsidiary bodies". WTO official site. Retrieved 14 August 2008. +Jump up ^ "The Trade Negotiations Committee". WTO official site. Retrieved 14 August 2008. +Jump up ^ "WTO organization chart". WTO official site. Retrieved 14 August 2008. +Jump up ^ Decision-making at WTO official site +Jump up ^ Decision-Making in the World Trade Organization Abstract from Journal of International Economic Law at Oxford Journals +Jump up ^ Steinberg, Richard H. "In the Shadow of Law or Power? Consensus-based Bargaining and Outcomes in the GATT/WTO." International Organization. Spring 2002. pp. 339–374. +Jump up ^ Stewart-Dawyer, The WTO Dispute Settlement System, 7 +Jump up ^ S. Panitchpakdi, The WTO at ten, 8. +Jump up ^ Settling Disputes:a Unique Contribution, WTO official site +Jump up ^ "Disputes – Dispute Settlement CBT – WTO Bodies involved in the dispute settlement process – The Dispute Settlement Body (DSB) – Page 1". WTO. 25 July 1996. Retrieved 21 May 2011. 
+^ Jump up to: a b Accessions Summary, Center for International Development +Jump up ^ Ministerial Conference approves Russia's WTO membership WTO News Item, 16 December 2011 +Jump up ^ Accession status: Vanuatu. WTO. Retrieved on 12 July 2013. +Jump up ^ C. Michalopoulos, WTO Accession, 64 +^ Jump up to: a b Membership, Alliances and Bureaucracy, WTO official site +Jump up ^ C. Michalopoulos, WTO Accession, 62–63 +Jump up ^ How to Become a Member of the WTO, WTO official site +Jump up ^ Napier, Nancy K.; Vuong, Quan Hoang (2013). What we see, why we worry, why we hope: Vietnam going forward. Boise, ID, USA: Boise State University CCI Press. p. 140. ISBN 978-0985530587. +Jump up ^ "Members and Observers". World Trade Organization. 24 August 2012. +Jump up ^ Jackson, J. H. Sovereignty, 109 +Jump up ^ ROC Government Publication +Jump up ^ "Accession in perspective". World Trade Organization. Retrieved 22 December 2013. +Jump up ^ "ANNEX 1. STATISTICAL SURVEY". World Trade Organization. 2005. Retrieved 22 December 2013. +Jump up ^ Arjomandy, Danial (21 November 2013). "Iranian Membership in the World Trade Organization: An Unclear Future". Iranian Studies. Retrieved 22 December 2013. +Jump up ^ International intergovernmental organizations granted observer status to WTO bodies at WTO official website +Jump up ^ "Legal texts – the WTO agreements". WTO. Retrieved 30 May 2010. +Jump up ^ Understanding the WTO - Intellectual property: protection and enforcement. WTO. Retrieved on 29 July 2013. +Jump up ^ "A Summary of the Final Act of the Uruguay Round". Wto.org. Retrieved 30 May 2010. +Jump up ^ Zarocostas, John (7 December 2013). "Global Trade Deal Reached". WWD. Retrieved 8 December 2013. +Jump up ^ "WT/L/509". WTO. Retrieved 18 February 2013. +Jump up ^ "Director-General Elect Azevêdo announces his four Deputy Directors-General". 17 August 2013. Retrieved 2 September 2013. +Jump up ^ "Previous GATT and WTO Directors-General". WTO. Retrieved 21 May 2011. 
+External links + Wikiquote has quotations related to: World Trade Organization + Wikimedia Commons has media related to World Trade Organization. +Official pages +Official WTO homepage +WTO 10th Anniversary PDF (1.40 MB) — Highlights of the first decade, Annual Report 2005 pages 116–166 +Glossary of terms—a guide to 'WTO-speak' +International Trade Centre — joint UN/WTO agency +Government pages on the WTO +European Union position on the WTO +Media pages on the WTO +World Trade Organization +BBC News — Profile: WTO +Guardian Unlimited — Special Report: The World Trade Organisation ongoing coverage +Non-governmental organization pages on the WTO +Gatt.org — Parody of official WTO page by The Yes Men +Public Citizen +Transnational Institute: Beyond the WTO +[show] v t e +World Trade Organization +[show] v t e +International trade +[show] v t e +International organizations +Authority control +WorldCat VIAF: 149937768 LCCN: no94018277 ISNI: 0000 0001 2296 2735 GND: 2145784-0 SELIBR: 135910 ULAN: 500292980 NDL: 00577475 NKC: kn20010711437 BNE: XX4574846 +Categories: World Trade OrganizationInternational tradeInternational trade organizationsOrganisations based in GenevaOrganizations established in 1995World government +Navigation menu +Create accountLog inArticleTalkReadView sourceView history + +Main page +Contents +Featured content +Current events +Random article +Donate to Wikipedia +Wikimedia Shop +Interaction +Help +About Wikipedia +Community portal +Recent changes +Contact page +Tools +What links here +Related changes +Upload file +Special pages +Permanent link +Page information +Wikidata item +Cite this page +Print/export +Create a book +Download as PDF +Printable version +Languages +Afrikaans +العربية +Aragonés +Asturianu +AzÉ™rbaycanca +বাংলা +Bân-lâm-gú +БеларуÑÐºÐ°Ñ +БеларуÑÐºÐ°Ñ (тарашкевіца)‎ +БългарÑки +Bosanski +Brezhoneg +Català +ÄŒeÅ¡tina +Cymraeg +Dansk +Deutsch +Eesti +Ελληνικά +Español +Esperanto +Euskara +Ùارسی +Fiji Hindi +Føroyskt +Français +Frysk 
+Galego +ગà«àªœàª°àª¾àª¤à«€ +客家語/Hak-kâ-ngî +한국어 +Õ€Õ¡ÕµÕ¥Ö€Õ¥Õ¶ +हिनà¥à¤¦à¥€ +Hrvatski +Ido +Ilokano +Bahasa Indonesia +Ãslenska +Italiano +עברית +Basa Jawa +ಕನà³à²¨à²¡ +Къарачай-малкъар +ქáƒáƒ áƒ—ული +Қазақша +Kiswahili +Latina +LatvieÅ¡u +Lietuvių +Magyar +МакедонÑки +മലയാളം +मराठी +مصرى +Bahasa Melayu +Baso Minangkabau +မြန်မာဘာသာ +Nederlands +नेपाली +नेपाल भाषा +日本語 +Ðохчийн +Norsk bokmÃ¥l +Norsk nynorsk +Occitan +OÊ»zbekcha +ਪੰਜਾਬੀ +پنجابی +پښتو +ភាសាážáŸ’មែរ +Piemontèis +Polski +Português +Română +РуÑиньÑкый +РуÑÑкий +Саха тыла +Shqip +සිංහල +Simple English +SlovenÄina +SlovenÅ¡Äina +کوردی +СрпÑки / srpski +Srpskohrvatski / ÑрпÑкохрватÑки +Suomi +Svenska +Tagalog +தமிழ௠+Татарча/tatarça +తెలà±à°—à± +ไทย +Тоҷикӣ +Türkçe +Türkmençe +УкраїнÑька +اردو +ئۇيغۇرچە / Uyghurche +Tiếng Việt +Winaray +ייִדיש +Yorùbá +粵語 +ŽemaitÄ—Å¡ka +中文 +Edit links +This page was last modified on 22 November 2014 at 14:33. +Text is available under the Creative Commons Attribution-ShareAlike License; additional terms may apply. By using this site, you agree to the Terms of Use and Privacy Policy. Wikipedia® is a registered trademark of the Wikimedia Foundation, Inc., a non-profit organization. +Privacy policyAbout WikipediaDisclaimersContact WikipediaDevelopersMobile viewWikimedia Foundation Powered by MediaWiki \ No newline at end of file diff --git a/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_datetime.go b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_datetime.go new file mode 100644 index 0000000..c45442e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_datetime.go @@ -0,0 +1,163 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package facet + +import ( + "reflect" + "sort" + "time" + + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDateTimeFacetBuilder int +var reflectStaticSizedateTimeRange int + +func init() { + var dtfb DateTimeFacetBuilder + reflectStaticSizeDateTimeFacetBuilder = int(reflect.TypeOf(dtfb).Size()) + var dtr dateTimeRange + reflectStaticSizedateTimeRange = int(reflect.TypeOf(dtr).Size()) +} + +type dateTimeRange struct { + start time.Time + end time.Time +} + +type DateTimeFacetBuilder struct { + size int + field string + termsCount map[string]int + total int + missing int + ranges map[string]*dateTimeRange + sawValue bool +} + +func NewDateTimeFacetBuilder(field string, size int) *DateTimeFacetBuilder { + return &DateTimeFacetBuilder{ + size: size, + field: field, + termsCount: make(map[string]int), + ranges: make(map[string]*dateTimeRange, 0), + } +} + +func (fb *DateTimeFacetBuilder) Size() int { + sizeInBytes := reflectStaticSizeDateTimeFacetBuilder + size.SizeOfPtr + + len(fb.field) + + for k, _ := range fb.termsCount { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfInt + } + + for k, _ := range fb.ranges { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfPtr + reflectStaticSizedateTimeRange + } + + return sizeInBytes +} + +func (fb *DateTimeFacetBuilder) AddRange(name string, start, end time.Time) { + r := dateTimeRange{ + start: start, + end: end, + } + fb.ranges[name] = &r +} + +func (fb *DateTimeFacetBuilder) Field() string 
{ + return fb.field +} + +func (fb *DateTimeFacetBuilder) UpdateVisitor(field string, term []byte) { + if field == fb.field { + fb.sawValue = true + // only consider the values which are shifted 0 + prefixCoded := numeric.PrefixCoded(term) + shift, err := prefixCoded.Shift() + if err == nil && shift == 0 { + i64, err := prefixCoded.Int64() + if err == nil { + t := time.Unix(0, i64) + + // look at each of the ranges for a match + for rangeName, r := range fb.ranges { + if (r.start.IsZero() || t.After(r.start) || t.Equal(r.start)) && (r.end.IsZero() || t.Before(r.end)) { + fb.termsCount[rangeName] = fb.termsCount[rangeName] + 1 + fb.total++ + } + } + } + } + } +} + +func (fb *DateTimeFacetBuilder) StartDoc() { + fb.sawValue = false +} + +func (fb *DateTimeFacetBuilder) EndDoc() { + if !fb.sawValue { + fb.missing++ + } +} + +func (fb *DateTimeFacetBuilder) Result() *search.FacetResult { + rv := search.FacetResult{ + Field: fb.field, + Total: fb.total, + Missing: fb.missing, + } + + rv.DateRanges = make([]*search.DateRangeFacet, 0, len(fb.termsCount)) + + for term, count := range fb.termsCount { + dateRange := fb.ranges[term] + tf := &search.DateRangeFacet{ + Name: term, + Count: count, + } + if !dateRange.start.IsZero() { + start := dateRange.start.Format(time.RFC3339Nano) + tf.Start = &start + } + if !dateRange.end.IsZero() { + end := dateRange.end.Format(time.RFC3339Nano) + tf.End = &end + } + rv.DateRanges = append(rv.DateRanges, tf) + } + + sort.Sort(rv.DateRanges) + + // we now have the list of the top N facets + if fb.size < len(rv.DateRanges) { + rv.DateRanges = rv.DateRanges[:fb.size] + } + + notOther := 0 + for _, nr := range rv.DateRanges { + notOther += nr.Count + } + rv.Other = fb.total - notOther + + return &rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_numeric.go b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_numeric.go new file mode 100644 index 0000000..c1692b5 --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_numeric.go @@ -0,0 +1,157 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package facet + +import ( + "reflect" + "sort" + + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeNumericFacetBuilder int +var reflectStaticSizenumericRange int + +func init() { + var nfb NumericFacetBuilder + reflectStaticSizeNumericFacetBuilder = int(reflect.TypeOf(nfb).Size()) + var nr numericRange + reflectStaticSizenumericRange = int(reflect.TypeOf(nr).Size()) +} + +type numericRange struct { + min *float64 + max *float64 +} + +type NumericFacetBuilder struct { + size int + field string + termsCount map[string]int + total int + missing int + ranges map[string]*numericRange + sawValue bool +} + +func NewNumericFacetBuilder(field string, size int) *NumericFacetBuilder { + return &NumericFacetBuilder{ + size: size, + field: field, + termsCount: make(map[string]int), + ranges: make(map[string]*numericRange, 0), + } +} + +func (fb *NumericFacetBuilder) Size() int { + sizeInBytes := reflectStaticSizeNumericFacetBuilder + size.SizeOfPtr + + len(fb.field) + + for k, _ := range fb.termsCount { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfInt + } + + for k, _ := range fb.ranges { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfPtr + reflectStaticSizenumericRange 
+ } + + return sizeInBytes +} + +func (fb *NumericFacetBuilder) AddRange(name string, min, max *float64) { + r := numericRange{ + min: min, + max: max, + } + fb.ranges[name] = &r +} + +func (fb *NumericFacetBuilder) Field() string { + return fb.field +} + +func (fb *NumericFacetBuilder) UpdateVisitor(field string, term []byte) { + if field == fb.field { + fb.sawValue = true + // only consider the values which are shifted 0 + prefixCoded := numeric.PrefixCoded(term) + shift, err := prefixCoded.Shift() + if err == nil && shift == 0 { + i64, err := prefixCoded.Int64() + if err == nil { + f64 := numeric.Int64ToFloat64(i64) + + // look at each of the ranges for a match + for rangeName, r := range fb.ranges { + if (r.min == nil || f64 >= *r.min) && (r.max == nil || f64 < *r.max) { + fb.termsCount[rangeName] = fb.termsCount[rangeName] + 1 + fb.total++ + } + } + } + } + } +} + +func (fb *NumericFacetBuilder) StartDoc() { + fb.sawValue = false +} + +func (fb *NumericFacetBuilder) EndDoc() { + if !fb.sawValue { + fb.missing++ + } +} + +func (fb *NumericFacetBuilder) Result() *search.FacetResult { + rv := search.FacetResult{ + Field: fb.field, + Total: fb.total, + Missing: fb.missing, + } + + rv.NumericRanges = make([]*search.NumericRangeFacet, 0, len(fb.termsCount)) + + for term, count := range fb.termsCount { + numericRange := fb.ranges[term] + tf := &search.NumericRangeFacet{ + Name: term, + Count: count, + Min: numericRange.min, + Max: numericRange.max, + } + + rv.NumericRanges = append(rv.NumericRanges, tf) + } + + sort.Sort(rv.NumericRanges) + + // we now have the list of the top N facets + if fb.size < len(rv.NumericRanges) { + rv.NumericRanges = rv.NumericRanges[:fb.size] + } + + notOther := 0 + for _, nr := range rv.NumericRanges { + notOther += nr.Count + } + rv.Other = fb.total - notOther + + return &rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_terms.go b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_terms.go 
new file mode 100644 index 0000000..5b5901e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/facet/facet_builder_terms.go @@ -0,0 +1,117 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package facet + +import ( + "reflect" + "sort" + + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTermsFacetBuilder int + +func init() { + var tfb TermsFacetBuilder + reflectStaticSizeTermsFacetBuilder = int(reflect.TypeOf(tfb).Size()) +} + +type TermsFacetBuilder struct { + size int + field string + termsCount map[string]int + total int + missing int + sawValue bool +} + +func NewTermsFacetBuilder(field string, size int) *TermsFacetBuilder { + return &TermsFacetBuilder{ + size: size, + field: field, + termsCount: make(map[string]int), + } +} + +func (fb *TermsFacetBuilder) Size() int { + sizeInBytes := reflectStaticSizeTermsFacetBuilder + size.SizeOfPtr + + len(fb.field) + + for k, _ := range fb.termsCount { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfInt + } + + return sizeInBytes +} + +func (fb *TermsFacetBuilder) Field() string { + return fb.field +} + +func (fb *TermsFacetBuilder) UpdateVisitor(field string, term []byte) { + if field == fb.field { + fb.sawValue = true + fb.termsCount[string(term)] = fb.termsCount[string(term)] + 1 + fb.total++ + } +} + +func (fb *TermsFacetBuilder) StartDoc() { + fb.sawValue = false +} + +func (fb 
*TermsFacetBuilder) EndDoc() { + if !fb.sawValue { + fb.missing++ + } +} + +func (fb *TermsFacetBuilder) Result() *search.FacetResult { + rv := search.FacetResult{ + Field: fb.field, + Total: fb.total, + Missing: fb.missing, + } + + rv.Terms = make([]*search.TermFacet, 0, len(fb.termsCount)) + + for term, count := range fb.termsCount { + tf := &search.TermFacet{ + Term: term, + Count: count, + } + + rv.Terms = append(rv.Terms, tf) + } + + sort.Sort(rv.Terms) + + // we now have the list of the top N facets + trimTopN := fb.size + if trimTopN > len(rv.Terms) { + trimTopN = len(rv.Terms) + } + rv.Terms = rv.Terms[:trimTopN] + + notOther := 0 + for _, tf := range rv.Terms { + notOther += tf.Count + } + rv.Other = fb.total - notOther + + return &rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/facets_builder.go b/vendor/github.com/blevesearch/bleve/search/facets_builder.go new file mode 100644 index 0000000..7fc0bed --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/facets_builder.go @@ -0,0 +1,341 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package search + +import ( + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeFacetsBuilder int +var reflectStaticSizeFacetResult int +var reflectStaticSizeTermFacet int +var reflectStaticSizeNumericRangeFacet int +var reflectStaticSizeDateRangeFacet int + +func init() { + var fb FacetsBuilder + reflectStaticSizeFacetsBuilder = int(reflect.TypeOf(fb).Size()) + var fr FacetResult + reflectStaticSizeFacetResult = int(reflect.TypeOf(fr).Size()) + var tf TermFacet + reflectStaticSizeTermFacet = int(reflect.TypeOf(tf).Size()) + var nrf NumericRangeFacet + reflectStaticSizeNumericRangeFacet = int(reflect.TypeOf(nrf).Size()) + var drf DateRangeFacet + reflectStaticSizeDateRangeFacet = int(reflect.TypeOf(drf).Size()) +} + +type FacetBuilder interface { + StartDoc() + UpdateVisitor(field string, term []byte) + EndDoc() + + Result() *FacetResult + Field() string + + Size() int +} + +type FacetsBuilder struct { + indexReader index.IndexReader + facetNames []string + facets []FacetBuilder + fields []string +} + +func NewFacetsBuilder(indexReader index.IndexReader) *FacetsBuilder { + return &FacetsBuilder{ + indexReader: indexReader, + } +} + +func (fb *FacetsBuilder) Size() int { + sizeInBytes := reflectStaticSizeFacetsBuilder + size.SizeOfPtr + + for k, v := range fb.facets { + sizeInBytes += size.SizeOfString + v.Size() + len(fb.facetNames[k]) + } + + for _, entry := range fb.fields { + sizeInBytes += size.SizeOfString + len(entry) + } + + return sizeInBytes +} + +func (fb *FacetsBuilder) Add(name string, facetBuilder FacetBuilder) { + fb.facetNames = append(fb.facetNames, name) + fb.facets = append(fb.facets, facetBuilder) + fb.fields = append(fb.fields, facetBuilder.Field()) +} + +func (fb *FacetsBuilder) RequiredFields() []string { + return fb.fields +} + +func (fb *FacetsBuilder) StartDoc() { + for _, facetBuilder := range fb.facets { + facetBuilder.StartDoc() + } +} + +func (fb 
*FacetsBuilder) EndDoc() { + for _, facetBuilder := range fb.facets { + facetBuilder.EndDoc() + } +} + +func (fb *FacetsBuilder) UpdateVisitor(field string, term []byte) { + for _, facetBuilder := range fb.facets { + facetBuilder.UpdateVisitor(field, term) + } +} + +type TermFacet struct { + Term string `json:"term"` + Count int `json:"count"` +} + +type TermFacets []*TermFacet + +func (tf TermFacets) Add(termFacet *TermFacet) TermFacets { + for _, existingTerm := range tf { + if termFacet.Term == existingTerm.Term { + existingTerm.Count += termFacet.Count + return tf + } + } + // if we got here it wasn't already in the existing terms + tf = append(tf, termFacet) + return tf +} + +func (tf TermFacets) Len() int { return len(tf) } +func (tf TermFacets) Swap(i, j int) { tf[i], tf[j] = tf[j], tf[i] } +func (tf TermFacets) Less(i, j int) bool { + if tf[i].Count == tf[j].Count { + return tf[i].Term < tf[j].Term + } + return tf[i].Count > tf[j].Count +} + +type NumericRangeFacet struct { + Name string `json:"name"` + Min *float64 `json:"min,omitempty"` + Max *float64 `json:"max,omitempty"` + Count int `json:"count"` +} + +func (nrf *NumericRangeFacet) Same(other *NumericRangeFacet) bool { + if nrf.Min == nil && other.Min != nil { + return false + } + if nrf.Min != nil && other.Min == nil { + return false + } + if nrf.Min != nil && other.Min != nil && *nrf.Min != *other.Min { + return false + } + if nrf.Max == nil && other.Max != nil { + return false + } + if nrf.Max != nil && other.Max == nil { + return false + } + if nrf.Max != nil && other.Max != nil && *nrf.Max != *other.Max { + return false + } + + return true +} + +type NumericRangeFacets []*NumericRangeFacet + +func (nrf NumericRangeFacets) Add(numericRangeFacet *NumericRangeFacet) NumericRangeFacets { + for _, existingNr := range nrf { + if numericRangeFacet.Same(existingNr) { + existingNr.Count += numericRangeFacet.Count + return nrf + } + } + // if we got here it wasn't already in the existing terms + nrf = 
append(nrf, numericRangeFacet) + return nrf +} + +func (nrf NumericRangeFacets) Len() int { return len(nrf) } +func (nrf NumericRangeFacets) Swap(i, j int) { nrf[i], nrf[j] = nrf[j], nrf[i] } +func (nrf NumericRangeFacets) Less(i, j int) bool { + if nrf[i].Count == nrf[j].Count { + return nrf[i].Name < nrf[j].Name + } + return nrf[i].Count > nrf[j].Count +} + +type DateRangeFacet struct { + Name string `json:"name"` + Start *string `json:"start,omitempty"` + End *string `json:"end,omitempty"` + Count int `json:"count"` +} + +func (drf *DateRangeFacet) Same(other *DateRangeFacet) bool { + if drf.Start == nil && other.Start != nil { + return false + } + if drf.Start != nil && other.Start == nil { + return false + } + if drf.Start != nil && other.Start != nil && *drf.Start != *other.Start { + return false + } + if drf.End == nil && other.End != nil { + return false + } + if drf.End != nil && other.End == nil { + return false + } + if drf.End != nil && other.End != nil && *drf.End != *other.End { + return false + } + + return true +} + +type DateRangeFacets []*DateRangeFacet + +func (drf DateRangeFacets) Add(dateRangeFacet *DateRangeFacet) DateRangeFacets { + for _, existingDr := range drf { + if dateRangeFacet.Same(existingDr) { + existingDr.Count += dateRangeFacet.Count + return drf + } + } + // if we got here it wasn't already in the existing terms + drf = append(drf, dateRangeFacet) + return drf +} + +func (drf DateRangeFacets) Len() int { return len(drf) } +func (drf DateRangeFacets) Swap(i, j int) { drf[i], drf[j] = drf[j], drf[i] } +func (drf DateRangeFacets) Less(i, j int) bool { + if drf[i].Count == drf[j].Count { + return drf[i].Name < drf[j].Name + } + return drf[i].Count > drf[j].Count +} + +type FacetResult struct { + Field string `json:"field"` + Total int `json:"total"` + Missing int `json:"missing"` + Other int `json:"other"` + Terms TermFacets `json:"terms,omitempty"` + NumericRanges NumericRangeFacets `json:"numeric_ranges,omitempty"` + DateRanges 
DateRangeFacets `json:"date_ranges,omitempty"` +} + +func (fr *FacetResult) Size() int { + return reflectStaticSizeFacetResult + size.SizeOfPtr + + len(fr.Field) + + len(fr.Terms)*(reflectStaticSizeTermFacet+size.SizeOfPtr) + + len(fr.NumericRanges)*(reflectStaticSizeNumericRangeFacet+size.SizeOfPtr) + + len(fr.DateRanges)*(reflectStaticSizeDateRangeFacet+size.SizeOfPtr) +} + +func (fr *FacetResult) Merge(other *FacetResult) { + fr.Total += other.Total + fr.Missing += other.Missing + fr.Other += other.Other + if fr.Terms != nil && other.Terms != nil { + for _, term := range other.Terms { + fr.Terms = fr.Terms.Add(term) + } + } + if fr.NumericRanges != nil && other.NumericRanges != nil { + for _, nr := range other.NumericRanges { + fr.NumericRanges = fr.NumericRanges.Add(nr) + } + } + if fr.DateRanges != nil && other.DateRanges != nil { + for _, dr := range other.DateRanges { + fr.DateRanges = fr.DateRanges.Add(dr) + } + } +} + +func (fr *FacetResult) Fixup(size int) { + if fr.Terms != nil { + sort.Sort(fr.Terms) + if len(fr.Terms) > size { + moveToOther := fr.Terms[size:] + for _, mto := range moveToOther { + fr.Other += mto.Count + } + fr.Terms = fr.Terms[0:size] + } + } else if fr.NumericRanges != nil { + sort.Sort(fr.NumericRanges) + if len(fr.NumericRanges) > size { + moveToOther := fr.NumericRanges[size:] + for _, mto := range moveToOther { + fr.Other += mto.Count + } + fr.NumericRanges = fr.NumericRanges[0:size] + } + } else if fr.DateRanges != nil { + sort.Sort(fr.DateRanges) + if len(fr.DateRanges) > size { + moveToOther := fr.DateRanges[size:] + for _, mto := range moveToOther { + fr.Other += mto.Count + } + fr.DateRanges = fr.DateRanges[0:size] + } + } +} + +type FacetResults map[string]*FacetResult + +func (fr FacetResults) Merge(other FacetResults) { + for name, oFacetResult := range other { + facetResult, ok := fr[name] + if ok { + facetResult.Merge(oFacetResult) + } else { + fr[name] = oFacetResult + } + } +} + +func (fr FacetResults) Fixup(name 
string, size int) { + facetResult, ok := fr[name] + if ok { + facetResult.Fixup(size) + } +} + +func (fb *FacetsBuilder) Results() FacetResults { + fr := make(FacetResults) + for i, facetBuilder := range fb.facets { + facetResult := facetBuilder.Result() + fr[fb.facetNames[i]] = facetResult + } + return fr +} diff --git a/vendor/github.com/blevesearch/bleve/search/highlight/format/html/html.go b/vendor/github.com/blevesearch/bleve/search/highlight/format/html/html.go new file mode 100644 index 0000000..259a037 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/format/html/html.go @@ -0,0 +1,91 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package html + +import ( + "html" + + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search/highlight" +) + +const Name = "html" + +const defaultHTMLHighlightBefore = "" +const defaultHTMLHighlightAfter = "" + +type FragmentFormatter struct { + before string + after string +} + +func NewFragmentFormatter(before, after string) *FragmentFormatter { + return &FragmentFormatter{ + before: before, + after: after, + } +} + +func (a *FragmentFormatter) Format(f *highlight.Fragment, orderedTermLocations highlight.TermLocations) string { + rv := "" + curr := f.Start + for _, termLocation := range orderedTermLocations { + if termLocation == nil { + continue + } + // make sure the array positions match + if !termLocation.ArrayPositions.Equals(f.ArrayPositions) { + continue + } + if termLocation.Start < curr { + continue + } + if termLocation.End > f.End { + break + } + // add the stuff before this location + rv += html.EscapeString(string(f.Orig[curr:termLocation.Start])) + // start the tag + rv += a.before + // add the term itself + rv += string(f.Orig[termLocation.Start:termLocation.End]) + // end the tag + rv += a.after + // update current + curr = termLocation.End + } + // add any remaining text after the last token + rv += html.EscapeString(string(f.Orig[curr:f.End])) + + return rv +} + +func Constructor(config map[string]interface{}, cache *registry.Cache) (highlight.FragmentFormatter, error) { + before := defaultHTMLHighlightBefore + beforeVal, ok := config["before"].(string) + if ok { + before = beforeVal + } + after := defaultHTMLHighlightAfter + afterVal, ok := config["after"].(string) + if ok { + after = afterVal + } + return NewFragmentFormatter(before, after), nil +} + +func init() { + registry.RegisterFragmentFormatter(Name, Constructor) +} diff --git a/vendor/github.com/blevesearch/bleve/search/highlight/fragmenter/simple/simple.go b/vendor/github.com/blevesearch/bleve/search/highlight/fragmenter/simple/simple.go new file mode 100644 
index 0000000..9c63f7f --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/fragmenter/simple/simple.go @@ -0,0 +1,147 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package simple + +import ( + "unicode/utf8" + + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search/highlight" +) + +const Name = "simple" + +const defaultFragmentSize = 200 + +type Fragmenter struct { + fragmentSize int +} + +func NewFragmenter(fragmentSize int) *Fragmenter { + return &Fragmenter{ + fragmentSize: fragmentSize, + } +} + +func (s *Fragmenter) Fragment(orig []byte, ot highlight.TermLocations) []*highlight.Fragment { + var rv []*highlight.Fragment + maxbegin := 0 +OUTER: + for currTermIndex, termLocation := range ot { + // start with this + // it should be the highest scoring fragment with this term first + start := termLocation.Start + end := start + used := 0 + for end < len(orig) && used < s.fragmentSize { + r, size := utf8.DecodeRune(orig[end:]) + if r == utf8.RuneError { + continue OUTER // bail + } + end += size + used++ + } + + // if we still have more characters available to us + // push back towards beginning + // without cross maxbegin + for start > 0 && used < s.fragmentSize { + if start > len(orig) { + // bail if out of bounds, possibly due to token replacement + // e.g with a regexp replacement + continue OUTER + } + r, size := utf8.DecodeLastRune(orig[0:start]) + if r == 
utf8.RuneError { + continue OUTER // bail + } + if start-size >= maxbegin { + start -= size + used++ + } else { + break + } + } + + // however, we'd rather have the tokens centered more in the frag + // lets try to do that as best we can, without affecting the score + // find the end of the last term in this fragment + minend := end + for _, innerTermLocation := range ot[currTermIndex:] { + if innerTermLocation.End > end { + break + } + minend = innerTermLocation.End + } + + // find the smaller of the two rooms to move + roomToMove := utf8.RuneCount(orig[minend:end]) + roomToMoveStart := 0 + if start >= maxbegin { + roomToMoveStart = utf8.RuneCount(orig[maxbegin:start]) + } + if roomToMoveStart < roomToMove { + roomToMove = roomToMoveStart + } + + offset := roomToMove / 2 + + for offset > 0 { + r, size := utf8.DecodeLastRune(orig[0:start]) + if r == utf8.RuneError { + continue OUTER // bail + } + start -= size + + r, size = utf8.DecodeLastRune(orig[0:end]) + if r == utf8.RuneError { + continue OUTER // bail + } + end -= size + offset-- + } + + rv = append(rv, &highlight.Fragment{Orig: orig, Start: start - offset, End: end - offset}) + // set maxbegin to the end of the current term location + // so that next one won't back up to include it + maxbegin = termLocation.End + + } + if len(ot) == 0 { + // if there were no terms to highlight + // produce a single fragment from the beginning + start := 0 + end := start + s.fragmentSize + if end > len(orig) { + end = len(orig) + } + rv = append(rv, &highlight.Fragment{Orig: orig, Start: start, End: end}) + } + + return rv +} + +func Constructor(config map[string]interface{}, cache *registry.Cache) (highlight.Fragmenter, error) { + size := defaultFragmentSize + sizeVal, ok := config["size"].(float64) + if ok { + size = int(sizeVal) + } + return NewFragmenter(size), nil +} + +func init() { + registry.RegisterFragmenter(Name, Constructor) +} diff --git a/vendor/github.com/blevesearch/bleve/search/highlight/highlighter.go 
b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter.go new file mode 100644 index 0000000..8077985 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter.go @@ -0,0 +1,64 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package highlight + +import ( + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/search" +) + +type Fragment struct { + Orig []byte + ArrayPositions []uint64 + Start int + End int + Score float64 + Index int // used by heap +} + +func (f *Fragment) Overlaps(other *Fragment) bool { + if other.Start >= f.Start && other.Start < f.End { + return true + } else if f.Start >= other.Start && f.Start < other.End { + return true + } + return false +} + +type Fragmenter interface { + Fragment([]byte, TermLocations) []*Fragment +} + +type FragmentFormatter interface { + Format(f *Fragment, orderedTermLocations TermLocations) string +} + +type FragmentScorer interface { + Score(f *Fragment) float64 +} + +type Highlighter interface { + Fragmenter() Fragmenter + SetFragmenter(Fragmenter) + + FragmentFormatter() FragmentFormatter + SetFragmentFormatter(FragmentFormatter) + + Separator() string + SetSeparator(string) + + BestFragmentInField(*search.DocumentMatch, *document.Document, string) string + BestFragmentsInField(*search.DocumentMatch, *document.Document, string, int) []string +} diff --git 
a/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/html/html.go b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/html/html.go new file mode 100644 index 0000000..928589c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/html/html.go @@ -0,0 +1,50 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package html + +import ( + "fmt" + + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search/highlight" + htmlFormatter "github.com/blevesearch/bleve/search/highlight/format/html" + simpleFragmenter "github.com/blevesearch/bleve/search/highlight/fragmenter/simple" + simpleHighlighter "github.com/blevesearch/bleve/search/highlight/highlighter/simple" +) + +const Name = "html" + +func Constructor(config map[string]interface{}, cache *registry.Cache) (highlight.Highlighter, error) { + + fragmenter, err := cache.FragmenterNamed(simpleFragmenter.Name) + if err != nil { + return nil, fmt.Errorf("error building fragmenter: %v", err) + } + + formatter, err := cache.FragmentFormatterNamed(htmlFormatter.Name) + if err != nil { + return nil, fmt.Errorf("error building fragment formatter: %v", err) + } + + return simpleHighlighter.NewHighlighter( + fragmenter, + formatter, + simpleHighlighter.DefaultSeparator), + nil +} + +func init() { + registry.RegisterHighlighter(Name, Constructor) +} diff --git 
a/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/fragment_scorer_simple.go b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/fragment_scorer_simple.go new file mode 100644 index 0000000..3ec4c3d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/fragment_scorer_simple.go @@ -0,0 +1,49 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package simple + +import ( + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/highlight" +) + +// FragmentScorer will score fragments by how many +// unique terms occur in the fragment with no regard for +// any boost values used in the original query +type FragmentScorer struct { + tlm search.TermLocationMap +} + +func NewFragmentScorer(tlm search.TermLocationMap) *FragmentScorer { + return &FragmentScorer{ + tlm: tlm, + } +} + +func (s *FragmentScorer) Score(f *highlight.Fragment) { + score := 0.0 +OUTER: + for _, locations := range s.tlm { + for _, location := range locations { + if location.ArrayPositions.Equals(f.ArrayPositions) && int(location.Start) >= f.Start && int(location.End) <= f.End { + score += 1.0 + // once we find a term in the fragment + // don't care about additional matches + continue OUTER + } + } + } + f.Score = score +} diff --git a/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/highlighter_simple.go 
b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/highlighter_simple.go new file mode 100644 index 0000000..4849516 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/highlighter/simple/highlighter_simple.go @@ -0,0 +1,221 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package simple + +import ( + "container/heap" + "fmt" + + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/highlight" +) + +const Name = "simple" +const DefaultSeparator = "…" + +type Highlighter struct { + fragmenter highlight.Fragmenter + formatter highlight.FragmentFormatter + sep string +} + +func NewHighlighter(fragmenter highlight.Fragmenter, formatter highlight.FragmentFormatter, separator string) *Highlighter { + return &Highlighter{ + fragmenter: fragmenter, + formatter: formatter, + sep: separator, + } +} + +func (s *Highlighter) Fragmenter() highlight.Fragmenter { + return s.fragmenter +} + +func (s *Highlighter) SetFragmenter(f highlight.Fragmenter) { + s.fragmenter = f +} + +func (s *Highlighter) FragmentFormatter() highlight.FragmentFormatter { + return s.formatter +} + +func (s *Highlighter) SetFragmentFormatter(f highlight.FragmentFormatter) { + s.formatter = f +} + +func (s *Highlighter) Separator() string { + return s.sep +} + +func (s *Highlighter) SetSeparator(sep string) 
{ + s.sep = sep +} + +func (s *Highlighter) BestFragmentInField(dm *search.DocumentMatch, doc *document.Document, field string) string { + fragments := s.BestFragmentsInField(dm, doc, field, 1) + if len(fragments) > 0 { + return fragments[0] + } + return "" +} + +func (s *Highlighter) BestFragmentsInField(dm *search.DocumentMatch, doc *document.Document, field string, num int) []string { + tlm := dm.Locations[field] + orderedTermLocations := highlight.OrderTermLocations(tlm) + scorer := NewFragmentScorer(tlm) + + // score the fragments and put them into a priority queue ordered by score + fq := make(FragmentQueue, 0) + heap.Init(&fq) + for _, f := range doc.Fields { + if f.Name() == field { + _, ok := f.(*document.TextField) + if ok { + termLocationsSameArrayPosition := make(highlight.TermLocations, 0) + for _, otl := range orderedTermLocations { + if otl.ArrayPositions.Equals(f.ArrayPositions()) { + termLocationsSameArrayPosition = append(termLocationsSameArrayPosition, otl) + } + } + + fieldData := f.Value() + fragments := s.fragmenter.Fragment(fieldData, termLocationsSameArrayPosition) + for _, fragment := range fragments { + fragment.ArrayPositions = f.ArrayPositions() + scorer.Score(fragment) + heap.Push(&fq, fragment) + } + } + } + } + + // now find the N best non-overlapping fragments + var bestFragments []*highlight.Fragment + if len(fq) > 0 { + candidate := heap.Pop(&fq) + OUTER: + for candidate != nil && len(bestFragments) < num { + // see if this overlaps with any of the best already identified + if len(bestFragments) > 0 { + for _, frag := range bestFragments { + if candidate.(*highlight.Fragment).Overlaps(frag) { + if len(fq) < 1 { + break OUTER + } + candidate = heap.Pop(&fq) + continue OUTER + } + } + bestFragments = append(bestFragments, candidate.(*highlight.Fragment)) + } else { + bestFragments = append(bestFragments, candidate.(*highlight.Fragment)) + } + + if len(fq) < 1 { + break + } + candidate = heap.Pop(&fq) + } + } + + // now that we have 
the best fragments, we can format them + orderedTermLocations.MergeOverlapping() + formattedFragments := make([]string, len(bestFragments)) + for i, fragment := range bestFragments { + formattedFragments[i] = "" + if fragment.Start != 0 { + formattedFragments[i] += s.sep + } + formattedFragments[i] += s.formatter.Format(fragment, orderedTermLocations) + if fragment.End != len(fragment.Orig) { + formattedFragments[i] += s.sep + } + } + + if dm.Fragments == nil { + dm.Fragments = make(search.FieldFragmentMap, 0) + } + if len(formattedFragments) > 0 { + dm.Fragments[field] = formattedFragments + } + + return formattedFragments +} + +// FragmentQueue implements heap.Interface and holds Items. +type FragmentQueue []*highlight.Fragment + +func (fq FragmentQueue) Len() int { return len(fq) } + +func (fq FragmentQueue) Less(i, j int) bool { + // We want Pop to give us the highest, not lowest, priority so we use greater-than here. + return fq[i].Score > fq[j].Score +} + +func (fq FragmentQueue) Swap(i, j int) { + fq[i], fq[j] = fq[j], fq[i] + fq[i].Index = i + fq[j].Index = j +} + +func (fq *FragmentQueue) Push(x interface{}) { + n := len(*fq) + item := x.(*highlight.Fragment) + item.Index = n + *fq = append(*fq, item) +} + +func (fq *FragmentQueue) Pop() interface{} { + old := *fq + n := len(old) + item := old[n-1] + item.Index = -1 // for safety + *fq = old[0 : n-1] + return item +} + +func Constructor(config map[string]interface{}, cache *registry.Cache) (highlight.Highlighter, error) { + separator := DefaultSeparator + separatorVal, ok := config["separator"].(string) + if ok { + separator = separatorVal + } + + fragmenterName, ok := config["fragmenter"].(string) + if !ok { + return nil, fmt.Errorf("must specify fragmenter") + } + fragmenter, err := cache.FragmenterNamed(fragmenterName) + if err != nil { + return nil, fmt.Errorf("error building fragmenter: %v", err) + } + + formatterName, ok := config["formatter"].(string) + if !ok { + return nil, fmt.Errorf("must 
specify formatter") + } + formatter, err := cache.FragmentFormatterNamed(formatterName) + if err != nil { + return nil, fmt.Errorf("error building fragment formatter: %v", err) + } + + return NewHighlighter(fragmenter, formatter, separator), nil +} + +func init() { + registry.RegisterHighlighter(Name, Constructor) +} diff --git a/vendor/github.com/blevesearch/bleve/search/highlight/term_locations.go b/vendor/github.com/blevesearch/bleve/search/highlight/term_locations.go new file mode 100644 index 0000000..6d2cb13 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/highlight/term_locations.go @@ -0,0 +1,105 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package highlight + +import ( + "reflect" + "sort" + + "github.com/blevesearch/bleve/search" +) + +type TermLocation struct { + Term string + ArrayPositions search.ArrayPositions + Pos int + Start int + End int +} + +func (tl *TermLocation) Overlaps(other *TermLocation) bool { + if reflect.DeepEqual(tl.ArrayPositions, other.ArrayPositions) { + if other.Start >= tl.Start && other.Start < tl.End { + return true + } else if tl.Start >= other.Start && tl.Start < other.End { + return true + } + } + return false +} + +type TermLocations []*TermLocation + +func (t TermLocations) Len() int { return len(t) } +func (t TermLocations) Swap(i, j int) { t[i], t[j] = t[j], t[i] } +func (t TermLocations) Less(i, j int) bool { + + shortestArrayPositions := len(t[i].ArrayPositions) + if len(t[j].ArrayPositions) < shortestArrayPositions { + shortestArrayPositions = len(t[j].ArrayPositions) + } + + // compare all the common array positions + for api := 0; api < shortestArrayPositions; api++ { + if t[i].ArrayPositions[api] < t[j].ArrayPositions[api] { + return true + } + if t[i].ArrayPositions[api] > t[j].ArrayPositions[api] { + return false + } + } + // all the common array positions are the same + if len(t[i].ArrayPositions) < len(t[j].ArrayPositions) { + return true // j array positions, longer so greater + } else if len(t[i].ArrayPositions) > len(t[j].ArrayPositions) { + return false // j array positions, shorter so less + } + + // array positions the same, compare starts + return t[i].Start < t[j].Start +} + +func (t TermLocations) MergeOverlapping() { + var lastTl *TermLocation + for i, tl := range t { + if lastTl == nil && tl != nil { + lastTl = tl + } else if lastTl != nil && tl != nil { + if lastTl.Overlaps(tl) { + // ok merge this with previous + lastTl.End = tl.End + t[i] = nil + } + } + } +} + +func OrderTermLocations(tlm search.TermLocationMap) TermLocations { + rv := make(TermLocations, 0) + for term, locations := range tlm { + for _, location := range locations { + tl 
:= TermLocation{ + Term: term, + ArrayPositions: location.ArrayPositions, + Pos: int(location.Pos), + Start: int(location.Start), + End: int(location.End), + } + rv = append(rv, &tl) + } + } + sort.Sort(rv) + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/levenshtein.go b/vendor/github.com/blevesearch/bleve/search/levenshtein.go new file mode 100644 index 0000000..687608d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/levenshtein.go @@ -0,0 +1,114 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package search + +import ( + "math" +) + +func LevenshteinDistance(a, b string) int { + la := len(a) + lb := len(b) + d := make([]int, la+1) + var lastdiag, olddiag, temp int + + for i := 1; i <= la; i++ { + d[i] = i + } + for i := 1; i <= lb; i++ { + d[0] = i + lastdiag = i - 1 + for j := 1; j <= la; j++ { + olddiag = d[j] + min := d[j] + 1 + if (d[j-1] + 1) < min { + min = d[j-1] + 1 + } + if a[j-1] == b[i-1] { + temp = 0 + } else { + temp = 1 + } + if (lastdiag + temp) < min { + min = lastdiag + temp + } + d[j] = min + lastdiag = olddiag + } + } + return d[la] +} + +// LevenshteinDistanceMax same as LevenshteinDistance but +// attempts to bail early once we know the distance +// will be greater than max +// in which case the first return val will be the max +// and the second will be true, indicating max was exceeded +func LevenshteinDistanceMax(a, b string, max int) (int, bool) { + v, wasMax, _ := LevenshteinDistanceMaxReuseSlice(a, b, max, nil) + return v, wasMax +} + +func LevenshteinDistanceMaxReuseSlice(a, b string, max int, d []int) (int, bool, []int) { + la := len(a) + lb := len(b) + + ld := int(math.Abs(float64(la - lb))) + if ld > max { + return max, true, d + } + + if cap(d) < la+1 { + d = make([]int, la+1) + } + d = d[:la+1] + + var lastdiag, olddiag, temp int + + for i := 1; i <= la; i++ { + d[i] = i + } + for i := 1; i <= lb; i++ { + d[0] = i + lastdiag = i - 1 + rowmin := max + 1 + for j := 1; j <= la; j++ { + olddiag = d[j] + min := d[j] + 1 + if (d[j-1] + 1) < min { + min = d[j-1] + 1 + } + if a[j-1] == b[i-1] { + temp = 0 + } else { + temp = 1 + } + if (lastdiag + temp) < min { + min = lastdiag + temp + } + if min < rowmin { + rowmin = min + } + d[j] = min + + lastdiag = olddiag + } + // after each row if rowmin isn't less than max stop + if rowmin > max { + return max, true, d + } + } + return d[la], false, d +} diff --git a/vendor/github.com/blevesearch/bleve/search/pool.go b/vendor/github.com/blevesearch/bleve/search/pool.go new file mode 
100644 index 0000000..ba8be8f --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/pool.go @@ -0,0 +1,91 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package search + +import ( + "reflect" +) + +var reflectStaticSizeDocumentMatchPool int + +func init() { + var dmp DocumentMatchPool + reflectStaticSizeDocumentMatchPool = int(reflect.TypeOf(dmp).Size()) +} + +// DocumentMatchPoolTooSmall is a callback function that can be executed +// when the DocumentMatchPool does not have sufficient capacity +// By default we just perform just-in-time allocation, but you could log +// a message, or panic, etc. +type DocumentMatchPoolTooSmall func(p *DocumentMatchPool) *DocumentMatch + +// DocumentMatchPool manages use/re-use of DocumentMatch instances +// it pre-allocates space from a single large block with the expected +// number of instances. It is not thread-safe as currently all +// aspects of search take place in a single goroutine. 
+type DocumentMatchPool struct { + avail DocumentMatchCollection + TooSmall DocumentMatchPoolTooSmall +} + +func defaultDocumentMatchPoolTooSmall(p *DocumentMatchPool) *DocumentMatch { + return &DocumentMatch{} +} + +// NewDocumentMatchPool will build a DocumentMatchPool with memory +// pre-allocated to accommodate the requested number of DocumentMatch +// instances +func NewDocumentMatchPool(size, sortsize int) *DocumentMatchPool { + avail := make(DocumentMatchCollection, size) + // pre-allocate the expected number of instances + startBlock := make([]DocumentMatch, size) + startSorts := make([]string, size*sortsize) + // make these initial instances available + i, j := 0, 0 + for i < size { + avail[i] = &startBlock[i] + avail[i].Sort = startSorts[j:j] + i += 1 + j += sortsize + } + return &DocumentMatchPool{ + avail: avail, + TooSmall: defaultDocumentMatchPoolTooSmall, + } +} + +// Get returns an available DocumentMatch from the pool +// if the pool was not allocated with sufficient size, an allocation will +// occur to satisfy this request. As a side-effect this will grow the size +// of the pool. +func (p *DocumentMatchPool) Get() *DocumentMatch { + var rv *DocumentMatch + if len(p.avail) > 0 { + rv, p.avail = p.avail[len(p.avail)-1], p.avail[:len(p.avail)-1] + } else { + rv = p.TooSmall(p) + } + return rv +} + +// Put returns a DocumentMatch to the pool +func (p *DocumentMatchPool) Put(d *DocumentMatch) { + if d == nil { + return + } + // reset DocumentMatch before returning it to available pool + d.Reset() + p.avail = append(p.avail, d) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/bool_field.go b/vendor/github.com/blevesearch/bleve/search/query/bool_field.go new file mode 100644 index 0000000..b7b5a3d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/bool_field.go @@ -0,0 +1,64 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type BoolFieldQuery struct { + Bool bool `json:"bool"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewBoolFieldQuery creates a new Query for boolean fields +func NewBoolFieldQuery(val bool) *BoolFieldQuery { + return &BoolFieldQuery{ + Bool: val, + } +} + +func (q *BoolFieldQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *BoolFieldQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *BoolFieldQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *BoolFieldQuery) Field() string { + return q.FieldVal +} + +func (q *BoolFieldQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + term := "F" + if q.Bool { + term = "T" + } + return searcher.NewTermSearcher(i, term, field, q.BoostVal.Value(), options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/boolean.go b/vendor/github.com/blevesearch/bleve/search/query/boolean.go new file mode 100644 index 0000000..3cfa1d9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/boolean.go @@ 
-0,0 +1,248 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type BooleanQuery struct { + Must Query `json:"must,omitempty"` + Should Query `json:"should,omitempty"` + MustNot Query `json:"must_not,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` + queryStringMode bool +} + +// NewBooleanQuery creates a compound Query composed +// of several other Query objects. +// Result documents must satisfy ALL of the +// must Queries. +// Result documents must satisfy NONE of the must not +// Queries. +// Result documents that ALSO satisfy any of the should +// Queries will score higher. +func NewBooleanQuery(must []Query, should []Query, mustNot []Query) *BooleanQuery { + + rv := BooleanQuery{} + if len(must) > 0 { + rv.Must = NewConjunctionQuery(must) + } + if len(should) > 0 { + rv.Should = NewDisjunctionQuery(should) + } + if len(mustNot) > 0 { + rv.MustNot = NewDisjunctionQuery(mustNot) + } + + return &rv +} + +func NewBooleanQueryForQueryString(must []Query, should []Query, mustNot []Query) *BooleanQuery { + rv := NewBooleanQuery(nil, nil, nil) + rv.queryStringMode = true + rv.AddMust(must...) + rv.AddShould(should...) + rv.AddMustNot(mustNot...) 
+ return rv +} + +// SetMinShould requires that at least minShould of the +// should Queries must be satisfied. +func (q *BooleanQuery) SetMinShould(minShould float64) { + q.Should.(*DisjunctionQuery).SetMin(minShould) +} + +func (q *BooleanQuery) AddMust(m ...Query) { + if q.Must == nil { + tmp := NewConjunctionQuery([]Query{}) + tmp.queryStringMode = q.queryStringMode + q.Must = tmp + } + for _, mq := range m { + q.Must.(*ConjunctionQuery).AddQuery(mq) + } +} + +func (q *BooleanQuery) AddShould(m ...Query) { + if q.Should == nil { + tmp := NewDisjunctionQuery([]Query{}) + tmp.queryStringMode = q.queryStringMode + q.Should = tmp + } + for _, mq := range m { + q.Should.(*DisjunctionQuery).AddQuery(mq) + } +} + +func (q *BooleanQuery) AddMustNot(m ...Query) { + if q.MustNot == nil { + tmp := NewDisjunctionQuery([]Query{}) + tmp.queryStringMode = q.queryStringMode + q.MustNot = tmp + } + for _, mq := range m { + q.MustNot.(*DisjunctionQuery).AddQuery(mq) + } +} + +func (q *BooleanQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *BooleanQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *BooleanQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + var err error + var mustNotSearcher search.Searcher + if q.MustNot != nil { + mustNotSearcher, err = q.MustNot.Searcher(i, m, options) + if err != nil { + return nil, err + } + // if must not is MatchNone, reset it to nil + if _, ok := mustNotSearcher.(*searcher.MatchNoneSearcher); ok { + mustNotSearcher = nil + } + } + + var mustSearcher search.Searcher + if q.Must != nil { + mustSearcher, err = q.Must.Searcher(i, m, options) + if err != nil { + return nil, err + } + // if must searcher is MatchNone, reset it to nil + if _, ok := mustSearcher.(*searcher.MatchNoneSearcher); ok { + mustSearcher = nil + } + } + + var shouldSearcher search.Searcher + if q.Should != nil { + shouldSearcher, err = 
q.Should.Searcher(i, m, options) + if err != nil { + return nil, err + } + // if should searcher is MatchNone, reset it to nil + if _, ok := shouldSearcher.(*searcher.MatchNoneSearcher); ok { + shouldSearcher = nil + } + } + + // if all 3 are nil, return MatchNone + if mustSearcher == nil && shouldSearcher == nil && mustNotSearcher == nil { + return searcher.NewMatchNoneSearcher(i) + } + + // if only mustNotSearcher, start with MatchAll + if mustSearcher == nil && shouldSearcher == nil && mustNotSearcher != nil { + mustSearcher, err = searcher.NewMatchAllSearcher(i, 1.0, options) + if err != nil { + return nil, err + } + } + + // optimization, if only should searcher, just return it instead + if mustSearcher == nil && shouldSearcher != nil && mustNotSearcher == nil { + return shouldSearcher, nil + } + + return searcher.NewBooleanSearcher(i, mustSearcher, shouldSearcher, mustNotSearcher, options) +} + +func (q *BooleanQuery) Validate() error { + if qm, ok := q.Must.(ValidatableQuery); ok { + err := qm.Validate() + if err != nil { + return err + } + } + if qs, ok := q.Should.(ValidatableQuery); ok { + err := qs.Validate() + if err != nil { + return err + } + } + if qmn, ok := q.MustNot.(ValidatableQuery); ok { + err := qmn.Validate() + if err != nil { + return err + } + } + if q.Must == nil && q.Should == nil && q.MustNot == nil { + return fmt.Errorf("boolean query must contain at least one must or should or not must clause") + } + return nil +} + +func (q *BooleanQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + Must json.RawMessage `json:"must,omitempty"` + Should json.RawMessage `json:"should,omitempty"` + MustNot json.RawMessage `json:"must_not,omitempty"` + Boost *Boost `json:"boost,omitempty"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + + if tmp.Must != nil { + q.Must, err = ParseQuery(tmp.Must) + if err != nil { + return err + } + _, isConjunctionQuery := q.Must.(*ConjunctionQuery) + if !isConjunctionQuery { + 
return fmt.Errorf("must clause must be conjunction") + } + } + + if tmp.Should != nil { + q.Should, err = ParseQuery(tmp.Should) + if err != nil { + return err + } + _, isDisjunctionQuery := q.Should.(*DisjunctionQuery) + if !isDisjunctionQuery { + return fmt.Errorf("should clause must be disjunction") + } + } + + if tmp.MustNot != nil { + q.MustNot, err = ParseQuery(tmp.MustNot) + if err != nil { + return err + } + _, isDisjunctionQuery := q.MustNot.(*DisjunctionQuery) + if !isDisjunctionQuery { + return fmt.Errorf("must not clause must be disjunction") + } + } + + q.BoostVal = tmp.Boost + + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/boost.go b/vendor/github.com/blevesearch/bleve/search/query/boost.go new file mode 100644 index 0000000..1365994 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/boost.go @@ -0,0 +1,33 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import "fmt" + +type Boost float64 + +func (b *Boost) Value() float64 { + if b == nil { + return 1.0 + } + return float64(*b) +} + +func (b *Boost) GoString() string { + if b == nil { + return "boost unspecified" + } + return fmt.Sprintf("%f", *b) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/conjunction.go b/vendor/github.com/blevesearch/bleve/search/query/conjunction.go new file mode 100644 index 0000000..1a7ed1b --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/conjunction.go @@ -0,0 +1,112 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type ConjunctionQuery struct { + Conjuncts []Query `json:"conjuncts"` + BoostVal *Boost `json:"boost,omitempty"` + queryStringMode bool +} + +// NewConjunctionQuery creates a new compound Query. +// Result documents must satisfy all of the queries. 
+func NewConjunctionQuery(conjuncts []Query) *ConjunctionQuery { + return &ConjunctionQuery{ + Conjuncts: conjuncts, + } +} + +func (q *ConjunctionQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *ConjunctionQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *ConjunctionQuery) AddQuery(aq ...Query) { + for _, aaq := range aq { + q.Conjuncts = append(q.Conjuncts, aaq) + } +} + +func (q *ConjunctionQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + ss := make([]search.Searcher, 0, len(q.Conjuncts)) + for _, conjunct := range q.Conjuncts { + sr, err := conjunct.Searcher(i, m, options) + if err != nil { + for _, searcher := range ss { + if searcher != nil { + _ = searcher.Close() + } + } + return nil, err + } + if _, ok := sr.(*searcher.MatchNoneSearcher); ok && q.queryStringMode { + // in query string mode, skip match none + continue + } + ss = append(ss, sr) + } + + if len(ss) < 1 { + return searcher.NewMatchNoneSearcher(i) + } + + return searcher.NewConjunctionSearcher(i, ss, options) +} + +func (q *ConjunctionQuery) Validate() error { + for _, q := range q.Conjuncts { + if q, ok := q.(ValidatableQuery); ok { + err := q.Validate() + if err != nil { + return err + } + } + } + return nil +} + +func (q *ConjunctionQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + Conjuncts []json.RawMessage `json:"conjuncts"` + Boost *Boost `json:"boost,omitempty"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + q.Conjuncts = make([]Query, len(tmp.Conjuncts)) + for i, term := range tmp.Conjuncts { + query, err := ParseQuery(term) + if err != nil { + return err + } + q.Conjuncts[i] = query + } + q.BoostVal = tmp.Boost + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/date_range.go b/vendor/github.com/blevesearch/bleve/search/query/date_range.go new file mode 100644 index 0000000..3ac0322 --- 
/dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/date_range.go @@ -0,0 +1,191 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + "fmt" + "math" + "time" + + "github.com/blevesearch/bleve/analysis/datetime/optional" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/registry" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +// QueryDateTimeParser controls the default query date time parser +var QueryDateTimeParser = optional.Name + +// QueryDateTimeFormat controls the format when Marshaling to JSON +var QueryDateTimeFormat = time.RFC3339 + +var cache = registry.NewCache() + +type BleveQueryTime struct { + time.Time +} + +var MinRFC3339CompatibleTime time.Time +var MaxRFC3339CompatibleTime time.Time + +func init() { + MinRFC3339CompatibleTime, _ = time.Parse(time.RFC3339, "1677-12-01T00:00:00Z") + MaxRFC3339CompatibleTime, _ = time.Parse(time.RFC3339, "2262-04-11T11:59:59Z") +} + +func queryTimeFromString(t string) (time.Time, error) { + dateTimeParser, err := cache.DateTimeParserNamed(QueryDateTimeParser) + if err != nil { + return time.Time{}, err + } + rv, err := dateTimeParser.ParseDateTime(t) + if err != nil { + return time.Time{}, err + } + return rv, nil +} + +func (t *BleveQueryTime) MarshalJSON() 
([]byte, error) { + tt := time.Time(t.Time) + return []byte("\"" + tt.Format(QueryDateTimeFormat) + "\""), nil +} + +func (t *BleveQueryTime) UnmarshalJSON(data []byte) error { + var timeString string + err := json.Unmarshal(data, &timeString) + if err != nil { + return err + } + dateTimeParser, err := cache.DateTimeParserNamed(QueryDateTimeParser) + if err != nil { + return err + } + t.Time, err = dateTimeParser.ParseDateTime(timeString) + if err != nil { + return err + } + return nil +} + +type DateRangeQuery struct { + Start BleveQueryTime `json:"start,omitempty"` + End BleveQueryTime `json:"end,omitempty"` + InclusiveStart *bool `json:"inclusive_start,omitempty"` + InclusiveEnd *bool `json:"inclusive_end,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewDateRangeQuery creates a new Query for ranges +// of date values. +// Date strings are parsed using the DateTimeParser configured in the +// top-level config.QueryDateTimeParser +// Either, but not both endpoints can be nil. +func NewDateRangeQuery(start, end time.Time) *DateRangeQuery { + return NewDateRangeInclusiveQuery(start, end, nil, nil) +} + +// NewDateRangeInclusiveQuery creates a new Query for ranges +// of date values. +// Date strings are parsed using the DateTimeParser configured in the +// top-level config.QueryDateTimeParser +// Either, but not both endpoints can be nil. +// startInclusive and endInclusive control inclusion of the endpoints. 
+func NewDateRangeInclusiveQuery(start, end time.Time, startInclusive, endInclusive *bool) *DateRangeQuery { + return &DateRangeQuery{ + Start: BleveQueryTime{start}, + End: BleveQueryTime{end}, + InclusiveStart: startInclusive, + InclusiveEnd: endInclusive, + } +} + +func (q *DateRangeQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *DateRangeQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *DateRangeQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *DateRangeQuery) Field() string { + return q.FieldVal +} + +func (q *DateRangeQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + min, max, err := q.parseEndpoints() + if err != nil { + return nil, err + } + + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + return searcher.NewNumericRangeSearcher(i, min, max, q.InclusiveStart, q.InclusiveEnd, field, q.BoostVal.Value(), options) +} + +func (q *DateRangeQuery) parseEndpoints() (*float64, *float64, error) { + min := math.Inf(-1) + max := math.Inf(1) + if !q.Start.IsZero() { + if !isDatetimeCompatible(q.Start) { + // overflow + return nil, nil, fmt.Errorf("invalid/unsupported date range, start: %v", q.Start) + } + startInt64 := q.Start.UnixNano() + min = numeric.Int64ToFloat64(startInt64) + } + if !q.End.IsZero() { + if !isDatetimeCompatible(q.End) { + // overflow + return nil, nil, fmt.Errorf("invalid/unsupported date range, end: %v", q.End) + } + endInt64 := q.End.UnixNano() + max = numeric.Int64ToFloat64(endInt64) + } + + return &min, &max, nil +} + +func (q *DateRangeQuery) Validate() error { + if q.Start.IsZero() && q.End.IsZero() { + return fmt.Errorf("must specify start or end") + } + _, _, err := q.parseEndpoints() + if err != nil { + return err + } + return nil +} + +func isDatetimeCompatible(t BleveQueryTime) bool { + if QueryDateTimeFormat == time.RFC3339 && + (t.Before(MinRFC3339CompatibleTime) 
|| t.After(MaxRFC3339CompatibleTime)) { + return false + } + + return true +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/disjunction.go b/vendor/github.com/blevesearch/bleve/search/query/disjunction.go new file mode 100644 index 0000000..a1fc143 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/disjunction.go @@ -0,0 +1,124 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type DisjunctionQuery struct { + Disjuncts []Query `json:"disjuncts"` + BoostVal *Boost `json:"boost,omitempty"` + Min float64 `json:"min"` + queryStringMode bool +} + +// NewDisjunctionQuery creates a new compound Query. +// Result documents satisfy at least one Query. 
+func NewDisjunctionQuery(disjuncts []Query) *DisjunctionQuery { + return &DisjunctionQuery{ + Disjuncts: disjuncts, + } +} + +func (q *DisjunctionQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *DisjunctionQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *DisjunctionQuery) AddQuery(aq ...Query) { + for _, aaq := range aq { + q.Disjuncts = append(q.Disjuncts, aaq) + } +} + +func (q *DisjunctionQuery) SetMin(m float64) { + q.Min = m +} + +func (q *DisjunctionQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, + options search.SearcherOptions) (search.Searcher, error) { + ss := make([]search.Searcher, 0, len(q.Disjuncts)) + for _, disjunct := range q.Disjuncts { + sr, err := disjunct.Searcher(i, m, options) + if err != nil { + for _, searcher := range ss { + if searcher != nil { + _ = searcher.Close() + } + } + return nil, err + } + if _, ok := sr.(*searcher.MatchNoneSearcher); ok && q.queryStringMode { + // in query string mode, skip match none + continue + } + ss = append(ss, sr) + } + + if len(ss) < 1 { + return searcher.NewMatchNoneSearcher(i) + } + + return searcher.NewDisjunctionSearcher(i, ss, q.Min, options) +} + +func (q *DisjunctionQuery) Validate() error { + if int(q.Min) > len(q.Disjuncts) { + return fmt.Errorf("disjunction query has fewer than the minimum number of clauses to satisfy") + } + for _, q := range q.Disjuncts { + if q, ok := q.(ValidatableQuery); ok { + err := q.Validate() + if err != nil { + return err + } + } + } + return nil +} + +func (q *DisjunctionQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + Disjuncts []json.RawMessage `json:"disjuncts"` + Boost *Boost `json:"boost,omitempty"` + Min float64 `json:"min"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + q.Disjuncts = make([]Query, len(tmp.Disjuncts)) + for i, term := range tmp.Disjuncts { + query, err := ParseQuery(term) + if err != nil { + return err + } + q.Disjuncts[i] = query 
+ } + q.BoostVal = tmp.Boost + q.Min = tmp.Min + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/docid.go b/vendor/github.com/blevesearch/bleve/search/query/docid.go new file mode 100644 index 0000000..3b865f9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/docid.go @@ -0,0 +1,49 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type DocIDQuery struct { + IDs []string `json:"ids"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewDocIDQuery creates a new Query object returning indexed documents among +// the specified set. Combine it with ConjunctionQuery to restrict the scope of +// other queries output. 
+func NewDocIDQuery(ids []string) *DocIDQuery { + return &DocIDQuery{ + IDs: ids, + } +} + +func (q *DocIDQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *DocIDQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *DocIDQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + return searcher.NewDocIDSearcher(i, q.IDs, q.BoostVal.Value(), options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/fuzzy.go b/vendor/github.com/blevesearch/bleve/search/query/fuzzy.go new file mode 100644 index 0000000..f18982d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/fuzzy.go @@ -0,0 +1,77 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type FuzzyQuery struct { + Term string `json:"term"` + Prefix int `json:"prefix_length"` + Fuzziness int `json:"fuzziness"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewFuzzyQuery creates a new Query which finds +// documents containing terms within a specific +// fuzziness of the specified term. +// The default fuzziness is 1. 
+// +// The current implementation uses Levenshtein edit +// distance as the fuzziness metric. +func NewFuzzyQuery(term string) *FuzzyQuery { + return &FuzzyQuery{ + Term: term, + Fuzziness: 1, + } +} + +func (q *FuzzyQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *FuzzyQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *FuzzyQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *FuzzyQuery) Field() string { + return q.FieldVal +} + +func (q *FuzzyQuery) SetFuzziness(f int) { + q.Fuzziness = f +} + +func (q *FuzzyQuery) SetPrefix(p int) { + q.Prefix = p +} + +func (q *FuzzyQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + return searcher.NewFuzzySearcher(i, q.Term, q.Prefix, q.Fuzziness, field, q.BoostVal.Value(), options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/geo_boundingbox.go b/vendor/github.com/blevesearch/bleve/search/query/geo_boundingbox.go new file mode 100644 index 0000000..de6be4a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/geo_boundingbox.go @@ -0,0 +1,113 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type GeoBoundingBoxQuery struct { + TopLeft []float64 `json:"top_left,omitempty"` + BottomRight []float64 `json:"bottom_right,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +func NewGeoBoundingBoxQuery(topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64) *GeoBoundingBoxQuery { + return &GeoBoundingBoxQuery{ + TopLeft: []float64{topLeftLon, topLeftLat}, + BottomRight: []float64{bottomRightLon, bottomRightLat}, + } +} + +func (q *GeoBoundingBoxQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *GeoBoundingBoxQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *GeoBoundingBoxQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *GeoBoundingBoxQuery) Field() string { + return q.FieldVal +} + +func (q *GeoBoundingBoxQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + if q.BottomRight[0] < q.TopLeft[0] { + // cross date line, rewrite as two parts + + leftSearcher, err := searcher.NewGeoBoundingBoxSearcher(i, -180, q.BottomRight[1], q.BottomRight[0], q.TopLeft[1], field, q.BoostVal.Value(), options, true) + if err != nil { + return nil, err + } + rightSearcher, err := searcher.NewGeoBoundingBoxSearcher(i, q.TopLeft[0], q.BottomRight[1], 180, q.TopLeft[1], field, q.BoostVal.Value(), options, true) + if err != nil { + _ = leftSearcher.Close() + return nil, err + } + + return searcher.NewDisjunctionSearcher(i, []search.Searcher{leftSearcher, rightSearcher}, 0, options) + } + + return searcher.NewGeoBoundingBoxSearcher(i, q.TopLeft[0], 
q.BottomRight[1], q.BottomRight[0], q.TopLeft[1], field, q.BoostVal.Value(), options, true) +} + +func (q *GeoBoundingBoxQuery) Validate() error { + return nil +} + +func (q *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + TopLeft interface{} `json:"top_left,omitempty"` + BottomRight interface{} `json:"bottom_right,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + // now use our generic point parsing code from the geo package + lon, lat, found := geo.ExtractGeoPoint(tmp.TopLeft) + if !found { + return fmt.Errorf("geo location top_left not in a valid format") + } + q.TopLeft = []float64{lon, lat} + lon, lat, found = geo.ExtractGeoPoint(tmp.BottomRight) + if !found { + return fmt.Errorf("geo location bottom_right not in a valid format") + } + q.BottomRight = []float64{lon, lat} + q.FieldVal = tmp.FieldVal + q.BoostVal = tmp.BoostVal + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/geo_boundingpolygon.go b/vendor/github.com/blevesearch/bleve/search/query/geo_boundingpolygon.go new file mode 100644 index 0000000..41c7f7f --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/geo_boundingpolygon.go @@ -0,0 +1,94 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type GeoBoundingPolygonQuery struct { + Points []geo.Point `json:"polygon_points"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +func NewGeoBoundingPolygonQuery(points []geo.Point) *GeoBoundingPolygonQuery { + return &GeoBoundingPolygonQuery{ + Points: points} +} + +func (q *GeoBoundingPolygonQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *GeoBoundingPolygonQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *GeoBoundingPolygonQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *GeoBoundingPolygonQuery) Field() string { + return q.FieldVal +} + +func (q *GeoBoundingPolygonQuery) Searcher(i index.IndexReader, + m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + return searcher.NewGeoBoundedPolygonSearcher(i, q.Points, field, q.BoostVal.Value(), options) +} + +func (q *GeoBoundingPolygonQuery) Validate() error { + return nil +} + +func (q *GeoBoundingPolygonQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + Points []interface{} `json:"polygon_points"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + + q.Points = make([]geo.Point, 0, len(tmp.Points)) + for _, i := range tmp.Points { + // now use our generic point parsing code from the geo package + lon, lat, found := geo.ExtractGeoPoint(i) + if !found { + return fmt.Errorf("geo polygon point: %v is not in a valid format", i) + } + q.Points = append(q.Points, geo.Point{Lon: lon, Lat: lat}) + } + + q.FieldVal = 
tmp.FieldVal + q.BoostVal = tmp.BoostVal + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/geo_distance.go b/vendor/github.com/blevesearch/bleve/search/query/geo_distance.go new file mode 100644 index 0000000..ef3aa88 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/geo_distance.go @@ -0,0 +1,100 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type GeoDistanceQuery struct { + Location []float64 `json:"location,omitempty"` + Distance string `json:"distance,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +func NewGeoDistanceQuery(lon, lat float64, distance string) *GeoDistanceQuery { + return &GeoDistanceQuery{ + Location: []float64{lon, lat}, + Distance: distance, + } +} + +func (q *GeoDistanceQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *GeoDistanceQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *GeoDistanceQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *GeoDistanceQuery) Field() string { + return q.FieldVal +} + +func (q *GeoDistanceQuery) Searcher(i index.IndexReader, m 
mapping.IndexMapping, + options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + dist, err := geo.ParseDistance(q.Distance) + if err != nil { + return nil, err + } + + return searcher.NewGeoPointDistanceSearcher(i, q.Location[0], q.Location[1], + dist, field, q.BoostVal.Value(), options) +} + +func (q *GeoDistanceQuery) Validate() error { + return nil +} + +func (q *GeoDistanceQuery) UnmarshalJSON(data []byte) error { + tmp := struct { + Location interface{} `json:"location,omitempty"` + Distance string `json:"distance,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` + }{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + // now use our generic point parsing code from the geo package + lon, lat, found := geo.ExtractGeoPoint(tmp.Location) + if !found { + return fmt.Errorf("geo location not in a valid format") + } + q.Location = []float64{lon, lat} + q.Distance = tmp.Distance + q.FieldVal = tmp.FieldVal + q.BoostVal = tmp.BoostVal + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/match.go b/vendor/github.com/blevesearch/bleve/search/query/match.go new file mode 100644 index 0000000..36c9ee4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/match.go @@ -0,0 +1,176 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" +) + +type MatchQuery struct { + Match string `json:"match"` + FieldVal string `json:"field,omitempty"` + Analyzer string `json:"analyzer,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` + Prefix int `json:"prefix_length"` + Fuzziness int `json:"fuzziness"` + Operator MatchQueryOperator `json:"operator,omitempty"` +} + +type MatchQueryOperator int + +const ( + // Document must satisfy AT LEAST ONE of term searches. + MatchQueryOperatorOr = 0 + // Document must satisfy ALL of term searches. + MatchQueryOperatorAnd = 1 +) + +func (o MatchQueryOperator) MarshalJSON() ([]byte, error) { + switch o { + case MatchQueryOperatorOr: + return json.Marshal("or") + case MatchQueryOperatorAnd: + return json.Marshal("and") + default: + return nil, fmt.Errorf("cannot marshal match operator %d to JSON", o) + } +} + +func (o *MatchQueryOperator) UnmarshalJSON(data []byte) error { + var operatorString string + err := json.Unmarshal(data, &operatorString) + if err != nil { + return err + } + + switch operatorString { + case "or": + *o = MatchQueryOperatorOr + return nil + case "and": + *o = MatchQueryOperatorAnd + return nil + default: + return fmt.Errorf("cannot unmarshal match operator '%v' from JSON", o) + } +} + +// NewMatchQuery creates a Query for matching text. +// An Analyzer is chosen based on the field. +// Input text is analyzed using this analyzer. +// Token terms resulting from this analysis are +// used to perform term searches. Result documents +// must satisfy at least one of these term searches. 
+func NewMatchQuery(match string) *MatchQuery { + return &MatchQuery{ + Match: match, + Operator: MatchQueryOperatorOr, + } +} + +func (q *MatchQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *MatchQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *MatchQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *MatchQuery) Field() string { + return q.FieldVal +} + +func (q *MatchQuery) SetFuzziness(f int) { + q.Fuzziness = f +} + +func (q *MatchQuery) SetPrefix(p int) { + q.Prefix = p +} + +func (q *MatchQuery) SetOperator(operator MatchQueryOperator) { + q.Operator = operator +} + +func (q *MatchQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + analyzerName := "" + if q.Analyzer != "" { + analyzerName = q.Analyzer + } else { + analyzerName = m.AnalyzerNameForPath(field) + } + analyzer := m.AnalyzerNamed(analyzerName) + + if analyzer == nil { + return nil, fmt.Errorf("no analyzer named '%s' registered", q.Analyzer) + } + + tokens := analyzer.Analyze([]byte(q.Match)) + if len(tokens) > 0 { + + tqs := make([]Query, len(tokens)) + if q.Fuzziness != 0 { + for i, token := range tokens { + query := NewFuzzyQuery(string(token.Term)) + query.SetFuzziness(q.Fuzziness) + query.SetPrefix(q.Prefix) + query.SetField(field) + query.SetBoost(q.BoostVal.Value()) + tqs[i] = query + } + } else { + for i, token := range tokens { + tq := NewTermQuery(string(token.Term)) + tq.SetField(field) + tq.SetBoost(q.BoostVal.Value()) + tqs[i] = tq + } + } + + switch q.Operator { + case MatchQueryOperatorOr: + shouldQuery := NewDisjunctionQuery(tqs) + shouldQuery.SetMin(1) + shouldQuery.SetBoost(q.BoostVal.Value()) + return shouldQuery.Searcher(i, m, options) + + case MatchQueryOperatorAnd: + mustQuery := NewConjunctionQuery(tqs) + mustQuery.SetBoost(q.BoostVal.Value()) + return 
mustQuery.Searcher(i, m, options) + + default: + return nil, fmt.Errorf("unhandled operator %d", q.Operator) + } + } + noneQuery := NewMatchNoneQuery() + return noneQuery.Searcher(i, m, options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/match_all.go b/vendor/github.com/blevesearch/bleve/search/query/match_all.go new file mode 100644 index 0000000..7fbe1f9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/match_all.go @@ -0,0 +1,55 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type MatchAllQuery struct { + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewMatchAllQuery creates a Query which will +// match all documents in the index. 
+func NewMatchAllQuery() *MatchAllQuery { + return &MatchAllQuery{} +} + +func (q *MatchAllQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *MatchAllQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *MatchAllQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + return searcher.NewMatchAllSearcher(i, q.BoostVal.Value(), options) +} + +func (q *MatchAllQuery) MarshalJSON() ([]byte, error) { + tmp := map[string]interface{}{ + "boost": q.BoostVal, + "match_all": map[string]interface{}{}, + } + return json.Marshal(tmp) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/match_none.go b/vendor/github.com/blevesearch/bleve/search/query/match_none.go new file mode 100644 index 0000000..dc2ea78 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/match_none.go @@ -0,0 +1,55 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "encoding/json" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type MatchNoneQuery struct { + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewMatchNoneQuery creates a Query which will not +// match any documents in the index. 
+func NewMatchNoneQuery() *MatchNoneQuery { + return &MatchNoneQuery{} +} + +func (q *MatchNoneQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *MatchNoneQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *MatchNoneQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + return searcher.NewMatchNoneSearcher(i) +} + +func (q *MatchNoneQuery) MarshalJSON() ([]byte, error) { + tmp := map[string]interface{}{ + "boost": q.BoostVal, + "match_none": map[string]interface{}{}, + } + return json.Marshal(tmp) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/match_phrase.go b/vendor/github.com/blevesearch/bleve/search/query/match_phrase.go new file mode 100644 index 0000000..51be355 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/match_phrase.go @@ -0,0 +1,113 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "fmt" + + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" +) + +type MatchPhraseQuery struct { + MatchPhrase string `json:"match_phrase"` + FieldVal string `json:"field,omitempty"` + Analyzer string `json:"analyzer,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewMatchPhraseQuery creates a new Query object +// for matching phrases in the index. +// An Analyzer is chosen based on the field. +// Input text is analyzed using this analyzer. +// Token terms resulting from this analysis are +// used to build a search phrase. Result documents +// must match this phrase. Queried field must have been indexed with +// IncludeTermVectors set to true. +func NewMatchPhraseQuery(matchPhrase string) *MatchPhraseQuery { + return &MatchPhraseQuery{ + MatchPhrase: matchPhrase, + } +} + +func (q *MatchPhraseQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *MatchPhraseQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *MatchPhraseQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *MatchPhraseQuery) Field() string { + return q.FieldVal +} + +func (q *MatchPhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + analyzerName := "" + if q.Analyzer != "" { + analyzerName = q.Analyzer + } else { + analyzerName = m.AnalyzerNameForPath(field) + } + analyzer := m.AnalyzerNamed(analyzerName) + if analyzer == nil { + return nil, fmt.Errorf("no analyzer named '%s' registered", q.Analyzer) + } + + tokens := analyzer.Analyze([]byte(q.MatchPhrase)) + if len(tokens) > 0 { + phrase := tokenStreamToPhrase(tokens) + phraseQuery := NewMultiPhraseQuery(phrase, field) + phraseQuery.SetBoost(q.BoostVal.Value()) + return 
phraseQuery.Searcher(i, m, options) + } + noneQuery := NewMatchNoneQuery() + return noneQuery.Searcher(i, m, options) +} + +func tokenStreamToPhrase(tokens analysis.TokenStream) [][]string { + firstPosition := int(^uint(0) >> 1) + lastPosition := 0 + for _, token := range tokens { + if token.Position < firstPosition { + firstPosition = token.Position + } + if token.Position > lastPosition { + lastPosition = token.Position + } + } + phraseLen := lastPosition - firstPosition + 1 + if phraseLen > 0 { + rv := make([][]string, phraseLen) + for _, token := range tokens { + pos := token.Position - firstPosition + rv[pos] = append(rv[pos], string(token.Term)) + } + return rv + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/multi_phrase.go b/vendor/github.com/blevesearch/bleve/search/query/multi_phrase.go new file mode 100644 index 0000000..8a7c9b6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/multi_phrase.go @@ -0,0 +1,80 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type MultiPhraseQuery struct { + Terms [][]string `json:"terms"` + Field string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewMultiPhraseQuery creates a new Query for finding +// term phrases in the index. +// It is like PhraseQuery, but each position in the +// phrase may be satisfied by a list of terms +// as opposed to just one. +// At least one of the terms must exist in the correct +// order, at the correct index offsets, in the +// specified field. Queried field must have been indexed with +// IncludeTermVectors set to true. +func NewMultiPhraseQuery(terms [][]string, field string) *MultiPhraseQuery { + return &MultiPhraseQuery{ + Terms: terms, + Field: field, + } +} + +func (q *MultiPhraseQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *MultiPhraseQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *MultiPhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + return searcher.NewMultiPhraseSearcher(i, q.Terms, q.Field, options) +} + +func (q *MultiPhraseQuery) Validate() error { + if len(q.Terms) < 1 { + return fmt.Errorf("phrase query must contain at least one term") + } + return nil +} + +func (q *MultiPhraseQuery) UnmarshalJSON(data []byte) error { + type _mphraseQuery MultiPhraseQuery + tmp := _mphraseQuery{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + q.Terms = tmp.Terms + q.Field = tmp.Field + q.BoostVal = tmp.BoostVal + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/numeric_range.go b/vendor/github.com/blevesearch/bleve/search/query/numeric_range.go new file mode 100644 index 0000000..ea3f068 --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/search/query/numeric_range.go @@ -0,0 +1,87 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type NumericRangeQuery struct { + Min *float64 `json:"min,omitempty"` + Max *float64 `json:"max,omitempty"` + InclusiveMin *bool `json:"inclusive_min,omitempty"` + InclusiveMax *bool `json:"inclusive_max,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewNumericRangeQuery creates a new Query for ranges +// of numeric values. +// Either, but not both endpoints can be nil. +// The minimum value is inclusive. +// The maximum value is exclusive. +func NewNumericRangeQuery(min, max *float64) *NumericRangeQuery { + return NewNumericRangeInclusiveQuery(min, max, nil, nil) +} + +// NewNumericRangeInclusiveQuery creates a new Query for ranges +// of numeric values. +// Either, but not both endpoints can be nil. +// Control endpoint inclusion with inclusiveMin, inclusiveMax. 
+func NewNumericRangeInclusiveQuery(min, max *float64, minInclusive, maxInclusive *bool) *NumericRangeQuery { + return &NumericRangeQuery{ + Min: min, + Max: max, + InclusiveMin: minInclusive, + InclusiveMax: maxInclusive, + } +} + +func (q *NumericRangeQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *NumericRangeQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *NumericRangeQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *NumericRangeQuery) Field() string { + return q.FieldVal +} + +func (q *NumericRangeQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + return searcher.NewNumericRangeSearcher(i, q.Min, q.Max, q.InclusiveMin, q.InclusiveMax, field, q.BoostVal.Value(), options) +} + +func (q *NumericRangeQuery) Validate() error { + if q.Min == nil && q.Min == q.Max { + return fmt.Errorf("numeric range query must specify min or max") + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/phrase.go b/vendor/github.com/blevesearch/bleve/search/query/phrase.go new file mode 100644 index 0000000..dff1a02 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/phrase.go @@ -0,0 +1,77 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type PhraseQuery struct { + Terms []string `json:"terms"` + Field string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewPhraseQuery creates a new Query for finding +// exact term phrases in the index. +// The provided terms must exist in the correct +// order, at the correct index offsets, in the +// specified field. Queried field must have been indexed with +// IncludeTermVectors set to true. +func NewPhraseQuery(terms []string, field string) *PhraseQuery { + return &PhraseQuery{ + Terms: terms, + Field: field, + } +} + +func (q *PhraseQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *PhraseQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *PhraseQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + return searcher.NewPhraseSearcher(i, q.Terms, q.Field, options) +} + +func (q *PhraseQuery) Validate() error { + if len(q.Terms) < 1 { + return fmt.Errorf("phrase query must contain at least one term") + } + return nil +} + +func (q *PhraseQuery) UnmarshalJSON(data []byte) error { + type _phraseQuery PhraseQuery + tmp := _phraseQuery{} + err := json.Unmarshal(data, &tmp) + if err != nil { + return err + } + q.Terms = tmp.Terms + q.Field = tmp.Field + q.BoostVal = tmp.BoostVal + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/prefix.go b/vendor/github.com/blevesearch/bleve/search/query/prefix.go new file mode 100644 index 0000000..4f5be2b --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/prefix.go @@ -0,0 +1,62 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type PrefixQuery struct { + Prefix string `json:"prefix"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewPrefixQuery creates a new Query which finds +// documents containing terms that start with the +// specified prefix. 
+func NewPrefixQuery(prefix string) *PrefixQuery { + return &PrefixQuery{ + Prefix: prefix, + } +} + +func (q *PrefixQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *PrefixQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *PrefixQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *PrefixQuery) Field() string { + return q.FieldVal +} + +func (q *PrefixQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + return searcher.NewTermPrefixSearcher(i, q.Prefix, field, q.BoostVal.Value(), options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/query.go b/vendor/github.com/blevesearch/bleve/search/query/query.go new file mode 100644 index 0000000..18aca22 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query.go @@ -0,0 +1,361 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package query + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "log" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" +) + +var logger = log.New(ioutil.Discard, "bleve mapping ", log.LstdFlags) + +// SetLog sets the logger used for logging +// by default log messages are sent to ioutil.Discard +func SetLog(l *log.Logger) { + logger = l +} + +// A Query represents a description of the type +// and parameters for a query into the index. +type Query interface { + Searcher(i index.IndexReader, m mapping.IndexMapping, + options search.SearcherOptions) (search.Searcher, error) +} + +// A BoostableQuery represents a Query which can be boosted +// relative to other queries. +type BoostableQuery interface { + Query + SetBoost(b float64) + Boost() float64 +} + +// A FieldableQuery represents a Query which can be restricted +// to a single field. +type FieldableQuery interface { + Query + SetField(f string) + Field() string +} + +// A ValidatableQuery represents a Query which can be validated +// prior to execution. +type ValidatableQuery interface { + Query + Validate() error +} + +// ParseQuery deserializes a JSON representation of +// a Query object. 
+func ParseQuery(input []byte) (Query, error) { + var tmp map[string]interface{} + err := json.Unmarshal(input, &tmp) + if err != nil { + return nil, err + } + _, isMatchQuery := tmp["match"] + _, hasFuzziness := tmp["fuzziness"] + if hasFuzziness && !isMatchQuery { + var rv FuzzyQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, isTermQuery := tmp["term"] + if isTermQuery { + var rv TermQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + if isMatchQuery { + var rv MatchQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, isMatchPhraseQuery := tmp["match_phrase"] + if isMatchPhraseQuery { + var rv MatchPhraseQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasMust := tmp["must"] + _, hasShould := tmp["should"] + _, hasMustNot := tmp["must_not"] + if hasMust || hasShould || hasMustNot { + var rv BooleanQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasTerms := tmp["terms"] + if hasTerms { + var rv PhraseQuery + err := json.Unmarshal(input, &rv) + if err != nil { + // now try multi-phrase + var rv2 MultiPhraseQuery + err = json.Unmarshal(input, &rv2) + if err != nil { + return nil, err + } + return &rv2, nil + } + return &rv, nil + } + _, hasConjuncts := tmp["conjuncts"] + if hasConjuncts { + var rv ConjunctionQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasDisjuncts := tmp["disjuncts"] + if hasDisjuncts { + var rv DisjunctionQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + + _, hasSyntaxQuery := tmp["query"] + if hasSyntaxQuery { + var rv QueryStringQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasMin 
:= tmp["min"].(float64) + _, hasMax := tmp["max"].(float64) + if hasMin || hasMax { + var rv NumericRangeQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasMinStr := tmp["min"].(string) + _, hasMaxStr := tmp["max"].(string) + if hasMinStr || hasMaxStr { + var rv TermRangeQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasStart := tmp["start"] + _, hasEnd := tmp["end"] + if hasStart || hasEnd { + var rv DateRangeQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasPrefix := tmp["prefix"] + if hasPrefix { + var rv PrefixQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasRegexp := tmp["regexp"] + if hasRegexp { + var rv RegexpQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasWildcard := tmp["wildcard"] + if hasWildcard { + var rv WildcardQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasMatchAll := tmp["match_all"] + if hasMatchAll { + var rv MatchAllQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasMatchNone := tmp["match_none"] + if hasMatchNone { + var rv MatchNoneQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasDocIds := tmp["ids"] + if hasDocIds { + var rv DocIDQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasBool := tmp["bool"] + if hasBool { + var rv BoolFieldQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasTopLeft := tmp["top_left"] + _, hasBottomRight := tmp["bottom_right"] + if hasTopLeft && hasBottomRight { + var rv GeoBoundingBoxQuery + err := 
json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasDistance := tmp["distance"] + if hasDistance { + var rv GeoDistanceQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + _, hasPoints := tmp["polygon_points"] + if hasPoints { + var rv GeoBoundingPolygonQuery + err := json.Unmarshal(input, &rv) + if err != nil { + return nil, err + } + return &rv, nil + } + return nil, fmt.Errorf("unknown query type") +} + +// expandQuery traverses the input query tree and returns a new tree where +// query string queries have been expanded into base queries. Returned tree may +// reference queries from the input tree or new queries. +func expandQuery(m mapping.IndexMapping, query Query) (Query, error) { + var expand func(query Query) (Query, error) + var expandSlice func(queries []Query) ([]Query, error) + + expandSlice = func(queries []Query) ([]Query, error) { + expanded := []Query{} + for _, q := range queries { + exp, err := expand(q) + if err != nil { + return nil, err + } + expanded = append(expanded, exp) + } + return expanded, nil + } + + expand = func(query Query) (Query, error) { + switch q := query.(type) { + case *QueryStringQuery: + parsed, err := parseQuerySyntax(q.Query) + if err != nil { + return nil, fmt.Errorf("could not parse '%s': %s", q.Query, err) + } + return expand(parsed) + case *ConjunctionQuery: + children, err := expandSlice(q.Conjuncts) + if err != nil { + return nil, err + } + q.Conjuncts = children + return q, nil + case *DisjunctionQuery: + children, err := expandSlice(q.Disjuncts) + if err != nil { + return nil, err + } + q.Disjuncts = children + return q, nil + case *BooleanQuery: + var err error + q.Must, err = expand(q.Must) + if err != nil { + return nil, err + } + q.Should, err = expand(q.Should) + if err != nil { + return nil, err + } + q.MustNot, err = expand(q.MustNot) + if err != nil { + return nil, err + } + return q, nil + default: + return 
query, nil + } + } + return expand(query) +} + +// DumpQuery returns a string representation of the query tree, where query +// string queries have been expanded into base queries. The output format is +// meant for debugging purpose and may change in the future. +func DumpQuery(m mapping.IndexMapping, query Query) (string, error) { + q, err := expandQuery(m, query) + if err != nil { + return "", err + } + data, err := json.MarshalIndent(q, "", " ") + return string(data), err +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/query_string.go b/vendor/github.com/blevesearch/bleve/search/query/query_string.go new file mode 100644 index 0000000..ecafe6b --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query_string.go @@ -0,0 +1,67 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" +) + +type QueryStringQuery struct { + Query string `json:"query"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewQueryStringQuery creates a new Query used for +// finding documents that satisfy a query string. The +// query string is a small query language for humans. 
+func NewQueryStringQuery(query string) *QueryStringQuery { + return &QueryStringQuery{ + Query: query, + } +} + +func (q *QueryStringQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *QueryStringQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *QueryStringQuery) Parse() (Query, error) { + return parseQuerySyntax(q.Query) +} + +func (q *QueryStringQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + newQuery, err := parseQuerySyntax(q.Query) + if err != nil { + return nil, err + } + return newQuery.Searcher(i, m, options) +} + +func (q *QueryStringQuery) Validate() error { + newQuery, err := parseQuerySyntax(q.Query) + if err != nil { + return err + } + if newQuery, ok := newQuery.(ValidatableQuery); ok { + return newQuery.Validate() + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/query_string.y b/vendor/github.com/blevesearch/bleve/search/query/query_string.y new file mode 100644 index 0000000..d3e5ac9 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query_string.y @@ -0,0 +1,328 @@ +%{ +package query +import ( + "fmt" + "strconv" + "strings" + "time" +) + +func logDebugGrammar(format string, v ...interface{}) { + if debugParser { + logger.Printf(format, v...) 
+ } +} +%} + +%union { +s string +n int +f float64 +q Query +pf *float64} + +%token tSTRING tPHRASE tPLUS tMINUS tCOLON tBOOST tNUMBER tSTRING tGREATER tLESS +tEQUAL tTILDE + +%type tSTRING +%type tPHRASE +%type tNUMBER +%type posOrNegNumber +%type tTILDE +%type tBOOST +%type searchBase +%type searchSuffix +%type searchPrefix + +%% + +input: +searchParts { + logDebugGrammar("INPUT") +}; + +searchParts: +searchPart searchParts { + logDebugGrammar("SEARCH PARTS") +} +| +searchPart { + logDebugGrammar("SEARCH PART") +}; + +searchPart: +searchPrefix searchBase searchSuffix { + query := $2 + if $3 != nil { + if query, ok := query.(BoostableQuery); ok { + query.SetBoost(*$3) + } + } + switch($1) { + case queryShould: + yylex.(*lexerWrapper).query.AddShould(query) + case queryMust: + yylex.(*lexerWrapper).query.AddMust(query) + case queryMustNot: + yylex.(*lexerWrapper).query.AddMustNot(query) + } +}; + + +searchPrefix: +/* empty */ { + $$ = queryShould +} +| +tPLUS { + logDebugGrammar("PLUS") + $$ = queryMust +} +| +tMINUS { + logDebugGrammar("MINUS") + $$ = queryMustNot +}; + +searchBase: +tSTRING { + str := $1 + logDebugGrammar("STRING - %s", str) + var q FieldableQuery + if strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") { + q = NewRegexpQuery(str[1:len(str)-1]) + } else if strings.ContainsAny(str, "*?"){ + q = NewWildcardQuery(str) + } else { + q = NewMatchQuery(str) + } + $$ = q +} +| +tSTRING tTILDE { + str := $1 + fuzziness, err := strconv.ParseFloat($2, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid fuzziness value: %v", err)) + } + logDebugGrammar("FUZZY STRING - %s %f", str, fuzziness) + q := NewMatchQuery(str) + q.SetFuzziness(int(fuzziness)) + $$ = q +} +| +tSTRING tCOLON tSTRING tTILDE { + field := $1 + str := $3 + fuzziness, err := strconv.ParseFloat($4, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid fuzziness value: %v", err)) + } + logDebugGrammar("FIELD - %s FUZZY STRING - %s %f", 
field, str, fuzziness) + q := NewMatchQuery(str) + q.SetFuzziness(int(fuzziness)) + q.SetField(field) + $$ = q +} +| +tNUMBER { + str := $1 + logDebugGrammar("STRING - %s", str) + q1 := NewMatchQuery(str) + val, err := strconv.ParseFloat($1, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + inclusive := true + q2 := NewNumericRangeInclusiveQuery(&val, &val, &inclusive, &inclusive) + q := NewDisjunctionQuery([]Query{q1,q2}) + q.queryStringMode = true + $$ = q +} +| +tPHRASE { + phrase := $1 + logDebugGrammar("PHRASE - %s", phrase) + q := NewMatchPhraseQuery(phrase) + $$ = q +} +| +tSTRING tCOLON tSTRING { + field := $1 + str := $3 + logDebugGrammar("FIELD - %s STRING - %s", field, str) + var q FieldableQuery + if strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") { + q = NewRegexpQuery(str[1:len(str)-1]) + } else if strings.ContainsAny(str, "*?"){ + q = NewWildcardQuery(str) + } else { + q = NewMatchQuery(str) + } + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON posOrNegNumber { + field := $1 + str := $3 + logDebugGrammar("FIELD - %s STRING - %s", field, str) + q1 := NewMatchQuery(str) + q1.SetField(field) + val, err := strconv.ParseFloat($3, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + inclusive := true + q2 := NewNumericRangeInclusiveQuery(&val, &val, &inclusive, &inclusive) + q2.SetField(field) + q := NewDisjunctionQuery([]Query{q1,q2}) + q.queryStringMode = true + $$ = q +} +| +tSTRING tCOLON tPHRASE { + field := $1 + phrase := $3 + logDebugGrammar("FIELD - %s PHRASE - %s", field, phrase) + q := NewMatchPhraseQuery(phrase) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tGREATER posOrNegNumber { + field := $1 + min, err := strconv.ParseFloat($4, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + minInclusive := false + logDebugGrammar("FIELD - GREATER THAN %f", min) + q := 
NewNumericRangeInclusiveQuery(&min, nil, &minInclusive, nil) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tGREATER tEQUAL posOrNegNumber { + field := $1 + min, err := strconv.ParseFloat($5, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + minInclusive := true + logDebugGrammar("FIELD - GREATER THAN OR EQUAL %f", min) + q := NewNumericRangeInclusiveQuery(&min, nil, &minInclusive, nil) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tLESS posOrNegNumber { + field := $1 + max, err := strconv.ParseFloat($4, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + maxInclusive := false + logDebugGrammar("FIELD - LESS THAN %f", max) + q := NewNumericRangeInclusiveQuery(nil, &max, nil, &maxInclusive) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tLESS tEQUAL posOrNegNumber { + field := $1 + max, err := strconv.ParseFloat($5, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + maxInclusive := true + logDebugGrammar("FIELD - LESS THAN OR EQUAL %f", max) + q := NewNumericRangeInclusiveQuery(nil, &max, nil, &maxInclusive) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tGREATER tPHRASE { + field := $1 + minInclusive := false + phrase := $4 + + logDebugGrammar("FIELD - GREATER THAN DATE %s", phrase) + minTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(minTime, time.Time{}, &minInclusive, nil) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tGREATER tEQUAL tPHRASE { + field := $1 + minInclusive := true + phrase := $5 + + logDebugGrammar("FIELD - GREATER THAN OR EQUAL DATE %s", phrase) + minTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(minTime, time.Time{}, 
&minInclusive, nil) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tLESS tPHRASE { + field := $1 + maxInclusive := false + phrase := $4 + + logDebugGrammar("FIELD - LESS THAN DATE %s", phrase) + maxTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(time.Time{}, maxTime, nil, &maxInclusive) + q.SetField(field) + $$ = q +} +| +tSTRING tCOLON tLESS tEQUAL tPHRASE { + field := $1 + maxInclusive := true + phrase := $5 + + logDebugGrammar("FIELD - LESS THAN OR EQUAL DATE %s", phrase) + maxTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(time.Time{}, maxTime, nil, &maxInclusive) + q.SetField(field) + $$ = q +}; + +searchSuffix: +/* empty */ { + $$ = nil +} +| +tBOOST { + $$ = nil + boost, err := strconv.ParseFloat($1, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid boost value: %v", err)) + } else { + $$ = &boost + } + logDebugGrammar("BOOST %f", boost) +}; + +posOrNegNumber: +tNUMBER { + $$ = $1 +} +| +tMINUS tNUMBER { + $$ = "-" + $2 +}; diff --git a/vendor/github.com/blevesearch/bleve/search/query/query_string.y.go b/vendor/github.com/blevesearch/bleve/search/query/query_string.y.go new file mode 100644 index 0000000..ac2d322 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query_string.y.go @@ -0,0 +1,815 @@ +package query + +import __yyfmt__ "fmt" + +//line query_string.y:2 +import ( + "fmt" + "strconv" + "strings" + "time" +) + +func logDebugGrammar(format string, v ...interface{}) { + if debugParser { + logger.Printf(format, v...) 
+ } +} + +//line query_string.y:17 +type yySymType struct { + yys int + s string + n int + f float64 + q Query + pf *float64 +} + +const tSTRING = 57346 +const tPHRASE = 57347 +const tPLUS = 57348 +const tMINUS = 57349 +const tCOLON = 57350 +const tBOOST = 57351 +const tNUMBER = 57352 +const tGREATER = 57353 +const tLESS = 57354 +const tEQUAL = 57355 +const tTILDE = 57356 + +var yyToknames = [...]string{ + "$end", + "error", + "$unk", + "tSTRING", + "tPHRASE", + "tPLUS", + "tMINUS", + "tCOLON", + "tBOOST", + "tNUMBER", + "tGREATER", + "tLESS", + "tEQUAL", + "tTILDE", +} +var yyStatenames = [...]string{} + +const yyEofCode = 1 +const yyErrCode = 2 +const yyInitialStackSize = 16 + +//line yacctab:1 +var yyExca = [...]int{ + -1, 1, + 1, -1, + -2, 0, + -1, 3, + 1, 3, + -2, 5, +} + +const yyNprod = 28 +const yyPrivate = 57344 + +var yyTokenNames []string +var yyStates []string + +const yyLast = 42 + +var yyAct = [...]int{ + + 17, 16, 18, 23, 22, 30, 3, 21, 19, 20, + 29, 26, 22, 22, 1, 21, 21, 15, 28, 25, + 24, 27, 34, 14, 22, 13, 31, 21, 32, 33, + 22, 9, 11, 21, 5, 6, 2, 10, 4, 12, + 7, 8, +} +var yyPact = [...]int{ + + 28, -1000, -1000, 28, 27, -1000, -1000, -1000, 16, 9, + -1000, -1000, -1000, -1000, -1000, -3, -11, -1000, -1000, 6, + 5, -1000, -5, -1000, -1000, 23, -1000, -1000, 17, -1000, + -1000, -1000, -1000, -1000, -1000, +} +var yyPgo = [...]int{ + + 0, 0, 41, 39, 38, 14, 36, 6, +} +var yyR1 = [...]int{ + + 0, 5, 6, 6, 7, 4, 4, 4, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 3, 3, 1, 1, +} +var yyR2 = [...]int{ + + 0, 1, 2, 1, 3, 0, 1, 1, 1, 2, + 4, 1, 1, 3, 3, 3, 4, 5, 4, 5, + 4, 5, 4, 5, 0, 1, 1, 2, +} +var yyChk = [...]int{ + + -1000, -5, -6, -7, -4, 6, 7, -6, -2, 4, + 10, 5, -3, 9, 14, 8, 4, -1, 5, 11, + 12, 10, 7, 14, -1, 13, 5, -1, 13, 5, + 10, -1, 5, -1, 5, +} +var yyDef = [...]int{ + + 5, -2, 1, -2, 0, 6, 7, 2, 24, 8, + 11, 12, 4, 25, 9, 0, 13, 14, 15, 0, + 0, 26, 0, 10, 16, 0, 20, 18, 0, 22, + 27, 17, 21, 19, 23, +} +var yyTok1 = [...]int{ + + 1, 
+} +var yyTok2 = [...]int{ + + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, +} +var yyTok3 = [...]int{ + 0, +} + +var yyErrorMessages = [...]struct { + state int + token int + msg string +}{} + +//line yaccpar:1 + +/* parser for yacc output */ + +var ( + yyDebug = 0 + yyErrorVerbose = false +) + +type yyLexer interface { + Lex(lval *yySymType) int + Error(s string) +} + +type yyParser interface { + Parse(yyLexer) int + Lookahead() int +} + +type yyParserImpl struct { + lval yySymType + stack [yyInitialStackSize]yySymType + char int +} + +func (p *yyParserImpl) Lookahead() int { + return p.char +} + +func yyNewParser() yyParser { + return &yyParserImpl{} +} + +const yyFlag = -1000 + +func yyTokname(c int) string { + if c >= 1 && c-1 < len(yyToknames) { + if yyToknames[c-1] != "" { + return yyToknames[c-1] + } + } + return __yyfmt__.Sprintf("tok-%v", c) +} + +func yyStatname(s int) string { + if s >= 0 && s < len(yyStatenames) { + if yyStatenames[s] != "" { + return yyStatenames[s] + } + } + return __yyfmt__.Sprintf("state-%v", s) +} + +func yyErrorMessage(state, lookAhead int) string { + const TOKSTART = 4 + + if !yyErrorVerbose { + return "syntax error" + } + + for _, e := range yyErrorMessages { + if e.state == state && e.token == lookAhead { + return "syntax error: " + e.msg + } + } + + res := "syntax error: unexpected " + yyTokname(lookAhead) + + // To match Bison, suggest at most four expected tokens. + expected := make([]int, 0, 4) + + // Look for shiftable tokens. + base := yyPact[state] + for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { + if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + } + + if yyDef[state] == -2 { + i := 0 + for yyExca[i] != -1 || yyExca[i+1] != state { + i += 2 + } + + // Look for tokens that we accept or reduce. 
+ for i += 2; yyExca[i] >= 0; i += 2 { + tok := yyExca[i] + if tok < TOKSTART || yyExca[i+1] == 0 { + continue + } + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + + // If the default action is to accept or reduce, give up. + if yyExca[i+1] != 0 { + return res + } + } + + for i, tok := range expected { + if i == 0 { + res += ", expecting " + } else { + res += " or " + } + res += yyTokname(tok) + } + return res +} + +func yylex1(lex yyLexer, lval *yySymType) (char, token int) { + token = 0 + char = lex.Lex(lval) + if char <= 0 { + token = yyTok1[0] + goto out + } + if char < len(yyTok1) { + token = yyTok1[char] + goto out + } + if char >= yyPrivate { + if char < yyPrivate+len(yyTok2) { + token = yyTok2[char-yyPrivate] + goto out + } + } + for i := 0; i < len(yyTok3); i += 2 { + token = yyTok3[i+0] + if token == char { + token = yyTok3[i+1] + goto out + } + } + +out: + if token == 0 { + token = yyTok2[1] /* unknown char */ + } + if yyDebug >= 3 { + __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) + } + return char, token +} + +func yyParse(yylex yyLexer) int { + return yyNewParser().Parse(yylex) +} + +func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int { + var yyn int + var yyVAL yySymType + var yyDollar []yySymType + _ = yyDollar // silence set and not used + yyS := yyrcvr.stack[:] + + Nerrs := 0 /* number of errors */ + Errflag := 0 /* error recovery flag */ + yystate := 0 + yyrcvr.char = -1 + yytoken := -1 // yyrcvr.char translated into internal numbering + defer func() { + // Make sure we report no lookahead when not parsing. 
+ yystate = -1 + yyrcvr.char = -1 + yytoken = -1 + }() + yyp := -1 + goto yystack + +ret0: + return 0 + +ret1: + return 1 + +yystack: + /* put a state and value onto the stack */ + if yyDebug >= 4 { + __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate)) + } + + yyp++ + if yyp >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyS[yyp] = yyVAL + yyS[yyp].yys = yystate + +yynewstate: + yyn = yyPact[yystate] + if yyn <= yyFlag { + goto yydefault /* simple state */ + } + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + yyn += yytoken + if yyn < 0 || yyn >= yyLast { + goto yydefault + } + yyn = yyAct[yyn] + if yyChk[yyn] == yytoken { /* valid shift */ + yyrcvr.char = -1 + yytoken = -1 + yyVAL = yyrcvr.lval + yystate = yyn + if Errflag > 0 { + Errflag-- + } + goto yystack + } + +yydefault: + /* default state action */ + yyn = yyDef[yystate] + if yyn == -2 { + if yyrcvr.char < 0 { + yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) + } + + /* look through exception table */ + xi := 0 + for { + if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { + break + } + xi += 2 + } + for xi += 2; ; xi += 2 { + yyn = yyExca[xi+0] + if yyn < 0 || yyn == yytoken { + break + } + } + yyn = yyExca[xi+1] + if yyn < 0 { + goto ret0 + } + } + if yyn == 0 { + /* error ... attempt to resume parsing */ + switch Errflag { + case 0: /* brand new error */ + yylex.Error(yyErrorMessage(yystate, yytoken)) + Nerrs++ + if yyDebug >= 1 { + __yyfmt__.Printf("%s", yyStatname(yystate)) + __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken)) + } + fallthrough + + case 1, 2: /* incompletely recovered error ... 
try again */ + Errflag = 3 + + /* find a state where "error" is a legal shift action */ + for yyp >= 0 { + yyn = yyPact[yyS[yyp].yys] + yyErrCode + if yyn >= 0 && yyn < yyLast { + yystate = yyAct[yyn] /* simulate a shift of "error" */ + if yyChk[yystate] == yyErrCode { + goto yystack + } + } + + /* the current p has no shift on "error", pop stack */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys) + } + yyp-- + } + /* there is no state on the stack with an error shift ... abort */ + goto ret1 + + case 3: /* no shift yet; clobber input char */ + if yyDebug >= 2 { + __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken)) + } + if yytoken == yyEofCode { + goto ret1 + } + yyrcvr.char = -1 + yytoken = -1 + goto yynewstate /* try again in the same state */ + } + } + + /* reduction by production yyn */ + if yyDebug >= 2 { + __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate)) + } + + yynt := yyn + yypt := yyp + _ = yypt // guard against "declared and not used" + + yyp -= yyR2[yyn] + // yyp is now the index of $0. Perform the default action. Iff the + // reduced production is ε, $1 is possibly out of range. 
+ if yyp+1 >= len(yyS) { + nyys := make([]yySymType, len(yyS)*2) + copy(nyys, yyS) + yyS = nyys + } + yyVAL = yyS[yyp+1] + + /* consult goto table to find next state */ + yyn = yyR1[yyn] + yyg := yyPgo[yyn] + yyj := yyg + yyS[yyp].yys + 1 + + if yyj >= yyLast { + yystate = yyAct[yyg] + } else { + yystate = yyAct[yyj] + if yyChk[yystate] != -yyn { + yystate = yyAct[yyg] + } + } + // dummy call; replaced with literal code + switch yynt { + + case 1: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:40 + { + logDebugGrammar("INPUT") + } + case 2: + yyDollar = yyS[yypt-2 : yypt+1] + //line query_string.y:45 + { + logDebugGrammar("SEARCH PARTS") + } + case 3: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:49 + { + logDebugGrammar("SEARCH PART") + } + case 4: + yyDollar = yyS[yypt-3 : yypt+1] + //line query_string.y:54 + { + query := yyDollar[2].q + if yyDollar[3].pf != nil { + if query, ok := query.(BoostableQuery); ok { + query.SetBoost(*yyDollar[3].pf) + } + } + switch yyDollar[1].n { + case queryShould: + yylex.(*lexerWrapper).query.AddShould(query) + case queryMust: + yylex.(*lexerWrapper).query.AddMust(query) + case queryMustNot: + yylex.(*lexerWrapper).query.AddMustNot(query) + } + } + case 5: + yyDollar = yyS[yypt-0 : yypt+1] + //line query_string.y:73 + { + yyVAL.n = queryShould + } + case 6: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:77 + { + logDebugGrammar("PLUS") + yyVAL.n = queryMust + } + case 7: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:82 + { + logDebugGrammar("MINUS") + yyVAL.n = queryMustNot + } + case 8: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:88 + { + str := yyDollar[1].s + logDebugGrammar("STRING - %s", str) + var q FieldableQuery + if strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") { + q = NewRegexpQuery(str[1 : len(str)-1]) + } else if strings.ContainsAny(str, "*?") { + q = NewWildcardQuery(str) + } else { + q = NewMatchQuery(str) + } + yyVAL.q = q + } + case 9: + 
yyDollar = yyS[yypt-2 : yypt+1] + //line query_string.y:102 + { + str := yyDollar[1].s + fuzziness, err := strconv.ParseFloat(yyDollar[2].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid fuzziness value: %v", err)) + } + logDebugGrammar("FUZZY STRING - %s %f", str, fuzziness) + q := NewMatchQuery(str) + q.SetFuzziness(int(fuzziness)) + yyVAL.q = q + } + case 10: + yyDollar = yyS[yypt-4 : yypt+1] + //line query_string.y:114 + { + field := yyDollar[1].s + str := yyDollar[3].s + fuzziness, err := strconv.ParseFloat(yyDollar[4].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid fuzziness value: %v", err)) + } + logDebugGrammar("FIELD - %s FUZZY STRING - %s %f", field, str, fuzziness) + q := NewMatchQuery(str) + q.SetFuzziness(int(fuzziness)) + q.SetField(field) + yyVAL.q = q + } + case 11: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:128 + { + str := yyDollar[1].s + logDebugGrammar("STRING - %s", str) + q1 := NewMatchQuery(str) + val, err := strconv.ParseFloat(yyDollar[1].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + inclusive := true + q2 := NewNumericRangeInclusiveQuery(&val, &val, &inclusive, &inclusive) + q := NewDisjunctionQuery([]Query{q1, q2}) + q.queryStringMode = true + yyVAL.q = q + } + case 12: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:143 + { + phrase := yyDollar[1].s + logDebugGrammar("PHRASE - %s", phrase) + q := NewMatchPhraseQuery(phrase) + yyVAL.q = q + } + case 13: + yyDollar = yyS[yypt-3 : yypt+1] + //line query_string.y:150 + { + field := yyDollar[1].s + str := yyDollar[3].s + logDebugGrammar("FIELD - %s STRING - %s", field, str) + var q FieldableQuery + if strings.HasPrefix(str, "/") && strings.HasSuffix(str, "/") { + q = NewRegexpQuery(str[1 : len(str)-1]) + } else if strings.ContainsAny(str, "*?") { + q = NewWildcardQuery(str) + } else { + q = NewMatchQuery(str) + } + q.SetField(field) + yyVAL.q 
= q + } + case 14: + yyDollar = yyS[yypt-3 : yypt+1] + //line query_string.y:166 + { + field := yyDollar[1].s + str := yyDollar[3].s + logDebugGrammar("FIELD - %s STRING - %s", field, str) + q1 := NewMatchQuery(str) + q1.SetField(field) + val, err := strconv.ParseFloat(yyDollar[3].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + inclusive := true + q2 := NewNumericRangeInclusiveQuery(&val, &val, &inclusive, &inclusive) + q2.SetField(field) + q := NewDisjunctionQuery([]Query{q1, q2}) + q.queryStringMode = true + yyVAL.q = q + } + case 15: + yyDollar = yyS[yypt-3 : yypt+1] + //line query_string.y:184 + { + field := yyDollar[1].s + phrase := yyDollar[3].s + logDebugGrammar("FIELD - %s PHRASE - %s", field, phrase) + q := NewMatchPhraseQuery(phrase) + q.SetField(field) + yyVAL.q = q + } + case 16: + yyDollar = yyS[yypt-4 : yypt+1] + //line query_string.y:193 + { + field := yyDollar[1].s + min, err := strconv.ParseFloat(yyDollar[4].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + minInclusive := false + logDebugGrammar("FIELD - GREATER THAN %f", min) + q := NewNumericRangeInclusiveQuery(&min, nil, &minInclusive, nil) + q.SetField(field) + yyVAL.q = q + } + case 17: + yyDollar = yyS[yypt-5 : yypt+1] + //line query_string.y:206 + { + field := yyDollar[1].s + min, err := strconv.ParseFloat(yyDollar[5].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + minInclusive := true + logDebugGrammar("FIELD - GREATER THAN OR EQUAL %f", min) + q := NewNumericRangeInclusiveQuery(&min, nil, &minInclusive, nil) + q.SetField(field) + yyVAL.q = q + } + case 18: + yyDollar = yyS[yypt-4 : yypt+1] + //line query_string.y:219 + { + field := yyDollar[1].s + max, err := strconv.ParseFloat(yyDollar[4].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + 
maxInclusive := false + logDebugGrammar("FIELD - LESS THAN %f", max) + q := NewNumericRangeInclusiveQuery(nil, &max, nil, &maxInclusive) + q.SetField(field) + yyVAL.q = q + } + case 19: + yyDollar = yyS[yypt-5 : yypt+1] + //line query_string.y:232 + { + field := yyDollar[1].s + max, err := strconv.ParseFloat(yyDollar[5].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("error parsing number: %v", err)) + } + maxInclusive := true + logDebugGrammar("FIELD - LESS THAN OR EQUAL %f", max) + q := NewNumericRangeInclusiveQuery(nil, &max, nil, &maxInclusive) + q.SetField(field) + yyVAL.q = q + } + case 20: + yyDollar = yyS[yypt-4 : yypt+1] + //line query_string.y:245 + { + field := yyDollar[1].s + minInclusive := false + phrase := yyDollar[4].s + + logDebugGrammar("FIELD - GREATER THAN DATE %s", phrase) + minTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(minTime, time.Time{}, &minInclusive, nil) + q.SetField(field) + yyVAL.q = q + } + case 21: + yyDollar = yyS[yypt-5 : yypt+1] + //line query_string.y:260 + { + field := yyDollar[1].s + minInclusive := true + phrase := yyDollar[5].s + + logDebugGrammar("FIELD - GREATER THAN OR EQUAL DATE %s", phrase) + minTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(minTime, time.Time{}, &minInclusive, nil) + q.SetField(field) + yyVAL.q = q + } + case 22: + yyDollar = yyS[yypt-4 : yypt+1] + //line query_string.y:275 + { + field := yyDollar[1].s + maxInclusive := false + phrase := yyDollar[4].s + + logDebugGrammar("FIELD - LESS THAN DATE %s", phrase) + maxTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(time.Time{}, maxTime, nil, &maxInclusive) + 
q.SetField(field) + yyVAL.q = q + } + case 23: + yyDollar = yyS[yypt-5 : yypt+1] + //line query_string.y:290 + { + field := yyDollar[1].s + maxInclusive := true + phrase := yyDollar[5].s + + logDebugGrammar("FIELD - LESS THAN OR EQUAL DATE %s", phrase) + maxTime, err := queryTimeFromString(phrase) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid time: %v", err)) + } + q := NewDateRangeInclusiveQuery(time.Time{}, maxTime, nil, &maxInclusive) + q.SetField(field) + yyVAL.q = q + } + case 24: + yyDollar = yyS[yypt-0 : yypt+1] + //line query_string.y:306 + { + yyVAL.pf = nil + } + case 25: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:310 + { + yyVAL.pf = nil + boost, err := strconv.ParseFloat(yyDollar[1].s, 64) + if err != nil { + yylex.(*lexerWrapper).lex.Error(fmt.Sprintf("invalid boost value: %v", err)) + } else { + yyVAL.pf = &boost + } + logDebugGrammar("BOOST %f", boost) + } + case 26: + yyDollar = yyS[yypt-1 : yypt+1] + //line query_string.y:322 + { + yyVAL.s = yyDollar[1].s + } + case 27: + yyDollar = yyS[yypt-2 : yypt+1] + //line query_string.y:326 + { + yyVAL.s = "-" + yyDollar[2].s + } + } + goto yystack /* stack new state and value */ +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/query_string_lex.go b/vendor/github.com/blevesearch/bleve/search/query/query_string_lex.go new file mode 100644 index 0000000..3a9cf23 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query_string_lex.go @@ -0,0 +1,323 @@ +// Copyright (c) 2016 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "bufio" + "io" + "strings" + "unicode" +) + +const reservedChars = "+-=&|>', '<', '=': + l.buf += string(next) + return singleCharOpState, true + case '^': + return inBoostState, true + case '~': + return inTildeState, true + } + + switch { + case !l.inEscape && next == '\\': + l.inEscape = true + return startState, true + case unicode.IsDigit(next): + l.buf += string(next) + return inNumOrStrState, true + case !unicode.IsSpace(next): + l.buf += string(next) + return inStrState, true + } + + // doesn't look like anything, just eat it and stay here + l.reset() + return startState, true +} + +func inPhraseState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + // unterminated phrase eats the phrase + if eof { + l.Error("unterminated quote") + return nil, false + } + + // only a non-escaped " ends the phrase + if !l.inEscape && next == '"' { + // end phrase + l.nextTokenType = tPHRASE + l.nextToken = &yySymType{ + s: l.buf, + } + logDebugTokens("PHRASE - '%s'", l.nextToken.s) + l.reset() + return startState, true + } else if !l.inEscape && next == '\\' { + l.inEscape = true + } else if l.inEscape { + // if in escape, end it + l.inEscape = false + l.buf += unescape(string(next)) + } else { + l.buf += string(next) + } + + return inPhraseState, true +} + +func singleCharOpState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + l.nextToken = &yySymType{} + + switch l.buf { + case "+": + l.nextTokenType = tPLUS + logDebugTokens("PLUS") + case "-": + l.nextTokenType = tMINUS + logDebugTokens("MINUS") + 
case ":": + l.nextTokenType = tCOLON + logDebugTokens("COLON") + case ">": + l.nextTokenType = tGREATER + logDebugTokens("GREATER") + case "<": + l.nextTokenType = tLESS + logDebugTokens("LESS") + case "=": + l.nextTokenType = tEQUAL + logDebugTokens("EQUAL") + } + + l.reset() + return startState, false +} + +func inBoostState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + + // only a non-escaped space ends the boost (or eof) + if eof || (!l.inEscape && next == ' ') { + // end boost + l.nextTokenType = tBOOST + if l.buf == "" { + l.buf = "1" + } + l.nextToken = &yySymType{ + s: l.buf, + } + logDebugTokens("BOOST - '%s'", l.nextToken.s) + l.reset() + return startState, true + } else if !l.inEscape && next == '\\' { + l.inEscape = true + } else if l.inEscape { + // if in escape, end it + l.inEscape = false + l.buf += unescape(string(next)) + } else { + l.buf += string(next) + } + + return inBoostState, true +} + +func inTildeState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + + // only a non-escaped space ends the tilde (or eof) + if eof || (!l.inEscape && next == ' ') { + // end tilde + l.nextTokenType = tTILDE + if l.buf == "" { + l.buf = "1" + } + l.nextToken = &yySymType{ + s: l.buf, + } + logDebugTokens("TILDE - '%s'", l.nextToken.s) + l.reset() + return startState, true + } else if !l.inEscape && next == '\\' { + l.inEscape = true + } else if l.inEscape { + // if in escape, end it + l.inEscape = false + l.buf += unescape(string(next)) + } else { + l.buf += string(next) + } + + return inTildeState, true +} + +func inNumOrStrState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + // only a non-escaped space ends the tilde (or eof) + if eof || (!l.inEscape && next == ' ') { + // end number + l.nextTokenType = tNUMBER + l.nextToken = &yySymType{ + s: l.buf, + } + logDebugTokens("NUMBER - '%s'", l.nextToken.s) + l.reset() + return startState, true + } else if !l.inEscape && next == '\\' { + l.inEscape = true + return 
inNumOrStrState, true + } else if l.inEscape { + // if in escape, end it + l.inEscape = false + l.buf += unescape(string(next)) + // go directly to string, no successfully or unsuccessfully + // escaped string results in a valid number + return inStrState, true + } + + // see where to go + if !l.seenDot && next == '.' { + // stay in this state + l.seenDot = true + l.buf += string(next) + return inNumOrStrState, true + } else if unicode.IsDigit(next) { + l.buf += string(next) + return inNumOrStrState, true + } + + // doesn't look like an number, transition + l.buf += string(next) + return inStrState, true +} + +func inStrState(l *queryStringLex, next rune, eof bool) (lexState, bool) { + // end on non-escped space, colon, tilde, boost (or eof) + if eof || (!l.inEscape && (next == ' ' || next == ':' || next == '^' || next == '~')) { + // end string + l.nextTokenType = tSTRING + l.nextToken = &yySymType{ + s: l.buf, + } + logDebugTokens("STRING - '%s'", l.nextToken.s) + l.reset() + + consumed := true + if !eof && (next == ':' || next == '^' || next == '~') { + consumed = false + } + + return startState, consumed + } else if !l.inEscape && next == '\\' { + l.inEscape = true + } else if l.inEscape { + // if in escape, end it + l.inEscape = false + l.buf += unescape(string(next)) + } else { + l.buf += string(next) + } + + return inStrState, true +} + +func logDebugTokens(format string, v ...interface{}) { + if debugLexer { + logger.Printf(format, v...) + } +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/query_string_parser.go b/vendor/github.com/blevesearch/bleve/search/query/query_string_parser.go new file mode 100644 index 0000000..3fb7731 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/query_string_parser.go @@ -0,0 +1,85 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// as of Go 1.8 this requires the goyacc external tool +// available from golang.org/x/tools/cmd/goyacc + +//go:generate goyacc -o query_string.y.go query_string.y +//go:generate sed -i.tmp -e 1d query_string.y.go +//go:generate rm query_string.y.go.tmp + +// note: OSX sed and gnu sed handle the -i (in-place) option differently. +// using -i.tmp works on both, at the expense of having to remove +// the unsightly .tmp files + +package query + +import ( + "fmt" + "strings" +) + +var debugParser bool +var debugLexer bool + +func parseQuerySyntax(query string) (rq Query, err error) { + if query == "" { + return NewMatchNoneQuery(), nil + } + lex := newLexerWrapper(newQueryStringLex(strings.NewReader(query))) + doParse(lex) + + if len(lex.errs) > 0 { + return nil, fmt.Errorf(strings.Join(lex.errs, "\n")) + } + return lex.query, nil +} + +func doParse(lex *lexerWrapper) { + defer func() { + r := recover() + if r != nil { + lex.errs = append(lex.errs, fmt.Sprintf("parse error: %v", r)) + } + }() + + yyParse(lex) +} + +const ( + queryShould = iota + queryMust + queryMustNot +) + +type lexerWrapper struct { + lex yyLexer + errs []string + query *BooleanQuery +} + +func newLexerWrapper(lex yyLexer) *lexerWrapper { + return &lexerWrapper{ + lex: lex, + query: NewBooleanQueryForQueryString(nil, nil, nil), + } +} + +func (l *lexerWrapper) Lex(lval *yySymType) int { + return l.lex.Lex(lval) +} + +func (l *lexerWrapper) Error(s string) { + l.errs = append(l.errs, s) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/regexp.go 
b/vendor/github.com/blevesearch/bleve/search/query/regexp.go new file mode 100644 index 0000000..0c87a6f --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/regexp.go @@ -0,0 +1,81 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "strings" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type RegexpQuery struct { + Regexp string `json:"regexp"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewRegexpQuery creates a new Query which finds +// documents containing terms that match the +// specified regular expression. The regexp pattern +// SHOULD NOT include ^ or $ modifiers, the search +// will only match entire terms even without them. 
+func NewRegexpQuery(regexp string) *RegexpQuery { + return &RegexpQuery{ + Regexp: regexp, + } +} + +func (q *RegexpQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *RegexpQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *RegexpQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *RegexpQuery) Field() string { + return q.FieldVal +} + +func (q *RegexpQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + // require that pattern NOT be anchored to start and end of term. + // do not attempt to remove trailing $, its presence is not + // known to interfere with LiteralPrefix() the way ^ does + // and removing $ introduces possible ambiguities with escaped \$, \\$, etc + actualRegexp := q.Regexp + if strings.HasPrefix(actualRegexp, "^") { + actualRegexp = actualRegexp[1:] // remove leading ^ + } + + return searcher.NewRegexpStringSearcher(i, actualRegexp, field, + q.BoostVal.Value(), options) +} + +func (q *RegexpQuery) Validate() error { + return nil // real validation delayed until searcher constructor +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/term.go b/vendor/github.com/blevesearch/bleve/search/query/term.go new file mode 100644 index 0000000..2eeb5a3 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/term.go @@ -0,0 +1,61 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type TermQuery struct { + Term string `json:"term"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewTermQuery creates a new Query for finding an +// exact term match in the index. +func NewTermQuery(term string) *TermQuery { + return &TermQuery{ + Term: term, + } +} + +func (q *TermQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *TermQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *TermQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *TermQuery) Field() string { + return q.FieldVal +} + +func (q *TermQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + return searcher.NewTermSearcher(i, q.Term, field, q.BoostVal.Value(), options) +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/term_range.go b/vendor/github.com/blevesearch/bleve/search/query/term_range.go new file mode 100644 index 0000000..8f8ca84 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/term_range.go @@ -0,0 +1,95 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +type TermRangeQuery struct { + Min string `json:"min,omitempty"` + Max string `json:"max,omitempty"` + InclusiveMin *bool `json:"inclusive_min,omitempty"` + InclusiveMax *bool `json:"inclusive_max,omitempty"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewTermRangeQuery creates a new Query for ranges +// of text term values. +// Either, but not both endpoints can be nil. +// The minimum value is inclusive. +// The maximum value is exclusive. +func NewTermRangeQuery(min, max string) *TermRangeQuery { + return NewTermRangeInclusiveQuery(min, max, nil, nil) +} + +// NewTermRangeInclusiveQuery creates a new Query for ranges +// of numeric values. +// Either, but not both endpoints can be nil. +// Control endpoint inclusion with inclusiveMin, inclusiveMax. 
+func NewTermRangeInclusiveQuery(min, max string, minInclusive, maxInclusive *bool) *TermRangeQuery { + return &TermRangeQuery{ + Min: min, + Max: max, + InclusiveMin: minInclusive, + InclusiveMax: maxInclusive, + } +} + +func (q *TermRangeQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *TermRangeQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *TermRangeQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *TermRangeQuery) Field() string { + return q.FieldVal +} + +func (q *TermRangeQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + var minTerm []byte + if q.Min != "" { + minTerm = []byte(q.Min) + } + var maxTerm []byte + if q.Max != "" { + maxTerm = []byte(q.Max) + } + return searcher.NewTermRangeSearcher(i, minTerm, maxTerm, q.InclusiveMin, q.InclusiveMax, field, q.BoostVal.Value(), options) +} + +func (q *TermRangeQuery) Validate() error { + if q.Min == "" && q.Min == q.Max { + return fmt.Errorf("term range query must specify min or max") + } + return nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/query/wildcard.go b/vendor/github.com/blevesearch/bleve/search/query/wildcard.go new file mode 100644 index 0000000..747dfe7 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/query/wildcard.go @@ -0,0 +1,93 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package query + +import ( + "strings" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/mapping" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/searcher" +) + +var wildcardRegexpReplacer = strings.NewReplacer( + // characters in the wildcard that must + // be escaped in the regexp + "+", `\+`, + "(", `\(`, + ")", `\)`, + "^", `\^`, + "$", `\$`, + ".", `\.`, + "{", `\{`, + "}", `\}`, + "[", `\[`, + "]", `\]`, + `|`, `\|`, + `\`, `\\`, + // wildcard characters + "*", ".*", + "?", ".") + +type WildcardQuery struct { + Wildcard string `json:"wildcard"` + FieldVal string `json:"field,omitempty"` + BoostVal *Boost `json:"boost,omitempty"` +} + +// NewWildcardQuery creates a new Query which finds +// documents containing terms that match the +// specified wildcard. In the wildcard pattern '*' +// will match any sequence of 0 or more characters, +// and '?' will match any single character. 
+func NewWildcardQuery(wildcard string) *WildcardQuery { + return &WildcardQuery{ + Wildcard: wildcard, + } +} + +func (q *WildcardQuery) SetBoost(b float64) { + boost := Boost(b) + q.BoostVal = &boost +} + +func (q *WildcardQuery) Boost() float64 { + return q.BoostVal.Value() +} + +func (q *WildcardQuery) SetField(f string) { + q.FieldVal = f +} + +func (q *WildcardQuery) Field() string { + return q.FieldVal +} + +func (q *WildcardQuery) Searcher(i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) { + field := q.FieldVal + if q.FieldVal == "" { + field = m.DefaultSearchField() + } + + regexpString := wildcardRegexpReplacer.Replace(q.Wildcard) + + return searcher.NewRegexpStringSearcher(i, regexpString, field, + q.BoostVal.Value(), options) +} + +func (q *WildcardQuery) Validate() error { + return nil // real validation delayed until searcher constructor +} diff --git a/vendor/github.com/blevesearch/bleve/search/scorer/scorer_conjunction.go b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_conjunction.go new file mode 100644 index 0000000..48cdf3a --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_conjunction.go @@ -0,0 +1,72 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorer + +import ( + "reflect" + + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeConjunctionQueryScorer int + +func init() { + var cqs ConjunctionQueryScorer + reflectStaticSizeConjunctionQueryScorer = int(reflect.TypeOf(cqs).Size()) +} + +type ConjunctionQueryScorer struct { + options search.SearcherOptions +} + +func (s *ConjunctionQueryScorer) Size() int { + return reflectStaticSizeConjunctionQueryScorer + size.SizeOfPtr +} + +func NewConjunctionQueryScorer(options search.SearcherOptions) *ConjunctionQueryScorer { + return &ConjunctionQueryScorer{ + options: options, + } +} + +func (s *ConjunctionQueryScorer) Score(ctx *search.SearchContext, constituents []*search.DocumentMatch) *search.DocumentMatch { + var sum float64 + var childrenExplanations []*search.Explanation + if s.options.Explain { + childrenExplanations = make([]*search.Explanation, len(constituents)) + } + + for i, docMatch := range constituents { + sum += docMatch.Score + if s.options.Explain { + childrenExplanations[i] = docMatch.Expl + } + } + newScore := sum + var newExpl *search.Explanation + if s.options.Explain { + newExpl = &search.Explanation{Value: sum, Message: "sum of:", Children: childrenExplanations} + } + + // reuse constituents[0] as the return value + rv := constituents[0] + rv.Score = newScore + rv.Expl = newExpl + rv.FieldTermLocations = search.MergeFieldTermLocations( + rv.FieldTermLocations, constituents[1:]) + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/scorer/scorer_constant.go b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_constant.go new file mode 100644 index 0000000..dc10fda --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_constant.go @@ -0,0 +1,127 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorer + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeConstantScorer int + +func init() { + var cs ConstantScorer + reflectStaticSizeConstantScorer = int(reflect.TypeOf(cs).Size()) +} + +type ConstantScorer struct { + constant float64 + boost float64 + options search.SearcherOptions + queryNorm float64 + queryWeight float64 + queryWeightExplanation *search.Explanation +} + +func (s *ConstantScorer) Size() int { + sizeInBytes := reflectStaticSizeConstantScorer + size.SizeOfPtr + + if s.queryWeightExplanation != nil { + sizeInBytes += s.queryWeightExplanation.Size() + } + + return sizeInBytes +} + +func NewConstantScorer(constant float64, boost float64, options search.SearcherOptions) *ConstantScorer { + rv := ConstantScorer{ + options: options, + queryWeight: 1.0, + constant: constant, + boost: boost, + } + + return &rv +} + +func (s *ConstantScorer) Weight() float64 { + sum := s.boost + return sum * sum +} + +func (s *ConstantScorer) SetQueryNorm(qnorm float64) { + s.queryNorm = qnorm + + // update the query weight + s.queryWeight = s.boost * s.queryNorm + + if s.options.Explain { + childrenExplanations := make([]*search.Explanation, 2) + childrenExplanations[0] = &search.Explanation{ + Value: s.boost, + Message: "boost", + } + childrenExplanations[1] = &search.Explanation{ + Value: s.queryNorm, + Message: "queryNorm", + } + s.queryWeightExplanation = &search.Explanation{ + Value: s.queryWeight, + Message: 
fmt.Sprintf("ConstantScore()^%f, product of:", s.boost), + Children: childrenExplanations, + } + } +} + +func (s *ConstantScorer) Score(ctx *search.SearchContext, id index.IndexInternalID) *search.DocumentMatch { + var scoreExplanation *search.Explanation + + score := s.constant + + if s.options.Explain { + scoreExplanation = &search.Explanation{ + Value: score, + Message: fmt.Sprintf("ConstantScore()"), + } + } + + // if the query weight isn't 1, multiply + if s.queryWeight != 1.0 { + score = score * s.queryWeight + if s.options.Explain { + childExplanations := make([]*search.Explanation, 2) + childExplanations[0] = s.queryWeightExplanation + childExplanations[1] = scoreExplanation + scoreExplanation = &search.Explanation{ + Value: score, + Message: fmt.Sprintf("weight(^%f), product of:", s.boost), + Children: childExplanations, + } + } + } + + rv := ctx.DocumentMatchPool.Get() + rv.IndexInternalID = id + rv.Score = score + if s.options.Explain { + rv.Expl = scoreExplanation + } + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/scorer/scorer_disjunction.go b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_disjunction.go new file mode 100644 index 0000000..7a955e1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_disjunction.go @@ -0,0 +1,83 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package scorer + +import ( + "fmt" + "reflect" + + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDisjunctionQueryScorer int + +func init() { + var dqs DisjunctionQueryScorer + reflectStaticSizeDisjunctionQueryScorer = int(reflect.TypeOf(dqs).Size()) +} + +type DisjunctionQueryScorer struct { + options search.SearcherOptions +} + +func (s *DisjunctionQueryScorer) Size() int { + return reflectStaticSizeDisjunctionQueryScorer + size.SizeOfPtr +} + +func NewDisjunctionQueryScorer(options search.SearcherOptions) *DisjunctionQueryScorer { + return &DisjunctionQueryScorer{ + options: options, + } +} + +func (s *DisjunctionQueryScorer) Score(ctx *search.SearchContext, constituents []*search.DocumentMatch, countMatch, countTotal int) *search.DocumentMatch { + var sum float64 + var childrenExplanations []*search.Explanation + if s.options.Explain { + childrenExplanations = make([]*search.Explanation, len(constituents)) + } + + for i, docMatch := range constituents { + sum += docMatch.Score + if s.options.Explain { + childrenExplanations[i] = docMatch.Expl + } + } + + var rawExpl *search.Explanation + if s.options.Explain { + rawExpl = &search.Explanation{Value: sum, Message: "sum of:", Children: childrenExplanations} + } + + coord := float64(countMatch) / float64(countTotal) + newScore := sum * coord + var newExpl *search.Explanation + if s.options.Explain { + ce := make([]*search.Explanation, 2) + ce[0] = rawExpl + ce[1] = &search.Explanation{Value: coord, Message: fmt.Sprintf("coord(%d/%d)", countMatch, countTotal)} + newExpl = &search.Explanation{Value: newScore, Message: "product of:", Children: ce} + } + + // reuse constituents[0] as the return value + rv := constituents[0] + rv.Score = newScore + rv.Expl = newExpl + rv.FieldTermLocations = search.MergeFieldTermLocations( + rv.FieldTermLocations, constituents[1:]) + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/scorer/scorer_term.go 
b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_term.go new file mode 100644 index 0000000..718de2e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/scorer/scorer_term.go @@ -0,0 +1,203 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorer + +import ( + "fmt" + "math" + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTermQueryScorer int + +func init() { + var tqs TermQueryScorer + reflectStaticSizeTermQueryScorer = int(reflect.TypeOf(tqs).Size()) +} + +type TermQueryScorer struct { + queryTerm string + queryField string + queryBoost float64 + docTerm uint64 + docTotal uint64 + idf float64 + options search.SearcherOptions + idfExplanation *search.Explanation + includeScore bool + queryNorm float64 + queryWeight float64 + queryWeightExplanation *search.Explanation +} + +func (s *TermQueryScorer) Size() int { + sizeInBytes := reflectStaticSizeTermQueryScorer + size.SizeOfPtr + + len(s.queryTerm) + len(s.queryField) + + if s.idfExplanation != nil { + sizeInBytes += s.idfExplanation.Size() + } + + if s.queryWeightExplanation != nil { + sizeInBytes += s.queryWeightExplanation.Size() + } + + return sizeInBytes +} + +func NewTermQueryScorer(queryTerm []byte, queryField string, queryBoost float64, docTotal, docTerm uint64, options search.SearcherOptions) *TermQueryScorer { + rv := 
TermQueryScorer{ + queryTerm: string(queryTerm), + queryField: queryField, + queryBoost: queryBoost, + docTerm: docTerm, + docTotal: docTotal, + idf: 1.0 + math.Log(float64(docTotal)/float64(docTerm+1.0)), + options: options, + queryWeight: 1.0, + includeScore: options.Score != "none", + } + + if options.Explain { + rv.idfExplanation = &search.Explanation{ + Value: rv.idf, + Message: fmt.Sprintf("idf(docFreq=%d, maxDocs=%d)", docTerm, docTotal), + } + } + + return &rv +} + +func (s *TermQueryScorer) Weight() float64 { + sum := s.queryBoost * s.idf + return sum * sum +} + +func (s *TermQueryScorer) SetQueryNorm(qnorm float64) { + s.queryNorm = qnorm + + // update the query weight + s.queryWeight = s.queryBoost * s.idf * s.queryNorm + + if s.options.Explain { + childrenExplanations := make([]*search.Explanation, 3) + childrenExplanations[0] = &search.Explanation{ + Value: s.queryBoost, + Message: "boost", + } + childrenExplanations[1] = s.idfExplanation + childrenExplanations[2] = &search.Explanation{ + Value: s.queryNorm, + Message: "queryNorm", + } + s.queryWeightExplanation = &search.Explanation{ + Value: s.queryWeight, + Message: fmt.Sprintf("queryWeight(%s:%s^%f), product of:", s.queryField, s.queryTerm, s.queryBoost), + Children: childrenExplanations, + } + } +} + +func (s *TermQueryScorer) Score(ctx *search.SearchContext, termMatch *index.TermFieldDoc) *search.DocumentMatch { + rv := ctx.DocumentMatchPool.Get() + // perform any score computations only when needed + if s.includeScore || s.options.Explain { + var scoreExplanation *search.Explanation + var tf float64 + if termMatch.Freq < MaxSqrtCache { + tf = SqrtCache[int(termMatch.Freq)] + } else { + tf = math.Sqrt(float64(termMatch.Freq)) + } + score := tf * termMatch.Norm * s.idf + + if s.options.Explain { + childrenExplanations := make([]*search.Explanation, 3) + childrenExplanations[0] = &search.Explanation{ + Value: tf, + Message: fmt.Sprintf("tf(termFreq(%s:%s)=%d", s.queryField, s.queryTerm, 
termMatch.Freq), + } + childrenExplanations[1] = &search.Explanation{ + Value: termMatch.Norm, + Message: fmt.Sprintf("fieldNorm(field=%s, doc=%s)", s.queryField, termMatch.ID), + } + childrenExplanations[2] = s.idfExplanation + scoreExplanation = &search.Explanation{ + Value: score, + Message: fmt.Sprintf("fieldWeight(%s:%s in %s), product of:", s.queryField, s.queryTerm, termMatch.ID), + Children: childrenExplanations, + } + } + + // if the query weight isn't 1, multiply + if s.queryWeight != 1.0 { + score = score * s.queryWeight + if s.options.Explain { + childExplanations := make([]*search.Explanation, 2) + childExplanations[0] = s.queryWeightExplanation + childExplanations[1] = scoreExplanation + scoreExplanation = &search.Explanation{ + Value: score, + Message: fmt.Sprintf("weight(%s:%s^%f in %s), product of:", s.queryField, s.queryTerm, s.queryBoost, termMatch.ID), + Children: childExplanations, + } + } + } + + if s.includeScore { + rv.Score = score + } + + if s.options.Explain { + rv.Expl = scoreExplanation + } + } + + rv.IndexInternalID = append(rv.IndexInternalID, termMatch.ID...) + + if len(termMatch.Vectors) > 0 { + if cap(rv.FieldTermLocations) < len(termMatch.Vectors) { + rv.FieldTermLocations = make([]search.FieldTermLocation, 0, len(termMatch.Vectors)) + } + + for _, v := range termMatch.Vectors { + var ap search.ArrayPositions + if len(v.ArrayPositions) > 0 { + n := len(rv.FieldTermLocations) + if n < cap(rv.FieldTermLocations) { // reuse ap slice if available + ap = rv.FieldTermLocations[:n+1][n].Location.ArrayPositions[:0] + } + ap = append(ap, v.ArrayPositions...) 
+ } + rv.FieldTermLocations = + append(rv.FieldTermLocations, search.FieldTermLocation{ + Field: v.Field, + Term: s.queryTerm, + Location: search.Location{ + Pos: v.Pos, + Start: v.Start, + End: v.End, + ArrayPositions: ap, + }, + }) + } + } + + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/scorer/sqrt_cache.go b/vendor/github.com/blevesearch/bleve/search/scorer/sqrt_cache.go new file mode 100644 index 0000000..e26d33d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/scorer/sqrt_cache.go @@ -0,0 +1,30 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package scorer + +import ( + "math" +) + +var SqrtCache []float64 + +const MaxSqrtCache = 64 + +func init() { + SqrtCache = make([]float64, MaxSqrtCache) + for i := 0; i < MaxSqrtCache; i++ { + SqrtCache[i] = math.Sqrt(float64(i)) + } +} diff --git a/vendor/github.com/blevesearch/bleve/search/search.go b/vendor/github.com/blevesearch/bleve/search/search.go new file mode 100644 index 0000000..8ed23de --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/search.go @@ -0,0 +1,378 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package search + +import ( + "fmt" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDocumentMatch int +var reflectStaticSizeSearchContext int +var reflectStaticSizeLocation int + +func init() { + var dm DocumentMatch + reflectStaticSizeDocumentMatch = int(reflect.TypeOf(dm).Size()) + var sc SearchContext + reflectStaticSizeSearchContext = int(reflect.TypeOf(sc).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +type ArrayPositions []uint64 + +func (ap ArrayPositions) Equals(other ArrayPositions) bool { + if len(ap) != len(other) { + return false + } + for i := range ap { + if ap[i] != other[i] { + return false + } + } + return true +} + +func (ap ArrayPositions) Compare(other ArrayPositions) int { + for i, p := range ap { + if i >= len(other) { + return 1 + } + if p < other[i] { + return -1 + } + if p > other[i] { + return 1 + } + } + if len(ap) < len(other) { + return -1 + } + return 0 +} + +type Location struct { + // Pos is the position of the term within the field, starting at 1 + Pos uint64 `json:"pos"` + + // Start and End are the byte offsets of the term in the field + Start uint64 `json:"start"` + End uint64 `json:"end"` + + // ArrayPositions contains the positions of the term within any elements. 
+ ArrayPositions ArrayPositions `json:"array_positions"` +} + +func (l *Location) Size() int { + return reflectStaticSizeLocation + size.SizeOfPtr + + len(l.ArrayPositions)*size.SizeOfUint64 +} + +type Locations []*Location + +func (p Locations) Len() int { return len(p) } +func (p Locations) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func (p Locations) Less(i, j int) bool { + c := p[i].ArrayPositions.Compare(p[j].ArrayPositions) + if c < 0 { + return true + } + if c > 0 { + return false + } + return p[i].Pos < p[j].Pos +} + +func (p Locations) Dedupe() Locations { // destructive! + if len(p) <= 1 { + return p + } + + sort.Sort(p) + + slow := 0 + + for _, pfast := range p { + pslow := p[slow] + if pslow.Pos == pfast.Pos && + pslow.Start == pfast.Start && + pslow.End == pfast.End && + pslow.ArrayPositions.Equals(pfast.ArrayPositions) { + continue // duplicate, so only move fast ahead + } + + slow++ + + p[slow] = pfast + } + + return p[:slow+1] +} + +type TermLocationMap map[string]Locations + +func (t TermLocationMap) AddLocation(term string, location *Location) { + t[term] = append(t[term], location) +} + +type FieldTermLocationMap map[string]TermLocationMap + +type FieldTermLocation struct { + Field string + Term string + Location Location +} + +type FieldFragmentMap map[string][]string + +type DocumentMatch struct { + Index string `json:"index,omitempty"` + ID string `json:"id"` + IndexInternalID index.IndexInternalID `json:"-"` + Score float64 `json:"score"` + Expl *Explanation `json:"explanation,omitempty"` + Locations FieldTermLocationMap `json:"locations,omitempty"` + Fragments FieldFragmentMap `json:"fragments,omitempty"` + Sort []string `json:"sort,omitempty"` + + // Fields contains the values for document fields listed in + // SearchRequest.Fields. Text fields are returned as strings, numeric + // fields as float64s and date fields as time.RFC3339 formatted strings. 
+ Fields map[string]interface{} `json:"fields,omitempty"` + + // used to maintain natural index order + HitNumber uint64 `json:"-"` + + // used to temporarily hold field term location information during + // search processing in an efficient, recycle-friendly manner, to + // be later incorporated into the Locations map when search + // results are completed + FieldTermLocations []FieldTermLocation `json:"-"` +} + +func (dm *DocumentMatch) AddFieldValue(name string, value interface{}) { + if dm.Fields == nil { + dm.Fields = make(map[string]interface{}) + } + existingVal, ok := dm.Fields[name] + if !ok { + dm.Fields[name] = value + return + } + + valSlice, ok := existingVal.([]interface{}) + if ok { + // already a slice, append to it + valSlice = append(valSlice, value) + } else { + // create a slice + valSlice = []interface{}{existingVal, value} + } + dm.Fields[name] = valSlice +} + +// Reset allows an already allocated DocumentMatch to be reused +func (dm *DocumentMatch) Reset() *DocumentMatch { + // remember the []byte used for the IndexInternalID + indexInternalID := dm.IndexInternalID + // remember the []interface{} used for sort + sort := dm.Sort + // remember the FieldTermLocations backing array + ftls := dm.FieldTermLocations + for i := range ftls { // recycle the ArrayPositions of each location + ftls[i].Location.ArrayPositions = ftls[i].Location.ArrayPositions[:0] + } + // idiom to copy over from empty DocumentMatch (0 allocations) + *dm = DocumentMatch{} + // reuse the []byte already allocated (and reset len to 0) + dm.IndexInternalID = indexInternalID[:0] + // reuse the []interface{} already allocated (and reset len to 0) + dm.Sort = sort[:0] + // reuse the FieldTermLocations already allocated (and reset len to 0) + dm.FieldTermLocations = ftls[:0] + return dm +} + +func (dm *DocumentMatch) Size() int { + sizeInBytes := reflectStaticSizeDocumentMatch + size.SizeOfPtr + + len(dm.Index) + + len(dm.ID) + + len(dm.IndexInternalID) + + if dm.Expl != nil { + 
sizeInBytes += dm.Expl.Size() + } + + for k, v := range dm.Locations { + sizeInBytes += size.SizeOfString + len(k) + for k1, v1 := range v { + sizeInBytes += size.SizeOfString + len(k1) + + size.SizeOfSlice + for _, entry := range v1 { + sizeInBytes += entry.Size() + } + } + } + + for k, v := range dm.Fragments { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfSlice + + for _, entry := range v { + sizeInBytes += size.SizeOfString + len(entry) + } + } + + for _, entry := range dm.Sort { + sizeInBytes += size.SizeOfString + len(entry) + } + + for k, _ := range dm.Fields { + sizeInBytes += size.SizeOfString + len(k) + + size.SizeOfPtr + } + + return sizeInBytes +} + +// Complete performs final preparation & transformation of the +// DocumentMatch at the end of search processing, also allowing the +// caller to provide an optional preallocated locations slice +func (dm *DocumentMatch) Complete(prealloc []Location) []Location { + // transform the FieldTermLocations slice into the Locations map + nlocs := len(dm.FieldTermLocations) + if nlocs > 0 { + if cap(prealloc) < nlocs { + prealloc = make([]Location, nlocs) + } + prealloc = prealloc[:nlocs] + + var lastField string + var tlm TermLocationMap + var needsDedupe bool + + for i, ftl := range dm.FieldTermLocations { + if lastField != ftl.Field { + lastField = ftl.Field + + if dm.Locations == nil { + dm.Locations = make(FieldTermLocationMap) + } + + tlm = dm.Locations[ftl.Field] + if tlm == nil { + tlm = make(TermLocationMap) + dm.Locations[ftl.Field] = tlm + } + } + + loc := &prealloc[i] + *loc = ftl.Location + + if len(loc.ArrayPositions) > 0 { // copy + loc.ArrayPositions = append(ArrayPositions(nil), loc.ArrayPositions...) 
+ } + + locs := tlm[ftl.Term] + + // if the loc is before or at the last location, then there + // might be duplicates that need to be deduplicated + if !needsDedupe && len(locs) > 0 { + last := locs[len(locs)-1] + cmp := loc.ArrayPositions.Compare(last.ArrayPositions) + if cmp < 0 || (cmp == 0 && loc.Pos <= last.Pos) { + needsDedupe = true + } + } + + tlm[ftl.Term] = append(locs, loc) + + dm.FieldTermLocations[i] = FieldTermLocation{ // recycle + Location: Location{ + ArrayPositions: ftl.Location.ArrayPositions[:0], + }, + } + } + + if needsDedupe { + for _, tlm := range dm.Locations { + for term, locs := range tlm { + tlm[term] = locs.Dedupe() + } + } + } + } + + dm.FieldTermLocations = dm.FieldTermLocations[:0] // recycle + + return prealloc +} + +func (dm *DocumentMatch) String() string { + return fmt.Sprintf("[%s-%f]", string(dm.IndexInternalID), dm.Score) +} + +type DocumentMatchCollection []*DocumentMatch + +func (c DocumentMatchCollection) Len() int { return len(c) } +func (c DocumentMatchCollection) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c DocumentMatchCollection) Less(i, j int) bool { return c[i].Score > c[j].Score } + +type Searcher interface { + Next(ctx *SearchContext) (*DocumentMatch, error) + Advance(ctx *SearchContext, ID index.IndexInternalID) (*DocumentMatch, error) + Close() error + Weight() float64 + SetQueryNorm(float64) + Count() uint64 + Min() int + Size() int + + DocumentMatchPoolSize() int +} + +type SearcherOptions struct { + Explain bool + IncludeTermVectors bool + Score string +} + +// SearchContext represents the context around a single search +type SearchContext struct { + DocumentMatchPool *DocumentMatchPool + Collector Collector + IndexReader index.IndexReader +} + +func (sc *SearchContext) Size() int { + sizeInBytes := reflectStaticSizeSearchContext + size.SizeOfPtr + + reflectStaticSizeDocumentMatchPool + size.SizeOfPtr + + if sc.DocumentMatchPool != nil { + for _, entry := range sc.DocumentMatchPool.avail { + if entry 
!= nil { + sizeInBytes += entry.Size() + } + } + } + + return sizeInBytes +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/ordered_searchers_list.go b/vendor/github.com/blevesearch/bleve/search/searcher/ordered_searchers_list.go new file mode 100644 index 0000000..536c593 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/ordered_searchers_list.go @@ -0,0 +1,35 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "github.com/blevesearch/bleve/search" +) + +type OrderedSearcherList []search.Searcher + +// sort.Interface + +func (otrl OrderedSearcherList) Len() int { + return len(otrl) +} + +func (otrl OrderedSearcherList) Less(i, j int) bool { + return otrl[i].Count() < otrl[j].Count() +} + +func (otrl OrderedSearcherList) Swap(i, j int) { + otrl[i], otrl[j] = otrl[j], otrl[i] +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_boolean.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_boolean.go new file mode 100644 index 0000000..7f0bfa4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_boolean.go @@ -0,0 +1,450 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "math" + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeBooleanSearcher int + +func init() { + var bs BooleanSearcher + reflectStaticSizeBooleanSearcher = int(reflect.TypeOf(bs).Size()) +} + +type BooleanSearcher struct { + indexReader index.IndexReader + mustSearcher search.Searcher + shouldSearcher search.Searcher + mustNotSearcher search.Searcher + queryNorm float64 + currMust *search.DocumentMatch + currShould *search.DocumentMatch + currMustNot *search.DocumentMatch + currentID index.IndexInternalID + min uint64 + scorer *scorer.ConjunctionQueryScorer + matches []*search.DocumentMatch + initialized bool + done bool +} + +func NewBooleanSearcher(indexReader index.IndexReader, mustSearcher search.Searcher, shouldSearcher search.Searcher, mustNotSearcher search.Searcher, options search.SearcherOptions) (*BooleanSearcher, error) { + // build our searcher + rv := BooleanSearcher{ + indexReader: indexReader, + mustSearcher: mustSearcher, + shouldSearcher: shouldSearcher, + mustNotSearcher: mustNotSearcher, + scorer: scorer.NewConjunctionQueryScorer(options), + matches: make([]*search.DocumentMatch, 2), + } + rv.computeQueryNorm() + return &rv, nil +} + +func (s *BooleanSearcher) Size() int { + sizeInBytes := reflectStaticSizeBooleanSearcher + size.SizeOfPtr + + if s.mustSearcher != nil { + sizeInBytes += s.mustSearcher.Size() + } + + if s.shouldSearcher != nil { 
+ sizeInBytes += s.shouldSearcher.Size() + } + + if s.mustNotSearcher != nil { + sizeInBytes += s.mustNotSearcher.Size() + } + + sizeInBytes += s.scorer.Size() + + for _, entry := range s.matches { + if entry != nil { + sizeInBytes += entry.Size() + } + } + + return sizeInBytes +} + +func (s *BooleanSearcher) computeQueryNorm() { + // first calculate sum of squared weights + sumOfSquaredWeights := 0.0 + if s.mustSearcher != nil { + sumOfSquaredWeights += s.mustSearcher.Weight() + } + if s.shouldSearcher != nil { + sumOfSquaredWeights += s.shouldSearcher.Weight() + } + + // now compute query norm from this + s.queryNorm = 1.0 / math.Sqrt(sumOfSquaredWeights) + // finally tell all the downstream searchers the norm + if s.mustSearcher != nil { + s.mustSearcher.SetQueryNorm(s.queryNorm) + } + if s.shouldSearcher != nil { + s.shouldSearcher.SetQueryNorm(s.queryNorm) + } +} + +func (s *BooleanSearcher) initSearchers(ctx *search.SearchContext) error { + var err error + // get all searchers pointing at their first match + if s.mustSearcher != nil { + if s.currMust != nil { + ctx.DocumentMatchPool.Put(s.currMust) + } + s.currMust, err = s.mustSearcher.Next(ctx) + if err != nil { + return err + } + } + + if s.shouldSearcher != nil { + if s.currShould != nil { + ctx.DocumentMatchPool.Put(s.currShould) + } + s.currShould, err = s.shouldSearcher.Next(ctx) + if err != nil { + return err + } + } + + if s.mustNotSearcher != nil { + if s.currMustNot != nil { + ctx.DocumentMatchPool.Put(s.currMustNot) + } + s.currMustNot, err = s.mustNotSearcher.Next(ctx) + if err != nil { + return err + } + } + + if s.mustSearcher != nil && s.currMust != nil { + s.currentID = s.currMust.IndexInternalID + } else if s.mustSearcher == nil && s.currShould != nil { + s.currentID = s.currShould.IndexInternalID + } else { + s.currentID = nil + } + + s.initialized = true + return nil +} + +func (s *BooleanSearcher) advanceNextMust(ctx *search.SearchContext, skipReturn *search.DocumentMatch) error { + var 
err error + + if s.mustSearcher != nil { + if s.currMust != skipReturn { + ctx.DocumentMatchPool.Put(s.currMust) + } + s.currMust, err = s.mustSearcher.Next(ctx) + if err != nil { + return err + } + } else { + if s.currShould != skipReturn { + ctx.DocumentMatchPool.Put(s.currShould) + } + s.currShould, err = s.shouldSearcher.Next(ctx) + if err != nil { + return err + } + } + + if s.mustSearcher != nil && s.currMust != nil { + s.currentID = s.currMust.IndexInternalID + } else if s.mustSearcher == nil && s.currShould != nil { + s.currentID = s.currShould.IndexInternalID + } else { + s.currentID = nil + } + return nil +} + +func (s *BooleanSearcher) Weight() float64 { + var rv float64 + if s.mustSearcher != nil { + rv += s.mustSearcher.Weight() + } + if s.shouldSearcher != nil { + rv += s.shouldSearcher.Weight() + } + + return rv +} + +func (s *BooleanSearcher) SetQueryNorm(qnorm float64) { + if s.mustSearcher != nil { + s.mustSearcher.SetQueryNorm(qnorm) + } + if s.shouldSearcher != nil { + s.shouldSearcher.SetQueryNorm(qnorm) + } +} + +func (s *BooleanSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + + if s.done { + return nil, nil + } + + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + + var err error + var rv *search.DocumentMatch + + for s.currentID != nil { + if s.currMustNot != nil { + cmp := s.currMustNot.IndexInternalID.Compare(s.currentID) + if cmp < 0 { + ctx.DocumentMatchPool.Put(s.currMustNot) + // advance must not searcher to our candidate entry + s.currMustNot, err = s.mustNotSearcher.Advance(ctx, s.currentID) + if err != nil { + return nil, err + } + if s.currMustNot != nil && s.currMustNot.IndexInternalID.Equals(s.currentID) { + // the candidate is excluded + err = s.advanceNextMust(ctx, nil) + if err != nil { + return nil, err + } + continue + } + } else if cmp == 0 { + // the candidate is excluded + err = s.advanceNextMust(ctx, nil) + if err != nil { + return nil, err + } + 
continue + } + } + + shouldCmpOrNil := 1 // NOTE: shouldCmp will also be 1 when currShould == nil. + if s.currShould != nil { + shouldCmpOrNil = s.currShould.IndexInternalID.Compare(s.currentID) + } + + if shouldCmpOrNil < 0 { + ctx.DocumentMatchPool.Put(s.currShould) + // advance should searcher to our candidate entry + s.currShould, err = s.shouldSearcher.Advance(ctx, s.currentID) + if err != nil { + return nil, err + } + if s.currShould != nil && s.currShould.IndexInternalID.Equals(s.currentID) { + // score bonus matches should + var cons []*search.DocumentMatch + if s.currMust != nil { + cons = s.matches + cons[0] = s.currMust + cons[1] = s.currShould + } else { + cons = s.matches[0:1] + cons[0] = s.currShould + } + rv = s.scorer.Score(ctx, cons) + err = s.advanceNextMust(ctx, rv) + if err != nil { + return nil, err + } + break + } else if s.shouldSearcher.Min() == 0 { + // match is OK anyway + cons := s.matches[0:1] + cons[0] = s.currMust + rv = s.scorer.Score(ctx, cons) + err = s.advanceNextMust(ctx, rv) + if err != nil { + return nil, err + } + break + } + } else if shouldCmpOrNil == 0 { + // score bonus matches should + var cons []*search.DocumentMatch + if s.currMust != nil { + cons = s.matches + cons[0] = s.currMust + cons[1] = s.currShould + } else { + cons = s.matches[0:1] + cons[0] = s.currShould + } + rv = s.scorer.Score(ctx, cons) + err = s.advanceNextMust(ctx, rv) + if err != nil { + return nil, err + } + break + } else if s.shouldSearcher == nil || s.shouldSearcher.Min() == 0 { + // match is OK anyway + cons := s.matches[0:1] + cons[0] = s.currMust + rv = s.scorer.Score(ctx, cons) + err = s.advanceNextMust(ctx, rv) + if err != nil { + return nil, err + } + break + } + + err = s.advanceNextMust(ctx, nil) + if err != nil { + return nil, err + } + } + + if rv == nil { + s.done = true + } + + return rv, nil +} + +func (s *BooleanSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + + if s.done { + 
return nil, nil + } + + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + + // Advance the searcher only if the cursor is trailing the lookup ID + if s.currentID == nil || s.currentID.Compare(ID) < 0 { + var err error + if s.mustSearcher != nil { + if s.currMust != nil { + ctx.DocumentMatchPool.Put(s.currMust) + } + s.currMust, err = s.mustSearcher.Advance(ctx, ID) + if err != nil { + return nil, err + } + } + + if s.shouldSearcher != nil { + if s.currShould != nil { + ctx.DocumentMatchPool.Put(s.currShould) + } + s.currShould, err = s.shouldSearcher.Advance(ctx, ID) + if err != nil { + return nil, err + } + } + + if s.mustNotSearcher != nil { + // Additional check for mustNotSearcher, whose cursor isn't tracked by + // currentID to prevent it from moving when the searcher's tracked + // position is already ahead of or at the requested ID. + if s.currMustNot == nil || s.currMustNot.IndexInternalID.Compare(ID) < 0 { + if s.currMustNot != nil { + ctx.DocumentMatchPool.Put(s.currMustNot) + } + s.currMustNot, err = s.mustNotSearcher.Advance(ctx, ID) + if err != nil { + return nil, err + } + } + } + + if s.mustSearcher != nil && s.currMust != nil { + s.currentID = s.currMust.IndexInternalID + } else if s.mustSearcher == nil && s.currShould != nil { + s.currentID = s.currShould.IndexInternalID + } else { + s.currentID = nil + } + } + + return s.Next(ctx) +} + +func (s *BooleanSearcher) Count() uint64 { + + // for now return a worst case + var sum uint64 + if s.mustSearcher != nil { + sum += s.mustSearcher.Count() + } + if s.shouldSearcher != nil { + sum += s.shouldSearcher.Count() + } + return sum +} + +func (s *BooleanSearcher) Close() error { + var err0, err1, err2 error + if s.mustSearcher != nil { + err0 = s.mustSearcher.Close() + } + if s.shouldSearcher != nil { + err1 = s.shouldSearcher.Close() + } + if s.mustNotSearcher != nil { + err2 = s.mustNotSearcher.Close() + } + if err0 != nil { + return err0 + } + if err1 != 
nil { + return err1 + } + if err2 != nil { + return err2 + } + return nil +} + +func (s *BooleanSearcher) Min() int { + return 0 +} + +func (s *BooleanSearcher) DocumentMatchPoolSize() int { + rv := 3 + if s.mustSearcher != nil { + rv += s.mustSearcher.DocumentMatchPoolSize() + } + if s.shouldSearcher != nil { + rv += s.shouldSearcher.DocumentMatchPoolSize() + } + if s.mustNotSearcher != nil { + rv += s.mustNotSearcher.DocumentMatchPoolSize() + } + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_conjunction.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_conjunction.go new file mode 100644 index 0000000..ac737bc --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_conjunction.go @@ -0,0 +1,284 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeConjunctionSearcher int + +func init() { + var cs ConjunctionSearcher + reflectStaticSizeConjunctionSearcher = int(reflect.TypeOf(cs).Size()) +} + +type ConjunctionSearcher struct { + indexReader index.IndexReader + searchers OrderedSearcherList + queryNorm float64 + currs []*search.DocumentMatch + maxIDIdx int + scorer *scorer.ConjunctionQueryScorer + initialized bool + options search.SearcherOptions +} + +func NewConjunctionSearcher(indexReader index.IndexReader, + qsearchers []search.Searcher, options search.SearcherOptions) ( + search.Searcher, error) { + // build the sorted downstream searchers + searchers := make(OrderedSearcherList, len(qsearchers)) + for i, searcher := range qsearchers { + searchers[i] = searcher + } + sort.Sort(searchers) + + // attempt the "unadorned" conjunction optimization only when we + // do not need extra information like freq-norm's or term vectors + if len(searchers) > 1 && + options.Score == "none" && !options.IncludeTermVectors { + rv, err := optimizeCompositeSearcher("conjunction:unadorned", + indexReader, searchers, options) + if err != nil || rv != nil { + return rv, err + } + } + + // build our searcher + rv := ConjunctionSearcher{ + indexReader: indexReader, + options: options, + searchers: searchers, + currs: make([]*search.DocumentMatch, len(searchers)), + scorer: scorer.NewConjunctionQueryScorer(options), + } + rv.computeQueryNorm() + + // attempt push-down conjunction optimization when there's >1 searchers + if len(searchers) > 1 { + rv, err := optimizeCompositeSearcher("conjunction", + indexReader, searchers, options) + if err != nil || rv != nil { + return rv, err + } + } + + return &rv, nil +} + +func (s *ConjunctionSearcher) Size() int { + sizeInBytes := 
reflectStaticSizeConjunctionSearcher + size.SizeOfPtr + + s.scorer.Size() + + for _, entry := range s.searchers { + sizeInBytes += entry.Size() + } + + for _, entry := range s.currs { + if entry != nil { + sizeInBytes += entry.Size() + } + } + + return sizeInBytes +} + +func (s *ConjunctionSearcher) computeQueryNorm() { + // first calculate sum of squared weights + sumOfSquaredWeights := 0.0 + for _, searcher := range s.searchers { + sumOfSquaredWeights += searcher.Weight() + } + // now compute query norm from this + s.queryNorm = 1.0 / math.Sqrt(sumOfSquaredWeights) + // finally tell all the downstream searchers the norm + for _, searcher := range s.searchers { + searcher.SetQueryNorm(s.queryNorm) + } +} + +func (s *ConjunctionSearcher) initSearchers(ctx *search.SearchContext) error { + var err error + // get all searchers pointing at their first match + for i, searcher := range s.searchers { + if s.currs[i] != nil { + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = searcher.Next(ctx) + if err != nil { + return err + } + } + s.initialized = true + return nil +} + +func (s *ConjunctionSearcher) Weight() float64 { + var rv float64 + for _, searcher := range s.searchers { + rv += searcher.Weight() + } + return rv +} + +func (s *ConjunctionSearcher) SetQueryNorm(qnorm float64) { + for _, searcher := range s.searchers { + searcher.SetQueryNorm(qnorm) + } +} + +func (s *ConjunctionSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + var rv *search.DocumentMatch + var err error +OUTER: + for s.maxIDIdx < len(s.currs) && s.currs[s.maxIDIdx] != nil { + maxID := s.currs[s.maxIDIdx].IndexInternalID + + i := 0 + for i < len(s.currs) { + if s.currs[i] == nil { + return nil, nil + } + + if i == s.maxIDIdx { + i++ + continue + } + + cmp := maxID.Compare(s.currs[i].IndexInternalID) + if cmp == 0 { + i++ + continue + } + + if cmp < 0 { + // maxID < 
currs[i], so we found a new maxIDIdx + s.maxIDIdx = i + + // advance the positions where [0 <= x < i], since we + // know they were equal to the former max entry + maxID = s.currs[s.maxIDIdx].IndexInternalID + for x := 0; x < i; x++ { + err = s.advanceChild(ctx, x, maxID) + if err != nil { + return nil, err + } + } + + continue OUTER + } + + // maxID > currs[i], so need to advance searchers[i] + err = s.advanceChild(ctx, i, maxID) + if err != nil { + return nil, err + } + + // don't bump i, so that we'll examine the just-advanced + // currs[i] again + } + + // if we get here, a doc matched all readers, so score and add it + rv = s.scorer.Score(ctx, s.currs) + + // we know all the searchers are pointing at the same thing + // so they all need to be bumped + for i, searcher := range s.searchers { + if s.currs[i] != rv { + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = searcher.Next(ctx) + if err != nil { + return nil, err + } + } + + // don't continue now, wait for the next call to Next() + break + } + return rv, nil +} + +func (s *ConjunctionSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + for i := range s.searchers { + if s.currs[i] != nil && s.currs[i].IndexInternalID.Compare(ID) >= 0 { + continue + } + err := s.advanceChild(ctx, i, ID) + if err != nil { + return nil, err + } + } + return s.Next(ctx) +} + +func (s *ConjunctionSearcher) advanceChild(ctx *search.SearchContext, i int, ID index.IndexInternalID) (err error) { + if s.currs[i] != nil { + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = s.searchers[i].Advance(ctx, ID) + return err +} + +func (s *ConjunctionSearcher) Count() uint64 { + // for now return a worst case + var sum uint64 + for _, searcher := range s.searchers { + sum += searcher.Count() + } + return sum +} + +func (s *ConjunctionSearcher) Close() (rv error) { + for 
_, searcher := range s.searchers { + err := searcher.Close() + if err != nil && rv == nil { + rv = err + } + } + return rv +} + +func (s *ConjunctionSearcher) Min() int { + return 0 +} + +func (s *ConjunctionSearcher) DocumentMatchPoolSize() int { + rv := len(s.currs) + for _, s := range s.searchers { + rv += s.DocumentMatchPoolSize() + } + return rv +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction.go new file mode 100644 index 0000000..f47da27 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction.go @@ -0,0 +1,113 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "fmt" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +// DisjunctionMaxClauseCount is a compile time setting that applications can +// adjust to non-zero value to cause the DisjunctionSearcher to return an +// error instead of exeucting searches when the size exceeds this value. +var DisjunctionMaxClauseCount = 0 + +// DisjunctionHeapTakeover is a compile time setting that applications can +// adjust to control when the DisjunctionSearcher will switch from a simple +// slice implementation to a heap implementation. 
+var DisjunctionHeapTakeover = 10 + +func NewDisjunctionSearcher(indexReader index.IndexReader, + qsearchers []search.Searcher, min float64, options search.SearcherOptions) ( + search.Searcher, error) { + return newDisjunctionSearcher(indexReader, qsearchers, min, options, true) +} + +func optionsDisjunctionOptimizable(options search.SearcherOptions) bool { + rv := options.Score == "none" && !options.IncludeTermVectors + return rv +} + +func newDisjunctionSearcher(indexReader index.IndexReader, + qsearchers []search.Searcher, min float64, options search.SearcherOptions, + limit bool) (search.Searcher, error) { + // attempt the "unadorned" disjunction optimization only when we + // do not need extra information like freq-norm's or term vectors + // and the requested min is simple + if len(qsearchers) > 1 && min <= 1 && + optionsDisjunctionOptimizable(options) { + rv, err := optimizeCompositeSearcher("disjunction:unadorned", + indexReader, qsearchers, options) + if err != nil || rv != nil { + return rv, err + } + } + + if len(qsearchers) > DisjunctionHeapTakeover { + return newDisjunctionHeapSearcher(indexReader, qsearchers, min, options, + limit) + } + return newDisjunctionSliceSearcher(indexReader, qsearchers, min, options, + limit) +} + +func optimizeCompositeSearcher(optimizationKind string, + indexReader index.IndexReader, qsearchers []search.Searcher, + options search.SearcherOptions) (search.Searcher, error) { + var octx index.OptimizableContext + + for _, searcher := range qsearchers { + o, ok := searcher.(index.Optimizable) + if !ok { + return nil, nil + } + + var err error + octx, err = o.Optimize(optimizationKind, octx) + if err != nil { + return nil, err + } + + if octx == nil { + return nil, nil + } + } + + optimized, err := octx.Finish() + if err != nil || optimized == nil { + return nil, err + } + + tfr, ok := optimized.(index.TermFieldReader) + if !ok { + return nil, nil + } + + return newTermSearcherFromReader(indexReader, tfr, + 
[]byte(optimizationKind), "*", 1.0, options) +} + +func tooManyClauses(count int) bool { + if DisjunctionMaxClauseCount != 0 && count > DisjunctionMaxClauseCount { + return true + } + return false +} + +func tooManyClausesErr(field string, count int) error { + return fmt.Errorf("TooManyClauses over field: `%s` [%d > maxClauseCount,"+ + " which is set to %d]", field, count, DisjunctionMaxClauseCount) +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_heap.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_heap.go new file mode 100644 index 0000000..7f0a5a0 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_heap.go @@ -0,0 +1,343 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "bytes" + "container/heap" + "math" + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDisjunctionHeapSearcher int +var reflectStaticSizeSearcherCurr int + +func init() { + var dhs DisjunctionHeapSearcher + reflectStaticSizeDisjunctionHeapSearcher = int(reflect.TypeOf(dhs).Size()) + + var sc SearcherCurr + reflectStaticSizeSearcherCurr = int(reflect.TypeOf(sc).Size()) +} + +type SearcherCurr struct { + searcher search.Searcher + curr *search.DocumentMatch +} + +type DisjunctionHeapSearcher struct { + indexReader index.IndexReader + + numSearchers int + scorer *scorer.DisjunctionQueryScorer + min int + queryNorm float64 + initialized bool + searchers []search.Searcher + heap []*SearcherCurr + + matching []*search.DocumentMatch + matchingCurrs []*SearcherCurr +} + +func newDisjunctionHeapSearcher(indexReader index.IndexReader, + searchers []search.Searcher, min float64, options search.SearcherOptions, + limit bool) ( + *DisjunctionHeapSearcher, error) { + if limit && tooManyClauses(len(searchers)) { + return nil, tooManyClausesErr("", len(searchers)) + } + + // build our searcher + rv := DisjunctionHeapSearcher{ + indexReader: indexReader, + searchers: searchers, + numSearchers: len(searchers), + scorer: scorer.NewDisjunctionQueryScorer(options), + min: int(min), + matching: make([]*search.DocumentMatch, len(searchers)), + matchingCurrs: make([]*SearcherCurr, len(searchers)), + heap: make([]*SearcherCurr, 0, len(searchers)), + } + rv.computeQueryNorm() + return &rv, nil +} + +func (s *DisjunctionHeapSearcher) Size() int { + sizeInBytes := reflectStaticSizeDisjunctionHeapSearcher + size.SizeOfPtr + + s.scorer.Size() + + for _, entry := range s.searchers { + sizeInBytes += entry.Size() + } + + for _, entry := range s.matching { + if entry != nil { + sizeInBytes += entry.Size() + } + } 
+ + // for matchingCurrs and heap, just use static size * len + // since searchers and document matches already counted above + sizeInBytes += len(s.matchingCurrs) * reflectStaticSizeSearcherCurr + sizeInBytes += len(s.heap) * reflectStaticSizeSearcherCurr + + return sizeInBytes +} + +func (s *DisjunctionHeapSearcher) computeQueryNorm() { + // first calculate sum of squared weights + sumOfSquaredWeights := 0.0 + for _, searcher := range s.searchers { + sumOfSquaredWeights += searcher.Weight() + } + // now compute query norm from this + s.queryNorm = 1.0 / math.Sqrt(sumOfSquaredWeights) + // finally tell all the downstream searchers the norm + for _, searcher := range s.searchers { + searcher.SetQueryNorm(s.queryNorm) + } +} + +func (s *DisjunctionHeapSearcher) initSearchers(ctx *search.SearchContext) error { + // alloc a single block of SearcherCurrs + block := make([]SearcherCurr, len(s.searchers)) + + // get all searchers pointing at their first match + for i, searcher := range s.searchers { + curr, err := searcher.Next(ctx) + if err != nil { + return err + } + if curr != nil { + block[i].searcher = searcher + block[i].curr = curr + heap.Push(s, &block[i]) + } + } + + err := s.updateMatches() + if err != nil { + return err + } + s.initialized = true + return nil +} + +func (s *DisjunctionHeapSearcher) updateMatches() error { + matching := s.matching[:0] + matchingCurrs := s.matchingCurrs[:0] + + if len(s.heap) > 0 { + + // top of the heap is our next hit + next := heap.Pop(s).(*SearcherCurr) + matching = append(matching, next.curr) + matchingCurrs = append(matchingCurrs, next) + + // now as long as top of heap matches, keep popping + for len(s.heap) > 0 && bytes.Compare(next.curr.IndexInternalID, s.heap[0].curr.IndexInternalID) == 0 { + next = heap.Pop(s).(*SearcherCurr) + matching = append(matching, next.curr) + matchingCurrs = append(matchingCurrs, next) + } + } + + s.matching = matching + s.matchingCurrs = matchingCurrs + + return nil +} + +func (s 
*DisjunctionHeapSearcher) Weight() float64 { + var rv float64 + for _, searcher := range s.searchers { + rv += searcher.Weight() + } + return rv +} + +func (s *DisjunctionHeapSearcher) SetQueryNorm(qnorm float64) { + for _, searcher := range s.searchers { + searcher.SetQueryNorm(qnorm) + } +} + +func (s *DisjunctionHeapSearcher) Next(ctx *search.SearchContext) ( + *search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + + var rv *search.DocumentMatch + found := false + for !found && len(s.matching) > 0 { + if len(s.matching) >= s.min { + found = true + // score this match + rv = s.scorer.Score(ctx, s.matching, len(s.matching), s.numSearchers) + } + + // invoke next on all the matching searchers + for _, matchingCurr := range s.matchingCurrs { + if matchingCurr.curr != rv { + ctx.DocumentMatchPool.Put(matchingCurr.curr) + } + curr, err := matchingCurr.searcher.Next(ctx) + if err != nil { + return nil, err + } + if curr != nil { + matchingCurr.curr = curr + heap.Push(s, matchingCurr) + } + } + + err := s.updateMatches() + if err != nil { + return nil, err + } + } + + return rv, nil +} + +func (s *DisjunctionHeapSearcher) Advance(ctx *search.SearchContext, + ID index.IndexInternalID) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + + // if there is anything in matching, toss it back onto the heap + for _, matchingCurr := range s.matchingCurrs { + heap.Push(s, matchingCurr) + } + s.matching = s.matching[:0] + s.matchingCurrs = s.matchingCurrs[:0] + + // find all searchers that actually need to be advanced + // advance them, using s.matchingCurrs as temp storage + for len(s.heap) > 0 && bytes.Compare(s.heap[0].curr.IndexInternalID, ID) < 0 { + searcherCurr := heap.Pop(s).(*SearcherCurr) + ctx.DocumentMatchPool.Put(searcherCurr.curr) + curr, err := searcherCurr.searcher.Advance(ctx, ID) + if err != nil { + return nil, 
err + } + if curr != nil { + searcherCurr.curr = curr + s.matchingCurrs = append(s.matchingCurrs, searcherCurr) + } + } + // now all of the searchers that we advanced have to be pushed back + for _, matchingCurr := range s.matchingCurrs { + heap.Push(s, matchingCurr) + } + // reset our temp space + s.matchingCurrs = s.matchingCurrs[:0] + + err := s.updateMatches() + if err != nil { + return nil, err + } + + return s.Next(ctx) +} + +func (s *DisjunctionHeapSearcher) Count() uint64 { + // for now return a worst case + var sum uint64 + for _, searcher := range s.searchers { + sum += searcher.Count() + } + return sum +} + +func (s *DisjunctionHeapSearcher) Close() (rv error) { + for _, searcher := range s.searchers { + err := searcher.Close() + if err != nil && rv == nil { + rv = err + } + } + return rv +} + +func (s *DisjunctionHeapSearcher) Min() int { + return s.min +} + +func (s *DisjunctionHeapSearcher) DocumentMatchPoolSize() int { + rv := len(s.searchers) + for _, s := range s.searchers { + rv += s.DocumentMatchPoolSize() + } + return rv +} + +// a disjunction searcher implements the index.Optimizable interface +// but only activates on an edge case where the disjunction is a +// wrapper around a single Optimizable child searcher +func (s *DisjunctionHeapSearcher) Optimize(kind string, octx index.OptimizableContext) ( + index.OptimizableContext, error) { + if len(s.searchers) == 1 { + o, ok := s.searchers[0].(index.Optimizable) + if ok { + return o.Optimize(kind, octx) + } + } + + return nil, nil +} + +// heap impl + +func (s *DisjunctionHeapSearcher) Len() int { return len(s.heap) } + +func (s *DisjunctionHeapSearcher) Less(i, j int) bool { + if s.heap[i].curr == nil { + return true + } else if s.heap[j].curr == nil { + return false + } + return bytes.Compare(s.heap[i].curr.IndexInternalID, s.heap[j].curr.IndexInternalID) < 0 +} + +func (s *DisjunctionHeapSearcher) Swap(i, j int) { + s.heap[i], s.heap[j] = s.heap[j], s.heap[i] +} + +func (s 
*DisjunctionHeapSearcher) Push(x interface{}) { + s.heap = append(s.heap, x.(*SearcherCurr)) +} + +func (s *DisjunctionHeapSearcher) Pop() interface{} { + old := s.heap + n := len(old) + x := old[n-1] + s.heap = old[0 : n-1] + return x +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_slice.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_slice.go new file mode 100644 index 0000000..dc566ad --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_disjunction_slice.go @@ -0,0 +1,298 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDisjunctionSliceSearcher int + +func init() { + var ds DisjunctionSliceSearcher + reflectStaticSizeDisjunctionSliceSearcher = int(reflect.TypeOf(ds).Size()) +} + +type DisjunctionSliceSearcher struct { + indexReader index.IndexReader + searchers OrderedSearcherList + numSearchers int + queryNorm float64 + currs []*search.DocumentMatch + scorer *scorer.DisjunctionQueryScorer + min int + matching []*search.DocumentMatch + matchingIdxs []int + initialized bool +} + +func newDisjunctionSliceSearcher(indexReader index.IndexReader, + qsearchers []search.Searcher, min float64, options search.SearcherOptions, + limit bool) ( + *DisjunctionSliceSearcher, error) { + if limit && tooManyClauses(len(qsearchers)) { + return nil, tooManyClausesErr("", len(qsearchers)) + } + // build the downstream searchers + searchers := make(OrderedSearcherList, len(qsearchers)) + for i, searcher := range qsearchers { + searchers[i] = searcher + } + // sort the searchers + sort.Sort(sort.Reverse(searchers)) + // build our searcher + rv := DisjunctionSliceSearcher{ + indexReader: indexReader, + searchers: searchers, + numSearchers: len(searchers), + currs: make([]*search.DocumentMatch, len(searchers)), + scorer: scorer.NewDisjunctionQueryScorer(options), + min: int(min), + matching: make([]*search.DocumentMatch, len(searchers)), + matchingIdxs: make([]int, len(searchers)), + } + rv.computeQueryNorm() + return &rv, nil +} + +func (s *DisjunctionSliceSearcher) Size() int { + sizeInBytes := reflectStaticSizeDisjunctionSliceSearcher + size.SizeOfPtr + + s.scorer.Size() + + for _, entry := range s.searchers { + sizeInBytes += entry.Size() + } + + for _, entry := range s.currs { + if entry != nil { + sizeInBytes += entry.Size() + } + } + + for _, 
entry := range s.matching { + if entry != nil { + sizeInBytes += entry.Size() + } + } + + sizeInBytes += len(s.matchingIdxs) * size.SizeOfInt + + return sizeInBytes +} + +func (s *DisjunctionSliceSearcher) computeQueryNorm() { + // first calculate sum of squared weights + sumOfSquaredWeights := 0.0 + for _, searcher := range s.searchers { + sumOfSquaredWeights += searcher.Weight() + } + // now compute query norm from this + s.queryNorm = 1.0 / math.Sqrt(sumOfSquaredWeights) + // finally tell all the downstream searchers the norm + for _, searcher := range s.searchers { + searcher.SetQueryNorm(s.queryNorm) + } +} + +func (s *DisjunctionSliceSearcher) initSearchers(ctx *search.SearchContext) error { + var err error + // get all searchers pointing at their first match + for i, searcher := range s.searchers { + if s.currs[i] != nil { + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = searcher.Next(ctx) + if err != nil { + return err + } + } + + err = s.updateMatches() + if err != nil { + return err + } + + s.initialized = true + return nil +} + +func (s *DisjunctionSliceSearcher) updateMatches() error { + matching := s.matching[:0] + matchingIdxs := s.matchingIdxs[:0] + + for i := 0; i < len(s.currs); i++ { + curr := s.currs[i] + if curr == nil { + continue + } + + if len(matching) > 0 { + cmp := curr.IndexInternalID.Compare(matching[0].IndexInternalID) + if cmp > 0 { + continue + } + + if cmp < 0 { + matching = matching[:0] + matchingIdxs = matchingIdxs[:0] + } + } + + matching = append(matching, curr) + matchingIdxs = append(matchingIdxs, i) + } + + s.matching = matching + s.matchingIdxs = matchingIdxs + + return nil +} + +func (s *DisjunctionSliceSearcher) Weight() float64 { + var rv float64 + for _, searcher := range s.searchers { + rv += searcher.Weight() + } + return rv +} + +func (s *DisjunctionSliceSearcher) SetQueryNorm(qnorm float64) { + for _, searcher := range s.searchers { + searcher.SetQueryNorm(qnorm) + } +} + +func (s 
*DisjunctionSliceSearcher) Next(ctx *search.SearchContext) ( + *search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + var err error + var rv *search.DocumentMatch + + found := false + for !found && len(s.matching) > 0 { + if len(s.matching) >= s.min { + found = true + // score this match + rv = s.scorer.Score(ctx, s.matching, len(s.matching), s.numSearchers) + } + + // invoke next on all the matching searchers + for _, i := range s.matchingIdxs { + searcher := s.searchers[i] + if s.currs[i] != rv { + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = searcher.Next(ctx) + if err != nil { + return nil, err + } + } + + err = s.updateMatches() + if err != nil { + return nil, err + } + } + return rv, nil +} + +func (s *DisjunctionSliceSearcher) Advance(ctx *search.SearchContext, + ID index.IndexInternalID) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + // get all searchers pointing at their first match + var err error + for i, searcher := range s.searchers { + if s.currs[i] != nil { + if s.currs[i].IndexInternalID.Compare(ID) >= 0 { + continue + } + ctx.DocumentMatchPool.Put(s.currs[i]) + } + s.currs[i], err = searcher.Advance(ctx, ID) + if err != nil { + return nil, err + } + } + + err = s.updateMatches() + if err != nil { + return nil, err + } + + return s.Next(ctx) +} + +func (s *DisjunctionSliceSearcher) Count() uint64 { + // for now return a worst case + var sum uint64 + for _, searcher := range s.searchers { + sum += searcher.Count() + } + return sum +} + +func (s *DisjunctionSliceSearcher) Close() (rv error) { + for _, searcher := range s.searchers { + err := searcher.Close() + if err != nil && rv == nil { + rv = err + } + } + return rv +} + +func (s *DisjunctionSliceSearcher) Min() int { + return s.min +} + +func (s *DisjunctionSliceSearcher) DocumentMatchPoolSize() int { + rv := len(s.currs) + 
for _, s := range s.searchers { + rv += s.DocumentMatchPoolSize() + } + return rv +} + +// a disjunction searcher implements the index.Optimizable interface +// but only activates on an edge case where the disjunction is a +// wrapper around a single Optimizable child searcher +func (s *DisjunctionSliceSearcher) Optimize(kind string, octx index.OptimizableContext) ( + index.OptimizableContext, error) { + if len(s.searchers) == 1 { + o, ok := s.searchers[0].(index.Optimizable) + if ok { + return o.Optimize(kind, octx) + } + } + + return nil, nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_docid.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_docid.go new file mode 100644 index 0000000..3b258a5 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_docid.go @@ -0,0 +1,109 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeDocIDSearcher int + +func init() { + var ds DocIDSearcher + reflectStaticSizeDocIDSearcher = int(reflect.TypeOf(ds).Size()) +} + +// DocIDSearcher returns documents matching a predefined set of identifiers. 
+type DocIDSearcher struct { + reader index.DocIDReader + scorer *scorer.ConstantScorer + count int +} + +func NewDocIDSearcher(indexReader index.IndexReader, ids []string, boost float64, + options search.SearcherOptions) (searcher *DocIDSearcher, err error) { + + reader, err := indexReader.DocIDReaderOnly(ids) + if err != nil { + return nil, err + } + scorer := scorer.NewConstantScorer(1.0, boost, options) + return &DocIDSearcher{ + scorer: scorer, + reader: reader, + count: len(ids), + }, nil +} + +func (s *DocIDSearcher) Size() int { + return reflectStaticSizeDocIDSearcher + size.SizeOfPtr + + s.reader.Size() + + s.scorer.Size() +} + +func (s *DocIDSearcher) Count() uint64 { + return uint64(s.count) +} + +func (s *DocIDSearcher) Weight() float64 { + return s.scorer.Weight() +} + +func (s *DocIDSearcher) SetQueryNorm(qnorm float64) { + s.scorer.SetQueryNorm(qnorm) +} + +func (s *DocIDSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + docidMatch, err := s.reader.Next() + if err != nil { + return nil, err + } + if docidMatch == nil { + return nil, nil + } + + docMatch := s.scorer.Score(ctx, docidMatch) + return docMatch, nil +} + +func (s *DocIDSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + docidMatch, err := s.reader.Advance(ID) + if err != nil { + return nil, err + } + if docidMatch == nil { + return nil, nil + } + + docMatch := s.scorer.Score(ctx, docidMatch) + return docMatch, nil +} + +func (s *DocIDSearcher) Close() error { + return s.reader.Close() +} + +func (s *DocIDSearcher) Min() int { + return 0 +} + +func (s *DocIDSearcher) DocumentMatchPoolSize() int { + return 1 +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_filter.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_filter.go new file mode 100644 index 0000000..7c95fb4 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_filter.go @@ -0,0 +1,103 @@ +// 
Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeFilteringSearcher int + +func init() { + var fs FilteringSearcher + reflectStaticSizeFilteringSearcher = int(reflect.TypeOf(fs).Size()) +} + +// FilterFunc defines a function which can filter documents +// returning true means keep the document +// returning false means do not keep the document +type FilterFunc func(d *search.DocumentMatch) bool + +// FilteringSearcher wraps any other searcher, but checks any Next/Advance +// call against the supplied FilterFunc +type FilteringSearcher struct { + child search.Searcher + accept FilterFunc +} + +func NewFilteringSearcher(s search.Searcher, filter FilterFunc) *FilteringSearcher { + return &FilteringSearcher{ + child: s, + accept: filter, + } +} + +func (f *FilteringSearcher) Size() int { + return reflectStaticSizeFilteringSearcher + size.SizeOfPtr + + f.child.Size() +} + +func (f *FilteringSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + next, err := f.child.Next(ctx) + for next != nil && err == nil { + if f.accept(next) { + return next, nil + } + next, err = f.child.Next(ctx) + } + return nil, err +} + +func (f *FilteringSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, 
error) { + adv, err := f.child.Advance(ctx, ID) + if err != nil { + return nil, err + } + if adv == nil { + return nil, nil + } + if f.accept(adv) { + return adv, nil + } + return f.Next(ctx) +} + +func (f *FilteringSearcher) Close() error { + return f.child.Close() +} + +func (f *FilteringSearcher) Weight() float64 { + return f.child.Weight() +} + +func (f *FilteringSearcher) SetQueryNorm(n float64) { + f.child.SetQueryNorm(n) +} + +func (f *FilteringSearcher) Count() uint64 { + return f.child.Count() +} + +func (f *FilteringSearcher) Min() int { + return f.child.Min() +} + +func (f *FilteringSearcher) DocumentMatchPoolSize() int { + return f.child.DocumentMatchPoolSize() +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_fuzzy.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_fuzzy.go new file mode 100644 index 0000000..aca8a7d --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_fuzzy.go @@ -0,0 +1,117 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "fmt" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +var MaxFuzziness = 2 + +func NewFuzzySearcher(indexReader index.IndexReader, term string, + prefix, fuzziness int, field string, boost float64, + options search.SearcherOptions) (search.Searcher, error) { + + if fuzziness > MaxFuzziness { + return nil, fmt.Errorf("fuzziness exceeds max (%d)", MaxFuzziness) + } + + if fuzziness < 0 { + return nil, fmt.Errorf("invalid fuzziness, negative") + } + + // Note: we don't byte slice the term for a prefix because of runes. + prefixTerm := "" + for i, r := range term { + if i < prefix { + prefixTerm += string(r) + } else { + break + } + } + candidateTerms, err := findFuzzyCandidateTerms(indexReader, term, fuzziness, + field, prefixTerm) + if err != nil { + return nil, err + } + + return NewMultiTermSearcher(indexReader, candidateTerms, field, + boost, options, true) +} + +func findFuzzyCandidateTerms(indexReader index.IndexReader, term string, + fuzziness int, field, prefixTerm string) (rv []string, err error) { + rv = make([]string, 0) + + // in case of advanced reader implementations directly call + // the levenshtein automaton based iterator to collect the + // candidate terms + if ir, ok := indexReader.(index.IndexReaderFuzzy); ok { + fieldDict, err := ir.FieldDictFuzzy(field, term, fuzziness, prefixTerm) + if err != nil { + return nil, err + } + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + rv = append(rv, tfd.Term) + if tooManyClauses(len(rv)) { + return nil, tooManyClausesErr(field, len(rv)) + } + tfd, err = fieldDict.Next() + } + return rv, err + } + + var fieldDict index.FieldDict + if len(prefixTerm) > 0 { + fieldDict, err = indexReader.FieldDictPrefix(field, []byte(prefixTerm)) + } else { + fieldDict, err = indexReader.FieldDict(field) + } + if err != nil { + return nil, err 
+ } + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + // enumerate terms and check levenshtein distance + var reuse []int + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + var ld int + var exceeded bool + ld, exceeded, reuse = search.LevenshteinDistanceMaxReuseSlice(term, tfd.Term, fuzziness, reuse) + if !exceeded && ld <= fuzziness { + rv = append(rv, tfd.Term) + if tooManyClauses(len(rv)) { + return nil, tooManyClausesErr(field, len(rv)) + } + } + tfd, err = fieldDict.Next() + } + + return rv, err +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_geoboundingbox.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_geoboundingbox.go new file mode 100644 index 0000000..76157f0 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_geoboundingbox.go @@ -0,0 +1,272 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" +) + +type filterFunc func(key []byte) bool + +var GeoBitsShift1 = geo.GeoBits << 1 +var GeoBitsShift1Minus1 = GeoBitsShift1 - 1 + +func NewGeoBoundingBoxSearcher(indexReader index.IndexReader, minLon, minLat, + maxLon, maxLat float64, field string, boost float64, + options search.SearcherOptions, checkBoundaries bool) ( + search.Searcher, error) { + + // track list of opened searchers, for cleanup on early exit + var openedSearchers []search.Searcher + cleanupOpenedSearchers := func() { + for _, s := range openedSearchers { + _ = s.Close() + } + } + + // do math to produce list of terms needed for this search + onBoundaryTerms, notOnBoundaryTerms, err := ComputeGeoRange(0, GeoBitsShift1Minus1, + minLon, minLat, maxLon, maxLat, checkBoundaries, indexReader, field) + if err != nil { + return nil, err + } + + var onBoundarySearcher search.Searcher + dvReader, err := indexReader.DocValueReader([]string{field}) + if err != nil { + return nil, err + } + + if len(onBoundaryTerms) > 0 { + rawOnBoundarySearcher, err := NewMultiTermSearcherBytes(indexReader, + onBoundaryTerms, field, boost, options, false) + if err != nil { + return nil, err + } + // add filter to check points near the boundary + onBoundarySearcher = NewFilteringSearcher(rawOnBoundarySearcher, + buildRectFilter(dvReader, field, minLon, minLat, maxLon, maxLat)) + openedSearchers = append(openedSearchers, onBoundarySearcher) + } + + var notOnBoundarySearcher search.Searcher + if len(notOnBoundaryTerms) > 0 { + var err error + notOnBoundarySearcher, err = NewMultiTermSearcherBytes(indexReader, + notOnBoundaryTerms, field, boost, options, false) + if err != nil { + cleanupOpenedSearchers() + return nil, err + } + openedSearchers = append(openedSearchers, notOnBoundarySearcher) + } + + 
if onBoundarySearcher != nil && notOnBoundarySearcher != nil { + rv, err := NewDisjunctionSearcher(indexReader, + []search.Searcher{ + onBoundarySearcher, + notOnBoundarySearcher, + }, + 0, options) + if err != nil { + cleanupOpenedSearchers() + return nil, err + } + return rv, nil + } else if onBoundarySearcher != nil { + return onBoundarySearcher, nil + } else if notOnBoundarySearcher != nil { + return notOnBoundarySearcher, nil + } + + return NewMatchNoneSearcher(indexReader) +} + +var geoMaxShift = document.GeoPrecisionStep * 4 +var geoDetailLevel = ((geo.GeoBits << 1) - geoMaxShift) / 2 +type closeFunc func() error + +func ComputeGeoRange(term uint64, shift uint, + sminLon, sminLat, smaxLon, smaxLat float64, checkBoundaries bool, + indexReader index.IndexReader, field string) ( + onBoundary [][]byte, notOnBoundary [][]byte, err error) { + + isIndexed, closeF, err := buildIsIndexedFunc(indexReader, field) + if closeF != nil { + defer func() { + cerr := closeF() + if cerr != nil { + err = cerr + } + }() + } + + grc := &geoRangeCompute{ + preallocBytesLen: 32, + preallocBytes: make([]byte, 32), + sminLon: sminLon, + sminLat: sminLat, + smaxLon: smaxLon, + smaxLat: smaxLat, + checkBoundaries: checkBoundaries, + isIndexed: isIndexed, + } + + grc.computeGeoRange(term, shift) + + return grc.onBoundary, grc.notOnBoundary, nil +} + +func buildIsIndexedFunc(indexReader index.IndexReader, field string) (isIndexed filterFunc, closeF closeFunc, err error) { + if irr, ok := indexReader.(index.IndexReaderContains); ok { + fieldDict, err := irr.FieldDictContains(field) + if err != nil { + return nil, nil, err + } + + isIndexed = func(term []byte) bool { + found, err := fieldDict.Contains(term) + return err == nil && found + } + + closeF = func() error { + if fd, ok := fieldDict.(index.FieldDict); ok { + err := fd.Close() + if err != nil { + return err + } + } + return nil + } + } else if indexReader != nil { + isIndexed = func(term []byte) bool { + reader, err := 
indexReader.TermFieldReader(term, field, false, false, false) + if err != nil || reader == nil { + return false + } + if reader.Count() == 0 { + _ = reader.Close() + return false + } + _ = reader.Close() + return true + } + + } else { + isIndexed = func([]byte) bool { + return true + } + } + return isIndexed, closeF, err +} + +func buildRectFilter(dvReader index.DocValueReader, field string, + minLon, minLat, maxLon, maxLat float64) FilterFunc { + return func(d *search.DocumentMatch) bool { + // check geo matches against all numeric type terms indexed + var lons, lats []float64 + var found bool + err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) { + // only consider the values which are shifted 0 + prefixCoded := numeric.PrefixCoded(term) + shift, err := prefixCoded.Shift() + if err == nil && shift == 0 { + var i64 int64 + i64, err = prefixCoded.Int64() + if err == nil { + lons = append(lons, geo.MortonUnhashLon(uint64(i64))) + lats = append(lats, geo.MortonUnhashLat(uint64(i64))) + found = true + } + } + }) + if err == nil && found { + for i := range lons { + if geo.BoundingBoxContains(lons[i], lats[i], + minLon, minLat, maxLon, maxLat) { + return true + } + } + } + return false + } +} + +type geoRangeCompute struct { + preallocBytesLen int + preallocBytes []byte + sminLon, sminLat, smaxLon, smaxLat float64 + checkBoundaries bool + onBoundary, notOnBoundary [][]byte + isIndexed func(term []byte) bool +} + +func (grc *geoRangeCompute) makePrefixCoded(in int64, shift uint) (rv numeric.PrefixCoded) { + if len(grc.preallocBytes) <= 0 { + grc.preallocBytesLen = grc.preallocBytesLen * 2 + grc.preallocBytes = make([]byte, grc.preallocBytesLen) + } + + rv, grc.preallocBytes, _ = + numeric.NewPrefixCodedInt64Prealloc(in, shift, grc.preallocBytes) + + return rv +} + +func (grc *geoRangeCompute) computeGeoRange(term uint64, shift uint) { + split := term | uint64(0x1)<> 1 + + within := res%document.GeoPrecisionStep == 0 && + 
geo.RectWithin(minLon, minLat, maxLon, maxLat, + grc.sminLon, grc.sminLat, grc.smaxLon, grc.smaxLat) + if within || (level == geoDetailLevel && + geo.RectIntersects(minLon, minLat, maxLon, maxLat, + grc.sminLon, grc.sminLat, grc.smaxLon, grc.smaxLat)) { + codedTerm := grc.makePrefixCoded(int64(start), res) + if grc.isIndexed(codedTerm) { + if !within && grc.checkBoundaries { + grc.onBoundary = append(grc.onBoundary, codedTerm) + } else { + grc.notOnBoundary = append(grc.notOnBoundary, codedTerm) + } + } + } else if level < geoDetailLevel && + geo.RectIntersects(minLon, minLat, maxLon, maxLat, + grc.sminLon, grc.sminLat, grc.smaxLon, grc.smaxLat) { + grc.computeGeoRange(start, res-1) + } +} \ No newline at end of file diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_geopointdistance.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_geopointdistance.go new file mode 100644 index 0000000..b6f2932 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_geopointdistance.go @@ -0,0 +1,126 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" +) + +func NewGeoPointDistanceSearcher(indexReader index.IndexReader, centerLon, + centerLat, dist float64, field string, boost float64, + options search.SearcherOptions) (search.Searcher, error) { + // compute bounding box containing the circle + topLeftLon, topLeftLat, bottomRightLon, bottomRightLat, err := + geo.RectFromPointDistance(centerLon, centerLat, dist) + if err != nil { + return nil, err + } + + // build a searcher for the box + boxSearcher, err := boxSearcher(indexReader, + topLeftLon, topLeftLat, bottomRightLon, bottomRightLat, + field, boost, options, false) + if err != nil { + return nil, err + } + + dvReader, err := indexReader.DocValueReader([]string{field}) + if err != nil { + return nil, err + } + + // wrap it in a filtering searcher which checks the actual distance + return NewFilteringSearcher(boxSearcher, + buildDistFilter(dvReader, field, centerLon, centerLat, dist)), nil +} + +// boxSearcher builds a searcher for the described bounding box +// if the desired box crosses the dateline, it is automatically split into +// two boxes joined through a disjunction searcher +func boxSearcher(indexReader index.IndexReader, + topLeftLon, topLeftLat, bottomRightLon, bottomRightLat float64, + field string, boost float64, options search.SearcherOptions, checkBoundaries bool) ( + search.Searcher, error) { + if bottomRightLon < topLeftLon { + // cross date line, rewrite as two parts + + leftSearcher, err := NewGeoBoundingBoxSearcher(indexReader, + -180, bottomRightLat, bottomRightLon, topLeftLat, + field, boost, options, checkBoundaries) + if err != nil { + return nil, err + } + rightSearcher, err := NewGeoBoundingBoxSearcher(indexReader, + topLeftLon, bottomRightLat, 180, topLeftLat, field, boost, options, + checkBoundaries) + if err != nil { + _ = leftSearcher.Close() 
+ return nil, err + } + + boxSearcher, err := NewDisjunctionSearcher(indexReader, + []search.Searcher{leftSearcher, rightSearcher}, 0, options) + if err != nil { + _ = leftSearcher.Close() + _ = rightSearcher.Close() + return nil, err + } + return boxSearcher, nil + } + + // build geoboundingbox searcher for that bounding box + boxSearcher, err := NewGeoBoundingBoxSearcher(indexReader, + topLeftLon, bottomRightLat, bottomRightLon, topLeftLat, field, boost, + options, checkBoundaries) + if err != nil { + return nil, err + } + return boxSearcher, nil +} + +func buildDistFilter(dvReader index.DocValueReader, field string, + centerLon, centerLat, maxDist float64) FilterFunc { + return func(d *search.DocumentMatch) bool { + // check geo matches against all numeric type terms indexed + var lons, lats []float64 + var found bool + + err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) { + // only consider the values which are shifted 0 + prefixCoded := numeric.PrefixCoded(term) + shift, err := prefixCoded.Shift() + if err == nil && shift == 0 { + i64, err := prefixCoded.Int64() + if err == nil { + lons = append(lons, geo.MortonUnhashLon(uint64(i64))) + lats = append(lats, geo.MortonUnhashLat(uint64(i64))) + found = true + } + } + }) + if err == nil && found { + for i := range lons { + dist := geo.Haversin(lons[i], lats[i], centerLon, centerLat) + if dist <= maxDist/1000 { + return true + } + } + } + return false + } +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_geopolygon.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_geopolygon.go new file mode 100644 index 0000000..5f16aa8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_geopolygon.go @@ -0,0 +1,126 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "fmt" + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" + "math" +) + +func NewGeoBoundedPolygonSearcher(indexReader index.IndexReader, + polygon []geo.Point, field string, boost float64, + options search.SearcherOptions) (search.Searcher, error) { + + if len(polygon) < 3 { + return nil, fmt.Errorf("Too few points specified for the polygon boundary") + } + + // compute the bounding box enclosing the polygon + topLeftLon, topLeftLat, bottomRightLon, bottomRightLat, err := + geo.BoundingRectangleForPolygon(polygon) + if err != nil { + return nil, err + } + + // build a searcher for the bounding box on the polygon + boxSearcher, err := boxSearcher(indexReader, + topLeftLon, topLeftLat, bottomRightLon, bottomRightLat, + field, boost, options, true) + if err != nil { + return nil, err + } + + dvReader, err := indexReader.DocValueReader([]string{field}) + if err != nil { + return nil, err + } + + // wrap it in a filtering searcher that checks for the polygon inclusivity + return NewFilteringSearcher(boxSearcher, + buildPolygonFilter(dvReader, field, polygon)), nil +} + +const float64EqualityThreshold = 1e-6 + +func almostEqual(a, b float64) bool { + return math.Abs(a-b) <= float64EqualityThreshold +} + +// buildPolygonFilter returns true if the point lies inside the +// polygon. 
It is based on the ray-casting technique as referred +// here: https://wrf.ecse.rpi.edu/nikola/pubdetails/pnpoly.html +func buildPolygonFilter(dvReader index.DocValueReader, field string, + polygon []geo.Point) FilterFunc { + return func(d *search.DocumentMatch) bool { + // check geo matches against all numeric type terms indexed + var lons, lats []float64 + var found bool + + err := dvReader.VisitDocValues(d.IndexInternalID, func(field string, term []byte) { + // only consider the values which are shifted 0 + prefixCoded := numeric.PrefixCoded(term) + shift, err := prefixCoded.Shift() + if err == nil && shift == 0 { + i64, err := prefixCoded.Int64() + if err == nil { + lons = append(lons, geo.MortonUnhashLon(uint64(i64))) + lats = append(lats, geo.MortonUnhashLat(uint64(i64))) + found = true + } + } + }) + + // Note: this approach works for points which are strictly inside + // the polygon. ie it might fail for certain points on the polygon boundaries. + if err == nil && found { + nVertices := len(polygon) + if len(polygon) < 3 { + return false + } + rayIntersectsSegment := func(point, a, b geo.Point) bool { + return (a.Lat > point.Lat) != (b.Lat > point.Lat) && + point.Lon < (b.Lon-a.Lon)*(point.Lat-a.Lat)/(b.Lat-a.Lat)+a.Lon + } + + for i := range lons { + pt := geo.Point{Lon: lons[i], Lat: lats[i]} + inside := rayIntersectsSegment(pt, polygon[len(polygon)-1], polygon[0]) + // check for a direct vertex match + if almostEqual(polygon[0].Lat, lats[i]) && + almostEqual(polygon[0].Lon, lons[i]) { + return true + } + + for j := 1; j < nVertices; j++ { + if almostEqual(polygon[j].Lat, lats[i]) && + almostEqual(polygon[j].Lon, lons[i]) { + return true + } + if rayIntersectsSegment(pt, polygon[j-1], polygon[j]) { + inside = !inside + } + } + if inside { + return true + } + } + } + return false + } +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_match_all.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_match_all.go new file mode 
100644 index 0000000..bb66401 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_match_all.go @@ -0,0 +1,121 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeMatchAllSearcher int + +func init() { + var mas MatchAllSearcher + reflectStaticSizeMatchAllSearcher = int(reflect.TypeOf(mas).Size()) +} + +type MatchAllSearcher struct { + indexReader index.IndexReader + reader index.DocIDReader + scorer *scorer.ConstantScorer + count uint64 +} + +func NewMatchAllSearcher(indexReader index.IndexReader, boost float64, options search.SearcherOptions) (*MatchAllSearcher, error) { + reader, err := indexReader.DocIDReaderAll() + if err != nil { + return nil, err + } + count, err := indexReader.DocCount() + if err != nil { + _ = reader.Close() + return nil, err + } + scorer := scorer.NewConstantScorer(1.0, boost, options) + return &MatchAllSearcher{ + indexReader: indexReader, + reader: reader, + scorer: scorer, + count: count, + }, nil +} + +func (s *MatchAllSearcher) Size() int { + return reflectStaticSizeMatchAllSearcher + size.SizeOfPtr + + s.reader.Size() + + s.scorer.Size() +} + +func (s *MatchAllSearcher) Count() uint64 { + return s.count +} + +func (s 
*MatchAllSearcher) Weight() float64 { + return s.scorer.Weight() +} + +func (s *MatchAllSearcher) SetQueryNorm(qnorm float64) { + s.scorer.SetQueryNorm(qnorm) +} + +func (s *MatchAllSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + id, err := s.reader.Next() + if err != nil { + return nil, err + } + + if id == nil { + return nil, nil + } + + // score match + docMatch := s.scorer.Score(ctx, id) + // return doc match + return docMatch, nil + +} + +func (s *MatchAllSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + id, err := s.reader.Advance(ID) + if err != nil { + return nil, err + } + + if id == nil { + return nil, nil + } + + // score match + docMatch := s.scorer.Score(ctx, id) + + // return doc match + return docMatch, nil +} + +func (s *MatchAllSearcher) Close() error { + return s.reader.Close() +} + +func (s *MatchAllSearcher) Min() int { + return 0 +} + +func (s *MatchAllSearcher) DocumentMatchPoolSize() int { + return 1 +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_match_none.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_match_none.go new file mode 100644 index 0000000..a345e17 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_match_none.go @@ -0,0 +1,76 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package searcher + +import ( + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeMatchNoneSearcher int + +func init() { + var mns MatchNoneSearcher + reflectStaticSizeMatchNoneSearcher = int(reflect.TypeOf(mns).Size()) +} + +type MatchNoneSearcher struct { + indexReader index.IndexReader +} + +func NewMatchNoneSearcher(indexReader index.IndexReader) (*MatchNoneSearcher, error) { + return &MatchNoneSearcher{ + indexReader: indexReader, + }, nil +} + +func (s *MatchNoneSearcher) Size() int { + return reflectStaticSizeMatchNoneSearcher + size.SizeOfPtr +} + +func (s *MatchNoneSearcher) Count() uint64 { + return uint64(0) +} + +func (s *MatchNoneSearcher) Weight() float64 { + return 0.0 +} + +func (s *MatchNoneSearcher) SetQueryNorm(qnorm float64) { + +} + +func (s *MatchNoneSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + return nil, nil +} + +func (s *MatchNoneSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + return nil, nil +} + +func (s *MatchNoneSearcher) Close() error { + return nil +} + +func (s *MatchNoneSearcher) Min() int { + return 0 +} + +func (s *MatchNoneSearcher) DocumentMatchPoolSize() int { + return 0 +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_multi_term.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_multi_term.go new file mode 100644 index 0000000..70a2fa3 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_multi_term.go @@ -0,0 +1,215 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "fmt" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +func NewMultiTermSearcher(indexReader index.IndexReader, terms []string, + field string, boost float64, options search.SearcherOptions, limit bool) ( + search.Searcher, error) { + + if tooManyClauses(len(terms)) { + if optionsDisjunctionOptimizable(options) { + return optimizeMultiTermSearcher(indexReader, terms, field, boost, options) + } + if limit { + return nil, tooManyClausesErr(field, len(terms)) + } + } + + qsearchers, err := makeBatchSearchers(indexReader, terms, field, boost, options) + if err != nil { + return nil, err + } + + // build disjunction searcher of these ranges + return newMultiTermSearcherInternal(indexReader, qsearchers, field, boost, + options, limit) +} + +func NewMultiTermSearcherBytes(indexReader index.IndexReader, terms [][]byte, + field string, boost float64, options search.SearcherOptions, limit bool) ( + search.Searcher, error) { + + if tooManyClauses(len(terms)) { + if optionsDisjunctionOptimizable(options) { + return optimizeMultiTermSearcherBytes(indexReader, terms, field, boost, options) + } + + if limit { + return nil, tooManyClausesErr(field, len(terms)) + } + } + + qsearchers, err := makeBatchSearchersBytes(indexReader, terms, field, boost, options) + if err != nil { + return nil, err + } + + // build disjunction searcher of these ranges + return newMultiTermSearcherInternal(indexReader, qsearchers, field, boost, + options, limit) +} + +func newMultiTermSearcherInternal(indexReader 
index.IndexReader, + searchers []search.Searcher, field string, boost float64, + options search.SearcherOptions, limit bool) ( + search.Searcher, error) { + + // build disjunction searcher of these ranges + searcher, err := newDisjunctionSearcher(indexReader, searchers, 0, options, + limit) + if err != nil { + for _, s := range searchers { + _ = s.Close() + } + return nil, err + } + + return searcher, nil +} + +func optimizeMultiTermSearcher(indexReader index.IndexReader, terms []string, + field string, boost float64, options search.SearcherOptions) ( + search.Searcher, error) { + var finalSearcher search.Searcher + for len(terms) > 0 { + var batchTerms []string + if len(terms) > DisjunctionMaxClauseCount { + batchTerms = terms[:DisjunctionMaxClauseCount] + terms = terms[DisjunctionMaxClauseCount:] + } else { + batchTerms = terms + terms = nil + } + batch, err := makeBatchSearchers(indexReader, batchTerms, field, boost, options) + if err != nil { + return nil, err + } + if finalSearcher != nil { + batch = append(batch, finalSearcher) + } + cleanup := func() { + for _, searcher := range batch { + if searcher != nil { + _ = searcher.Close() + } + } + } + finalSearcher, err = optimizeCompositeSearcher("disjunction:unadorned", + indexReader, batch, options) + // all searchers in batch should be closed, regardless of error or optimization failure + // either we're returning, or continuing and only finalSearcher is needed for next loop + cleanup() + if err != nil { + return nil, err + } + if finalSearcher == nil { + return nil, fmt.Errorf("unable to optimize") + } + } + return finalSearcher, nil +} + +func makeBatchSearchers(indexReader index.IndexReader, terms []string, field string, + boost float64, options search.SearcherOptions) ([]search.Searcher, error) { + + qsearchers := make([]search.Searcher, len(terms)) + qsearchersClose := func() { + for _, searcher := range qsearchers { + if searcher != nil { + _ = searcher.Close() + } + } + } + for i, term := range terms { 
+ var err error + qsearchers[i], err = NewTermSearcher(indexReader, term, field, boost, options) + if err != nil { + qsearchersClose() + return nil, err + } + } + return qsearchers, nil +} + +func optimizeMultiTermSearcherBytes(indexReader index.IndexReader, terms [][]byte, + field string, boost float64, options search.SearcherOptions) ( + search.Searcher, error) { + + var finalSearcher search.Searcher + for len(terms) > 0 { + var batchTerms [][]byte + if len(terms) > DisjunctionMaxClauseCount { + batchTerms = terms[:DisjunctionMaxClauseCount] + terms = terms[DisjunctionMaxClauseCount:] + } else { + batchTerms = terms + terms = nil + } + batch, err := makeBatchSearchersBytes(indexReader, batchTerms, field, boost, options) + if err != nil { + return nil, err + } + if finalSearcher != nil { + batch = append(batch, finalSearcher) + } + cleanup := func() { + for _, searcher := range batch { + if searcher != nil { + _ = searcher.Close() + } + } + } + finalSearcher, err = optimizeCompositeSearcher("disjunction:unadorned", + indexReader, batch, options) + // all searchers in batch should be closed, regardless of error or optimization failure + // either we're returning, or continuing and only finalSearcher is needed for next loop + cleanup() + if err != nil { + return nil, err + } + if finalSearcher == nil { + return nil, fmt.Errorf("unable to optimize") + } + } + return finalSearcher, nil +} + +func makeBatchSearchersBytes(indexReader index.IndexReader, terms [][]byte, field string, + boost float64, options search.SearcherOptions) ([]search.Searcher, error) { + + qsearchers := make([]search.Searcher, len(terms)) + qsearchersClose := func() { + for _, searcher := range qsearchers { + if searcher != nil { + _ = searcher.Close() + } + } + } + for i, term := range terms { + var err error + qsearchers[i], err = NewTermSearcherBytes(indexReader, term, field, boost, options) + if err != nil { + qsearchersClose() + return nil, err + } + } + return qsearchers, nil +} diff --git 
a/vendor/github.com/blevesearch/bleve/search/searcher/search_numeric_range.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_numeric_range.go new file mode 100644 index 0000000..48d6226 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_numeric_range.go @@ -0,0 +1,260 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "bytes" + "math" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/numeric" + "github.com/blevesearch/bleve/search" +) + +func NewNumericRangeSearcher(indexReader index.IndexReader, + min *float64, max *float64, inclusiveMin, inclusiveMax *bool, field string, + boost float64, options search.SearcherOptions) (search.Searcher, error) { + // account for unbounded edges + if min == nil { + negInf := math.Inf(-1) + min = &negInf + } + if max == nil { + Inf := math.Inf(1) + max = &Inf + } + if inclusiveMin == nil { + defaultInclusiveMin := true + inclusiveMin = &defaultInclusiveMin + } + if inclusiveMax == nil { + defaultInclusiveMax := false + inclusiveMax = &defaultInclusiveMax + } + // find all the ranges + minInt64 := numeric.Float64ToInt64(*min) + if !*inclusiveMin && minInt64 != math.MaxInt64 { + minInt64++ + } + maxInt64 := numeric.Float64ToInt64(*max) + if !*inclusiveMax && maxInt64 != math.MinInt64 { + maxInt64-- + } + + var fieldDict index.FieldDictContains + var isIndexed filterFunc + var err 
error + if irr, ok := indexReader.(index.IndexReaderContains); ok { + fieldDict, err = irr.FieldDictContains(field) + if err != nil { + return nil, err + } + + isIndexed = func(term []byte) bool { + found, err := fieldDict.Contains(term) + return err == nil && found + } + } + + // FIXME hard-coded precision, should match field declaration + termRanges := splitInt64Range(minInt64, maxInt64, 4) + terms := termRanges.Enumerate(isIndexed) + if fieldDict != nil { + if fd, ok := fieldDict.(index.FieldDict); ok { + if err = fd.Close(); err != nil { + return nil, err + } + } + } + + if len(terms) < 1 { + // cannot return MatchNoneSearcher because of interaction with + // commit f391b991c20f02681bacd197afc6d8aed444e132 + return NewMultiTermSearcherBytes(indexReader, terms, field, boost, options, + true) + } + + // for upside_down + if isIndexed == nil { + terms, err = filterCandidateTerms(indexReader, terms, field) + if err != nil { + return nil, err + } + } + + if tooManyClauses(len(terms)) { + return nil, tooManyClausesErr(field, len(terms)) + } + + return NewMultiTermSearcherBytes(indexReader, terms, field, boost, options, + true) +} + +func filterCandidateTerms(indexReader index.IndexReader, + terms [][]byte, field string) (rv [][]byte, err error) { + + if ir, ok := indexReader.(index.IndexReaderOnly); ok { + fieldDict, err := ir.FieldDictOnly(field, terms, false) + if err != nil { + return nil, err + } + // enumerate the terms (no need to check them again) + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + rv = append(rv, []byte(tfd.Term)) + tfd, err = fieldDict.Next() + } + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + + return rv, err + } + + fieldDict, err := indexReader.FieldDictRange(field, terms[0], terms[len(terms)-1]) + if err != nil { + return nil, err + } + + // enumerate the terms and check against list of terms + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + termBytes := []byte(tfd.Term) + i := 
sort.Search(len(terms), func(i int) bool { return bytes.Compare(terms[i], termBytes) >= 0 }) + if i < len(terms) && bytes.Compare(terms[i], termBytes) == 0 { + rv = append(rv, terms[i]) + } + terms = terms[i:] + tfd, err = fieldDict.Next() + } + + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + + return rv, err +} + +type termRange struct { + startTerm []byte + endTerm []byte +} + +func (t *termRange) Enumerate(filter filterFunc) [][]byte { + var rv [][]byte + next := t.startTerm + for bytes.Compare(next, t.endTerm) <= 0 { + if filter != nil { + if filter(next) { + rv = append(rv, next) + } + } else { + rv = append(rv, next) + } + next = incrementBytes(next) + } + return rv +} + +func incrementBytes(in []byte) []byte { + rv := make([]byte, len(in)) + copy(rv, in) + for i := len(rv) - 1; i >= 0; i-- { + rv[i] = rv[i] + 1 + if rv[i] != 0 { + // didn't overflow, so stop + break + } + } + return rv +} + +type termRanges []*termRange + +func (tr termRanges) Enumerate(filter filterFunc) [][]byte { + var rv [][]byte + for _, tri := range tr { + trie := tri.Enumerate(filter) + rv = append(rv, trie...) 
+ } + return rv +} + +func splitInt64Range(minBound, maxBound int64, precisionStep uint) termRanges { + rv := make(termRanges, 0) + if minBound > maxBound { + return rv + } + + for shift := uint(0); ; shift += precisionStep { + + diff := int64(1) << (shift + precisionStep) + mask := ((int64(1) << precisionStep) - int64(1)) << shift + hasLower := (minBound & mask) != int64(0) + hasUpper := (maxBound & mask) != mask + + var nextMinBound int64 + if hasLower { + nextMinBound = (minBound + diff) &^ mask + } else { + nextMinBound = minBound &^ mask + } + var nextMaxBound int64 + if hasUpper { + nextMaxBound = (maxBound - diff) &^ mask + } else { + nextMaxBound = maxBound &^ mask + } + + lowerWrapped := nextMinBound < minBound + upperWrapped := nextMaxBound > maxBound + + if shift+precisionStep >= 64 || nextMinBound > nextMaxBound || + lowerWrapped || upperWrapped { + // We are in the lowest precision or the next precision is not available. + rv = append(rv, newRange(minBound, maxBound, shift)) + // exit the split recursion loop + break + } + + if hasLower { + rv = append(rv, newRange(minBound, minBound|mask, shift)) + } + if hasUpper { + rv = append(rv, newRange(maxBound&^mask, maxBound, shift)) + } + + // recurse to next precision + minBound = nextMinBound + maxBound = nextMaxBound + } + + return rv +} + +func newRange(minBound, maxBound int64, shift uint) *termRange { + maxBound |= (int64(1) << shift) - int64(1) + minBytes := numeric.MustNewPrefixCodedInt64(minBound, shift) + maxBytes := numeric.MustNewPrefixCodedInt64(maxBound, shift) + return newRangeBytes(minBytes, maxBytes) +} + +func newRangeBytes(minBytes, maxBytes []byte) *termRange { + return &termRange{ + startTerm: minBytes, + endTerm: maxBytes, + } +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_phrase.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_phrase.go new file mode 100644 index 0000000..51b7e5b --- /dev/null +++ 
b/vendor/github.com/blevesearch/bleve/search/searcher/search_phrase.go @@ -0,0 +1,437 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "fmt" + "math" + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePhraseSearcher int + +func init() { + var ps PhraseSearcher + reflectStaticSizePhraseSearcher = int(reflect.TypeOf(ps).Size()) +} + +type PhraseSearcher struct { + mustSearcher search.Searcher + queryNorm float64 + currMust *search.DocumentMatch + terms [][]string + path phrasePath + paths []phrasePath + locations []search.Location + initialized bool +} + +func (s *PhraseSearcher) Size() int { + sizeInBytes := reflectStaticSizePhraseSearcher + size.SizeOfPtr + + if s.mustSearcher != nil { + sizeInBytes += s.mustSearcher.Size() + } + + if s.currMust != nil { + sizeInBytes += s.currMust.Size() + } + + for _, entry := range s.terms { + sizeInBytes += size.SizeOfSlice + for _, entry1 := range entry { + sizeInBytes += size.SizeOfString + len(entry1) + } + } + + return sizeInBytes +} + +func NewPhraseSearcher(indexReader index.IndexReader, terms []string, field string, options search.SearcherOptions) (*PhraseSearcher, error) { + // turn flat terms []string into [][]string + mterms := make([][]string, len(terms)) + for i, term := range terms { + mterms[i] = []string{term} + } + return 
NewMultiPhraseSearcher(indexReader, mterms, field, options) +} + +func NewMultiPhraseSearcher(indexReader index.IndexReader, terms [][]string, field string, options search.SearcherOptions) (*PhraseSearcher, error) { + options.IncludeTermVectors = true + var termPositionSearchers []search.Searcher + for _, termPos := range terms { + if len(termPos) == 1 && termPos[0] != "" { + // single term + ts, err := NewTermSearcher(indexReader, termPos[0], field, 1.0, options) + if err != nil { + // close any searchers already opened + for _, ts := range termPositionSearchers { + _ = ts.Close() + } + return nil, fmt.Errorf("phrase searcher error building term searcher: %v", err) + } + termPositionSearchers = append(termPositionSearchers, ts) + } else if len(termPos) > 1 { + // multiple terms + var termSearchers []search.Searcher + for _, term := range termPos { + if term == "" { + continue + } + ts, err := NewTermSearcher(indexReader, term, field, 1.0, options) + if err != nil { + // close any searchers already opened + for _, ts := range termPositionSearchers { + _ = ts.Close() + } + return nil, fmt.Errorf("phrase searcher error building term searcher: %v", err) + } + termSearchers = append(termSearchers, ts) + } + disjunction, err := NewDisjunctionSearcher(indexReader, termSearchers, 1, options) + if err != nil { + // close any searchers already opened + for _, ts := range termPositionSearchers { + _ = ts.Close() + } + return nil, fmt.Errorf("phrase searcher error building term position disjunction searcher: %v", err) + } + termPositionSearchers = append(termPositionSearchers, disjunction) + } + } + + mustSearcher, err := NewConjunctionSearcher(indexReader, termPositionSearchers, options) + if err != nil { + // close any searchers already opened + for _, ts := range termPositionSearchers { + _ = ts.Close() + } + return nil, fmt.Errorf("phrase searcher error building conjunction searcher: %v", err) + } + + // build our searcher + rv := PhraseSearcher{ + mustSearcher: 
mustSearcher, + terms: terms, + } + rv.computeQueryNorm() + return &rv, nil +} + +func (s *PhraseSearcher) computeQueryNorm() { + // first calculate sum of squared weights + sumOfSquaredWeights := 0.0 + if s.mustSearcher != nil { + sumOfSquaredWeights += s.mustSearcher.Weight() + } + + // now compute query norm from this + s.queryNorm = 1.0 / math.Sqrt(sumOfSquaredWeights) + // finally tell all the downstream searchers the norm + if s.mustSearcher != nil { + s.mustSearcher.SetQueryNorm(s.queryNorm) + } +} + +func (s *PhraseSearcher) initSearchers(ctx *search.SearchContext) error { + err := s.advanceNextMust(ctx) + if err != nil { + return err + } + + s.initialized = true + return nil +} + +func (s *PhraseSearcher) advanceNextMust(ctx *search.SearchContext) error { + var err error + + if s.mustSearcher != nil { + if s.currMust != nil { + ctx.DocumentMatchPool.Put(s.currMust) + } + s.currMust, err = s.mustSearcher.Next(ctx) + if err != nil { + return err + } + } + + return nil +} + +func (s *PhraseSearcher) Weight() float64 { + return s.mustSearcher.Weight() +} + +func (s *PhraseSearcher) SetQueryNorm(qnorm float64) { + s.mustSearcher.SetQueryNorm(qnorm) +} + +func (s *PhraseSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + + for s.currMust != nil { + // check this match against phrase constraints + rv := s.checkCurrMustMatch(ctx) + + // prepare for next iteration (either loop or subsequent call to Next()) + err := s.advanceNextMust(ctx) + if err != nil { + return nil, err + } + + // if match satisfied phrase constraints return it as a hit + if rv != nil { + return rv, nil + } + } + + return nil, nil +} + +// checkCurrMustMatch is solely concerned with determining if the DocumentMatch +// pointed to by s.currMust (which satisifies the pre-condition searcher) +// also satisfies the phase constraints. 
if so, it returns a DocumentMatch +// for this document, otherwise nil +func (s *PhraseSearcher) checkCurrMustMatch(ctx *search.SearchContext) *search.DocumentMatch { + s.locations = s.currMust.Complete(s.locations) + + locations := s.currMust.Locations + s.currMust.Locations = nil + + ftls := s.currMust.FieldTermLocations + + // typically we would expect there to only actually be results in + // one field, but we allow for this to not be the case + // but, we note that phrase constraints can only be satisfied within + // a single field, so we can check them each independently + for field, tlm := range locations { + ftls = s.checkCurrMustMatchField(ctx, field, tlm, ftls) + } + + if len(ftls) > 0 { + // return match + rv := s.currMust + s.currMust = nil + rv.FieldTermLocations = ftls + return rv + } + + return nil +} + +// checkCurrMustMatchField is solely concerned with determining if one +// particular field within the currMust DocumentMatch Locations +// satisfies the phase constraints (possibly more than once). 
if so, +// the matching field term locations are appended to the provided +// slice +func (s *PhraseSearcher) checkCurrMustMatchField(ctx *search.SearchContext, + field string, tlm search.TermLocationMap, + ftls []search.FieldTermLocation) []search.FieldTermLocation { + if s.path == nil { + s.path = make(phrasePath, 0, len(s.terms)) + } + s.paths = findPhrasePaths(0, nil, s.terms, tlm, s.path[:0], 0, s.paths[:0]) + for _, p := range s.paths { + for _, pp := range p { + ftls = append(ftls, search.FieldTermLocation{ + Field: field, + Term: pp.term, + Location: search.Location{ + Pos: pp.loc.Pos, + Start: pp.loc.Start, + End: pp.loc.End, + ArrayPositions: pp.loc.ArrayPositions, + }, + }) + } + } + return ftls +} + +type phrasePart struct { + term string + loc *search.Location +} + +func (p *phrasePart) String() string { + return fmt.Sprintf("[%s %v]", p.term, p.loc) +} + +type phrasePath []phrasePart + +func (p phrasePath) MergeInto(in search.TermLocationMap) { + for _, pp := range p { + in[pp.term] = append(in[pp.term], pp.loc) + } +} + +func (p phrasePath) String() string { + rv := "[" + for i, pp := range p { + if i > 0 { + rv += ", " + } + rv += pp.String() + } + rv += "]" + return rv +} + +// findPhrasePaths is a function to identify phase matches from a set +// of known term locations. it recursive so care must be taken with +// arguments and return values. 
+// +// prevPos - the previous location, 0 on first invocation +// ap - array positions of the first candidate phrase part to +// which further recursive phrase parts must match, +// nil on initial invocation or when there are no array positions +// phraseTerms - slice containing the phrase terms, +// may contain empty string as placeholder (don't care) +// tlm - the Term Location Map containing all relevant term locations +// p - the current path being explored (appended to in recursive calls) +// this is the primary state being built during the traversal +// remainingSlop - amount of sloppiness that's allowed, which is the +// sum of the editDistances from each matching phrase part, +// where 0 means no sloppiness allowed (all editDistances must be 0), +// decremented during recursion +// rv - the final result being appended to by all the recursive calls +// +// returns slice of paths, or nil if invocation did not find any successul paths +func findPhrasePaths(prevPos uint64, ap search.ArrayPositions, phraseTerms [][]string, + tlm search.TermLocationMap, p phrasePath, remainingSlop int, rv []phrasePath) []phrasePath { + // no more terms + if len(phraseTerms) < 1 { + // snapshot or copy the recursively built phrasePath p and + // append it to the rv, also optimizing by checking if next + // phrasePath item in the rv (which we're about to overwrite) + // is available for reuse + var pcopy phrasePath + if len(rv) < cap(rv) { + pcopy = rv[:len(rv)+1][len(rv)][:0] + } + return append(rv, append(pcopy, p...)) + } + + car := phraseTerms[0] + cdr := phraseTerms[1:] + + // empty term is treated as match (continue) + if len(car) == 0 || (len(car) == 1 && car[0] == "") { + nextPos := prevPos + 1 + if prevPos == 0 { + // if prevPos was 0, don't set it to 1 (as thats not a real abs pos) + nextPos = 0 // don't advance nextPos if prevPos was 0 + } + return findPhrasePaths(nextPos, ap, cdr, tlm, p, remainingSlop, rv) + } + + // locations for this term + for _, carTerm := range 
car { + locations := tlm[carTerm] + LOCATIONS_LOOP: + for _, loc := range locations { + if prevPos != 0 && !loc.ArrayPositions.Equals(ap) { + // if the array positions are wrong, can't match, try next location + continue + } + + // compute distance from previous phrase term + dist := 0 + if prevPos != 0 { + dist = editDistance(prevPos+1, loc.Pos) + } + + // if enough slop remaining, continue recursively + if prevPos == 0 || (remainingSlop-dist) >= 0 { + // skip if we've already used this term+loc already + for _, ppart := range p { + if ppart.term == carTerm && ppart.loc == loc { + continue LOCATIONS_LOOP + } + } + + // this location works, add it to the path (but not for empty term) + px := append(p, phrasePart{term: carTerm, loc: loc}) + rv = findPhrasePaths(loc.Pos, loc.ArrayPositions, cdr, tlm, px, remainingSlop-dist, rv) + } + } + } + return rv +} + +func editDistance(p1, p2 uint64) int { + dist := int(p1 - p2) + if dist < 0 { + return -dist + } + return dist +} + +func (s *PhraseSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + if !s.initialized { + err := s.initSearchers(ctx) + if err != nil { + return nil, err + } + } + if s.currMust != nil { + if s.currMust.IndexInternalID.Compare(ID) >= 0 { + return s.Next(ctx) + } + ctx.DocumentMatchPool.Put(s.currMust) + } + if s.currMust == nil { + return nil, nil + } + var err error + s.currMust, err = s.mustSearcher.Advance(ctx, ID) + if err != nil { + return nil, err + } + return s.Next(ctx) +} + +func (s *PhraseSearcher) Count() uint64 { + // for now return a worst case + return s.mustSearcher.Count() +} + +func (s *PhraseSearcher) Close() error { + if s.mustSearcher != nil { + err := s.mustSearcher.Close() + if err != nil { + return err + } + } + return nil +} + +func (s *PhraseSearcher) Min() int { + return 0 +} + +func (s *PhraseSearcher) DocumentMatchPoolSize() int { + return s.mustSearcher.DocumentMatchPoolSize() + 1 +} diff --git 
a/vendor/github.com/blevesearch/bleve/search/searcher/search_regexp.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_regexp.go new file mode 100644 index 0000000..11a44f1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_regexp.go @@ -0,0 +1,120 @@ +// Copyright (c) 2015 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "regexp" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +// NewRegexpStringSearcher is similar to NewRegexpSearcher, but +// additionally optimizes for index readers that handle regexp's. 
+func NewRegexpStringSearcher(indexReader index.IndexReader, pattern string, + field string, boost float64, options search.SearcherOptions) ( + search.Searcher, error) { + ir, ok := indexReader.(index.IndexReaderRegexp) + if !ok { + r, err := regexp.Compile(pattern) + if err != nil { + return nil, err + } + + return NewRegexpSearcher(indexReader, r, field, boost, options) + } + + fieldDict, err := ir.FieldDictRegexp(field, pattern) + if err != nil { + return nil, err + } + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + var candidateTerms []string + + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + candidateTerms = append(candidateTerms, tfd.Term) + tfd, err = fieldDict.Next() + } + if err != nil { + return nil, err + } + + return NewMultiTermSearcher(indexReader, candidateTerms, field, boost, + options, true) +} + +// NewRegexpSearcher creates a searcher which will match documents that +// contain terms which match the pattern regexp. The match must be EXACT +// matching the entire term. The provided regexp SHOULD NOT start with ^ +// or end with $ as this can intefere with the implementation. Separately, +// matches will be checked to ensure they match the entire term. 
+func NewRegexpSearcher(indexReader index.IndexReader, pattern index.Regexp, + field string, boost float64, options search.SearcherOptions) ( + search.Searcher, error) { + var candidateTerms []string + + prefixTerm, complete := pattern.LiteralPrefix() + if complete { + // there is no pattern + candidateTerms = []string{prefixTerm} + } else { + var err error + candidateTerms, err = findRegexpCandidateTerms(indexReader, pattern, field, + prefixTerm) + if err != nil { + return nil, err + } + } + + return NewMultiTermSearcher(indexReader, candidateTerms, field, boost, + options, true) +} + +func findRegexpCandidateTerms(indexReader index.IndexReader, + pattern index.Regexp, field, prefixTerm string) (rv []string, err error) { + rv = make([]string, 0) + var fieldDict index.FieldDict + if len(prefixTerm) > 0 { + fieldDict, err = indexReader.FieldDictPrefix(field, []byte(prefixTerm)) + } else { + fieldDict, err = indexReader.FieldDict(field) + } + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + // enumerate the terms and check against regexp + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + matchPos := pattern.FindStringIndex(tfd.Term) + if matchPos != nil && matchPos[0] == 0 && matchPos[1] == len(tfd.Term) { + rv = append(rv, tfd.Term) + if tooManyClauses(len(rv)) { + return rv, tooManyClausesErr(field, len(rv)) + } + } + tfd, err = fieldDict.Next() + } + + return rv, err +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_term.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_term.go new file mode 100644 index 0000000..e07d253 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_term.go @@ -0,0 +1,141 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "reflect" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" + "github.com/blevesearch/bleve/search/scorer" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizeTermSearcher int + +func init() { + var ts TermSearcher + reflectStaticSizeTermSearcher = int(reflect.TypeOf(ts).Size()) +} + +type TermSearcher struct { + indexReader index.IndexReader + reader index.TermFieldReader + scorer *scorer.TermQueryScorer + tfd index.TermFieldDoc +} + +func NewTermSearcher(indexReader index.IndexReader, term string, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) { + return NewTermSearcherBytes(indexReader, []byte(term), field, boost, options) +} + +func NewTermSearcherBytes(indexReader index.IndexReader, term []byte, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) { + needFreqNorm := options.Score != "none" + reader, err := indexReader.TermFieldReader(term, field, needFreqNorm, needFreqNorm, options.IncludeTermVectors) + if err != nil { + return nil, err + } + return newTermSearcherFromReader(indexReader, reader, term, field, boost, options) +} + +func newTermSearcherFromReader(indexReader index.IndexReader, reader index.TermFieldReader, + term []byte, field string, boost float64, options search.SearcherOptions) (*TermSearcher, error) { + count, err := indexReader.DocCount() + if err != nil { + _ = reader.Close() + return nil, err + } + scorer := scorer.NewTermQueryScorer(term, field, boost, count, reader.Count(), 
options) + return &TermSearcher{ + indexReader: indexReader, + reader: reader, + scorer: scorer, + }, nil +} + +func (s *TermSearcher) Size() int { + return reflectStaticSizeTermSearcher + size.SizeOfPtr + + s.reader.Size() + + s.tfd.Size() + + s.scorer.Size() +} + +func (s *TermSearcher) Count() uint64 { + return s.reader.Count() +} + +func (s *TermSearcher) Weight() float64 { + return s.scorer.Weight() +} + +func (s *TermSearcher) SetQueryNorm(qnorm float64) { + s.scorer.SetQueryNorm(qnorm) +} + +func (s *TermSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) { + termMatch, err := s.reader.Next(s.tfd.Reset()) + if err != nil { + return nil, err + } + + if termMatch == nil { + return nil, nil + } + + // score match + docMatch := s.scorer.Score(ctx, termMatch) + // return doc match + return docMatch, nil + +} + +func (s *TermSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) { + termMatch, err := s.reader.Advance(ID, s.tfd.Reset()) + if err != nil { + return nil, err + } + + if termMatch == nil { + return nil, nil + } + + // score match + docMatch := s.scorer.Score(ctx, termMatch) + + // return doc match + return docMatch, nil +} + +func (s *TermSearcher) Close() error { + return s.reader.Close() +} + +func (s *TermSearcher) Min() int { + return 0 +} + +func (s *TermSearcher) DocumentMatchPoolSize() int { + return 1 +} + +func (s *TermSearcher) Optimize(kind string, octx index.OptimizableContext) ( + index.OptimizableContext, error) { + o, ok := s.reader.(index.Optimizable) + if ok { + return o.Optimize(kind, octx) + } + + return nil, nil +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_term_prefix.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_term_prefix.go new file mode 100644 index 0000000..2a8f22c --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_term_prefix.go @@ -0,0 +1,50 @@ +// Copyright (c) 2014 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +func NewTermPrefixSearcher(indexReader index.IndexReader, prefix string, + field string, boost float64, options search.SearcherOptions) ( + search.Searcher, error) { + // find the terms with this prefix + fieldDict, err := indexReader.FieldDictPrefix(field, []byte(prefix)) + if err != nil { + return nil, err + } + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + var terms []string + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + terms = append(terms, tfd.Term) + if tooManyClauses(len(terms)) { + return nil, tooManyClausesErr(field, len(terms)) + } + tfd, err = fieldDict.Next() + } + if err != nil { + return nil, err + } + + return NewMultiTermSearcher(indexReader, terms, field, boost, options, true) +} diff --git a/vendor/github.com/blevesearch/bleve/search/searcher/search_term_range.go b/vendor/github.com/blevesearch/bleve/search/searcher/search_term_range.go new file mode 100644 index 0000000..90be1e1 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/searcher/search_term_range.go @@ -0,0 +1,85 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package searcher + +import ( + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/search" +) + +func NewTermRangeSearcher(indexReader index.IndexReader, + min, max []byte, inclusiveMin, inclusiveMax *bool, field string, + boost float64, options search.SearcherOptions) (search.Searcher, error) { + + if inclusiveMin == nil { + defaultInclusiveMin := true + inclusiveMin = &defaultInclusiveMin + } + if inclusiveMax == nil { + defaultInclusiveMax := false + inclusiveMax = &defaultInclusiveMax + } + + if min == nil { + min = []byte{} + } + + rangeMax := max + if rangeMax != nil { + // the term dictionary range end has an unfortunate implementation + rangeMax = append(rangeMax, 0) + } + + // find the terms with this prefix + fieldDict, err := indexReader.FieldDictRange(field, min, rangeMax) + if err != nil { + return nil, err + } + + defer func() { + if cerr := fieldDict.Close(); cerr != nil && err == nil { + err = cerr + } + }() + + var terms []string + tfd, err := fieldDict.Next() + for err == nil && tfd != nil { + terms = append(terms, tfd.Term) + tfd, err = fieldDict.Next() + } + if err != nil { + return nil, err + } + + if len(terms) < 1 { + return NewMatchNoneSearcher(indexReader) + } + + if !*inclusiveMin && min != nil && string(min) == terms[0] { + terms = terms[1:] + // check again, as we might have removed only entry + if len(terms) < 1 { + return NewMatchNoneSearcher(indexReader) + } + } + + // if our term list included the max, it would be the last item + if !*inclusiveMax && max != nil && string(max) == 
terms[len(terms)-1] { + terms = terms[:len(terms)-1] + } + + return NewMultiTermSearcher(indexReader, terms, field, boost, options, true) +} diff --git a/vendor/github.com/blevesearch/bleve/search/sort.go b/vendor/github.com/blevesearch/bleve/search/sort.go new file mode 100644 index 0000000..dca422e --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/sort.go @@ -0,0 +1,746 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package search + +import ( + "bytes" + "encoding/json" + "fmt" + "math" + "sort" + "strings" + + "github.com/blevesearch/bleve/geo" + "github.com/blevesearch/bleve/numeric" +) + +var HighTerm = strings.Repeat(string([]byte{0xff}), 10) +var LowTerm = string([]byte{0x00}) + +type SearchSort interface { + UpdateVisitor(field string, term []byte) + Value(a *DocumentMatch) string + Descending() bool + + RequiresDocID() bool + RequiresScoring() bool + RequiresFields() []string + + Reverse() + + Copy() SearchSort +} + +func ParseSearchSortObj(input map[string]interface{}) (SearchSort, error) { + descending, ok := input["desc"].(bool) + by, ok := input["by"].(string) + if !ok { + return nil, fmt.Errorf("search sort must specify by") + } + switch by { + case "id": + return &SortDocID{ + Desc: descending, + }, nil + case "score": + return &SortScore{ + Desc: descending, + }, nil + case "geo_distance": + field, ok := input["field"].(string) + if !ok { + return nil, fmt.Errorf("search sort mode 
geo_distance must specify field") + } + lon, lat, foundLocation := geo.ExtractGeoPoint(input["location"]) + if !foundLocation { + return nil, fmt.Errorf("unable to parse geo_distance location") + } + rvd := &SortGeoDistance{ + Field: field, + Desc: descending, + Lon: lon, + Lat: lat, + unitMult: 1.0, + } + if distUnit, ok := input["unit"].(string); ok { + var err error + rvd.unitMult, err = geo.ParseDistanceUnit(distUnit) + if err != nil { + return nil, err + } + rvd.Unit = distUnit + } + return rvd, nil + case "field": + field, ok := input["field"].(string) + if !ok { + return nil, fmt.Errorf("search sort mode field must specify field") + } + rv := &SortField{ + Field: field, + Desc: descending, + } + typ, ok := input["type"].(string) + if ok { + switch typ { + case "auto": + rv.Type = SortFieldAuto + case "string": + rv.Type = SortFieldAsString + case "number": + rv.Type = SortFieldAsNumber + case "date": + rv.Type = SortFieldAsDate + default: + return nil, fmt.Errorf("unknown sort field type: %s", typ) + } + } + mode, ok := input["mode"].(string) + if ok { + switch mode { + case "default": + rv.Mode = SortFieldDefault + case "min": + rv.Mode = SortFieldMin + case "max": + rv.Mode = SortFieldMax + default: + return nil, fmt.Errorf("unknown sort field mode: %s", mode) + } + } + missing, ok := input["missing"].(string) + if ok { + switch missing { + case "first": + rv.Missing = SortFieldMissingFirst + case "last": + rv.Missing = SortFieldMissingLast + default: + return nil, fmt.Errorf("unknown sort field missing: %s", missing) + } + } + return rv, nil + } + + return nil, fmt.Errorf("unknown search sort by: %s", by) +} + +func ParseSearchSortString(input string) SearchSort { + descending := false + if strings.HasPrefix(input, "-") { + descending = true + input = input[1:] + } else if strings.HasPrefix(input, "+") { + input = input[1:] + } + if input == "_id" { + return &SortDocID{ + Desc: descending, + } + } else if input == "_score" { + return &SortScore{ + Desc: 
descending, + } + } + return &SortField{ + Field: input, + Desc: descending, + } +} + +func ParseSearchSortJSON(input json.RawMessage) (SearchSort, error) { + // first try to parse it as string + var sortString string + err := json.Unmarshal(input, &sortString) + if err != nil { + var sortObj map[string]interface{} + err = json.Unmarshal(input, &sortObj) + if err != nil { + return nil, err + } + return ParseSearchSortObj(sortObj) + } + return ParseSearchSortString(sortString), nil +} + +func ParseSortOrderStrings(in []string) SortOrder { + rv := make(SortOrder, 0, len(in)) + for _, i := range in { + ss := ParseSearchSortString(i) + rv = append(rv, ss) + } + return rv +} + +func ParseSortOrderJSON(in []json.RawMessage) (SortOrder, error) { + rv := make(SortOrder, 0, len(in)) + for _, i := range in { + ss, err := ParseSearchSortJSON(i) + if err != nil { + return nil, err + } + rv = append(rv, ss) + } + return rv, nil +} + +type SortOrder []SearchSort + +func (so SortOrder) Value(doc *DocumentMatch) { + for _, soi := range so { + doc.Sort = append(doc.Sort, soi.Value(doc)) + } +} + +func (so SortOrder) UpdateVisitor(field string, term []byte) { + for _, soi := range so { + soi.UpdateVisitor(field, term) + } +} + +func (so SortOrder) Copy() SortOrder { + rv := make(SortOrder, len(so)) + for i, soi := range so { + rv[i] = soi.Copy() + } + return rv +} + +// Compare will compare two document matches using the specified sort order +// if both are numbers, we avoid converting back to term +func (so SortOrder) Compare(cachedScoring, cachedDesc []bool, i, j *DocumentMatch) int { + // compare the documents on all search sorts until a differences is found + for x := range so { + c := 0 + if cachedScoring[x] { + if i.Score < j.Score { + c = -1 + } else if i.Score > j.Score { + c = 1 + } + } else { + iVal := i.Sort[x] + jVal := j.Sort[x] + if iVal < jVal { + c = -1 + } else if iVal > jVal { + c = 1 + } + } + + if c == 0 { + continue + } + if cachedDesc[x] { + c = -c + } + return 
c + } + // if they are the same at this point, impose order based on index natural sort order + if i.HitNumber == j.HitNumber { + return 0 + } else if i.HitNumber > j.HitNumber { + return 1 + } + return -1 +} + +func (so SortOrder) RequiresScore() bool { + for _, soi := range so { + if soi.RequiresScoring() { + return true + } + } + return false +} + +func (so SortOrder) RequiresDocID() bool { + for _, soi := range so { + if soi.RequiresDocID() { + return true + } + } + return false +} + +func (so SortOrder) RequiredFields() []string { + var rv []string + for _, soi := range so { + rv = append(rv, soi.RequiresFields()...) + } + return rv +} + +func (so SortOrder) CacheIsScore() []bool { + rv := make([]bool, 0, len(so)) + for _, soi := range so { + rv = append(rv, soi.RequiresScoring()) + } + return rv +} + +func (so SortOrder) CacheDescending() []bool { + rv := make([]bool, 0, len(so)) + for _, soi := range so { + rv = append(rv, soi.Descending()) + } + return rv +} + +func (so SortOrder) Reverse() { + for _, soi := range so { + soi.Reverse() + } +} + +// SortFieldType lets you control some internal sort behavior +// normally leaving this to the zero-value of SortFieldAuto is fine +type SortFieldType int + +const ( + // SortFieldAuto applies heuristics attempt to automatically sort correctly + SortFieldAuto SortFieldType = iota + // SortFieldAsString forces sort as string (no prefix coded terms removed) + SortFieldAsString + // SortFieldAsNumber forces sort as string (prefix coded terms with shift > 0 removed) + SortFieldAsNumber + // SortFieldAsDate forces sort as string (prefix coded terms with shift > 0 removed) + SortFieldAsDate +) + +// SortFieldMode describes the behavior if the field has multiple values +type SortFieldMode int + +const ( + // SortFieldDefault uses the first (or only) value, this is the default zero-value + SortFieldDefault SortFieldMode = iota // FIXME name is confusing + // SortFieldMin uses the minimum value + SortFieldMin + // 
SortFieldMax uses the maximum value + SortFieldMax +) + +// SortFieldMissing controls where documents missing a field value should be sorted +type SortFieldMissing int + +const ( + // SortFieldMissingLast sorts documents missing a field at the end + SortFieldMissingLast SortFieldMissing = iota + + // SortFieldMissingFirst sorts documents missing a field at the beginning + SortFieldMissingFirst +) + +// SortField will sort results by the value of a stored field +// Field is the name of the field +// Descending reverse the sort order (default false) +// Type allows forcing of string/number/date behavior (default auto) +// Mode controls behavior for multi-values fields (default first) +// Missing controls behavior of missing values (default last) +type SortField struct { + Field string + Desc bool + Type SortFieldType + Mode SortFieldMode + Missing SortFieldMissing + values [][]byte + tmp [][]byte +} + +// UpdateVisitor notifies this sort field that in this document +// this field has the specified term +func (s *SortField) UpdateVisitor(field string, term []byte) { + if field == s.Field { + s.values = append(s.values, term) + } +} + +// Value returns the sort value of the DocumentMatch +// it also resets the state of this SortField for +// processing the next document +func (s *SortField) Value(i *DocumentMatch) string { + iTerms := s.filterTermsByType(s.values) + iTerm := s.filterTermsByMode(iTerms) + s.values = s.values[:0] + return iTerm +} + +// Descending determines the order of the sort +func (s *SortField) Descending() bool { + return s.Desc +} + +func (s *SortField) filterTermsByMode(terms [][]byte) string { + if len(terms) == 1 || (len(terms) > 1 && s.Mode == SortFieldDefault) { + return string(terms[0]) + } else if len(terms) > 1 { + switch s.Mode { + case SortFieldMin: + sort.Sort(BytesSlice(terms)) + return string(terms[0]) + case SortFieldMax: + sort.Sort(BytesSlice(terms)) + return string(terms[len(terms)-1]) + } + } + + // handle missing terms + if 
s.Missing == SortFieldMissingLast { + if s.Desc { + return LowTerm + } + return HighTerm + } + if s.Desc { + return HighTerm + } + return LowTerm +} + +// filterTermsByType attempts to make one pass on the terms +// if we are in auto-mode AND all the terms look like prefix-coded numbers +// return only the terms which had shift of 0 +// if we are in explicit number or date mode, return only valid +// prefix coded numbers with shift of 0 +func (s *SortField) filterTermsByType(terms [][]byte) [][]byte { + stype := s.Type + if stype == SortFieldAuto { + allTermsPrefixCoded := true + termsWithShiftZero := s.tmp[:0] + for _, term := range terms { + valid, shift := numeric.ValidPrefixCodedTermBytes(term) + if valid && shift == 0 { + termsWithShiftZero = append(termsWithShiftZero, term) + } else if !valid { + allTermsPrefixCoded = false + } + } + // reset the terms only when valid zero shift terms are found. + if allTermsPrefixCoded && len(termsWithShiftZero) > 0 { + terms = termsWithShiftZero + s.tmp = termsWithShiftZero[:0] + } + } else if stype == SortFieldAsNumber || stype == SortFieldAsDate { + termsWithShiftZero := s.tmp[:0] + for _, term := range terms { + valid, shift := numeric.ValidPrefixCodedTermBytes(term) + if valid && shift == 0 { + termsWithShiftZero = append(termsWithShiftZero, term) + } + } + terms = termsWithShiftZero + s.tmp = termsWithShiftZero[:0] + } + return terms +} + +// RequiresDocID says this SearchSort does not require the DocID be loaded +func (s *SortField) RequiresDocID() bool { return false } + +// RequiresScoring says this SearchStore does not require scoring +func (s *SortField) RequiresScoring() bool { return false } + +// RequiresFields says this SearchStore requires the specified stored field +func (s *SortField) RequiresFields() []string { return []string{s.Field} } + +func (s *SortField) MarshalJSON() ([]byte, error) { + // see if simple format can be used + if s.Missing == SortFieldMissingLast && + s.Mode == SortFieldDefault && + 
s.Type == SortFieldAuto { + if s.Desc { + return json.Marshal("-" + s.Field) + } + return json.Marshal(s.Field) + } + sfm := map[string]interface{}{ + "by": "field", + "field": s.Field, + } + if s.Desc { + sfm["desc"] = true + } + if s.Missing > SortFieldMissingLast { + switch s.Missing { + case SortFieldMissingFirst: + sfm["missing"] = "first" + } + } + if s.Mode > SortFieldDefault { + switch s.Mode { + case SortFieldMin: + sfm["mode"] = "min" + case SortFieldMax: + sfm["mode"] = "max" + } + } + if s.Type > SortFieldAuto { + switch s.Type { + case SortFieldAsString: + sfm["type"] = "string" + case SortFieldAsNumber: + sfm["type"] = "number" + case SortFieldAsDate: + sfm["type"] = "date" + } + } + + return json.Marshal(sfm) +} + +func (s *SortField) Copy() SearchSort { + rv := *s + return &rv +} + +func (s *SortField) Reverse() { + s.Desc = !s.Desc + if s.Missing == SortFieldMissingFirst { + s.Missing = SortFieldMissingLast + } else { + s.Missing = SortFieldMissingFirst + } +} + +// SortDocID will sort results by the document identifier +type SortDocID struct { + Desc bool +} + +// UpdateVisitor is a no-op for SortDocID as it's value +// is not dependent on any field terms +func (s *SortDocID) UpdateVisitor(field string, term []byte) { +} + +// Value returns the sort value of the DocumentMatch +func (s *SortDocID) Value(i *DocumentMatch) string { + return i.ID +} + +// Descending determines the order of the sort +func (s *SortDocID) Descending() bool { + return s.Desc +} + +// RequiresDocID says this SearchSort does require the DocID be loaded +func (s *SortDocID) RequiresDocID() bool { return true } + +// RequiresScoring says this SearchStore does not require scoring +func (s *SortDocID) RequiresScoring() bool { return false } + +// RequiresFields says this SearchStore does not require any stored fields +func (s *SortDocID) RequiresFields() []string { return nil } + +func (s *SortDocID) MarshalJSON() ([]byte, error) { + if s.Desc { + return json.Marshal("-_id") + 
} + return json.Marshal("_id") +} + +func (s *SortDocID) Copy() SearchSort { + rv := *s + return &rv +} + +func (s *SortDocID) Reverse() { + s.Desc = !s.Desc +} + +// SortScore will sort results by the document match score +type SortScore struct { + Desc bool +} + +// UpdateVisitor is a no-op for SortScore as it's value +// is not dependent on any field terms +func (s *SortScore) UpdateVisitor(field string, term []byte) { +} + +// Value returns the sort value of the DocumentMatch +func (s *SortScore) Value(i *DocumentMatch) string { + return "_score" +} + +// Descending determines the order of the sort +func (s *SortScore) Descending() bool { + return s.Desc +} + +// RequiresDocID says this SearchSort does not require the DocID be loaded +func (s *SortScore) RequiresDocID() bool { return false } + +// RequiresScoring says this SearchStore does require scoring +func (s *SortScore) RequiresScoring() bool { return true } + +// RequiresFields says this SearchStore does not require any store fields +func (s *SortScore) RequiresFields() []string { return nil } + +func (s *SortScore) MarshalJSON() ([]byte, error) { + if s.Desc { + return json.Marshal("-_score") + } + return json.Marshal("_score") +} + +func (s *SortScore) Copy() SearchSort { + rv := *s + return &rv +} + +func (s *SortScore) Reverse() { + s.Desc = !s.Desc +} + +var maxDistance = string(numeric.MustNewPrefixCodedInt64(math.MaxInt64, 0)) + +// NewSortGeoDistance creates SearchSort instance for sorting documents by +// their distance from the specified point. +func NewSortGeoDistance(field, unit string, lon, lat float64, desc bool) ( + *SortGeoDistance, error) { + rv := &SortGeoDistance{ + Field: field, + Desc: desc, + Unit: unit, + Lon: lon, + Lat: lat, + } + var err error + rv.unitMult, err = geo.ParseDistanceUnit(unit) + if err != nil { + return nil, err + } + return rv, nil +} + +// SortGeoDistance will sort results by the distance of an +// indexed geo point, from the provided location. 
+// Field is the name of the field +// Descending reverse the sort order (default false) +type SortGeoDistance struct { + Field string + Desc bool + Unit string + values []string + Lon float64 + Lat float64 + unitMult float64 +} + +// UpdateVisitor notifies this sort field that in this document +// this field has the specified term +func (s *SortGeoDistance) UpdateVisitor(field string, term []byte) { + if field == s.Field { + s.values = append(s.values, string(term)) + } +} + +// Value returns the sort value of the DocumentMatch +// it also resets the state of this SortField for +// processing the next document +func (s *SortGeoDistance) Value(i *DocumentMatch) string { + iTerms := s.filterTermsByType(s.values) + iTerm := s.filterTermsByMode(iTerms) + s.values = s.values[:0] + + if iTerm == "" { + return maxDistance + } + + i64, err := numeric.PrefixCoded(iTerm).Int64() + if err != nil { + return maxDistance + } + docLon := geo.MortonUnhashLon(uint64(i64)) + docLat := geo.MortonUnhashLat(uint64(i64)) + + dist := geo.Haversin(s.Lon, s.Lat, docLon, docLat) + // dist is returned in km, so convert to m + dist *= 1000 + if s.unitMult != 0 { + dist /= s.unitMult + } + distInt64 := numeric.Float64ToInt64(dist) + return string(numeric.MustNewPrefixCodedInt64(distInt64, 0)) +} + +// Descending determines the order of the sort +func (s *SortGeoDistance) Descending() bool { + return s.Desc +} + +func (s *SortGeoDistance) filterTermsByMode(terms []string) string { + if len(terms) >= 1 { + return terms[0] + } + + return "" +} + +// filterTermsByType attempts to make one pass on the terms +// return only valid prefix coded numbers with shift of 0 +func (s *SortGeoDistance) filterTermsByType(terms []string) []string { + var termsWithShiftZero []string + for _, term := range terms { + valid, shift := numeric.ValidPrefixCodedTerm(term) + if valid && shift == 0 { + termsWithShiftZero = append(termsWithShiftZero, term) + } + } + return termsWithShiftZero +} + +// RequiresDocID says 
this SearchSort does not require the DocID be loaded +func (s *SortGeoDistance) RequiresDocID() bool { return false } + +// RequiresScoring says this SearchStore does not require scoring +func (s *SortGeoDistance) RequiresScoring() bool { return false } + +// RequiresFields says this SearchStore requires the specified stored field +func (s *SortGeoDistance) RequiresFields() []string { return []string{s.Field} } + +func (s *SortGeoDistance) MarshalJSON() ([]byte, error) { + sfm := map[string]interface{}{ + "by": "geo_distance", + "field": s.Field, + "location": map[string]interface{}{ + "lon": s.Lon, + "lat": s.Lat, + }, + } + if s.Unit != "" { + sfm["unit"] = s.Unit + } + if s.Desc { + sfm["desc"] = true + } + + return json.Marshal(sfm) +} + +func (s *SortGeoDistance) Copy() SearchSort { + rv := *s + return &rv +} + +func (s *SortGeoDistance) Reverse() { + s.Desc = !s.Desc +} + +type BytesSlice [][]byte + +func (p BytesSlice) Len() int { return len(p) } +func (p BytesSlice) Less(i, j int) bool { return bytes.Compare(p[i], p[j]) < 0 } +func (p BytesSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/vendor/github.com/blevesearch/bleve/search/util.go b/vendor/github.com/blevesearch/bleve/search/util.go new file mode 100644 index 0000000..19dd5d6 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/search/util.go @@ -0,0 +1,69 @@ +// Copyright (c) 2014 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package search + +func MergeLocations(locations []FieldTermLocationMap) FieldTermLocationMap { + rv := locations[0] + + for i := 1; i < len(locations); i++ { + nextLocations := locations[i] + for field, termLocationMap := range nextLocations { + rvTermLocationMap, rvHasField := rv[field] + if rvHasField { + rv[field] = MergeTermLocationMaps(rvTermLocationMap, termLocationMap) + } else { + rv[field] = termLocationMap + } + } + } + + return rv +} + +func MergeTermLocationMaps(rv, other TermLocationMap) TermLocationMap { + for term, locationMap := range other { + // for a given term/document there cannot be different locations + // if they came back from different clauses, overwrite is ok + rv[term] = locationMap + } + return rv +} + +func MergeFieldTermLocations(dest []FieldTermLocation, matches []*DocumentMatch) []FieldTermLocation { + n := len(dest) + for _, dm := range matches { + n += len(dm.FieldTermLocations) + } + if cap(dest) < n { + dest = append(make([]FieldTermLocation, 0, n), dest...) + } + + for _, dm := range matches { + for _, ftl := range dm.FieldTermLocations { + dest = append(dest, FieldTermLocation{ + Field: ftl.Field, + Term: ftl.Term, + Location: Location{ + Pos: ftl.Location.Pos, + Start: ftl.Location.Start, + End: ftl.Location.End, + ArrayPositions: append(ArrayPositions(nil), ftl.Location.ArrayPositions...), + }, + }) + } + } + + return dest +} diff --git a/vendor/github.com/blevesearch/bleve/size/sizes.go b/vendor/github.com/blevesearch/bleve/size/sizes.go new file mode 100644 index 0000000..0990bf8 --- /dev/null +++ b/vendor/github.com/blevesearch/bleve/size/sizes.go @@ -0,0 +1,59 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package size + +import ( + "reflect" +) + +func init() { + var b bool + SizeOfBool = int(reflect.TypeOf(b).Size()) + var f32 float32 + SizeOfFloat32 = int(reflect.TypeOf(f32).Size()) + var f64 float64 + SizeOfFloat64 = int(reflect.TypeOf(f64).Size()) + var i int + SizeOfInt = int(reflect.TypeOf(i).Size()) + var m map[int]int + SizeOfMap = int(reflect.TypeOf(m).Size()) + var ptr *int + SizeOfPtr = int(reflect.TypeOf(ptr).Size()) + var slice []int + SizeOfSlice = int(reflect.TypeOf(slice).Size()) + var str string + SizeOfString = int(reflect.TypeOf(str).Size()) + var u8 uint8 + SizeOfUint8 = int(reflect.TypeOf(u8).Size()) + var u16 uint16 + SizeOfUint16 = int(reflect.TypeOf(u16).Size()) + var u32 uint32 + SizeOfUint32 = int(reflect.TypeOf(u32).Size()) + var u64 uint64 + SizeOfUint64 = int(reflect.TypeOf(u64).Size()) +} + +var SizeOfBool int +var SizeOfFloat32 int +var SizeOfFloat64 int +var SizeOfInt int +var SizeOfMap int +var SizeOfPtr int +var SizeOfSlice int +var SizeOfString int +var SizeOfUint8 int +var SizeOfUint16 int +var SizeOfUint32 int +var SizeOfUint64 int diff --git a/vendor/github.com/blevesearch/go-porterstemmer/.gitignore b/vendor/github.com/blevesearch/go-porterstemmer/.gitignore new file mode 100644 index 0000000..d1ffcc5 --- /dev/null +++ b/vendor/github.com/blevesearch/go-porterstemmer/.gitignore @@ -0,0 +1,8 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +.DS_Store +/testdata diff --git a/vendor/github.com/blevesearch/go-porterstemmer/.travis.yml b/vendor/github.com/blevesearch/go-porterstemmer/.travis.yml new file mode 
100644 index 0000000..d032f23 --- /dev/null +++ b/vendor/github.com/blevesearch/go-porterstemmer/.travis.yml @@ -0,0 +1,16 @@ +language: go + +go: + - 1.4 + +script: + - go get golang.org/x/tools/cmd/vet + - go get golang.org/x/tools/cmd/cover + - go get github.com/mattn/goveralls + - go test -v -covermode=count -coverprofile=profile.out + - go vet + - goveralls -service drone.io -coverprofile=profile.out -repotoken $COVERALLS + +notifications: + email: + - marty.schoch@gmail.com diff --git a/vendor/github.com/blevesearch/go-porterstemmer/LICENSE b/vendor/github.com/blevesearch/go-porterstemmer/LICENSE new file mode 100644 index 0000000..8d2999c --- /dev/null +++ b/vendor/github.com/blevesearch/go-porterstemmer/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013 Charles Iliya Krempeaux :: http://changelog.ca/ + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/blevesearch/go-porterstemmer/README.md b/vendor/github.com/blevesearch/go-porterstemmer/README.md new file mode 100644 index 0000000..d96911a --- /dev/null +++ b/vendor/github.com/blevesearch/go-porterstemmer/README.md @@ -0,0 +1,118 @@ +# This fork... + +I'm maintaining this fork because the original author was not replying to issues or pull requests. For now I plan on maintaining this fork as necessary. + +## Status + +[![Build Status](https://travis-ci.org/blevesearch/go-porterstemmer.svg?branch=master)](https://travis-ci.org/blevesearch/go-porterstemmer) + +[![Coverage Status](https://coveralls.io/repos/blevesearch/go-porterstemmer/badge.png?branch=HEAD)](https://coveralls.io/r/blevesearch/go-porterstemmer?branch=HEAD) + +# Go Porter Stemmer + +A native Go clean room implementation of the Porter Stemming Algorithm. + +This algorithm is of interest to people doing Machine Learning or +Natural Language Processing (NLP). + +This is NOT a port. This is a native Go implementation from the human-readable +description of the algorithm. + +I've tried to make it (more) efficient by NOT internally using string's, but +instead internally using []rune's and using the same (array) buffer used by +the []rune slice (and sub-slices) at all steps of the algorithm. + +For Porter Stemmer algorithm, see: + +http://tartarus.org/martin/PorterStemmer/def.txt (URL #1) + +http://tartarus.org/martin/PorterStemmer/ (URL #2) + +# Departures + +Also, since when I initially implemented it, it failed the tests at... + +http://tartarus.org/martin/PorterStemmer/voc.txt (URL #3) + +http://tartarus.org/martin/PorterStemmer/output.txt (URL #4) + +... after reading the human-readble text over and over again to try to figure out +what the error I made was (and doing all sorts of things to debug it) I came to the +conclusion that the some of these tests were wrong according to the human-readable +description of the algorithm. 
+ +This led me to wonder if maybe other people's code that was passing these tests had +rules that were not in the human-readable description. Which led me to look at the source +code here... + +http://tartarus.org/martin/PorterStemmer/c.txt (URL #5) + +... When I looked there I noticed that there are some items marked as a "DEPARTURE", +which differ from the original algorithm. (There are 2 of these.) + +I implemented these departures, and the tests at URL #3 and URL #4 all passed. + +## Usage + +To use this Golang library, use with something like: + + package main + + import ( + "fmt" + "github.com/reiver/go-porterstemmer" + ) + + func main() { + + word := "Waxes" + + stem := porterstemmer.StemString(word) + + fmt.Printf("The word [%s] has the stem [%s].\n", word, stem) + } + +Alternatively, if you want to be a bit more efficient, use []rune slices instead, with code like: + + package main + + import ( + "fmt" + "github.com/reiver/go-porterstemmer" + ) + + func main() { + + word := []rune("Waxes") + + stem := porterstemmer.Stem(word) + + fmt.Printf("The word [%s] has the stem [%s].\n", string(word), string(stem)) + } + +Although NOTE that the above code may modify original slice (named "word" in the example) as a side +effect, for efficiency reasons. And that the slice named "stem" in the example above may be a +sub-slice of the slice named "word". + +Also alternatively, if you already know that your word is already lowercase (and you don't need +this library to lowercase your word for you) you can instead use code like: + + package main + + import ( + "fmt" + "github.com/reiver/go-porterstemmer" + ) + + func main() { + + word := []rune("waxes") + + stem := porterstemmer.StemWithoutLowerCasing(word) + + fmt.Printf("The word [%s] has the stem [%s].\n", string(word), string(stem)) + } + +Again NOTE (like with the previous example) that the above code may modify original slice (named +"word" in the example) as a side effect, for efficiency reasons. 
And that the slice named "stem" +in the example above may be a sub-slice of the slice named "word". diff --git a/vendor/github.com/blevesearch/go-porterstemmer/porterstemmer.go b/vendor/github.com/blevesearch/go-porterstemmer/porterstemmer.go new file mode 100644 index 0000000..d1f77e6 --- /dev/null +++ b/vendor/github.com/blevesearch/go-porterstemmer/porterstemmer.go @@ -0,0 +1,839 @@ +package porterstemmer + +import ( + // "log" + "unicode" +) + +func isConsonant(s []rune, i int) bool { + + //DEBUG + //log.Printf("isConsonant: [%+v]", string(s[i])) + + result := true + + switch s[i] { + case 'a', 'e', 'i', 'o', 'u': + result = false + case 'y': + if 0 == i { + result = true + } else { + result = !isConsonant(s, i-1) + } + default: + result = true + } + + return result +} + +func measure(s []rune) uint { + + // Initialize. + lenS := len(s) + result := uint(0) + i := 0 + + // Short Circuit. + if 0 == lenS { + /////////// RETURN + return result + } + + // Ignore (potential) consonant sequence at the beginning of word. + for isConsonant(s, i) { + + //DEBUG + //log.Printf("[measure([%s])] Eat Consonant [%d] -> [%s]", string(s), i, string(s[i])) + + i++ + if i >= lenS { + /////////////// RETURN + return result + } + } + + // For each pair of a vowel sequence followed by a consonant sequence, increment result. 
+Outer: + for i < lenS { + + for !isConsonant(s, i) { + + //DEBUG + //log.Printf("[measure([%s])] VOWEL [%d] -> [%s]", string(s), i, string(s[i])) + + i++ + if i >= lenS { + /////////// BREAK + break Outer + } + } + for isConsonant(s, i) { + + //DEBUG + //log.Printf("[measure([%s])] CONSONANT [%d] -> [%s]", string(s), i, string(s[i])) + + i++ + if i >= lenS { + result++ + /////////// BREAK + break Outer + } + } + result++ + } + + // Return + return result +} + +func hasSuffix(s, suffix []rune) bool { + + lenSMinusOne := len(s) - 1 + lenSuffixMinusOne := len(suffix) - 1 + + if lenSMinusOne <= lenSuffixMinusOne { + return false + } else if s[lenSMinusOne] != suffix[lenSuffixMinusOne] { // I suspect checking this first should speed this function up in practice. + /////// RETURN + return false + } else { + + for i := 0; i < lenSuffixMinusOne; i++ { + + if suffix[i] != s[lenSMinusOne-lenSuffixMinusOne+i] { + /////////////// RETURN + return false + } + + } + + } + + return true +} + +func containsVowel(s []rune) bool { + + lenS := len(s) + + for i := 0; i < lenS; i++ { + + if !isConsonant(s, i) { + /////////// RETURN + return true + } + + } + + return false +} + +func hasRepeatDoubleConsonantSuffix(s []rune) bool { + + // Initialize. + lenS := len(s) + + result := false + + // Do it! + if 2 > lenS { + result = false + } else if s[lenS-1] == s[lenS-2] && isConsonant(s, lenS-1) { // Will using isConsonant() cause a problem with "YY"? + result = true + } else { + result = false + } + + // Return, + return result +} + +func hasConsonantVowelConsonantSuffix(s []rune) bool { + + // Initialize. + lenS := len(s) + + result := false + + // Do it! + if 3 > lenS { + result = false + } else if isConsonant(s, lenS-3) && !isConsonant(s, lenS-2) && isConsonant(s, lenS-1) { + result = true + } else { + result = false + } + + // Return + return result +} + +func step1a(s []rune) []rune { + + // Initialize. + var result []rune = s + + lenS := len(s) + + // Do it! 
+ if suffix := []rune("sses"); hasSuffix(s, suffix) { + + lenTrim := 2 + + subSlice := s[:lenS-lenTrim] + + result = subSlice + } else if suffix := []rune("ies"); hasSuffix(s, suffix) { + lenTrim := 2 + + subSlice := s[:lenS-lenTrim] + + result = subSlice + } else if suffix := []rune("ss"); hasSuffix(s, suffix) { + + result = s + } else if suffix := []rune("s"); hasSuffix(s, suffix) { + + lenSuffix := 1 + + subSlice := s[:lenS-lenSuffix] + + result = subSlice + } + + // Return. + return result +} + +func step1b(s []rune) []rune { + + // Initialize. + var result []rune = s + + lenS := len(s) + + // Do it! + if suffix := []rune("eed"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 0 < m { + lenTrim := 1 + + result = s[:lenS-lenTrim] + } + } else if suffix := []rune("ed"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + if containsVowel(subSlice) { + + if suffix2 := []rune("at"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + } else if suffix2 := []rune("bl"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + } else if suffix2 := []rune("iz"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + } else if c := subSlice[len(subSlice)-1]; 'l' != c && 's' != c && 'z' != c && hasRepeatDoubleConsonantSuffix(subSlice) { + lenTrim := 1 + + lenSubSlice := len(subSlice) + + result = subSlice[:lenSubSlice-lenTrim] + } else if c := subSlice[len(subSlice)-1]; 1 == measure(subSlice) && hasConsonantVowelConsonantSuffix(subSlice) && 'w' != c && 'x' != c && 'y' != c { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + + result[len(result)-1] = 'e' + } else { + result = subSlice + } + + } + } else if suffix := []rune("ing"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + if containsVowel(subSlice) { + + if 
suffix2 := []rune("at"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + + result[len(result)-1] = 'e' + } else if suffix2 := []rune("bl"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + + result[len(result)-1] = 'e' + } else if suffix2 := []rune("iz"); hasSuffix(subSlice, suffix2) { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + + result[len(result)-1] = 'e' + } else if c := subSlice[len(subSlice)-1]; 'l' != c && 's' != c && 'z' != c && hasRepeatDoubleConsonantSuffix(subSlice) { + lenTrim := 1 + + lenSubSlice := len(subSlice) + + result = subSlice[:lenSubSlice-lenTrim] + } else if c := subSlice[len(subSlice)-1]; 1 == measure(subSlice) && hasConsonantVowelConsonantSuffix(subSlice) && 'w' != c && 'x' != c && 'y' != c { + lenTrim := -1 + + result = s[:lenS-lenSuffix-lenTrim] + + result[len(result)-1] = 'e' + } else { + result = subSlice + } + + } + } + + // Return. + return result +} + +func step1c(s []rune) []rune { + + // Initialize. + lenS := len(s) + + result := s + + // Do it! + if 2 > lenS { + /////////// RETURN + return result + } + + if 'y' == s[lenS-1] && containsVowel(s[:lenS-1]) { + + result[lenS-1] = 'i' + + } else if 'Y' == s[lenS-1] && containsVowel(s[:lenS-1]) { + + result[lenS-1] = 'I' + + } + + // Return. + return result +} + +func step2(s []rune) []rune { + + // Initialize. + lenS := len(s) + + result := s + + // Do it! 
+ if suffix := []rune("ational"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-5] = 'e' + result = result[:lenS-4] + } + } else if suffix := []rune("tional"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = result[:lenS-2] + } + } else if suffix := []rune("enci"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-1] = 'e' + } + } else if suffix := []rune("anci"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-1] = 'e' + } + } else if suffix := []rune("izer"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-1] + } + } else if suffix := []rune("bli"); hasSuffix(s, suffix) { // --DEPARTURE-- + // } else if suffix := []rune("abli") ; hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-1] = 'e' + } + } else if suffix := []rune("alli"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-2] + } + } else if suffix := []rune("entli"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-2] + } + } else if suffix := []rune("eli"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-2] + } + } else if suffix := []rune("ousli"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-2] + } + } else if suffix := []rune("ization"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-5] = 'e' + + result = s[:lenS-4] + } + } else if suffix := []rune("ation"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-3] = 'e' + + result = s[:lenS-2] + } + } else if suffix := []rune("ator"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-2] = 'e' + + result = s[:lenS-1] + } + } else if suffix := []rune("alism"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-3] + } + } else if 
suffix := []rune("iveness"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-4] + } + } else if suffix := []rune("fulness"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-4] + } + } else if suffix := []rune("ousness"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-4] + } + } else if suffix := []rune("aliti"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result = s[:lenS-3] + } + } else if suffix := []rune("iviti"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-3] = 'e' + + result = result[:lenS-2] + } + } else if suffix := []rune("biliti"); hasSuffix(s, suffix) { + if 0 < measure(s[:lenS-len(suffix)]) { + result[lenS-5] = 'l' + result[lenS-4] = 'e' + + result = result[:lenS-3] + } + } else if suffix := []rune("logi"); hasSuffix(s, suffix) { // --DEPARTURE-- + if 0 < measure(s[:lenS-len(suffix)]) { + lenTrim := 1 + + result = s[:lenS-lenTrim] + } + } + + // Return. + return result +} + +func step3(s []rune) []rune { + + // Initialize. + lenS := len(s) + result := s + + // Do it! 
+ if suffix := []rune("icate"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + if 0 < measure(s[:lenS-lenSuffix]) { + result = result[:lenS-3] + } + } else if suffix := []rune("ative"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 0 < m { + result = subSlice + } + } else if suffix := []rune("alize"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + if 0 < measure(s[:lenS-lenSuffix]) { + result = result[:lenS-3] + } + } else if suffix := []rune("iciti"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + if 0 < measure(s[:lenS-lenSuffix]) { + result = result[:lenS-3] + } + } else if suffix := []rune("ical"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + if 0 < measure(s[:lenS-lenSuffix]) { + result = result[:lenS-2] + } + } else if suffix := []rune("ful"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 0 < m { + result = subSlice + } + } else if suffix := []rune("ness"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 0 < m { + result = subSlice + } + } + + // Return. + return result +} + +func step4(s []rune) []rune { + + // Initialize. + lenS := len(s) + result := s + + // Do it! 
+ if suffix := []rune("al"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = result[:lenS-lenSuffix] + } + } else if suffix := []rune("ance"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = result[:lenS-lenSuffix] + } + } else if suffix := []rune("ence"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = result[:lenS-lenSuffix] + } + } else if suffix := []rune("er"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ic"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("able"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ible"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ant"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ement"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ment"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ent"); hasSuffix(s, suffix) { + lenSuffix := 
len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ion"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + c := subSlice[len(subSlice)-1] + + if 1 < m && ('s' == c || 't' == c) { + result = subSlice + } + } else if suffix := []rune("ou"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ism"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ate"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("iti"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ous"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ive"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } else if suffix := []rune("ize"); hasSuffix(s, suffix) { + lenSuffix := len(suffix) + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } + + // Return. + return result +} + +func step5a(s []rune) []rune { + + // Initialize. + lenS := len(s) + result := s + + // Do it! 
+ if 'e' == s[lenS-1] { + lenSuffix := 1 + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } else if 1 == m { + if c := subSlice[len(subSlice)-1]; !(hasConsonantVowelConsonantSuffix(subSlice) && 'w' != c && 'x' != c && 'y' != c) { + result = subSlice + } + } + } + + // Return. + return result +} + +func step5b(s []rune) []rune { + + // Initialize. + lenS := len(s) + result := s + + // Do it! + if 2 < lenS && 'l' == s[lenS-2] && 'l' == s[lenS-1] { + + lenSuffix := 1 + + subSlice := s[:lenS-lenSuffix] + + m := measure(subSlice) + + if 1 < m { + result = subSlice + } + } + + // Return. + return result +} + +func StemString(s string) string { + + // Convert string to []rune + runeArr := []rune(s) + + // Stem. + runeArr = Stem(runeArr) + + // Convert []rune to string + str := string(runeArr) + + // Return. + return str +} + +func Stem(s []rune) []rune { + + // Initialize. + lenS := len(s) + + // Short circuit. + if 0 == lenS { + /////////// RETURN + return s + } + + // Make all runes lowercase. + for i := 0; i < lenS; i++ { + s[i] = unicode.ToLower(s[i]) + } + + // Stem + result := StemWithoutLowerCasing(s) + + // Return. + return result +} + +func StemWithoutLowerCasing(s []rune) []rune { + + // Initialize. + lenS := len(s) + + // Words that are of length 2 or less is already stemmed. + // Don't do anything. + if 2 >= lenS { + /////////// RETURN + return s + } + + // Stem + s = step1a(s) + s = step1b(s) + s = step1c(s) + s = step2(s) + s = step3(s) + s = step4(s) + s = step5a(s) + s = step5b(s) + + // Return. 
+ return s +} diff --git a/vendor/github.com/blevesearch/mmap-go/.gitignore b/vendor/github.com/blevesearch/mmap-go/.gitignore new file mode 100644 index 0000000..0c0a5e4 --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/.gitignore @@ -0,0 +1,10 @@ +*.out +*.5 +*.6 +*.8 +*.swp +_obj +_test +testdata +/.idea +*.iml \ No newline at end of file diff --git a/vendor/github.com/blevesearch/mmap-go/.travis.yml b/vendor/github.com/blevesearch/mmap-go/.travis.yml new file mode 100644 index 0000000..169eb1f --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/.travis.yml @@ -0,0 +1,16 @@ +language: go +os: + - linux + - osx + - windows +go: + - 1.11.4 +env: + global: + - GO111MODULE=on +install: + - go mod download + - go get github.com/mattn/goveralls +script: + - go test -v -covermode=count -coverprofile=coverage.out -bench . -cpu 1,4 + - '[ "${TRAVIS_PULL_REQUEST}" = "false" ] && $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci -repotoken $COVERALLS_TOKEN || true' diff --git a/vendor/github.com/blevesearch/mmap-go/LICENSE b/vendor/github.com/blevesearch/mmap-go/LICENSE new file mode 100644 index 0000000..8f05f33 --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2011, Evan Shaw +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/blevesearch/mmap-go/README.md b/vendor/github.com/blevesearch/mmap-go/README.md new file mode 100644 index 0000000..4cc2bfe --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/README.md @@ -0,0 +1,12 @@ +mmap-go +======= + +mmap-go is a portable mmap package for the [Go programming language](http://golang.org). +It has been tested on Linux (386, amd64), OS X, and Windows (386). It should also +work on other Unix-like platforms, but hasn't been tested with them. I'm interested +to hear about the results. + +I haven't been able to add more features without adding significant complexity, +so mmap-go doesn't support mprotect, mincore, and maybe a few other things. +If you're running on a Unix-like platform and need some of these features, +I suggest Gustavo Niemeyer's [gommap](http://labix.org/gommap). diff --git a/vendor/github.com/blevesearch/mmap-go/mmap.go b/vendor/github.com/blevesearch/mmap-go/mmap.go new file mode 100644 index 0000000..29655bd --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/mmap.go @@ -0,0 +1,117 @@ +// Copyright 2011 Evan Shaw. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// This file defines the common package interface and contains a little bit of +// factored out logic. + +// Package mmap allows mapping files into memory. It tries to provide a simple, reasonably portable interface, +// but doesn't go out of its way to abstract away every little platform detail. +// This specifically means: +// * forked processes may or may not inherit mappings +// * a file's timestamp may or may not be updated by writes through mappings +// * specifying a size larger than the file's actual size can increase the file's size +// * If the mapped file is being modified by another process while your program's running, don't expect consistent results between platforms +package mmap + +import ( + "errors" + "os" + "reflect" + "unsafe" +) + +const ( + // RDONLY maps the memory read-only. + // Attempts to write to the MMap object will result in undefined behavior. + RDONLY = 0 + // RDWR maps the memory as read-write. Writes to the MMap object will update the + // underlying file. + RDWR = 1 << iota + // COPY maps the memory as copy-on-write. Writes to the MMap object will affect + // memory, but the underlying file will remain unchanged. + COPY + // If EXEC is set, the mapped memory is marked as executable. + EXEC +) + +const ( + // If the ANON flag is set, the mapped memory will not be backed by a file. + ANON = 1 << iota +) + +// MMap represents a file mapped into memory. +type MMap []byte + +// Map maps an entire file into memory. +// If ANON is set in flags, f is ignored. +func Map(f *os.File, prot, flags int) (MMap, error) { + return MapRegion(f, -1, prot, flags, 0) +} + +// MapRegion maps part of a file into memory. +// The offset parameter must be a multiple of the system's page size. +// If length < 0, the entire file will be mapped. +// If ANON is set in flags, f is ignored. 
+func MapRegion(f *os.File, length int, prot, flags int, offset int64) (MMap, error) { + if offset%int64(os.Getpagesize()) != 0 { + return nil, errors.New("offset parameter must be a multiple of the system's page size") + } + + var fd uintptr + if flags&ANON == 0 { + fd = uintptr(f.Fd()) + if length < 0 { + fi, err := f.Stat() + if err != nil { + return nil, err + } + length = int(fi.Size()) + } + } else { + if length <= 0 { + return nil, errors.New("anonymous mapping requires non-zero length") + } + fd = ^uintptr(0) + } + return mmap(length, uintptr(prot), uintptr(flags), fd, offset) +} + +func (m *MMap) header() *reflect.SliceHeader { + return (*reflect.SliceHeader)(unsafe.Pointer(m)) +} + +func (m *MMap) addrLen() (uintptr, uintptr) { + header := m.header() + return header.Data, uintptr(header.Len) +} + +// Lock keeps the mapped region in physical memory, ensuring that it will not be +// swapped out. +func (m MMap) Lock() error { + return m.lock() +} + +// Unlock reverses the effect of Lock, allowing the mapped region to potentially +// be swapped out. +// If m is already unlocked, an error will result. +func (m MMap) Unlock() error { + return m.unlock() +} + +// Flush synchronizes the mapping's contents to the file's contents on disk. +func (m MMap) Flush() error { + return m.flush() +} + +// Unmap deletes the memory mapped region, flushes any remaining changes, and sets +// m to nil. +// Trying to read or write any remaining references to m after Unmap is called will +// result in undefined behavior. +// Unmap should only be called on the slice value that was originally returned from +// a call to Map. Calling Unmap on a derived slice may cause errors. 
+func (m *MMap) Unmap() error { + err := m.unmap() + *m = nil + return err +} diff --git a/vendor/github.com/blevesearch/mmap-go/mmap_unix.go b/vendor/github.com/blevesearch/mmap-go/mmap_unix.go new file mode 100644 index 0000000..25b13e5 --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/mmap_unix.go @@ -0,0 +1,51 @@ +// Copyright 2011 Evan Shaw. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build darwin dragonfly freebsd linux openbsd solaris netbsd + +package mmap + +import ( + "golang.org/x/sys/unix" +) + +func mmap(len int, inprot, inflags, fd uintptr, off int64) ([]byte, error) { + flags := unix.MAP_SHARED + prot := unix.PROT_READ + switch { + case inprot&COPY != 0: + prot |= unix.PROT_WRITE + flags = unix.MAP_PRIVATE + case inprot&RDWR != 0: + prot |= unix.PROT_WRITE + } + if inprot&EXEC != 0 { + prot |= unix.PROT_EXEC + } + if inflags&ANON != 0 { + flags |= unix.MAP_ANON + } + + b, err := unix.Mmap(int(fd), off, len, prot, flags) + if err != nil { + return nil, err + } + return b, nil +} + +func (m MMap) flush() error { + return unix.Msync([]byte(m), unix.MS_SYNC) +} + +func (m MMap) lock() error { + return unix.Mlock([]byte(m)) +} + +func (m MMap) unlock() error { + return unix.Munlock([]byte(m)) +} + +func (m MMap) unmap() error { + return unix.Munmap([]byte(m)) +} diff --git a/vendor/github.com/blevesearch/mmap-go/mmap_windows.go b/vendor/github.com/blevesearch/mmap-go/mmap_windows.go new file mode 100644 index 0000000..631b382 --- /dev/null +++ b/vendor/github.com/blevesearch/mmap-go/mmap_windows.go @@ -0,0 +1,153 @@ +// Copyright 2011 Evan Shaw. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mmap + +import ( + "errors" + "os" + "sync" + + "golang.org/x/sys/windows" +) + +// mmap on Windows is a two-step process. +// First, we call CreateFileMapping to get a handle. 
+// Then, we call MapviewToFile to get an actual pointer into memory. +// Because we want to emulate a POSIX-style mmap, we don't want to expose +// the handle -- only the pointer. We also want to return only a byte slice, +// not a struct, so it's convenient to manipulate. + +// We keep this map so that we can get back the original handle from the memory address. + +type addrinfo struct { + file windows.Handle + mapview windows.Handle + writable bool +} + +var handleLock sync.Mutex +var handleMap = map[uintptr]*addrinfo{} + +func mmap(len int, prot, flags, hfile uintptr, off int64) ([]byte, error) { + flProtect := uint32(windows.PAGE_READONLY) + dwDesiredAccess := uint32(windows.FILE_MAP_READ) + writable := false + switch { + case prot&COPY != 0: + flProtect = windows.PAGE_WRITECOPY + dwDesiredAccess = windows.FILE_MAP_COPY + writable = true + case prot&RDWR != 0: + flProtect = windows.PAGE_READWRITE + dwDesiredAccess = windows.FILE_MAP_WRITE + writable = true + } + if prot&EXEC != 0 { + flProtect <<= 4 + dwDesiredAccess |= windows.FILE_MAP_EXECUTE + } + + // The maximum size is the area of the file, starting from 0, + // that we wish to allow to be mappable. It is the sum of + // the length the user requested, plus the offset where that length + // is starting from. This does not map the data into memory. + maxSizeHigh := uint32((off + int64(len)) >> 32) + maxSizeLow := uint32((off + int64(len)) & 0xFFFFFFFF) + // TODO: Do we need to set some security attributes? It might help portability. + h, errno := windows.CreateFileMapping(windows.Handle(hfile), nil, flProtect, maxSizeHigh, maxSizeLow, nil) + if h == 0 { + return nil, os.NewSyscallError("CreateFileMapping", errno) + } + + // Actually map a view of the data into memory. The view's size + // is the length the user requested. 
+ fileOffsetHigh := uint32(off >> 32) + fileOffsetLow := uint32(off & 0xFFFFFFFF) + addr, errno := windows.MapViewOfFile(h, dwDesiredAccess, fileOffsetHigh, fileOffsetLow, uintptr(len)) + if addr == 0 { + return nil, os.NewSyscallError("MapViewOfFile", errno) + } + handleLock.Lock() + handleMap[addr] = &addrinfo{ + file: windows.Handle(hfile), + mapview: h, + writable: writable, + } + handleLock.Unlock() + + m := MMap{} + dh := m.header() + dh.Data = addr + dh.Len = len + dh.Cap = dh.Len + + return m, nil +} + +func (m MMap) flush() error { + addr, len := m.addrLen() + errno := windows.FlushViewOfFile(addr, len) + if errno != nil { + return os.NewSyscallError("FlushViewOfFile", errno) + } + + handleLock.Lock() + defer handleLock.Unlock() + handle, ok := handleMap[addr] + if !ok { + // should be impossible; we would've errored above + return errors.New("unknown base address") + } + + if handle.writable { + if err := windows.FlushFileBuffers(handle.file); err != nil { + return os.NewSyscallError("FlushFileBuffers", err) + } + } + + return nil +} + +func (m MMap) lock() error { + addr, len := m.addrLen() + errno := windows.VirtualLock(addr, len) + return os.NewSyscallError("VirtualLock", errno) +} + +func (m MMap) unlock() error { + addr, len := m.addrLen() + errno := windows.VirtualUnlock(addr, len) + return os.NewSyscallError("VirtualUnlock", errno) +} + +func (m MMap) unmap() error { + err := m.flush() + if err != nil { + return err + } + + addr := m.header().Data + // Lock the UnmapViewOfFile along with the handleMap deletion. + // As soon as we unmap the view, the OS is free to give the + // same addr to another new map. We don't want another goroutine + // to insert and remove the same addr into handleMap while + // we're trying to remove our old addr/handle pair. 
+ handleLock.Lock() + defer handleLock.Unlock() + err = windows.UnmapViewOfFile(addr) + if err != nil { + return err + } + + handle, ok := handleMap[addr] + if !ok { + // should be impossible; we would've errored above + return errors.New("unknown base address") + } + delete(handleMap, addr) + + e := windows.CloseHandle(windows.Handle(handle.mapview)) + return os.NewSyscallError("CloseHandle", e) +} diff --git a/vendor/github.com/blevesearch/segment/.gitignore b/vendor/github.com/blevesearch/segment/.gitignore new file mode 100644 index 0000000..b4ccb07 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/.gitignore @@ -0,0 +1,10 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +.DS_Store +/maketesttables +/workdir +/segment-fuzz.zip \ No newline at end of file diff --git a/vendor/github.com/blevesearch/segment/.travis.yml b/vendor/github.com/blevesearch/segment/.travis.yml new file mode 100644 index 0000000..b9d58e7 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/.travis.yml @@ -0,0 +1,15 @@ +language: go + +go: + - 1.7 + +script: + - go get golang.org/x/tools/cmd/cover + - go get github.com/mattn/goveralls + - go test -v -covermode=count -coverprofile=profile.out + - go vet + - goveralls -service drone.io -coverprofile=profile.out -repotoken $COVERALLS + +notifications: + email: + - marty.schoch@gmail.com diff --git a/vendor/github.com/blevesearch/segment/LICENSE b/vendor/github.com/blevesearch/segment/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/segment/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/segment/README.md b/vendor/github.com/blevesearch/segment/README.md new file mode 100644 index 0000000..0840962 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/README.md @@ -0,0 +1,92 @@ +# segment + +A Go library for performing Unicode Text Segmentation +as described in [Unicode Standard Annex #29](http://www.unicode.org/reports/tr29/) + +## Features + +* Currently only segmentation at Word Boundaries is supported. + +## License + +Apache License Version 2.0 + +## Usage + +The functionality is exposed in two ways: + +1. You can use a bufio.Scanner with the SplitWords implementation of SplitFunc. 
+The SplitWords function will identify the appropriate word boundaries in the input +text and the Scanner will return tokens at the appropriate place. + + scanner := bufio.NewScanner(...) + scanner.Split(segment.SplitWords) + for scanner.Scan() { + tokenBytes := scanner.Bytes() + } + if err := scanner.Err(); err != nil { + t.Fatal(err) + } + +2. Sometimes you would also like information returned about the type of token. +To do this we have introduce a new type named Segmenter. It works just like Scanner +but additionally a token type is returned. + + segmenter := segment.NewWordSegmenter(...) + for segmenter.Segment() { + tokenBytes := segmenter.Bytes()) + tokenType := segmenter.Type() + } + if err := segmenter.Err(); err != nil { + t.Fatal(err) + } + +## Choosing Implementation + +By default segment does NOT use the fastest runtime implementation. The reason is that it adds approximately 5s to compilation time and may require more than 1GB of ram on the machine performing compilation. + +However, you can choose to build with the fastest runtime implementation by passing the build tag as follows: + + -tags 'prod' + +## Generating Code + +Several components in this package are generated. + +1. Several Ragel rules files are generated from Unicode properties files. +2. Ragel machine is generated from the Ragel rules. +3. Test tables are generated from the Unicode test files. + +All of these can be generated by running: + + go generate + +## Fuzzing + +There is support for fuzzing the segment library with [go-fuzz](https://github.com/dvyukov/go-fuzz). + +1. Install go-fuzz if you haven't already: + + go get github.com/dvyukov/go-fuzz/go-fuzz + go get github.com/dvyukov/go-fuzz/go-fuzz-build + +2. Build the package with go-fuzz: + + go-fuzz-build github.com/blevesearch/segment + +3. Convert the Unicode provided test cases into the initial corpus for go-fuzz: + + go test -v -run=TestGenerateWordSegmentFuzz -tags gofuzz_generate + +4. 
Run go-fuzz: + + go-fuzz -bin=segment-fuzz.zip -workdir=workdir + +## Status + + +[![Build Status](https://travis-ci.org/blevesearch/segment.svg?branch=master)](https://travis-ci.org/blevesearch/segment) + +[![Coverage Status](https://img.shields.io/coveralls/blevesearch/segment.svg)](https://coveralls.io/r/blevesearch/segment?branch=master) + +[![GoDoc](https://godoc.org/github.com/blevesearch/segment?status.svg)](https://godoc.org/github.com/blevesearch/segment) \ No newline at end of file diff --git a/vendor/github.com/blevesearch/segment/doc.go b/vendor/github.com/blevesearch/segment/doc.go new file mode 100644 index 0000000..6eed3e3 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/doc.go @@ -0,0 +1,45 @@ +// Copyright (c) 2014 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +/* +Package segment is a library for performing Unicode Text Segmentation +as described in Unicode Standard Annex #29 http://www.unicode.org/reports/tr29/ + +Currently only segmentation at Word Boundaries is supported. + +The functionality is exposed in two ways: + +1. You can use a bufio.Scanner with the SplitWords implementation of SplitFunc. +The SplitWords function will identify the appropriate word boundaries in the input +text and the Scanner will return tokens at the appropriate place. + + scanner := bufio.NewScanner(...) 
+ scanner.Split(segment.SplitWords) + for scanner.Scan() { + tokenBytes := scanner.Bytes() + } + if err := scanner.Err(); err != nil { + t.Fatal(err) + } + +2. Sometimes you would also like information returned about the type of token. +To do this we have introduce a new type named Segmenter. It works just like Scanner +but additionally a token type is returned. + + segmenter := segment.NewWordSegmenter(...) + for segmenter.Segment() { + tokenBytes := segmenter.Bytes()) + tokenType := segmenter.Type() + } + if err := segmenter.Err(); err != nil { + t.Fatal(err) + } + +*/ +package segment diff --git a/vendor/github.com/blevesearch/segment/segment.go b/vendor/github.com/blevesearch/segment/segment.go new file mode 100644 index 0000000..42ab482 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/segment.go @@ -0,0 +1,284 @@ +// Copyright (c) 2015 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +package segment + +import ( + "errors" + "io" +) + +// Autogenerate the following: +// 1. Ragel rules from subset of Unicode script properties +// 2. Ragel rules from Unicode word segmentation properties +// 3. Ragel machine for word segmentation +// 4. Test tables from Unicode +// +// Requires: +// 1. Ruby (to generate ragel rules from unicode spec) +// 2. Ragel (only v6.9 tested) +// 3. 
sed (to rewrite build tags) +// +//go:generate ragel/unicode2ragel.rb -u http://www.unicode.org/Public/8.0.0/ucd/Scripts.txt -m SCRIPTS -p Hangul,Han,Hiragana -o ragel/uscript.rl +//go:generate ragel/unicode2ragel.rb -u http://www.unicode.org/Public/8.0.0/ucd/auxiliary/WordBreakProperty.txt -m WB -p Double_Quote,Single_Quote,Hebrew_Letter,CR,LF,Newline,Extend,Format,Katakana,ALetter,MidLetter,MidNum,MidNumLet,Numeric,ExtendNumLet,Regional_Indicator -o ragel/uwb.rl +//go:generate ragel -T1 -Z segment_words.rl -o segment_words.go +//go:generate sed -i "" -e "s/BUILDTAGS/!prod/" segment_words.go +//go:generate sed -i "" -e "s/RAGELFLAGS/-T1/" segment_words.go +//go:generate ragel -G2 -Z segment_words.rl -o segment_words_prod.go +//go:generate sed -i "" -e "s/BUILDTAGS/prod/" segment_words_prod.go +//go:generate sed -i "" -e "s/RAGELFLAGS/-G2/" segment_words_prod.go +//go:generate go run maketesttables.go -output tables_test.go + +// NewWordSegmenter returns a new Segmenter to read from r. +func NewWordSegmenter(r io.Reader) *Segmenter { + return NewSegmenter(r) +} + +// NewWordSegmenterDirect returns a new Segmenter to work directly with buf. +func NewWordSegmenterDirect(buf []byte) *Segmenter { + return NewSegmenterDirect(buf) +} + +func SplitWords(data []byte, atEOF bool) (int, []byte, error) { + advance, token, _, err := SegmentWords(data, atEOF) + return advance, token, err +} + +func SegmentWords(data []byte, atEOF bool) (int, []byte, int, error) { + vals := make([][]byte, 0, 1) + types := make([]int, 0, 1) + tokens, types, advance, err := segmentWords(data, 1, atEOF, vals, types) + if len(tokens) > 0 { + return advance, tokens[0], types[0], err + } + return advance, nil, 0, err +} + +func SegmentWordsDirect(data []byte, val [][]byte, types []int) ([][]byte, []int, int, error) { + return segmentWords(data, -1, true, val, types) +} + +// *** Core Segmenter + +const maxConsecutiveEmptyReads = 100 + +// NewSegmenter returns a new Segmenter to read from r. 
+// Defaults to segment using SegmentWords +func NewSegmenter(r io.Reader) *Segmenter { + return &Segmenter{ + r: r, + segment: SegmentWords, + maxTokenSize: MaxScanTokenSize, + buf: make([]byte, 4096), // Plausible starting size; needn't be large. + } +} + +// NewSegmenterDirect returns a new Segmenter to work directly with buf. +// Defaults to segment using SegmentWords +func NewSegmenterDirect(buf []byte) *Segmenter { + return &Segmenter{ + segment: SegmentWords, + maxTokenSize: MaxScanTokenSize, + buf: buf, + start: 0, + end: len(buf), + err: io.EOF, + } +} + +// Segmenter provides a convenient interface for reading data such as +// a file of newline-delimited lines of text. Successive calls to +// the Segment method will step through the 'tokens' of a file, skipping +// the bytes between the tokens. The specification of a token is +// defined by a split function of type SplitFunc; the default split +// function breaks the input into lines with line termination stripped. Split +// functions are defined in this package for scanning a file into +// lines, bytes, UTF-8-encoded runes, and space-delimited words. The +// client may instead provide a custom split function. +// +// Segmenting stops unrecoverably at EOF, the first I/O error, or a token too +// large to fit in the buffer. When a scan stops, the reader may have +// advanced arbitrarily far past the last token. Programs that need more +// control over error handling or large tokens, or must run sequential scans +// on a reader, should use bufio.Reader instead. +// +type Segmenter struct { + r io.Reader // The reader provided by the client. + segment SegmentFunc // The function to split the tokens. + maxTokenSize int // Maximum size of a token; modified by tests. + token []byte // Last token returned by split. + buf []byte // Buffer used as argument to split. + start int // First non-processed byte in buf. + end int // End of data in buf. + typ int // The token type + err error // Sticky error. 
+} + +// SegmentFunc is the signature of the segmenting function used to tokenize the +// input. The arguments are an initial substring of the remaining unprocessed +// data and a flag, atEOF, that reports whether the Reader has no more data +// to give. The return values are the number of bytes to advance the input +// and the next token to return to the user, plus an error, if any. If the +// data does not yet hold a complete token, for instance if it has no newline +// while scanning lines, SegmentFunc can return (0, nil, nil) to signal the +// Segmenter to read more data into the slice and try again with a longer slice +// starting at the same point in the input. +// +// If the returned error is non-nil, segmenting stops and the error +// is returned to the client. +// +// The function is never called with an empty data slice unless atEOF +// is true. If atEOF is true, however, data may be non-empty and, +// as always, holds unprocessed text. +type SegmentFunc func(data []byte, atEOF bool) (advance int, token []byte, segmentType int, err error) + +// Errors returned by Segmenter. +var ( + ErrTooLong = errors.New("bufio.Segmenter: token too long") + ErrNegativeAdvance = errors.New("bufio.Segmenter: SplitFunc returns negative advance count") + ErrAdvanceTooFar = errors.New("bufio.Segmenter: SplitFunc returns advance count beyond input") +) + +const ( + // Maximum size used to buffer a token. The actual maximum token size + // may be smaller as the buffer may need to include, for instance, a newline. + MaxScanTokenSize = 64 * 1024 +) + +// Err returns the first non-EOF error that was encountered by the Segmenter. +func (s *Segmenter) Err() error { + if s.err == io.EOF { + return nil + } + return s.err +} + +func (s *Segmenter) Type() int { + return s.typ +} + +// Bytes returns the most recent token generated by a call to Segment. +// The underlying array may point to data that will be overwritten +// by a subsequent call to Segment. It does no allocation. 
+func (s *Segmenter) Bytes() []byte { + return s.token +} + +// Text returns the most recent token generated by a call to Segment +// as a newly allocated string holding its bytes. +func (s *Segmenter) Text() string { + return string(s.token) +} + +// Segment advances the Segmenter to the next token, which will then be +// available through the Bytes or Text method. It returns false when the +// scan stops, either by reaching the end of the input or an error. +// After Segment returns false, the Err method will return any error that +// occurred during scanning, except that if it was io.EOF, Err +// will return nil. +func (s *Segmenter) Segment() bool { + // Loop until we have a token. + for { + // See if we can get a token with what we already have. + if s.end > s.start { + advance, token, typ, err := s.segment(s.buf[s.start:s.end], s.err != nil) + if err != nil { + s.setErr(err) + return false + } + s.typ = typ + if !s.advance(advance) { + return false + } + s.token = token + if token != nil { + return true + } + } + // We cannot generate a token with what we are holding. + // If we've already hit EOF or an I/O error, we are done. + if s.err != nil { + // Shut it down. + s.start = 0 + s.end = 0 + return false + } + // Must read more data. + // First, shift data to beginning of buffer if there's lots of empty space + // or space is needed. + if s.start > 0 && (s.end == len(s.buf) || s.start > len(s.buf)/2) { + copy(s.buf, s.buf[s.start:s.end]) + s.end -= s.start + s.start = 0 + } + // Is the buffer full? If so, resize. + if s.end == len(s.buf) { + if len(s.buf) >= s.maxTokenSize { + s.setErr(ErrTooLong) + return false + } + newSize := len(s.buf) * 2 + if newSize > s.maxTokenSize { + newSize = s.maxTokenSize + } + newBuf := make([]byte, newSize) + copy(newBuf, s.buf[s.start:s.end]) + s.buf = newBuf + s.end -= s.start + s.start = 0 + continue + } + // Finally we can read some input. Make sure we don't get stuck with + // a misbehaving Reader. 
Officially we don't need to do this, but let's + // be extra careful: Segmenter is for safe, simple jobs. + for loop := 0; ; { + n, err := s.r.Read(s.buf[s.end:len(s.buf)]) + s.end += n + if err != nil { + s.setErr(err) + break + } + if n > 0 { + break + } + loop++ + if loop > maxConsecutiveEmptyReads { + s.setErr(io.ErrNoProgress) + break + } + } + } +} + +// advance consumes n bytes of the buffer. It reports whether the advance was legal. +func (s *Segmenter) advance(n int) bool { + if n < 0 { + s.setErr(ErrNegativeAdvance) + return false + } + if n > s.end-s.start { + s.setErr(ErrAdvanceTooFar) + return false + } + s.start += n + return true +} + +// setErr records the first error encountered. +func (s *Segmenter) setErr(err error) { + if s.err == nil || s.err == io.EOF { + s.err = err + } +} + +// SetSegmenter sets the segment function for the Segmenter. If called, it must be +// called before Segment. +func (s *Segmenter) SetSegmenter(segmenter SegmentFunc) { + s.segment = segmenter +} diff --git a/vendor/github.com/blevesearch/segment/segment_fuzz.go b/vendor/github.com/blevesearch/segment/segment_fuzz.go new file mode 100644 index 0000000..748b3d6 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/segment_fuzz.go @@ -0,0 +1,22 @@ +// Copyright (c) 2015 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. 
+ +// +build gofuzz + +package segment + +func Fuzz(data []byte) int { + + vals := make([][]byte, 0, 10000) + types := make([]int, 0, 10000) + if _, _, _, err := SegmentWordsDirect(data, vals, types); err != nil { + return 0 + } + return 1 +} diff --git a/vendor/github.com/blevesearch/segment/segment_words.go b/vendor/github.com/blevesearch/segment/segment_words.go new file mode 100644 index 0000000..4328b52 --- /dev/null +++ b/vendor/github.com/blevesearch/segment/segment_words.go @@ -0,0 +1,19542 @@ + +//line segment_words.rl:1 +// Copyright (c) 2015 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. 
+ +// +build !prod + +package segment + +import ( + "fmt" + "unicode/utf8" +) + +var RagelFlags = "-T1" + +var ParseError = fmt.Errorf("unicode word segmentation parse error") + +// Word Types +const ( + None = iota + Number + Letter + Kana + Ideo +) + + +//line segment_words.go:36 +var _s_key_offsets []uint16 = []uint16{ + 0, 1, 3, 5, 7, 10, 15, 20, + 23, 31, 35, 37, 39, 41, 71, 79, + 81, 83, 86, 91, 96, 106, 118, 124, + 129, 139, 142, 149, 153, 161, 171, 175, + 183, 185, 193, 196, 198, 203, 205, 212, + 214, 222, 223, 244, 246, 256, 261, 263, + 267, 271, 273, 277, 279, 280, 284, 286, + 291, 293, 297, 301, 305, 307, 309, 318, + 322, 328, 332, 336, 338, 340, 341, 343, + 345, 347, 349, 364, 368, 370, 372, 377, + 381, 385, 387, 389, 393, 397, 399, 404, + 411, 416, 420, 425, 426, 430, 432, 438, + 443, 444, 445, 447, 456, 458, 477, 481, + 483, 489, 493, 494, 498, 502, 504, 506, + 511, 524, 526, 528, 532, 536, 538, 540, + 542, 546, 548, 550, 552, 554, 555, 559, + 565, 569, 575, 577, 581, 585, 590, 591, + 593, 594, 600, 603, 605, 605, 628, 666, + 670, 676, 676, 678, 680, 689, 691, 701, + 709, 710, 713, 714, 718, 725, 734, 741, + 748, 761, 768, 772, 776, 783, 815, 822, + 826, 828, 830, 833, 840, 856, 874, 893, + 910, 927, 937, 954, 969, 990, 998, 1011, + 1024, 1039, 1054, 1065, 1080, 1089, 1099, 1102, + 1104, 1109, 1111, 1119, 1125, 1135, 1136, 1183, + 1185, 1195, 1202, 1208, 1215, 1223, 1230, 1233, + 1239, 1243, 1247, 1249, 1253, 1257, 1261, 1271, + 1281, 1283, 1287, 1291, 1293, 1298, 1300, 1306, + 1310, 1315, 1317, 1323, 1329, 1339, 1343, 1347, + 1351, 1364, 1368, 1372, 1382, 1387, 1401, 1419, + 1423, 1429, 1431, 1433, 1449, 1454, 1456, 1458, + 1460, 1464, 1465, 1471, 1477, 1482, 1492, 1502, + 1503, 1508, 1513, 1515, 1519, 1523, 1553, 1555, + 1557, 1563, 1570, 1576, 1580, 1584, 1590, 1592, + 1599, 1601, 1607, 1615, 1621, 1627, 1633, 1638, + 1642, 1649, 1656, 1665, 1677, 1680, 1686, 1686, + 1690, 1692, 1696, 1713, 1725, 1731, 1733, 1735, + 1737, 1739, 1741, 1745, 1749, 
1753, 1756, 1758, + 1760, 1764, 1774, 1784, 1823, 1833, 1837, 1839, + 1841, 1842, 1846, 1849, 1853, 1859, 1863, 1868, + 1870, 1874, 1876, 1878, 1882, 1892, 1896, 1898, + 1902, 1906, 1910, 1926, 1928, 1930, 1936, 1938, + 1942, 1944, 1946, 1970, 1976, 1978, 1984, 1986, + 1992, 1996, 2002, 2010, 2016, 2027, 2031, 2048, + 2064, 2068, 2073, 2079, 2085, 2088, 2092, 2094, + 2096, 2100, 2102, 2112, 2114, 2116, 2118, 2122, + 2126, 2128, 2140, 2142, 2146, 2150, 2156, 2158, + 2164, 2168, 2171, 2175, 2183, 2203, 2207, 2213, + 2215, 2216, 2226, 2227, 2235, 2242, 2244, 2247, + 2249, 2251, 2253, 2256, 2260, 2264, 2269, 2276, + 2282, 2296, 2322, 2332, 2335, 2337, 2341, 2343, + 2344, 2350, 2353, 2355, 2355, 2357, 2380, 2381, + 2383, 2385, 2387, 2390, 2395, 2400, 2406, 2416, + 2420, 2422, 2424, 2428, 2458, 2466, 2468, 2470, + 2473, 2482, 2487, 2501, 2515, 2525, 2530, 2542, + 2545, 2554, 2558, 2570, 2582, 2586, 2598, 2600, + 2610, 2613, 2617, 2622, 2626, 2635, 2637, 2645, + 2646, 2670, 2672, 2684, 2691, 2693, 2697, 2701, + 2703, 2711, 2715, 2716, 2720, 2722, 2724, 2726, + 2731, 2737, 2741, 2749, 2755, 2757, 2759, 2763, + 2772, 2776, 2782, 2786, 2790, 2792, 2794, 2795, + 2797, 2799, 2801, 2803, 2819, 2821, 2825, 2827, + 2829, 2834, 2838, 2846, 2850, 2852, 2856, 2868, + 2870, 2877, 2884, 2889, 2895, 2900, 2901, 2905, + 2907, 2913, 2919, 2920, 2921, 2923, 2932, 2934, + 2956, 2960, 2966, 2972, 2974, 2980, 2981, 2985, + 2993, 2995, 2999, 3004, 3017, 3019, 3025, 3029, + 3033, 3039, 3041, 3043, 3047, 3053, 3055, 3057, + 3059, 3061, 3062, 3066, 3073, 3077, 3083, 3085, + 3087, 3091, 3095, 3100, 3101, 3103, 3104, 3110, + 3113, 3115, 3115, 3119, 3121, 3131, 3134, 3141, + 3151, 3172, 3173, 3175, 3177, 3179, 3182, 3191, + 3195, 3197, 3207, 3210, 3217, 3227, 3234, 3244, + 3257, 3264, 3268, 3272, 3281, 3313, 3320, 3324, + 3326, 3329, 3339, 3355, 3375, 3394, 3413, 3430, + 3444, 3461, 3478, 3499, 3509, 3522, 3539, 3554, + 3571, 3582, 3601, 3610, 3622, 3625, 3629, 3634, + 3638, 3648, 3654, 3664, 3665, 
3714, 3716, 3728, + 3737, 3743, 3753, 3763, 3765, 3773, 3779, 3784, + 3790, 3794, 3799, 3805, 3811, 3821, 3833, 3837, + 3841, 3849, 3862, 3866, 3884, 3890, 3901, 3903, + 3909, 3914, 3924, 3929, 3934, 3936, 3966, 3972, + 3979, 3985, 3989, 3996, 4002, 4014, 4022, 4028, + 4034, 4047, 4051, 4060, 4067, 4076, 4084, 4101, + 4113, 4121, 4127, 4133, 4136, 4139, 4143, 4153, + 4163, 4202, 4203, 4206, 4212, 4218, 4234, 4240, + 4265, 4271, 4277, 4283, 4287, 4295, 4299, 4305, + 4318, 4324, 4332, 4349, 4365, 4369, 4378, 4384, + 4390, 4397, 4401, 4405, 4411, 4423, 4427, 4431, + 4435, 4443, 4449, 4453, 4456, 4464, 4484, 4488, + 4494, 4496, 4503, 4507, 4511, 4516, 4523, 4529, + 4530, 4536, 4539, 4541, 4541, 4543, 4548, 4551, + 4559, 4563, 4565, 4567, 4569, 4599, 4607, 4609, + 4611, 4614, 4619, 4624, 4634, 4646, 4652, 4657, + 4667, 4670, 4677, 4681, 4689, 4699, 4703, 4711, + 4713, 4721, 4724, 4726, 4731, 4733, 4740, 4742, + 4750, 4751, 4772, 4774, 4784, 4789, 4791, 4795, + 4799, 4801, 4805, 4807, 4808, 4812, 4814, 4819, + 4821, 4825, 4829, 4833, 4835, 4837, 4846, 4850, + 4856, 4860, 4864, 4866, 4868, 4869, 4871, 4873, + 4875, 4877, 4892, 4896, 4898, 4900, 4905, 4909, + 4913, 4915, 4917, 4921, 4925, 4927, 4932, 4939, + 4944, 4948, 4954, 4962, 4968, 4972, 4974, 4980, + 4985, 4986, 4987, 4989, 4998, 5000, 5019, 5023, + 5025, 5031, 5035, 5036, 5040, 5044, 5046, 5048, + 5053, 5066, 5068, 5070, 5074, 5078, 5080, 5082, + 5084, 5088, 5090, 5092, 5094, 5096, 5097, 5101, + 5107, 5111, 5117, 5119, 5123, 5127, 5132, 5133, + 5135, 5136, 5142, 5145, 5147, 5147, 5152, 5162, + 5164, 5174, 5182, 5185, 5192, 5203, 5210, 5220, + 5233, 5240, 5244, 5248, 5257, 5289, 5296, 5300, + 5302, 5305, 5315, 5331, 5351, 5370, 5389, 5406, + 5420, 5437, 5454, 5475, 5485, 5498, 5515, 5530, + 5547, 5558, 5577, 5586, 5598, 5601, 5605, 5610, + 5614, 5624, 5630, 5640, 5641, 5690, 5692, 5704, + 5713, 5719, 5729, 5739, 5741, 5749, 5755, 5760, + 5766, 5770, 5775, 5781, 5787, 5797, 5809, 5813, + 5817, 5830, 5834, 5852, 5862, 
5873, 5875, 5881, + 5886, 5896, 5901, 5906, 5908, 5938, 5945, 5951, + 5955, 5962, 5968, 5980, 5988, 5994, 6000, 6013, + 6017, 6026, 6033, 6042, 6050, 6067, 6078, 6085, + 6093, 6096, 6102, 6106, 6116, 6126, 6165, 6166, + 6169, 6175, 6191, 6197, 6222, 6228, 6234, 6240, + 6248, 6252, 6258, 6271, 6277, 6285, 6302, 6318, + 6322, 6331, 6337, 6343, 6350, 6354, 6358, 6370, + 6374, 6378, 6384, 6388, 6391, 6399, 6419, 6423, + 6429, 6431, 6435, 6439, 6444, 6451, 6457, 6458, + 6464, 6467, 6469, 6469, 6471, 6478, 6488, 6501, + 6508, 6512, 6516, 6525, 6557, 6564, 6568, 6570, + 6573, 6583, 6599, 6619, 6638, 6657, 6674, 6688, + 6705, 6722, 6743, 6753, 6766, 6783, 6798, 6815, + 6826, 6845, 6854, 6866, 6869, 6873, 6878, 6882, + 6892, 6898, 6908, 6909, 6958, 6960, 6972, 6981, + 6987, 6997, 7007, 7009, 7017, 7023, 7028, 7034, + 7038, 7043, 7049, 7055, 7065, 7077, 7081, 7085, + 7098, 7102, 7120, 7126, 7135, 7137, 7143, 7148, + 7158, 7168, 7175, 7176, 7178, 7180, 7182, 7185, + 7190, 7195, 7198, 7206, 7210, 7212, 7214, 7216, + 7246, 7254, 7256, 7258, 7261, 7266, 7271, 7281, + 7293, 7299, 7304, 7314, 7317, 7324, 7328, 7336, + 7346, 7350, 7358, 7360, 7368, 7371, 7373, 7378, + 7380, 7387, 7389, 7397, 7398, 7419, 7421, 7431, + 7436, 7438, 7442, 7446, 7448, 7452, 7454, 7455, + 7459, 7461, 7466, 7468, 7472, 7476, 7480, 7482, + 7484, 7493, 7497, 7503, 7509, 7518, 7520, 7522, + 7523, 7525, 7532, 7536, 7540, 7543, 7545, 7547, + 7549, 7564, 7568, 7570, 7572, 7577, 7581, 7585, + 7587, 7589, 7593, 7597, 7599, 7604, 7611, 7616, + 7620, 7628, 7629, 7635, 7637, 7638, 7640, 7642, + 7644, 7650, 7655, 7656, 7657, 7659, 7668, 7670, + 7689, 7693, 7695, 7701, 7705, 7706, 7710, 7714, + 7716, 7718, 7723, 7736, 7738, 7740, 7744, 7748, + 7750, 7752, 7754, 7758, 7760, 7762, 7764, 7766, + 7768, 7769, 7773, 7779, 7783, 7789, 7791, 7795, + 7799, 7804, 7805, 7807, 7808, 7814, 7817, 7819, + 7819, 7825, 7855, 7862, 7868, 7872, 7879, 7885, + 7897, 7905, 7911, 7917, 7930, 7934, 7943, 7950, + 7959, 7967, 7984, 7996, 8002, 
8005, 8009, 8015, + 8025, 8035, 8074, 8075, 8078, 8084, 8100, 8106, + 8131, 8137, 8143, 8149, 8157, 8161, 8167, 8180, + 8186, 8194, 8211, 8227, 8231, 8240, 8246, 8252, + 8259, 8263, 8267, 8279, 8283, 8287, 8293, 8297, + 8301, 8309, 8329, 8333, 8339, 8341, 8345, 8349, + 8354, 8361, 8367, 8368, 8374, 8377, 8379, 8379, + 8381, 8385, 8387, 8397, 8400, 8408, 8418, 8427, + 8436, 8449, 8456, 8460, 8464, 8474, 8506, 8513, + 8517, 8519, 8522, 8530, 8546, 8564, 8583, 8600, + 8617, 8629, 8646, 8663, 8684, 8694, 8707, 8722, + 8737, 8754, 8765, 8782, 8791, 8803, 8806, 8810, + 8815, 8819, 8829, 8835, 8845, 8846, 8895, 8897, + 8909, 8915, 8921, 8931, 8941, 8943, 8949, 8955, + 8960, 8966, 8970, 8975, 8981, 8987, 8995, 9007, + 9011, 9015, 9028, 9032, 9050, 9059, 9071, 9073, + 9079, 9084, 9094, 9099, 9104, 9106, 9136, 9143, + 9149, 9153, 9160, 9166, 9176, 9184, 9190, 9196, + 9205, 9209, 9218, 9225, 9234, 9242, 9259, 9271, + 9279, 9288, 9291, 9298, 9302, 9312, 9322, 9361, + 9362, 9365, 9371, 9387, 9393, 9418, 9424, 9428, + 9434, 9442, 9446, 9452, 9463, 9469, 9477, 9494, + 9510, 9514, 9521, 9527, 9533, 9538, 9542, 9546, + 9558, 9562, 9566, 9572, 9576, 9579, 9587, 9607, + 9611, 9617, 9619, 9623, 9627, 9632, 9639, 9645, + 9646, 9652, 9655, 9657, 9657, 9659, 9664, 9670, + 9671, 9676, 9687, 9694, 9701, 9710, 9712, 9714, + 9719, 9751, 9753, 9755, 9757, 9762, 9773, 9788, + 9807, 9822, 9838, 9852, 9868, 9883, 9904, 9914, + 9926, 9939, 9953, 9968, 9978, 9992, 10001, 10013, + 10016, 10020, 10025, 10029, 10039, 10045, 10051, 10052, + 10101, 10103, 10115, 10124, 10128, 10134, 10142, 10144, + 10152, 10158, 10162, 10168, 10173, 10179, 10187, 10193, + 10195, 10197, 10203, 10207, 10225, 10235, 10246, 10248, + 10252, 10258, 10268, 10273, 10278, 10280, 10310, 10315, + 10317, 10328, 10334, 10338, 10350, 10352, 10358, 10365, + 10370, 10376, 10393, 10405, 10412, 10416, 10418, 10428, + 10438, 10477, 10478, 10482, 10486, 10499, 10503, 10528, + 10534, 10538, 10542, 10545, 10554, 10558, 10564, 10580, + 
10594, 10601, 10605, 10609, 10617, 10621, 10633, 10637, + 10641, 10643, 10646, 10654, 10674, 10678, 10684, 10686, + 10690, 10694, 10699, 10706, 10710, 10711, 10717, 10720, + 10722, 10726, 10728, 10738, 10741, 10748, 10758, 10765, + 10775, 10788, 10795, 10799, 10803, 10812, 10844, 10851, + 10855, 10857, 10860, 10870, 10886, 10906, 10925, 10944, + 10961, 10975, 10992, 11009, 11030, 11040, 11053, 11070, + 11085, 11102, 11113, 11132, 11141, 11153, 11156, 11160, + 11165, 11169, 11179, 11185, 11195, 11196, 11245, 11247, + 11259, 11268, 11274, 11284, 11294, 11296, 11304, 11310, + 11315, 11321, 11323, 11325, 11329, 11334, 11340, 11346, + 11356, 11368, 11372, 11376, 11384, 11397, 11401, 11419, + 11425, 11434, 11436, 11442, 11447, 11457, 11467, 11474, + 11480, 11510, 11516, 11523, 11529, 11533, 11540, 11546, + 11558, 11566, 11572, 11578, 11591, 11595, 11604, 11611, + 11620, 11628, 11645, 11657, 11663, 11669, 11672, 11675, + 11681, 11691, 11701, 11740, 11741, 11744, 11750, 11756, + 11772, 11778, 11803, 11809, 11815, 11821, 11825, 11833, + 11837, 11843, 11856, 11862, 11870, 11887, 11903, 11907, + 11916, 11922, 11928, 11935, 11939, 11941, 11945, 11951, + 11963, 11967, 11971, 11975, 11983, 11989, 11993, 11997, + 12005, 12025, 12029, 12035, 12037, 12044, 12048, 12052, + 12057, 12064, 12070, 12071, 12077, 12080, 12082, 12082, + 12084, 12088, 12090, 12100, 12103, 12111, 12121, 12130, + 12139, 12152, 12159, 12163, 12167, 12177, 12209, 12216, + 12220, 12222, 12225, 12233, 12249, 12267, 12286, 12303, + 12320, 12332, 12349, 12366, 12387, 12397, 12410, 12425, + 12440, 12457, 12468, 12485, 12494, 12506, 12509, 12513, + 12518, 12522, 12532, 12538, 12548, 12549, 12598, 12600, + 12612, 12618, 12624, 12634, 12644, 12646, 12652, 12658, + 12663, 12669, 12673, 12678, 12684, 12690, 12698, 12710, + 12714, 12718, 12731, 12735, 12753, 12762, 12774, 12776, + 12782, 12787, 12797, 12802, 12807, 12809, 12839, 12846, + 12852, 12856, 12863, 12869, 12879, 12887, 12893, 12899, + 12908, 12912, 12921, 12928, 
12937, 12945, 12962, 12974, + 12982, 12991, 12994, 13001, 13005, 13015, 13025, 13064, + 13065, 13068, 13074, 13090, 13096, 13121, 13127, 13131, + 13137, 13145, 13149, 13155, 13166, 13172, 13180, 13197, + 13213, 13217, 13224, 13230, 13236, 13241, 13245, 13249, + 13261, 13265, 13269, 13275, 13279, 13282, 13290, 13310, + 13314, 13320, 13322, 13326, 13330, 13335, 13342, 13348, + 13349, 13355, 13358, 13360, 13360, 13362, 13363, 13365, + 13367, 13369, 13372, 13377, 13382, 13388, 13398, 13402, + 13404, 13406, 13410, 13440, 13448, 13450, 13452, 13455, + 13464, 13469, 13483, 13497, 13507, 13512, 13524, 13527, + 13536, 13540, 13552, 13564, 13568, 13580, 13582, 13592, + 13595, 13599, 13604, 13608, 13617, 13619, 13627, 13628, + 13652, 13654, 13666, 13673, 13675, 13679, 13683, 13685, + 13693, 13697, 13698, 13702, 13704, 13709, 13715, 13719, + 13727, 13733, 13735, 13737, 13741, 13750, 13754, 13760, + 13764, 13768, 13770, 13772, 13773, 13775, 13777, 13779, + 13781, 13797, 13799, 13803, 13805, 13807, 13812, 13816, + 13824, 13828, 13830, 13834, 13846, 13848, 13855, 13862, + 13867, 13873, 13878, 13879, 13883, 13885, 13891, 13897, + 13898, 13899, 13901, 13910, 13912, 13934, 13938, 13944, + 13950, 13952, 13958, 13959, 13963, 13971, 13973, 13977, + 13982, 13995, 13997, 14003, 14007, 14011, 14017, 14019, + 14023, 14029, 14031, 14033, 14035, 14037, 14038, 14042, + 14049, 14053, 14059, 14061, 14063, 14067, 14071, 14076, + 14077, 14079, 14080, 14086, 14089, 14091, 14091, 14095, + 14097, 14107, 14110, 14118, 14128, 14137, 14148, 14161, + 14168, 14172, 14176, 14186, 14218, 14225, 14229, 14231, + 14234, 14244, 14260, 14280, 14299, 14318, 14335, 14349, + 14366, 14383, 14404, 14414, 14427, 14444, 14459, 14476, + 14487, 14506, 14515, 14527, 14530, 14534, 14539, 14543, + 14553, 14559, 14569, 14570, 14619, 14621, 14633, 14642, + 14648, 14658, 14668, 14670, 14678, 14684, 14689, 14695, + 14699, 14704, 14710, 14716, 14726, 14738, 14742, 14746, + 14759, 14763, 14781, 14790, 14802, 14804, 14810, 14815, 
+ 14825, 14830, 14835, 14837, 14867, 14874, 14880, 14884, + 14891, 14897, 14909, 14917, 14923, 14929, 14942, 14946, + 14955, 14962, 14971, 14979, 14996, 15008, 15016, 15019, + 15023, 15033, 15043, 15082, 15083, 15086, 15092, 15108, + 15114, 15139, 15145, 15151, 15157, 15165, 15169, 15175, + 15188, 15194, 15202, 15219, 15235, 15239, 15248, 15254, + 15260, 15267, 15271, 15275, 15287, 15291, 15295, 15301, + 15305, 15308, 15316, 15336, 15340, 15346, 15348, 15352, + 15356, 15361, 15368, 15374, 15375, 15381, 15384, 15386, + 15386, 15388, 15393, 15395, 15405, 15408, 15415, 15426, + 15433, 15443, 15456, 15463, 15467, 15471, 15480, 15512, + 15519, 15523, 15525, 15528, 15538, 15554, 15574, 15593, + 15612, 15629, 15643, 15660, 15677, 15698, 15708, 15721, + 15738, 15753, 15770, 15781, 15800, 15809, 15821, 15824, + 15828, 15833, 15837, 15847, 15853, 15863, 15864, 15913, + 15915, 15927, 15936, 15942, 15952, 15962, 15964, 15972, + 15978, 15983, 15989, 15993, 15998, 16004, 16010, 16020, + 16032, 16036, 16040, 16053, 16057, 16075, 16085, 16096, + 16098, 16104, 16109, 16119, 16124, 16129, 16131, 16161, + 16168, 16174, 16178, 16185, 16191, 16203, 16211, 16217, + 16223, 16236, 16240, 16249, 16256, 16265, 16273, 16290, + 16302, 16309, 16312, 16316, 16326, 16336, 16375, 16376, + 16379, 16385, 16401, 16407, 16432, 16438, 16444, 16450, + 16458, 16462, 16468, 16481, 16487, 16495, 16512, 16528, + 16532, 16541, 16547, 16553, 16560, 16564, 16568, 16580, + 16584, 16588, 16594, 16598, 16601, 16609, 16629, 16633, + 16639, 16641, 16645, 16649, 16654, 16661, 16667, 16668, + 16674, 16677, 16679, 16679, 16681, 16685, 16687, 16697, + 16700, 16707, 16717, 16724, 16734, 16747, 16754, 16758, + 16762, 16771, 16803, 16810, 16814, 16816, 16819, 16829, + 16845, 16865, 16884, 16903, 16920, 16934, 16951, 16968, + 16989, 16999, 17012, 17029, 17044, 17061, 17072, 17091, + 17100, 17112, 17115, 17119, 17124, 17128, 17138, 17144, + 17154, 17155, 17204, 17206, 17218, 17227, 17233, 17243, + 17253, 17255, 17263, 
17269, 17274, 17280, 17284, 17289, + 17295, 17301, 17311, 17323, 17327, 17331, 17344, 17348, + 17366, 17372, 17381, 17383, 17389, 17394, 17404, 17414, + 17421, 17427, 17457, 17464, 17470, 17474, 17481, 17487, + 17499, 17507, 17513, 17519, 17532, 17536, 17545, 17552, + 17561, 17569, 17586, 17598, 17604, 17610, 17613, 17616, + 17622, 17632, 17642, 17681, 17682, 17685, 17691, 17707, + 17713, 17738, 17744, 17750, 17756, 17764, 17768, 17774, + 17787, 17793, 17801, 17818, 17834, 17838, 17847, 17853, + 17859, 17866, 17870, 17874, 17886, 17890, 17894, 17900, + 17904, 17908, 17916, 17936, 17940, 17946, 17948, 17952, + 17956, 17961, 17968, 17974, 17975, 17981, 17984, 17986, + 17986, 17988, 17992, 17994, 18004, 18007, 18014, 18024, + 18031, 18041, 18054, 18061, 18065, 18069, 18078, 18110, + 18117, 18121, 18123, 18126, 18136, 18152, 18172, 18191, + 18210, 18227, 18241, 18258, 18275, 18296, 18306, 18319, + 18336, 18351, 18368, 18379, 18398, 18407, 18419, 18422, + 18426, 18431, 18435, 18445, 18451, 18461, 18462, 18511, + 18513, 18525, 18534, 18540, 18550, 18560, 18562, 18570, + 18576, 18581, 18587, 18591, 18596, 18602, 18608, 18618, + 18630, 18634, 18638, 18651, 18655, 18673, 18679, 18690, + 18692, 18698, 18703, 18713, 18723, 18730, 18736, 18766, + 18773, 18779, 18783, 18790, 18796, 18808, 18816, 18822, + 18828, 18841, 18845, 18854, 18861, 18870, 18878, 18895, + 18907, 18913, 18916, 18922, 18932, 18942, 18981, 18982, + 18985, 18991, 19007, 19013, 19038, 19044, 19050, 19056, + 19064, 19068, 19074, 19087, 19093, 19101, 19118, 19134, + 19138, 19147, 19153, 19159, 19166, 19170, 19174, 19186, + 19190, 19194, 19200, 19204, 19208, 19216, 19236, 19240, + 19246, 19248, 19252, 19256, 19261, 19268, 19274, 19275, + 19281, 19284, 19286, 19286, 19288, 19289, 19291, 19293, + 19295, 19298, 19303, 19308, 19311, 19319, 19323, 19325, + 19327, 19329, 19359, 19367, 19369, 19371, 19374, 19379, + 19384, 19394, 19406, 19412, 19417, 19427, 19430, 19437, + 19441, 19449, 19459, 19463, 19471, 19473, 19481, 
19484, + 19486, 19491, 19493, 19500, 19502, 19510, 19511, 19532, + 19534, 19544, 19549, 19551, 19555, 19559, 19561, 19565, + 19567, 19568, 19572, 19574, 19579, 19581, 19585, 19589, + 19593, 19595, 19597, 19606, 19610, 19616, 19620, 19624, + 19626, 19628, 19629, 19631, 19633, 19635, 19637, 19652, + 19656, 19658, 19660, 19665, 19669, 19673, 19675, 19677, + 19681, 19685, 19687, 19692, 19699, 19704, 19708, 19713, + 19714, 19718, 19720, 19726, 19731, 19732, 19733, 19735, + 19744, 19746, 19765, 19769, 19771, 19777, 19781, 19782, + 19786, 19790, 19792, 19794, 19799, 19812, 19814, 19816, + 19820, 19824, 19826, 19828, 19830, 19834, 19836, 19838, + 19840, 19842, 19843, 19847, 19853, 19857, 19863, 19865, + 19869, 19873, 19878, 19879, 19881, 19882, 19888, 19891, + 19893, 19893, 19897, 19899, 19909, 19912, 19919, 19928, + 19935, 19942, 19955, 19962, 19966, 19970, 19977, 20009, + 20016, 20020, 20022, 20025, 20032, 20048, 20066, 20085, + 20102, 20119, 20129, 20146, 20161, 20182, 20190, 20203, + 20216, 20231, 20246, 20257, 20272, 20281, 20291, 20294, + 20296, 20301, 20303, 20311, 20317, 20327, 20328, 20375, + 20377, 20387, 20394, 20400, 20410, 20420, 20422, 20426, + 20430, 20435, 20441, 20445, 20450, 20452, 20458, 20464, + 20474, 20478, 20482, 20495, 20499, 20517, 20521, 20527, + 20529, 20535, 20540, 20550, 20555, 20560, 20562, 20592, + 20599, 20605, 20609, 20616, 20622, 20630, 20636, 20642, + 20648, 20653, 20657, 20664, 20671, 20680, 20686, 20703, + 20715, 20719, 20722, 20726, 20736, 20746, 20785, 20786, + 20789, 20795, 20811, 20817, 20841, 20847, 20849, 20855, + 20861, 20865, 20871, 20879, 20885, 20889, 20906, 20922, + 20926, 20931, 20937, 20943, 20946, 20950, 20952, 20964, + 20968, 20972, 20978, 20982, 20985, 20993, 21013, 21017, + 21023, 21025, 21029, 21033, 21038, 21045, 21051, 21052, + 21058, 21061, 21063, 21063, 21065, 21069, 21071, 21081, + 21084, 21092, 21102, 21111, 21122, 21135, 21142, 21146, + 21150, 21160, 21192, 21199, 21203, 21205, 21208, 21218, + 21234, 21254, 
21273, 21292, 21309, 21323, 21340, 21357, + 21378, 21388, 21401, 21418, 21433, 21450, 21461, 21480, + 21489, 21501, 21504, 21508, 21513, 21517, 21527, 21533, + 21543, 21544, 21593, 21595, 21607, 21616, 21622, 21632, + 21642, 21644, 21652, 21658, 21663, 21669, 21673, 21678, + 21684, 21690, 21700, 21712, 21716, 21720, 21733, 21737, + 21755, 21764, 21776, 21778, 21784, 21789, 21799, 21804, + 21809, 21811, 21841, 21848, 21854, 21858, 21865, 21871, + 21883, 21891, 21897, 21903, 21916, 21920, 21929, 21936, + 21945, 21953, 21970, 21982, 21990, 21993, 21997, 22007, + 22017, 22056, 22057, 22060, 22066, 22082, 22088, 22113, + 22119, 22125, 22131, 22139, 22143, 22149, 22162, 22168, + 22176, 22193, 22209, 22213, 22222, 22228, 22234, 22241, + 22245, 22249, 22261, 22265, 22269, 22275, 22279, 22282, + 22290, 22310, 22314, 22320, 22322, 22326, 22330, 22335, + 22342, 22348, 22349, 22355, 22358, 22360, 22360, 22362, + 22366, 22368, 22378, 22381, 22388, 22398, 22405, 22415, + 22428, 22435, 22439, 22443, 22452, 22484, 22491, 22495, + 22497, 22500, 22510, 22526, 22546, 22565, 22584, 22601, + 22615, 22632, 22649, 22670, 22680, 22693, 22710, 22725, + 22742, 22753, 22772, 22781, 22793, 22796, 22800, 22805, + 22809, 22819, 22825, 22835, 22836, 22885, 22887, 22899, + 22908, 22914, 22924, 22934, 22936, 22944, 22950, 22955, + 22961, 22965, 22970, 22976, 22982, 22992, 23004, 23008, + 23012, 23025, 23029, 23047, 23053, 23064, 23066, 23072, + 23077, 23087, 23097, 23104, 23110, 23140, 23147, 23153, + 23157, 23164, 23170, 23182, 23190, 23196, 23202, 23215, + 23219, 23228, 23235, 23244, 23252, 23269, 23281, 23287, + 23290, 23296, 23306, 23316, 23355, 23356, 23359, 23365, + 23381, 23387, 23412, 23418, 23424, 23430, 23438, 23442, + 23448, 23461, 23467, 23475, 23492, 23508, 23512, 23521, + 23527, 23533, 23540, 23544, 23548, 23560, 23564, 23568, + 23574, 23578, 23582, 23590, 23610, 23614, 23620, 23622, + 23626, 23630, 23635, 23642, 23648, 23649, 23655, 23658, + 23660, 23660, 23662, 23663, 23665, 23667, 
23669, 23672, + 23677, 23682, 23685, 23693, 23697, 23699, 23701, 23703, + 23733, 23754, 23760, 23762, 23777, 23782, 23788, 23789, + 23796, 23800, 23803, 23813, 23829, 23849, 23868, 23887, + 23904, 23918, 23935, 23952, 23973, 23983, 23996, 24013, + 24028, 24045, 24056, 24075, 24084, 24096, 24100, 24104, + 24114, 24120, 24130, 24142, 24151, 24153, 24158, 24160, + 24170, 24173, 24180, 24191, 24198, 24208, 24221, 24228, + 24232, 24236, 24245, 24277, 24284, 24288, 24290, 24293, + 24303, 24319, 24339, 24358, 24377, 24394, 24408, 24425, + 24442, 24463, 24473, 24486, 24503, 24518, 24535, 24546, + 24565, 24574, 24586, 24589, 24593, 24598, 24602, 24612, + 24618, 24628, 24629, 24682, 24684, 24696, 24705, 24705, + 24707, 24713, 24723, 24733, 24735, 24743, 24749, 24754, + 24760, 24764, 24769, 24775, 24781, 24791, 24803, 24807, + 24811, 24824, 24828, 24846, 24856, 24867, 24869, 24875, + 24880, 24890, 24897, 24902, 24904, 24908, 24914, 24916, + 24917, 24919, 24921, 24923, 24926, 24931, 24936, 24939, + 24947, 24951, 24953, 24955, 24957, 24987, 24995, 24997, + 24999, 25002, 25007, 25012, 25022, 25034, 25040, 25045, + 25055, 25058, 25065, 25069, 25077, 25087, 25091, 25099, + 25101, 25109, 25112, 25114, 25119, 25121, 25128, 25130, + 25138, 25139, 25164, 25166, 25176, 25181, 25181, 25183, + 25185, 25189, 25193, 25195, 25199, 25201, 25202, 25206, + 25208, 25213, 25215, 25219, 25223, 25227, 25229, 25231, + 25240, 25244, 25250, 25254, 25258, 25260, 25262, 25263, + 25265, 25272, 25274, 25276, 25278, 25280, 25282, 25300, + 25304, 25306, 25308, 25313, 25317, 25321, 25323, 25327, + 25331, 25335, 25337, 25342, 25349, 25354, 25358, 25358, + 25362, 25364, 25368, 25373, 25374, 25378, 25380, 25390, + 25396, 25401, 25402, 25403, 25405, 25414, 25416, 25435, + 25439, 25441, 25447, 25451, 25452, 25456, 25460, 25462, + 25464, 25469, 25482, 25484, 25486, 25490, 25494, 25496, + 25498, 25500, 25504, 25506, 25508, 25510, 25512, 25513, + 25517, 25523, 25527, 25533, 25535, 25539, 25543, 25548, + 25549, 
25551, 25552, 25558, 25561, 25593, 25600, 25606, + 25610, 25617, 25623, 25635, 25643, 25651, 25657, 25670, + 25674, 25683, 25690, 25699, 25707, 25707, 25711, 25713, + 25717, 25734, 25746, 25753, 25756, 25758, 25768, 25778, + 25817, 25818, 25821, 25827, 25843, 25849, 25874, 25880, + 25886, 25892, 25900, 25904, 25910, 25923, 25929, 25937, + 25954, 25970, 25974, 25983, 25989, 25995, 26002, 26006, + 26010, 26022, 26026, 26030, 26036, 26040, 26043, 26051, + 26071, 26075, 26081, 26083, 26087, 26091, 26096, 26103, + 26109, 26110, 26116, 26119, 26121, 26123, 26129, 26139, + 26149, 26157, 26163, 26168, 26174, 26178, 26184, 26190, + 26200, 26212, 26216, 26220, 26233, 26241, 26252, 26258, + 26263, 26273, 26277, 26278, 26280, 26282, 26284, 26287, + 26292, 26297, 26300, 26308, 26312, 26314, 26316, 26318, + 26348, 26356, 26358, 26360, 26363, 26368, 26373, 26383, + 26395, 26401, 26406, 26416, 26419, 26426, 26430, 26438, + 26448, 26452, 26460, 26462, 26470, 26473, 26475, 26480, + 26482, 26489, 26491, 26499, 26500, 26521, 26523, 26533, + 26538, 26540, 26544, 26548, 26550, 26554, 26556, 26557, + 26561, 26563, 26568, 26570, 26574, 26578, 26582, 26584, + 26586, 26595, 26599, 26605, 26609, 26613, 26615, 26617, + 26618, 26620, 26622, 26624, 26626, 26641, 26645, 26647, + 26649, 26654, 26658, 26662, 26664, 26666, 26670, 26674, + 26676, 26681, 26688, 26693, 26697, 26702, 26703, 26707, + 26709, 26715, 26720, 26721, 26722, 26724, 26733, 26735, + 26754, 26758, 26760, 26766, 26770, 26771, 26775, 26779, + 26781, 26783, 26788, 26801, 26803, 26805, 26809, 26813, + 26815, 26817, 26819, 26823, 26825, 26827, 26829, 26831, + 26832, 26836, 26842, 26846, 26852, 26854, 26858, 26862, + 26867, 26868, 26870, 26871, 26877, 26880, 26882, 26882, + 26884, 26886, 26900, 26905, 26907, 26917, 26920, 26927, + 26938, 26945, 26955, 26968, 26975, 26979, 26983, 26992, + 27024, 27031, 27035, 27037, 27040, 27050, 27066, 27086, + 27105, 27124, 27141, 27155, 27172, 27189, 27210, 27220, + 27233, 27250, 27265, 27282, 27293, 
27312, 27321, 27333, + 27336, 27340, 27345, 27349, 27359, 27365, 27375, 27376, + 27425, 27427, 27439, 27448, 27454, 27464, 27474, 27476, + 27484, 27490, 27495, 27501, 27505, 27510, 27516, 27522, + 27532, 27544, 27548, 27552, 27565, 27569, 27587, 27597, + 27608, 27610, 27616, 27621, 27631, 27636, 27641, 27643, + 27673, 27680, 27686, 27690, 27697, 27703, 27715, 27723, + 27729, 27735, 27748, 27752, 27761, 27768, 27777, 27785, + 27802, 27814, 27821, 27824, 27828, 27838, 27848, 27887, + 27888, 27891, 27897, 27913, 27919, 27944, 27950, 27956, + 27962, 27970, 27974, 27980, 27993, 27999, 28007, 28024, + 28040, 28044, 28053, 28059, 28065, 28072, 28076, 28080, + 28092, 28096, 28100, 28106, 28110, 28113, 28121, 28141, + 28145, 28151, 28153, 28157, 28161, 28166, 28173, 28179, + 28180, 28186, 28189, 28191, 28191, 28193, 28194, 28196, + 28198, 28200, 28203, 28208, 28213, 28216, 28224, 28228, + 28230, 28232, 28234, 28264, 28272, 28274, 28276, 28279, + 28284, 28289, 28299, 28311, 28317, 28322, 28332, 28335, + 28342, 28346, 28354, 28364, 28368, 28376, 28378, 28386, + 28389, 28391, 28396, 28398, 28405, 28407, 28415, 28416, + 28441, 28443, 28453, 28458, 28460, 28464, 28468, 28470, + 28474, 28476, 28477, 28481, 28483, 28488, 28490, 28494, + 28498, 28502, 28504, 28506, 28515, 28519, 28525, 28529, + 28533, 28535, 28537, 28538, 28540, 28547, 28549, 28551, + 28569, 28573, 28575, 28577, 28582, 28586, 28590, 28592, + 28596, 28600, 28604, 28606, 28611, 28618, 28623, 28627, + 28632, 28633, 28637, 28641, 28651, 28657, 28662, 28663, + 28664, 28666, 28675, 28677, 28696, 28700, 28702, 28708, + 28712, 28713, 28717, 28721, 28723, 28725, 28730, 28743, + 28745, 28747, 28751, 28755, 28757, 28759, 28761, 28765, + 28767, 28769, 28771, 28773, 28774, 28778, 28784, 28788, + 28794, 28796, 28800, 28804, 28809, 28810, 28812, 28813, + 28819, 28822, 28824, 28824, 28825, 28827, 28829, 28831, + 28834, 28839, 28844, 28847, 28855, 28859, 28861, 28863, + 28865, 28895, 28903, 28905, 28907, 28910, 28915, 28920, + 
28930, 28942, 28948, 28953, 28963, 28966, 28973, 28977, + 28985, 28995, 28999, 29007, 29009, 29017, 29020, 29022, + 29027, 29029, 29036, 29038, 29046, 29047, 29068, 29070, + 29080, 29085, 29087, 29091, 29095, 29097, 29101, 29103, + 29104, 29108, 29110, 29115, 29117, 29121, 29125, 29129, + 29131, 29133, 29142, 29146, 29152, 29158, 29167, 29169, + 29171, 29172, 29174, 29181, 29185, 29189, 29192, 29194, + 29196, 29198, 29213, 29217, 29219, 29221, 29226, 29230, + 29234, 29236, 29238, 29242, 29246, 29248, 29253, 29260, + 29265, 29269, 29277, 29278, 29284, 29286, 29288, 29290, + 29296, 29301, 29302, 29303, 29305, 29314, 29316, 29335, + 29339, 29341, 29347, 29351, 29352, 29356, 29360, 29362, + 29364, 29369, 29382, 29384, 29386, 29390, 29394, 29396, + 29398, 29400, 29404, 29406, 29408, 29410, 29412, 29414, + 29415, 29419, 29425, 29429, 29435, 29437, 29441, 29445, + 29450, 29451, 29453, 29454, 29460, 29463, 29465, 29465, + 29467, 29468, 29470, 29472, 29474, 29477, 29482, 29487, + 29490, 29498, 29502, 29504, 29506, 29508, 29538, 29546, + 29548, 29550, 29553, 29558, 29563, 29573, 29585, 29591, + 29596, 29606, 29609, 29616, 29620, 29628, 29638, 29642, + 29650, 29652, 29660, 29663, 29665, 29670, 29672, 29679, + 29681, 29689, 29690, 29711, 29713, 29723, 29728, 29730, + 29734, 29738, 29740, 29744, 29746, 29747, 29751, 29753, + 29758, 29760, 29764, 29768, 29772, 29774, 29776, 29785, + 29789, 29795, 29799, 29803, 29805, 29807, 29808, 29810, + 29812, 29814, 29816, 29831, 29835, 29837, 29839, 29844, + 29848, 29852, 29854, 29856, 29860, 29864, 29866, 29871, + 29878, 29883, 29887, 29892, 29893, 29897, 29899, 29905, + 29910, 29911, 29912, 29914, 29923, 29925, 29944, 29948, + 29950, 29956, 29960, 29961, 29965, 29969, 29971, 29973, + 29978, 29991, 29993, 29995, 29999, 30003, 30005, 30007, + 30009, 30013, 30015, 30017, 30019, 30021, 30022, 30026, + 30032, 30036, 30042, 30044, 30048, 30052, 30057, 30058, + 30060, 30061, 30067, 30070, 30072, 30072, 30080, 30081, + 30082, 30084, 30086, 30088, 
30091, 30096, 30101, 30104, + 30112, 30116, 30118, 30120, 30122, 30152, 30160, 30162, + 30164, 30167, 30172, 30177, 30187, 30199, 30205, 30210, + 30220, 30223, 30230, 30234, 30242, 30252, 30256, 30264, + 30266, 30274, 30277, 30279, 30284, 30286, 30293, 30295, + 30303, 30304, 30325, 30327, 30337, 30342, 30344, 30348, + 30352, 30354, 30358, 30360, 30361, 30365, 30367, 30372, + 30374, 30378, 30382, 30386, 30388, 30390, 30399, 30403, + 30409, 30413, 30417, 30419, 30421, 30422, 30424, 30426, + 30428, 30430, 30445, 30449, 30451, 30453, 30458, 30462, + 30466, 30468, 30470, 30474, 30478, 30480, 30485, 30492, + 30497, 30501, 30506, 30507, 30511, 30513, 30519, 30524, + 30525, 30526, 30528, 30537, 30539, 30558, 30562, 30564, + 30570, 30574, 30575, 30579, 30583, 30585, 30587, 30592, + 30605, 30607, 30609, 30613, 30617, 30619, 30621, 30623, + 30627, 30629, 30631, 30633, 30635, 30636, 30640, 30646, + 30650, 30656, 30658, 30662, 30666, 30671, 30672, 30674, + 30675, 30681, 30684, 30686, 30686, 30688, 30690, 30692, + 30695, 30700, 30705, 30708, 30716, 30720, 30722, 30724, + 30726, 30756, 30777, 30783, 30785, 30800, 30805, 30811, + 30812, 30814, 30821, 30827, 30831, 30838, 30844, 30856, + 30864, 30872, 30878, 30891, 30895, 30904, 30913, 30921, + 30922, 30927, 30929, 30939, 30942, 30949, 30960, 30967, + 30977, 30990, 30997, 31001, 31005, 31014, 31046, 31053, + 31057, 31059, 31062, 31072, 31088, 31108, 31127, 31146, + 31163, 31177, 31194, 31211, 31232, 31242, 31255, 31272, + 31287, 31304, 31315, 31334, 31343, 31355, 31358, 31362, + 31367, 31371, 31381, 31387, 31397, 31398, 31451, 31453, + 31465, 31474, 31480, 31490, 31500, 31502, 31510, 31516, + 31521, 31527, 31531, 31536, 31542, 31548, 31558, 31570, + 31574, 31578, 31591, 31595, 31613, 31623, 31634, 31636, + 31642, 31647, 31657, 31664, 31669, 31671, 31703, 31710, + 31716, 31720, 31727, 31733, 31745, 31753, 31761, 31767, + 31780, 31784, 31793, 31800, 31809, 31817, 31834, 31846, + 31853, 31856, 31860, 31870, 31880, 31919, 31920, 31923, 
+ 31929, 31945, 31951, 31976, 31982, 31988, 31994, 32002, + 32006, 32012, 32025, 32031, 32039, 32056, 32072, 32076, + 32085, 32091, 32097, 32104, 32108, 32112, 32124, 32128, + 32132, 32138, 32142, 32145, 32153, 32173, 32177, 32183, + 32185, 32189, 32193, 32198, 32205, 32211, 32212, 32218, + 32221, 32223, 32223, 32225, 32227, 32229, 32231, 32234, + 32239, 32244, 32247, 32255, 32259, 32261, 32263, 32265, + 32295, 32316, 32322, 32324, 32339, 32344, 32350, 32351, + 32353, 32355, 32367, 32373, 32376, 32380, 32386, 32396, + 32435, 32438, 32444, 32460, 32466, 32491, 32497, 32503, + 32509, 32517, 32521, 32527, 32540, 32546, 32554, 32571, + 32587, 32591, 32600, 32606, 32612, 32619, 32623, 32627, + 32639, 32643, 32647, 32653, 32657, 32661, 32663, 32671, + 32691, 32698, 32704, 32709, 32711, 32712, 32714, 32716, + 32718, 32721, 32726, 32731, 32734, 32742, 32746, 32748, + 32750, 32752, 32782, 32790, 32792, 32794, 32797, 32802, + 32807, 32817, 32829, 32835, 32840, 32850, 32853, 32860, + 32864, 32872, 32882, 32886, 32894, 32896, 32904, 32907, + 32909, 32914, 32916, 32923, 32925, 32933, 32934, 32955, + 32957, 32967, 32972, 32974, 32978, 32982, 32984, 32988, + 32990, 32991, 32995, 32997, 33002, 33004, 33008, 33012, + 33016, 33018, 33020, 33029, 33033, 33039, 33043, 33047, + 33049, 33051, 33052, 33054, 33056, 33058, 33060, 33075, + 33079, 33081, 33083, 33088, 33092, 33096, 33098, 33100, + 33104, 33108, 33110, 33115, 33122, 33127, 33131, 33136, + 33137, 33141, 33143, 33150, 33155, 33156, 33157, 33159, + 33168, 33170, 33189, 33193, 33195, 33201, 33205, 33206, + 33210, 33214, 33216, 33218, 33223, 33236, 33238, 33240, + 33244, 33248, 33250, 33252, 33254, 33258, 33260, 33262, + 33264, 33266, 33267, 33271, 33277, 33281, 33287, 33289, + 33293, 33297, 33302, 33303, 33305, 33306, 33307, 33313, + 33316, 33318, 33318, 33319, 33322, 33322, 33324, 33326, + 33329, 33331, 33333, 33336, 33338, 33342, 33344, 33393, + 33414, 33415, 33460, 33505, 33549, 33594, 33635, 33680, + 33721, 33743, 33784, 
33828, 33869, 33910, 33936, 33957, + 33984, 34029, 34081, 34107, 34128, 34178, 34216, 34261, + 34302, 34348, 34395, 34444, 34493, 34540, 34592, 34644, + 34667, 34690, 34742, 34788, 34834, 34908, 34973, 35023, + 35069, 35128, 35177, 35227, 35272, 35303, 35330, 35359, + 35381, 35403, 35425, 35450, 35476, 35508, 35536, 35564, + 35594, 35617, 35640, 35663, 35689, 35742, 35812, 35851, + 35877, 35928, 35980, 36026, 36076, 36097, 36143, 36197, + 36246, 36292, 36340, 36388, 36438, 36463, 36501, 36532, + 36554, 36606, 36652, 36697, 36745, 36791, 36840, 36886, + 36938, 36990, 37038, 37084, 37130, 37183, 37231, 37279, + 37327, 37373, 37419, 37464, 37510, 37556, 37602, 37650, + 37696, 37742, 37791, 37843, 37895, 37941, 37987, 38039, + 38092, 38138, 38187, 38238, 38287, 38335, 38380, 38428, + 38474, 38523, 38586, 38634, 38679, 38729, 38774, 38824, + 38871, 38924, 38977, 39035, 39095, 39149, 39198, 39252, + 39299, 39350, 39402, 39458, 39512, 39564, 39620, 39666, + 39718, 39765, 39811, 39860, 39906, 39957, 40003, 40055, + 40100, 40130, 40153, 40175, 40198, 40220, 40243, 40266, + 40290, 40316, 40342, 40366, 40395, 40420, 40443, 40466, + 40490, 40516, 40542, 40566, 40595, 40620, 40643, 40666, + 40689, 40740, 40782, 40809, 40832, 40868, 40894, 40921, + 40943, 40966, 40989, 41012, 41063, 41105, 41132, 41155, + 41191, 41217, 41244, 41266, 41297, 41326, 41350, 41378, + 41409, 41437, 41468, 41502, 41530, 41555, 41580, 41610, + 41663, 41737, 41781, 41805, 41850, 41871, 41908, 41952, + 41976, 41998, 42019, 42040, 42069, 42092, 42115, 42137, + 42160, 42182, 42205, 42228, 42251, 42275, 42301, 42327, + 42351, 42380, 42405, 42428, 42451, 42475, 42501, 42527, + 42551, 42580, 42605, 42628, 42651, 42674, 42725, 42767, + 42794, 42817, 42853, 42879, 42906, 42928, 42951, 42974, + 42997, 43048, 43090, 43117, 43140, 43176, 43202, 43229, + 43251, 43275, 43301, 43327, 43358, 43391, 43418, 43444, + 43475, 43499, 43527, 43552, 43581, 43612, 43637, 43666, + 43689, 43718, 43742, 43765, 43791, 43814, 43842, 
43865, + 43894, 43916, 43939, 43970, 43996, 44019, 44044, 44069, + 44092, 44117, 44140, 44162, 44187, 44210, 44236, 44259, + 44284, 44309, 44334, 44357, 44380, 44410, 44435, 44460, + 44485, 44508, 44531, 44553, 44576, 44599, 44622, 44647, + 44670, 44693, 44719, 44744, 44769, 44792, 44815, 44840, + 44865, 44888, 44914, 44942, 44968, 44993, 45015, 45040, + 45063, 45089, 45129, 45154, 45176, 45203, 45225, 45252, + 45276, 45297, 45321, 45374, 45418, 45470, 45516, 45566, + 45592, 45615, 45660, 45712, 45743, 45767, 45795, 45827, + 45855, 45886, 45920, 45948, 45998, 46044, 46091, 46140, + 46189, 46236, 46288, 46340, 46365, 46390, 46420, 46473, + 46547, 46586, 46614, 46667, 46713, 46767, 46816, 46862, + 46910, 46958, 47008, 47029, 47077, 47125, 47171, 47217, + 47262, 47308, 47354, 47400, 47448, 47494, 47540, 47589, + 47641, 47666, 47704, 47735, 47757, 47809, 47855, 47901, + 47953, 48006, 48052, 48101, 48152, 48201, 48249, 48294, + 48342, 48388, 48437, 48500, 48548, 48593, 48643, 48688, + 48738, 48790, 48836, 48881, 48929, 48975, 49024, 49070, + 49122, 49174, 49222, 49268, 49314, 49367, 49415, 49467, + 49513, 49559, 49633, 49698, 49748, 49794, 49853, 49902, + 49952, 49997, 50044, 50097, 50150, 50208, 50268, 50322, + 50371, 50425, 50472, 50523, 50575, 50631, 50685, 50737, + 50793, 50839, 50891, 50938, 50984, 51033, 51079, 51130, + 51176, 51228, 51273, 51316, 51354, +} + +var _s_trans_keys []byte = []byte{ + 173, 0, 127, 176, 255, 131, 137, 191, + 145, 189, 135, 129, 130, 132, 133, 156, + 128, 133, 144, 154, 176, 139, 159, 150, + 157, 159, 164, 167, 168, 170, 173, 143, + 145, 176, 255, 139, 255, 166, 176, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 166, 170, 172, 178, 150, + 153, 155, 163, 165, 167, 169, 173, 153, + 155, 163, 255, 189, 132, 185, 144, 152, + 161, 164, 255, 188, 129, 131, 190, 255, + 133, 134, 137, 138, 142, 150, 152, 161, + 164, 255, 131, 134, 137, 138, 142, 
144, + 146, 175, 178, 180, 182, 255, 134, 138, + 142, 161, 164, 255, 188, 129, 131, 190, + 191, 128, 132, 135, 136, 139, 141, 150, + 151, 162, 163, 130, 190, 191, 151, 128, + 130, 134, 136, 138, 141, 128, 131, 190, + 255, 133, 137, 142, 148, 151, 161, 164, + 255, 128, 132, 134, 136, 138, 141, 149, + 150, 162, 163, 129, 131, 190, 255, 133, + 137, 142, 150, 152, 161, 164, 255, 130, + 131, 138, 150, 143, 148, 152, 159, 178, + 179, 177, 180, 186, 135, 142, 177, 180, + 185, 187, 188, 136, 141, 181, 183, 185, + 152, 153, 190, 191, 177, 191, 128, 132, + 134, 135, 141, 151, 153, 188, 134, 128, + 129, 130, 141, 156, 157, 158, 159, 160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 171, 190, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 154, 157, 157, 159, 146, + 148, 178, 180, 146, 147, 178, 179, 180, + 255, 148, 156, 158, 255, 139, 142, 169, + 160, 171, 176, 187, 151, 155, 191, 149, + 158, 160, 188, 176, 190, 128, 132, 180, + 255, 133, 170, 180, 255, 128, 130, 161, + 173, 166, 179, 164, 183, 173, 144, 146, + 148, 168, 178, 180, 184, 185, 128, 181, + 188, 191, 128, 129, 131, 179, 181, 183, + 140, 143, 170, 174, 160, 164, 166, 175, + 144, 176, 175, 177, 191, 160, 191, 128, + 130, 170, 175, 153, 154, 153, 154, 155, + 160, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 175, 175, 178, 180, 189, + 158, 159, 176, 177, 130, 134, 139, 163, + 167, 128, 129, 180, 255, 133, 159, 178, + 255, 166, 173, 135, 147, 128, 131, 179, + 255, 129, 164, 166, 255, 169, 182, 131, + 140, 141, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 171, 175, 181, 182, + 163, 170, 172, 173, 172, 184, 187, 190, + 191, 158, 128, 143, 160, 175, 185, 187, + 144, 145, 150, 155, 157, 158, 135, 139, + 141, 168, 171, 189, 160, 182, 186, 191, + 129, 131, 133, 134, 140, 143, 184, 186, + 165, 166, 128, 129, 130, 132, 133, 134, + 135, 136, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 156, 128, 130, 184, + 255, 135, 190, 131, 175, 187, 188, 190, + 255, 128, 130, 
167, 180, 179, 128, 130, + 179, 255, 129, 137, 141, 255, 172, 183, + 159, 170, 188, 128, 131, 190, 191, 151, + 128, 132, 135, 136, 139, 141, 162, 163, + 166, 172, 176, 180, 176, 255, 132, 255, + 175, 181, 184, 255, 129, 155, 158, 255, + 129, 255, 171, 183, 157, 171, 171, 172, + 189, 190, 176, 180, 176, 182, 145, 190, + 143, 146, 178, 157, 158, 160, 163, 133, + 134, 137, 168, 169, 170, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 163, 144, + 150, 160, 128, 129, 132, 135, 133, 134, + 129, 160, 255, 192, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 48, 57, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 65, 90, 97, 122, 196, 218, + 235, 236, 170, 173, 181, 186, 128, 150, + 152, 182, 184, 255, 192, 255, 128, 255, + 173, 130, 133, 146, 159, 165, 171, 175, + 255, 0, 127, 181, 190, 176, 183, 184, + 185, 186, 191, 192, 255, 134, 140, 136, + 138, 142, 161, 163, 255, 182, 130, 131, + 137, 176, 151, 152, 154, 160, 190, 136, + 144, 145, 191, 192, 255, 135, 129, 130, + 132, 133, 144, 170, 176, 179, 156, 128, + 133, 144, 154, 160, 191, 176, 128, 138, + 139, 159, 174, 255, 148, 158, 169, 150, + 164, 167, 173, 176, 185, 189, 190, 192, + 255, 144, 143, 145, 146, 175, 176, 255, + 139, 140, 141, 255, 166, 176, 178, 255, + 186, 138, 170, 171, 179, 180, 181, 160, + 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 154, + 164, 168, 128, 149, 150, 173, 128, 152, + 153, 155, 160, 180, 163, 255, 189, 132, + 185, 144, 152, 161, 164, 176, 177, 255, + 132, 169, 177, 188, 129, 131, 141, 142, + 145, 146, 179, 181, 186, 187, 190, 255, + 142, 158, 133, 134, 137, 138, 143, 150, + 152, 155, 156, 161, 164, 175, 176, 177, + 178, 
255, 188, 129, 131, 133, 138, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 182, 184, 185, 190, 255, 157, 131, 134, + 137, 138, 142, 144, 146, 152, 153, 158, + 159, 175, 178, 180, 182, 255, 189, 129, + 131, 133, 141, 143, 145, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 255, 134, + 138, 144, 185, 142, 159, 160, 161, 164, + 255, 189, 129, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 177, 128, 132, 135, 136, 139, + 141, 150, 151, 156, 157, 159, 161, 162, + 163, 130, 131, 156, 133, 138, 142, 144, + 146, 149, 153, 154, 158, 159, 163, 164, + 168, 170, 174, 185, 190, 191, 144, 151, + 128, 130, 134, 136, 138, 141, 189, 128, + 131, 133, 140, 142, 144, 146, 168, 170, + 185, 190, 255, 133, 137, 151, 142, 148, + 152, 154, 155, 159, 160, 161, 164, 255, + 189, 129, 131, 133, 140, 142, 144, 146, + 168, 170, 179, 181, 185, 188, 191, 158, + 128, 132, 134, 136, 138, 141, 149, 150, + 160, 161, 162, 163, 177, 178, 189, 129, + 131, 133, 140, 142, 144, 146, 186, 190, + 255, 133, 137, 142, 143, 150, 152, 158, + 159, 161, 164, 185, 186, 191, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 187, 138, 150, 128, 134, 143, 148, 152, + 159, 178, 179, 177, 180, 186, 135, 142, + 177, 180, 185, 187, 188, 136, 141, 128, + 181, 183, 185, 152, 153, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 135, 136, 140, 141, 151, 153, 188, 134, + 128, 129, 130, 131, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 177, 179, 181, 182, 183, 188, 189, 190, + 191, 132, 152, 180, 184, 185, 187, 171, + 190, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 154, 157, + 160, 255, 134, 187, 136, 140, 142, 143, + 137, 151, 153, 142, 143, 158, 159, 137, + 177, 142, 143, 182, 183, 191, 255, 128, + 130, 133, 136, 150, 152, 255, 145, 150, + 151, 155, 156, 157, 159, 160, 255, 128, + 143, 160, 255, 182, 183, 190, 255, 129, + 255, 173, 174, 192, 255, 129, 
154, 160, + 255, 171, 173, 185, 255, 128, 140, 142, + 145, 146, 148, 160, 177, 178, 180, 128, + 145, 146, 147, 160, 172, 174, 176, 178, + 179, 180, 255, 148, 156, 158, 255, 139, + 142, 160, 255, 184, 255, 169, 128, 170, + 176, 255, 182, 255, 128, 158, 160, 171, + 176, 187, 128, 150, 151, 155, 191, 149, + 158, 160, 188, 176, 190, 128, 132, 133, + 179, 180, 255, 133, 139, 140, 170, 180, + 255, 128, 130, 131, 160, 161, 173, 174, + 175, 186, 255, 166, 179, 180, 255, 128, + 163, 164, 183, 141, 143, 154, 189, 173, + 144, 146, 148, 168, 169, 177, 178, 180, + 181, 182, 184, 185, 128, 181, 188, 191, + 150, 151, 158, 159, 152, 154, 156, 158, + 134, 135, 142, 143, 190, 255, 190, 128, + 180, 182, 188, 130, 132, 134, 140, 144, + 147, 150, 155, 160, 172, 178, 180, 182, + 188, 128, 129, 130, 131, 132, 133, 134, + 146, 147, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 140, 143, 170, 174, 177, + 191, 160, 164, 166, 175, 144, 156, 144, + 176, 130, 135, 149, 164, 166, 168, 138, + 147, 153, 157, 170, 173, 175, 185, 188, + 191, 142, 133, 137, 160, 255, 137, 255, + 182, 255, 170, 255, 128, 174, 176, 255, + 159, 165, 170, 175, 177, 180, 255, 167, + 173, 128, 165, 176, 255, 191, 168, 174, + 176, 255, 128, 150, 160, 166, 168, 174, + 176, 182, 184, 190, 128, 134, 136, 142, + 144, 150, 152, 158, 160, 191, 175, 128, + 130, 132, 133, 134, 133, 170, 175, 187, + 188, 153, 154, 133, 173, 177, 255, 143, + 159, 187, 255, 128, 146, 147, 148, 152, + 153, 154, 155, 156, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 129, + 255, 141, 255, 144, 189, 141, 143, 160, + 169, 172, 255, 191, 128, 174, 175, 178, + 180, 189, 128, 157, 158, 159, 160, 255, + 176, 177, 178, 255, 151, 159, 162, 255, + 137, 138, 174, 175, 184, 255, 183, 255, + 130, 134, 139, 163, 167, 168, 255, 128, + 179, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 159, 178, 183, 184, 255, 138, + 165, 166, 173, 176, 255, 135, 147, 148, + 159, 189, 255, 128, 131, 132, 178, 179, + 255, 143, 
129, 164, 166, 255, 128, 168, + 169, 182, 131, 128, 139, 140, 141, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 129, 134, 137, 142, 145, 150, 160, + 166, 168, 174, 176, 255, 155, 166, 175, + 128, 162, 163, 170, 172, 173, 158, 159, + 160, 255, 164, 175, 135, 138, 188, 255, + 172, 173, 174, 175, 180, 181, 182, 183, + 184, 185, 187, 188, 189, 190, 191, 176, + 186, 158, 190, 128, 134, 147, 151, 157, + 168, 170, 182, 184, 188, 128, 129, 131, + 132, 134, 255, 178, 255, 147, 255, 190, + 255, 144, 255, 144, 145, 136, 175, 188, + 255, 128, 143, 160, 175, 176, 180, 182, + 255, 191, 189, 255, 161, 186, 129, 154, + 158, 159, 160, 190, 130, 135, 138, 143, + 146, 151, 154, 156, 185, 187, 144, 145, + 146, 147, 148, 150, 155, 157, 158, 159, + 128, 129, 130, 131, 133, 135, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 148, + 149, 152, 156, 157, 160, 161, 162, 163, + 164, 166, 168, 169, 170, 171, 172, 173, + 174, 176, 177, 153, 155, 178, 179, 128, + 139, 141, 166, 168, 186, 188, 189, 191, + 255, 142, 143, 158, 255, 187, 255, 128, + 180, 189, 128, 156, 160, 255, 160, 145, + 255, 128, 159, 176, 255, 139, 143, 182, + 186, 187, 255, 128, 157, 160, 255, 144, + 132, 135, 150, 255, 158, 255, 128, 167, + 176, 255, 164, 255, 183, 255, 128, 149, + 160, 167, 136, 188, 128, 133, 138, 181, + 183, 184, 191, 255, 150, 159, 183, 255, + 128, 158, 160, 178, 180, 181, 128, 149, + 160, 185, 128, 183, 190, 191, 128, 191, + 129, 131, 133, 134, 140, 143, 144, 147, + 149, 151, 153, 179, 184, 186, 160, 188, + 128, 156, 128, 135, 137, 164, 165, 166, + 128, 181, 128, 149, 160, 178, 128, 145, + 128, 178, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 138, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 156, 162, + 163, 171, 128, 130, 131, 183, 184, 255, + 135, 190, 131, 175, 187, 188, 190, 255, + 144, 168, 128, 130, 131, 166, 167, 180, + 179, 182, 144, 178, 128, 130, 131, 178, + 179, 255, 154, 156, 129, 132, 133, 137, + 141, 255, 128, 145, 147, 171, 172, 
183, + 136, 128, 134, 138, 141, 143, 157, 159, + 168, 176, 255, 159, 170, 171, 255, 189, + 128, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 144, 151, 128, 132, 135, 136, 139, 141, + 157, 161, 162, 163, 166, 172, 176, 180, + 128, 175, 176, 255, 134, 132, 135, 136, + 255, 128, 174, 175, 181, 184, 255, 129, + 151, 152, 155, 158, 255, 132, 129, 255, + 128, 170, 171, 183, 157, 171, 160, 255, + 160, 190, 192, 255, 128, 184, 128, 142, + 145, 149, 129, 141, 144, 146, 147, 148, + 154, 255, 175, 255, 132, 255, 128, 144, + 129, 143, 144, 153, 145, 152, 135, 255, + 160, 168, 169, 171, 172, 173, 174, 188, + 189, 190, 161, 167, 185, 255, 144, 173, + 176, 180, 128, 175, 176, 182, 128, 131, + 163, 183, 189, 255, 144, 255, 133, 143, + 145, 190, 191, 255, 143, 146, 147, 159, + 176, 177, 178, 171, 175, 189, 255, 128, + 136, 144, 153, 157, 158, 160, 163, 133, + 134, 137, 144, 145, 146, 147, 148, 149, + 154, 155, 156, 157, 158, 159, 168, 169, + 170, 150, 153, 165, 169, 173, 255, 131, + 132, 140, 169, 174, 255, 130, 132, 149, + 157, 173, 186, 188, 160, 161, 163, 164, + 167, 168, 132, 134, 149, 157, 186, 139, + 140, 191, 255, 134, 128, 132, 138, 144, + 146, 255, 166, 167, 129, 155, 187, 149, + 181, 143, 175, 137, 169, 131, 140, 255, + 128, 182, 187, 255, 173, 180, 182, 255, + 132, 155, 159, 161, 175, 160, 163, 184, + 185, 186, 161, 162, 133, 143, 144, 150, + 151, 255, 164, 167, 185, 187, 128, 131, + 133, 159, 161, 162, 169, 178, 180, 183, + 130, 135, 137, 139, 148, 151, 153, 155, + 157, 159, 164, 190, 141, 143, 145, 146, + 161, 162, 167, 170, 172, 178, 180, 183, + 185, 188, 128, 137, 139, 155, 161, 163, + 165, 169, 171, 187, 132, 133, 134, 176, + 255, 138, 143, 170, 175, 138, 255, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 176, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 48, 57, 173, 128, 255, 176, + 255, 131, 137, 191, 145, 189, 135, 129, + 130, 132, 133, 156, 
128, 133, 144, 154, + 171, 176, 139, 159, 160, 169, 150, 157, + 159, 164, 167, 168, 170, 173, 176, 185, + 143, 145, 176, 255, 139, 255, 166, 176, + 128, 137, 171, 179, 160, 161, 163, 164, + 165, 167, 169, 171, 173, 174, 175, 176, + 177, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 166, 170, + 172, 178, 150, 153, 155, 163, 165, 167, + 169, 173, 153, 155, 163, 255, 189, 132, + 185, 144, 152, 161, 164, 165, 166, 175, + 176, 255, 188, 129, 131, 190, 255, 133, + 134, 137, 138, 142, 150, 152, 161, 164, + 165, 166, 175, 176, 255, 131, 134, 137, + 138, 142, 144, 146, 165, 166, 175, 178, + 180, 182, 255, 134, 138, 142, 161, 164, + 165, 166, 175, 176, 255, 188, 129, 131, + 190, 191, 128, 132, 135, 136, 139, 141, + 150, 151, 162, 163, 166, 175, 130, 190, + 191, 151, 128, 130, 134, 136, 138, 141, + 166, 175, 128, 131, 190, 255, 133, 137, + 142, 148, 151, 161, 164, 165, 166, 175, + 176, 255, 128, 132, 134, 136, 138, 141, + 149, 150, 162, 163, 166, 175, 129, 131, + 190, 255, 133, 137, 142, 150, 152, 161, + 164, 165, 166, 175, 176, 255, 130, 131, + 138, 150, 143, 148, 152, 159, 166, 175, + 178, 179, 177, 180, 186, 135, 142, 144, + 153, 177, 180, 185, 187, 188, 136, 141, + 144, 153, 181, 183, 185, 152, 153, 160, + 169, 190, 191, 177, 191, 128, 132, 134, + 135, 141, 151, 153, 188, 134, 128, 129, + 130, 141, 156, 157, 158, 159, 160, 162, + 164, 165, 167, 168, 169, 170, 172, 173, + 174, 175, 176, 177, 179, 183, 171, 190, + 128, 137, 150, 153, 158, 160, 162, 164, + 167, 173, 177, 180, 143, 130, 141, 144, + 153, 154, 157, 157, 159, 146, 148, 178, + 180, 146, 147, 178, 179, 180, 255, 148, + 156, 158, 159, 160, 169, 170, 255, 139, + 142, 144, 153, 169, 160, 171, 176, 187, + 134, 143, 144, 153, 151, 155, 191, 149, + 158, 160, 188, 128, 137, 144, 153, 176, + 190, 128, 132, 180, 255, 133, 143, 144, + 153, 154, 170, 180, 255, 128, 130, 161, + 173, 176, 185, 166, 179, 164, 183, 128, + 137, 144, 153, 173, 144, 146, 148, 168, + 178, 180, 184, 185, 128, 181, 188, 191, + 
128, 129, 131, 179, 181, 183, 140, 143, + 170, 174, 160, 164, 166, 175, 144, 176, + 175, 177, 191, 160, 191, 128, 130, 170, + 175, 153, 154, 152, 153, 154, 155, 160, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 175, 160, 169, 175, 178, 180, + 189, 158, 159, 176, 177, 130, 134, 139, + 163, 167, 128, 129, 180, 255, 133, 143, + 144, 153, 154, 159, 178, 255, 128, 137, + 166, 173, 135, 147, 128, 131, 179, 255, + 129, 143, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 169, 182, 131, 140, + 141, 144, 153, 187, 189, 176, 178, 180, + 183, 184, 190, 191, 129, 171, 175, 181, + 182, 163, 170, 172, 173, 176, 185, 172, + 184, 187, 190, 191, 158, 128, 143, 160, + 175, 185, 187, 144, 145, 150, 155, 157, + 158, 135, 139, 141, 146, 168, 171, 189, + 160, 182, 186, 191, 129, 131, 133, 134, + 140, 143, 184, 186, 165, 166, 128, 129, + 130, 131, 132, 133, 134, 135, 136, 139, + 140, 141, 146, 147, 150, 151, 152, 153, + 154, 155, 156, 163, 128, 130, 184, 255, + 135, 165, 166, 175, 176, 190, 131, 175, + 187, 188, 190, 255, 176, 185, 128, 130, + 167, 180, 182, 191, 179, 128, 130, 179, + 255, 129, 137, 141, 143, 144, 153, 154, + 255, 172, 183, 159, 170, 176, 185, 188, + 128, 131, 190, 191, 151, 128, 132, 135, + 136, 139, 141, 162, 163, 166, 172, 176, + 180, 176, 255, 132, 143, 144, 153, 154, + 255, 175, 181, 184, 255, 129, 155, 158, + 255, 129, 143, 144, 153, 154, 255, 171, + 183, 128, 137, 157, 171, 176, 185, 169, + 171, 172, 173, 189, 190, 176, 180, 176, + 182, 145, 190, 143, 146, 178, 157, 158, + 160, 163, 133, 134, 137, 159, 168, 169, + 170, 165, 169, 173, 255, 131, 132, 140, + 169, 174, 255, 130, 132, 142, 191, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 163, 144, 150, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 170, 173, 181, 186, 128, + 255, 181, 190, 176, 183, 184, 185, 186, + 191, 192, 255, 130, 131, 137, 190, 136, + 144, 145, 191, 192, 255, 135, 179, 129, + 130, 132, 133, 144, 170, 176, 178, 194, + 204, 205, 210, 214, 215, 
216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 173, 128, 255, 176, + 255, 131, 137, 191, 145, 189, 135, 129, + 130, 132, 133, 144, 170, 176, 178, 170, + 173, 181, 186, 0, 127, 181, 190, 176, + 183, 184, 185, 186, 191, 192, 255, 130, + 131, 137, 190, 136, 144, 145, 191, 192, + 255, 135, 179, 129, 130, 132, 133, 144, + 170, 176, 178, 156, 128, 133, 144, 154, + 160, 191, 171, 176, 128, 138, 139, 159, + 160, 169, 174, 255, 148, 158, 169, 150, + 164, 167, 173, 176, 185, 189, 190, 192, + 255, 144, 143, 145, 146, 175, 176, 255, + 139, 140, 141, 255, 166, 176, 178, 255, + 186, 128, 137, 138, 170, 171, 179, 180, + 181, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 154, 164, 168, 128, 149, 150, 173, + 128, 152, 153, 155, 163, 255, 189, 132, + 185, 144, 176, 152, 161, 164, 165, 166, + 175, 177, 255, 132, 169, 177, 188, 129, + 131, 141, 142, 145, 146, 179, 181, 186, + 187, 190, 255, 142, 158, 133, 134, 137, + 138, 143, 150, 152, 155, 156, 161, 164, + 165, 166, 175, 176, 177, 178, 255, 188, + 129, 131, 133, 138, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 182, 184, 185, + 190, 255, 157, 131, 134, 137, 138, 142, + 144, 146, 152, 153, 158, 159, 165, 166, + 175, 178, 180, 182, 255, 189, 129, 131, + 133, 141, 143, 145, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 255, 134, 138, + 144, 185, 142, 159, 160, 161, 164, 165, + 166, 175, 176, 255, 189, 129, 131, 133, + 140, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 191, 177, 128, 132, + 135, 136, 139, 141, 150, 151, 156, 157, + 159, 161, 162, 163, 166, 175, 130, 131, + 156, 133, 138, 142, 144, 146, 149, 153, + 154, 158, 159, 163, 164, 168, 170, 174, + 185, 190, 191, 144, 151, 128, 130, 134, + 136, 138, 141, 166, 175, 189, 128, 131, + 133, 140, 142, 144, 146, 168, 170, 185, + 190, 255, 133, 137, 151, 142, 148, 152, + 154, 155, 159, 160, 161, 164, 165, 166, + 175, 176, 
255, 189, 129, 131, 133, 140, + 142, 144, 146, 168, 170, 179, 181, 185, + 188, 191, 158, 128, 132, 134, 136, 138, + 141, 149, 150, 160, 161, 162, 163, 166, + 175, 177, 178, 189, 129, 131, 133, 140, + 142, 144, 146, 186, 190, 255, 133, 137, + 142, 143, 150, 152, 158, 159, 161, 164, + 165, 166, 175, 176, 185, 186, 191, 192, + 255, 189, 130, 131, 133, 150, 154, 177, + 179, 187, 138, 150, 128, 134, 143, 148, + 152, 159, 166, 175, 178, 179, 177, 180, + 186, 135, 142, 144, 153, 177, 180, 185, + 187, 188, 136, 141, 144, 153, 128, 181, + 183, 185, 152, 153, 160, 169, 190, 191, + 128, 135, 137, 172, 177, 191, 128, 132, + 134, 135, 136, 140, 141, 151, 153, 188, + 134, 128, 129, 130, 131, 137, 138, 139, + 140, 141, 142, 143, 144, 153, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 167, 168, 169, 170, 172, 173, + 174, 175, 176, 177, 179, 181, 182, 183, + 188, 189, 190, 191, 132, 152, 180, 184, + 185, 187, 171, 190, 128, 137, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 144, 153, 154, 157, 160, + 255, 155, 156, 157, 159, 160, 255, 128, + 140, 142, 145, 146, 148, 160, 177, 178, + 180, 128, 145, 146, 147, 160, 172, 174, + 176, 178, 179, 180, 255, 148, 156, 158, + 159, 160, 169, 170, 255, 139, 142, 144, + 153, 160, 255, 169, 128, 170, 176, 255, + 128, 158, 160, 171, 176, 187, 128, 150, + 151, 155, 191, 149, 158, 160, 188, 128, + 137, 144, 153, 176, 190, 128, 132, 133, + 179, 180, 255, 133, 139, 140, 143, 144, + 153, 154, 170, 180, 255, 128, 130, 131, + 160, 161, 173, 174, 175, 176, 185, 186, + 255, 166, 179, 180, 255, 128, 163, 164, + 183, 128, 137, 141, 143, 144, 153, 154, + 189, 173, 144, 146, 148, 168, 169, 177, + 178, 180, 181, 182, 184, 185, 128, 181, + 188, 191, 128, 129, 130, 131, 132, 133, + 134, 146, 147, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 140, 143, 170, 174, + 191, 255, 165, 177, 191, 129, 147, 149, + 159, 160, 175, 176, 255, 144, 176, 165, + 170, 175, 177, 180, 255, 191, 168, 174, + 176, 255, 128, 134, 136, 142, 144, 
150, + 152, 158, 160, 191, 128, 130, 132, 133, + 134, 133, 170, 175, 187, 188, 153, 154, + 128, 146, 147, 148, 152, 153, 154, 155, + 156, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 129, 255, 141, 143, + 160, 169, 172, 255, 191, 128, 174, 175, + 178, 180, 189, 128, 157, 158, 159, 160, + 255, 176, 177, 178, 255, 130, 134, 139, + 163, 167, 168, 255, 128, 129, 130, 179, + 180, 255, 187, 189, 133, 143, 144, 153, + 154, 159, 178, 183, 184, 255, 128, 137, + 138, 165, 166, 173, 176, 255, 135, 147, + 148, 159, 189, 255, 128, 131, 132, 178, + 179, 255, 143, 129, 142, 144, 153, 154, + 164, 166, 175, 176, 185, 186, 255, 128, + 168, 169, 182, 131, 128, 139, 140, 141, + 144, 153, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 160, 170, 171, 175, + 178, 180, 181, 182, 128, 162, 163, 170, + 172, 173, 176, 185, 172, 173, 174, 175, + 180, 181, 182, 183, 184, 185, 187, 188, + 189, 190, 191, 176, 186, 158, 190, 128, + 134, 147, 151, 157, 168, 170, 182, 184, + 188, 128, 129, 131, 132, 134, 143, 144, + 255, 128, 143, 160, 175, 179, 180, 141, + 143, 176, 180, 182, 255, 191, 189, 255, + 191, 161, 186, 158, 159, 160, 190, 130, + 135, 138, 143, 146, 151, 154, 156, 185, + 187, 144, 145, 146, 147, 148, 150, 155, + 157, 158, 159, 128, 129, 130, 131, 133, + 135, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 148, 149, 152, 156, 157, 160, + 161, 162, 163, 164, 166, 168, 169, 170, + 171, 172, 173, 174, 176, 177, 153, 155, + 178, 179, 189, 160, 145, 255, 139, 143, + 182, 186, 187, 255, 158, 159, 160, 169, + 170, 255, 128, 191, 129, 131, 133, 134, + 140, 143, 144, 147, 149, 151, 153, 179, + 184, 186, 128, 135, 137, 164, 165, 166, + 128, 129, 130, 131, 132, 133, 134, 135, + 136, 138, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 155, 156, 162, 163, + 171, 128, 130, 131, 183, 184, 255, 135, + 165, 166, 175, 176, 190, 131, 175, 187, + 188, 190, 255, 144, 168, 176, 185, 128, + 130, 131, 166, 167, 180, 182, 191, 179, + 182, 144, 178, 
128, 130, 131, 178, 179, + 255, 155, 129, 132, 133, 137, 141, 143, + 144, 153, 154, 156, 157, 255, 128, 145, + 147, 171, 172, 183, 159, 170, 171, 175, + 176, 185, 186, 255, 189, 128, 131, 133, + 140, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 191, 144, 151, 128, + 132, 135, 136, 139, 141, 157, 161, 162, + 163, 166, 172, 176, 180, 128, 175, 176, + 255, 134, 132, 135, 136, 143, 144, 153, + 154, 255, 128, 174, 175, 181, 184, 255, + 129, 151, 152, 155, 158, 255, 132, 129, + 143, 144, 153, 154, 255, 128, 170, 171, + 183, 157, 171, 176, 185, 160, 169, 170, + 190, 192, 255, 160, 168, 169, 171, 172, + 173, 174, 188, 189, 190, 161, 167, 128, + 158, 160, 169, 144, 173, 176, 180, 128, + 175, 176, 182, 128, 131, 144, 153, 163, + 183, 189, 255, 133, 143, 145, 190, 191, + 255, 143, 146, 147, 159, 176, 177, 178, + 128, 136, 144, 153, 157, 158, 160, 163, + 133, 134, 137, 144, 145, 146, 147, 148, + 149, 154, 155, 156, 157, 158, 159, 168, + 169, 170, 150, 153, 165, 169, 173, 255, + 131, 132, 140, 169, 174, 255, 130, 132, + 131, 140, 141, 142, 191, 192, 255, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 160, 163, 184, 185, + 186, 161, 162, 133, 143, 144, 150, 151, + 255, 160, 128, 129, 132, 135, 133, 134, + 129, 160, 255, 192, 255, 176, 255, 156, + 128, 133, 144, 154, 176, 139, 159, 150, + 157, 159, 164, 167, 168, 170, 173, 143, + 145, 176, 255, 139, 255, 166, 176, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 166, 170, 172, 178, 150, + 153, 155, 163, 165, 167, 169, 173, 153, + 155, 163, 255, 189, 132, 185, 144, 152, + 161, 164, 255, 188, 129, 131, 190, 255, + 133, 134, 137, 138, 142, 150, 152, 161, + 164, 255, 131, 134, 137, 138, 142, 144, + 146, 175, 178, 180, 182, 255, 134, 138, + 142, 161, 164, 255, 188, 129, 131, 190, + 191, 128, 132, 135, 136, 139, 141, 150, + 151, 162, 163, 130, 190, 191, 151, 128, + 130, 134, 136, 138, 141, 128, 131, 190, 
+ 255, 133, 137, 142, 148, 151, 161, 164, + 255, 128, 132, 134, 136, 138, 141, 149, + 150, 162, 163, 129, 131, 190, 255, 133, + 137, 142, 150, 152, 161, 164, 255, 130, + 131, 138, 150, 143, 148, 152, 159, 178, + 179, 177, 180, 186, 135, 142, 177, 180, + 185, 187, 188, 136, 141, 181, 183, 185, + 152, 153, 190, 191, 177, 191, 128, 132, + 134, 135, 141, 151, 153, 188, 134, 128, + 129, 130, 141, 156, 157, 158, 159, 160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 171, 190, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 154, 157, 157, 159, 146, + 148, 178, 180, 146, 147, 178, 179, 180, + 255, 148, 156, 158, 255, 139, 142, 169, + 160, 171, 176, 187, 151, 155, 191, 149, + 158, 160, 188, 176, 190, 128, 132, 180, + 255, 133, 170, 180, 255, 128, 130, 161, + 173, 166, 179, 164, 183, 173, 144, 146, + 148, 168, 178, 180, 184, 185, 128, 181, + 188, 191, 128, 129, 131, 179, 181, 183, + 140, 143, 170, 174, 160, 164, 166, 175, + 144, 176, 175, 177, 191, 160, 191, 128, + 130, 170, 175, 153, 154, 153, 154, 155, + 160, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 175, 175, 178, 180, 189, + 158, 159, 176, 177, 130, 134, 139, 163, + 167, 128, 129, 180, 255, 133, 159, 178, + 255, 166, 173, 135, 147, 128, 131, 179, + 255, 129, 164, 166, 255, 169, 182, 131, + 140, 141, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 171, 175, 181, 182, + 163, 170, 172, 173, 172, 173, 184, 187, + 190, 191, 158, 190, 157, 168, 170, 182, + 184, 188, 128, 129, 131, 132, 134, 143, + 128, 143, 160, 175, 185, 187, 144, 145, + 150, 155, 157, 158, 135, 139, 141, 168, + 171, 189, 160, 182, 186, 191, 129, 131, + 133, 134, 140, 143, 184, 186, 165, 166, + 128, 129, 130, 132, 133, 134, 135, 136, + 139, 140, 141, 146, 147, 150, 151, 152, + 153, 154, 156, 128, 130, 184, 255, 135, + 190, 131, 175, 187, 188, 190, 255, 128, + 130, 167, 180, 179, 128, 130, 179, 255, + 129, 137, 141, 255, 172, 183, 159, 170, + 188, 128, 131, 190, 191, 151, 128, 132, + 135, 136, 139, 141, 
162, 163, 166, 172, + 176, 180, 176, 255, 132, 255, 175, 181, + 184, 255, 129, 155, 158, 255, 129, 255, + 171, 183, 157, 171, 171, 172, 189, 190, + 176, 180, 176, 182, 145, 190, 143, 146, + 178, 157, 158, 160, 163, 133, 134, 137, + 168, 169, 170, 165, 169, 173, 255, 131, + 132, 140, 169, 174, 255, 130, 132, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 163, 144, 150, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 170, 173, 181, 183, 186, + 151, 173, 130, 133, 146, 159, 165, 171, + 175, 255, 128, 255, 181, 190, 176, 183, + 184, 185, 186, 191, 192, 255, 135, 140, + 134, 138, 142, 161, 163, 255, 130, 131, + 137, 190, 136, 144, 145, 191, 192, 255, + 135, 179, 180, 129, 130, 132, 133, 144, + 170, 176, 178, 156, 128, 133, 144, 154, + 160, 191, 171, 176, 128, 138, 139, 159, + 160, 169, 174, 255, 148, 158, 169, 150, + 164, 167, 173, 176, 185, 189, 190, 192, + 255, 144, 143, 145, 146, 175, 176, 255, + 139, 140, 141, 255, 166, 176, 178, 255, + 186, 128, 137, 138, 170, 171, 179, 180, + 181, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 154, 164, 168, 128, 149, 150, 173, + 128, 152, 153, 155, 163, 255, 189, 132, + 185, 144, 176, 152, 161, 164, 165, 166, + 175, 177, 255, 132, 169, 177, 188, 129, + 131, 141, 142, 145, 146, 179, 181, 186, + 187, 190, 255, 142, 158, 133, 134, 137, + 138, 143, 150, 152, 155, 156, 161, 164, + 165, 166, 175, 176, 177, 178, 255, 188, + 129, 131, 133, 138, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 182, 184, 185, + 190, 255, 157, 131, 134, 137, 138, 142, + 144, 146, 152, 153, 158, 159, 165, 166, + 175, 178, 180, 182, 255, 189, 129, 131, + 133, 141, 143, 145, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 255, 134, 138, + 144, 185, 142, 159, 160, 161, 164, 165, + 166, 175, 176, 255, 189, 129, 131, 133, + 140, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 191, 177, 128, 132, + 
135, 136, 139, 141, 150, 151, 156, 157, + 159, 161, 162, 163, 166, 175, 130, 131, + 156, 133, 138, 142, 144, 146, 149, 153, + 154, 158, 159, 163, 164, 168, 170, 174, + 185, 190, 191, 144, 151, 128, 130, 134, + 136, 138, 141, 166, 175, 189, 128, 131, + 133, 140, 142, 144, 146, 168, 170, 185, + 190, 255, 133, 137, 151, 142, 148, 152, + 154, 155, 159, 160, 161, 164, 165, 166, + 175, 176, 255, 189, 129, 131, 133, 140, + 142, 144, 146, 168, 170, 179, 181, 185, + 188, 191, 158, 128, 132, 134, 136, 138, + 141, 149, 150, 160, 161, 162, 163, 166, + 175, 177, 178, 189, 129, 131, 133, 140, + 142, 144, 146, 186, 190, 255, 133, 137, + 142, 143, 150, 152, 158, 159, 161, 164, + 165, 166, 175, 176, 185, 186, 191, 192, + 255, 189, 130, 131, 133, 150, 154, 177, + 179, 187, 138, 150, 128, 134, 143, 148, + 152, 159, 166, 175, 178, 179, 177, 180, + 186, 135, 142, 144, 153, 177, 180, 185, + 187, 188, 136, 141, 144, 153, 128, 181, + 183, 185, 152, 153, 160, 169, 190, 191, + 128, 135, 137, 172, 177, 191, 128, 132, + 134, 135, 136, 140, 141, 151, 153, 188, + 134, 128, 129, 130, 131, 137, 138, 139, + 140, 141, 142, 143, 144, 153, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 167, 168, 169, 170, 172, 173, + 174, 175, 176, 177, 179, 181, 182, 183, + 188, 189, 190, 191, 132, 152, 180, 184, + 185, 187, 171, 190, 128, 137, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 144, 153, 154, 157, 160, + 255, 155, 156, 157, 159, 160, 255, 128, + 140, 142, 145, 146, 148, 160, 177, 178, + 180, 128, 145, 146, 147, 160, 172, 174, + 176, 178, 179, 180, 255, 148, 156, 158, + 159, 160, 169, 170, 255, 139, 142, 144, + 153, 160, 255, 169, 128, 170, 176, 255, + 128, 158, 160, 171, 176, 187, 128, 150, + 151, 155, 191, 149, 158, 160, 188, 128, + 137, 144, 153, 176, 190, 128, 132, 133, + 179, 180, 255, 133, 139, 140, 143, 144, + 153, 154, 170, 180, 255, 128, 130, 131, + 160, 161, 173, 174, 175, 176, 185, 186, + 255, 166, 179, 180, 255, 128, 163, 164, + 183, 173, 144, 146, 148, 
168, 169, 177, + 178, 180, 181, 182, 184, 185, 128, 181, + 188, 191, 128, 129, 130, 131, 132, 133, + 134, 146, 147, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 164, 167, 140, 143, + 152, 153, 170, 174, 191, 255, 165, 177, + 191, 129, 147, 149, 159, 160, 175, 176, + 255, 144, 176, 165, 170, 175, 177, 180, + 255, 191, 168, 174, 176, 255, 128, 134, + 136, 142, 144, 150, 152, 158, 160, 191, + 128, 130, 132, 133, 134, 133, 170, 175, + 187, 188, 153, 154, 128, 146, 147, 148, + 152, 153, 154, 155, 156, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, + 129, 255, 191, 128, 174, 175, 178, 180, + 189, 128, 157, 158, 159, 160, 255, 176, + 177, 178, 255, 130, 134, 139, 163, 167, + 168, 255, 128, 129, 130, 179, 180, 255, + 187, 189, 133, 143, 144, 153, 154, 159, + 178, 183, 184, 255, 128, 137, 138, 165, + 166, 173, 176, 255, 135, 147, 148, 159, + 189, 255, 128, 131, 132, 178, 179, 255, + 143, 129, 142, 144, 153, 154, 164, 166, + 175, 176, 185, 186, 255, 128, 168, 169, + 182, 131, 128, 139, 140, 141, 144, 153, + 187, 189, 176, 178, 180, 183, 184, 190, + 191, 129, 160, 170, 171, 175, 178, 180, + 181, 182, 128, 162, 163, 170, 172, 173, + 176, 185, 172, 173, 174, 175, 180, 181, + 182, 183, 184, 185, 187, 188, 189, 190, + 191, 176, 186, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 147, 128, + 143, 160, 175, 179, 180, 146, 149, 141, + 143, 176, 180, 182, 255, 191, 189, 255, + 135, 142, 154, 191, 161, 186, 158, 159, + 160, 190, 130, 135, 138, 143, 146, 151, + 154, 156, 185, 187, 144, 145, 146, 147, + 148, 150, 155, 157, 158, 159, 128, 129, + 130, 131, 133, 135, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 148, 149, 152, + 156, 157, 160, 161, 162, 163, 164, 166, + 168, 169, 170, 171, 172, 173, 174, 176, + 177, 153, 155, 178, 179, 189, 160, 145, + 255, 139, 143, 182, 186, 187, 255, 128, + 191, 129, 131, 133, 134, 140, 143, 144, + 147, 149, 151, 153, 179, 184, 186, 128, + 135, 137, 164, 165, 166, 128, 129, 130, + 131, 
132, 133, 134, 135, 136, 138, 139, + 140, 141, 146, 147, 150, 151, 152, 153, + 154, 155, 156, 162, 163, 171, 128, 130, + 131, 183, 184, 255, 135, 165, 166, 175, + 176, 190, 131, 175, 187, 188, 190, 255, + 128, 130, 131, 166, 167, 180, 182, 191, + 179, 182, 144, 178, 128, 130, 131, 178, + 179, 255, 155, 129, 132, 133, 137, 141, + 143, 144, 153, 154, 156, 157, 255, 128, + 145, 147, 171, 172, 183, 159, 170, 171, + 175, 176, 185, 186, 255, 189, 128, 131, + 133, 140, 143, 144, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 191, 144, 151, + 128, 132, 135, 136, 139, 141, 157, 161, + 162, 163, 166, 172, 176, 180, 128, 175, + 176, 255, 134, 132, 135, 136, 143, 144, + 153, 154, 255, 128, 174, 175, 181, 184, + 255, 129, 151, 152, 155, 158, 255, 132, + 129, 143, 144, 153, 154, 255, 128, 170, + 171, 183, 157, 171, 176, 185, 160, 168, + 169, 171, 172, 173, 174, 188, 189, 190, + 161, 167, 144, 173, 176, 180, 128, 175, + 176, 182, 133, 143, 145, 190, 191, 255, + 143, 146, 147, 159, 176, 177, 178, 128, + 136, 144, 153, 157, 158, 160, 163, 133, + 134, 137, 144, 145, 146, 147, 148, 149, + 154, 155, 156, 157, 158, 159, 168, 169, + 170, 150, 153, 165, 169, 173, 255, 131, + 132, 140, 169, 174, 255, 130, 132, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 160, 163, 184, 185, + 186, 161, 162, 133, 143, 144, 150, 151, + 255, 160, 128, 129, 132, 135, 133, 134, + 129, 160, 255, 192, 255, 176, 255, 156, + 128, 133, 144, 154, 160, 191, 171, 176, + 128, 138, 139, 159, 160, 169, 174, 255, + 148, 158, 169, 150, 164, 167, 173, 176, + 185, 189, 190, 192, 255, 144, 143, 145, + 146, 175, 176, 255, 139, 140, 141, 255, + 166, 176, 178, 255, 186, 128, 137, 138, + 170, 171, 179, 180, 181, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 154, 164, 168, + 128, 149, 150, 173, 128, 152, 153, 155, + 163, 255, 189, 132, 185, 144, 176, 152, + 161, 164, 165, 166, 175, 177, 
255, 132, + 169, 177, 188, 129, 131, 141, 142, 145, + 146, 179, 181, 186, 187, 190, 255, 142, + 158, 133, 134, 137, 138, 143, 150, 152, + 155, 156, 161, 164, 165, 166, 175, 176, + 177, 178, 255, 188, 129, 131, 133, 138, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 182, 184, 185, 190, 255, 157, 131, + 134, 137, 138, 142, 144, 146, 152, 153, + 158, 159, 165, 166, 175, 178, 180, 182, + 255, 189, 129, 131, 133, 141, 143, 145, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 255, 134, 138, 144, 185, 142, 159, + 160, 161, 164, 165, 166, 175, 176, 255, + 189, 129, 131, 133, 140, 143, 144, 147, + 168, 170, 176, 178, 179, 181, 185, 188, + 191, 177, 128, 132, 135, 136, 139, 141, + 150, 151, 156, 157, 159, 161, 162, 163, + 166, 175, 130, 131, 156, 133, 138, 142, + 144, 146, 149, 153, 154, 158, 159, 163, + 164, 168, 170, 174, 185, 190, 191, 144, + 151, 128, 130, 134, 136, 138, 141, 166, + 175, 189, 128, 131, 133, 140, 142, 144, + 146, 168, 170, 185, 190, 255, 133, 137, + 151, 142, 148, 152, 154, 155, 159, 160, + 161, 164, 165, 166, 175, 176, 255, 189, + 129, 131, 133, 140, 142, 144, 146, 168, + 170, 179, 181, 185, 188, 191, 158, 128, + 132, 134, 136, 138, 141, 149, 150, 160, + 161, 162, 163, 166, 175, 177, 178, 189, + 129, 131, 133, 140, 142, 144, 146, 186, + 190, 255, 133, 137, 142, 143, 150, 152, + 158, 159, 161, 164, 165, 166, 175, 176, + 185, 186, 191, 192, 255, 189, 130, 131, + 133, 150, 154, 177, 179, 187, 138, 150, + 128, 134, 143, 148, 152, 159, 166, 175, + 178, 179, 177, 180, 186, 135, 142, 144, + 153, 177, 180, 185, 187, 188, 136, 141, + 144, 153, 128, 181, 183, 185, 152, 153, + 160, 169, 190, 191, 128, 135, 137, 172, + 177, 191, 128, 132, 134, 135, 136, 140, + 141, 151, 153, 188, 134, 128, 129, 130, + 131, 137, 138, 139, 140, 141, 142, 143, + 144, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 167, 168, + 169, 170, 172, 173, 174, 175, 176, 177, + 179, 181, 182, 183, 188, 189, 190, 191, + 132, 152, 180, 184, 185, 187, 171, 190, + 128, 137, 
150, 153, 158, 160, 162, 164, + 167, 173, 177, 180, 143, 130, 141, 144, + 153, 154, 157, 160, 255, 155, 156, 157, + 159, 160, 255, 128, 140, 142, 145, 146, + 148, 160, 177, 178, 180, 128, 145, 146, + 147, 160, 172, 174, 176, 178, 179, 180, + 255, 148, 156, 158, 159, 160, 169, 170, + 255, 139, 142, 144, 153, 160, 255, 169, + 128, 170, 176, 255, 128, 158, 160, 171, + 176, 187, 128, 150, 151, 155, 191, 149, + 158, 160, 188, 128, 137, 144, 153, 176, + 190, 128, 132, 133, 179, 180, 255, 133, + 139, 140, 143, 144, 153, 154, 170, 180, + 255, 128, 130, 131, 160, 161, 173, 174, + 175, 176, 185, 186, 255, 166, 179, 180, + 255, 128, 163, 164, 183, 173, 144, 146, + 148, 168, 169, 177, 178, 180, 181, 182, + 184, 185, 128, 181, 188, 191, 128, 129, + 130, 131, 132, 133, 134, 146, 147, 176, + 177, 178, 179, 180, 181, 182, 183, 184, + 140, 143, 170, 174, 191, 255, 165, 177, + 191, 129, 147, 149, 159, 176, 255, 144, + 176, 165, 170, 175, 177, 180, 255, 191, + 168, 174, 176, 255, 128, 134, 136, 142, + 144, 150, 152, 158, 160, 191, 128, 130, + 131, 132, 133, 134, 135, 139, 140, 141, + 133, 170, 175, 177, 181, 187, 188, 173, + 128, 255, 176, 255, 131, 137, 191, 145, + 189, 135, 129, 130, 132, 133, 156, 128, + 133, 144, 154, 176, 139, 159, 150, 157, + 159, 164, 167, 168, 170, 173, 143, 145, + 176, 255, 139, 255, 166, 176, 171, 179, + 160, 161, 163, 164, 165, 167, 169, 171, + 173, 174, 175, 176, 177, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 166, 170, 172, 178, 150, 153, + 155, 163, 165, 167, 169, 173, 153, 155, + 163, 255, 189, 132, 185, 144, 152, 161, + 164, 255, 188, 129, 131, 190, 255, 133, + 134, 137, 138, 142, 150, 152, 161, 164, + 255, 131, 134, 137, 138, 142, 144, 146, + 175, 178, 180, 182, 255, 134, 138, 142, + 161, 164, 255, 188, 129, 131, 190, 191, + 128, 132, 135, 136, 139, 141, 150, 151, + 162, 163, 130, 190, 191, 151, 128, 130, + 134, 136, 138, 141, 128, 131, 190, 255, + 133, 137, 142, 148, 151, 161, 164, 255, + 128, 132, 134, 136, 138, 141, 149, 
150, + 162, 163, 129, 131, 190, 255, 133, 137, + 142, 150, 152, 161, 164, 255, 130, 131, + 138, 150, 143, 148, 152, 159, 178, 179, + 177, 180, 186, 135, 142, 177, 180, 185, + 187, 188, 136, 141, 181, 183, 185, 152, + 153, 190, 191, 177, 191, 128, 132, 134, + 135, 141, 151, 153, 188, 134, 128, 129, + 130, 141, 156, 157, 158, 159, 160, 162, + 164, 168, 169, 170, 172, 173, 174, 175, + 176, 179, 183, 171, 190, 150, 153, 158, + 160, 162, 164, 167, 173, 177, 180, 143, + 130, 141, 154, 157, 157, 159, 146, 148, + 178, 180, 146, 147, 178, 179, 180, 255, + 148, 156, 158, 255, 139, 142, 169, 160, + 171, 176, 187, 151, 155, 191, 149, 158, + 160, 188, 176, 190, 128, 132, 180, 255, + 133, 170, 180, 255, 128, 130, 161, 173, + 166, 179, 164, 183, 173, 144, 146, 148, + 168, 178, 180, 184, 185, 128, 181, 188, + 191, 128, 129, 131, 179, 181, 183, 140, + 143, 170, 174, 191, 255, 165, 129, 147, + 149, 159, 160, 175, 176, 255, 144, 176, + 175, 177, 191, 160, 191, 128, 130, 131, + 135, 139, 140, 141, 170, 175, 177, 181, + 153, 156, 160, 255, 187, 192, 255, 176, + 191, 144, 190, 152, 255, 153, 154, 155, + 160, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 175, 175, 178, 180, 189, + 158, 159, 176, 177, 130, 134, 139, 163, + 167, 128, 129, 180, 255, 133, 159, 178, + 255, 166, 173, 135, 147, 128, 131, 179, + 255, 129, 164, 166, 255, 169, 182, 131, + 140, 141, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 171, 175, 181, 182, + 163, 170, 172, 173, 172, 184, 185, 187, + 188, 189, 190, 191, 158, 128, 143, 160, + 175, 179, 180, 141, 143, 191, 166, 255, + 160, 255, 185, 187, 144, 145, 150, 155, + 157, 158, 135, 139, 141, 168, 171, 189, + 160, 182, 186, 191, 129, 131, 133, 134, + 140, 143, 184, 186, 165, 166, 128, 129, + 130, 132, 133, 134, 135, 136, 139, 140, + 141, 146, 147, 150, 151, 152, 153, 154, + 156, 128, 130, 184, 255, 135, 190, 131, + 175, 187, 188, 190, 255, 128, 130, 167, + 180, 179, 128, 130, 179, 255, 129, 137, + 141, 255, 172, 183, 159, 170, 188, 128, + 131, 190, 191, 
151, 128, 132, 135, 136, + 139, 141, 162, 163, 166, 172, 176, 180, + 176, 255, 132, 255, 175, 181, 184, 255, + 129, 155, 158, 255, 129, 255, 171, 183, + 157, 171, 171, 172, 189, 190, 176, 180, + 176, 182, 145, 190, 143, 146, 128, 178, + 128, 157, 158, 160, 163, 133, 134, 137, + 168, 169, 170, 165, 169, 173, 255, 131, + 132, 140, 169, 174, 255, 130, 132, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 163, 144, 150, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 153, 154, 155, 156, 160, + 255, 128, 146, 147, 148, 152, 153, 154, + 155, 156, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 129, 255, 191, + 128, 174, 175, 178, 180, 189, 128, 157, + 158, 159, 160, 255, 176, 177, 178, 255, + 130, 134, 139, 163, 167, 168, 255, 128, + 129, 130, 179, 180, 255, 187, 189, 133, + 143, 144, 153, 154, 159, 178, 183, 184, + 255, 128, 137, 138, 165, 166, 173, 176, + 255, 135, 147, 148, 159, 189, 255, 128, + 131, 132, 178, 179, 255, 143, 129, 142, + 144, 153, 154, 164, 166, 175, 176, 185, + 186, 255, 128, 168, 169, 182, 131, 128, + 139, 140, 141, 144, 153, 187, 189, 176, + 178, 180, 183, 184, 190, 191, 129, 160, + 170, 171, 175, 178, 180, 181, 182, 128, + 162, 163, 170, 172, 173, 176, 185, 172, + 173, 174, 175, 180, 181, 182, 183, 184, + 185, 187, 188, 189, 190, 191, 176, 186, + 158, 190, 128, 134, 147, 151, 157, 168, + 170, 182, 184, 188, 128, 143, 160, 175, + 179, 180, 191, 189, 255, 129, 154, 166, + 255, 158, 159, 160, 190, 191, 255, 130, + 135, 138, 143, 146, 151, 154, 156, 185, + 187, 144, 145, 146, 147, 148, 150, 155, + 157, 158, 159, 128, 129, 130, 131, 133, + 135, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 148, 149, 152, 156, 157, 160, + 161, 162, 163, 164, 166, 168, 169, 170, + 171, 172, 173, 174, 176, 177, 153, 155, + 178, 179, 189, 160, 145, 255, 139, 143, + 182, 186, 187, 255, 128, 191, 129, 131, + 133, 134, 140, 143, 144, 147, 149, 151, + 153, 179, 184, 186, 128, 135, 137, 164, 
+ 165, 166, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 138, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 155, 156, + 162, 163, 171, 128, 130, 131, 183, 184, + 255, 135, 165, 166, 175, 176, 190, 131, + 175, 187, 188, 190, 255, 128, 130, 131, + 166, 167, 180, 182, 191, 179, 182, 144, + 178, 128, 130, 131, 178, 179, 255, 155, + 129, 132, 133, 137, 141, 143, 144, 153, + 154, 156, 157, 255, 128, 145, 147, 171, + 172, 183, 159, 170, 171, 175, 176, 185, + 186, 255, 189, 128, 131, 133, 140, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 185, 188, 191, 144, 151, 128, 132, 135, + 136, 139, 141, 157, 161, 162, 163, 166, + 172, 176, 180, 128, 175, 176, 255, 134, + 132, 135, 136, 143, 144, 153, 154, 255, + 128, 174, 175, 181, 184, 255, 129, 151, + 152, 155, 158, 255, 132, 129, 143, 144, + 153, 154, 255, 128, 170, 171, 183, 157, + 171, 176, 185, 160, 168, 169, 171, 172, + 173, 174, 188, 189, 190, 161, 167, 144, + 173, 176, 180, 128, 175, 176, 182, 133, + 143, 145, 190, 191, 255, 143, 146, 147, + 159, 128, 176, 177, 178, 128, 136, 144, + 153, 157, 158, 160, 163, 133, 134, 137, + 144, 145, 146, 147, 148, 149, 154, 155, + 156, 157, 158, 159, 168, 169, 170, 150, + 153, 165, 169, 173, 255, 131, 132, 140, + 169, 174, 255, 130, 132, 128, 182, 187, + 255, 173, 180, 182, 255, 132, 155, 159, + 161, 175, 160, 163, 184, 185, 186, 161, + 162, 133, 143, 144, 150, 151, 255, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 176, 255, 170, 173, 181, + 186, 0, 127, 181, 190, 176, 183, 184, + 185, 186, 191, 192, 255, 130, 131, 137, + 137, 190, 136, 144, 145, 191, 192, 255, + 135, 179, 129, 130, 132, 133, 144, 170, + 176, 178, 156, 128, 133, 140, 141, 144, + 154, 160, 191, 171, 172, 176, 128, 138, + 139, 169, 174, 255, 148, 158, 169, 150, + 164, 167, 173, 176, 185, 189, 190, 192, + 255, 144, 143, 145, 146, 175, 176, 255, + 139, 140, 141, 255, 166, 176, 178, 255, + 184, 186, 128, 137, 138, 170, 171, 179, + 180, 181, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 
171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 154, 164, 168, 128, 149, 150, + 173, 128, 152, 153, 155, 163, 255, 189, + 132, 185, 144, 176, 152, 161, 164, 165, + 177, 255, 132, 169, 177, 188, 129, 131, + 141, 142, 145, 146, 179, 181, 186, 187, + 190, 255, 142, 158, 133, 134, 137, 138, + 143, 150, 152, 155, 156, 161, 164, 165, + 176, 177, 178, 255, 188, 129, 131, 133, + 138, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 182, 184, 185, 190, 255, 157, + 131, 134, 137, 138, 142, 144, 146, 152, + 153, 158, 159, 165, 178, 180, 182, 255, + 189, 129, 131, 133, 141, 143, 145, 147, + 168, 170, 176, 178, 179, 181, 185, 188, + 255, 134, 138, 144, 185, 142, 159, 160, + 161, 164, 165, 176, 255, 189, 129, 131, + 133, 140, 143, 144, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 191, 177, 128, + 132, 135, 136, 139, 141, 150, 151, 156, + 157, 159, 161, 162, 163, 166, 175, 130, + 131, 156, 133, 138, 142, 144, 146, 149, + 153, 154, 158, 159, 163, 164, 168, 170, + 174, 185, 190, 191, 144, 151, 128, 130, + 134, 136, 138, 141, 166, 175, 189, 128, + 131, 133, 140, 142, 144, 146, 168, 170, + 185, 190, 255, 133, 137, 151, 142, 148, + 152, 154, 155, 159, 160, 161, 164, 165, + 176, 255, 189, 129, 131, 133, 140, 142, + 144, 146, 168, 170, 179, 181, 185, 188, + 191, 158, 128, 132, 134, 136, 138, 141, + 149, 150, 160, 161, 162, 163, 166, 175, + 177, 178, 189, 129, 131, 133, 140, 142, + 144, 146, 186, 190, 255, 133, 137, 142, + 143, 150, 152, 158, 159, 161, 164, 165, + 176, 185, 186, 191, 192, 255, 189, 130, + 131, 133, 150, 154, 177, 179, 187, 138, + 150, 128, 134, 143, 148, 152, 159, 166, + 175, 178, 179, 177, 180, 186, 135, 142, + 144, 153, 177, 180, 185, 187, 188, 136, + 141, 144, 153, 128, 181, 183, 185, 152, + 153, 160, 169, 190, 191, 128, 135, 137, + 172, 177, 191, 128, 132, 134, 135, 136, + 140, 141, 151, 153, 188, 134, 128, 129, + 130, 131, 137, 138, 139, 140, 141, 142, + 143, 144, 153, 154, 155, 156, 157, 158, + 159, 
160, 161, 162, 163, 164, 165, 167, + 168, 169, 170, 172, 173, 174, 175, 176, + 177, 179, 181, 182, 183, 188, 189, 190, + 191, 132, 152, 180, 184, 185, 187, 171, + 190, 128, 137, 150, 153, 158, 160, 162, + 164, 167, 173, 177, 180, 130, 141, 143, + 157, 160, 255, 155, 156, 157, 159, 160, + 255, 128, 140, 142, 145, 146, 148, 160, + 177, 178, 180, 128, 145, 146, 147, 160, + 172, 174, 176, 178, 179, 180, 255, 148, + 156, 158, 159, 170, 255, 139, 142, 144, + 153, 160, 255, 169, 128, 170, 176, 255, + 128, 158, 160, 171, 176, 187, 128, 150, + 151, 155, 191, 149, 158, 160, 188, 128, + 137, 144, 153, 176, 190, 128, 132, 133, + 179, 180, 255, 133, 139, 140, 143, 154, + 170, 180, 255, 128, 130, 131, 160, 161, + 173, 174, 175, 176, 185, 186, 255, 166, + 179, 180, 255, 128, 163, 164, 183, 173, + 144, 146, 148, 168, 169, 177, 178, 180, + 181, 182, 184, 185, 128, 181, 188, 191, + 128, 129, 130, 131, 132, 133, 134, 146, + 147, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 164, 140, 143, 152, 153, 170, + 174, 191, 255, 132, 165, 177, 191, 129, + 147, 149, 159, 160, 175, 176, 255, 144, + 176, 165, 170, 175, 177, 180, 255, 191, + 168, 174, 176, 255, 128, 134, 136, 142, + 144, 150, 152, 158, 160, 191, 128, 130, + 132, 133, 134, 133, 170, 175, 187, 188, + 153, 154, 128, 146, 147, 148, 152, 153, + 154, 155, 156, 158, 159, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 129, 255, + 191, 128, 174, 175, 178, 180, 189, 128, + 157, 158, 159, 160, 255, 176, 177, 178, + 255, 130, 134, 139, 163, 167, 168, 255, + 128, 129, 130, 179, 180, 255, 187, 189, + 133, 143, 154, 159, 178, 183, 184, 255, + 128, 137, 138, 165, 166, 173, 176, 255, + 135, 147, 148, 159, 189, 255, 128, 131, + 132, 178, 179, 255, 143, 129, 142, 154, + 164, 166, 175, 186, 255, 128, 168, 169, + 182, 131, 128, 139, 140, 141, 144, 153, + 187, 189, 176, 178, 180, 183, 184, 190, + 191, 129, 160, 170, 171, 175, 178, 180, + 181, 182, 128, 162, 163, 170, 172, 173, + 176, 185, 172, 173, 174, 175, 
180, 181, + 182, 183, 184, 185, 187, 188, 189, 190, + 191, 176, 186, 158, 190, 128, 134, 147, + 151, 157, 168, 170, 182, 184, 188, 144, + 148, 128, 143, 160, 175, 179, 180, 144, + 146, 148, 141, 143, 176, 180, 182, 255, + 191, 189, 255, 135, 140, 142, 155, 191, + 161, 186, 158, 159, 160, 190, 130, 135, + 138, 143, 146, 151, 154, 156, 185, 187, + 144, 145, 146, 147, 148, 150, 155, 157, + 158, 159, 128, 129, 130, 131, 133, 135, + 138, 139, 140, 141, 142, 143, 144, 145, + 146, 148, 149, 152, 156, 157, 160, 161, + 162, 163, 164, 166, 168, 169, 170, 171, + 172, 173, 174, 176, 177, 153, 155, 178, + 179, 189, 160, 145, 255, 139, 143, 182, + 186, 187, 255, 128, 191, 129, 131, 133, + 134, 140, 143, 144, 147, 149, 151, 153, + 179, 184, 186, 128, 135, 137, 164, 165, + 166, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 138, 139, 140, 141, 146, 147, + 150, 151, 152, 153, 154, 155, 156, 162, + 163, 171, 128, 130, 131, 183, 184, 255, + 135, 165, 176, 190, 131, 175, 187, 188, + 190, 255, 128, 130, 131, 166, 167, 180, + 182, 191, 179, 182, 144, 178, 128, 130, + 131, 178, 179, 255, 155, 129, 132, 133, + 137, 141, 143, 154, 156, 157, 255, 128, + 145, 147, 171, 172, 183, 159, 170, 171, + 175, 176, 185, 186, 255, 189, 128, 131, + 133, 140, 143, 144, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 191, 144, 151, + 128, 132, 135, 136, 139, 141, 157, 161, + 162, 163, 166, 172, 176, 180, 128, 175, + 176, 255, 134, 132, 135, 136, 143, 154, + 255, 128, 174, 175, 181, 184, 255, 129, + 151, 152, 155, 158, 255, 132, 129, 143, + 154, 255, 128, 170, 171, 183, 157, 171, + 176, 185, 160, 168, 169, 171, 172, 173, + 174, 188, 189, 190, 161, 167, 144, 173, + 176, 180, 128, 175, 176, 182, 133, 143, + 145, 190, 191, 255, 143, 146, 147, 159, + 176, 177, 178, 128, 136, 144, 153, 157, + 158, 160, 163, 133, 134, 137, 144, 145, + 146, 147, 148, 149, 154, 155, 156, 157, + 158, 159, 168, 169, 170, 150, 153, 165, + 169, 173, 255, 131, 132, 140, 169, 174, + 255, 130, 132, 128, 182, 187, 255, 173, + 180, 182, 
255, 132, 155, 159, 161, 175, + 160, 163, 184, 185, 186, 161, 162, 133, + 143, 144, 150, 151, 255, 160, 128, 129, + 132, 135, 133, 134, 129, 160, 255, 192, + 255, 176, 255, 170, 173, 181, 183, 186, + 181, 190, 184, 185, 192, 255, 130, 190, + 136, 144, 192, 255, 135, 179, 180, 129, + 130, 132, 133, 144, 170, 176, 178, 156, + 128, 133, 144, 154, 160, 191, 171, 128, + 159, 160, 169, 174, 255, 148, 158, 169, + 176, 185, 189, 190, 192, 255, 143, 255, + 139, 140, 186, 128, 137, 138, 181, 160, + 161, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 128, + 173, 128, 155, 163, 255, 176, 164, 165, + 166, 175, 132, 169, 177, 141, 142, 145, + 146, 179, 181, 186, 187, 158, 133, 134, + 137, 138, 143, 150, 152, 155, 164, 165, + 166, 175, 178, 255, 188, 129, 131, 133, + 138, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 182, 184, 185, 190, 255, 157, + 131, 134, 137, 138, 142, 144, 146, 152, + 159, 165, 166, 175, 182, 255, 129, 131, + 133, 141, 143, 145, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 255, 134, 138, + 142, 143, 145, 159, 164, 165, 166, 175, + 176, 184, 186, 255, 129, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 177, 128, 132, 135, + 136, 139, 141, 150, 151, 156, 157, 159, + 163, 166, 175, 156, 130, 131, 133, 138, + 142, 144, 146, 149, 153, 154, 158, 159, + 163, 164, 168, 170, 174, 185, 190, 191, + 144, 151, 128, 130, 134, 136, 138, 141, + 166, 175, 128, 131, 133, 140, 142, 144, + 146, 168, 170, 185, 189, 255, 133, 137, + 151, 142, 148, 155, 159, 164, 165, 166, + 175, 176, 255, 129, 131, 133, 140, 142, + 144, 146, 168, 170, 179, 181, 185, 188, + 191, 158, 128, 132, 134, 136, 138, 141, + 149, 150, 160, 163, 166, 175, 177, 178, + 129, 131, 133, 140, 142, 144, 146, 186, + 189, 255, 133, 137, 143, 150, 152, 158, + 164, 165, 166, 175, 176, 185, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 187, 138, 150, 128, 134, 143, 148, 
152, + 159, 166, 175, 178, 179, 177, 180, 186, + 135, 142, 144, 153, 177, 180, 185, 187, + 188, 136, 141, 144, 153, 128, 181, 183, + 185, 152, 153, 160, 169, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 151, 153, 188, 134, 128, 129, 130, 131, + 137, 138, 139, 140, 141, 142, 143, 144, + 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 167, 168, 169, + 170, 172, 173, 174, 175, 176, 177, 179, + 181, 182, 183, 188, 189, 190, 191, 132, + 152, 180, 184, 185, 187, 171, 190, 128, + 137, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 144, 153, + 154, 157, 160, 255, 155, 156, 160, 255, + 128, 140, 142, 148, 160, 180, 128, 147, + 160, 172, 174, 176, 178, 179, 180, 255, + 148, 156, 158, 159, 160, 169, 170, 255, + 139, 142, 144, 153, 160, 255, 128, 170, + 176, 255, 128, 158, 160, 171, 176, 187, + 191, 149, 158, 160, 188, 128, 137, 144, + 153, 176, 190, 140, 143, 144, 153, 154, + 170, 180, 255, 128, 175, 176, 185, 186, + 255, 180, 255, 128, 183, 144, 146, 148, + 182, 184, 185, 128, 181, 188, 191, 128, + 129, 130, 131, 132, 133, 134, 146, 147, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 164, 167, 140, 143, 152, 153, 170, + 174, 191, 255, 165, 176, 191, 129, 147, + 149, 159, 160, 177, 178, 255, 144, 176, + 165, 170, 180, 255, 168, 174, 176, 190, + 192, 255, 128, 134, 136, 142, 144, 150, + 152, 158, 160, 191, 128, 130, 132, 133, + 134, 133, 170, 175, 187, 188, 153, 154, + 128, 146, 147, 148, 152, 153, 154, 155, + 156, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 129, 255, 191, 128, + 178, 180, 189, 168, 255, 188, 133, 143, + 144, 153, 154, 159, 184, 186, 190, 255, + 128, 137, 138, 173, 176, 255, 148, 159, + 189, 255, 129, 142, 144, 153, 154, 164, + 166, 175, 176, 185, 186, 255, 128, 182, + 128, 141, 144, 153, 187, 189, 176, 178, + 180, 183, 184, 190, 191, 129, 160, 175, + 178, 182, 128, 170, 172, 173, 176, 185, + 172, 173, 174, 175, 180, 181, 182, 183, + 184, 185, 187, 
188, 189, 190, 191, 176, + 186, 158, 190, 128, 134, 147, 151, 157, + 168, 170, 182, 184, 188, 147, 128, 143, + 160, 175, 179, 180, 189, 190, 192, 255, + 158, 190, 130, 135, 138, 143, 146, 151, + 154, 156, 185, 187, 144, 145, 146, 147, + 148, 150, 155, 157, 158, 159, 128, 129, + 130, 131, 133, 135, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 148, 149, 152, + 156, 157, 160, 161, 162, 163, 164, 166, + 168, 169, 170, 171, 172, 173, 174, 176, + 177, 153, 155, 178, 179, 189, 145, 159, + 161, 255, 139, 143, 187, 255, 191, 128, + 131, 133, 134, 140, 147, 149, 151, 153, + 179, 184, 186, 128, 135, 137, 166, 129, + 130, 131, 132, 133, 135, 136, 138, 139, + 140, 141, 146, 147, 150, 151, 152, 153, + 154, 155, 156, 162, 163, 171, 128, 134, + 135, 165, 166, 175, 176, 190, 187, 188, + 190, 255, 128, 180, 182, 191, 182, 144, + 179, 155, 133, 137, 141, 143, 144, 153, + 157, 255, 128, 145, 147, 183, 171, 175, + 176, 185, 186, 255, 128, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 144, 151, 128, 132, + 135, 136, 139, 141, 157, 163, 166, 172, + 176, 180, 134, 136, 143, 144, 153, 154, + 255, 128, 181, 184, 255, 129, 151, 158, + 255, 129, 131, 133, 143, 144, 153, 154, + 255, 157, 171, 176, 185, 160, 168, 169, + 171, 172, 173, 174, 188, 189, 190, 161, + 167, 144, 173, 176, 180, 133, 143, 191, + 255, 143, 159, 176, 177, 178, 128, 136, + 144, 153, 157, 158, 160, 163, 133, 134, + 137, 144, 145, 146, 147, 148, 149, 154, + 155, 156, 157, 158, 159, 168, 169, 170, + 150, 153, 165, 169, 173, 255, 131, 132, + 140, 169, 174, 255, 130, 132, 128, 182, + 187, 255, 173, 180, 182, 255, 132, 155, + 159, 161, 175, 160, 163, 184, 185, 186, + 161, 162, 133, 143, 151, 255, 160, 128, + 129, 132, 135, 133, 134, 129, 160, 255, + 176, 255, 170, 173, 181, 186, 128, 255, + 181, 190, 176, 183, 184, 185, 186, 191, + 192, 255, 130, 131, 137, 190, 136, 144, + 145, 191, 192, 255, 135, 179, 129, 130, + 132, 133, 144, 170, 176, 178, 156, 128, + 133, 144, 154, 160, 191, 171, 176, 128, 
+ 138, 139, 159, 160, 169, 174, 255, 148, + 158, 169, 150, 164, 167, 173, 176, 185, + 189, 190, 192, 255, 144, 143, 145, 146, + 175, 176, 255, 139, 140, 141, 255, 166, + 176, 178, 255, 186, 128, 137, 138, 170, + 171, 179, 180, 181, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 154, 164, 168, 128, + 149, 150, 173, 128, 152, 153, 155, 163, + 255, 189, 132, 185, 144, 176, 152, 161, + 164, 165, 166, 175, 177, 255, 132, 169, + 177, 188, 129, 131, 141, 142, 145, 146, + 179, 181, 186, 187, 190, 255, 142, 158, + 133, 134, 137, 138, 143, 150, 152, 155, + 156, 161, 164, 165, 166, 175, 176, 177, + 178, 255, 188, 129, 131, 133, 138, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 182, 184, 185, 190, 255, 157, 131, 134, + 137, 138, 142, 144, 146, 152, 153, 158, + 159, 165, 166, 175, 178, 180, 182, 255, + 189, 129, 131, 133, 141, 143, 145, 147, + 168, 170, 176, 178, 179, 181, 185, 188, + 255, 134, 138, 144, 185, 142, 159, 160, + 161, 164, 165, 166, 175, 176, 255, 189, + 129, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 177, 128, 132, 135, 136, 139, 141, 150, + 151, 156, 157, 159, 161, 162, 163, 166, + 175, 130, 131, 156, 133, 138, 142, 144, + 146, 149, 153, 154, 158, 159, 163, 164, + 168, 170, 174, 185, 190, 191, 144, 151, + 128, 130, 134, 136, 138, 141, 166, 175, + 189, 128, 131, 133, 140, 142, 144, 146, + 168, 170, 185, 190, 255, 133, 137, 151, + 142, 148, 152, 154, 155, 159, 160, 161, + 164, 165, 166, 175, 176, 255, 189, 129, + 131, 133, 140, 142, 144, 146, 168, 170, + 179, 181, 185, 188, 191, 158, 128, 132, + 134, 136, 138, 141, 149, 150, 160, 161, + 162, 163, 166, 175, 177, 178, 189, 129, + 131, 133, 140, 142, 144, 146, 186, 190, + 255, 133, 137, 142, 143, 150, 152, 158, + 159, 161, 164, 165, 166, 175, 176, 185, + 186, 191, 192, 255, 189, 130, 131, 133, + 150, 154, 177, 179, 187, 138, 150, 128, + 134, 143, 148, 152, 
159, 166, 175, 178, + 179, 177, 180, 186, 135, 142, 144, 153, + 177, 180, 185, 187, 188, 136, 141, 144, + 153, 128, 181, 183, 185, 152, 153, 160, + 169, 190, 191, 128, 135, 137, 172, 177, + 191, 128, 132, 134, 135, 136, 140, 141, + 151, 153, 188, 134, 128, 129, 130, 131, + 137, 138, 139, 140, 141, 142, 143, 144, + 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 167, 168, 169, + 170, 172, 173, 174, 175, 176, 177, 179, + 181, 182, 183, 188, 189, 190, 191, 132, + 152, 180, 184, 185, 187, 171, 190, 128, + 137, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 144, 153, + 154, 157, 160, 255, 155, 156, 157, 159, + 160, 255, 128, 140, 142, 145, 146, 148, + 160, 177, 178, 180, 128, 145, 146, 147, + 160, 172, 174, 176, 178, 179, 180, 255, + 148, 156, 158, 159, 160, 169, 170, 255, + 139, 142, 144, 153, 160, 255, 169, 128, + 170, 176, 255, 128, 158, 160, 171, 176, + 187, 134, 143, 144, 153, 128, 150, 151, + 155, 191, 149, 158, 160, 188, 128, 137, + 144, 153, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 143, 144, 153, + 154, 170, 180, 255, 128, 130, 131, 160, + 161, 173, 174, 175, 176, 185, 186, 255, + 166, 179, 180, 255, 128, 163, 164, 183, + 128, 137, 141, 143, 144, 153, 154, 189, + 173, 144, 146, 148, 168, 169, 177, 178, + 180, 181, 182, 184, 185, 128, 181, 188, + 191, 128, 129, 130, 131, 132, 133, 134, + 146, 147, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 140, 143, 170, 174, 191, + 255, 165, 177, 191, 129, 147, 149, 159, + 176, 255, 144, 176, 165, 170, 175, 177, + 180, 255, 191, 168, 174, 176, 255, 128, + 134, 136, 142, 144, 150, 152, 158, 160, + 191, 128, 130, 131, 132, 133, 134, 135, + 139, 140, 141, 133, 170, 175, 177, 181, + 187, 188, 153, 154, 155, 156, 160, 255, + 128, 146, 147, 148, 152, 153, 154, 155, + 156, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 129, 255, 141, 143, + 160, 169, 172, 255, 191, 128, 174, 175, + 178, 180, 189, 128, 157, 158, 159, 160, + 
255, 176, 177, 178, 255, 130, 134, 139, + 163, 167, 168, 255, 128, 129, 130, 179, + 180, 255, 187, 189, 133, 143, 144, 153, + 154, 159, 178, 183, 184, 255, 128, 137, + 138, 165, 166, 173, 176, 255, 135, 147, + 148, 159, 189, 255, 128, 131, 132, 178, + 179, 255, 143, 129, 142, 144, 153, 154, + 164, 166, 175, 176, 185, 186, 255, 128, + 168, 169, 182, 131, 128, 139, 140, 141, + 144, 153, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 160, 170, 171, 175, + 178, 180, 181, 182, 128, 162, 163, 170, + 172, 173, 176, 185, 172, 173, 174, 175, + 180, 181, 182, 183, 184, 185, 187, 188, + 189, 190, 191, 176, 186, 158, 190, 128, + 134, 147, 151, 157, 168, 170, 182, 184, + 188, 128, 143, 160, 175, 179, 180, 141, + 143, 176, 180, 182, 255, 191, 189, 255, + 191, 161, 186, 158, 159, 160, 190, 191, + 255, 130, 135, 138, 143, 146, 151, 154, + 156, 185, 187, 144, 145, 146, 147, 148, + 150, 155, 157, 158, 159, 128, 129, 130, + 131, 133, 135, 138, 139, 140, 141, 142, + 143, 144, 145, 146, 148, 149, 152, 156, + 157, 160, 161, 162, 163, 164, 166, 168, + 169, 170, 171, 172, 173, 174, 176, 177, + 153, 155, 178, 179, 189, 160, 145, 255, + 139, 143, 182, 186, 187, 255, 158, 159, + 160, 169, 170, 255, 128, 191, 129, 131, + 133, 134, 140, 143, 144, 147, 149, 151, + 153, 179, 184, 186, 128, 135, 137, 164, + 165, 166, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 138, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 155, 156, + 162, 163, 171, 128, 130, 131, 183, 184, + 255, 135, 165, 166, 175, 176, 190, 131, + 175, 187, 188, 190, 255, 144, 168, 176, + 185, 128, 130, 131, 166, 167, 180, 182, + 191, 179, 182, 144, 178, 128, 130, 131, + 178, 179, 255, 155, 129, 132, 133, 137, + 141, 143, 144, 153, 154, 156, 157, 255, + 128, 145, 147, 171, 172, 183, 159, 170, + 171, 175, 176, 185, 186, 255, 189, 128, + 131, 133, 140, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 191, 144, + 151, 128, 132, 135, 136, 139, 141, 157, + 161, 162, 163, 166, 172, 176, 180, 128, + 175, 176, 255, 134, 132, 
135, 136, 143, + 144, 153, 154, 255, 128, 174, 175, 181, + 184, 255, 129, 151, 152, 155, 158, 255, + 132, 129, 143, 144, 153, 154, 255, 128, + 170, 171, 183, 128, 137, 157, 171, 176, + 185, 160, 169, 170, 190, 192, 255, 160, + 168, 169, 171, 172, 173, 174, 188, 189, + 190, 161, 167, 128, 158, 160, 169, 144, + 173, 176, 180, 128, 175, 176, 182, 128, + 131, 144, 153, 163, 183, 189, 255, 133, + 143, 145, 190, 191, 255, 143, 146, 147, + 159, 128, 176, 177, 178, 128, 136, 144, + 153, 157, 158, 160, 163, 133, 134, 137, + 144, 145, 146, 147, 148, 149, 154, 155, + 156, 157, 158, 159, 168, 169, 170, 150, + 153, 165, 169, 173, 255, 131, 132, 140, + 169, 174, 255, 130, 132, 131, 140, 141, + 142, 191, 192, 255, 128, 182, 187, 255, + 173, 180, 182, 255, 132, 155, 159, 161, + 175, 160, 163, 184, 185, 186, 161, 162, + 133, 143, 144, 150, 151, 255, 160, 128, + 129, 132, 135, 133, 134, 129, 160, 255, + 192, 255, 176, 255, 170, 173, 181, 186, + 128, 255, 181, 190, 176, 183, 184, 185, + 186, 191, 192, 255, 130, 131, 137, 137, + 190, 136, 144, 145, 191, 192, 255, 135, + 179, 129, 130, 132, 133, 144, 170, 176, + 178, 156, 128, 133, 140, 141, 144, 154, + 160, 191, 171, 172, 176, 128, 138, 139, + 169, 174, 255, 148, 158, 169, 150, 164, + 167, 173, 176, 185, 189, 190, 192, 255, + 144, 143, 145, 146, 175, 176, 255, 139, + 140, 141, 255, 166, 176, 178, 255, 184, + 186, 128, 137, 138, 170, 171, 179, 180, + 181, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 154, 164, 168, 128, 149, 150, 173, + 128, 152, 153, 155, 163, 255, 189, 132, + 185, 144, 176, 152, 161, 164, 165, 177, + 255, 132, 169, 177, 188, 129, 131, 141, + 142, 145, 146, 179, 181, 186, 187, 190, + 255, 142, 158, 133, 134, 137, 138, 143, + 150, 152, 155, 156, 161, 164, 165, 176, + 177, 178, 255, 188, 129, 131, 133, 138, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 182, 184, 185, 190, 255, 157, 131, + 134, 
137, 138, 142, 144, 146, 152, 153, + 158, 159, 165, 178, 180, 182, 255, 189, + 129, 131, 133, 141, 143, 145, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 255, + 134, 138, 144, 185, 142, 159, 160, 161, + 164, 165, 176, 255, 189, 129, 131, 133, + 140, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 191, 177, 128, 132, + 135, 136, 139, 141, 150, 151, 156, 157, + 159, 161, 162, 163, 166, 175, 130, 131, + 156, 133, 138, 142, 144, 146, 149, 153, + 154, 158, 159, 163, 164, 168, 170, 174, + 185, 190, 191, 144, 151, 128, 130, 134, + 136, 138, 141, 166, 175, 189, 128, 131, + 133, 140, 142, 144, 146, 168, 170, 185, + 190, 255, 133, 137, 151, 142, 148, 152, + 154, 155, 159, 160, 161, 164, 165, 176, + 255, 189, 129, 131, 133, 140, 142, 144, + 146, 168, 170, 179, 181, 185, 188, 191, + 158, 128, 132, 134, 136, 138, 141, 149, + 150, 160, 161, 162, 163, 166, 175, 177, + 178, 189, 129, 131, 133, 140, 142, 144, + 146, 186, 190, 255, 133, 137, 142, 143, + 150, 152, 158, 159, 161, 164, 165, 176, + 185, 186, 191, 192, 255, 189, 130, 131, + 133, 150, 154, 177, 179, 187, 138, 150, + 128, 134, 143, 148, 152, 159, 166, 175, + 178, 179, 177, 180, 186, 135, 142, 144, + 153, 177, 180, 185, 187, 188, 136, 141, + 144, 153, 128, 181, 183, 185, 152, 153, + 160, 169, 190, 191, 128, 135, 137, 172, + 177, 191, 128, 132, 134, 135, 136, 140, + 141, 151, 153, 188, 134, 128, 129, 130, + 131, 137, 138, 139, 140, 141, 142, 143, + 144, 153, 154, 155, 156, 157, 158, 159, + 160, 161, 162, 163, 164, 165, 167, 168, + 169, 170, 172, 173, 174, 175, 176, 177, + 179, 181, 182, 183, 188, 189, 190, 191, + 132, 152, 180, 184, 185, 187, 171, 190, + 128, 137, 150, 153, 158, 160, 162, 164, + 167, 173, 177, 180, 130, 141, 143, 157, + 160, 255, 155, 156, 157, 159, 160, 255, + 128, 140, 142, 145, 146, 148, 160, 177, + 178, 180, 128, 145, 146, 147, 160, 172, + 174, 176, 178, 179, 180, 255, 148, 156, + 158, 159, 170, 255, 139, 142, 144, 153, + 160, 255, 169, 128, 170, 176, 255, 128, + 158, 160, 171, 176, 187, 128, 
150, 151, + 155, 191, 149, 158, 160, 188, 128, 137, + 144, 153, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 143, 154, 170, + 180, 255, 128, 130, 131, 160, 161, 173, + 174, 175, 176, 185, 186, 255, 166, 179, + 180, 255, 128, 163, 164, 183, 173, 144, + 146, 148, 168, 169, 177, 178, 180, 181, + 182, 184, 185, 128, 181, 188, 191, 128, + 129, 130, 131, 132, 133, 134, 146, 147, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 164, 140, 143, 152, 153, 170, 174, + 191, 255, 132, 165, 177, 191, 129, 147, + 149, 159, 160, 175, 176, 255, 144, 176, + 165, 170, 175, 177, 180, 255, 191, 168, + 174, 176, 255, 128, 134, 136, 142, 144, + 150, 152, 158, 160, 191, 128, 130, 132, + 133, 134, 133, 170, 175, 187, 188, 153, + 154, 128, 146, 147, 148, 152, 153, 154, + 155, 156, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 129, 255, 191, + 128, 174, 175, 178, 180, 189, 128, 157, + 158, 159, 160, 255, 176, 177, 178, 255, + 130, 134, 139, 163, 167, 168, 255, 128, + 129, 130, 179, 180, 255, 187, 189, 133, + 143, 154, 159, 178, 183, 184, 255, 128, + 137, 138, 165, 166, 173, 176, 255, 135, + 147, 148, 159, 189, 255, 128, 131, 132, + 178, 179, 255, 143, 129, 142, 154, 164, + 166, 175, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 144, 148, + 128, 143, 160, 175, 179, 180, 144, 146, + 148, 141, 143, 176, 180, 182, 255, 191, + 189, 255, 135, 140, 142, 155, 191, 161, + 186, 158, 159, 160, 190, 130, 135, 138, + 143, 146, 151, 154, 156, 185, 187, 144, + 145, 146, 147, 148, 150, 155, 157, 158, + 159, 128, 129, 130, 131, 133, 135, 138, + 139, 140, 141, 142, 143, 144, 145, 146, + 148, 149, 152, 156, 157, 160, 161, 162, + 163, 164, 
166, 168, 169, 170, 171, 172, + 173, 174, 176, 177, 153, 155, 178, 179, + 189, 160, 145, 255, 139, 143, 182, 186, + 187, 255, 128, 191, 129, 131, 133, 134, + 140, 143, 144, 147, 149, 151, 153, 179, + 184, 186, 128, 135, 137, 164, 165, 166, + 128, 129, 130, 131, 132, 133, 134, 135, + 136, 138, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 155, 156, 162, 163, + 171, 128, 130, 131, 183, 184, 255, 135, + 165, 176, 190, 131, 175, 187, 188, 190, + 255, 128, 130, 131, 166, 167, 180, 182, + 191, 179, 182, 144, 178, 128, 130, 131, + 178, 179, 255, 155, 129, 132, 133, 137, + 141, 143, 154, 156, 157, 255, 128, 145, + 147, 171, 172, 183, 159, 170, 171, 175, + 176, 185, 186, 255, 189, 128, 131, 133, + 140, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 191, 144, 151, 128, + 132, 135, 136, 139, 141, 157, 161, 162, + 163, 166, 172, 176, 180, 128, 175, 176, + 255, 134, 132, 135, 136, 143, 154, 255, + 128, 174, 175, 181, 184, 255, 129, 151, + 152, 155, 158, 255, 132, 129, 143, 154, + 255, 128, 170, 171, 183, 157, 171, 176, + 185, 160, 168, 169, 171, 172, 173, 174, + 188, 189, 190, 161, 167, 144, 173, 176, + 180, 128, 175, 176, 182, 133, 143, 145, + 190, 191, 255, 143, 146, 147, 159, 176, + 177, 178, 128, 136, 144, 153, 157, 158, + 160, 163, 133, 134, 137, 144, 145, 146, + 147, 148, 149, 154, 155, 156, 157, 158, + 159, 168, 169, 170, 150, 153, 165, 169, + 173, 255, 131, 132, 140, 169, 174, 255, + 130, 132, 128, 182, 187, 255, 173, 180, + 182, 255, 132, 155, 159, 161, 175, 160, + 163, 184, 185, 186, 161, 162, 133, 143, + 144, 150, 151, 255, 160, 128, 129, 132, + 135, 133, 134, 129, 160, 255, 192, 255, + 176, 255, 173, 128, 255, 176, 255, 131, + 137, 191, 145, 189, 135, 129, 130, 132, + 133, 156, 128, 133, 144, 154, 171, 176, + 139, 159, 160, 169, 150, 157, 159, 164, + 167, 168, 170, 173, 176, 185, 143, 145, + 176, 255, 139, 255, 166, 176, 128, 137, + 171, 179, 160, 161, 163, 164, 165, 167, + 169, 171, 173, 174, 175, 176, 177, 179, + 180, 181, 182, 183, 184, 185, 186, 
187, + 188, 189, 190, 191, 166, 170, 172, 178, + 150, 153, 155, 163, 165, 167, 169, 173, + 153, 155, 163, 255, 189, 132, 185, 144, + 152, 161, 164, 165, 166, 175, 176, 255, + 188, 129, 131, 190, 255, 133, 134, 137, + 138, 142, 150, 152, 161, 164, 165, 166, + 175, 176, 255, 131, 134, 137, 138, 142, + 144, 146, 165, 166, 175, 178, 180, 182, + 255, 134, 138, 142, 161, 164, 165, 166, + 175, 176, 255, 188, 129, 131, 190, 191, + 128, 132, 135, 136, 139, 141, 150, 151, + 162, 163, 166, 175, 130, 190, 191, 151, + 128, 130, 134, 136, 138, 141, 166, 175, + 128, 131, 190, 255, 133, 137, 142, 148, + 151, 161, 164, 165, 166, 175, 176, 255, + 128, 132, 134, 136, 138, 141, 149, 150, + 162, 163, 166, 175, 129, 131, 190, 255, + 133, 137, 142, 150, 152, 161, 164, 165, + 166, 175, 176, 255, 130, 131, 138, 150, + 143, 148, 152, 159, 166, 175, 178, 179, + 177, 180, 186, 135, 142, 144, 153, 177, + 180, 185, 187, 188, 136, 141, 144, 153, + 181, 183, 185, 152, 153, 160, 169, 190, + 191, 177, 191, 128, 132, 134, 135, 141, + 151, 153, 188, 134, 128, 129, 130, 141, + 156, 157, 158, 159, 160, 162, 164, 165, + 167, 168, 169, 170, 172, 173, 174, 175, + 176, 177, 179, 183, 171, 190, 128, 137, + 150, 153, 158, 160, 162, 164, 167, 173, + 177, 180, 143, 130, 141, 144, 153, 154, + 157, 157, 159, 146, 148, 178, 180, 146, + 147, 178, 179, 180, 255, 148, 156, 158, + 159, 160, 169, 170, 255, 139, 142, 144, + 153, 169, 160, 171, 176, 187, 151, 155, + 191, 149, 158, 160, 188, 128, 137, 144, + 153, 176, 190, 128, 132, 180, 255, 133, + 143, 144, 153, 154, 170, 180, 255, 128, + 130, 161, 173, 176, 185, 166, 179, 164, + 183, 128, 137, 144, 153, 173, 144, 146, + 148, 168, 178, 180, 184, 185, 128, 181, + 188, 191, 128, 129, 131, 179, 181, 183, + 140, 143, 170, 174, 160, 164, 166, 175, + 144, 176, 175, 177, 191, 160, 191, 128, + 130, 170, 175, 153, 154, 152, 153, 154, + 155, 160, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 175, 160, 169, 175, + 178, 180, 189, 158, 159, 176, 177, 130, + 134, 139, 163, 
167, 128, 129, 180, 255, + 133, 143, 144, 153, 154, 159, 178, 255, + 128, 137, 166, 173, 135, 147, 128, 131, + 179, 255, 129, 143, 144, 153, 154, 164, + 166, 175, 176, 185, 186, 255, 169, 182, + 131, 140, 141, 144, 153, 187, 189, 176, + 178, 180, 183, 184, 190, 191, 129, 171, + 175, 181, 182, 163, 170, 172, 173, 176, + 185, 172, 184, 187, 190, 191, 158, 128, + 143, 160, 175, 185, 187, 144, 145, 150, + 155, 157, 158, 135, 139, 141, 146, 168, + 171, 189, 160, 182, 186, 191, 129, 131, + 133, 134, 140, 143, 184, 186, 165, 166, + 128, 129, 130, 131, 132, 133, 134, 135, + 136, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 155, 156, 163, 128, 130, + 184, 255, 135, 165, 166, 175, 176, 190, + 131, 175, 187, 188, 190, 255, 176, 185, + 128, 130, 167, 180, 182, 191, 179, 128, + 130, 179, 255, 129, 137, 141, 143, 144, + 153, 154, 255, 172, 183, 159, 170, 176, + 185, 188, 128, 131, 190, 191, 151, 128, + 132, 135, 136, 139, 141, 162, 163, 166, + 172, 176, 180, 176, 255, 132, 143, 144, + 153, 154, 255, 175, 181, 184, 255, 129, + 155, 158, 255, 129, 143, 144, 153, 154, + 255, 171, 183, 157, 171, 176, 185, 169, + 171, 172, 173, 189, 190, 176, 180, 176, + 182, 145, 190, 143, 146, 178, 157, 158, + 160, 163, 133, 134, 137, 159, 168, 169, + 170, 165, 169, 173, 255, 131, 132, 140, + 169, 174, 255, 130, 132, 142, 191, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 163, 144, 150, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 192, 255, 170, 173, 181, 186, 128, + 255, 181, 190, 176, 183, 184, 185, 186, + 191, 192, 255, 130, 131, 137, 137, 190, + 136, 144, 145, 191, 192, 255, 135, 179, + 129, 130, 132, 133, 144, 170, 176, 178, + 156, 128, 133, 140, 141, 144, 154, 160, + 191, 171, 172, 176, 128, 138, 139, 159, + 160, 169, 174, 255, 148, 158, 169, 150, + 164, 167, 173, 176, 185, 189, 190, 192, + 255, 144, 143, 145, 146, 175, 176, 255, + 139, 140, 141, 255, 166, 176, 178, 255, + 184, 186, 128, 137, 138, 170, 171, 179, + 180, 181, 160, 161, 162, 163, 164, 165, 
+ 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 154, 164, 168, 128, 149, 150, + 173, 128, 152, 153, 155, 163, 255, 189, + 132, 185, 144, 176, 152, 161, 164, 165, + 166, 175, 177, 255, 132, 169, 177, 188, + 129, 131, 141, 142, 145, 146, 179, 181, + 186, 187, 190, 255, 142, 158, 133, 134, + 137, 138, 143, 150, 152, 155, 156, 161, + 164, 165, 166, 175, 176, 177, 178, 255, + 188, 129, 131, 133, 138, 143, 144, 147, + 168, 170, 176, 178, 179, 181, 182, 184, + 185, 190, 255, 157, 131, 134, 137, 138, + 142, 144, 146, 152, 153, 158, 159, 165, + 166, 175, 178, 180, 182, 255, 189, 129, + 131, 133, 141, 143, 145, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 255, 134, + 138, 144, 185, 142, 159, 160, 161, 164, + 165, 166, 175, 176, 255, 189, 129, 131, + 133, 140, 143, 144, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 191, 177, 128, + 132, 135, 136, 139, 141, 150, 151, 156, + 157, 159, 161, 162, 163, 166, 175, 130, + 131, 156, 133, 138, 142, 144, 146, 149, + 153, 154, 158, 159, 163, 164, 168, 170, + 174, 185, 190, 191, 144, 151, 128, 130, + 134, 136, 138, 141, 166, 175, 189, 128, + 131, 133, 140, 142, 144, 146, 168, 170, + 185, 190, 255, 133, 137, 151, 142, 148, + 152, 154, 155, 159, 160, 161, 164, 165, + 166, 175, 176, 255, 189, 129, 131, 133, + 140, 142, 144, 146, 168, 170, 179, 181, + 185, 188, 191, 158, 128, 132, 134, 136, + 138, 141, 149, 150, 160, 161, 162, 163, + 166, 175, 177, 178, 189, 129, 131, 133, + 140, 142, 144, 146, 186, 190, 255, 133, + 137, 142, 143, 150, 152, 158, 159, 161, + 164, 165, 166, 175, 176, 185, 186, 191, + 192, 255, 189, 130, 131, 133, 150, 154, + 177, 179, 187, 138, 150, 128, 134, 143, + 148, 152, 159, 166, 175, 178, 179, 177, + 180, 186, 135, 142, 144, 153, 177, 180, + 185, 187, 188, 136, 141, 144, 153, 128, + 181, 183, 185, 152, 153, 160, 169, 190, + 191, 128, 135, 137, 172, 177, 191, 128, + 132, 134, 135, 136, 140, 141, 151, 153, + 188, 134, 128, 129, 
130, 131, 137, 138, + 139, 140, 141, 142, 143, 144, 153, 154, + 155, 156, 157, 158, 159, 160, 161, 162, + 163, 164, 165, 167, 168, 169, 170, 172, + 173, 174, 175, 176, 177, 179, 181, 182, + 183, 188, 189, 190, 191, 132, 152, 180, + 184, 185, 187, 171, 190, 128, 137, 150, + 153, 158, 160, 162, 164, 167, 173, 177, + 180, 143, 130, 141, 144, 153, 154, 157, + 160, 255, 155, 156, 157, 159, 160, 255, + 128, 140, 142, 145, 146, 148, 160, 177, + 178, 180, 128, 145, 146, 147, 160, 172, + 174, 176, 178, 179, 180, 255, 148, 156, + 158, 159, 160, 169, 170, 255, 139, 142, + 144, 153, 160, 255, 169, 128, 170, 176, + 255, 128, 158, 160, 171, 176, 187, 128, + 150, 151, 155, 191, 149, 158, 160, 188, + 128, 137, 144, 153, 176, 190, 128, 132, + 133, 179, 180, 255, 133, 139, 140, 143, + 144, 153, 154, 170, 180, 255, 128, 130, + 131, 160, 161, 173, 174, 175, 176, 185, + 186, 255, 166, 179, 180, 255, 128, 163, + 164, 183, 173, 144, 146, 148, 168, 169, + 177, 178, 180, 181, 182, 184, 185, 128, + 181, 188, 191, 128, 129, 130, 131, 132, + 133, 134, 146, 147, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 164, 140, 143, + 152, 153, 170, 174, 191, 255, 132, 165, + 177, 191, 129, 147, 149, 159, 160, 175, + 176, 255, 144, 176, 165, 170, 175, 177, + 180, 255, 191, 168, 174, 176, 255, 128, + 134, 136, 142, 144, 150, 152, 158, 160, + 191, 128, 130, 132, 133, 134, 133, 170, + 175, 187, 188, 153, 154, 128, 146, 147, + 148, 152, 153, 154, 155, 156, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 129, 255, 191, 128, 174, 175, 178, + 180, 189, 128, 157, 158, 159, 160, 255, + 176, 177, 178, 255, 130, 134, 139, 163, + 167, 168, 255, 128, 129, 130, 179, 180, + 255, 187, 189, 133, 143, 144, 153, 154, + 159, 178, 183, 184, 255, 128, 137, 138, + 165, 166, 173, 176, 255, 135, 147, 148, + 159, 189, 255, 128, 131, 132, 178, 179, + 255, 143, 129, 142, 144, 153, 154, 164, + 166, 175, 176, 185, 186, 255, 128, 168, + 169, 182, 131, 128, 139, 140, 141, 144, + 
153, 187, 189, 176, 178, 180, 183, 184, + 190, 191, 129, 160, 170, 171, 175, 178, + 180, 181, 182, 128, 162, 163, 170, 172, + 173, 176, 185, 172, 173, 174, 175, 180, + 181, 182, 183, 184, 185, 187, 188, 189, + 190, 191, 176, 186, 158, 190, 128, 134, + 147, 151, 157, 168, 170, 182, 184, 188, + 144, 148, 128, 143, 160, 175, 179, 180, + 191, 189, 255, 158, 159, 160, 190, 130, + 135, 138, 143, 146, 151, 154, 156, 185, + 187, 144, 145, 146, 147, 148, 150, 155, + 157, 158, 159, 128, 129, 130, 131, 133, + 135, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 148, 149, 152, 156, 157, 160, + 161, 162, 163, 164, 166, 168, 169, 170, + 171, 172, 173, 174, 176, 177, 153, 155, + 178, 179, 189, 160, 145, 255, 139, 143, + 182, 186, 187, 255, 128, 191, 129, 131, + 133, 134, 140, 143, 144, 147, 149, 151, + 153, 179, 184, 186, 128, 135, 137, 164, + 165, 166, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 138, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 155, 156, + 162, 163, 171, 128, 130, 131, 183, 184, + 255, 135, 165, 166, 175, 176, 190, 131, + 175, 187, 188, 190, 255, 128, 130, 131, + 166, 167, 180, 182, 191, 179, 182, 144, + 178, 128, 130, 131, 178, 179, 255, 155, + 129, 132, 133, 137, 141, 143, 144, 153, + 154, 156, 157, 255, 128, 145, 147, 171, + 172, 183, 159, 170, 171, 175, 176, 185, + 186, 255, 189, 128, 131, 133, 140, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 185, 188, 191, 144, 151, 128, 132, 135, + 136, 139, 141, 157, 161, 162, 163, 166, + 172, 176, 180, 128, 175, 176, 255, 134, + 132, 135, 136, 143, 144, 153, 154, 255, + 128, 174, 175, 181, 184, 255, 129, 151, + 152, 155, 158, 255, 132, 129, 143, 144, + 153, 154, 255, 128, 170, 171, 183, 157, + 171, 176, 185, 160, 168, 169, 171, 172, + 173, 174, 188, 189, 190, 161, 167, 144, + 173, 176, 180, 128, 175, 176, 182, 133, + 143, 145, 190, 191, 255, 143, 146, 147, + 159, 176, 177, 178, 128, 136, 144, 153, + 157, 158, 160, 163, 133, 134, 137, 144, + 145, 146, 147, 148, 149, 154, 155, 156, + 157, 158, 159, 168, 169, 
170, 150, 153, + 165, 169, 173, 255, 131, 132, 140, 169, + 174, 255, 130, 132, 128, 182, 187, 255, + 173, 180, 182, 255, 132, 155, 159, 161, + 175, 160, 163, 184, 185, 186, 161, 162, + 133, 143, 144, 150, 151, 255, 160, 128, + 129, 132, 135, 133, 134, 129, 160, 255, + 192, 255, 176, 255, 170, 173, 181, 183, + 186, 128, 255, 181, 190, 176, 183, 184, + 185, 186, 191, 192, 255, 130, 131, 137, + 190, 136, 144, 145, 191, 192, 255, 135, + 179, 180, 129, 130, 132, 133, 144, 170, + 176, 178, 156, 128, 133, 144, 154, 160, + 191, 171, 176, 128, 138, 139, 159, 160, + 169, 174, 255, 148, 158, 169, 150, 164, + 167, 173, 176, 185, 189, 190, 192, 255, + 144, 143, 145, 146, 175, 176, 255, 139, + 140, 141, 255, 166, 176, 178, 255, 186, + 128, 137, 138, 170, 171, 179, 180, 181, + 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 154, 164, 168, 128, 149, 150, 173, 128, + 152, 153, 155, 163, 255, 189, 132, 185, + 144, 176, 152, 161, 164, 165, 166, 175, + 177, 255, 132, 169, 177, 188, 129, 131, + 141, 142, 145, 146, 179, 181, 186, 187, + 190, 255, 142, 158, 133, 134, 137, 138, + 143, 150, 152, 155, 156, 161, 164, 165, + 166, 175, 176, 177, 178, 255, 188, 129, + 131, 133, 138, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 182, 184, 185, 190, + 255, 157, 131, 134, 137, 138, 142, 144, + 146, 152, 153, 158, 159, 165, 166, 175, + 178, 180, 182, 255, 189, 129, 131, 133, + 141, 143, 145, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 255, 134, 138, 144, + 185, 142, 159, 160, 161, 164, 165, 166, + 175, 176, 255, 189, 129, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 177, 128, 132, 135, + 136, 139, 141, 150, 151, 156, 157, 159, + 161, 162, 163, 166, 175, 130, 131, 156, + 133, 138, 142, 144, 146, 149, 153, 154, + 158, 159, 163, 164, 168, 170, 174, 185, + 190, 191, 144, 151, 128, 130, 134, 136, + 138, 141, 166, 175, 189, 128, 131, 133, + 140, 
142, 144, 146, 168, 170, 185, 190, + 255, 133, 137, 151, 142, 148, 152, 154, + 155, 159, 160, 161, 164, 165, 166, 175, + 176, 255, 189, 129, 131, 133, 140, 142, + 144, 146, 168, 170, 179, 181, 185, 188, + 191, 158, 128, 132, 134, 136, 138, 141, + 149, 150, 160, 161, 162, 163, 166, 175, + 177, 178, 189, 129, 131, 133, 140, 142, + 144, 146, 186, 190, 255, 133, 137, 142, + 143, 150, 152, 158, 159, 161, 164, 165, + 166, 175, 176, 185, 186, 191, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 187, 138, 150, 128, 134, 143, 148, 152, + 159, 166, 175, 178, 179, 177, 180, 186, + 135, 142, 144, 153, 177, 180, 185, 187, + 188, 136, 141, 144, 153, 128, 181, 183, + 185, 152, 153, 160, 169, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 135, 136, 140, 141, 151, 153, 188, 134, + 128, 129, 130, 131, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 165, 167, 168, 169, 170, 172, 173, 174, + 175, 176, 177, 179, 181, 182, 183, 188, + 189, 190, 191, 132, 152, 180, 184, 185, + 187, 171, 190, 128, 137, 150, 153, 158, + 160, 162, 164, 167, 173, 177, 180, 143, + 130, 141, 144, 153, 154, 157, 160, 255, + 155, 156, 157, 159, 160, 255, 128, 140, + 142, 145, 146, 148, 160, 177, 178, 180, + 128, 145, 146, 147, 160, 172, 174, 176, + 178, 179, 180, 255, 148, 156, 158, 159, + 160, 169, 170, 255, 139, 142, 144, 153, + 160, 255, 169, 128, 170, 176, 255, 128, + 158, 160, 171, 176, 187, 128, 150, 151, + 155, 191, 149, 158, 160, 188, 128, 137, + 144, 153, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 143, 144, 153, + 154, 170, 180, 255, 128, 130, 131, 160, + 161, 173, 174, 175, 176, 185, 186, 255, + 166, 179, 180, 255, 128, 163, 164, 183, + 173, 144, 146, 148, 168, 169, 177, 178, + 180, 181, 182, 184, 185, 128, 181, 188, + 191, 128, 129, 130, 131, 132, 133, 134, + 146, 147, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 164, 167, 140, 143, 152, + 153, 170, 174, 191, 255, 165, 177, 191, + 129, 147, 149, 159, 160, 175, 
176, 255, + 144, 176, 165, 170, 175, 177, 180, 255, + 191, 168, 174, 176, 255, 128, 134, 136, + 142, 144, 150, 152, 158, 160, 191, 128, + 130, 132, 133, 134, 133, 170, 175, 187, + 188, 153, 154, 128, 146, 147, 148, 152, + 153, 154, 155, 156, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 129, + 255, 191, 128, 174, 175, 178, 180, 189, + 128, 157, 158, 159, 160, 255, 176, 177, + 178, 255, 130, 134, 139, 163, 167, 168, + 255, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 143, 144, 153, 154, 159, 178, + 183, 184, 255, 128, 137, 138, 165, 166, + 173, 176, 255, 135, 147, 148, 159, 189, + 255, 128, 131, 132, 178, 179, 255, 143, + 129, 142, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 147, 128, + 143, 160, 175, 179, 180, 191, 189, 255, + 158, 159, 160, 190, 130, 135, 138, 143, + 146, 151, 154, 156, 185, 187, 144, 145, + 146, 147, 148, 150, 155, 157, 158, 159, + 128, 129, 130, 131, 133, 135, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 148, + 149, 152, 156, 157, 160, 161, 162, 163, + 164, 166, 168, 169, 170, 171, 172, 173, + 174, 176, 177, 153, 155, 178, 179, 189, + 160, 145, 255, 139, 143, 182, 186, 187, + 255, 128, 191, 129, 131, 133, 134, 140, + 143, 144, 147, 149, 151, 153, 179, 184, + 186, 128, 135, 137, 164, 165, 166, 128, + 129, 130, 131, 132, 133, 134, 135, 136, + 138, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 155, 156, 162, 163, 171, + 128, 130, 131, 183, 184, 255, 135, 165, + 166, 175, 176, 190, 131, 175, 187, 188, + 190, 255, 128, 130, 131, 166, 167, 180, + 182, 191, 179, 182, 144, 178, 128, 130, + 131, 178, 179, 255, 155, 129, 132, 133, + 137, 141, 
143, 144, 153, 154, 156, 157, + 255, 128, 145, 147, 171, 172, 183, 159, + 170, 171, 175, 176, 185, 186, 255, 189, + 128, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 144, 151, 128, 132, 135, 136, 139, 141, + 157, 161, 162, 163, 166, 172, 176, 180, + 128, 175, 176, 255, 134, 132, 135, 136, + 143, 144, 153, 154, 255, 128, 174, 175, + 181, 184, 255, 129, 151, 152, 155, 158, + 255, 132, 129, 143, 144, 153, 154, 255, + 128, 170, 171, 183, 157, 171, 176, 185, + 160, 168, 169, 171, 172, 173, 174, 188, + 189, 190, 161, 167, 144, 173, 176, 180, + 128, 175, 176, 182, 133, 143, 145, 190, + 191, 255, 143, 146, 147, 159, 176, 177, + 178, 128, 136, 144, 153, 157, 158, 160, + 163, 133, 134, 137, 144, 145, 146, 147, + 148, 149, 154, 155, 156, 157, 158, 159, + 168, 169, 170, 150, 153, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 160, 163, + 184, 185, 186, 161, 162, 133, 143, 144, + 150, 151, 255, 160, 128, 129, 132, 135, + 133, 134, 129, 160, 255, 192, 255, 176, + 255, 170, 173, 181, 186, 128, 255, 181, + 190, 176, 183, 184, 185, 186, 191, 192, + 255, 130, 131, 137, 190, 136, 144, 145, + 191, 192, 255, 135, 179, 129, 130, 132, + 133, 144, 170, 176, 178, 156, 128, 133, + 144, 154, 160, 191, 171, 176, 128, 138, + 139, 159, 160, 169, 174, 255, 148, 158, + 169, 150, 164, 167, 173, 176, 185, 189, + 190, 192, 255, 144, 143, 145, 146, 175, + 176, 255, 139, 140, 141, 255, 166, 176, + 178, 255, 186, 128, 137, 138, 170, 171, + 179, 180, 181, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 172, + 173, 174, 175, 176, 177, 178, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 154, 164, 168, 128, 149, + 150, 173, 128, 152, 153, 155, 163, 255, + 189, 132, 185, 144, 176, 152, 161, 164, + 165, 166, 175, 177, 255, 132, 169, 177, + 188, 129, 131, 141, 142, 145, 146, 179, + 181, 186, 187, 190, 255, 142, 158, 133, + 134, 137, 138, 143, 150, 152, 155, 
156, + 161, 164, 165, 166, 175, 176, 177, 178, + 255, 188, 129, 131, 133, 138, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 182, + 184, 185, 190, 255, 157, 131, 134, 137, + 138, 142, 144, 146, 152, 153, 158, 159, + 165, 166, 175, 178, 180, 182, 255, 189, + 129, 131, 133, 141, 143, 145, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 255, + 134, 138, 144, 185, 142, 159, 160, 161, + 164, 165, 166, 175, 176, 255, 189, 129, + 131, 133, 140, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 191, 177, + 128, 132, 135, 136, 139, 141, 150, 151, + 156, 157, 159, 161, 162, 163, 166, 175, + 130, 131, 156, 133, 138, 142, 144, 146, + 149, 153, 154, 158, 159, 163, 164, 168, + 170, 174, 185, 190, 191, 144, 151, 128, + 130, 134, 136, 138, 141, 166, 175, 189, + 128, 131, 133, 140, 142, 144, 146, 168, + 170, 185, 190, 255, 133, 137, 151, 142, + 148, 152, 154, 155, 159, 160, 161, 164, + 165, 166, 175, 176, 255, 189, 129, 131, + 133, 140, 142, 144, 146, 168, 170, 179, + 181, 185, 188, 191, 158, 128, 132, 134, + 136, 138, 141, 149, 150, 160, 161, 162, + 163, 166, 175, 177, 178, 189, 129, 131, + 133, 140, 142, 144, 146, 186, 190, 255, + 133, 137, 142, 143, 150, 152, 158, 159, + 161, 164, 165, 166, 175, 176, 185, 186, + 191, 192, 255, 189, 130, 131, 133, 150, + 154, 177, 179, 187, 138, 150, 128, 134, + 143, 148, 152, 159, 166, 175, 178, 179, + 177, 180, 186, 135, 142, 144, 153, 177, + 180, 185, 187, 188, 136, 141, 144, 153, + 128, 181, 183, 185, 152, 153, 160, 169, + 190, 191, 128, 135, 137, 172, 177, 191, + 128, 132, 134, 135, 136, 140, 141, 151, + 153, 188, 134, 128, 129, 130, 131, 137, + 138, 139, 140, 141, 142, 143, 144, 153, + 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 167, 168, 169, 170, + 172, 173, 174, 175, 176, 177, 179, 181, + 182, 183, 188, 189, 190, 191, 132, 152, + 180, 184, 185, 187, 171, 190, 128, 137, + 150, 153, 158, 160, 162, 164, 167, 173, + 177, 180, 143, 130, 141, 144, 153, 154, + 157, 160, 255, 155, 156, 157, 159, 160, + 255, 128, 140, 
142, 145, 146, 148, 160, + 177, 178, 180, 128, 145, 146, 147, 160, + 172, 174, 176, 178, 179, 180, 255, 148, + 156, 158, 159, 160, 169, 170, 255, 139, + 142, 144, 153, 160, 255, 169, 128, 170, + 176, 255, 128, 158, 160, 171, 176, 187, + 128, 150, 151, 155, 191, 149, 158, 160, + 188, 128, 137, 144, 153, 176, 190, 128, + 132, 133, 179, 180, 255, 133, 139, 140, + 143, 144, 153, 154, 170, 180, 255, 128, + 130, 131, 160, 161, 173, 174, 175, 176, + 185, 186, 255, 166, 179, 180, 255, 128, + 163, 164, 183, 173, 144, 146, 148, 168, + 169, 177, 178, 180, 181, 182, 184, 185, + 128, 181, 188, 191, 128, 129, 130, 131, + 132, 133, 134, 146, 147, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 140, 143, + 170, 174, 191, 255, 165, 177, 191, 129, + 147, 149, 159, 176, 255, 144, 176, 165, + 170, 175, 177, 180, 255, 191, 168, 174, + 176, 255, 128, 134, 136, 142, 144, 150, + 152, 158, 160, 191, 128, 130, 131, 132, + 133, 134, 135, 139, 140, 141, 133, 170, + 175, 177, 181, 187, 188, 153, 154, 155, + 156, 160, 255, 128, 146, 147, 148, 152, + 153, 154, 155, 156, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 129, + 255, 191, 128, 174, 175, 178, 180, 189, + 128, 157, 158, 159, 160, 255, 176, 177, + 178, 255, 130, 134, 139, 163, 167, 168, + 255, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 143, 144, 153, 154, 159, 178, + 183, 184, 255, 128, 137, 138, 165, 166, + 173, 176, 255, 135, 147, 148, 159, 189, + 255, 128, 131, 132, 178, 179, 255, 143, + 129, 142, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 128, 143, + 160, 175, 179, 180, 141, 143, 176, 180, + 182, 255, 191, 189, 255, 191, 161, 186, 
+ 158, 159, 160, 190, 191, 255, 130, 135, + 138, 143, 146, 151, 154, 156, 185, 187, + 144, 145, 146, 147, 148, 150, 155, 157, + 158, 159, 128, 129, 130, 131, 133, 135, + 138, 139, 140, 141, 142, 143, 144, 145, + 146, 148, 149, 152, 156, 157, 160, 161, + 162, 163, 164, 166, 168, 169, 170, 171, + 172, 173, 174, 176, 177, 153, 155, 178, + 179, 189, 160, 145, 255, 139, 143, 182, + 186, 187, 255, 128, 191, 129, 131, 133, + 134, 140, 143, 144, 147, 149, 151, 153, + 179, 184, 186, 128, 135, 137, 164, 165, + 166, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 138, 139, 140, 141, 146, 147, + 150, 151, 152, 153, 154, 155, 156, 162, + 163, 171, 128, 130, 131, 183, 184, 255, + 135, 165, 166, 175, 176, 190, 131, 175, + 187, 188, 190, 255, 128, 130, 131, 166, + 167, 180, 182, 191, 179, 182, 144, 178, + 128, 130, 131, 178, 179, 255, 155, 129, + 132, 133, 137, 141, 143, 144, 153, 154, + 156, 157, 255, 128, 145, 147, 171, 172, + 183, 159, 170, 171, 175, 176, 185, 186, + 255, 189, 128, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 144, 151, 128, 132, 135, 136, + 139, 141, 157, 161, 162, 163, 166, 172, + 176, 180, 128, 175, 176, 255, 134, 132, + 135, 136, 143, 144, 153, 154, 255, 128, + 174, 175, 181, 184, 255, 129, 151, 152, + 155, 158, 255, 132, 129, 143, 144, 153, + 154, 255, 128, 170, 171, 183, 157, 171, + 176, 185, 160, 168, 169, 171, 172, 173, + 174, 188, 189, 190, 161, 167, 144, 173, + 176, 180, 128, 175, 176, 182, 133, 143, + 145, 190, 191, 255, 143, 146, 147, 159, + 128, 176, 177, 178, 128, 136, 144, 153, + 157, 158, 160, 163, 133, 134, 137, 144, + 145, 146, 147, 148, 149, 154, 155, 156, + 157, 158, 159, 168, 169, 170, 150, 153, + 165, 169, 173, 255, 131, 132, 140, 169, + 174, 255, 130, 132, 128, 182, 187, 255, + 173, 180, 182, 255, 132, 155, 159, 161, + 175, 160, 163, 184, 185, 186, 161, 162, + 133, 143, 144, 150, 151, 255, 160, 128, + 129, 132, 135, 133, 134, 129, 160, 255, + 192, 255, 176, 255, 170, 173, 181, 186, + 0, 127, 181, 190, 176, 
183, 184, 185, + 186, 191, 192, 255, 130, 131, 137, 190, + 136, 144, 145, 191, 192, 255, 135, 179, + 129, 130, 132, 133, 144, 170, 176, 178, + 156, 128, 133, 144, 154, 160, 191, 171, + 176, 128, 138, 139, 159, 160, 169, 174, + 255, 148, 158, 169, 150, 164, 167, 173, + 176, 185, 189, 190, 192, 255, 144, 143, + 145, 146, 175, 176, 255, 139, 140, 141, + 255, 166, 176, 178, 255, 186, 128, 137, + 138, 170, 171, 179, 180, 181, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 154, 164, + 168, 128, 149, 150, 173, 128, 152, 153, + 155, 163, 255, 189, 132, 185, 144, 176, + 152, 161, 164, 165, 166, 175, 177, 255, + 132, 169, 177, 188, 129, 131, 141, 142, + 145, 146, 179, 181, 186, 187, 190, 255, + 142, 158, 133, 134, 137, 138, 143, 150, + 152, 155, 156, 161, 164, 165, 166, 175, + 176, 177, 178, 255, 188, 129, 131, 133, + 138, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 182, 184, 185, 190, 255, 157, + 131, 134, 137, 138, 142, 144, 146, 152, + 153, 158, 159, 165, 166, 175, 178, 180, + 182, 255, 189, 129, 131, 133, 141, 143, + 145, 147, 168, 170, 176, 178, 179, 181, + 185, 188, 255, 134, 138, 144, 185, 142, + 159, 160, 161, 164, 165, 166, 175, 176, + 255, 189, 129, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 177, 128, 132, 135, 136, 139, + 141, 150, 151, 156, 157, 159, 161, 162, + 163, 166, 175, 130, 131, 156, 133, 138, + 142, 144, 146, 149, 153, 154, 158, 159, + 163, 164, 168, 170, 174, 185, 190, 191, + 144, 151, 128, 130, 134, 136, 138, 141, + 166, 175, 189, 128, 131, 133, 140, 142, + 144, 146, 168, 170, 185, 190, 255, 133, + 137, 151, 142, 148, 152, 154, 155, 159, + 160, 161, 164, 165, 166, 175, 176, 255, + 189, 129, 131, 133, 140, 142, 144, 146, + 168, 170, 179, 181, 185, 188, 191, 158, + 128, 132, 134, 136, 138, 141, 149, 150, + 160, 161, 162, 163, 166, 175, 177, 178, + 189, 129, 131, 133, 140, 142, 144, 146, + 186, 
190, 255, 133, 137, 142, 143, 150, + 152, 158, 159, 161, 164, 165, 166, 175, + 176, 185, 186, 191, 192, 255, 189, 130, + 131, 133, 150, 154, 177, 179, 187, 138, + 150, 128, 134, 143, 148, 152, 159, 166, + 175, 178, 179, 177, 180, 186, 135, 142, + 144, 153, 177, 180, 185, 187, 188, 136, + 141, 144, 153, 128, 181, 183, 185, 152, + 153, 160, 169, 190, 191, 128, 135, 137, + 172, 177, 191, 128, 132, 134, 135, 136, + 140, 141, 151, 153, 188, 134, 128, 129, + 130, 131, 137, 138, 139, 140, 141, 142, + 143, 144, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 167, + 168, 169, 170, 172, 173, 174, 175, 176, + 177, 179, 181, 182, 183, 188, 189, 190, + 191, 132, 152, 180, 184, 185, 187, 171, + 190, 128, 137, 150, 153, 158, 160, 162, + 164, 167, 173, 177, 180, 143, 130, 141, + 144, 153, 154, 157, 160, 255, 155, 156, + 157, 159, 160, 255, 128, 140, 142, 145, + 146, 148, 160, 177, 178, 180, 128, 145, + 146, 147, 160, 172, 174, 176, 178, 179, + 180, 255, 148, 156, 158, 159, 160, 169, + 170, 255, 139, 142, 144, 153, 160, 255, + 169, 128, 170, 176, 255, 128, 158, 160, + 171, 176, 187, 128, 150, 151, 155, 191, + 149, 158, 160, 188, 128, 137, 144, 153, + 176, 190, 128, 132, 133, 179, 180, 255, + 133, 139, 140, 143, 144, 153, 154, 170, + 180, 255, 128, 130, 131, 160, 161, 173, + 174, 175, 176, 185, 186, 255, 166, 179, + 180, 255, 128, 163, 164, 183, 173, 144, + 146, 148, 168, 169, 177, 178, 180, 181, + 182, 184, 185, 128, 181, 188, 191, 128, + 129, 130, 131, 132, 133, 134, 146, 147, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 140, 143, 170, 174, 191, 255, 165, + 177, 191, 129, 147, 149, 159, 160, 175, + 176, 255, 144, 176, 165, 170, 175, 177, + 180, 255, 191, 168, 174, 176, 255, 128, + 134, 136, 142, 144, 150, 152, 158, 160, + 191, 128, 130, 131, 132, 133, 134, 135, + 139, 140, 141, 133, 170, 175, 177, 181, + 187, 188, 153, 154, 155, 156, 160, 255, + 128, 146, 147, 148, 152, 153, 154, 155, + 156, 158, 159, 160, 161, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 
171, 172, + 173, 174, 175, 176, 129, 255, 191, 128, + 174, 175, 178, 180, 189, 128, 157, 158, + 159, 160, 255, 176, 177, 178, 255, 130, + 134, 139, 163, 167, 168, 255, 128, 129, + 130, 179, 180, 255, 187, 189, 133, 143, + 144, 153, 154, 159, 178, 183, 184, 255, + 128, 137, 138, 165, 166, 173, 176, 255, + 135, 147, 148, 159, 189, 255, 128, 131, + 132, 178, 179, 255, 143, 129, 142, 144, + 153, 154, 164, 166, 175, 176, 185, 186, + 255, 128, 168, 169, 182, 131, 128, 139, + 140, 141, 144, 153, 187, 189, 176, 178, + 180, 183, 184, 190, 191, 129, 160, 170, + 171, 175, 178, 180, 181, 182, 128, 162, + 163, 170, 172, 173, 176, 185, 172, 173, + 174, 175, 180, 181, 182, 183, 184, 185, + 187, 188, 189, 190, 191, 176, 186, 158, + 190, 128, 134, 147, 151, 157, 168, 170, + 182, 184, 188, 128, 143, 160, 175, 179, + 180, 191, 189, 255, 158, 159, 160, 190, + 191, 255, 130, 135, 138, 143, 146, 151, + 154, 156, 185, 187, 144, 145, 146, 147, + 148, 150, 155, 157, 158, 159, 128, 129, + 130, 131, 133, 135, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 148, 149, 152, + 156, 157, 160, 161, 162, 163, 164, 166, + 168, 169, 170, 171, 172, 173, 174, 176, + 177, 153, 155, 178, 179, 189, 160, 145, + 255, 139, 143, 182, 186, 187, 255, 128, + 191, 129, 131, 133, 134, 140, 143, 144, + 147, 149, 151, 153, 179, 184, 186, 128, + 135, 137, 164, 165, 166, 128, 129, 130, + 131, 132, 133, 134, 135, 136, 138, 139, + 140, 141, 146, 147, 150, 151, 152, 153, + 154, 155, 156, 162, 163, 171, 128, 130, + 131, 183, 184, 255, 135, 165, 166, 175, + 176, 190, 131, 175, 187, 188, 190, 255, + 128, 130, 131, 166, 167, 180, 182, 191, + 179, 182, 144, 178, 128, 130, 131, 178, + 179, 255, 155, 129, 132, 133, 137, 141, + 143, 144, 153, 154, 156, 157, 255, 128, + 145, 147, 171, 172, 183, 159, 170, 171, + 175, 176, 185, 186, 255, 189, 128, 131, + 133, 140, 143, 144, 147, 168, 170, 176, + 178, 179, 181, 185, 188, 191, 144, 151, + 128, 132, 135, 136, 139, 141, 157, 161, + 162, 163, 166, 172, 176, 180, 128, 175, + 176, 255, 
134, 132, 135, 136, 143, 144, + 153, 154, 255, 128, 174, 175, 181, 184, + 255, 129, 151, 152, 155, 158, 255, 132, + 129, 143, 144, 153, 154, 255, 128, 170, + 171, 183, 157, 171, 176, 185, 160, 168, + 169, 171, 172, 173, 174, 188, 189, 190, + 161, 167, 144, 173, 176, 180, 128, 175, + 176, 182, 133, 143, 145, 190, 191, 255, + 143, 146, 147, 159, 128, 176, 177, 178, + 128, 136, 144, 153, 157, 158, 160, 163, + 133, 134, 137, 144, 145, 146, 147, 148, + 149, 154, 155, 156, 157, 158, 159, 168, + 169, 170, 150, 153, 165, 169, 173, 255, + 131, 132, 140, 169, 174, 255, 130, 132, + 128, 182, 187, 255, 173, 180, 182, 255, + 132, 155, 159, 161, 175, 160, 163, 184, + 185, 186, 161, 162, 133, 143, 144, 150, + 151, 255, 160, 128, 129, 132, 135, 133, + 134, 129, 160, 255, 192, 255, 176, 255, + 173, 0, 127, 176, 255, 131, 137, 191, + 145, 189, 135, 129, 130, 132, 133, 156, + 128, 133, 144, 154, 176, 139, 159, 150, + 157, 159, 164, 167, 168, 170, 173, 143, + 145, 176, 255, 139, 255, 166, 176, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 166, 170, 172, 178, 150, + 153, 155, 163, 165, 167, 169, 173, 153, + 155, 163, 255, 189, 132, 185, 144, 152, + 161, 164, 255, 188, 129, 131, 190, 255, + 133, 134, 137, 138, 142, 150, 152, 161, + 164, 255, 131, 134, 137, 138, 142, 144, + 146, 175, 178, 180, 182, 255, 134, 138, + 142, 161, 164, 255, 188, 129, 131, 190, + 191, 128, 132, 135, 136, 139, 141, 150, + 151, 162, 163, 130, 190, 191, 151, 128, + 130, 134, 136, 138, 141, 128, 131, 190, + 255, 133, 137, 142, 148, 151, 161, 164, + 255, 128, 132, 134, 136, 138, 141, 149, + 150, 162, 163, 129, 131, 190, 255, 133, + 137, 142, 150, 152, 161, 164, 255, 130, + 131, 138, 150, 143, 148, 152, 159, 178, + 179, 177, 180, 186, 135, 142, 177, 180, + 185, 187, 188, 136, 141, 181, 183, 185, + 152, 153, 190, 191, 177, 191, 128, 132, + 134, 135, 141, 151, 153, 188, 134, 128, + 129, 130, 141, 156, 157, 158, 159, 
160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 171, 190, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 154, 157, 157, 159, 146, + 148, 178, 180, 146, 147, 178, 179, 180, + 255, 148, 156, 158, 255, 139, 142, 169, + 160, 171, 176, 187, 151, 155, 191, 149, + 158, 160, 188, 176, 190, 128, 132, 180, + 255, 133, 170, 180, 255, 128, 130, 161, + 173, 166, 179, 164, 183, 173, 144, 146, + 148, 168, 178, 180, 184, 185, 128, 181, + 188, 191, 128, 129, 131, 179, 181, 183, + 140, 143, 170, 174, 160, 164, 166, 175, + 144, 176, 175, 177, 191, 160, 191, 128, + 130, 170, 175, 153, 154, 153, 154, 155, + 160, 162, 163, 164, 165, 166, 167, 168, + 169, 170, 171, 175, 175, 178, 180, 189, + 158, 159, 176, 177, 130, 134, 139, 163, + 167, 128, 129, 180, 255, 133, 159, 178, + 255, 166, 173, 135, 147, 128, 131, 179, + 255, 129, 164, 166, 255, 169, 182, 131, + 140, 141, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 171, 175, 181, 182, + 163, 170, 172, 173, 172, 184, 187, 190, + 191, 158, 128, 143, 160, 175, 185, 187, + 144, 145, 150, 155, 157, 158, 135, 139, + 141, 168, 171, 189, 160, 182, 186, 191, + 129, 131, 133, 134, 140, 143, 184, 186, + 165, 166, 128, 129, 130, 132, 133, 134, + 135, 136, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 156, 128, 130, 184, + 255, 135, 190, 131, 175, 187, 188, 190, + 255, 128, 130, 167, 180, 179, 128, 130, + 179, 255, 129, 137, 141, 255, 172, 183, + 159, 170, 188, 128, 131, 190, 191, 151, + 128, 132, 135, 136, 139, 141, 162, 163, + 166, 172, 176, 180, 176, 255, 132, 255, + 175, 181, 184, 255, 129, 155, 158, 255, + 129, 255, 171, 183, 157, 171, 171, 172, + 189, 190, 176, 180, 176, 182, 145, 190, + 143, 146, 178, 157, 158, 160, 163, 133, + 134, 137, 168, 169, 170, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 163, 144, + 150, 160, 128, 129, 132, 135, 133, 134, + 129, 160, 255, 192, 255, 170, 173, 181, + 186, 0, 127, 181, 
190, 176, 183, 184, + 185, 186, 191, 192, 255, 130, 131, 137, + 190, 136, 144, 145, 191, 192, 255, 135, + 129, 130, 132, 133, 144, 170, 176, 179, + 156, 128, 133, 144, 154, 160, 191, 176, + 128, 138, 139, 159, 174, 255, 148, 158, + 169, 150, 164, 167, 173, 176, 185, 189, + 190, 192, 255, 144, 143, 145, 146, 175, + 176, 255, 139, 140, 141, 255, 166, 176, + 178, 255, 186, 138, 170, 171, 179, 180, + 181, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 154, 164, 168, 128, 149, 150, 173, + 128, 152, 153, 155, 163, 255, 189, 132, + 185, 144, 152, 161, 164, 176, 177, 255, + 132, 169, 177, 188, 129, 131, 141, 142, + 145, 146, 179, 181, 186, 187, 190, 255, + 142, 158, 133, 134, 137, 138, 143, 150, + 152, 155, 156, 161, 164, 175, 176, 177, + 178, 255, 188, 129, 131, 133, 138, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 182, 184, 185, 190, 255, 157, 131, 134, + 137, 138, 142, 144, 146, 152, 153, 158, + 159, 175, 178, 180, 182, 255, 189, 129, + 131, 133, 141, 143, 145, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 255, 134, + 138, 144, 185, 142, 159, 160, 161, 164, + 255, 189, 129, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 177, 128, 132, 135, 136, 139, + 141, 150, 151, 156, 157, 159, 161, 162, + 163, 130, 131, 156, 133, 138, 142, 144, + 146, 149, 153, 154, 158, 159, 163, 164, + 168, 170, 174, 185, 190, 191, 144, 151, + 128, 130, 134, 136, 138, 141, 189, 128, + 131, 133, 140, 142, 144, 146, 168, 170, + 185, 190, 255, 133, 137, 151, 142, 148, + 152, 154, 155, 159, 160, 161, 164, 255, + 189, 129, 131, 133, 140, 142, 144, 146, + 168, 170, 179, 181, 185, 188, 191, 158, + 128, 132, 134, 136, 138, 141, 149, 150, + 160, 161, 162, 163, 177, 178, 189, 129, + 131, 133, 140, 142, 144, 146, 186, 190, + 255, 133, 137, 142, 143, 150, 152, 158, + 159, 161, 164, 185, 186, 191, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 
187, 138, 150, 128, 134, 143, 148, 152, + 159, 178, 179, 177, 180, 186, 135, 142, + 177, 180, 185, 187, 188, 136, 141, 128, + 181, 183, 185, 152, 153, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 135, 136, 140, 141, 151, 153, 188, 134, + 128, 129, 130, 131, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 177, 179, 181, 182, 183, 188, 189, 190, + 191, 132, 152, 180, 184, 185, 187, 171, + 190, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 154, 157, + 160, 255, 155, 156, 157, 159, 160, 255, + 128, 140, 142, 145, 146, 148, 160, 177, + 178, 180, 128, 145, 146, 147, 160, 172, + 174, 176, 178, 179, 180, 255, 148, 156, + 158, 255, 139, 142, 160, 255, 169, 128, + 170, 176, 255, 128, 158, 160, 171, 176, + 187, 128, 150, 151, 155, 191, 149, 158, + 160, 188, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 170, 180, 255, + 128, 130, 131, 160, 161, 173, 174, 175, + 186, 255, 166, 179, 180, 255, 128, 163, + 164, 183, 173, 144, 146, 148, 168, 169, + 177, 178, 180, 181, 182, 184, 185, 128, + 181, 188, 191, 128, 129, 130, 131, 132, + 133, 134, 146, 147, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 140, 143, 170, + 174, 177, 191, 160, 164, 166, 175, 144, + 176, 165, 170, 175, 177, 180, 255, 191, + 168, 174, 176, 255, 128, 134, 136, 142, + 144, 150, 152, 158, 160, 191, 128, 130, + 132, 133, 134, 133, 170, 175, 187, 188, + 153, 154, 128, 146, 147, 148, 152, 153, + 154, 155, 156, 158, 159, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 129, 255, + 191, 128, 174, 175, 178, 180, 189, 128, + 157, 158, 159, 160, 255, 176, 177, 178, + 255, 130, 134, 139, 163, 167, 168, 255, + 128, 129, 130, 179, 180, 255, 187, 189, + 133, 159, 178, 183, 184, 255, 138, 165, + 166, 173, 176, 255, 135, 147, 148, 159, + 189, 255, 128, 131, 132, 178, 179, 255, + 143, 129, 164, 166, 255, 128, 168, 169, + 182, 131, 128, 139, 140, 
141, 187, 189, + 176, 178, 180, 183, 184, 190, 191, 129, + 160, 170, 171, 175, 178, 180, 181, 182, + 128, 162, 163, 170, 172, 173, 172, 173, + 174, 175, 180, 181, 182, 183, 184, 185, + 187, 188, 189, 190, 191, 176, 186, 158, + 190, 128, 134, 147, 151, 157, 168, 170, + 182, 184, 188, 128, 143, 160, 175, 191, + 189, 255, 158, 159, 160, 190, 130, 135, + 138, 143, 146, 151, 154, 156, 185, 187, + 144, 145, 146, 147, 148, 150, 155, 157, + 158, 159, 128, 129, 130, 131, 133, 135, + 138, 139, 140, 141, 142, 143, 144, 145, + 146, 148, 149, 152, 156, 157, 160, 161, + 162, 163, 164, 166, 168, 169, 170, 171, + 172, 173, 174, 176, 177, 153, 155, 178, + 179, 189, 160, 145, 255, 139, 143, 182, + 186, 187, 255, 128, 191, 129, 131, 133, + 134, 140, 143, 144, 147, 149, 151, 153, + 179, 184, 186, 128, 135, 137, 164, 165, + 166, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 138, 139, 140, 141, 146, 147, + 150, 151, 152, 153, 154, 156, 162, 163, + 171, 128, 130, 131, 183, 184, 255, 135, + 190, 131, 175, 187, 188, 190, 255, 128, + 130, 131, 166, 167, 180, 179, 182, 144, + 178, 128, 130, 131, 178, 179, 255, 154, + 156, 129, 132, 133, 137, 141, 255, 128, + 145, 147, 171, 172, 183, 159, 170, 171, + 255, 189, 128, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 144, 151, 128, 132, 135, 136, + 139, 141, 157, 161, 162, 163, 166, 172, + 176, 180, 128, 175, 176, 255, 134, 132, + 135, 136, 255, 128, 174, 175, 181, 184, + 255, 129, 151, 152, 155, 158, 255, 132, + 129, 255, 128, 170, 171, 183, 157, 171, + 160, 168, 169, 171, 172, 173, 174, 188, + 189, 190, 161, 167, 144, 173, 176, 180, + 128, 175, 176, 182, 133, 143, 145, 190, + 191, 255, 143, 146, 147, 159, 176, 177, + 178, 128, 136, 144, 153, 157, 158, 160, + 163, 133, 134, 137, 144, 145, 146, 147, + 148, 149, 154, 155, 156, 157, 158, 159, + 168, 169, 170, 150, 153, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 160, 163, + 184, 
185, 186, 161, 162, 133, 143, 144, + 150, 151, 255, 160, 128, 129, 132, 135, + 133, 134, 129, 160, 255, 192, 255, 176, + 255, 170, 173, 181, 186, 0, 127, 181, + 190, 176, 183, 184, 185, 186, 191, 192, + 255, 130, 131, 137, 137, 190, 136, 144, + 145, 191, 192, 255, 135, 179, 129, 130, + 132, 133, 144, 170, 176, 178, 156, 128, + 133, 140, 141, 144, 154, 160, 191, 171, + 172, 176, 128, 138, 139, 159, 160, 169, + 174, 255, 148, 158, 169, 150, 164, 167, + 173, 176, 185, 189, 190, 192, 255, 144, + 143, 145, 146, 175, 176, 255, 139, 140, + 141, 255, 166, 176, 178, 255, 184, 186, + 128, 137, 138, 170, 171, 179, 180, 181, + 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 154, 164, 168, 128, 149, 150, 173, 128, + 152, 153, 155, 163, 255, 189, 132, 185, + 144, 176, 152, 161, 164, 165, 166, 175, + 177, 255, 132, 169, 177, 188, 129, 131, + 141, 142, 145, 146, 179, 181, 186, 187, + 190, 255, 142, 158, 133, 134, 137, 138, + 143, 150, 152, 155, 156, 161, 164, 165, + 166, 175, 176, 177, 178, 255, 188, 129, + 131, 133, 138, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 182, 184, 185, 190, + 255, 157, 131, 134, 137, 138, 142, 144, + 146, 152, 153, 158, 159, 165, 166, 175, + 178, 180, 182, 255, 189, 129, 131, 133, + 141, 143, 145, 147, 168, 170, 176, 178, + 179, 181, 185, 188, 255, 134, 138, 144, + 185, 142, 159, 160, 161, 164, 165, 166, + 175, 176, 255, 189, 129, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 177, 128, 132, 135, + 136, 139, 141, 150, 151, 156, 157, 159, + 161, 162, 163, 166, 175, 130, 131, 156, + 133, 138, 142, 144, 146, 149, 153, 154, + 158, 159, 163, 164, 168, 170, 174, 185, + 190, 191, 144, 151, 128, 130, 134, 136, + 138, 141, 166, 175, 189, 128, 131, 133, + 140, 142, 144, 146, 168, 170, 185, 190, + 255, 133, 137, 151, 142, 148, 152, 154, + 155, 159, 160, 161, 164, 165, 166, 175, + 176, 255, 189, 129, 131, 133, 
140, 142, + 144, 146, 168, 170, 179, 181, 185, 188, + 191, 158, 128, 132, 134, 136, 138, 141, + 149, 150, 160, 161, 162, 163, 166, 175, + 177, 178, 189, 129, 131, 133, 140, 142, + 144, 146, 186, 190, 255, 133, 137, 142, + 143, 150, 152, 158, 159, 161, 164, 165, + 166, 175, 176, 185, 186, 191, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 187, 138, 150, 128, 134, 143, 148, 152, + 159, 166, 175, 178, 179, 177, 180, 186, + 135, 142, 144, 153, 177, 180, 185, 187, + 188, 136, 141, 144, 153, 128, 181, 183, + 185, 152, 153, 160, 169, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 135, 136, 140, 141, 151, 153, 188, 134, + 128, 129, 130, 131, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 165, 167, 168, 169, 170, 172, 173, 174, + 175, 176, 177, 179, 181, 182, 183, 188, + 189, 190, 191, 132, 152, 180, 184, 185, + 187, 171, 190, 128, 137, 150, 153, 158, + 160, 162, 164, 167, 173, 177, 180, 143, + 130, 141, 144, 153, 154, 157, 160, 255, + 155, 156, 157, 159, 160, 255, 128, 140, + 142, 145, 146, 148, 160, 177, 178, 180, + 128, 145, 146, 147, 160, 172, 174, 176, + 178, 179, 180, 255, 148, 156, 158, 159, + 160, 169, 170, 255, 139, 142, 144, 153, + 160, 255, 169, 128, 170, 176, 255, 128, + 158, 160, 171, 176, 187, 128, 150, 151, + 155, 191, 149, 158, 160, 188, 128, 137, + 144, 153, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 143, 144, 153, + 154, 170, 180, 255, 128, 130, 131, 160, + 161, 173, 174, 175, 176, 185, 186, 255, + 166, 179, 180, 255, 128, 163, 164, 183, + 173, 144, 146, 148, 168, 169, 177, 178, + 180, 181, 182, 184, 185, 128, 181, 188, + 191, 128, 129, 130, 131, 132, 133, 134, + 146, 147, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 164, 140, 143, 152, 153, + 170, 174, 191, 255, 132, 165, 177, 191, + 129, 147, 149, 159, 160, 175, 176, 255, + 144, 176, 165, 170, 175, 177, 180, 255, + 191, 168, 174, 176, 255, 128, 134, 136, + 142, 144, 150, 152, 158, 160, 191, 128, + 130, 132, 
133, 134, 133, 170, 175, 187, + 188, 153, 154, 128, 146, 147, 148, 152, + 153, 154, 155, 156, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 129, + 255, 191, 128, 174, 175, 178, 180, 189, + 128, 157, 158, 159, 160, 255, 176, 177, + 178, 255, 130, 134, 139, 163, 167, 168, + 255, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 143, 144, 153, 154, 159, 178, + 183, 184, 255, 128, 137, 138, 165, 166, + 173, 176, 255, 135, 147, 148, 159, 189, + 255, 128, 131, 132, 178, 179, 255, 143, + 129, 142, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 144, 148, + 128, 143, 160, 175, 179, 180, 191, 189, + 255, 158, 159, 160, 190, 130, 135, 138, + 143, 146, 151, 154, 156, 185, 187, 144, + 145, 146, 147, 148, 150, 155, 157, 158, + 159, 128, 129, 130, 131, 133, 135, 138, + 139, 140, 141, 142, 143, 144, 145, 146, + 148, 149, 152, 156, 157, 160, 161, 162, + 163, 164, 166, 168, 169, 170, 171, 172, + 173, 174, 176, 177, 153, 155, 178, 179, + 189, 160, 145, 255, 139, 143, 182, 186, + 187, 255, 128, 191, 129, 131, 133, 134, + 140, 143, 144, 147, 149, 151, 153, 179, + 184, 186, 128, 135, 137, 164, 165, 166, + 128, 129, 130, 131, 132, 133, 134, 135, + 136, 138, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 155, 156, 162, 163, + 171, 128, 130, 131, 183, 184, 255, 135, + 165, 166, 175, 176, 190, 131, 175, 187, + 188, 190, 255, 128, 130, 131, 166, 167, + 180, 182, 191, 179, 182, 144, 178, 128, + 130, 131, 178, 179, 255, 155, 129, 132, + 133, 137, 141, 143, 144, 153, 154, 156, + 157, 255, 128, 145, 147, 171, 172, 183, + 159, 170, 171, 175, 176, 185, 186, 255, + 189, 128, 131, 133, 140, 143, 144, 
147, + 168, 170, 176, 178, 179, 181, 185, 188, + 191, 144, 151, 128, 132, 135, 136, 139, + 141, 157, 161, 162, 163, 166, 172, 176, + 180, 128, 175, 176, 255, 134, 132, 135, + 136, 143, 144, 153, 154, 255, 128, 174, + 175, 181, 184, 255, 129, 151, 152, 155, + 158, 255, 132, 129, 143, 144, 153, 154, + 255, 128, 170, 171, 183, 157, 171, 176, + 185, 160, 168, 169, 171, 172, 173, 174, + 188, 189, 190, 161, 167, 144, 173, 176, + 180, 128, 175, 176, 182, 133, 143, 145, + 190, 191, 255, 143, 146, 147, 159, 176, + 177, 178, 128, 136, 144, 153, 157, 158, + 160, 163, 133, 134, 137, 144, 145, 146, + 147, 148, 149, 154, 155, 156, 157, 158, + 159, 168, 169, 170, 150, 153, 165, 169, + 173, 255, 131, 132, 140, 169, 174, 255, + 130, 132, 128, 182, 187, 255, 173, 180, + 182, 255, 132, 155, 159, 161, 175, 160, + 163, 184, 185, 186, 161, 162, 133, 143, + 144, 150, 151, 255, 160, 128, 129, 132, + 135, 133, 134, 129, 160, 255, 192, 255, + 176, 255, 170, 173, 181, 186, 0, 127, + 181, 190, 176, 183, 184, 185, 186, 191, + 192, 255, 130, 131, 137, 190, 136, 144, + 145, 191, 192, 255, 135, 179, 129, 130, + 132, 133, 144, 170, 176, 178, 156, 128, + 133, 144, 154, 160, 191, 171, 176, 128, + 138, 139, 159, 160, 169, 174, 255, 148, + 158, 169, 150, 164, 167, 173, 176, 185, + 189, 190, 192, 255, 144, 143, 145, 146, + 175, 176, 255, 139, 140, 141, 255, 166, + 176, 178, 255, 186, 128, 137, 138, 170, + 171, 179, 180, 181, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 154, 164, 168, 128, + 149, 150, 173, 128, 152, 153, 155, 163, + 255, 189, 132, 185, 144, 176, 152, 161, + 164, 165, 166, 175, 177, 255, 132, 169, + 177, 188, 129, 131, 141, 142, 145, 146, + 179, 181, 186, 187, 190, 255, 142, 158, + 133, 134, 137, 138, 143, 150, 152, 155, + 156, 161, 164, 165, 166, 175, 176, 177, + 178, 255, 188, 129, 131, 133, 138, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 182, 184, 185, 190, 
255, 157, 131, 134, + 137, 138, 142, 144, 146, 152, 153, 158, + 159, 165, 166, 175, 178, 180, 182, 255, + 189, 129, 131, 133, 141, 143, 145, 147, + 168, 170, 176, 178, 179, 181, 185, 188, + 255, 134, 138, 144, 185, 142, 159, 160, + 161, 164, 165, 166, 175, 176, 255, 189, + 129, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 177, 128, 132, 135, 136, 139, 141, 150, + 151, 156, 157, 159, 161, 162, 163, 166, + 175, 130, 131, 156, 133, 138, 142, 144, + 146, 149, 153, 154, 158, 159, 163, 164, + 168, 170, 174, 185, 190, 191, 144, 151, + 128, 130, 134, 136, 138, 141, 166, 175, + 189, 128, 131, 133, 140, 142, 144, 146, + 168, 170, 185, 190, 255, 133, 137, 151, + 142, 148, 152, 154, 155, 159, 160, 161, + 164, 165, 166, 175, 176, 255, 189, 129, + 131, 133, 140, 142, 144, 146, 168, 170, + 179, 181, 185, 188, 191, 158, 128, 132, + 134, 136, 138, 141, 149, 150, 160, 161, + 162, 163, 166, 175, 177, 178, 189, 129, + 131, 133, 140, 142, 144, 146, 186, 190, + 255, 133, 137, 142, 143, 150, 152, 158, + 159, 161, 164, 165, 166, 175, 176, 185, + 186, 191, 192, 255, 189, 130, 131, 133, + 150, 154, 177, 179, 187, 138, 150, 128, + 134, 143, 148, 152, 159, 166, 175, 178, + 179, 177, 180, 186, 135, 142, 144, 153, + 177, 180, 185, 187, 188, 136, 141, 144, + 153, 128, 181, 183, 185, 152, 153, 160, + 169, 190, 191, 128, 135, 137, 172, 177, + 191, 128, 132, 134, 135, 136, 140, 141, + 151, 153, 188, 134, 128, 129, 130, 131, + 137, 138, 139, 140, 141, 142, 143, 144, + 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 167, 168, 169, + 170, 172, 173, 174, 175, 176, 177, 179, + 181, 182, 183, 188, 189, 190, 191, 132, + 152, 180, 184, 185, 187, 171, 190, 128, + 137, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 144, 153, + 154, 157, 160, 255, 155, 156, 157, 159, + 160, 255, 128, 140, 142, 145, 146, 148, + 160, 177, 178, 180, 128, 145, 146, 147, + 160, 172, 174, 176, 178, 179, 180, 255, + 148, 156, 158, 159, 160, 169, 170, 255, + 
139, 142, 144, 153, 160, 255, 169, 128, + 170, 176, 255, 128, 158, 160, 171, 176, + 187, 128, 150, 151, 155, 191, 149, 158, + 160, 188, 128, 137, 144, 153, 176, 190, + 128, 132, 133, 179, 180, 255, 133, 139, + 140, 143, 144, 153, 154, 170, 180, 255, + 128, 130, 131, 160, 161, 173, 174, 175, + 176, 185, 186, 255, 166, 179, 180, 255, + 128, 163, 164, 183, 173, 144, 146, 148, + 168, 169, 177, 178, 180, 181, 182, 184, + 185, 128, 181, 188, 191, 128, 129, 130, + 131, 132, 133, 134, 146, 147, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 140, + 143, 170, 174, 191, 255, 165, 177, 191, + 129, 147, 149, 159, 160, 175, 176, 255, + 144, 176, 165, 170, 175, 177, 180, 255, + 191, 168, 174, 176, 255, 128, 134, 136, + 142, 144, 150, 152, 158, 160, 191, 128, + 130, 131, 132, 133, 134, 135, 139, 140, + 141, 133, 170, 175, 177, 181, 187, 188, + 153, 154, 155, 156, 160, 255, 128, 146, + 147, 148, 152, 153, 154, 155, 156, 158, + 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 129, 255, 191, 128, 174, 175, + 178, 180, 189, 128, 157, 158, 159, 160, + 255, 176, 177, 178, 255, 130, 134, 139, + 163, 167, 168, 255, 128, 129, 130, 179, + 180, 255, 187, 189, 133, 143, 144, 153, + 154, 159, 178, 183, 184, 255, 128, 137, + 138, 165, 166, 173, 176, 255, 135, 147, + 148, 159, 189, 255, 128, 131, 132, 178, + 179, 255, 143, 129, 142, 144, 153, 154, + 164, 166, 175, 176, 185, 186, 255, 128, + 168, 169, 182, 131, 128, 139, 140, 141, + 144, 153, 187, 189, 176, 178, 180, 183, + 184, 190, 191, 129, 160, 170, 171, 175, + 178, 180, 181, 182, 128, 162, 163, 170, + 172, 173, 176, 185, 172, 173, 174, 175, + 180, 181, 182, 183, 184, 185, 187, 188, + 189, 190, 191, 176, 186, 158, 190, 128, + 134, 147, 151, 157, 168, 170, 182, 184, + 188, 128, 143, 160, 175, 179, 180, 191, + 189, 255, 158, 159, 160, 190, 191, 255, + 130, 135, 138, 143, 146, 151, 154, 156, + 185, 187, 144, 145, 146, 147, 148, 150, + 155, 157, 158, 159, 128, 129, 130, 131, + 133, 135, 138, 139, 140, 
141, 142, 143, + 144, 145, 146, 148, 149, 152, 156, 157, + 160, 161, 162, 163, 164, 166, 168, 169, + 170, 171, 172, 173, 174, 176, 177, 153, + 155, 178, 179, 189, 160, 145, 255, 139, + 143, 182, 186, 187, 255, 128, 191, 129, + 131, 133, 134, 140, 143, 144, 147, 149, + 151, 153, 179, 184, 186, 128, 135, 137, + 164, 165, 166, 128, 129, 130, 131, 132, + 133, 134, 135, 136, 138, 139, 140, 141, + 146, 147, 150, 151, 152, 153, 154, 155, + 156, 162, 163, 171, 128, 130, 131, 183, + 184, 255, 135, 165, 166, 175, 176, 190, + 131, 175, 187, 188, 190, 255, 128, 130, + 131, 166, 167, 180, 182, 191, 179, 182, + 144, 178, 128, 130, 131, 178, 179, 255, + 155, 129, 132, 133, 137, 141, 143, 144, + 153, 154, 156, 157, 255, 128, 145, 147, + 171, 172, 183, 159, 170, 171, 175, 176, + 185, 186, 255, 189, 128, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 144, 151, 128, 132, + 135, 136, 139, 141, 157, 161, 162, 163, + 166, 172, 176, 180, 128, 175, 176, 255, + 134, 132, 135, 136, 143, 144, 153, 154, + 255, 128, 174, 175, 181, 184, 255, 129, + 151, 152, 155, 158, 255, 132, 129, 143, + 144, 153, 154, 255, 128, 170, 171, 183, + 157, 171, 176, 185, 160, 168, 169, 171, + 172, 173, 174, 188, 189, 190, 161, 167, + 144, 173, 176, 180, 128, 175, 176, 182, + 133, 143, 145, 190, 191, 255, 143, 146, + 147, 159, 128, 176, 177, 178, 128, 136, + 144, 153, 157, 158, 160, 163, 133, 134, + 137, 144, 145, 146, 147, 148, 149, 154, + 155, 156, 157, 158, 159, 168, 169, 170, + 150, 153, 165, 169, 173, 255, 131, 132, + 140, 169, 174, 255, 130, 132, 128, 182, + 187, 255, 173, 180, 182, 255, 132, 155, + 159, 161, 175, 160, 163, 184, 185, 186, + 161, 162, 133, 143, 144, 150, 151, 255, + 160, 128, 129, 132, 135, 133, 134, 129, + 160, 255, 192, 255, 176, 255, 173, 0, + 127, 176, 255, 131, 137, 191, 145, 189, + 135, 129, 130, 132, 133, 156, 128, 133, + 144, 154, 176, 139, 159, 150, 157, 159, + 164, 167, 168, 170, 173, 143, 145, 176, + 255, 139, 255, 166, 176, 171, 179, 160, + 161, 163, 
164, 165, 167, 169, 171, 173, + 174, 175, 176, 177, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 166, 170, 172, 178, 128, 129, 130, + 141, 156, 157, 158, 159, 160, 162, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 179, 183, 128, 129, 131, 179, 181, 183, + 128, 130, 153, 154, 155, 160, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 175, 172, 184, 187, 190, 191, 144, 145, + 150, 155, 157, 158, 160, 154, 164, 168, + 128, 149, 150, 173, 128, 152, 153, 155, + 189, 132, 185, 144, 176, 152, 161, 164, + 165, 166, 175, 177, 255, 132, 169, 177, + 188, 129, 131, 141, 142, 145, 146, 179, + 181, 186, 187, 190, 255, 142, 158, 133, + 134, 137, 138, 143, 150, 152, 155, 156, + 161, 164, 165, 166, 175, 176, 177, 178, + 255, 188, 129, 131, 133, 138, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 182, + 184, 185, 190, 255, 157, 131, 134, 137, + 138, 142, 144, 146, 152, 153, 158, 159, + 165, 166, 175, 178, 180, 182, 255, 189, + 129, 131, 133, 141, 143, 145, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 255, + 134, 138, 144, 185, 142, 159, 160, 161, + 164, 165, 166, 175, 176, 255, 189, 129, + 131, 133, 140, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 185, 188, 191, 177, + 128, 132, 135, 136, 139, 141, 150, 151, + 156, 157, 159, 161, 162, 163, 166, 175, + 130, 131, 156, 133, 138, 142, 144, 146, + 149, 153, 154, 158, 159, 163, 164, 168, + 170, 174, 185, 190, 191, 144, 151, 128, + 130, 134, 136, 138, 141, 166, 175, 189, + 128, 131, 133, 140, 142, 144, 146, 168, + 170, 185, 190, 255, 133, 137, 151, 142, + 148, 152, 154, 155, 159, 160, 161, 164, + 165, 166, 175, 176, 255, 189, 129, 131, + 133, 140, 142, 144, 146, 168, 170, 179, + 181, 185, 188, 191, 158, 128, 132, 134, + 136, 138, 141, 149, 150, 160, 161, 162, + 163, 166, 175, 177, 178, 189, 129, 131, + 133, 140, 142, 144, 146, 186, 190, 255, + 133, 137, 142, 143, 150, 152, 158, 159, + 161, 164, 165, 166, 175, 176, 185, 186, + 191, 192, 255, 189, 130, 131, 133, 150, + 154, 177, 179, 187, 138, 150, 128, 
134, + 143, 148, 152, 159, 166, 175, 178, 179, + 135, 142, 144, 153, 136, 141, 144, 153, + 128, 181, 183, 185, 152, 153, 160, 169, + 190, 191, 128, 135, 137, 172, 177, 191, + 128, 132, 134, 135, 136, 140, 141, 151, + 153, 188, 128, 137, 150, 153, 158, 160, + 162, 164, 167, 173, 177, 180, 143, 130, + 141, 144, 153, 154, 157, 160, 255, 0, + 127, 170, 173, 181, 183, 186, 0, 127, + 181, 190, 176, 183, 184, 185, 186, 191, + 192, 255, 130, 131, 137, 190, 136, 144, + 145, 191, 192, 255, 135, 179, 180, 129, + 130, 132, 133, 144, 170, 176, 178, 156, + 128, 133, 144, 154, 160, 191, 171, 176, + 128, 138, 139, 159, 160, 169, 174, 255, + 148, 158, 169, 150, 164, 167, 173, 176, + 185, 189, 190, 192, 255, 144, 143, 145, + 146, 175, 176, 255, 139, 140, 141, 255, + 166, 176, 178, 255, 186, 128, 137, 138, + 170, 171, 179, 180, 181, 160, 161, 162, + 163, 164, 165, 166, 167, 168, 169, 170, + 171, 172, 173, 174, 175, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 154, 164, 168, + 128, 149, 150, 173, 128, 152, 153, 155, + 163, 255, 189, 132, 185, 144, 176, 152, + 161, 164, 165, 166, 175, 177, 255, 132, + 169, 177, 188, 129, 131, 141, 142, 145, + 146, 179, 181, 186, 187, 190, 255, 142, + 158, 133, 134, 137, 138, 143, 150, 152, + 155, 156, 161, 164, 165, 166, 175, 176, + 177, 178, 255, 188, 129, 131, 133, 138, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 182, 184, 185, 190, 255, 157, 131, + 134, 137, 138, 142, 144, 146, 152, 153, + 158, 159, 165, 166, 175, 178, 180, 182, + 255, 189, 129, 131, 133, 141, 143, 145, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 255, 134, 138, 144, 185, 142, 159, + 160, 161, 164, 165, 166, 175, 176, 255, + 189, 129, 131, 133, 140, 143, 144, 147, + 168, 170, 176, 178, 179, 181, 185, 188, + 191, 177, 128, 132, 135, 136, 139, 141, + 150, 151, 156, 157, 159, 161, 162, 163, + 166, 175, 130, 131, 156, 133, 138, 142, + 144, 146, 149, 153, 154, 158, 159, 163, + 164, 168, 170, 174, 185, 190, 191, 144, + 151, 128, 130, 134, 
136, 138, 141, 166, + 175, 189, 128, 131, 133, 140, 142, 144, + 146, 168, 170, 185, 190, 255, 133, 137, + 151, 142, 148, 152, 154, 155, 159, 160, + 161, 164, 165, 166, 175, 176, 255, 189, + 129, 131, 133, 140, 142, 144, 146, 168, + 170, 179, 181, 185, 188, 191, 158, 128, + 132, 134, 136, 138, 141, 149, 150, 160, + 161, 162, 163, 166, 175, 177, 178, 189, + 129, 131, 133, 140, 142, 144, 146, 186, + 190, 255, 133, 137, 142, 143, 150, 152, + 158, 159, 161, 164, 165, 166, 175, 176, + 185, 186, 191, 192, 255, 189, 130, 131, + 133, 150, 154, 177, 179, 187, 138, 150, + 128, 134, 143, 148, 152, 159, 166, 175, + 178, 179, 177, 180, 186, 135, 142, 144, + 153, 177, 180, 185, 187, 188, 136, 141, + 144, 153, 128, 181, 183, 185, 152, 153, + 160, 169, 190, 191, 128, 135, 137, 172, + 177, 191, 128, 132, 134, 135, 136, 140, + 141, 151, 153, 188, 134, 128, 129, 130, + 131, 132, 135, 137, 138, 139, 140, 141, + 142, 143, 144, 153, 154, 155, 156, 157, + 158, 159, 160, 161, 162, 163, 164, 165, + 167, 168, 169, 170, 172, 173, 174, 175, + 176, 177, 179, 181, 182, 183, 188, 189, + 190, 191, 133, 134, 136, 152, 180, 184, + 185, 187, 171, 190, 128, 137, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 144, 153, 154, 157, 160, + 255, 192, 255, 155, 156, 157, 159, 160, + 255, 128, 140, 142, 145, 146, 148, 160, + 177, 178, 180, 128, 145, 146, 147, 160, + 172, 174, 176, 178, 179, 180, 255, 148, + 156, 158, 159, 160, 169, 170, 255, 139, + 142, 144, 153, 160, 255, 169, 128, 170, + 176, 255, 128, 158, 160, 171, 176, 187, + 128, 150, 151, 155, 191, 149, 158, 160, + 188, 128, 137, 144, 153, 176, 190, 128, + 132, 133, 179, 180, 255, 133, 139, 140, + 143, 144, 153, 154, 170, 180, 255, 128, + 130, 131, 160, 161, 173, 174, 175, 176, + 185, 186, 255, 166, 179, 180, 255, 128, + 163, 164, 183, 173, 144, 146, 148, 168, + 169, 177, 178, 180, 181, 182, 184, 185, + 128, 181, 188, 191, 128, 129, 130, 131, + 132, 133, 134, 146, 147, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 164, 167, + 
140, 143, 152, 153, 170, 174, 191, 255, + 165, 177, 191, 129, 147, 149, 159, 160, + 175, 176, 255, 144, 176, 165, 170, 175, + 177, 180, 255, 191, 168, 174, 176, 255, + 128, 134, 136, 142, 144, 150, 152, 158, + 160, 191, 128, 130, 132, 133, 134, 136, + 137, 133, 170, 175, 187, 188, 153, 154, + 133, 173, 177, 255, 143, 159, 160, 186, + 187, 255, 128, 158, 173, 0, 127, 176, + 255, 131, 137, 191, 145, 189, 135, 129, + 130, 132, 133, 156, 128, 133, 144, 154, + 176, 139, 159, 150, 157, 159, 164, 167, + 168, 170, 173, 143, 145, 176, 255, 139, + 255, 166, 176, 171, 179, 160, 161, 163, + 164, 165, 167, 169, 171, 173, 174, 175, + 176, 177, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 166, + 170, 172, 178, 150, 153, 155, 163, 165, + 167, 169, 173, 153, 155, 163, 255, 189, + 132, 185, 144, 152, 161, 164, 255, 188, + 129, 131, 190, 255, 133, 134, 137, 138, + 142, 150, 152, 161, 164, 255, 131, 134, + 137, 138, 142, 144, 146, 175, 178, 180, + 182, 255, 134, 138, 142, 161, 164, 255, + 188, 129, 131, 190, 191, 128, 132, 135, + 136, 139, 141, 150, 151, 162, 163, 130, + 190, 191, 151, 128, 130, 134, 136, 138, + 141, 128, 131, 190, 255, 133, 137, 142, + 148, 151, 161, 164, 255, 128, 132, 134, + 136, 138, 141, 149, 150, 162, 163, 129, + 131, 190, 255, 133, 137, 142, 150, 152, + 161, 164, 255, 130, 131, 138, 150, 143, + 148, 152, 159, 178, 179, 177, 180, 186, + 135, 142, 177, 180, 185, 187, 188, 136, + 141, 181, 183, 185, 152, 153, 190, 191, + 177, 191, 128, 132, 134, 135, 141, 151, + 153, 188, 134, 128, 129, 130, 132, 135, + 141, 156, 157, 158, 159, 160, 162, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 179, 183, 133, 134, 171, 190, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 154, 157, 192, 255, 157, + 159, 146, 148, 178, 180, 146, 147, 178, + 179, 180, 255, 148, 156, 158, 255, 139, + 142, 169, 160, 171, 176, 187, 151, 155, + 191, 149, 158, 160, 188, 176, 190, 128, + 132, 180, 255, 133, 170, 180, 255, 128, + 130, 161, 173, 166, 179, 
164, 183, 173, + 144, 146, 148, 168, 178, 180, 184, 185, + 128, 181, 188, 191, 128, 129, 131, 179, + 181, 183, 140, 143, 170, 174, 160, 164, + 166, 175, 144, 176, 175, 177, 191, 160, + 191, 128, 130, 132, 133, 134, 136, 137, + 170, 175, 153, 154, 177, 255, 143, 255, + 160, 190, 153, 154, 155, 160, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 175, 176, 177, 255, 175, 178, 180, 189, + 158, 159, 176, 177, 130, 134, 139, 163, + 167, 128, 129, 180, 255, 133, 159, 178, + 255, 166, 173, 135, 147, 160, 188, 128, + 131, 179, 255, 129, 164, 166, 255, 169, + 182, 131, 140, 141, 187, 189, 176, 178, + 180, 183, 184, 190, 191, 129, 171, 175, + 181, 182, 163, 170, 172, 173, 158, 159, + 160, 255, 164, 175, 135, 138, 188, 255, + 172, 184, 187, 190, 191, 158, 128, 143, + 160, 175, 158, 190, 130, 135, 138, 143, + 146, 151, 154, 156, 185, 187, 144, 145, + 150, 155, 157, 158, 135, 139, 141, 168, + 171, 189, 160, 182, 186, 191, 129, 131, + 133, 134, 140, 143, 184, 186, 165, 166, + 128, 129, 130, 132, 133, 134, 135, 136, + 139, 140, 141, 146, 147, 150, 151, 152, + 153, 154, 156, 128, 130, 184, 255, 135, + 190, 131, 175, 187, 188, 190, 255, 128, + 130, 167, 180, 179, 128, 130, 179, 255, + 129, 137, 141, 255, 172, 183, 159, 170, + 188, 128, 131, 190, 191, 151, 128, 132, + 135, 136, 139, 141, 162, 163, 166, 172, + 176, 180, 176, 255, 132, 255, 175, 181, + 184, 255, 129, 155, 158, 255, 129, 255, + 171, 183, 157, 171, 171, 172, 189, 190, + 176, 180, 176, 182, 145, 190, 143, 146, + 178, 157, 158, 160, 163, 133, 134, 137, + 168, 169, 170, 165, 169, 173, 255, 131, + 132, 140, 169, 174, 255, 130, 132, 128, + 182, 187, 255, 173, 180, 182, 255, 132, + 155, 159, 161, 175, 163, 144, 150, 160, + 128, 129, 132, 135, 133, 134, 129, 160, + 255, 128, 146, 147, 148, 152, 153, 154, + 155, 156, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 129, 157, 177, + 255, 191, 128, 174, 175, 178, 180, 189, + 128, 157, 158, 159, 160, 255, 176, 177, + 178, 
255, 130, 134, 139, 163, 167, 168, + 255, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 143, 144, 153, 154, 159, 178, + 183, 184, 255, 128, 137, 138, 165, 166, + 173, 176, 255, 135, 147, 148, 159, 160, + 188, 189, 255, 128, 131, 132, 178, 179, + 255, 143, 129, 142, 144, 153, 154, 164, + 166, 175, 176, 185, 186, 255, 128, 168, + 169, 182, 131, 128, 139, 140, 141, 144, + 153, 187, 189, 176, 178, 180, 183, 184, + 190, 191, 129, 160, 170, 171, 175, 178, + 180, 181, 182, 128, 162, 163, 170, 172, + 173, 176, 185, 158, 159, 160, 255, 164, + 175, 135, 138, 188, 255, 172, 173, 174, + 175, 180, 181, 182, 183, 184, 185, 187, + 188, 189, 190, 191, 176, 186, 158, 190, + 128, 134, 147, 151, 157, 168, 170, 182, + 184, 188, 147, 128, 143, 160, 175, 179, + 180, 191, 189, 255, 158, 190, 130, 135, + 138, 143, 146, 151, 154, 156, 185, 187, + 144, 145, 146, 147, 148, 150, 155, 157, + 158, 159, 128, 129, 130, 131, 133, 135, + 138, 139, 140, 141, 142, 143, 144, 145, + 146, 148, 149, 152, 156, 157, 160, 161, + 162, 163, 164, 166, 168, 169, 170, 171, + 172, 173, 174, 176, 177, 153, 155, 178, + 179, 189, 160, 145, 255, 139, 143, 182, + 186, 187, 255, 128, 191, 129, 131, 133, + 134, 140, 143, 144, 147, 149, 151, 153, + 179, 184, 186, 128, 135, 137, 164, 165, + 166, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 138, 139, 140, 141, 146, 147, + 150, 151, 152, 153, 154, 155, 156, 162, + 163, 171, 128, 130, 131, 183, 184, 255, + 135, 165, 166, 175, 176, 190, 131, 175, + 187, 188, 190, 255, 128, 130, 131, 166, + 167, 180, 182, 191, 179, 182, 144, 178, + 128, 130, 131, 178, 179, 255, 155, 129, + 132, 133, 137, 141, 143, 144, 153, 154, + 156, 157, 255, 128, 145, 147, 171, 172, + 183, 159, 170, 171, 175, 176, 185, 186, + 255, 189, 128, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 144, 151, 128, 132, 135, 136, + 139, 141, 157, 161, 162, 163, 166, 172, + 176, 180, 128, 175, 176, 255, 134, 132, + 135, 136, 143, 144, 153, 154, 255, 128, + 174, 175, 181, 184, 255, 129, 
151, 152, + 155, 158, 255, 132, 129, 143, 144, 153, + 154, 255, 128, 170, 171, 183, 157, 171, + 176, 185, 160, 168, 169, 171, 172, 173, + 174, 188, 189, 190, 161, 167, 144, 173, + 176, 180, 128, 175, 176, 182, 133, 143, + 145, 190, 191, 255, 143, 146, 147, 159, + 176, 177, 178, 128, 136, 144, 153, 157, + 158, 160, 163, 133, 134, 137, 144, 145, + 146, 147, 148, 149, 154, 155, 156, 157, + 158, 159, 168, 169, 170, 150, 153, 165, + 169, 173, 255, 131, 132, 140, 169, 174, + 255, 130, 132, 128, 182, 187, 255, 173, + 180, 182, 255, 132, 155, 159, 161, 175, + 160, 163, 184, 185, 186, 161, 162, 133, + 143, 144, 150, 151, 255, 160, 128, 129, + 132, 135, 133, 134, 129, 160, 255, 192, + 255, 176, 255, 155, 156, 157, 159, 160, + 255, 128, 140, 142, 145, 146, 148, 160, + 177, 178, 180, 128, 145, 146, 147, 160, + 172, 174, 176, 178, 179, 148, 156, 158, + 159, 160, 169, 170, 255, 139, 142, 144, + 153, 160, 255, 169, 128, 170, 176, 255, + 128, 158, 160, 171, 176, 187, 128, 150, + 151, 155, 128, 137, 144, 153, 176, 190, + 128, 132, 133, 179, 180, 255, 133, 139, + 140, 143, 144, 153, 154, 170, 180, 255, + 128, 130, 131, 160, 161, 173, 174, 175, + 176, 185, 186, 255, 166, 179, 180, 255, + 128, 163, 164, 183, 173, 144, 146, 148, + 168, 169, 177, 178, 180, 181, 182, 184, + 185, 140, 143, 168, 169, 170, 174, 191, + 255, 165, 177, 191, 129, 147, 149, 159, + 160, 175, 176, 255, 165, 170, 175, 177, + 180, 255, 191, 168, 174, 176, 255, 128, + 134, 136, 142, 144, 150, 152, 158, 160, + 191, 128, 153, 155, 255, 173, 0, 127, + 176, 255, 131, 137, 191, 145, 189, 135, + 129, 130, 132, 133, 156, 128, 133, 144, + 154, 176, 139, 159, 150, 157, 159, 164, + 167, 168, 170, 173, 143, 145, 176, 255, + 139, 255, 166, 176, 171, 179, 160, 161, + 163, 164, 165, 167, 169, 171, 173, 174, + 175, 176, 177, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 166, 170, 172, 178, 150, 153, 155, 163, + 165, 167, 169, 173, 153, 155, 163, 255, + 189, 132, 185, 144, 152, 161, 164, 255, + 188, 129, 131, 
190, 255, 133, 134, 137, + 138, 142, 150, 152, 161, 164, 255, 131, + 134, 137, 138, 142, 144, 146, 175, 178, + 180, 182, 255, 134, 138, 142, 161, 164, + 255, 188, 129, 131, 190, 191, 128, 132, + 135, 136, 139, 141, 150, 151, 162, 163, + 130, 190, 191, 151, 128, 130, 134, 136, + 138, 141, 128, 131, 190, 255, 133, 137, + 142, 148, 151, 161, 164, 255, 128, 132, + 134, 136, 138, 141, 149, 150, 162, 163, + 129, 131, 190, 255, 133, 137, 142, 150, + 152, 161, 164, 255, 130, 131, 138, 150, + 143, 148, 152, 159, 178, 179, 177, 180, + 186, 135, 142, 177, 180, 185, 187, 188, + 136, 141, 181, 183, 185, 152, 153, 190, + 191, 177, 191, 128, 132, 134, 135, 141, + 151, 153, 188, 134, 128, 129, 130, 141, + 156, 157, 158, 159, 160, 162, 164, 168, + 169, 170, 172, 173, 174, 175, 176, 179, + 183, 171, 190, 150, 153, 158, 160, 162, + 164, 167, 173, 177, 180, 143, 130, 141, + 154, 157, 157, 159, 146, 148, 178, 180, + 146, 147, 178, 179, 180, 255, 148, 156, + 158, 255, 139, 142, 169, 160, 171, 176, + 187, 151, 155, 191, 149, 158, 160, 188, + 176, 190, 128, 132, 180, 255, 133, 170, + 180, 255, 128, 130, 161, 173, 166, 179, + 164, 183, 173, 144, 146, 148, 168, 178, + 180, 184, 185, 128, 181, 188, 191, 128, + 129, 131, 179, 181, 183, 140, 143, 170, + 174, 160, 164, 166, 175, 144, 176, 175, + 177, 191, 160, 191, 128, 130, 170, 175, + 153, 154, 153, 154, 155, 160, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 175, 175, 178, 180, 189, 158, 159, 176, + 177, 130, 134, 139, 163, 167, 128, 129, + 180, 255, 133, 159, 178, 255, 166, 173, + 135, 147, 128, 131, 179, 255, 129, 164, + 166, 255, 169, 182, 131, 140, 141, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 171, 175, 181, 182, 163, 170, 172, + 173, 172, 184, 187, 190, 191, 158, 128, + 143, 160, 175, 185, 187, 144, 145, 150, + 155, 157, 158, 135, 139, 141, 168, 171, + 189, 160, 182, 186, 191, 129, 131, 133, + 134, 140, 143, 184, 186, 165, 166, 128, + 129, 130, 132, 133, 134, 135, 136, 139, + 140, 141, 146, 147, 150, 151, 152, 153, 
+ 154, 156, 128, 130, 184, 255, 135, 190, + 131, 175, 187, 188, 190, 255, 128, 130, + 167, 180, 179, 128, 130, 179, 255, 129, + 137, 141, 255, 172, 183, 159, 170, 188, + 128, 131, 190, 191, 151, 128, 132, 135, + 136, 139, 141, 162, 163, 166, 172, 176, + 180, 176, 255, 132, 255, 175, 181, 184, + 255, 129, 155, 158, 255, 129, 255, 171, + 183, 157, 171, 171, 172, 189, 190, 176, + 180, 176, 182, 145, 190, 143, 146, 178, + 157, 158, 160, 163, 133, 134, 137, 168, + 169, 170, 165, 169, 173, 255, 131, 132, + 140, 169, 174, 255, 130, 132, 128, 182, + 187, 255, 173, 180, 182, 255, 132, 155, + 159, 161, 175, 163, 144, 150, 160, 128, + 129, 132, 135, 133, 134, 129, 160, 255, + 192, 255, 180, 255, 150, 255, 133, 135, + 187, 188, 161, 169, 170, 173, 174, 175, + 177, 181, 184, 186, 170, 173, 181, 183, + 186, 0, 127, 181, 190, 176, 183, 184, + 185, 186, 191, 192, 255, 130, 131, 137, + 190, 136, 144, 145, 191, 192, 255, 135, + 179, 180, 129, 130, 132, 133, 144, 170, + 176, 178, 156, 128, 133, 144, 154, 160, + 191, 171, 176, 128, 138, 139, 159, 160, + 169, 174, 255, 148, 158, 169, 150, 164, + 167, 173, 176, 185, 189, 190, 192, 255, + 144, 143, 145, 146, 175, 176, 255, 139, + 140, 141, 255, 166, 176, 178, 255, 186, + 128, 137, 138, 170, 171, 179, 180, 181, + 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 154, 164, 168, 128, 149, 150, 173, 128, + 152, 153, 155, 163, 255, 189, 132, 185, + 144, 176, 152, 161, 164, 165, 166, 175, + 177, 255, 132, 169, 177, 188, 129, 131, + 141, 142, 145, 146, 179, 181, 186, 187, + 190, 255, 142, 158, 133, 134, 137, 138, + 143, 150, 152, 155, 156, 161, 164, 165, + 166, 175, 176, 177, 178, 255, 188, 129, + 131, 133, 138, 143, 144, 147, 168, 170, + 176, 178, 179, 181, 182, 184, 185, 190, + 255, 157, 131, 134, 137, 138, 142, 144, + 146, 152, 153, 158, 159, 165, 166, 175, + 178, 180, 182, 255, 189, 129, 131, 133, + 141, 143, 145, 147, 168, 
170, 176, 178, + 179, 181, 185, 188, 255, 134, 138, 144, + 185, 142, 159, 160, 161, 164, 165, 166, + 175, 176, 255, 189, 129, 131, 133, 140, + 143, 144, 147, 168, 170, 176, 178, 179, + 181, 185, 188, 191, 177, 128, 132, 135, + 136, 139, 141, 150, 151, 156, 157, 159, + 161, 162, 163, 166, 175, 130, 131, 156, + 133, 138, 142, 144, 146, 149, 153, 154, + 158, 159, 163, 164, 168, 170, 174, 185, + 190, 191, 144, 151, 128, 130, 134, 136, + 138, 141, 166, 175, 189, 128, 131, 133, + 140, 142, 144, 146, 168, 170, 185, 190, + 255, 133, 137, 151, 142, 148, 152, 154, + 155, 159, 160, 161, 164, 165, 166, 175, + 176, 255, 189, 129, 131, 133, 140, 142, + 144, 146, 168, 170, 179, 181, 185, 188, + 191, 158, 128, 132, 134, 136, 138, 141, + 149, 150, 160, 161, 162, 163, 166, 175, + 177, 178, 189, 129, 131, 133, 140, 142, + 144, 146, 186, 190, 255, 133, 137, 142, + 143, 150, 152, 158, 159, 161, 164, 165, + 166, 175, 176, 185, 186, 191, 192, 255, + 189, 130, 131, 133, 150, 154, 177, 179, + 187, 138, 150, 128, 134, 143, 148, 152, + 159, 166, 175, 178, 179, 177, 180, 186, + 135, 142, 144, 153, 177, 180, 185, 187, + 188, 136, 141, 144, 153, 128, 181, 183, + 185, 152, 153, 160, 169, 190, 191, 128, + 135, 137, 172, 177, 191, 128, 132, 134, + 135, 136, 140, 141, 151, 153, 188, 134, + 128, 129, 130, 131, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 165, 167, 168, 169, 170, 172, 173, 174, + 175, 176, 177, 179, 181, 182, 183, 188, + 189, 190, 191, 132, 152, 180, 184, 185, + 187, 171, 190, 128, 137, 150, 153, 158, + 160, 162, 164, 167, 173, 177, 180, 143, + 130, 141, 144, 153, 154, 157, 160, 255, + 155, 156, 157, 159, 160, 255, 128, 140, + 142, 145, 146, 148, 160, 177, 178, 180, + 128, 145, 146, 147, 160, 172, 174, 176, + 178, 179, 180, 255, 148, 156, 158, 159, + 160, 169, 170, 255, 139, 142, 144, 153, + 160, 255, 169, 128, 170, 176, 255, 128, + 158, 160, 171, 176, 187, 128, 150, 151, + 155, 191, 149, 158, 160, 188, 128, 137, + 144, 
153, 176, 190, 128, 132, 133, 179, + 180, 255, 133, 139, 140, 143, 144, 153, + 154, 170, 180, 255, 128, 130, 131, 160, + 161, 173, 174, 175, 176, 185, 186, 255, + 166, 179, 180, 255, 128, 163, 164, 183, + 173, 144, 146, 148, 168, 169, 177, 178, + 180, 181, 182, 184, 185, 128, 181, 188, + 191, 128, 129, 130, 131, 132, 133, 134, + 146, 147, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 164, 167, 140, 143, 152, + 153, 170, 174, 191, 255, 165, 177, 191, + 129, 147, 149, 159, 160, 175, 176, 255, + 144, 176, 165, 170, 175, 177, 180, 255, + 191, 168, 174, 176, 255, 128, 134, 136, + 142, 144, 150, 152, 158, 160, 191, 128, + 130, 132, 133, 134, 133, 170, 175, 187, + 188, 153, 154, 128, 146, 147, 148, 152, + 153, 154, 155, 156, 158, 159, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 129, + 255, 191, 128, 174, 175, 178, 180, 189, + 128, 157, 158, 159, 160, 255, 176, 177, + 178, 255, 130, 134, 139, 163, 167, 168, + 255, 128, 129, 130, 179, 180, 255, 187, + 189, 133, 143, 144, 153, 154, 159, 178, + 183, 184, 255, 128, 137, 138, 165, 166, + 173, 176, 255, 135, 147, 148, 159, 189, + 255, 128, 131, 132, 178, 179, 255, 143, + 129, 142, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 147, 128, + 143, 160, 175, 179, 180, 191, 189, 255, + 158, 159, 160, 190, 130, 135, 138, 143, + 146, 151, 154, 156, 185, 187, 144, 145, + 146, 147, 148, 150, 155, 157, 158, 159, + 128, 129, 130, 131, 133, 135, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 148, + 149, 152, 156, 157, 160, 161, 162, 163, + 164, 166, 168, 169, 170, 171, 172, 173, + 174, 176, 177, 153, 155, 178, 179, 189, + 160, 145, 255, 139, 143, 182, 
186, 187, + 255, 128, 191, 129, 131, 133, 134, 140, + 143, 144, 147, 149, 151, 153, 179, 184, + 186, 128, 135, 137, 164, 165, 166, 128, + 129, 130, 131, 132, 133, 134, 135, 136, + 138, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 155, 156, 162, 163, 171, + 128, 130, 131, 183, 184, 255, 135, 165, + 166, 175, 176, 190, 131, 175, 187, 188, + 190, 255, 128, 130, 131, 166, 167, 180, + 182, 191, 179, 182, 144, 178, 128, 130, + 131, 178, 179, 255, 155, 129, 132, 133, + 137, 141, 143, 144, 153, 154, 156, 157, + 255, 128, 145, 147, 171, 172, 183, 159, + 170, 171, 175, 176, 185, 186, 255, 189, + 128, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 144, 151, 128, 132, 135, 136, 139, 141, + 157, 161, 162, 163, 166, 172, 176, 180, + 128, 175, 176, 255, 134, 132, 135, 136, + 143, 144, 153, 154, 255, 128, 174, 175, + 181, 184, 255, 129, 151, 152, 155, 158, + 255, 132, 129, 143, 144, 153, 154, 255, + 128, 170, 171, 183, 157, 171, 176, 185, + 160, 168, 169, 171, 172, 173, 174, 188, + 189, 190, 161, 167, 144, 173, 176, 180, + 128, 175, 176, 182, 133, 143, 145, 190, + 191, 255, 143, 146, 147, 159, 176, 177, + 178, 128, 136, 144, 153, 157, 158, 160, + 163, 133, 134, 137, 144, 145, 146, 147, + 148, 149, 154, 155, 156, 157, 158, 159, + 168, 169, 170, 150, 153, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 160, 163, + 184, 185, 186, 161, 162, 133, 143, 144, + 150, 151, 255, 160, 128, 129, 132, 135, + 133, 134, 129, 160, 255, 192, 255, 176, + 255, 173, 0, 127, 176, 255, 131, 137, + 191, 145, 189, 135, 129, 130, 132, 133, + 156, 128, 133, 144, 154, 176, 139, 159, + 150, 157, 159, 164, 167, 168, 170, 173, + 143, 145, 176, 255, 139, 255, 166, 176, + 171, 179, 160, 161, 163, 164, 165, 167, + 169, 171, 173, 174, 175, 176, 177, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 166, 170, 172, 178, + 150, 153, 155, 163, 165, 167, 169, 173, + 153, 155, 163, 
255, 189, 132, 185, 144, + 152, 161, 164, 255, 188, 129, 131, 190, + 255, 133, 134, 137, 138, 142, 150, 152, + 161, 164, 255, 131, 134, 137, 138, 142, + 144, 146, 175, 178, 180, 182, 255, 134, + 138, 142, 161, 164, 255, 188, 129, 131, + 190, 191, 128, 132, 135, 136, 139, 141, + 150, 151, 162, 163, 130, 190, 191, 151, + 128, 130, 134, 136, 138, 141, 128, 131, + 190, 255, 133, 137, 142, 148, 151, 161, + 164, 255, 128, 132, 134, 136, 138, 141, + 149, 150, 162, 163, 129, 131, 190, 255, + 133, 137, 142, 150, 152, 161, 164, 255, + 130, 131, 138, 150, 143, 148, 152, 159, + 178, 179, 177, 180, 186, 135, 142, 177, + 180, 185, 187, 188, 136, 141, 181, 183, + 185, 152, 153, 190, 191, 177, 191, 128, + 132, 134, 135, 141, 151, 153, 188, 134, + 128, 129, 130, 132, 135, 141, 156, 157, + 158, 159, 160, 162, 164, 168, 169, 170, + 172, 173, 174, 175, 176, 179, 183, 133, + 134, 171, 190, 150, 153, 158, 160, 162, + 164, 167, 173, 177, 180, 143, 130, 141, + 154, 157, 157, 159, 146, 148, 178, 180, + 146, 147, 178, 179, 180, 255, 148, 156, + 158, 255, 139, 142, 169, 160, 171, 176, + 187, 151, 155, 191, 149, 158, 160, 188, + 176, 190, 128, 132, 180, 255, 133, 170, + 180, 255, 128, 130, 161, 173, 166, 179, + 164, 183, 173, 144, 146, 148, 168, 178, + 180, 184, 185, 128, 181, 188, 191, 128, + 129, 131, 179, 181, 183, 140, 143, 170, + 174, 160, 164, 166, 175, 144, 176, 175, + 177, 191, 160, 191, 128, 130, 132, 133, + 134, 136, 137, 170, 175, 153, 154, 153, + 154, 155, 160, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 175, 176, 177, + 255, 175, 178, 180, 189, 158, 159, 176, + 177, 130, 134, 139, 163, 167, 128, 129, + 180, 255, 133, 159, 178, 255, 166, 173, + 135, 147, 160, 188, 128, 131, 179, 255, + 129, 164, 166, 255, 169, 182, 131, 140, + 141, 187, 189, 176, 178, 180, 183, 184, + 190, 191, 129, 171, 175, 181, 182, 163, + 170, 172, 173, 172, 184, 187, 190, 191, + 158, 128, 143, 160, 175, 158, 159, 160, + 190, 130, 135, 138, 143, 146, 151, 154, + 156, 185, 187, 144, 145, 150, 155, 157, 
+ 158, 135, 139, 141, 168, 171, 189, 160, + 182, 186, 191, 129, 131, 133, 134, 140, + 143, 184, 186, 165, 166, 128, 129, 130, + 132, 133, 134, 135, 136, 139, 140, 141, + 146, 147, 150, 151, 152, 153, 154, 156, + 128, 130, 184, 255, 135, 190, 131, 175, + 187, 188, 190, 255, 128, 130, 167, 180, + 179, 128, 130, 179, 255, 129, 137, 141, + 255, 172, 183, 159, 170, 188, 128, 131, + 190, 191, 151, 128, 132, 135, 136, 139, + 141, 162, 163, 166, 172, 176, 180, 176, + 255, 132, 255, 175, 181, 184, 255, 129, + 155, 158, 255, 129, 255, 171, 183, 157, + 171, 171, 172, 189, 190, 176, 180, 176, + 182, 145, 190, 143, 146, 178, 157, 158, + 160, 163, 133, 134, 137, 168, 169, 170, + 165, 169, 173, 255, 131, 132, 140, 169, + 174, 255, 130, 132, 128, 182, 187, 255, + 173, 180, 182, 255, 132, 155, 159, 161, + 175, 163, 144, 150, 160, 128, 129, 132, + 135, 133, 134, 129, 160, 255, 192, 255, + 173, 0, 127, 176, 255, 131, 137, 191, + 145, 189, 135, 129, 130, 132, 133, 156, + 128, 133, 144, 154, 176, 139, 159, 150, + 157, 159, 164, 167, 168, 170, 173, 143, + 145, 176, 255, 139, 255, 166, 176, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 166, 170, 172, 178, 150, + 153, 155, 163, 165, 167, 169, 173, 153, + 155, 163, 255, 189, 132, 185, 144, 152, + 161, 164, 255, 188, 129, 131, 190, 255, + 133, 134, 137, 138, 142, 150, 152, 161, + 164, 255, 131, 134, 137, 138, 142, 144, + 146, 175, 178, 180, 182, 255, 134, 138, + 142, 161, 164, 255, 188, 129, 131, 190, + 191, 128, 132, 135, 136, 139, 141, 150, + 151, 162, 163, 130, 190, 191, 151, 128, + 130, 134, 136, 138, 141, 128, 131, 190, + 255, 133, 137, 142, 148, 151, 161, 164, + 255, 128, 132, 134, 136, 138, 141, 149, + 150, 162, 163, 129, 131, 190, 255, 133, + 137, 142, 150, 152, 161, 164, 255, 130, + 131, 138, 150, 143, 148, 152, 159, 178, + 179, 177, 180, 186, 135, 142, 177, 180, + 185, 187, 188, 136, 141, 181, 183, 185, + 152, 153, 190, 191, 177, 
191, 128, 132, + 134, 135, 141, 151, 153, 188, 134, 128, + 129, 130, 141, 156, 157, 158, 159, 160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 171, 190, 150, 153, + 158, 160, 162, 164, 167, 173, 177, 180, + 143, 130, 141, 154, 157, 157, 159, 146, + 148, 178, 180, 146, 147, 178, 179, 180, + 255, 148, 156, 158, 255, 139, 142, 169, + 160, 171, 176, 187, 151, 155, 191, 149, + 158, 160, 188, 176, 190, 128, 132, 180, + 255, 133, 170, 180, 255, 128, 130, 161, + 173, 166, 179, 164, 183, 173, 144, 146, + 148, 168, 178, 180, 184, 185, 128, 181, + 188, 191, 128, 129, 131, 179, 181, 183, + 140, 143, 170, 174, 191, 255, 165, 129, + 147, 149, 159, 160, 175, 176, 255, 144, + 176, 175, 177, 191, 160, 191, 128, 130, + 131, 135, 139, 140, 141, 170, 175, 177, + 181, 153, 156, 160, 255, 187, 192, 255, + 176, 191, 144, 190, 152, 255, 153, 154, + 155, 160, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 175, 175, 178, 180, + 189, 158, 159, 176, 177, 130, 134, 139, + 163, 167, 128, 129, 180, 255, 133, 159, + 178, 255, 166, 173, 135, 147, 128, 131, + 179, 255, 129, 164, 166, 255, 169, 182, + 131, 140, 141, 187, 189, 176, 178, 180, + 183, 184, 190, 191, 129, 171, 175, 181, + 182, 163, 170, 172, 173, 172, 184, 185, + 187, 188, 189, 190, 191, 158, 128, 143, + 160, 175, 179, 180, 166, 255, 160, 255, + 185, 187, 144, 145, 150, 155, 157, 158, + 135, 139, 141, 168, 171, 189, 160, 182, + 186, 191, 129, 131, 133, 134, 140, 143, + 184, 186, 165, 166, 128, 129, 130, 132, + 133, 134, 135, 136, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 156, 128, + 130, 184, 255, 135, 190, 131, 175, 187, + 188, 190, 255, 128, 130, 167, 180, 179, + 128, 130, 179, 255, 129, 137, 141, 255, + 172, 183, 159, 170, 188, 128, 131, 190, + 191, 151, 128, 132, 135, 136, 139, 141, + 162, 163, 166, 172, 176, 180, 176, 255, + 132, 255, 175, 181, 184, 255, 129, 155, + 158, 255, 129, 255, 171, 183, 157, 171, + 171, 172, 189, 190, 176, 180, 176, 182, + 145, 190, 143, 146, 128, 178, 128, 157, + 158, 
160, 163, 133, 134, 137, 168, 169, + 170, 165, 169, 173, 255, 131, 132, 140, + 169, 174, 255, 130, 132, 128, 182, 187, + 255, 173, 180, 182, 255, 132, 155, 159, + 161, 175, 163, 144, 150, 160, 128, 129, + 132, 135, 133, 134, 129, 160, 255, 192, + 255, 129, 255, 173, 128, 255, 176, 255, + 131, 137, 191, 145, 189, 135, 129, 130, + 132, 133, 156, 128, 133, 144, 154, 176, + 139, 159, 150, 157, 159, 164, 167, 168, + 170, 173, 143, 145, 176, 255, 139, 255, + 166, 176, 171, 179, 160, 161, 163, 164, + 165, 167, 169, 171, 173, 174, 175, 176, + 177, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 166, 170, + 172, 178, 150, 153, 155, 163, 165, 167, + 169, 173, 153, 155, 163, 255, 189, 132, + 185, 144, 152, 161, 164, 255, 188, 129, + 131, 190, 255, 133, 134, 137, 138, 142, + 150, 152, 161, 164, 255, 131, 134, 137, + 138, 142, 144, 146, 175, 178, 180, 182, + 255, 134, 138, 142, 161, 164, 255, 188, + 129, 131, 190, 191, 128, 132, 135, 136, + 139, 141, 150, 151, 162, 163, 130, 190, + 191, 151, 128, 130, 134, 136, 138, 141, + 128, 131, 190, 255, 133, 137, 142, 148, + 151, 161, 164, 255, 128, 132, 134, 136, + 138, 141, 149, 150, 162, 163, 129, 131, + 190, 255, 133, 137, 142, 150, 152, 161, + 164, 255, 130, 131, 138, 150, 143, 148, + 152, 159, 178, 179, 177, 180, 186, 135, + 142, 177, 180, 185, 187, 188, 136, 141, + 181, 183, 185, 152, 153, 190, 191, 177, + 191, 128, 132, 134, 135, 141, 151, 153, + 188, 134, 128, 129, 130, 141, 156, 157, + 158, 159, 160, 162, 164, 168, 169, 170, + 172, 173, 174, 175, 176, 179, 183, 171, + 190, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 143, 130, 141, 154, 157, + 157, 159, 146, 148, 178, 180, 146, 147, + 178, 179, 180, 255, 148, 156, 158, 255, + 139, 142, 169, 160, 171, 176, 187, 151, + 155, 191, 149, 158, 160, 188, 176, 190, + 128, 132, 180, 255, 133, 170, 180, 255, + 128, 130, 161, 173, 166, 179, 164, 183, + 173, 144, 146, 148, 168, 178, 180, 184, + 185, 128, 181, 188, 191, 128, 129, 131, + 179, 181, 183, 140, 143, 170, 
174, 160, + 164, 166, 175, 144, 176, 175, 177, 191, + 160, 191, 128, 130, 170, 175, 153, 154, + 153, 154, 155, 160, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 175, 175, + 178, 180, 189, 158, 159, 176, 177, 130, + 134, 139, 163, 167, 128, 129, 180, 255, + 133, 159, 178, 255, 166, 173, 135, 147, + 128, 131, 179, 255, 129, 164, 166, 255, + 169, 182, 131, 140, 141, 187, 189, 176, + 178, 180, 183, 184, 190, 191, 129, 171, + 175, 181, 182, 163, 170, 172, 173, 172, + 184, 187, 190, 191, 158, 128, 143, 160, + 175, 185, 187, 144, 145, 150, 155, 157, + 158, 135, 139, 141, 168, 171, 189, 160, + 182, 186, 191, 129, 131, 133, 134, 140, + 143, 184, 186, 165, 166, 128, 129, 130, + 132, 133, 134, 135, 136, 139, 140, 141, + 146, 147, 150, 151, 152, 153, 154, 156, + 128, 130, 184, 255, 135, 190, 131, 175, + 187, 188, 190, 255, 128, 130, 167, 180, + 179, 128, 130, 179, 255, 129, 137, 141, + 255, 172, 183, 159, 170, 188, 128, 131, + 190, 191, 151, 128, 132, 135, 136, 139, + 141, 162, 163, 166, 172, 176, 180, 176, + 255, 132, 255, 175, 181, 184, 255, 129, + 155, 158, 255, 129, 255, 171, 183, 157, + 171, 171, 172, 189, 190, 176, 180, 176, + 182, 145, 190, 143, 146, 178, 157, 158, + 160, 163, 133, 134, 137, 168, 169, 170, + 165, 169, 173, 255, 131, 132, 140, 169, + 174, 255, 130, 132, 128, 182, 187, 255, + 173, 180, 182, 255, 132, 155, 159, 161, + 175, 163, 144, 150, 160, 128, 129, 132, + 135, 133, 134, 129, 160, 255, 192, 255, + 151, 152, 153, 154, 155, 156, 160, 255, + 173, 173, 128, 255, 176, 255, 131, 137, + 191, 145, 189, 135, 129, 130, 132, 133, + 156, 128, 133, 144, 154, 176, 139, 159, + 150, 157, 159, 164, 167, 168, 170, 173, + 143, 145, 176, 255, 139, 255, 166, 176, + 171, 179, 160, 161, 163, 164, 165, 167, + 169, 171, 173, 174, 175, 176, 177, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 166, 170, 172, 178, + 150, 153, 155, 163, 165, 167, 169, 173, + 153, 155, 163, 255, 189, 132, 185, 144, + 152, 161, 164, 255, 188, 129, 131, 190, + 255, 133, 
134, 137, 138, 142, 150, 152, + 161, 164, 255, 131, 134, 137, 138, 142, + 144, 146, 175, 178, 180, 182, 255, 134, + 138, 142, 161, 164, 255, 188, 129, 131, + 190, 191, 128, 132, 135, 136, 139, 141, + 150, 151, 162, 163, 130, 190, 191, 151, + 128, 130, 134, 136, 138, 141, 128, 131, + 190, 255, 133, 137, 142, 148, 151, 161, + 164, 255, 128, 132, 134, 136, 138, 141, + 149, 150, 162, 163, 129, 131, 190, 255, + 133, 137, 142, 150, 152, 161, 164, 255, + 130, 131, 138, 150, 143, 148, 152, 159, + 178, 179, 177, 180, 186, 135, 142, 177, + 180, 185, 187, 188, 136, 141, 181, 183, + 185, 152, 153, 190, 191, 177, 191, 128, + 132, 134, 135, 141, 151, 153, 188, 134, + 128, 129, 130, 141, 156, 157, 158, 159, + 160, 162, 164, 168, 169, 170, 172, 173, + 174, 175, 176, 179, 183, 171, 190, 150, + 153, 158, 160, 162, 164, 167, 173, 177, + 180, 143, 130, 141, 154, 157, 157, 159, + 146, 148, 178, 180, 146, 147, 178, 179, + 180, 255, 148, 156, 158, 255, 139, 142, + 169, 160, 171, 176, 187, 151, 155, 191, + 149, 158, 160, 188, 176, 190, 128, 132, + 180, 255, 133, 170, 180, 255, 128, 130, + 161, 173, 166, 179, 164, 183, 173, 144, + 146, 148, 168, 178, 180, 184, 185, 128, + 181, 188, 191, 128, 129, 131, 179, 181, + 183, 140, 143, 170, 174, 160, 164, 166, + 175, 144, 176, 175, 177, 191, 160, 191, + 128, 130, 170, 175, 153, 154, 153, 154, + 155, 160, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 175, 175, 178, 180, + 189, 158, 159, 176, 177, 130, 134, 139, + 163, 167, 128, 129, 180, 255, 133, 159, + 178, 255, 166, 173, 135, 147, 128, 131, + 179, 255, 129, 164, 166, 255, 169, 182, + 131, 140, 141, 187, 189, 176, 178, 180, + 183, 184, 190, 191, 129, 171, 175, 181, + 182, 163, 170, 172, 173, 172, 184, 187, + 190, 191, 158, 128, 143, 160, 175, 185, + 187, 144, 145, 150, 155, 157, 158, 135, + 139, 141, 168, 171, 189, 160, 182, 186, + 191, 129, 131, 133, 134, 140, 143, 184, + 186, 165, 166, 128, 129, 130, 132, 133, + 134, 135, 136, 139, 140, 141, 146, 147, + 150, 151, 152, 153, 154, 156, 128, 
130, + 184, 255, 135, 190, 131, 175, 187, 188, + 190, 255, 128, 130, 167, 180, 179, 128, + 130, 179, 255, 129, 137, 141, 255, 172, + 183, 159, 170, 188, 128, 131, 190, 191, + 151, 128, 132, 135, 136, 139, 141, 162, + 163, 166, 172, 176, 180, 176, 255, 132, + 255, 175, 181, 184, 255, 129, 155, 158, + 255, 129, 255, 171, 183, 157, 171, 171, + 172, 189, 190, 176, 180, 176, 182, 145, + 190, 143, 146, 178, 157, 158, 160, 163, + 133, 134, 137, 168, 169, 170, 165, 169, + 173, 255, 131, 132, 140, 169, 174, 255, + 130, 132, 128, 182, 187, 255, 173, 180, + 182, 255, 132, 155, 159, 161, 175, 163, + 144, 150, 160, 128, 129, 132, 135, 133, + 134, 129, 160, 255, 192, 255, 128, 255, + 176, 255, 131, 137, 191, 145, 189, 135, + 129, 130, 132, 133, 156, 128, 133, 144, + 154, 176, 139, 159, 150, 157, 159, 164, + 167, 168, 170, 173, 143, 145, 176, 255, + 139, 255, 166, 176, 171, 179, 160, 161, + 163, 164, 165, 167, 169, 171, 173, 174, + 175, 176, 177, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 166, 170, 172, 178, 128, 129, 130, 141, + 156, 157, 158, 159, 160, 162, 164, 168, + 169, 170, 172, 173, 174, 175, 176, 179, + 183, 128, 129, 131, 179, 181, 183, 128, + 130, 153, 154, 155, 160, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 175, + 172, 184, 187, 190, 191, 144, 145, 150, + 155, 157, 158, 160, 182, 255, 191, 128, + 174, 175, 178, 180, 189, 128, 157, 158, + 159, 160, 255, 176, 177, 178, 255, 130, + 134, 139, 163, 167, 168, 255, 128, 129, + 130, 179, 180, 255, 187, 189, 133, 143, + 144, 153, 154, 159, 178, 183, 184, 255, + 128, 137, 138, 165, 166, 173, 176, 255, + 135, 147, 148, 159, 160, 188, 189, 255, + 128, 131, 132, 178, 179, 255, 143, 129, + 142, 144, 153, 154, 164, 166, 175, 176, + 185, 186, 255, 128, 168, 169, 182, 131, + 128, 139, 140, 141, 144, 153, 187, 189, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 173, 170, 173, 181, 183, 186, 128, + 255, 181, 190, 176, 183, 184, 185, 186, + 191, 192, 255, 
130, 131, 137, 190, 136, + 144, 145, 191, 192, 255, 135, 179, 180, + 129, 130, 132, 133, 144, 170, 176, 178, + 156, 128, 133, 144, 154, 160, 191, 171, + 176, 128, 138, 139, 159, 160, 169, 174, + 255, 148, 158, 169, 150, 164, 167, 173, + 176, 185, 189, 190, 192, 255, 144, 143, + 145, 146, 175, 176, 255, 139, 140, 141, + 255, 166, 176, 178, 255, 186, 128, 137, + 138, 170, 171, 179, 180, 181, 160, 161, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 172, 173, 174, 175, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 154, 164, + 168, 128, 149, 150, 173, 128, 152, 153, + 155, 163, 255, 189, 132, 185, 144, 176, + 152, 161, 164, 165, 166, 175, 177, 255, + 132, 169, 177, 188, 129, 131, 141, 142, + 145, 146, 179, 181, 186, 187, 190, 255, + 142, 158, 133, 134, 137, 138, 143, 150, + 152, 155, 156, 161, 164, 165, 166, 175, + 176, 177, 178, 255, 188, 129, 131, 133, + 138, 143, 144, 147, 168, 170, 176, 178, + 179, 181, 182, 184, 185, 190, 255, 157, + 131, 134, 137, 138, 142, 144, 146, 152, + 153, 158, 159, 165, 166, 175, 178, 180, + 182, 255, 189, 129, 131, 133, 141, 143, + 145, 147, 168, 170, 176, 178, 179, 181, + 185, 188, 255, 134, 138, 144, 185, 142, + 159, 160, 161, 164, 165, 166, 175, 176, + 255, 189, 129, 131, 133, 140, 143, 144, + 147, 168, 170, 176, 178, 179, 181, 185, + 188, 191, 177, 128, 132, 135, 136, 139, + 141, 150, 151, 156, 157, 159, 161, 162, + 163, 166, 175, 130, 131, 156, 133, 138, + 142, 144, 146, 149, 153, 154, 158, 159, + 163, 164, 168, 170, 174, 185, 190, 191, + 144, 151, 128, 130, 134, 136, 138, 141, + 166, 175, 189, 128, 131, 133, 140, 142, + 144, 146, 168, 170, 185, 190, 255, 133, + 137, 151, 142, 148, 152, 154, 155, 159, + 160, 161, 164, 165, 166, 175, 176, 255, + 189, 129, 131, 133, 140, 142, 144, 146, + 168, 170, 179, 181, 185, 188, 191, 158, + 128, 132, 134, 136, 138, 141, 149, 150, + 160, 161, 162, 163, 166, 175, 177, 178, + 189, 129, 131, 133, 140, 142, 144, 146, + 186, 190, 255, 133, 137, 142, 143, 150, 
+ 152, 158, 159, 161, 164, 165, 166, 175, + 176, 185, 186, 191, 192, 255, 189, 130, + 131, 133, 150, 154, 177, 179, 187, 138, + 150, 128, 134, 143, 148, 152, 159, 166, + 175, 178, 179, 177, 180, 186, 135, 142, + 144, 153, 177, 180, 185, 187, 188, 136, + 141, 144, 153, 128, 181, 183, 185, 152, + 153, 160, 169, 190, 191, 128, 135, 137, + 172, 177, 191, 128, 132, 134, 135, 136, + 140, 141, 151, 153, 188, 134, 128, 129, + 130, 131, 132, 135, 137, 138, 139, 140, + 141, 142, 143, 144, 153, 154, 155, 156, + 157, 158, 159, 160, 161, 162, 163, 164, + 165, 167, 168, 169, 170, 172, 173, 174, + 175, 176, 177, 179, 181, 182, 183, 188, + 189, 190, 191, 133, 134, 136, 152, 180, + 184, 185, 187, 171, 190, 128, 137, 150, + 153, 158, 160, 162, 164, 167, 173, 177, + 180, 143, 130, 141, 144, 153, 154, 157, + 160, 255, 155, 156, 157, 159, 160, 255, + 128, 140, 142, 145, 146, 148, 160, 177, + 178, 180, 128, 145, 146, 147, 160, 172, + 174, 176, 178, 179, 180, 255, 148, 156, + 158, 159, 160, 169, 170, 255, 139, 142, + 144, 153, 160, 255, 169, 128, 170, 176, + 255, 128, 158, 160, 171, 176, 187, 128, + 150, 151, 155, 191, 149, 158, 160, 188, + 128, 137, 144, 153, 176, 190, 128, 132, + 133, 179, 180, 255, 133, 139, 140, 143, + 144, 153, 154, 170, 180, 255, 128, 130, + 131, 160, 161, 173, 174, 175, 176, 185, + 186, 255, 166, 179, 180, 255, 128, 163, + 164, 183, 173, 144, 146, 148, 168, 169, + 177, 178, 180, 181, 182, 184, 185, 128, + 181, 188, 191, 128, 129, 130, 131, 132, + 133, 134, 146, 147, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 164, 167, 140, + 143, 152, 153, 170, 174, 191, 255, 165, + 177, 191, 129, 147, 149, 159, 160, 175, + 176, 255, 144, 176, 165, 170, 175, 177, + 180, 255, 191, 168, 174, 176, 255, 128, + 134, 136, 142, 144, 150, 152, 158, 160, + 191, 128, 130, 132, 133, 134, 136, 137, + 133, 170, 175, 187, 188, 153, 154, 128, + 146, 147, 148, 152, 153, 154, 155, 156, + 158, 159, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 129, 
157, 177, 255, 191, + 128, 174, 175, 178, 180, 189, 128, 157, + 158, 159, 160, 255, 176, 177, 178, 255, + 130, 134, 139, 163, 167, 168, 255, 128, + 129, 130, 179, 180, 255, 187, 189, 133, + 143, 144, 153, 154, 159, 178, 183, 184, + 255, 128, 137, 138, 165, 166, 173, 176, + 255, 135, 147, 148, 159, 160, 188, 189, + 255, 128, 131, 132, 178, 179, 255, 143, + 129, 142, 144, 153, 154, 164, 166, 175, + 176, 185, 186, 255, 128, 168, 169, 182, + 131, 128, 139, 140, 141, 144, 153, 187, + 189, 176, 178, 180, 183, 184, 190, 191, + 129, 160, 170, 171, 175, 178, 180, 181, + 182, 128, 162, 163, 170, 172, 173, 176, + 185, 172, 173, 174, 175, 180, 181, 182, + 183, 184, 185, 187, 188, 189, 190, 191, + 176, 186, 158, 190, 128, 134, 147, 151, + 157, 168, 170, 182, 184, 188, 147, 128, + 143, 160, 175, 179, 180, 191, 189, 255, + 158, 159, 160, 190, 130, 135, 138, 143, + 146, 151, 154, 156, 185, 187, 144, 145, + 146, 147, 148, 150, 155, 157, 158, 159, + 128, 129, 130, 131, 133, 135, 138, 139, + 140, 141, 142, 143, 144, 145, 146, 148, + 149, 152, 156, 157, 160, 161, 162, 163, + 164, 166, 168, 169, 170, 171, 172, 173, + 174, 176, 177, 153, 155, 178, 179, 189, + 160, 145, 255, 139, 143, 182, 186, 187, + 255, 128, 191, 129, 131, 133, 134, 140, + 143, 144, 147, 149, 151, 153, 179, 184, + 186, 128, 135, 137, 164, 165, 166, 128, + 129, 130, 131, 132, 133, 134, 135, 136, + 138, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 155, 156, 162, 163, 171, + 128, 130, 131, 183, 184, 255, 135, 165, + 166, 175, 176, 190, 131, 175, 187, 188, + 190, 255, 128, 130, 131, 166, 167, 180, + 182, 191, 179, 182, 144, 178, 128, 130, + 131, 178, 179, 255, 155, 129, 132, 133, + 137, 141, 143, 144, 153, 154, 156, 157, + 255, 128, 145, 147, 171, 172, 183, 159, + 170, 171, 175, 176, 185, 186, 255, 189, + 128, 131, 133, 140, 143, 144, 147, 168, + 170, 176, 178, 179, 181, 185, 188, 191, + 144, 151, 128, 132, 135, 136, 139, 141, + 157, 161, 162, 163, 166, 172, 176, 180, + 128, 175, 176, 255, 134, 132, 135, 136, + 
143, 144, 153, 154, 255, 128, 174, 175, + 181, 184, 255, 129, 151, 152, 155, 158, + 255, 132, 129, 143, 144, 153, 154, 255, + 128, 170, 171, 183, 157, 171, 176, 185, + 160, 168, 169, 171, 172, 173, 174, 188, + 189, 190, 161, 167, 144, 173, 176, 180, + 128, 175, 176, 182, 133, 143, 145, 190, + 191, 255, 143, 146, 147, 159, 176, 177, + 178, 128, 136, 144, 153, 157, 158, 160, + 163, 133, 134, 137, 144, 145, 146, 147, + 148, 149, 154, 155, 156, 157, 158, 159, + 168, 169, 170, 150, 153, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 160, 163, + 184, 185, 186, 161, 162, 133, 143, 144, + 150, 151, 255, 160, 128, 129, 132, 135, + 133, 134, 129, 160, 255, 192, 255, 176, + 255, 128, 255, 176, 255, 131, 137, 191, + 145, 189, 135, 129, 130, 132, 133, 156, + 128, 133, 144, 154, 176, 139, 159, 150, + 157, 159, 164, 167, 168, 170, 173, 143, + 145, 176, 255, 139, 255, 166, 176, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 166, 170, 172, 178, 128, + 129, 130, 141, 156, 157, 158, 159, 160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 128, 129, 131, 179, + 181, 183, 128, 130, 153, 154, 155, 160, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 175, 172, 184, 187, 190, 191, + 144, 145, 150, 155, 157, 158, 160, 174, + 175, 154, 255, 158, 190, 128, 134, 147, + 151, 157, 168, 170, 182, 184, 188, 128, + 143, 160, 175, 179, 180, 191, 189, 255, + 129, 154, 166, 255, 158, 159, 160, 190, + 191, 255, 130, 135, 138, 143, 146, 151, + 154, 156, 185, 187, 128, 129, 130, 131, + 133, 135, 138, 139, 140, 141, 142, 143, + 144, 145, 146, 148, 149, 152, 156, 157, + 160, 161, 162, 163, 164, 166, 168, 169, + 170, 171, 172, 173, 174, 176, 177, 153, + 155, 178, 179, 160, 145, 255, 139, 143, + 182, 186, 187, 255, 128, 191, 129, 131, + 133, 134, 140, 143, 144, 147, 149, 151, + 153, 179, 184, 186, 128, 
135, 137, 164, + 165, 166, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 138, 139, 140, 141, 146, + 147, 150, 151, 152, 153, 154, 155, 156, + 162, 163, 171, 128, 130, 131, 183, 184, + 255, 135, 165, 166, 175, 176, 190, 131, + 175, 187, 188, 190, 255, 128, 130, 131, + 166, 167, 180, 182, 191, 179, 182, 144, + 178, 128, 130, 131, 178, 179, 255, 155, + 129, 132, 133, 137, 141, 143, 144, 153, + 154, 156, 157, 255, 128, 145, 147, 171, + 172, 183, 159, 170, 171, 175, 176, 185, + 186, 255, 189, 128, 131, 133, 140, 143, + 144, 147, 168, 170, 176, 178, 179, 181, + 185, 188, 191, 144, 151, 128, 132, 135, + 136, 139, 141, 157, 161, 162, 163, 166, + 172, 176, 180, 128, 175, 176, 255, 134, + 132, 135, 136, 143, 144, 153, 154, 255, + 128, 174, 175, 181, 184, 255, 129, 151, + 152, 155, 158, 255, 132, 129, 143, 144, + 153, 154, 255, 128, 170, 171, 183, 157, + 171, 176, 185, 160, 168, 169, 171, 172, + 173, 174, 188, 189, 190, 161, 167, 144, + 173, 176, 180, 128, 175, 176, 182, 133, + 143, 145, 190, 191, 255, 143, 146, 147, + 159, 128, 176, 177, 178, 128, 129, 128, + 136, 144, 153, 157, 158, 160, 163, 133, + 134, 137, 144, 145, 146, 147, 148, 149, + 154, 155, 156, 157, 158, 159, 168, 169, + 170, 150, 153, 160, 163, 184, 185, 186, + 161, 162, 133, 143, 144, 150, 151, 255, + 132, 133, 134, 135, 136, 166, 191, 173, + 0, 127, 176, 255, 131, 137, 191, 145, + 189, 135, 129, 130, 132, 133, 156, 128, + 133, 144, 154, 176, 139, 159, 150, 157, + 159, 164, 167, 168, 170, 173, 143, 145, + 176, 255, 139, 255, 166, 176, 171, 179, + 160, 161, 163, 164, 165, 167, 169, 171, + 173, 174, 175, 176, 177, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 166, 170, 172, 178, 150, 153, + 155, 163, 165, 167, 169, 173, 153, 155, + 163, 255, 189, 132, 185, 144, 152, 161, + 164, 255, 188, 129, 131, 190, 255, 133, + 134, 137, 138, 142, 150, 152, 161, 164, + 255, 131, 134, 137, 138, 142, 144, 146, + 175, 178, 180, 182, 255, 134, 138, 142, + 161, 164, 255, 188, 129, 131, 190, 191, + 128, 132, 
135, 136, 139, 141, 150, 151, + 162, 163, 130, 190, 191, 151, 128, 130, + 134, 136, 138, 141, 128, 131, 190, 255, + 133, 137, 142, 148, 151, 161, 164, 255, + 128, 132, 134, 136, 138, 141, 149, 150, + 162, 163, 129, 131, 190, 255, 133, 137, + 142, 150, 152, 161, 164, 255, 130, 131, + 138, 150, 143, 148, 152, 159, 178, 179, + 177, 180, 186, 135, 142, 177, 180, 185, + 187, 188, 136, 141, 181, 183, 185, 152, + 153, 190, 191, 177, 191, 128, 132, 134, + 135, 141, 151, 153, 188, 134, 128, 129, + 130, 141, 156, 157, 158, 159, 160, 162, + 164, 168, 169, 170, 172, 173, 174, 175, + 176, 179, 183, 171, 190, 150, 153, 158, + 160, 162, 164, 167, 173, 177, 180, 143, + 130, 141, 154, 157, 157, 159, 146, 148, + 178, 180, 146, 147, 178, 179, 180, 255, + 148, 156, 158, 255, 139, 142, 169, 160, + 171, 176, 187, 151, 155, 191, 149, 158, + 160, 188, 176, 190, 128, 132, 180, 255, + 133, 170, 180, 255, 128, 130, 161, 173, + 166, 179, 164, 183, 173, 144, 146, 148, + 168, 178, 180, 184, 185, 128, 181, 188, + 191, 128, 129, 131, 179, 181, 183, 140, + 143, 170, 174, 160, 164, 166, 175, 144, + 176, 175, 177, 191, 160, 191, 128, 130, + 170, 175, 153, 154, 153, 154, 155, 160, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 175, 175, 178, 180, 189, 158, + 159, 176, 177, 130, 134, 139, 163, 167, + 128, 129, 180, 255, 133, 159, 178, 255, + 166, 173, 135, 147, 128, 131, 179, 255, + 129, 164, 166, 255, 169, 182, 131, 140, + 141, 187, 189, 176, 178, 180, 183, 184, + 190, 191, 129, 171, 175, 181, 182, 163, + 170, 172, 173, 172, 184, 187, 190, 191, + 158, 128, 143, 160, 175, 185, 187, 144, + 145, 150, 155, 157, 158, 159, 135, 139, + 141, 168, 171, 189, 160, 182, 186, 191, + 129, 131, 133, 134, 140, 143, 184, 186, + 165, 166, 128, 129, 130, 132, 133, 134, + 135, 136, 139, 140, 141, 146, 147, 150, + 151, 152, 153, 154, 156, 128, 130, 184, + 255, 135, 190, 131, 175, 187, 188, 190, + 255, 128, 130, 167, 180, 179, 128, 130, + 179, 255, 129, 137, 141, 255, 172, 183, + 159, 170, 188, 128, 131, 190, 191, 
151, + 128, 132, 135, 136, 139, 141, 162, 163, + 166, 172, 176, 180, 176, 255, 132, 255, + 175, 181, 184, 255, 129, 155, 158, 255, + 129, 255, 171, 183, 157, 171, 171, 172, + 189, 190, 176, 180, 176, 182, 145, 190, + 143, 146, 178, 157, 158, 160, 163, 133, + 134, 137, 168, 169, 170, 165, 169, 173, + 255, 131, 132, 140, 169, 174, 255, 130, + 132, 128, 182, 187, 255, 173, 180, 182, + 255, 132, 155, 159, 161, 175, 163, 144, + 150, 135, 160, 128, 129, 132, 135, 133, + 134, 129, 160, 255, 192, 255, 128, 128, + 129, 255, 155, 156, 151, 255, 156, 157, + 160, 181, 255, 158, 159, 186, 187, 255, + 162, 255, 160, 168, 161, 167, 158, 255, + 10, 13, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 228, 233, 234, 237, + 239, 240, 243, 11, 12, 48, 57, 65, + 90, 97, 122, 196, 218, 229, 232, 235, + 236, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 10, 39, + 44, 46, 59, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 44, 46, 59, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 44, 46, + 59, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 
221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 196, + 218, 235, 236, 34, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 95, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 133, 170, + 173, 181, 186, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 150, 152, 182, 184, 255, + 39, 46, 58, 95, 173, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 
217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 0, + 47, 48, 57, 59, 64, 65, 90, 91, + 96, 97, 122, 123, 127, 196, 218, 235, + 236, 170, 173, 181, 183, 186, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 176, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 44, 46, 59, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 131, 137, + 196, 218, 235, 236, 39, 46, 58, 95, + 191, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 145, 189, 196, + 218, 235, 236, 39, 46, 58, 
95, 135, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 129, 130, 132, 133, + 196, 218, 235, 236, 39, 46, 58, 95, + 156, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 128, 133, 144, + 154, 196, 218, 235, 236, 39, 46, 58, + 95, 176, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 139, 159, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 150, 157, 159, 164, + 167, 168, 170, 173, 196, 218, 235, 236, + 39, 46, 58, 95, 143, 145, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 176, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 192, 255, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 128, 255, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 0, 47, 48, 57, + 59, 64, 65, 90, 91, 96, 97, 122, + 123, 138, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 166, 176, + 196, 218, 235, 236, 39, 46, 58, 95, + 
194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 171, 179, 196, 218, + 235, 236, 39, 46, 58, 95, 160, 161, + 163, 164, 165, 167, 169, 171, 173, 174, + 175, 176, 177, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 166, 170, 172, 178, + 196, 218, 235, 236, 39, 46, 58, 95, + 128, 129, 130, 141, 156, 157, 158, 159, + 160, 162, 164, 168, 169, 170, 172, 173, + 174, 175, 176, 179, 183, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 128, 129, 131, 179, 181, 183, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 196, 218, 235, 236, 39, + 46, 58, 95, 128, 130, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 153, 154, 155, 160, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 175, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 46, 58, 95, 172, 184, 187, 190, + 191, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 144, 145, 150, + 155, 
157, 158, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 196, + 218, 235, 236, 39, 46, 58, 95, 160, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 151, 173, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 130, + 133, 146, 159, 165, 171, 175, 255, 181, + 190, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 184, 185, + 192, 255, 135, 140, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 134, 138, 142, 161, 163, 255, 182, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 130, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 176, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 151, 152, + 154, 160, 190, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 136, 144, 192, 255, 135, 179, 180, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 129, 130, 132, 133, + 144, 170, 176, 178, 156, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 133, 144, 154, 160, 191, + 171, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 159, + 160, 169, 174, 255, 148, 158, 169, 194, + 
204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 176, 185, 189, 190, + 192, 255, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 143, + 255, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 139, 140, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 178, 255, 186, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 128, 137, 138, + 181, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 177, 178, 179, 180, 181, 182, + 183, 184, 185, 186, 187, 188, 189, 190, + 191, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 129, + 130, 131, 137, 138, 139, 140, 141, 142, + 143, 144, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 167, + 168, 169, 170, 172, 173, 174, 175, 176, + 177, 179, 181, 182, 183, 188, 189, 190, + 191, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 132, 152, + 180, 184, 185, 187, 128, 129, 130, 131, + 132, 133, 134, 146, 147, 176, 177, 178, + 179, 180, 181, 182, 183, 184, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 128, 130, 132, 133, 134, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 128, 146, 147, + 148, 152, 153, 154, 155, 156, 158, 159, + 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 129, 255, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 
221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 150, 153, 155, 163, 165, 167, 169, 173, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 153, 155, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 163, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 171, 190, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 150, + 153, 158, 160, 162, 164, 167, 173, 177, + 180, 196, 218, 235, 236, 39, 46, 58, + 95, 143, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 130, 141, + 154, 157, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 157, 159, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 146, 148, 178, 180, + 196, 218, 235, 236, 39, 46, 58, 95, + 
194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 146, 147, 178, 179, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 180, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 158, 159, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 160, 255, 172, + 173, 174, 175, 180, 181, 182, 183, 184, + 185, 187, 188, 189, 190, 191, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 176, 186, 144, 145, 146, + 147, 148, 150, 155, 157, 158, 159, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 160, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 148, 156, 158, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 139, 142, + 196, 218, 235, 236, 39, 46, 58, 95, + 169, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 
90, 97, + 122, 160, 171, 176, 187, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 151, 155, 196, 218, 235, 236, 39, + 46, 58, 95, 191, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 149, 158, 160, 188, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 176, 190, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 132, + 180, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 133, 170, 180, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 130, + 161, 173, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 166, 179, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 164, 183, 196, 218, + 
235, 236, 39, 46, 58, 95, 173, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 144, 146, 148, 168, 178, + 180, 184, 185, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 128, + 181, 188, 191, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 140, + 143, 170, 174, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 160, + 164, 166, 175, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 144, + 176, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 175, 177, 196, + 218, 235, 236, 39, 46, 58, 95, 191, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 160, 191, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 
210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 170, 175, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 153, 154, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 175, 178, 180, 189, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 158, 159, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 176, 177, 196, 218, 235, 236, 39, 46, + 58, 95, 130, 134, 139, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 163, 167, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 128, + 129, 180, 193, 196, 218, 228, 233, 235, + 236, 238, 255, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 133, 159, 178, 193, 196, + 218, 228, 233, 235, 236, 238, 255, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 
221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 166, + 173, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 135, 147, 196, + 218, 235, 236, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 128, 131, 179, 193, 196, + 218, 228, 233, 235, 236, 238, 255, 39, + 46, 58, 95, 165, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 0, 47, 48, 57, 59, 64, + 65, 90, 91, 96, 97, 122, 123, 128, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 169, 182, 196, 218, + 235, 236, 39, 46, 58, 95, 131, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 140, 141, 187, 189, 196, + 218, 235, 236, 39, 46, 58, 95, 176, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 178, 180, 183, 184, + 190, 191, 196, 218, 235, 236, 39, 46, + 58, 95, 129, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 171, + 175, 181, 182, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 
224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 163, + 170, 172, 173, 196, 218, 235, 236, 39, + 46, 58, 95, 158, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 128, 143, 160, 175, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 185, 187, 196, 218, + 235, 236, 39, 46, 58, 95, 135, 139, + 141, 168, 171, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 196, + 218, 235, 236, 39, 46, 58, 95, 128, + 129, 130, 132, 133, 134, 135, 136, 139, + 140, 141, 146, 147, 150, 151, 152, 153, + 154, 156, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 39, 46, 58, 95, 171, 172, + 189, 190, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 39, 46, 58, 95, 178, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 196, 218, 235, 236, 39, + 46, 58, 95, 133, 134, 137, 168, 169, + 170, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 
220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 163, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 39, 46, + 58, 95, 128, 129, 132, 135, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 133, 134, 196, 218, 235, 236, + 39, 46, 58, 95, 189, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 132, 185, 196, 218, 235, 236, 39, + 46, 58, 95, 144, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 152, 161, 164, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 39, 46, 58, 95, + 188, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 129, 131, 190, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 133, 134, 137, 138, 142, 150, 152, + 161, 164, 193, 196, 218, 228, 233, 235, + 236, 238, 255, 39, 46, 58, 95, 145, + 181, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 0, + 47, 48, 57, 59, 64, 65, 90, 91, + 96, 97, 122, 123, 130, 135, 136, 139, + 141, 176, 177, 196, 218, 235, 236, 39, + 46, 58, 95, 134, 138, 194, 
195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 142, 161, 164, 193, 196, 218, 228, + 233, 235, 236, 238, 255, 39, 46, 58, + 95, 188, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 129, 131, + 190, 191, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 132, + 135, 136, 139, 141, 150, 151, 162, 163, + 196, 218, 235, 236, 39, 46, 58, 95, + 130, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 190, 191, 196, + 218, 235, 236, 39, 46, 58, 95, 151, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 128, 130, 134, 136, + 138, 141, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 131, + 190, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 39, 46, 58, 95, 133, 137, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 142, 148, 151, 161, + 164, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 
90, + 97, 122, 128, 132, 134, 136, 138, 141, + 149, 150, 162, 163, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 129, 131, 190, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 39, 46, 58, 95, + 133, 137, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 142, 150, + 152, 161, 164, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 130, 131, 196, 218, + 235, 236, 39, 46, 58, 95, 138, 150, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 143, 148, 152, 159, + 178, 179, 196, 218, 235, 236, 39, 46, + 58, 95, 177, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 180, + 186, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 135, 142, 196, + 218, 235, 236, 39, 46, 58, 95, 177, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 180, 185, 187, 188, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 
227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 136, 141, 196, 218, + 235, 236, 39, 46, 58, 95, 181, 183, + 185, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 152, 153, 190, + 191, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 177, 191, 196, + 218, 235, 236, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 128, 132, 134, 135, 141, + 151, 153, 188, 196, 218, 235, 236, 39, + 46, 58, 95, 134, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 173, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 130, 133, 146, 159, 165, 171, + 175, 255, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 255, 173, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 255, 173, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 176, 255, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 131, 137, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 
221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 145, 189, 135, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 129, 130, 132, 133, 156, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 133, 144, 154, 176, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 139, 159, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 150, 157, 159, 164, 167, + 168, 170, 173, 143, 145, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 176, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 176, 255, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 131, 137, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 145, 189, 135, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 129, 130, 132, 133, 156, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 133, 144, 154, 176, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 139, 159, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 150, 157, 159, 164, 167, + 168, 170, 173, 143, 145, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 176, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 139, 255, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 
225, 226, 227, 234, 239, 240, 243, + 166, 176, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 171, + 179, 160, 161, 163, 164, 165, 167, 169, + 171, 173, 174, 175, 176, 177, 179, 180, + 181, 182, 183, 184, 185, 186, 187, 188, + 189, 190, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 166, 170, 172, 178, 128, 129, 130, 141, + 156, 157, 158, 159, 160, 162, 164, 168, + 169, 170, 172, 173, 174, 175, 176, 179, + 183, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 129, + 131, 179, 181, 183, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 130, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 153, 154, 155, 160, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 175, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 172, 184, 187, 190, + 191, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 144, 145, + 150, 155, 157, 158, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 160, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 139, 255, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 166, 176, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 171, 179, 160, 161, 163, 164, + 165, 167, 169, 171, 173, 174, 175, 176, + 177, 179, 180, 181, 182, 183, 184, 185, + 186, 187, 188, 189, 190, 191, 
194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 166, 170, 172, 178, 128, + 129, 130, 141, 156, 157, 158, 159, 160, + 162, 164, 168, 169, 170, 172, 173, 174, + 175, 176, 179, 183, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 129, 131, 179, 181, 183, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 130, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 153, 154, 155, 160, 162, + 163, 164, 165, 166, 167, 168, 169, 170, + 171, 175, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 172, + 184, 187, 190, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 144, 145, 150, 155, 157, 158, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 160, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 181, 190, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 176, 183, 184, 185, 186, 191, 192, + 255, 134, 140, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 136, 138, 142, 161, 163, 255, 130, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 131, 137, 190, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 136, 144, 145, 191, + 192, 255, 135, 179, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 129, 130, 132, 133, 144, 170, 176, + 178, 156, 194, 204, 205, 210, 214, 215, + 216, 217, 
219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 133, 144, 154, 160, 191, 171, 176, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 138, 139, 159, + 160, 169, 174, 255, 148, 158, 169, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 150, 164, 167, 173, + 176, 185, 189, 190, 192, 255, 144, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 143, 145, 146, 175, + 176, 255, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 139, + 140, 141, 255, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 166, 176, 178, 255, 186, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 137, 138, 170, 171, 179, + 180, 181, 160, 161, 162, 163, 164, 165, + 166, 167, 168, 169, 170, 171, 172, 173, + 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, + 190, 191, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 129, 130, 131, 132, 135, 137, 138, 139, + 140, 141, 142, 143, 144, 153, 154, 155, + 156, 157, 158, 159, 160, 161, 162, 163, + 164, 165, 167, 168, 169, 170, 172, 173, + 174, 175, 176, 177, 179, 181, 182, 183, + 188, 189, 190, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 133, 134, 136, 152, 180, 184, 185, + 187, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 
239, 240, 243, 235, 236, 128, 129, 130, + 131, 132, 133, 134, 146, 147, 176, 177, + 178, 179, 180, 181, 182, 183, 184, 186, + 187, 188, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 189, 190, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 129, 130, 131, 132, 133, 134, 135, 136, + 137, 139, 140, 141, 144, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 145, 255, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 235, 236, + 95, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 150, 153, 155, + 163, 165, 167, 169, 173, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 153, 155, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 163, 255, 173, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 255, + 173, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 194, 
204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 128, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 176, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 131, 137, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 145, 189, 135, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 129, 130, 132, 133, 156, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 128, 133, 144, 154, 176, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 139, 159, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 150, 157, 159, 164, + 167, 168, 170, 173, 143, 145, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 176, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 176, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 131, 137, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 145, 189, 135, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 129, 130, 132, 133, 156, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 128, 133, 144, 154, 176, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 139, 159, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 
223, 224, 225, 226, 227, + 234, 239, 240, 243, 150, 157, 159, 164, + 167, 168, 170, 173, 143, 145, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 176, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 139, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 166, 176, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 171, 179, 160, 161, 163, 164, 165, 167, + 169, 171, 173, 174, 175, 176, 177, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 166, 170, 172, 178, 128, 129, 130, + 141, 156, 157, 158, 159, 160, 162, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 179, 183, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 129, 131, 179, 181, 183, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 130, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 153, 154, 155, 160, 162, 163, 164, + 165, 166, 167, 168, 169, 170, 171, 175, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 172, 184, 187, + 190, 191, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 144, + 145, 150, 155, 157, 158, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 160, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, 
+ 227, 234, 239, 240, 243, 139, 255, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 166, 176, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 171, 179, 160, 161, 163, + 164, 165, 167, 169, 171, 173, 174, 175, + 176, 177, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 166, 170, 172, 178, + 128, 129, 130, 141, 156, 157, 158, 159, + 160, 162, 164, 168, 169, 170, 172, 173, + 174, 175, 176, 179, 183, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 129, 131, 179, 181, 183, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 128, 130, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 153, 154, 155, 160, + 162, 163, 164, 165, 166, 167, 168, 169, + 170, 171, 175, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 172, 184, 187, 190, 191, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 144, 145, 150, 155, 157, 158, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 160, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 189, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 132, 185, 144, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 152, 161, 164, 255, 188, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 129, 
131, 190, 255, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 133, 134, 137, 138, + 142, 150, 152, 161, 164, 255, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 131, 134, 137, 138, 142, + 144, 146, 175, 178, 180, 182, 255, 134, + 138, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 142, 161, + 164, 255, 188, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 129, 131, 190, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 132, 135, 136, 139, 141, 150, + 151, 162, 163, 130, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 190, 191, 151, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 130, 134, 136, 138, 141, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 131, 190, 255, + 133, 137, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 142, + 148, 151, 161, 164, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 132, 134, 136, 138, 141, + 149, 150, 162, 163, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 129, 131, 190, 255, 133, 137, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 142, 150, 152, 161, + 164, 255, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 130, + 131, 138, 150, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 
224, 225, 226, 227, 234, 239, 240, 243, + 143, 148, 152, 159, 178, 179, 177, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 180, 186, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 135, 142, 177, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 180, 185, 187, 188, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 136, 141, 181, 183, + 185, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 152, 153, + 190, 191, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 177, + 191, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 132, + 134, 135, 141, 151, 153, 188, 134, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 171, 190, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 150, 153, 158, 160, 162, 164, 167, 173, + 177, 180, 143, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 130, 141, 154, 157, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 157, 159, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 146, 148, 178, 180, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 146, 147, 178, 179, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 
227, 234, 239, + 240, 243, 180, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 148, 156, 158, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 139, 142, 169, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 160, + 171, 176, 187, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 151, 155, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 149, 158, 160, 188, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 176, 190, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 128, 132, 180, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 133, 170, 180, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 130, 161, 173, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 166, 179, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 164, 183, 173, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 144, 146, 148, 168, 178, 180, + 184, 185, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 128, + 181, 188, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 140, 143, 170, 174, 194, 204, 205, 210, + 214, 
215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 160, 164, 166, 175, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 144, 176, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 175, 177, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 160, 191, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 170, 175, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 153, 154, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 175, 178, 180, 189, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 158, 159, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 176, 177, 130, 134, 139, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 163, 167, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 129, 180, 255, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 133, 159, 178, + 255, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 166, 173, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 135, 147, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 131, 179, 255, + 194, 204, 205, 210, 214, 215, 
216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 129, 164, 166, + 255, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 169, 182, + 131, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 140, 141, + 187, 189, 176, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 178, 180, 183, 184, 190, 191, 129, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 171, 175, 181, 182, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 163, 170, 172, + 173, 158, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 143, 160, 175, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 185, 187, 135, + 139, 141, 168, 171, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 129, 130, 132, 133, 134, 135, + 136, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 156, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 171, 172, 189, 190, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 178, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 133, 134, 137, 168, 169, 170, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 163, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 
129, 132, 135, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 133, 134, 182, 183, 184, 194, + 204, 205, 210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 191, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 192, + 255, 128, 146, 147, 148, 152, 153, 154, + 155, 156, 158, 159, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 129, 157, 177, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 150, 153, 155, 163, 165, 167, + 169, 173, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 153, 155, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 163, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 170, 173, + 181, 183, 186, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 128, 
255, 39, + 46, 58, 95, 173, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 0, 47, + 48, 57, 59, 64, 65, 90, 91, 96, + 97, 122, 123, 127, 196, 218, 235, 236, + 181, 190, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 176, + 183, 184, 185, 186, 191, 192, 255, 130, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 131, 137, 190, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 136, 144, 145, + 191, 192, 255, 135, 179, 180, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 129, 130, 132, 133, 144, + 170, 176, 178, 156, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 133, 144, 154, 160, 191, 171, + 176, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 138, + 139, 159, 160, 169, 174, 255, 148, 158, + 169, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 150, 164, + 167, 173, 176, 185, 189, 190, 192, 255, + 144, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 143, 145, + 146, 175, 176, 255, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 176, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 
194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 131, 137, + 196, 218, 235, 236, 39, 46, 58, 95, + 191, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 145, 189, 196, + 218, 235, 236, 39, 46, 58, 95, 135, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 129, 130, 132, 133, + 196, 218, 235, 236, 39, 46, 58, 95, + 156, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 128, 133, 144, + 154, 196, 218, 235, 236, 39, 46, 58, + 95, 176, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 139, 159, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 150, 157, 159, 164, + 167, 168, 170, 173, 196, 218, 235, 236, + 39, 46, 58, 95, 143, 145, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 176, 193, 196, 218, 228, 233, + 235, 236, 238, 255, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 139, 140, 141, 255, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 166, 176, 178, 255, 186, 194, + 204, 205, 
210, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 239, 240, 243, 128, 137, 138, 170, + 171, 179, 180, 181, 160, 161, 162, 163, + 164, 165, 166, 167, 168, 169, 170, 171, + 172, 173, 174, 175, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 185, 186, 187, + 188, 189, 190, 191, 194, 204, 205, 210, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 239, 240, + 243, 128, 129, 130, 131, 132, 135, 137, + 138, 139, 140, 141, 142, 143, 144, 153, + 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 167, 168, 169, 170, + 172, 173, 174, 175, 176, 177, 179, 181, + 182, 183, 188, 189, 190, 191, 194, 204, + 205, 210, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 239, 240, 243, 133, 134, 136, 152, 180, + 184, 185, 187, 128, 129, 130, 131, 132, + 133, 134, 146, 147, 176, 177, 178, 179, + 180, 181, 182, 183, 184, 194, 204, 205, + 210, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 239, + 240, 243, 128, 130, 132, 133, 134, 136, + 137, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 128, 146, + 147, 148, 152, 153, 154, 155, 156, 158, + 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 176, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 129, + 157, 177, 255, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 171, 190, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 150, 153, 158, 160, 162, 164, 167, + 173, 177, 180, 196, 218, 235, 236, 39, + 46, 58, 95, 143, 194, 195, 198, 199, + 203, 204, 205, 
206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 130, 141, 154, 157, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 157, 159, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 146, 148, + 178, 180, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 146, 147, + 178, 179, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 180, 193, + 196, 218, 228, 233, 235, 236, 238, 255, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 140, 143, 170, + 174, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 160, 164, 166, + 175, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 144, 176, 196, + 218, 235, 236, 39, 46, 
58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 175, 177, 196, 218, 235, + 236, 39, 46, 58, 95, 191, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 160, 191, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 170, 175, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 153, 154, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 175, 178, 180, 189, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 158, 159, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 176, 177, + 196, 218, 235, 236, 39, 46, 58, 95, + 130, 134, 139, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, 
+ 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 163, + 167, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 128, 129, 180, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 158, 159, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 160, 255, 172, 173, 174, 175, 180, 181, + 182, 183, 184, 185, 187, 188, 189, 190, + 191, 194, 204, 205, 210, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 239, 240, 243, 176, 186, + 144, 145, 146, 147, 148, 150, 155, 157, + 158, 159, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, 160, + 194, 204, 205, 210, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 239, 240, 243, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 133, 159, 178, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 166, 173, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 135, + 147, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 128, 131, 179, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 165, 194, 
195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 0, 47, 48, 57, + 59, 64, 65, 90, 91, 96, 97, 122, + 123, 128, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 169, 182, + 196, 218, 235, 236, 39, 46, 58, 95, + 131, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 140, 141, 187, + 189, 196, 218, 235, 236, 39, 46, 58, + 95, 176, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 178, 180, + 183, 184, 190, 191, 196, 218, 235, 236, + 39, 46, 58, 95, 129, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 171, 175, 181, 182, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 163, 170, 172, 173, 196, 218, 235, + 236, 39, 46, 58, 95, 158, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 143, + 160, 175, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 
207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 185, 187, + 196, 218, 235, 236, 39, 46, 58, 95, + 135, 139, 141, 168, 171, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 128, 129, 130, 132, 133, 134, 135, + 136, 139, 140, 141, 146, 147, 150, 151, + 152, 153, 154, 156, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 46, 58, 95, + 171, 172, 189, 190, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 46, 58, 95, + 178, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 133, 134, 137, + 168, 169, 170, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 196, + 218, 235, 236, 39, 46, 58, 95, 163, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 46, 58, 95, 128, 129, 132, 135, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 133, 134, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 
205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 148, 156, 158, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 139, 142, + 196, 218, 235, 236, 39, 46, 58, 95, + 169, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 160, 171, 176, 187, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 151, 155, 196, 218, 235, 236, 39, + 46, 58, 95, 191, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 149, 158, 160, 188, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 176, 190, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 132, + 180, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 
219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 133, 170, 180, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 128, 130, + 161, 173, 196, 218, 235, 236, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 166, 179, + 196, 218, 235, 236, 39, 46, 58, 95, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 164, 183, 196, 218, + 235, 236, 39, 46, 58, 95, 173, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 144, 146, 148, 168, 178, + 180, 184, 185, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 128, + 181, 188, 191, 196, 218, 235, 236, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 0, 47, 48, 57, 59, 64, 65, + 90, 91, 96, 97, 122, 123, 138, 196, + 218, 235, 236, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 166, 176, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 
223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 171, 179, 196, 218, 235, 236, 39, + 46, 58, 95, 160, 161, 163, 164, 165, + 167, 169, 171, 173, 174, 175, 176, 177, + 179, 180, 181, 182, 183, 184, 185, 186, + 187, 188, 189, 190, 191, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 166, 170, 172, 178, 196, 218, 235, + 236, 39, 46, 58, 95, 128, 129, 130, + 141, 156, 157, 158, 159, 160, 162, 164, + 168, 169, 170, 172, 173, 174, 175, 176, + 179, 183, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 39, 46, 58, 95, 128, 129, + 131, 179, 181, 183, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 196, 218, 235, 236, 39, 46, 58, 95, + 128, 130, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 196, 218, + 235, 236, 39, 46, 58, 95, 153, 154, + 155, 160, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 175, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 172, 184, 187, 190, 191, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 196, 218, 235, 236, 39, 46, + 58, 95, 144, 145, 150, 155, 157, 158, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 
221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 196, 218, 235, 236, + 39, 46, 58, 95, 160, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 196, 218, 235, 236, 39, 46, 58, + 95, 189, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 132, 185, + 196, 218, 235, 236, 39, 46, 58, 95, + 144, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 152, 161, 164, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 188, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 129, 131, 190, 193, 196, 218, + 228, 233, 235, 236, 238, 255, 39, 46, + 58, 95, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 133, 134, + 137, 138, 142, 150, 152, 161, 164, 193, + 196, 218, 228, 233, 235, 236, 238, 255, + 39, 46, 58, 95, 145, 181, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 0, 47, 48, 57, + 59, 64, 65, 90, 91, 96, 97, 122, + 123, 130, 135, 136, 139, 141, 176, 177, + 196, 218, 235, 236, 39, 46, 58, 95, + 134, 138, 194, 195, 198, 199, 203, 204, + 205, 206, 207, 210, 212, 213, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 237, 239, 240, 243, + 48, 57, 65, 90, 97, 122, 142, 161, + 164, 193, 196, 218, 228, 233, 235, 236, + 238, 255, 39, 46, 58, 95, 
188, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 129, 131, 190, 191, 196, + 218, 235, 236, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 128, 132, 135, 136, 139, + 141, 150, 151, 162, 163, 196, 218, 235, + 236, 39, 46, 58, 95, 130, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 190, 191, 196, 218, 235, 236, + 39, 46, 58, 95, 151, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 128, 130, 134, 136, 138, 141, 196, + 218, 235, 236, 39, 46, 58, 95, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 128, 131, 190, 193, 196, + 218, 228, 233, 235, 236, 238, 255, 39, + 46, 58, 95, 133, 137, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 142, 148, 151, 161, 164, 193, 196, + 218, 228, 233, 235, 236, 238, 255, 39, + 46, 58, 95, 194, 195, 198, 199, 203, + 204, 205, 206, 207, 210, 212, 213, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 237, 239, 240, + 243, 48, 57, 65, 90, 97, 122, 128, + 132, 134, 136, 138, 141, 149, 150, 162, + 163, 196, 218, 235, 236, 39, 46, 58, + 95, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 
48, + 57, 65, 90, 97, 122, 129, 131, 190, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 133, 137, 194, + 195, 198, 199, 203, 204, 205, 206, 207, + 210, 212, 213, 214, 215, 216, 217, 219, + 220, 221, 222, 223, 224, 225, 226, 227, + 234, 237, 239, 240, 243, 48, 57, 65, + 90, 97, 122, 142, 150, 152, 161, 164, + 193, 196, 218, 228, 233, 235, 236, 238, + 255, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 130, 131, 196, 218, 235, 236, 39, + 46, 58, 95, 138, 150, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 143, 148, 152, 159, 178, 179, 196, + 218, 235, 236, 39, 46, 58, 95, 177, + 194, 195, 198, 199, 203, 204, 205, 206, + 207, 210, 212, 213, 214, 215, 216, 217, + 219, 220, 221, 222, 223, 224, 225, 226, + 227, 234, 237, 239, 240, 243, 48, 57, + 65, 90, 97, 122, 180, 186, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 135, 142, 196, 218, 235, 236, + 39, 46, 58, 95, 177, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 180, 185, 187, 188, 196, 218, 235, + 236, 39, 46, 58, 95, 194, 195, 198, + 199, 203, 204, 205, 206, 207, 210, 212, + 213, 214, 215, 216, 217, 219, 220, 221, + 222, 223, 224, 225, 226, 227, 234, 237, + 239, 240, 243, 48, 57, 65, 90, 97, + 122, 136, 141, 196, 218, 235, 236, 39, + 46, 58, 95, 181, 183, 185, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 
239, 240, 243, 48, 57, 65, 90, + 97, 122, 152, 153, 190, 191, 196, 218, + 235, 236, 39, 46, 58, 95, 194, 195, + 198, 199, 203, 204, 205, 206, 207, 210, + 212, 213, 214, 215, 216, 217, 219, 220, + 221, 222, 223, 224, 225, 226, 227, 234, + 237, 239, 240, 243, 48, 57, 65, 90, + 97, 122, 177, 191, 196, 218, 235, 236, + 39, 46, 58, 95, 194, 195, 198, 199, + 203, 204, 205, 206, 207, 210, 212, 213, + 214, 215, 216, 217, 219, 220, 221, 222, + 223, 224, 225, 226, 227, 234, 237, 239, + 240, 243, 48, 57, 65, 90, 97, 122, + 128, 132, 134, 135, 141, 151, 153, 188, + 196, 218, 235, 236, 39, 46, 58, 95, + 134, 194, 195, 198, 199, 203, 204, 205, + 206, 207, 210, 212, 213, 214, 215, 216, + 217, 219, 220, 221, 222, 223, 224, 225, + 226, 227, 234, 237, 239, 240, 243, 48, + 57, 65, 90, 97, 122, 196, 218, 235, + 236, 164, 169, 171, 172, 173, 174, 175, + 180, 181, 182, 183, 184, 185, 187, 188, + 189, 190, 191, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 165, 170, 176, 186, 144, 145, 146, 147, + 148, 150, 155, 157, 158, 159, 160, 170, + 171, 172, 175, 194, 204, 205, 210, 214, + 215, 216, 217, 219, 220, 221, 222, 223, + 224, 225, 226, 227, 234, 239, 240, 243, + 161, 169, 194, 204, 205, 210, 214, 215, + 216, 217, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 234, 239, 240, 243, +} + +var _s_single_lengths []byte = []byte{ + 1, 0, 0, 0, 1, 1, 1, 1, + 0, 2, 0, 0, 0, 26, 0, 0, + 0, 1, 1, 1, 0, 0, 2, 1, + 0, 1, 1, 0, 2, 0, 0, 2, + 0, 2, 1, 0, 1, 0, 3, 0, + 0, 1, 21, 0, 0, 1, 0, 0, + 0, 0, 0, 0, 1, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 6, 0, 0, 0, 0, 1, 0, 2, + 0, 0, 15, 0, 0, 0, 3, 0, + 0, 0, 0, 0, 0, 0, 1, 1, + 1, 0, 5, 1, 0, 0, 6, 5, + 1, 1, 0, 1, 0, 19, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 1, + 1, 0, 0, 0, 0, 0, 0, 0, + 4, 0, 0, 0, 0, 1, 0, 6, + 0, 0, 0, 0, 0, 1, 1, 0, + 1, 4, 1, 0, 0, 21, 30, 4, + 0, 0, 0, 0, 1, 0, 2, 2, + 1, 1, 1, 0, 1, 1, 1, 1, + 3, 1, 0, 0, 1, 32, 3, 0, + 0, 0, 1, 1, 4, 2, 1, 1, + 1, 4, 1, 1, 3, 2, 1, 3, + 1, 1, 
1, 3, 1, 2, 1, 0, + 1, 0, 4, 0, 0, 1, 41, 0, + 0, 1, 2, 3, 2, 1, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 1, 0, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 4, 1, 0, 18, 0, + 2, 0, 0, 6, 1, 0, 0, 0, + 0, 1, 0, 2, 1, 0, 0, 1, + 5, 1, 0, 0, 0, 28, 0, 0, + 0, 1, 0, 0, 0, 0, 0, 3, + 0, 0, 2, 0, 0, 0, 1, 0, + 1, 1, 1, 0, 1, 0, 0, 2, + 0, 0, 15, 2, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 0, 0, + 0, 0, 10, 35, 0, 0, 0, 0, + 1, 0, 1, 0, 0, 0, 1, 0, + 0, 0, 0, 0, 2, 0, 0, 0, + 0, 0, 2, 0, 0, 0, 0, 0, + 0, 0, 24, 0, 0, 0, 0, 0, + 2, 0, 2, 0, 1, 0, 1, 2, + 0, 1, 0, 0, 1, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 2, 2, + 0, 10, 0, 0, 0, 0, 0, 0, + 0, 3, 0, 0, 18, 0, 0, 0, + 1, 4, 1, 4, 1, 0, 3, 2, + 2, 2, 1, 0, 0, 1, 5, 0, + 4, 12, 0, 3, 0, 0, 0, 1, + 4, 1, 0, 0, 0, 21, 1, 0, + 0, 0, 1, 1, 1, 2, 0, 2, + 0, 0, 0, 26, 0, 0, 0, 1, + 1, 1, 0, 0, 2, 1, 0, 1, + 1, 0, 2, 0, 0, 2, 0, 2, + 1, 0, 1, 0, 3, 0, 0, 1, + 24, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 0, 1, + 0, 6, 0, 0, 0, 0, 1, 0, + 2, 0, 0, 16, 0, 0, 0, 0, + 3, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 0, 5, 1, 0, 0, + 6, 6, 1, 1, 0, 1, 0, 22, + 0, 0, 0, 0, 0, 1, 0, 0, + 0, 0, 1, 1, 0, 0, 0, 0, + 0, 0, 0, 0, 6, 0, 0, 0, + 0, 1, 0, 7, 0, 0, 0, 0, + 0, 0, 1, 1, 0, 1, 4, 1, + 0, 0, 4, 0, 2, 1, 1, 2, + 21, 1, 0, 0, 0, 1, 1, 4, + 0, 2, 1, 1, 2, 1, 2, 3, + 1, 0, 0, 1, 32, 3, 0, 0, + 1, 2, 4, 2, 1, 1, 1, 4, + 1, 1, 3, 2, 1, 3, 1, 1, + 1, 3, 1, 2, 1, 0, 1, 0, + 4, 0, 0, 1, 43, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 18, 0, 3, 0, 0, + 1, 0, 5, 1, 0, 28, 0, 1, + 0, 0, 3, 0, 2, 0, 0, 0, + 1, 0, 1, 1, 1, 0, 15, 2, + 0, 0, 0, 1, 1, 0, 0, 10, + 35, 1, 1, 0, 0, 2, 0, 25, + 0, 0, 0, 0, 0, 2, 0, 1, + 0, 0, 1, 2, 0, 1, 0, 0, + 1, 0, 0, 0, 10, 0, 0, 0, + 0, 0, 0, 3, 0, 18, 0, 0, + 0, 1, 0, 0, 1, 5, 0, 1, + 4, 1, 0, 0, 0, 1, 1, 0, + 2, 0, 0, 0, 26, 0, 0, 0, + 1, 1, 1, 0, 0, 2, 1, 0, + 1, 1, 0, 2, 0, 0, 2, 0, + 2, 1, 0, 1, 0, 3, 0, 0, + 1, 21, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 6, + 
0, 0, 0, 0, 1, 0, 2, 0, + 0, 15, 0, 0, 0, 3, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, + 0, 6, 2, 0, 0, 0, 6, 5, + 1, 1, 0, 1, 0, 19, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 1, + 1, 0, 0, 0, 0, 0, 0, 0, + 4, 0, 0, 0, 0, 1, 0, 6, + 0, 0, 0, 0, 0, 1, 1, 0, + 1, 4, 1, 0, 0, 5, 2, 0, + 2, 2, 1, 1, 3, 1, 2, 3, + 1, 0, 0, 1, 32, 3, 0, 0, + 1, 2, 4, 2, 1, 1, 1, 4, + 1, 1, 3, 2, 1, 3, 1, 1, + 1, 3, 1, 2, 1, 0, 1, 0, + 4, 0, 0, 1, 43, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 18, 2, 3, 0, 0, 1, + 0, 5, 1, 0, 28, 1, 0, 0, + 3, 0, 2, 0, 0, 0, 1, 0, + 1, 1, 1, 0, 15, 1, 1, 2, + 1, 4, 0, 0, 10, 35, 1, 1, + 0, 2, 0, 25, 0, 0, 0, 0, + 2, 0, 1, 0, 0, 1, 2, 0, + 1, 0, 0, 1, 0, 0, 10, 0, + 0, 0, 0, 3, 0, 18, 0, 0, + 0, 0, 0, 1, 5, 0, 1, 4, + 1, 0, 0, 0, 1, 2, 3, 1, + 0, 0, 1, 32, 3, 0, 0, 1, + 2, 4, 2, 1, 1, 1, 4, 1, + 1, 3, 2, 1, 3, 1, 1, 1, + 3, 1, 2, 1, 0, 1, 0, 4, + 0, 0, 1, 43, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 18, 0, 3, 0, 0, 1, 0, + 10, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 0, 2, 0, 0, 0, 26, + 0, 0, 0, 1, 1, 1, 0, 0, + 2, 1, 0, 1, 1, 0, 2, 0, + 0, 2, 0, 2, 1, 0, 1, 0, + 3, 0, 0, 1, 21, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 6, 0, 1, 0, 0, 1, + 0, 7, 0, 0, 1, 0, 0, 0, + 15, 0, 0, 0, 3, 0, 0, 0, + 0, 0, 0, 0, 1, 1, 1, 0, + 8, 1, 0, 0, 1, 0, 0, 0, + 6, 5, 1, 1, 0, 1, 0, 19, + 0, 0, 0, 0, 1, 0, 0, 0, + 0, 1, 1, 0, 0, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 0, 2, + 1, 0, 6, 0, 0, 0, 0, 0, + 1, 1, 0, 1, 4, 1, 0, 0, + 0, 28, 1, 0, 0, 3, 0, 2, + 0, 0, 0, 1, 0, 1, 1, 1, + 0, 15, 2, 0, 1, 0, 0, 0, + 10, 35, 1, 1, 0, 2, 0, 25, + 0, 0, 0, 0, 2, 0, 1, 0, + 0, 1, 2, 0, 1, 0, 0, 1, + 0, 0, 10, 0, 0, 0, 0, 4, + 0, 18, 0, 0, 0, 0, 0, 1, + 5, 0, 1, 4, 1, 0, 0, 0, + 4, 0, 2, 1, 2, 2, 1, 3, + 3, 1, 0, 0, 2, 32, 3, 0, + 0, 1, 2, 4, 2, 1, 1, 1, + 4, 1, 1, 3, 2, 1, 3, 1, + 1, 1, 3, 1, 2, 1, 0, 1, + 0, 4, 0, 0, 1, 43, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 18, 1, 4, 0, 0, + 1, 0, 5, 1, 0, 28, 1, 0, + 0, 3, 0, 2, 0, 0, 
0, 1, + 0, 1, 1, 1, 0, 15, 2, 2, + 3, 1, 5, 0, 0, 10, 35, 1, + 1, 0, 2, 0, 25, 0, 0, 0, + 0, 2, 0, 1, 0, 0, 1, 2, + 0, 1, 0, 0, 1, 0, 0, 10, + 0, 0, 0, 0, 3, 0, 18, 0, + 0, 0, 0, 0, 1, 5, 0, 1, + 4, 1, 0, 0, 0, 5, 2, 1, + 1, 3, 1, 1, 3, 0, 0, 1, + 32, 0, 0, 0, 1, 3, 1, 1, + 1, 0, 2, 0, 1, 1, 2, 0, + 3, 0, 1, 0, 2, 1, 2, 1, + 0, 1, 0, 4, 0, 0, 1, 43, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 0, 18, 2, 3, 0, 0, + 0, 0, 5, 1, 0, 28, 1, 0, + 1, 0, 0, 0, 0, 0, 1, 1, + 0, 15, 2, 1, 0, 0, 0, 10, + 35, 1, 0, 0, 1, 0, 23, 0, + 0, 0, 1, 1, 0, 0, 0, 2, + 1, 0, 0, 0, 0, 10, 0, 0, + 0, 3, 0, 18, 0, 0, 0, 0, + 0, 1, 5, 0, 1, 4, 1, 0, + 4, 0, 2, 1, 1, 2, 1, 2, + 3, 1, 0, 0, 1, 32, 3, 0, + 0, 1, 2, 4, 2, 1, 1, 1, + 4, 1, 1, 3, 2, 1, 3, 1, + 1, 1, 3, 1, 2, 1, 0, 1, + 0, 4, 0, 0, 1, 43, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 0, 1, 0, 18, 0, + 3, 0, 0, 1, 0, 10, 1, 0, + 28, 0, 1, 0, 0, 3, 0, 2, + 0, 0, 0, 1, 0, 1, 1, 1, + 0, 15, 2, 0, 0, 1, 1, 0, + 0, 10, 35, 1, 1, 0, 0, 2, + 0, 25, 0, 0, 0, 0, 0, 2, + 0, 1, 0, 0, 1, 2, 0, 1, + 0, 0, 1, 0, 0, 0, 0, 10, + 0, 0, 0, 0, 0, 0, 4, 0, + 18, 0, 0, 0, 1, 0, 0, 1, + 5, 0, 1, 4, 1, 0, 0, 0, + 4, 0, 2, 1, 2, 2, 1, 3, + 3, 1, 0, 0, 2, 32, 3, 0, + 0, 1, 2, 4, 2, 1, 1, 1, + 4, 1, 1, 3, 2, 1, 3, 1, + 1, 1, 3, 1, 2, 1, 0, 1, + 0, 4, 0, 0, 1, 43, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 18, 1, 4, 0, 0, + 1, 0, 5, 1, 0, 28, 1, 0, + 0, 3, 0, 2, 0, 0, 0, 1, + 0, 1, 1, 1, 0, 15, 2, 2, + 3, 1, 5, 0, 0, 10, 35, 1, + 1, 0, 2, 0, 25, 0, 0, 0, + 0, 2, 0, 1, 0, 0, 1, 2, + 0, 1, 0, 0, 1, 0, 0, 10, + 0, 0, 0, 0, 3, 0, 18, 0, + 0, 0, 0, 0, 1, 5, 0, 1, + 4, 1, 0, 0, 0, 1, 0, 0, + 0, 1, 1, 1, 2, 0, 2, 0, + 0, 0, 26, 0, 0, 0, 1, 1, + 1, 0, 0, 2, 1, 0, 1, 1, + 0, 2, 0, 0, 2, 0, 2, 1, + 0, 1, 0, 3, 0, 0, 1, 24, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 0, 1, 0, 6, 0, + 0, 0, 0, 1, 0, 2, 0, 0, + 16, 0, 0, 0, 0, 3, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, + 0, 5, 1, 0, 0, 6, 6, 1, + 
1, 0, 1, 0, 22, 0, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 1, + 1, 0, 0, 0, 0, 0, 0, 0, + 6, 0, 0, 0, 0, 1, 0, 7, + 0, 0, 0, 0, 0, 0, 1, 1, + 0, 1, 4, 1, 0, 0, 4, 0, + 2, 1, 2, 2, 1, 3, 3, 1, + 0, 0, 2, 32, 3, 0, 0, 1, + 2, 4, 2, 1, 1, 1, 4, 1, + 1, 3, 2, 1, 3, 1, 1, 1, + 3, 1, 2, 1, 0, 1, 0, 4, + 0, 0, 1, 43, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 18, 1, 4, 0, 0, 1, 0, + 5, 1, 0, 28, 1, 0, 0, 3, + 0, 2, 0, 0, 0, 1, 0, 1, + 1, 1, 0, 15, 2, 2, 1, 0, + 0, 10, 35, 1, 1, 0, 2, 0, + 25, 0, 0, 0, 0, 2, 0, 1, + 0, 0, 1, 2, 0, 1, 0, 0, + 1, 0, 0, 10, 0, 0, 0, 0, + 3, 0, 18, 0, 0, 0, 0, 0, + 1, 5, 0, 1, 4, 1, 0, 0, + 0, 5, 0, 2, 1, 1, 3, 1, + 2, 3, 1, 0, 0, 1, 32, 3, + 0, 0, 1, 2, 4, 2, 1, 1, + 1, 4, 1, 1, 3, 2, 1, 3, + 1, 1, 1, 3, 1, 2, 1, 0, + 1, 0, 4, 0, 0, 1, 43, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 18, 2, 3, 0, + 0, 1, 0, 5, 1, 0, 28, 1, + 0, 0, 3, 0, 2, 0, 0, 0, + 1, 0, 1, 1, 1, 0, 15, 2, + 1, 1, 0, 0, 10, 35, 1, 1, + 0, 2, 0, 25, 0, 0, 0, 0, + 2, 0, 1, 0, 0, 1, 2, 0, + 1, 0, 0, 1, 0, 0, 10, 0, + 0, 0, 0, 3, 0, 18, 0, 0, + 0, 0, 0, 1, 5, 0, 1, 4, + 1, 0, 0, 0, 4, 0, 2, 1, + 1, 2, 1, 2, 3, 1, 0, 0, + 1, 32, 3, 0, 0, 1, 2, 4, + 2, 1, 1, 1, 4, 1, 1, 3, + 2, 1, 3, 1, 1, 1, 3, 1, + 2, 1, 0, 1, 0, 4, 0, 0, + 1, 43, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 18, + 0, 3, 0, 0, 1, 0, 10, 1, + 0, 28, 1, 0, 0, 3, 0, 2, + 0, 0, 0, 1, 0, 1, 1, 1, + 0, 15, 2, 0, 0, 1, 1, 0, + 0, 10, 35, 1, 1, 0, 2, 0, + 25, 0, 0, 0, 0, 2, 0, 1, + 0, 0, 1, 2, 0, 1, 0, 0, + 1, 0, 0, 10, 0, 0, 0, 0, + 4, 0, 18, 0, 0, 0, 0, 0, + 1, 5, 0, 1, 4, 1, 0, 0, + 0, 4, 0, 2, 1, 1, 2, 1, + 2, 3, 1, 0, 0, 1, 32, 3, + 0, 0, 1, 2, 4, 2, 1, 1, + 1, 4, 1, 1, 3, 2, 1, 3, + 1, 1, 1, 3, 1, 2, 1, 0, + 1, 0, 4, 0, 0, 1, 43, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 18, 0, 3, 0, + 0, 1, 0, 10, 1, 0, 28, 1, + 0, 0, 3, 0, 2, 0, 0, 0, + 1, 0, 1, 1, 1, 0, 15, 2, + 0, 1, 0, 0, 10, 35, 1, 1, + 0, 2, 0, 25, 0, 0, 0, 0, + 2, 0, 1, 
0, 0, 1, 2, 0, + 1, 0, 0, 1, 0, 0, 10, 0, + 0, 0, 0, 4, 0, 18, 0, 0, + 0, 0, 0, 1, 5, 0, 1, 4, + 1, 0, 0, 0, 1, 0, 0, 0, + 1, 1, 1, 1, 0, 2, 0, 0, + 0, 26, 0, 0, 0, 1, 1, 1, + 0, 0, 2, 1, 0, 1, 1, 0, + 2, 0, 0, 2, 0, 2, 1, 0, + 1, 0, 3, 0, 0, 1, 21, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 6, 0, 0, 0, + 0, 1, 0, 2, 0, 0, 15, 0, + 0, 0, 3, 0, 0, 0, 0, 0, + 0, 0, 1, 1, 1, 0, 5, 1, + 0, 0, 6, 5, 1, 1, 0, 1, + 0, 19, 0, 0, 0, 0, 1, 0, + 0, 0, 0, 1, 1, 0, 0, 0, + 0, 0, 0, 0, 4, 0, 0, 0, + 0, 1, 0, 6, 0, 0, 0, 0, + 0, 1, 1, 0, 1, 4, 1, 0, + 0, 4, 0, 2, 1, 1, 1, 1, + 1, 3, 1, 0, 0, 1, 32, 3, + 0, 0, 1, 1, 4, 2, 1, 1, + 1, 4, 1, 1, 3, 2, 1, 3, + 1, 1, 1, 3, 1, 2, 1, 0, + 1, 0, 4, 0, 0, 1, 41, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 18, 0, 2, 0, + 0, 1, 0, 5, 1, 0, 28, 1, + 0, 0, 3, 0, 2, 0, 0, 0, + 1, 0, 1, 1, 1, 0, 15, 2, + 0, 1, 0, 0, 10, 35, 1, 1, + 0, 2, 0, 24, 0, 0, 0, 0, + 2, 0, 2, 0, 0, 1, 2, 0, + 1, 0, 0, 1, 0, 0, 10, 0, + 0, 0, 0, 3, 0, 18, 0, 0, + 0, 0, 0, 1, 5, 0, 1, 4, + 1, 0, 0, 0, 4, 0, 2, 1, + 2, 2, 1, 3, 3, 1, 0, 0, + 2, 32, 3, 0, 0, 1, 2, 4, + 2, 1, 1, 1, 4, 1, 1, 3, + 2, 1, 3, 1, 1, 1, 3, 1, + 2, 1, 0, 1, 0, 4, 0, 0, + 1, 43, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 18, + 1, 4, 0, 0, 1, 0, 5, 1, + 0, 28, 1, 0, 0, 3, 0, 2, + 0, 0, 0, 1, 0, 1, 1, 1, + 0, 15, 2, 2, 1, 0, 0, 10, + 35, 1, 1, 0, 2, 0, 25, 0, + 0, 0, 0, 2, 0, 1, 0, 0, + 1, 2, 0, 1, 0, 0, 1, 0, + 0, 10, 0, 0, 0, 0, 3, 0, + 18, 0, 0, 0, 0, 0, 1, 5, + 0, 1, 4, 1, 0, 0, 0, 4, + 0, 2, 1, 1, 2, 1, 2, 3, + 1, 0, 0, 1, 32, 3, 0, 0, + 1, 2, 4, 2, 1, 1, 1, 4, + 1, 1, 3, 2, 1, 3, 1, 1, + 1, 3, 1, 2, 1, 0, 1, 0, + 4, 0, 0, 1, 43, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 18, 0, 3, 0, 0, 1, + 0, 10, 1, 0, 28, 1, 0, 0, + 3, 0, 2, 0, 0, 0, 1, 0, + 1, 1, 1, 0, 15, 2, 0, 1, + 0, 0, 10, 35, 1, 1, 0, 2, + 0, 25, 0, 0, 0, 0, 2, 0, + 1, 0, 0, 1, 2, 0, 1, 0, + 0, 1, 0, 0, 10, 0, 0, 0, + 0, 4, 0, 18, 0, 0, 0, 
0, + 0, 1, 5, 0, 1, 4, 1, 0, + 0, 0, 1, 0, 0, 0, 1, 1, + 1, 1, 0, 2, 0, 0, 0, 26, + 21, 6, 2, 15, 5, 6, 1, 3, + 0, 1, 2, 4, 2, 1, 1, 1, + 4, 1, 1, 3, 2, 1, 3, 1, + 1, 1, 3, 1, 2, 0, 0, 4, + 0, 0, 0, 1, 0, 5, 0, 2, + 1, 1, 3, 1, 2, 3, 1, 0, + 0, 1, 32, 3, 0, 0, 1, 2, + 4, 2, 1, 1, 1, 4, 1, 1, + 3, 2, 1, 3, 1, 1, 1, 3, + 1, 2, 1, 0, 1, 0, 4, 0, + 0, 1, 45, 0, 0, 1, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 18, 2, 3, 0, 0, 1, + 0, 7, 1, 0, 0, 0, 0, 1, + 0, 0, 0, 1, 1, 1, 1, 0, + 2, 0, 0, 0, 26, 0, 0, 0, + 1, 1, 1, 0, 0, 2, 1, 0, + 1, 1, 0, 2, 0, 0, 2, 0, + 2, 1, 0, 1, 0, 3, 0, 0, + 1, 23, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 6, 0, 0, 0, 0, 1, 0, + 7, 0, 0, 0, 0, 0, 16, 0, + 0, 0, 3, 0, 0, 0, 0, 0, + 0, 0, 1, 1, 1, 0, 0, 2, + 0, 0, 5, 1, 0, 0, 0, 6, + 5, 1, 1, 0, 1, 0, 19, 0, + 0, 0, 0, 1, 0, 0, 0, 0, + 1, 1, 0, 0, 0, 0, 0, 0, + 0, 4, 0, 0, 0, 0, 1, 0, + 6, 0, 0, 0, 0, 0, 1, 1, + 0, 1, 4, 1, 28, 1, 0, 0, + 3, 0, 2, 0, 0, 0, 1, 0, + 1, 1, 1, 0, 0, 2, 0, 0, + 15, 2, 1, 1, 0, 0, 10, 35, + 1, 1, 0, 2, 0, 25, 0, 0, + 0, 0, 2, 0, 1, 0, 0, 1, + 2, 0, 1, 0, 0, 1, 0, 0, + 10, 0, 0, 0, 0, 3, 0, 18, + 0, 0, 0, 0, 0, 1, 5, 0, + 1, 4, 1, 0, 0, 0, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 1, 0, 3, 0, 1, + 0, 0, 1, 0, 0, 0, 1, 1, + 1, 1, 0, 2, 0, 0, 0, 26, + 0, 0, 0, 1, 1, 1, 0, 0, + 2, 1, 0, 1, 1, 0, 2, 0, + 0, 2, 0, 2, 1, 0, 1, 0, + 3, 0, 0, 1, 21, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 6, 0, 0, 0, 0, 1, + 0, 2, 0, 0, 15, 0, 0, 0, + 3, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 0, 5, 1, 0, 0, + 6, 5, 1, 1, 0, 1, 0, 19, + 0, 0, 0, 0, 1, 0, 0, 0, + 0, 1, 1, 0, 0, 0, 0, 0, + 0, 0, 4, 0, 0, 0, 0, 1, + 0, 6, 0, 0, 0, 0, 0, 1, + 1, 0, 1, 4, 1, 0, 0, 0, + 0, 4, 5, 0, 2, 1, 1, 3, + 1, 2, 3, 1, 0, 0, 1, 32, + 3, 0, 0, 1, 2, 4, 2, 1, + 1, 1, 4, 1, 1, 3, 2, 1, + 3, 1, 1, 1, 3, 1, 2, 1, + 0, 1, 0, 4, 0, 0, 1, 43, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 18, 2, 3, + 0, 0, 1, 0, 5, 1, 0, 
28, + 1, 0, 0, 3, 0, 2, 0, 0, + 0, 1, 0, 1, 1, 1, 0, 15, + 2, 1, 1, 0, 0, 10, 35, 1, + 1, 0, 2, 0, 25, 0, 0, 0, + 0, 2, 0, 1, 0, 0, 1, 2, + 0, 1, 0, 0, 1, 0, 0, 10, + 0, 0, 0, 0, 3, 0, 18, 0, + 0, 0, 0, 0, 1, 5, 0, 1, + 4, 1, 0, 0, 0, 1, 0, 0, + 0, 1, 1, 1, 1, 0, 2, 0, + 0, 0, 26, 0, 0, 0, 1, 1, + 1, 0, 0, 2, 1, 0, 1, 1, + 0, 2, 0, 0, 2, 0, 2, 1, + 0, 1, 0, 3, 0, 0, 1, 23, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 6, 0, 0, + 0, 0, 1, 0, 7, 0, 0, 16, + 0, 0, 0, 3, 0, 0, 0, 0, + 0, 0, 0, 1, 1, 1, 0, 5, + 1, 0, 0, 0, 6, 5, 1, 1, + 0, 1, 0, 19, 0, 0, 0, 0, + 1, 0, 0, 0, 0, 1, 1, 0, + 0, 0, 0, 0, 0, 0, 4, 0, + 0, 0, 0, 1, 0, 6, 0, 0, + 0, 0, 0, 1, 1, 0, 1, 4, + 1, 0, 0, 1, 0, 0, 0, 1, + 1, 1, 1, 0, 2, 0, 0, 0, + 26, 0, 0, 0, 1, 1, 1, 0, + 0, 2, 1, 0, 1, 1, 0, 2, + 0, 0, 2, 0, 2, 1, 0, 1, + 0, 3, 0, 0, 1, 21, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 6, 0, 1, 0, 0, + 1, 0, 7, 0, 0, 1, 0, 0, + 0, 15, 0, 0, 0, 3, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, + 0, 8, 1, 0, 0, 0, 0, 6, + 5, 1, 1, 0, 1, 0, 19, 0, + 0, 0, 0, 1, 0, 0, 0, 0, + 1, 1, 0, 0, 0, 0, 0, 0, + 0, 4, 0, 0, 0, 0, 2, 1, + 0, 6, 0, 0, 0, 0, 0, 1, + 1, 0, 1, 4, 1, 0, 0, 0, + 1, 0, 0, 0, 1, 1, 1, 1, + 0, 2, 0, 0, 0, 26, 0, 0, + 0, 1, 1, 1, 0, 0, 2, 1, + 0, 1, 1, 0, 2, 0, 0, 2, + 0, 2, 1, 0, 1, 0, 3, 0, + 0, 1, 21, 0, 0, 1, 0, 0, + 0, 0, 0, 0, 1, 0, 0, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 6, 0, 0, 0, 0, 1, 0, 2, + 0, 0, 15, 0, 0, 0, 3, 0, + 0, 0, 0, 0, 0, 0, 1, 1, + 1, 0, 5, 1, 0, 0, 6, 5, + 1, 1, 0, 1, 0, 19, 0, 0, + 0, 0, 1, 0, 0, 0, 0, 1, + 1, 0, 0, 0, 0, 0, 0, 0, + 4, 0, 0, 0, 0, 1, 0, 6, + 0, 0, 0, 0, 0, 1, 1, 0, + 1, 4, 1, 0, 0, 0, 1, 1, + 0, 0, 0, 1, 1, 1, 1, 0, + 2, 0, 0, 0, 26, 0, 0, 0, + 1, 1, 1, 0, 0, 2, 1, 0, + 1, 1, 0, 2, 0, 0, 2, 0, + 2, 1, 0, 1, 0, 3, 0, 0, + 1, 21, 0, 0, 1, 0, 0, 0, + 0, 0, 0, 1, 0, 0, 1, 0, + 0, 0, 0, 0, 0, 1, 0, 6, + 0, 0, 0, 0, 1, 0, 2, 0, + 0, 15, 0, 0, 0, 3, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, + 0, 5, 1, 0, 0, 6, 5, 1, + 1, 0, 1, 0, 19, 0, 0, 
0, + 0, 1, 0, 0, 0, 0, 1, 1, + 0, 0, 0, 0, 0, 0, 0, 4, + 0, 0, 0, 0, 1, 0, 6, 0, + 0, 0, 0, 0, 1, 1, 0, 1, + 4, 1, 0, 0, 0, 0, 0, 1, + 1, 1, 1, 0, 2, 0, 0, 0, + 26, 21, 6, 2, 15, 5, 6, 1, + 0, 1, 0, 0, 3, 0, 2, 0, + 0, 0, 1, 0, 1, 1, 0, 1, + 5, 0, 2, 1, 1, 3, 1, 2, + 3, 1, 0, 0, 1, 32, 3, 0, + 0, 1, 2, 4, 2, 1, 1, 1, + 4, 1, 1, 3, 2, 1, 3, 1, + 1, 1, 3, 1, 2, 1, 0, 1, + 0, 4, 0, 0, 1, 45, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 1, + 0, 0, 1, 0, 0, 0, 0, 0, + 0, 1, 0, 18, 2, 3, 0, 0, + 1, 0, 7, 1, 0, 28, 1, 0, + 0, 3, 0, 2, 0, 0, 0, 1, + 0, 1, 1, 1, 0, 15, 2, 1, + 1, 0, 0, 10, 35, 1, 1, 0, + 2, 0, 25, 0, 0, 0, 0, 2, + 0, 1, 0, 0, 1, 2, 0, 1, + 0, 0, 1, 0, 0, 10, 0, 0, + 0, 0, 3, 0, 18, 0, 0, 0, + 0, 0, 1, 5, 0, 1, 4, 1, + 0, 0, 0, 0, 0, 0, 1, 1, + 1, 1, 0, 2, 0, 0, 0, 26, + 21, 6, 2, 15, 5, 6, 1, 0, + 0, 2, 0, 1, 0, 0, 0, 35, + 1, 0, 2, 0, 25, 0, 0, 0, + 0, 2, 0, 1, 0, 0, 1, 2, + 0, 1, 0, 0, 1, 0, 0, 10, + 0, 0, 0, 0, 4, 2, 0, 18, + 5, 0, 5, 0, 1, 0, 0, 0, + 1, 1, 1, 1, 0, 2, 0, 0, + 0, 26, 0, 0, 0, 1, 1, 1, + 0, 0, 2, 1, 0, 1, 1, 0, + 2, 0, 0, 2, 0, 2, 1, 0, + 1, 0, 3, 0, 0, 1, 21, 0, + 0, 1, 0, 0, 0, 0, 0, 0, + 1, 0, 0, 1, 0, 0, 0, 0, + 0, 0, 1, 0, 6, 0, 0, 0, + 0, 1, 0, 2, 0, 0, 15, 0, + 0, 0, 3, 0, 0, 0, 0, 0, + 0, 0, 1, 1, 1, 0, 5, 1, + 0, 0, 7, 5, 1, 1, 0, 1, + 0, 19, 0, 0, 0, 0, 1, 0, + 0, 0, 0, 1, 1, 0, 0, 0, + 0, 0, 0, 0, 4, 0, 0, 0, + 0, 1, 0, 6, 0, 0, 0, 0, + 0, 1, 1, 0, 1, 1, 4, 1, + 0, 0, 1, 1, 0, 2, 0, 3, + 0, 0, 1, 0, 2, 0, 35, 21, + 1, 35, 35, 34, 35, 31, 35, 31, + 22, 31, 34, 31, 31, 26, 21, 21, + 35, 34, 26, 21, 34, 30, 35, 31, + 34, 35, 35, 35, 35, 34, 36, 21, + 21, 34, 34, 34, 60, 55, 40, 36, + 49, 39, 40, 35, 23, 23, 23, 22, + 22, 22, 21, 22, 24, 22, 22, 24, + 21, 21, 21, 22, 53, 64, 39, 26, + 49, 34, 34, 34, 21, 34, 34, 35, + 34, 34, 34, 34, 23, 36, 31, 22, + 34, 34, 35, 34, 34, 35, 34, 34, + 34, 34, 34, 34, 35, 34, 34, 34, + 34, 34, 35, 34, 34, 34, 34, 34, + 34, 37, 34, 34, 34, 34, 34, 35, + 34, 35, 35, 35, 34, 35, 34, 34, + 39, 53, 38, 35, 40, 35, 38, 
35, + 35, 35, 34, 36, 36, 35, 34, 35, + 35, 34, 36, 34, 34, 36, 34, 36, + 35, 34, 35, 34, 37, 34, 34, 35, + 22, 21, 22, 21, 22, 21, 21, 22, + 22, 22, 22, 21, 23, 21, 21, 22, + 22, 22, 22, 21, 23, 21, 21, 21, + 47, 42, 27, 23, 36, 26, 27, 22, + 21, 21, 21, 47, 42, 27, 23, 36, + 26, 27, 22, 23, 23, 22, 22, 23, + 22, 23, 24, 22, 21, 21, 22, 53, + 66, 34, 22, 43, 21, 35, 34, 22, + 22, 21, 21, 21, 21, 21, 22, 21, + 22, 21, 21, 21, 22, 22, 22, 22, + 21, 23, 21, 21, 22, 22, 22, 22, + 21, 23, 21, 21, 21, 47, 42, 27, + 23, 36, 26, 27, 22, 21, 21, 21, + 47, 42, 27, 23, 36, 26, 27, 22, + 22, 22, 22, 21, 21, 23, 22, 21, + 22, 22, 21, 23, 21, 21, 23, 21, + 23, 22, 21, 22, 21, 24, 21, 21, + 22, 21, 21, 22, 21, 21, 21, 21, + 21, 21, 22, 21, 21, 22, 21, 21, + 21, 21, 21, 21, 22, 21, 21, 21, + 21, 21, 22, 21, 21, 21, 21, 21, + 21, 24, 21, 21, 21, 21, 21, 21, + 21, 22, 22, 22, 21, 22, 21, 21, + 26, 40, 25, 22, 27, 22, 25, 24, + 21, 22, 49, 34, 34, 34, 34, 26, + 21, 35, 34, 23, 22, 22, 24, 22, + 23, 24, 22, 34, 34, 35, 35, 35, + 35, 34, 36, 21, 21, 22, 53, 66, + 39, 28, 49, 34, 34, 35, 34, 34, + 34, 34, 21, 34, 34, 34, 34, 35, + 34, 34, 34, 34, 34, 34, 37, 34, + 23, 36, 31, 22, 34, 34, 34, 34, + 35, 34, 35, 35, 35, 34, 35, 34, + 34, 39, 53, 38, 35, 40, 35, 38, + 34, 34, 35, 34, 34, 35, 34, 34, + 34, 34, 34, 34, 35, 34, 34, 34, + 34, 60, 55, 40, 36, 49, 39, 40, + 35, 35, 35, 35, 34, 36, 36, 35, + 34, 35, 35, 34, 36, 34, 34, 36, + 34, 36, 35, 34, 35, 34, 37, 34, + 34, 35, 39, 36, 21, +} + +var _s_range_lengths []byte = []byte{ + 0, 1, 1, 1, 1, 2, 2, 1, + 4, 1, 1, 1, 1, 2, 4, 1, + 1, 1, 2, 2, 5, 6, 2, 2, + 5, 1, 3, 2, 3, 5, 2, 3, + 1, 3, 1, 1, 2, 1, 2, 1, + 4, 0, 0, 1, 5, 2, 1, 2, + 2, 1, 2, 1, 0, 2, 1, 2, + 1, 2, 2, 2, 1, 1, 4, 2, + 0, 2, 2, 1, 1, 0, 1, 0, + 1, 1, 0, 2, 1, 1, 1, 2, + 2, 1, 1, 2, 2, 1, 2, 3, + 2, 2, 0, 0, 2, 1, 0, 0, + 0, 0, 1, 4, 1, 0, 2, 1, + 3, 2, 0, 2, 2, 1, 1, 2, + 6, 1, 1, 2, 2, 1, 1, 1, + 0, 1, 1, 1, 1, 0, 2, 0, + 2, 3, 1, 2, 2, 2, 0, 1, + 0, 1, 1, 1, 0, 1, 4, 0, 
+ 3, 0, 1, 1, 4, 1, 4, 3, + 0, 1, 0, 2, 3, 4, 3, 3, + 5, 3, 2, 2, 3, 0, 2, 2, + 1, 1, 1, 3, 6, 8, 9, 8, + 8, 3, 8, 7, 9, 3, 6, 5, + 7, 7, 5, 6, 4, 4, 1, 1, + 2, 1, 2, 3, 5, 0, 3, 1, + 5, 3, 2, 2, 3, 3, 1, 3, + 2, 2, 1, 2, 2, 2, 5, 5, + 1, 2, 2, 1, 2, 1, 3, 2, + 2, 1, 3, 3, 5, 2, 2, 2, + 6, 2, 2, 3, 2, 7, 0, 2, + 2, 1, 1, 5, 2, 1, 1, 1, + 2, 0, 3, 2, 2, 5, 5, 0, + 0, 2, 1, 2, 2, 1, 1, 1, + 3, 3, 3, 2, 2, 3, 1, 2, + 1, 3, 3, 3, 3, 3, 2, 2, + 3, 3, 4, 6, 1, 3, 0, 1, + 1, 2, 1, 5, 3, 1, 1, 1, + 1, 1, 2, 2, 2, 1, 1, 1, + 2, 5, 0, 2, 5, 2, 1, 1, + 0, 2, 1, 2, 3, 2, 2, 1, + 2, 1, 1, 2, 4, 2, 1, 2, + 2, 2, 7, 1, 1, 3, 1, 2, + 1, 1, 0, 3, 1, 3, 1, 3, + 1, 3, 3, 3, 5, 2, 8, 7, + 2, 2, 3, 3, 1, 2, 1, 1, + 2, 1, 3, 1, 1, 1, 1, 1, + 1, 1, 1, 2, 2, 3, 1, 3, + 2, 0, 2, 4, 1, 2, 3, 1, + 0, 3, 0, 2, 3, 1, 0, 0, + 0, 0, 1, 2, 2, 2, 1, 3, + 5, 7, 5, 0, 1, 2, 1, 0, + 1, 1, 1, 0, 1, 1, 0, 1, + 1, 1, 1, 2, 2, 2, 5, 1, + 1, 1, 2, 2, 4, 1, 1, 1, + 4, 2, 7, 7, 4, 2, 6, 1, + 4, 2, 5, 6, 2, 5, 1, 4, + 1, 2, 2, 2, 3, 1, 4, 0, + 0, 1, 6, 3, 1, 2, 2, 1, + 4, 2, 0, 2, 1, 1, 1, 2, + 3, 2, 4, 3, 1, 1, 2, 4, + 2, 0, 2, 2, 1, 1, 0, 1, + 0, 1, 1, 0, 1, 2, 1, 1, + 1, 2, 4, 2, 1, 2, 6, 1, + 3, 3, 2, 3, 0, 0, 2, 1, + 0, 0, 0, 0, 1, 4, 1, 0, + 2, 3, 3, 1, 3, 0, 2, 4, + 1, 2, 2, 6, 1, 3, 2, 2, + 3, 1, 1, 2, 0, 1, 1, 1, + 1, 0, 2, 0, 2, 3, 1, 1, + 2, 2, 2, 0, 1, 0, 1, 1, + 1, 0, 0, 1, 4, 1, 3, 4, + 0, 0, 1, 1, 1, 1, 4, 0, + 1, 4, 1, 3, 4, 3, 4, 5, + 3, 2, 2, 4, 0, 2, 2, 1, + 1, 4, 6, 9, 9, 9, 8, 5, + 8, 8, 9, 4, 6, 7, 7, 8, + 5, 8, 4, 5, 1, 2, 2, 2, + 3, 3, 5, 0, 3, 1, 6, 4, + 3, 5, 5, 1, 4, 3, 2, 3, + 2, 2, 3, 3, 5, 6, 2, 2, + 4, 6, 2, 0, 3, 4, 1, 3, + 2, 5, 0, 2, 1, 1, 3, 3, + 3, 2, 2, 3, 5, 4, 3, 3, + 6, 2, 4, 3, 4, 4, 1, 5, + 4, 3, 3, 1, 1, 2, 5, 0, + 2, 0, 1, 3, 3, 7, 3, 0, + 3, 3, 3, 2, 4, 1, 3, 6, + 3, 4, 8, 7, 2, 4, 3, 3, + 3, 2, 2, 3, 1, 2, 2, 2, + 4, 3, 2, 0, 4, 1, 2, 3, + 1, 3, 2, 2, 2, 1, 3, 0, + 1, 1, 1, 0, 1, 2, 1, 4, + 1, 1, 1, 1, 2, 4, 1, 1, + 1, 2, 2, 5, 6, 2, 2, 5, + 1, 3, 2, 3, 5, 2, 3, 
1, + 3, 1, 1, 2, 1, 2, 1, 4, + 0, 0, 1, 5, 2, 1, 2, 2, + 1, 2, 1, 0, 2, 1, 2, 1, + 2, 2, 2, 1, 1, 4, 2, 0, + 2, 2, 1, 1, 0, 1, 0, 1, + 1, 0, 2, 1, 1, 1, 2, 2, + 1, 1, 2, 2, 1, 2, 3, 2, + 2, 0, 3, 3, 2, 1, 0, 0, + 0, 0, 1, 4, 1, 0, 2, 1, + 3, 2, 0, 2, 2, 1, 1, 2, + 6, 1, 1, 2, 2, 1, 1, 1, + 0, 1, 1, 1, 1, 0, 2, 0, + 2, 3, 1, 2, 2, 2, 0, 1, + 0, 1, 1, 1, 0, 0, 4, 1, + 4, 3, 1, 3, 4, 3, 4, 5, + 3, 2, 2, 4, 0, 2, 2, 1, + 1, 4, 6, 9, 9, 9, 8, 5, + 8, 8, 9, 4, 6, 7, 7, 8, + 5, 8, 4, 5, 1, 2, 2, 2, + 3, 3, 5, 0, 3, 1, 6, 4, + 3, 5, 5, 1, 4, 3, 2, 3, + 2, 2, 3, 3, 5, 6, 2, 2, + 6, 2, 0, 4, 4, 1, 3, 2, + 5, 0, 2, 1, 1, 3, 3, 2, + 2, 3, 5, 4, 3, 3, 6, 2, + 4, 3, 4, 4, 1, 5, 3, 3, + 1, 1, 2, 5, 0, 2, 0, 1, + 3, 7, 3, 0, 3, 3, 3, 4, + 1, 3, 6, 3, 4, 8, 7, 2, + 4, 3, 3, 3, 2, 2, 1, 2, + 2, 3, 2, 0, 4, 1, 2, 3, + 1, 2, 2, 2, 1, 3, 0, 1, + 1, 1, 0, 1, 3, 4, 5, 3, + 2, 2, 4, 0, 2, 2, 1, 1, + 4, 6, 9, 9, 9, 8, 5, 8, + 8, 9, 4, 6, 7, 7, 8, 5, + 8, 4, 5, 1, 2, 2, 2, 3, + 3, 5, 0, 3, 1, 6, 4, 3, + 5, 5, 1, 4, 3, 2, 3, 2, + 2, 3, 3, 5, 6, 2, 2, 6, + 2, 0, 3, 3, 1, 3, 2, 5, + 0, 3, 0, 1, 1, 1, 1, 2, + 2, 1, 4, 1, 1, 1, 1, 2, + 4, 1, 1, 1, 2, 2, 5, 6, + 2, 2, 5, 1, 3, 2, 3, 5, + 2, 3, 1, 3, 1, 1, 2, 1, + 2, 1, 4, 0, 0, 1, 5, 2, + 1, 2, 2, 1, 2, 1, 0, 2, + 1, 2, 1, 2, 2, 2, 1, 1, + 4, 2, 0, 3, 4, 1, 1, 0, + 1, 0, 2, 2, 1, 1, 1, 1, + 0, 2, 1, 1, 1, 2, 2, 1, + 1, 2, 2, 1, 2, 3, 2, 2, + 0, 0, 3, 1, 0, 1, 1, 1, + 0, 0, 0, 0, 1, 4, 1, 0, + 2, 1, 3, 2, 0, 2, 2, 1, + 1, 2, 6, 1, 1, 2, 2, 1, + 1, 1, 0, 1, 1, 1, 1, 0, + 0, 2, 0, 2, 3, 1, 2, 2, + 2, 0, 1, 0, 1, 1, 1, 0, + 3, 1, 3, 3, 2, 2, 3, 5, + 4, 3, 3, 6, 2, 4, 3, 4, + 4, 1, 5, 3, 1, 2, 3, 5, + 0, 2, 0, 1, 3, 7, 3, 0, + 3, 3, 3, 4, 1, 3, 6, 3, + 4, 8, 7, 2, 4, 3, 3, 3, + 2, 2, 1, 2, 2, 3, 2, 0, + 4, 1, 2, 3, 1, 2, 2, 2, + 1, 3, 0, 1, 1, 1, 0, 1, + 0, 1, 4, 1, 3, 4, 4, 3, + 5, 3, 2, 2, 4, 0, 2, 2, + 1, 1, 3, 6, 8, 9, 8, 8, + 4, 8, 8, 9, 4, 6, 6, 7, + 8, 5, 7, 4, 5, 1, 2, 2, + 2, 3, 3, 5, 0, 3, 1, 6, + 3, 3, 5, 5, 1, 3, 3, 2, + 3, 2, 2, 3, 3, 4, 
6, 2, + 2, 6, 2, 0, 4, 4, 1, 3, + 2, 5, 0, 2, 1, 1, 3, 3, + 2, 2, 3, 4, 4, 3, 3, 4, + 2, 4, 3, 4, 4, 1, 5, 3, + 3, 1, 1, 2, 5, 0, 2, 0, + 1, 3, 7, 3, 0, 3, 2, 3, + 4, 1, 3, 5, 3, 4, 8, 7, + 2, 3, 3, 3, 2, 2, 2, 1, + 2, 2, 3, 2, 0, 4, 1, 2, + 3, 1, 2, 2, 2, 1, 3, 0, + 1, 1, 1, 0, 1, 0, 2, 0, + 2, 4, 3, 3, 3, 1, 1, 2, + 0, 1, 1, 1, 2, 4, 7, 9, + 7, 8, 6, 8, 7, 10, 4, 6, + 5, 7, 7, 5, 6, 4, 5, 1, + 2, 2, 2, 3, 3, 3, 0, 3, + 1, 6, 4, 2, 3, 4, 1, 4, + 3, 2, 3, 2, 3, 4, 3, 1, + 1, 3, 2, 0, 4, 4, 1, 2, + 3, 5, 0, 2, 1, 1, 2, 1, + 5, 3, 2, 6, 1, 3, 3, 2, + 3, 1, 5, 3, 2, 1, 5, 0, + 2, 0, 2, 2, 6, 2, 1, 3, + 2, 2, 1, 4, 2, 3, 8, 6, + 3, 2, 2, 4, 2, 1, 2, 2, + 1, 0, 4, 1, 2, 3, 1, 2, + 2, 2, 1, 2, 0, 1, 1, 1, + 0, 1, 4, 1, 3, 4, 3, 4, + 5, 3, 2, 2, 4, 0, 2, 2, + 1, 1, 4, 6, 9, 9, 9, 8, + 5, 8, 8, 9, 4, 6, 7, 7, + 8, 5, 8, 4, 5, 1, 2, 2, + 2, 3, 3, 5, 0, 3, 1, 6, + 4, 3, 5, 5, 1, 4, 3, 2, + 3, 1, 1, 2, 2, 3, 3, 5, + 6, 2, 2, 4, 6, 2, 0, 3, + 3, 1, 3, 2, 5, 0, 3, 3, + 1, 3, 3, 3, 2, 2, 3, 5, + 4, 3, 3, 6, 2, 4, 3, 4, + 4, 1, 5, 3, 3, 1, 1, 3, + 5, 0, 2, 0, 1, 3, 3, 7, + 3, 0, 3, 3, 3, 2, 4, 1, + 3, 6, 3, 4, 8, 7, 2, 4, + 3, 3, 3, 2, 1, 2, 3, 1, + 2, 2, 2, 4, 3, 2, 0, 4, + 1, 2, 3, 1, 3, 2, 2, 2, + 1, 3, 0, 1, 1, 1, 0, 1, + 0, 1, 4, 1, 3, 4, 4, 3, + 5, 3, 2, 2, 4, 0, 2, 2, + 1, 1, 3, 6, 8, 9, 8, 8, + 4, 8, 8, 9, 4, 6, 6, 7, + 8, 5, 7, 4, 5, 1, 2, 2, + 2, 3, 3, 5, 0, 3, 1, 6, + 3, 3, 5, 5, 1, 3, 3, 2, + 3, 2, 2, 3, 3, 4, 6, 2, + 2, 6, 2, 0, 4, 4, 1, 3, + 2, 5, 0, 2, 1, 1, 3, 3, + 2, 2, 3, 4, 4, 3, 3, 4, + 2, 4, 3, 4, 4, 1, 5, 3, + 3, 1, 1, 2, 5, 0, 2, 0, + 1, 3, 7, 3, 0, 3, 2, 3, + 4, 1, 3, 5, 3, 4, 8, 7, + 2, 3, 3, 3, 2, 2, 2, 1, + 2, 2, 3, 2, 0, 4, 1, 2, + 3, 1, 2, 2, 2, 1, 3, 0, + 1, 1, 1, 0, 1, 0, 1, 1, + 1, 1, 2, 2, 2, 5, 1, 1, + 1, 2, 2, 4, 1, 1, 1, 4, + 2, 7, 7, 4, 2, 6, 1, 4, + 2, 5, 6, 2, 5, 1, 4, 1, + 2, 2, 2, 3, 1, 4, 0, 0, + 1, 6, 3, 1, 2, 2, 1, 4, + 2, 0, 2, 1, 2, 3, 2, 4, + 3, 1, 1, 2, 4, 2, 0, 2, + 2, 1, 1, 0, 1, 0, 1, 1, + 0, 1, 2, 1, 1, 1, 2, 4, + 2, 1, 2, 6, 1, 
3, 3, 2, + 3, 0, 0, 2, 1, 0, 0, 0, + 0, 1, 4, 1, 0, 2, 3, 3, + 1, 3, 0, 2, 4, 1, 2, 2, + 6, 1, 3, 2, 2, 3, 1, 2, + 0, 1, 1, 1, 1, 0, 2, 0, + 2, 3, 1, 1, 2, 2, 2, 0, + 1, 0, 1, 1, 1, 0, 0, 1, + 4, 1, 3, 4, 4, 4, 5, 3, + 2, 2, 4, 0, 2, 2, 1, 1, + 4, 6, 9, 9, 9, 8, 5, 8, + 8, 9, 4, 6, 7, 7, 8, 5, + 8, 4, 5, 1, 2, 2, 2, 3, + 3, 5, 0, 3, 1, 6, 4, 3, + 5, 5, 1, 4, 3, 2, 3, 2, + 2, 3, 3, 5, 6, 2, 2, 6, + 2, 0, 4, 4, 1, 3, 2, 5, + 0, 2, 1, 1, 3, 3, 2, 2, + 3, 5, 4, 3, 3, 6, 2, 4, + 3, 4, 4, 1, 5, 3, 1, 2, + 5, 0, 2, 0, 1, 3, 7, 3, + 0, 3, 3, 3, 4, 1, 3, 6, + 3, 4, 8, 7, 2, 4, 3, 3, + 3, 2, 2, 1, 2, 2, 3, 2, + 0, 4, 1, 2, 3, 1, 2, 2, + 2, 1, 3, 0, 1, 1, 1, 0, + 1, 0, 1, 4, 1, 3, 4, 3, + 4, 5, 3, 2, 2, 4, 0, 2, + 2, 1, 1, 4, 6, 9, 9, 9, + 8, 5, 8, 8, 9, 4, 6, 7, + 7, 8, 5, 8, 4, 5, 1, 2, + 2, 2, 3, 3, 5, 0, 3, 1, + 6, 4, 3, 5, 5, 1, 4, 3, + 2, 3, 2, 2, 3, 3, 5, 6, + 2, 2, 6, 2, 0, 4, 4, 1, + 3, 2, 5, 0, 2, 1, 1, 3, + 3, 2, 2, 3, 5, 4, 3, 3, + 6, 2, 4, 3, 4, 4, 1, 5, + 3, 1, 2, 5, 0, 2, 0, 1, + 3, 7, 3, 0, 3, 3, 3, 4, + 1, 3, 6, 3, 4, 8, 7, 2, + 4, 3, 3, 3, 2, 2, 1, 2, + 2, 3, 2, 0, 4, 1, 2, 3, + 1, 2, 2, 2, 1, 3, 0, 1, + 1, 1, 0, 1, 0, 1, 4, 1, + 3, 4, 3, 4, 5, 3, 2, 2, + 4, 0, 2, 2, 1, 1, 4, 6, + 9, 9, 9, 8, 5, 8, 8, 9, + 4, 6, 7, 7, 8, 5, 8, 4, + 5, 1, 2, 2, 2, 3, 3, 5, + 0, 3, 1, 6, 4, 3, 5, 5, + 1, 4, 3, 2, 3, 2, 2, 3, + 3, 5, 6, 2, 2, 6, 2, 0, + 3, 3, 1, 3, 2, 5, 0, 3, + 3, 1, 3, 3, 2, 2, 3, 5, + 4, 3, 3, 6, 2, 4, 3, 4, + 4, 1, 5, 3, 3, 1, 1, 3, + 5, 0, 2, 0, 1, 3, 7, 3, + 0, 3, 3, 3, 4, 1, 3, 6, + 3, 4, 8, 7, 2, 4, 3, 3, + 3, 2, 2, 1, 2, 2, 3, 2, + 0, 4, 1, 2, 3, 1, 2, 2, + 2, 1, 3, 0, 1, 1, 1, 0, + 1, 0, 1, 4, 1, 3, 4, 3, + 4, 5, 3, 2, 2, 4, 0, 2, + 2, 1, 1, 4, 6, 9, 9, 9, + 8, 5, 8, 8, 9, 4, 6, 7, + 7, 8, 5, 8, 4, 5, 1, 2, + 2, 2, 3, 3, 5, 0, 3, 1, + 6, 4, 3, 5, 5, 1, 4, 3, + 2, 3, 2, 2, 3, 3, 5, 6, + 2, 2, 6, 2, 0, 3, 4, 1, + 3, 2, 5, 0, 3, 3, 1, 3, + 3, 2, 2, 3, 5, 4, 3, 3, + 6, 2, 4, 3, 4, 4, 1, 5, + 3, 1, 3, 5, 0, 2, 0, 1, + 3, 7, 3, 0, 3, 3, 3, 4, + 1, 3, 6, 3, 
4, 8, 7, 2, + 4, 3, 3, 3, 2, 2, 1, 2, + 2, 3, 2, 0, 4, 1, 2, 3, + 1, 2, 2, 2, 1, 3, 0, 1, + 1, 1, 0, 1, 0, 1, 1, 1, + 1, 2, 2, 1, 4, 1, 1, 1, + 1, 2, 4, 1, 1, 1, 2, 2, + 5, 6, 2, 2, 5, 1, 3, 2, + 3, 5, 2, 3, 1, 3, 1, 1, + 2, 1, 2, 1, 4, 0, 0, 1, + 5, 2, 1, 2, 2, 1, 2, 1, + 0, 2, 1, 2, 1, 2, 2, 2, + 1, 1, 4, 2, 0, 2, 2, 1, + 1, 0, 1, 0, 1, 1, 0, 2, + 1, 1, 1, 2, 2, 1, 1, 2, + 2, 1, 2, 3, 2, 2, 0, 0, + 2, 1, 0, 0, 0, 0, 1, 4, + 1, 0, 2, 1, 3, 2, 0, 2, + 2, 1, 1, 2, 6, 1, 1, 2, + 2, 1, 1, 1, 0, 1, 1, 1, + 1, 0, 2, 0, 2, 3, 1, 2, + 2, 2, 0, 1, 0, 1, 1, 1, + 0, 0, 1, 4, 1, 3, 4, 3, + 3, 5, 3, 2, 2, 3, 0, 2, + 2, 1, 1, 3, 6, 8, 9, 8, + 8, 3, 8, 7, 9, 3, 6, 5, + 7, 7, 5, 6, 4, 4, 1, 1, + 2, 1, 2, 3, 5, 0, 3, 1, + 5, 3, 3, 5, 5, 1, 2, 2, + 2, 3, 2, 2, 1, 3, 3, 5, + 2, 2, 6, 2, 0, 2, 2, 1, + 3, 2, 5, 0, 2, 1, 1, 3, + 3, 2, 2, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 4, 3, 1, 5, + 2, 1, 2, 5, 0, 2, 0, 1, + 3, 7, 3, 0, 3, 1, 3, 3, + 1, 3, 3, 3, 2, 8, 7, 2, + 2, 3, 3, 1, 2, 1, 1, 2, + 2, 3, 2, 0, 4, 1, 2, 3, + 1, 2, 2, 2, 1, 3, 0, 1, + 1, 1, 0, 1, 0, 1, 4, 1, + 3, 4, 4, 4, 5, 3, 2, 2, + 4, 0, 2, 2, 1, 1, 4, 6, + 9, 9, 9, 8, 5, 8, 8, 9, + 4, 6, 7, 7, 8, 5, 8, 4, + 5, 1, 2, 2, 2, 3, 3, 5, + 0, 3, 1, 6, 4, 3, 5, 5, + 1, 4, 3, 2, 3, 2, 2, 3, + 3, 5, 6, 2, 2, 6, 2, 0, + 4, 4, 1, 3, 2, 5, 0, 2, + 1, 1, 3, 3, 2, 2, 3, 5, + 4, 3, 3, 6, 2, 4, 3, 4, + 4, 1, 5, 3, 1, 2, 5, 0, + 2, 0, 1, 3, 7, 3, 0, 3, + 3, 3, 4, 1, 3, 6, 3, 4, + 8, 7, 2, 4, 3, 3, 3, 2, + 2, 1, 2, 2, 3, 2, 0, 4, + 1, 2, 3, 1, 2, 2, 2, 1, + 3, 0, 1, 1, 1, 0, 1, 0, + 1, 4, 1, 3, 4, 3, 4, 5, + 3, 2, 2, 4, 0, 2, 2, 1, + 1, 4, 6, 9, 9, 9, 8, 5, + 8, 8, 9, 4, 6, 7, 7, 8, + 5, 8, 4, 5, 1, 2, 2, 2, + 3, 3, 5, 0, 3, 1, 6, 4, + 3, 5, 5, 1, 4, 3, 2, 3, + 2, 2, 3, 3, 5, 6, 2, 2, + 6, 2, 0, 3, 4, 1, 3, 2, + 5, 0, 3, 3, 1, 3, 3, 2, + 2, 3, 5, 4, 3, 3, 6, 2, + 4, 3, 4, 4, 1, 5, 3, 1, + 3, 5, 0, 2, 0, 1, 3, 7, + 3, 0, 3, 3, 3, 4, 1, 3, + 6, 3, 4, 8, 7, 2, 4, 3, + 3, 3, 2, 2, 1, 2, 2, 3, + 2, 0, 4, 1, 2, 3, 1, 2, + 2, 2, 1, 3, 0, 1, 1, 1, + 0, 1, 0, 
1, 1, 1, 1, 2, + 2, 1, 4, 1, 1, 1, 1, 2, + 0, 0, 0, 0, 0, 0, 0, 2, + 2, 1, 4, 6, 9, 9, 9, 8, + 5, 8, 8, 9, 4, 6, 7, 7, + 8, 5, 8, 4, 5, 2, 2, 3, + 3, 5, 6, 4, 1, 0, 1, 4, + 1, 3, 4, 3, 4, 5, 3, 2, + 2, 4, 0, 2, 2, 1, 1, 4, + 6, 9, 9, 9, 8, 5, 8, 8, + 9, 4, 6, 7, 7, 8, 5, 8, + 4, 5, 1, 2, 2, 2, 3, 3, + 5, 0, 4, 1, 6, 4, 0, 1, + 3, 5, 5, 1, 4, 3, 2, 3, + 2, 2, 3, 3, 5, 6, 2, 2, + 6, 2, 0, 4, 4, 1, 3, 2, + 5, 0, 2, 1, 2, 3, 1, 0, + 1, 1, 1, 1, 2, 2, 1, 4, + 1, 1, 1, 1, 2, 4, 1, 1, + 1, 2, 2, 5, 6, 2, 2, 5, + 1, 3, 2, 3, 5, 2, 3, 1, + 3, 1, 1, 2, 1, 2, 1, 4, + 0, 1, 1, 5, 2, 0, 1, 1, + 2, 2, 1, 2, 1, 0, 2, 1, + 2, 1, 2, 2, 2, 1, 1, 4, + 2, 0, 2, 2, 1, 1, 0, 1, + 0, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 2, 2, 1, 2, 2, + 2, 1, 2, 3, 2, 2, 0, 1, + 1, 2, 0, 0, 2, 1, 5, 0, + 0, 0, 0, 1, 4, 1, 0, 2, + 1, 3, 2, 0, 2, 2, 1, 1, + 2, 6, 1, 1, 2, 2, 1, 1, + 1, 0, 1, 1, 1, 1, 0, 2, + 0, 2, 3, 1, 2, 2, 2, 0, + 1, 0, 1, 1, 2, 3, 3, 2, + 2, 3, 5, 4, 4, 3, 6, 2, + 4, 3, 4, 4, 0, 1, 1, 2, + 1, 5, 3, 1, 1, 5, 0, 2, + 0, 1, 3, 7, 3, 0, 3, 3, + 3, 4, 1, 3, 6, 3, 4, 8, + 7, 2, 4, 3, 3, 3, 2, 2, + 1, 2, 2, 3, 2, 0, 4, 1, + 2, 3, 1, 2, 2, 2, 1, 3, + 0, 1, 1, 1, 1, 3, 5, 5, + 4, 3, 2, 3, 2, 3, 3, 5, + 6, 2, 2, 6, 4, 4, 3, 2, + 5, 2, 0, 1, 1, 1, 1, 2, + 2, 1, 4, 1, 1, 1, 1, 2, + 4, 1, 1, 1, 2, 2, 5, 6, + 2, 2, 5, 1, 3, 2, 3, 5, + 2, 3, 1, 3, 1, 1, 2, 1, + 2, 1, 4, 0, 0, 1, 5, 2, + 1, 2, 2, 1, 2, 1, 0, 2, + 1, 2, 1, 2, 2, 2, 1, 1, + 4, 2, 0, 2, 2, 1, 1, 0, + 1, 0, 1, 1, 0, 2, 1, 1, + 1, 2, 2, 1, 1, 2, 2, 1, + 2, 3, 2, 2, 0, 0, 2, 1, + 0, 0, 0, 0, 1, 4, 1, 0, + 2, 1, 3, 2, 0, 2, 2, 1, + 1, 2, 6, 1, 1, 2, 2, 1, + 1, 1, 0, 1, 1, 1, 1, 0, + 2, 0, 2, 3, 1, 2, 2, 2, + 0, 1, 0, 1, 1, 1, 0, 1, + 1, 5, 0, 1, 4, 1, 3, 4, + 3, 4, 5, 3, 2, 2, 4, 0, + 2, 2, 1, 1, 4, 6, 9, 9, + 9, 8, 5, 8, 8, 9, 4, 6, + 7, 7, 8, 5, 8, 4, 5, 1, + 2, 2, 2, 3, 3, 5, 0, 3, + 1, 6, 4, 3, 5, 5, 1, 4, + 3, 2, 3, 2, 2, 3, 3, 5, + 6, 2, 2, 6, 2, 0, 4, 4, + 1, 3, 2, 5, 0, 2, 1, 1, + 3, 3, 2, 2, 3, 5, 4, 3, + 3, 6, 2, 4, 3, 4, 4, 1, + 5, 3, 
1, 2, 5, 0, 2, 0, + 1, 3, 7, 3, 0, 3, 3, 3, + 4, 1, 3, 6, 3, 4, 8, 7, + 2, 4, 3, 3, 3, 2, 2, 1, + 2, 2, 3, 2, 0, 4, 1, 2, + 3, 1, 2, 2, 2, 1, 3, 0, + 1, 1, 1, 0, 1, 0, 1, 1, + 1, 1, 2, 2, 1, 4, 1, 1, + 1, 1, 2, 4, 1, 1, 1, 2, + 2, 5, 6, 2, 2, 5, 1, 3, + 2, 3, 5, 2, 3, 1, 3, 1, + 1, 2, 1, 2, 1, 4, 0, 1, + 1, 5, 2, 1, 2, 2, 1, 2, + 1, 0, 2, 1, 2, 1, 2, 2, + 2, 1, 1, 4, 2, 0, 2, 2, + 1, 1, 0, 1, 0, 1, 1, 1, + 2, 1, 1, 1, 2, 2, 1, 2, + 2, 2, 1, 2, 3, 2, 2, 0, + 0, 2, 2, 5, 0, 0, 0, 0, + 1, 4, 1, 0, 2, 1, 3, 2, + 0, 2, 2, 1, 1, 2, 6, 1, + 1, 2, 2, 1, 1, 1, 0, 1, + 1, 1, 1, 0, 2, 0, 2, 3, + 1, 2, 2, 2, 0, 1, 0, 1, + 1, 1, 0, 0, 1, 1, 1, 1, + 2, 2, 1, 4, 1, 1, 1, 1, + 2, 4, 1, 1, 1, 2, 2, 5, + 6, 2, 2, 5, 1, 3, 2, 3, + 5, 2, 3, 1, 3, 1, 1, 2, + 1, 2, 1, 4, 0, 0, 1, 5, + 2, 1, 2, 2, 1, 2, 1, 0, + 2, 1, 2, 1, 2, 2, 2, 1, + 1, 4, 2, 0, 3, 4, 1, 1, + 0, 1, 0, 2, 2, 1, 1, 1, + 1, 0, 2, 1, 1, 1, 2, 2, + 1, 1, 2, 2, 1, 2, 3, 2, + 2, 0, 0, 3, 1, 1, 1, 0, + 0, 0, 0, 1, 4, 1, 0, 2, + 1, 3, 2, 0, 2, 2, 1, 1, + 2, 6, 1, 1, 2, 2, 1, 1, + 1, 0, 1, 1, 1, 1, 0, 0, + 2, 0, 2, 3, 1, 2, 2, 2, + 0, 1, 0, 1, 1, 1, 0, 1, + 0, 1, 1, 1, 1, 2, 2, 1, + 4, 1, 1, 1, 1, 2, 4, 1, + 1, 1, 2, 2, 5, 6, 2, 2, + 5, 1, 3, 2, 3, 5, 2, 3, + 1, 3, 1, 1, 2, 1, 2, 1, + 4, 0, 0, 1, 5, 2, 1, 2, + 2, 1, 2, 1, 0, 2, 1, 2, + 1, 2, 2, 2, 1, 1, 4, 2, + 0, 2, 2, 1, 1, 0, 1, 0, + 1, 1, 0, 2, 1, 1, 1, 2, + 2, 1, 1, 2, 2, 1, 2, 3, + 2, 2, 0, 0, 2, 1, 0, 0, + 0, 0, 1, 4, 1, 0, 2, 1, + 3, 2, 0, 2, 2, 1, 1, 2, + 6, 1, 1, 2, 2, 1, 1, 1, + 0, 1, 1, 1, 1, 0, 2, 0, + 2, 3, 1, 2, 2, 2, 0, 1, + 0, 1, 1, 1, 0, 4, 0, 0, + 1, 1, 1, 1, 2, 2, 1, 4, + 1, 1, 1, 1, 2, 4, 1, 1, + 1, 2, 2, 5, 6, 2, 2, 5, + 1, 3, 2, 3, 5, 2, 3, 1, + 3, 1, 1, 2, 1, 2, 1, 4, + 0, 0, 1, 5, 2, 1, 2, 2, + 1, 2, 1, 0, 2, 1, 2, 1, + 2, 2, 2, 1, 1, 4, 2, 0, + 2, 2, 1, 1, 0, 1, 0, 1, + 1, 0, 2, 1, 1, 1, 2, 2, + 1, 1, 2, 2, 1, 2, 3, 2, + 2, 0, 0, 2, 1, 0, 0, 0, + 0, 1, 4, 1, 0, 2, 1, 3, + 2, 0, 2, 2, 1, 1, 2, 6, + 1, 1, 2, 2, 1, 1, 1, 0, + 1, 1, 1, 1, 0, 2, 0, 2, + 3, 
1, 2, 2, 2, 0, 1, 0, + 1, 1, 1, 0, 1, 1, 1, 1, + 2, 2, 1, 4, 1, 1, 1, 1, + 2, 0, 0, 0, 0, 0, 0, 0, + 1, 3, 3, 2, 2, 3, 5, 4, + 4, 3, 6, 2, 4, 4, 4, 0, + 0, 1, 4, 1, 3, 4, 3, 4, + 5, 3, 2, 2, 4, 0, 2, 2, + 1, 1, 4, 6, 9, 9, 9, 8, + 5, 8, 8, 9, 4, 6, 7, 7, + 8, 5, 8, 4, 5, 1, 2, 2, + 2, 3, 3, 5, 0, 4, 1, 6, + 4, 3, 5, 5, 1, 4, 3, 2, + 3, 2, 2, 3, 3, 5, 6, 2, + 2, 6, 2, 0, 4, 4, 1, 3, + 2, 5, 0, 2, 1, 2, 3, 3, + 2, 2, 3, 5, 4, 4, 3, 6, + 2, 4, 3, 4, 4, 1, 5, 3, + 1, 2, 5, 0, 2, 0, 1, 3, + 7, 3, 0, 3, 3, 3, 4, 1, + 3, 6, 3, 4, 8, 7, 2, 4, + 3, 3, 3, 2, 2, 1, 2, 2, + 3, 2, 0, 4, 1, 2, 3, 1, + 2, 2, 2, 1, 3, 0, 1, 1, + 1, 0, 1, 1, 1, 1, 1, 2, + 2, 1, 4, 1, 1, 1, 1, 2, + 0, 0, 0, 0, 0, 0, 0, 1, + 1, 5, 3, 1, 2, 3, 5, 2, + 1, 3, 7, 3, 0, 3, 3, 3, + 4, 1, 3, 6, 3, 4, 8, 7, + 2, 4, 3, 3, 3, 2, 2, 1, + 2, 2, 3, 2, 0, 0, 4, 1, + 1, 3, 0, 1, 0, 1, 1, 1, + 1, 2, 2, 1, 4, 1, 1, 1, + 1, 2, 4, 1, 1, 1, 2, 2, + 5, 6, 2, 2, 5, 1, 3, 2, + 3, 5, 2, 3, 1, 3, 1, 1, + 2, 1, 2, 1, 4, 0, 0, 1, + 5, 2, 1, 2, 2, 1, 2, 1, + 0, 2, 1, 2, 1, 2, 2, 2, + 1, 1, 4, 2, 0, 2, 2, 1, + 1, 0, 1, 0, 1, 1, 0, 2, + 1, 1, 1, 2, 2, 1, 1, 2, + 2, 1, 2, 3, 2, 2, 0, 0, + 2, 1, 0, 0, 0, 0, 1, 4, + 1, 0, 2, 1, 3, 2, 0, 2, + 2, 1, 1, 2, 6, 1, 1, 2, + 2, 1, 1, 1, 0, 1, 1, 1, + 1, 0, 2, 0, 2, 3, 1, 2, + 2, 2, 0, 1, 0, 0, 1, 1, + 1, 0, 0, 1, 0, 0, 1, 0, + 1, 1, 1, 1, 1, 1, 7, 0, + 0, 5, 5, 5, 5, 5, 5, 5, + 0, 5, 5, 5, 5, 0, 0, 3, + 5, 9, 0, 0, 8, 4, 5, 5, + 6, 6, 7, 7, 6, 9, 8, 1, + 1, 9, 6, 6, 7, 5, 5, 5, + 5, 5, 5, 5, 4, 2, 3, 0, + 0, 0, 2, 2, 4, 3, 3, 3, + 1, 1, 1, 2, 0, 3, 0, 0, + 1, 9, 6, 8, 0, 6, 10, 7, + 6, 7, 7, 8, 1, 1, 0, 0, + 9, 6, 5, 7, 6, 7, 6, 9, + 9, 7, 6, 6, 9, 7, 7, 7, + 6, 6, 5, 6, 6, 6, 7, 6, + 6, 6, 9, 9, 6, 6, 9, 9, + 6, 7, 8, 7, 7, 5, 7, 6, + 5, 5, 5, 5, 5, 5, 6, 6, + 9, 9, 12, 12, 9, 7, 10, 6, + 8, 9, 10, 10, 9, 10, 6, 8, + 6, 6, 7, 6, 7, 6, 9, 5, + 4, 1, 0, 1, 0, 1, 1, 1, + 2, 2, 1, 4, 1, 1, 1, 1, + 2, 2, 1, 4, 1, 1, 1, 1, + 2, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 2, 0, 0, 0, 0, + 0, 0, 0, 4, 3, 1, 
3, 4, + 3, 4, 5, 3, 2, 2, 4, 0, + 4, 5, 1, 1, 0, 1, 5, 1, + 0, 0, 0, 4, 1, 1, 0, 1, + 0, 1, 1, 1, 1, 2, 2, 1, + 4, 1, 1, 1, 1, 2, 2, 1, + 4, 1, 1, 1, 1, 2, 0, 0, + 0, 0, 0, 0, 0, 1, 1, 1, + 2, 0, 0, 0, 0, 0, 0, 0, + 1, 2, 2, 5, 6, 2, 2, 5, + 1, 3, 2, 3, 5, 2, 3, 1, + 3, 1, 1, 2, 1, 2, 1, 4, + 0, 1, 5, 2, 1, 2, 2, 1, + 2, 1, 0, 2, 1, 2, 1, 2, + 2, 2, 1, 1, 4, 2, 2, 2, + 1, 1, 0, 1, 1, 1, 2, 1, + 1, 1, 2, 2, 1, 1, 2, 2, + 1, 2, 3, 2, 2, 0, 2, 1, + 0, 0, 0, 0, 0, 0, 1, 0, + 0, 1, 2, 5, 9, 6, 8, 0, + 1, 5, 9, 4, 1, 3, 4, 3, + 4, 5, 3, 8, 6, 6, 7, 7, + 6, 9, 8, 2, 2, 4, 0, 4, + 0, 0, 2, 6, 10, 7, 6, 7, + 7, 8, 0, 7, 7, 6, 6, 5, + 6, 6, 6, 7, 6, 6, 6, 9, + 1, 1, 0, 0, 9, 6, 6, 9, + 9, 6, 7, 8, 7, 7, 5, 7, + 6, 5, 5, 5, 5, 5, 5, 6, + 9, 6, 5, 7, 6, 7, 6, 9, + 9, 7, 6, 6, 9, 7, 9, 6, + 6, 7, 5, 5, 5, 5, 5, 5, + 5, 6, 9, 9, 12, 12, 9, 7, + 10, 6, 8, 9, 10, 10, 9, 10, + 6, 8, 6, 6, 7, 6, 7, 6, + 9, 5, 2, 1, 0, +} + +var _s_index_offsets []uint16 = []uint16{ + 0, 2, 4, 6, 8, 11, 15, 19, + 22, 27, 31, 33, 35, 37, 66, 71, + 73, 75, 78, 82, 86, 92, 99, 104, + 108, 114, 117, 122, 125, 131, 137, 140, + 146, 148, 154, 157, 159, 163, 165, 171, + 173, 178, 180, 202, 204, 210, 214, 216, + 219, 222, 224, 227, 229, 231, 234, 236, + 240, 242, 245, 248, 251, 253, 255, 261, + 264, 271, 274, 277, 279, 281, 283, 285, + 288, 290, 292, 308, 311, 313, 315, 320, + 323, 326, 328, 330, 333, 336, 338, 342, + 347, 351, 354, 360, 362, 365, 367, 374, + 380, 382, 384, 386, 392, 394, 414, 417, + 419, 423, 426, 428, 431, 434, 436, 438, + 442, 450, 452, 454, 457, 460, 462, 464, + 466, 471, 473, 475, 477, 479, 481, 484, + 491, 494, 498, 500, 503, 506, 510, 512, + 514, 516, 522, 525, 527, 528, 551, 586, + 591, 595, 596, 598, 600, 606, 608, 615, + 621, 623, 626, 628, 631, 636, 642, 647, + 652, 661, 666, 669, 672, 677, 710, 716, + 719, 721, 723, 726, 731, 742, 753, 764, + 774, 784, 792, 802, 811, 824, 830, 838, + 847, 856, 865, 872, 882, 888, 895, 898, + 900, 904, 906, 913, 917, 923, 925, 970, + 972, 978, 
983, 988, 994, 1000, 1005, 1008, + 1012, 1015, 1018, 1020, 1023, 1026, 1029, 1035, + 1041, 1043, 1046, 1049, 1051, 1055, 1057, 1061, + 1064, 1068, 1070, 1074, 1078, 1084, 1087, 1090, + 1093, 1101, 1104, 1107, 1115, 1119, 1127, 1146, + 1149, 1154, 1156, 1158, 1170, 1174, 1176, 1178, + 1180, 1183, 1185, 1189, 1194, 1198, 1204, 1210, + 1212, 1218, 1222, 1224, 1227, 1230, 1260, 1262, + 1264, 1268, 1273, 1277, 1280, 1283, 1287, 1289, + 1295, 1297, 1301, 1307, 1311, 1315, 1319, 1323, + 1326, 1331, 1336, 1342, 1349, 1352, 1356, 1357, + 1361, 1363, 1366, 1383, 1391, 1395, 1397, 1399, + 1401, 1403, 1405, 1408, 1411, 1414, 1417, 1419, + 1421, 1424, 1430, 1441, 1479, 1485, 1488, 1490, + 1492, 1494, 1497, 1500, 1503, 1507, 1510, 1514, + 1516, 1519, 1521, 1523, 1526, 1533, 1536, 1538, + 1541, 1544, 1547, 1557, 1559, 1561, 1565, 1567, + 1570, 1572, 1574, 1599, 1603, 1605, 1609, 1611, + 1615, 1619, 1623, 1629, 1633, 1640, 1643, 1653, + 1663, 1666, 1670, 1674, 1678, 1681, 1684, 1686, + 1688, 1691, 1693, 1701, 1703, 1705, 1707, 1711, + 1715, 1717, 1729, 1731, 1734, 1737, 1741, 1743, + 1747, 1750, 1754, 1757, 1762, 1782, 1785, 1789, + 1791, 1793, 1801, 1803, 1810, 1815, 1817, 1821, + 1824, 1827, 1830, 1833, 1836, 1839, 1843, 1850, + 1854, 1864, 1884, 1890, 1894, 1896, 1899, 1901, + 1903, 1909, 1912, 1914, 1915, 1917, 1940, 1942, + 1944, 1946, 1948, 1951, 1955, 1959, 1964, 1970, + 1974, 1976, 1978, 1981, 2010, 2015, 2017, 2019, + 2022, 2028, 2032, 2040, 2048, 2055, 2059, 2066, + 2069, 2075, 2078, 2086, 2093, 2096, 2104, 2106, + 2113, 2116, 2119, 2123, 2126, 2133, 2135, 2140, + 2142, 2167, 2169, 2176, 2181, 2183, 2186, 2189, + 2191, 2196, 2199, 2201, 2204, 2206, 2208, 2210, + 2214, 2218, 2221, 2226, 2230, 2232, 2234, 2237, + 2243, 2246, 2253, 2256, 2259, 2261, 2263, 2265, + 2267, 2270, 2272, 2274, 2291, 2293, 2296, 2298, + 2300, 2305, 2308, 2313, 2316, 2318, 2321, 2328, + 2330, 2335, 2340, 2344, 2348, 2354, 2356, 2359, + 2361, 2368, 2375, 2377, 2379, 2381, 2387, 2389, + 2412, 2415, 
2419, 2423, 2425, 2429, 2431, 2434, + 2439, 2441, 2444, 2448, 2456, 2458, 2462, 2465, + 2468, 2472, 2474, 2476, 2479, 2486, 2488, 2490, + 2492, 2494, 2496, 2499, 2507, 2510, 2514, 2516, + 2518, 2521, 2524, 2528, 2530, 2532, 2534, 2540, + 2543, 2545, 2546, 2551, 2553, 2560, 2563, 2568, + 2575, 2597, 2599, 2601, 2603, 2605, 2608, 2614, + 2619, 2621, 2628, 2631, 2636, 2643, 2648, 2655, + 2664, 2669, 2672, 2675, 2681, 2714, 2720, 2723, + 2725, 2728, 2735, 2746, 2758, 2769, 2780, 2790, + 2800, 2810, 2820, 2833, 2840, 2848, 2859, 2868, + 2878, 2885, 2897, 2903, 2911, 2914, 2917, 2921, + 2924, 2932, 2936, 2942, 2944, 2991, 2993, 3000, + 3006, 3010, 3016, 3022, 3024, 3029, 3033, 3037, + 3041, 3044, 3048, 3052, 3056, 3062, 3069, 3072, + 3075, 3080, 3088, 3091, 3110, 3114, 3122, 3124, + 3128, 3132, 3138, 3144, 3148, 3150, 3180, 3184, + 3189, 3193, 3196, 3202, 3206, 3214, 3219, 3223, + 3227, 3235, 3238, 3244, 3249, 3255, 3260, 3277, + 3285, 3290, 3294, 3298, 3301, 3304, 3307, 3313, + 3324, 3362, 3364, 3367, 3371, 3375, 3385, 3389, + 3415, 3419, 3423, 3427, 3430, 3435, 3439, 3443, + 3451, 3455, 3460, 3470, 3480, 3483, 3489, 3493, + 3497, 3502, 3505, 3508, 3512, 3524, 3527, 3530, + 3533, 3538, 3542, 3545, 3549, 3554, 3574, 3577, + 3581, 3583, 3588, 3591, 3594, 3598, 3605, 3609, + 3611, 3617, 3620, 3622, 3623, 3625, 3629, 3632, + 3637, 3641, 3643, 3645, 3647, 3676, 3681, 3683, + 3685, 3688, 3692, 3696, 3702, 3709, 3714, 3718, + 3724, 3727, 3732, 3735, 3741, 3747, 3750, 3756, + 3758, 3764, 3767, 3769, 3773, 3775, 3781, 3783, + 3788, 3790, 3812, 3814, 3820, 3824, 3826, 3829, + 3832, 3834, 3837, 3839, 3841, 3844, 3846, 3850, + 3852, 3855, 3858, 3861, 3863, 3865, 3871, 3874, + 3881, 3884, 3887, 3889, 3891, 3893, 3895, 3898, + 3900, 3902, 3918, 3921, 3923, 3925, 3930, 3933, + 3936, 3938, 3940, 3943, 3946, 3948, 3952, 3957, + 3961, 3964, 3971, 3977, 3981, 3984, 3986, 3993, + 3999, 4001, 4003, 4005, 4011, 4013, 4033, 4036, + 4038, 4042, 4045, 4047, 4050, 4053, 4055, 4057, + 4061, 4069, 
4071, 4073, 4076, 4079, 4081, 4083, + 4085, 4090, 4092, 4094, 4096, 4098, 4100, 4103, + 4110, 4113, 4117, 4119, 4122, 4125, 4129, 4131, + 4133, 4135, 4141, 4144, 4146, 4147, 4153, 4160, + 4162, 4169, 4175, 4178, 4183, 4191, 4196, 4203, + 4212, 4217, 4220, 4223, 4229, 4262, 4268, 4271, + 4273, 4276, 4283, 4294, 4306, 4317, 4328, 4338, + 4348, 4358, 4368, 4381, 4388, 4396, 4407, 4416, + 4426, 4433, 4445, 4451, 4459, 4462, 4465, 4469, + 4472, 4480, 4484, 4490, 4492, 4539, 4541, 4548, + 4554, 4558, 4564, 4570, 4572, 4577, 4581, 4585, + 4589, 4592, 4596, 4600, 4604, 4610, 4617, 4620, + 4623, 4631, 4634, 4653, 4660, 4668, 4670, 4674, + 4678, 4684, 4690, 4694, 4696, 4726, 4731, 4735, + 4738, 4744, 4748, 4756, 4761, 4765, 4769, 4777, + 4780, 4786, 4791, 4797, 4802, 4819, 4826, 4831, + 4837, 4840, 4846, 4849, 4855, 4866, 4904, 4906, + 4909, 4913, 4923, 4927, 4953, 4957, 4961, 4965, + 4970, 4974, 4978, 4986, 4990, 4995, 5005, 5015, + 5018, 5024, 5028, 5032, 5037, 5040, 5043, 5055, + 5058, 5061, 5065, 5068, 5072, 5077, 5097, 5100, + 5104, 5106, 5109, 5112, 5116, 5123, 5127, 5129, + 5135, 5138, 5140, 5141, 5143, 5148, 5155, 5164, + 5169, 5172, 5175, 5181, 5214, 5220, 5223, 5225, + 5228, 5235, 5246, 5258, 5269, 5280, 5290, 5300, + 5310, 5320, 5333, 5340, 5348, 5359, 5368, 5378, + 5385, 5397, 5403, 5411, 5414, 5417, 5421, 5424, + 5432, 5436, 5442, 5444, 5491, 5493, 5500, 5506, + 5510, 5516, 5522, 5524, 5529, 5533, 5537, 5541, + 5544, 5548, 5552, 5556, 5562, 5569, 5572, 5575, + 5583, 5586, 5605, 5609, 5616, 5618, 5622, 5626, + 5632, 5643, 5648, 5650, 5652, 5654, 5656, 5659, + 5663, 5667, 5670, 5675, 5679, 5681, 5683, 5685, + 5714, 5719, 5721, 5723, 5726, 5730, 5734, 5740, + 5747, 5752, 5756, 5762, 5765, 5770, 5773, 5779, + 5785, 5788, 5794, 5796, 5802, 5805, 5807, 5811, + 5813, 5819, 5821, 5826, 5828, 5850, 5852, 5858, + 5862, 5864, 5867, 5870, 5872, 5875, 5877, 5879, + 5882, 5884, 5888, 5890, 5893, 5896, 5899, 5901, + 5903, 5909, 5912, 5919, 5923, 5929, 5931, 5933, + 5935, 5937, 
5945, 5948, 5951, 5954, 5956, 5958, + 5960, 5976, 5979, 5981, 5983, 5988, 5991, 5994, + 5996, 5998, 6001, 6004, 6006, 6010, 6015, 6019, + 6022, 6031, 6033, 6037, 6039, 6041, 6043, 6045, + 6047, 6054, 6060, 6062, 6064, 6066, 6072, 6074, + 6094, 6097, 6099, 6103, 6106, 6108, 6111, 6114, + 6116, 6118, 6122, 6130, 6132, 6134, 6137, 6140, + 6142, 6144, 6146, 6151, 6153, 6155, 6157, 6159, + 6162, 6164, 6167, 6174, 6177, 6181, 6183, 6186, + 6189, 6193, 6195, 6197, 6199, 6205, 6208, 6210, + 6211, 6215, 6245, 6250, 6254, 6257, 6263, 6267, + 6275, 6280, 6284, 6288, 6296, 6299, 6305, 6310, + 6316, 6321, 6338, 6346, 6350, 6353, 6356, 6360, + 6366, 6377, 6415, 6417, 6420, 6424, 6434, 6438, + 6464, 6468, 6472, 6476, 6481, 6485, 6489, 6497, + 6501, 6506, 6516, 6526, 6529, 6535, 6539, 6543, + 6548, 6551, 6554, 6566, 6569, 6572, 6576, 6579, + 6584, 6589, 6609, 6612, 6616, 6618, 6621, 6624, + 6628, 6635, 6639, 6641, 6647, 6650, 6652, 6653, + 6655, 6660, 6662, 6669, 6672, 6678, 6685, 6691, + 6698, 6707, 6712, 6715, 6718, 6725, 6758, 6764, + 6767, 6769, 6772, 6778, 6789, 6800, 6811, 6821, + 6831, 6840, 6850, 6860, 6873, 6880, 6888, 6898, + 6907, 6917, 6924, 6935, 6941, 6949, 6952, 6955, + 6959, 6962, 6970, 6974, 6980, 6982, 7029, 7031, + 7038, 7042, 7046, 7052, 7058, 7060, 7064, 7068, + 7072, 7076, 7079, 7083, 7087, 7091, 7096, 7103, + 7106, 7109, 7117, 7120, 7139, 7145, 7154, 7156, + 7160, 7164, 7170, 7176, 7180, 7182, 7212, 7217, + 7221, 7224, 7230, 7234, 7241, 7246, 7250, 7254, + 7260, 7263, 7269, 7274, 7280, 7285, 7302, 7310, + 7316, 7323, 7326, 7333, 7336, 7342, 7353, 7391, + 7393, 7396, 7400, 7410, 7414, 7440, 7444, 7447, + 7451, 7456, 7460, 7464, 7471, 7475, 7480, 7490, + 7500, 7503, 7508, 7512, 7516, 7520, 7523, 7526, + 7538, 7541, 7544, 7548, 7551, 7555, 7560, 7580, + 7583, 7587, 7589, 7592, 7595, 7599, 7606, 7610, + 7612, 7618, 7621, 7623, 7624, 7626, 7632, 7637, + 7639, 7643, 7651, 7656, 7661, 7668, 7670, 7672, + 7676, 7709, 7711, 7713, 7715, 7719, 7727, 7736, + 7747, 7756, 
7765, 7774, 7783, 7792, 7804, 7811, + 7818, 7827, 7835, 7844, 7850, 7859, 7865, 7873, + 7876, 7879, 7883, 7886, 7894, 7898, 7902, 7904, + 7951, 7953, 7960, 7966, 7969, 7973, 7978, 7980, + 7985, 7989, 7992, 7996, 8000, 8004, 8009, 8013, + 8015, 8017, 8021, 8024, 8043, 8050, 8058, 8060, + 8063, 8067, 8073, 8079, 8083, 8085, 8115, 8119, + 8121, 8128, 8132, 8135, 8142, 8144, 8148, 8153, + 8157, 8161, 8178, 8186, 8191, 8194, 8196, 8202, + 8213, 8251, 8253, 8256, 8259, 8267, 8270, 8295, + 8299, 8302, 8305, 8308, 8314, 8317, 8321, 8330, + 8339, 8344, 8347, 8350, 8355, 8358, 8370, 8373, + 8376, 8378, 8382, 8387, 8407, 8410, 8414, 8416, + 8419, 8422, 8426, 8433, 8436, 8438, 8444, 8447, + 8449, 8454, 8456, 8463, 8466, 8471, 8478, 8483, + 8490, 8499, 8504, 8507, 8510, 8516, 8549, 8555, + 8558, 8560, 8563, 8570, 8581, 8593, 8604, 8615, + 8625, 8635, 8645, 8655, 8668, 8675, 8683, 8694, + 8703, 8713, 8720, 8732, 8738, 8746, 8749, 8752, + 8756, 8759, 8767, 8771, 8777, 8779, 8826, 8828, + 8835, 8841, 8845, 8851, 8857, 8859, 8864, 8868, + 8872, 8876, 8878, 8880, 8883, 8887, 8891, 8895, + 8901, 8908, 8911, 8914, 8919, 8927, 8930, 8949, + 8953, 8960, 8962, 8966, 8970, 8976, 8987, 8992, + 8996, 9026, 9030, 9035, 9039, 9042, 9048, 9052, + 9060, 9065, 9069, 9073, 9081, 9084, 9090, 9095, + 9101, 9106, 9123, 9131, 9135, 9139, 9142, 9145, + 9149, 9155, 9166, 9204, 9206, 9209, 9213, 9217, + 9227, 9231, 9257, 9261, 9265, 9269, 9272, 9277, + 9281, 9285, 9293, 9297, 9302, 9312, 9322, 9325, + 9331, 9335, 9339, 9344, 9347, 9349, 9352, 9356, + 9368, 9371, 9374, 9377, 9382, 9386, 9389, 9394, + 9399, 9419, 9422, 9426, 9428, 9433, 9436, 9439, + 9443, 9450, 9454, 9456, 9462, 9465, 9467, 9468, + 9470, 9475, 9477, 9484, 9487, 9493, 9500, 9506, + 9513, 9522, 9527, 9530, 9533, 9540, 9573, 9579, + 9582, 9584, 9587, 9593, 9604, 9615, 9626, 9636, + 9646, 9655, 9665, 9675, 9688, 9695, 9703, 9713, + 9722, 9732, 9739, 9750, 9756, 9764, 9767, 9770, + 9774, 9777, 9785, 9789, 9795, 9797, 9844, 9846, + 9853, 9857, 
9861, 9867, 9873, 9875, 9879, 9883, + 9887, 9891, 9894, 9898, 9902, 9906, 9911, 9918, + 9921, 9924, 9932, 9935, 9954, 9960, 9969, 9971, + 9975, 9979, 9985, 9991, 9995, 9997, 10027, 10032, + 10036, 10039, 10045, 10049, 10056, 10061, 10065, 10069, + 10075, 10078, 10084, 10089, 10095, 10100, 10117, 10125, + 10131, 10138, 10141, 10148, 10151, 10157, 10168, 10206, + 10208, 10211, 10215, 10225, 10229, 10255, 10259, 10262, + 10266, 10271, 10275, 10279, 10286, 10290, 10295, 10305, + 10315, 10318, 10323, 10327, 10331, 10335, 10338, 10341, + 10353, 10356, 10359, 10363, 10366, 10370, 10375, 10395, + 10398, 10402, 10404, 10407, 10410, 10414, 10421, 10425, + 10427, 10433, 10436, 10438, 10439, 10441, 10443, 10445, + 10447, 10449, 10452, 10456, 10460, 10465, 10471, 10475, + 10477, 10479, 10482, 10511, 10516, 10518, 10520, 10523, + 10529, 10533, 10541, 10549, 10556, 10560, 10567, 10570, + 10576, 10579, 10587, 10594, 10597, 10605, 10607, 10614, + 10617, 10620, 10624, 10627, 10634, 10636, 10641, 10643, + 10668, 10670, 10677, 10682, 10684, 10687, 10690, 10692, + 10697, 10700, 10702, 10705, 10707, 10711, 10715, 10718, + 10723, 10727, 10729, 10731, 10734, 10740, 10743, 10750, + 10753, 10756, 10758, 10760, 10762, 10764, 10767, 10769, + 10771, 10788, 10790, 10793, 10795, 10797, 10802, 10805, + 10810, 10813, 10815, 10818, 10825, 10827, 10832, 10837, + 10841, 10845, 10851, 10853, 10856, 10858, 10865, 10872, + 10874, 10876, 10878, 10884, 10886, 10909, 10912, 10916, + 10920, 10922, 10926, 10928, 10931, 10936, 10938, 10941, + 10945, 10953, 10955, 10959, 10962, 10965, 10969, 10971, + 10974, 10981, 10983, 10985, 10987, 10989, 10991, 10994, + 11002, 11005, 11009, 11011, 11013, 11016, 11019, 11023, + 11025, 11027, 11029, 11035, 11038, 11040, 11041, 11046, + 11048, 11055, 11058, 11064, 11071, 11077, 11085, 11094, + 11099, 11102, 11105, 11112, 11145, 11151, 11154, 11156, + 11159, 11166, 11177, 11189, 11200, 11211, 11221, 11231, + 11241, 11251, 11264, 11271, 11279, 11290, 11299, 11309, + 11316, 
11328, 11334, 11342, 11345, 11348, 11352, 11355, + 11363, 11367, 11373, 11375, 11422, 11424, 11431, 11437, + 11441, 11447, 11453, 11455, 11460, 11464, 11468, 11472, + 11475, 11479, 11483, 11487, 11493, 11500, 11503, 11506, + 11514, 11517, 11536, 11542, 11551, 11553, 11557, 11561, + 11567, 11573, 11577, 11579, 11609, 11614, 11618, 11621, + 11627, 11631, 11639, 11644, 11648, 11652, 11660, 11663, + 11669, 11674, 11680, 11685, 11702, 11710, 11716, 11719, + 11722, 11728, 11739, 11777, 11779, 11782, 11786, 11796, + 11800, 11826, 11830, 11834, 11838, 11843, 11847, 11851, + 11859, 11863, 11868, 11878, 11888, 11891, 11897, 11901, + 11905, 11910, 11913, 11916, 11928, 11931, 11934, 11938, + 11941, 11945, 11950, 11970, 11973, 11977, 11979, 11982, + 11985, 11989, 11996, 12000, 12002, 12008, 12011, 12013, + 12014, 12016, 12022, 12024, 12031, 12034, 12039, 12047, + 12052, 12059, 12068, 12073, 12076, 12079, 12085, 12118, + 12124, 12127, 12129, 12132, 12139, 12150, 12162, 12173, + 12184, 12194, 12204, 12214, 12224, 12237, 12244, 12252, + 12263, 12272, 12282, 12289, 12301, 12307, 12315, 12318, + 12321, 12325, 12328, 12336, 12340, 12346, 12348, 12395, + 12397, 12404, 12410, 12414, 12420, 12426, 12428, 12433, + 12437, 12441, 12445, 12448, 12452, 12456, 12460, 12466, + 12473, 12476, 12479, 12487, 12490, 12509, 12516, 12524, + 12526, 12530, 12534, 12540, 12546, 12550, 12552, 12582, + 12587, 12591, 12594, 12600, 12604, 12612, 12617, 12621, + 12625, 12633, 12636, 12642, 12647, 12653, 12658, 12675, + 12683, 12688, 12691, 12694, 12700, 12711, 12749, 12751, + 12754, 12758, 12768, 12772, 12798, 12802, 12806, 12810, + 12815, 12819, 12823, 12831, 12835, 12840, 12850, 12860, + 12863, 12869, 12873, 12877, 12882, 12885, 12888, 12900, + 12903, 12906, 12910, 12913, 12917, 12922, 12942, 12945, + 12949, 12951, 12954, 12957, 12961, 12968, 12972, 12974, + 12980, 12983, 12985, 12986, 12988, 12993, 12995, 13002, + 13005, 13010, 13017, 13022, 13029, 13038, 13043, 13046, + 13049, 13055, 13088, 13094, 13097, 
13099, 13102, 13109, + 13120, 13132, 13143, 13154, 13164, 13174, 13184, 13194, + 13207, 13214, 13222, 13233, 13242, 13252, 13259, 13271, + 13277, 13285, 13288, 13291, 13295, 13298, 13306, 13310, + 13316, 13318, 13365, 13367, 13374, 13380, 13384, 13390, + 13396, 13398, 13403, 13407, 13411, 13415, 13418, 13422, + 13426, 13430, 13436, 13443, 13446, 13449, 13457, 13460, + 13479, 13483, 13490, 13492, 13496, 13500, 13506, 13517, + 13522, 13526, 13556, 13561, 13565, 13568, 13574, 13578, + 13586, 13591, 13595, 13599, 13607, 13610, 13616, 13621, + 13627, 13632, 13649, 13657, 13661, 13665, 13668, 13671, + 13675, 13681, 13692, 13730, 13732, 13735, 13739, 13749, + 13753, 13779, 13783, 13787, 13791, 13796, 13800, 13804, + 13812, 13816, 13821, 13831, 13841, 13844, 13850, 13854, + 13858, 13863, 13866, 13869, 13881, 13884, 13887, 13891, + 13894, 13899, 13904, 13924, 13927, 13931, 13933, 13936, + 13939, 13943, 13950, 13954, 13956, 13962, 13965, 13967, + 13968, 13970, 13975, 13977, 13984, 13987, 13992, 13999, + 14004, 14011, 14020, 14025, 14028, 14031, 14037, 14070, + 14076, 14079, 14081, 14084, 14091, 14102, 14114, 14125, + 14136, 14146, 14156, 14166, 14176, 14189, 14196, 14204, + 14215, 14224, 14234, 14241, 14253, 14259, 14267, 14270, + 14273, 14277, 14280, 14288, 14292, 14298, 14300, 14347, + 14349, 14356, 14362, 14366, 14372, 14378, 14380, 14385, + 14389, 14393, 14397, 14400, 14404, 14408, 14412, 14418, + 14425, 14428, 14431, 14439, 14442, 14461, 14465, 14473, + 14475, 14479, 14483, 14489, 14500, 14505, 14509, 14539, + 14544, 14548, 14551, 14557, 14561, 14569, 14574, 14578, + 14582, 14590, 14593, 14599, 14604, 14610, 14615, 14632, + 14640, 14644, 14647, 14651, 14657, 14668, 14706, 14708, + 14711, 14715, 14725, 14729, 14755, 14759, 14763, 14767, + 14772, 14776, 14780, 14788, 14792, 14797, 14807, 14817, + 14820, 14826, 14830, 14834, 14839, 14842, 14845, 14857, + 14860, 14863, 14867, 14870, 14875, 14880, 14900, 14903, + 14907, 14909, 14912, 14915, 14919, 14926, 14930, 14932, + 
14938, 14941, 14943, 14944, 14946, 14948, 14950, 14952, + 14954, 14957, 14961, 14965, 14968, 14973, 14977, 14979, + 14981, 14983, 15012, 15017, 15019, 15021, 15024, 15028, + 15032, 15038, 15045, 15050, 15054, 15060, 15063, 15068, + 15071, 15077, 15083, 15086, 15092, 15094, 15100, 15103, + 15105, 15109, 15111, 15117, 15119, 15124, 15126, 15148, + 15150, 15156, 15160, 15162, 15165, 15168, 15170, 15173, + 15175, 15177, 15180, 15182, 15186, 15188, 15191, 15194, + 15197, 15199, 15201, 15207, 15210, 15217, 15220, 15223, + 15225, 15227, 15229, 15231, 15234, 15236, 15238, 15254, + 15257, 15259, 15261, 15266, 15269, 15272, 15274, 15276, + 15279, 15282, 15284, 15288, 15293, 15297, 15300, 15306, + 15308, 15311, 15313, 15320, 15326, 15328, 15330, 15332, + 15338, 15340, 15360, 15363, 15365, 15369, 15372, 15374, + 15377, 15380, 15382, 15384, 15388, 15396, 15398, 15400, + 15403, 15406, 15408, 15410, 15412, 15417, 15419, 15421, + 15423, 15425, 15427, 15430, 15437, 15440, 15444, 15446, + 15449, 15452, 15456, 15458, 15460, 15462, 15468, 15471, + 15473, 15474, 15479, 15481, 15488, 15491, 15496, 15502, + 15507, 15512, 15521, 15526, 15529, 15532, 15537, 15570, + 15576, 15579, 15581, 15584, 15589, 15600, 15611, 15622, + 15632, 15642, 15650, 15660, 15669, 15682, 15688, 15696, + 15705, 15714, 15723, 15730, 15740, 15746, 15753, 15756, + 15758, 15762, 15764, 15771, 15775, 15781, 15783, 15828, + 15830, 15836, 15841, 15845, 15851, 15857, 15859, 15862, + 15865, 15869, 15873, 15876, 15880, 15882, 15886, 15890, + 15896, 15899, 15902, 15910, 15913, 15932, 15935, 15940, + 15942, 15946, 15950, 15956, 15962, 15966, 15968, 15998, + 16003, 16007, 16010, 16016, 16020, 16026, 16030, 16034, + 16038, 16042, 16045, 16050, 16055, 16061, 16065, 16082, + 16090, 16093, 16096, 16099, 16105, 16116, 16154, 16156, + 16159, 16163, 16173, 16177, 16202, 16206, 16208, 16212, + 16216, 16220, 16224, 16230, 16234, 16237, 16247, 16257, + 16260, 16264, 16268, 16272, 16275, 16278, 16280, 16292, + 16295, 16298, 16302, 16305, 
16309, 16314, 16334, 16337, + 16341, 16343, 16346, 16349, 16353, 16360, 16364, 16366, + 16372, 16375, 16377, 16378, 16380, 16385, 16387, 16394, + 16397, 16403, 16410, 16416, 16424, 16433, 16438, 16441, + 16444, 16451, 16484, 16490, 16493, 16495, 16498, 16505, + 16516, 16528, 16539, 16550, 16560, 16570, 16580, 16590, + 16603, 16610, 16618, 16629, 16638, 16648, 16655, 16667, + 16673, 16681, 16684, 16687, 16691, 16694, 16702, 16706, + 16712, 16714, 16761, 16763, 16770, 16776, 16780, 16786, + 16792, 16794, 16799, 16803, 16807, 16811, 16814, 16818, + 16822, 16826, 16832, 16839, 16842, 16845, 16853, 16856, + 16875, 16881, 16890, 16892, 16896, 16900, 16906, 16912, + 16916, 16918, 16948, 16953, 16957, 16960, 16966, 16970, + 16978, 16983, 16987, 16991, 16999, 17002, 17008, 17013, + 17019, 17024, 17041, 17049, 17055, 17058, 17061, 17067, + 17078, 17116, 17118, 17121, 17125, 17135, 17139, 17165, + 17169, 17173, 17177, 17182, 17186, 17190, 17198, 17202, + 17207, 17217, 17227, 17230, 17236, 17240, 17244, 17249, + 17252, 17255, 17267, 17270, 17273, 17277, 17280, 17284, + 17289, 17309, 17312, 17316, 17318, 17321, 17324, 17328, + 17335, 17339, 17341, 17347, 17350, 17352, 17353, 17355, + 17360, 17362, 17369, 17372, 17377, 17384, 17389, 17396, + 17405, 17410, 17413, 17416, 17422, 17455, 17461, 17464, + 17466, 17469, 17476, 17487, 17499, 17510, 17521, 17531, + 17541, 17551, 17561, 17574, 17581, 17589, 17600, 17609, + 17619, 17626, 17638, 17644, 17652, 17655, 17658, 17662, + 17665, 17673, 17677, 17683, 17685, 17732, 17734, 17741, + 17747, 17751, 17757, 17763, 17765, 17770, 17774, 17778, + 17782, 17785, 17789, 17793, 17797, 17803, 17810, 17813, + 17816, 17824, 17827, 17846, 17850, 17858, 17860, 17864, + 17868, 17874, 17885, 17890, 17894, 17924, 17929, 17933, + 17936, 17942, 17946, 17954, 17959, 17963, 17967, 17975, + 17978, 17984, 17989, 17995, 18000, 18017, 18025, 18029, + 18032, 18036, 18042, 18053, 18091, 18093, 18096, 18100, + 18110, 18114, 18140, 18144, 18148, 18152, 18157, 18161, 
+ 18165, 18173, 18177, 18182, 18192, 18202, 18205, 18211, + 18215, 18219, 18224, 18227, 18230, 18242, 18245, 18248, + 18252, 18255, 18260, 18265, 18285, 18288, 18292, 18294, + 18297, 18300, 18304, 18311, 18315, 18317, 18323, 18326, + 18328, 18329, 18331, 18333, 18335, 18337, 18339, 18342, + 18346, 18350, 18353, 18358, 18362, 18364, 18366, 18368, + 18397, 18419, 18426, 18429, 18445, 18451, 18458, 18460, + 18466, 18469, 18472, 18479, 18490, 18502, 18513, 18524, + 18534, 18544, 18554, 18564, 18577, 18584, 18592, 18603, + 18612, 18622, 18629, 18641, 18647, 18655, 18658, 18661, + 18669, 18673, 18679, 18686, 18692, 18694, 18700, 18702, + 18709, 18712, 18717, 18725, 18730, 18737, 18746, 18751, + 18754, 18757, 18763, 18796, 18802, 18805, 18807, 18810, + 18817, 18828, 18840, 18851, 18862, 18872, 18882, 18892, + 18902, 18915, 18922, 18930, 18941, 18950, 18960, 18967, + 18979, 18985, 18993, 18996, 18999, 19003, 19006, 19014, + 19018, 19024, 19026, 19076, 19078, 19085, 19091, 19092, + 19094, 19098, 19104, 19110, 19112, 19117, 19121, 19125, + 19129, 19132, 19136, 19140, 19144, 19150, 19157, 19160, + 19163, 19171, 19174, 19193, 19200, 19208, 19210, 19214, + 19218, 19224, 19232, 19236, 19238, 19241, 19245, 19247, + 19249, 19251, 19253, 19255, 19258, 19262, 19266, 19269, + 19274, 19278, 19280, 19282, 19284, 19313, 19318, 19320, + 19322, 19325, 19329, 19333, 19339, 19346, 19351, 19355, + 19361, 19364, 19369, 19372, 19378, 19384, 19387, 19393, + 19395, 19401, 19404, 19406, 19410, 19412, 19418, 19420, + 19425, 19427, 19452, 19454, 19460, 19464, 19465, 19467, + 19469, 19472, 19475, 19477, 19480, 19482, 19484, 19487, + 19489, 19493, 19495, 19498, 19501, 19504, 19506, 19508, + 19514, 19517, 19524, 19527, 19530, 19532, 19534, 19536, + 19538, 19546, 19548, 19550, 19552, 19554, 19556, 19574, + 19577, 19579, 19581, 19586, 19589, 19592, 19594, 19597, + 19600, 19603, 19605, 19609, 19614, 19618, 19621, 19622, + 19626, 19628, 19631, 19637, 19639, 19642, 19644, 19650, + 19657, 19663, 19665, 
19667, 19669, 19675, 19677, 19697, + 19700, 19702, 19706, 19709, 19711, 19714, 19717, 19719, + 19721, 19725, 19733, 19735, 19737, 19740, 19743, 19745, + 19747, 19749, 19754, 19756, 19758, 19760, 19762, 19764, + 19767, 19774, 19777, 19781, 19783, 19786, 19789, 19793, + 19795, 19797, 19799, 19805, 19808, 19839, 19844, 19848, + 19851, 19857, 19861, 19869, 19874, 19879, 19883, 19891, + 19894, 19900, 19905, 19911, 19916, 19917, 19921, 19923, + 19926, 19943, 19951, 19956, 19959, 19961, 19967, 19978, + 20016, 20018, 20021, 20025, 20035, 20039, 20065, 20069, + 20073, 20077, 20082, 20086, 20090, 20098, 20102, 20107, + 20117, 20127, 20130, 20136, 20140, 20144, 20149, 20152, + 20155, 20167, 20170, 20173, 20177, 20180, 20184, 20189, + 20209, 20212, 20216, 20218, 20221, 20224, 20228, 20235, + 20239, 20241, 20247, 20250, 20252, 20254, 20258, 20264, + 20270, 20275, 20279, 20283, 20287, 20290, 20294, 20298, + 20304, 20311, 20314, 20317, 20325, 20330, 20338, 20342, + 20346, 20352, 20355, 20357, 20359, 20361, 20363, 20366, + 20370, 20374, 20377, 20382, 20386, 20388, 20390, 20392, + 20421, 20426, 20428, 20430, 20433, 20437, 20441, 20447, + 20454, 20459, 20463, 20469, 20472, 20477, 20480, 20486, + 20492, 20495, 20501, 20503, 20509, 20512, 20514, 20518, + 20520, 20526, 20528, 20533, 20535, 20557, 20559, 20565, + 20569, 20571, 20574, 20577, 20579, 20582, 20584, 20586, + 20589, 20591, 20595, 20597, 20600, 20603, 20606, 20608, + 20610, 20616, 20619, 20626, 20629, 20632, 20634, 20636, + 20638, 20640, 20643, 20645, 20647, 20663, 20666, 20668, + 20670, 20675, 20678, 20681, 20683, 20685, 20688, 20691, + 20693, 20697, 20702, 20706, 20709, 20715, 20717, 20720, + 20722, 20729, 20735, 20737, 20739, 20741, 20747, 20749, + 20769, 20772, 20774, 20778, 20781, 20783, 20786, 20789, + 20791, 20793, 20797, 20805, 20807, 20809, 20812, 20815, + 20817, 20819, 20821, 20826, 20828, 20830, 20832, 20834, + 20836, 20839, 20846, 20849, 20853, 20855, 20858, 20861, + 20865, 20867, 20869, 20871, 20877, 20880, 20882, 
20883, + 20885, 20887, 20897, 20903, 20905, 20912, 20915, 20920, + 20928, 20933, 20940, 20949, 20954, 20957, 20960, 20966, + 20999, 21005, 21008, 21010, 21013, 21020, 21031, 21043, + 21054, 21065, 21075, 21085, 21095, 21105, 21118, 21125, + 21133, 21144, 21153, 21163, 21170, 21182, 21188, 21196, + 21199, 21202, 21206, 21209, 21217, 21221, 21227, 21229, + 21276, 21278, 21285, 21291, 21295, 21301, 21307, 21309, + 21314, 21318, 21322, 21326, 21329, 21333, 21337, 21341, + 21347, 21354, 21357, 21360, 21368, 21371, 21390, 21397, + 21405, 21407, 21411, 21415, 21421, 21427, 21431, 21433, + 21463, 21468, 21472, 21475, 21481, 21485, 21493, 21498, + 21502, 21506, 21514, 21517, 21523, 21528, 21534, 21539, + 21556, 21564, 21569, 21572, 21575, 21581, 21592, 21630, + 21632, 21635, 21639, 21649, 21653, 21679, 21683, 21687, + 21691, 21696, 21700, 21704, 21712, 21716, 21721, 21731, + 21741, 21744, 21750, 21754, 21758, 21763, 21766, 21769, + 21781, 21784, 21787, 21791, 21794, 21798, 21803, 21823, + 21826, 21830, 21832, 21835, 21838, 21842, 21849, 21853, + 21855, 21861, 21864, 21866, 21867, 21869, 21871, 21873, + 21875, 21877, 21880, 21884, 21888, 21891, 21896, 21900, + 21902, 21904, 21906, 21935, 21940, 21942, 21944, 21947, + 21951, 21955, 21961, 21968, 21973, 21977, 21983, 21986, + 21991, 21994, 22000, 22006, 22009, 22015, 22017, 22023, + 22026, 22028, 22032, 22034, 22040, 22042, 22047, 22049, + 22074, 22076, 22082, 22086, 22088, 22091, 22094, 22096, + 22099, 22101, 22103, 22106, 22108, 22112, 22114, 22117, + 22120, 22123, 22125, 22127, 22133, 22136, 22143, 22146, + 22149, 22151, 22153, 22155, 22157, 22165, 22167, 22169, + 22187, 22190, 22192, 22194, 22199, 22202, 22205, 22207, + 22210, 22213, 22216, 22218, 22222, 22227, 22231, 22234, + 22240, 22242, 22245, 22248, 22254, 22261, 22267, 22269, + 22271, 22273, 22279, 22281, 22301, 22304, 22306, 22310, + 22313, 22315, 22318, 22321, 22323, 22325, 22329, 22337, + 22339, 22341, 22344, 22347, 22349, 22351, 22353, 22358, + 22360, 22362, 
22364, 22366, 22368, 22371, 22378, 22381, + 22385, 22387, 22390, 22393, 22397, 22399, 22401, 22403, + 22409, 22412, 22414, 22415, 22417, 22419, 22421, 22423, + 22426, 22430, 22434, 22437, 22442, 22446, 22448, 22450, + 22452, 22481, 22486, 22488, 22490, 22493, 22497, 22501, + 22507, 22514, 22519, 22523, 22529, 22532, 22537, 22540, + 22546, 22552, 22555, 22561, 22563, 22569, 22572, 22574, + 22578, 22580, 22586, 22588, 22593, 22595, 22617, 22619, + 22625, 22629, 22631, 22634, 22637, 22639, 22642, 22644, + 22646, 22649, 22651, 22655, 22657, 22660, 22663, 22666, + 22668, 22670, 22676, 22679, 22686, 22690, 22696, 22698, + 22700, 22702, 22704, 22712, 22715, 22718, 22721, 22723, + 22725, 22727, 22743, 22746, 22748, 22750, 22755, 22758, + 22761, 22763, 22765, 22768, 22771, 22773, 22777, 22782, + 22786, 22789, 22798, 22800, 22804, 22806, 22808, 22810, + 22817, 22823, 22825, 22827, 22829, 22835, 22837, 22857, + 22860, 22862, 22866, 22869, 22871, 22874, 22877, 22879, + 22881, 22885, 22893, 22895, 22897, 22900, 22903, 22905, + 22907, 22909, 22914, 22916, 22918, 22920, 22922, 22925, + 22927, 22930, 22937, 22940, 22944, 22946, 22949, 22952, + 22956, 22958, 22960, 22962, 22968, 22971, 22973, 22974, + 22976, 22978, 22980, 22982, 22984, 22987, 22991, 22995, + 22998, 23003, 23007, 23009, 23011, 23013, 23042, 23047, + 23049, 23051, 23054, 23058, 23062, 23068, 23075, 23080, + 23084, 23090, 23093, 23098, 23101, 23107, 23113, 23116, + 23122, 23124, 23130, 23133, 23135, 23139, 23141, 23147, + 23149, 23154, 23156, 23178, 23180, 23186, 23190, 23192, + 23195, 23198, 23200, 23203, 23205, 23207, 23210, 23212, + 23216, 23218, 23221, 23224, 23227, 23229, 23231, 23237, + 23240, 23247, 23250, 23253, 23255, 23257, 23259, 23261, + 23264, 23266, 23268, 23284, 23287, 23289, 23291, 23296, + 23299, 23302, 23304, 23306, 23309, 23312, 23314, 23318, + 23323, 23327, 23330, 23336, 23338, 23341, 23343, 23350, + 23356, 23358, 23360, 23362, 23368, 23370, 23390, 23393, + 23395, 23399, 23402, 23404, 23407, 23410, 
23412, 23414, + 23418, 23426, 23428, 23430, 23433, 23436, 23438, 23440, + 23442, 23447, 23449, 23451, 23453, 23455, 23457, 23460, + 23467, 23470, 23474, 23476, 23479, 23482, 23486, 23488, + 23490, 23492, 23498, 23501, 23503, 23504, 23509, 23511, + 23513, 23515, 23517, 23519, 23522, 23526, 23530, 23533, + 23538, 23542, 23544, 23546, 23548, 23577, 23582, 23584, + 23586, 23589, 23593, 23597, 23603, 23610, 23615, 23619, + 23625, 23628, 23633, 23636, 23642, 23648, 23651, 23657, + 23659, 23665, 23668, 23670, 23674, 23676, 23682, 23684, + 23689, 23691, 23713, 23715, 23721, 23725, 23727, 23730, + 23733, 23735, 23738, 23740, 23742, 23745, 23747, 23751, + 23753, 23756, 23759, 23762, 23764, 23766, 23772, 23775, + 23782, 23785, 23788, 23790, 23792, 23794, 23796, 23799, + 23801, 23803, 23819, 23822, 23824, 23826, 23831, 23834, + 23837, 23839, 23841, 23844, 23847, 23849, 23853, 23858, + 23862, 23865, 23871, 23873, 23876, 23878, 23885, 23891, + 23893, 23895, 23897, 23903, 23905, 23925, 23928, 23930, + 23934, 23937, 23939, 23942, 23945, 23947, 23949, 23953, + 23961, 23963, 23965, 23968, 23971, 23973, 23975, 23977, + 23982, 23984, 23986, 23988, 23990, 23992, 23995, 24002, + 24005, 24009, 24011, 24014, 24017, 24021, 24023, 24025, + 24027, 24033, 24036, 24038, 24039, 24041, 24043, 24045, + 24048, 24052, 24056, 24059, 24064, 24068, 24070, 24072, + 24074, 24103, 24125, 24132, 24135, 24151, 24157, 24164, + 24166, 24168, 24173, 24177, 24180, 24186, 24190, 24198, + 24203, 24208, 24212, 24220, 24223, 24229, 24235, 24240, + 24242, 24248, 24250, 24257, 24260, 24265, 24273, 24278, + 24285, 24294, 24299, 24302, 24305, 24311, 24344, 24350, + 24353, 24355, 24358, 24365, 24376, 24388, 24399, 24410, + 24420, 24430, 24440, 24450, 24463, 24470, 24478, 24489, + 24498, 24508, 24515, 24527, 24533, 24541, 24544, 24547, + 24551, 24554, 24562, 24566, 24572, 24574, 24624, 24626, + 24633, 24639, 24643, 24649, 24655, 24657, 24662, 24666, + 24670, 24674, 24677, 24681, 24685, 24689, 24695, 24702, + 24705, 
24708, 24716, 24719, 24738, 24745, 24753, 24755, + 24759, 24763, 24769, 24777, 24781, 24783, 24814, 24819, + 24823, 24826, 24832, 24836, 24844, 24849, 24854, 24858, + 24866, 24869, 24875, 24880, 24886, 24891, 24908, 24916, + 24921, 24924, 24927, 24933, 24944, 24982, 24984, 24987, + 24991, 25001, 25005, 25031, 25035, 25039, 25043, 25048, + 25052, 25056, 25064, 25068, 25073, 25083, 25093, 25096, + 25102, 25106, 25110, 25115, 25118, 25121, 25133, 25136, + 25139, 25143, 25146, 25150, 25155, 25175, 25178, 25182, + 25184, 25187, 25190, 25194, 25201, 25205, 25207, 25213, + 25216, 25218, 25219, 25221, 25223, 25225, 25227, 25230, + 25234, 25238, 25241, 25246, 25250, 25252, 25254, 25256, + 25285, 25307, 25314, 25317, 25333, 25339, 25346, 25348, + 25350, 25352, 25360, 25364, 25367, 25370, 25374, 25380, + 25418, 25421, 25425, 25435, 25439, 25465, 25469, 25473, + 25477, 25482, 25486, 25490, 25498, 25502, 25507, 25517, + 25527, 25530, 25536, 25540, 25544, 25549, 25552, 25555, + 25567, 25570, 25573, 25577, 25580, 25585, 25588, 25593, + 25613, 25620, 25624, 25630, 25632, 25634, 25636, 25638, + 25640, 25643, 25647, 25651, 25654, 25659, 25663, 25665, + 25667, 25669, 25698, 25703, 25705, 25707, 25710, 25714, + 25718, 25724, 25731, 25736, 25740, 25746, 25749, 25754, + 25757, 25763, 25769, 25772, 25778, 25780, 25786, 25789, + 25791, 25795, 25797, 25803, 25805, 25810, 25812, 25834, + 25836, 25842, 25846, 25848, 25851, 25854, 25856, 25859, + 25861, 25863, 25866, 25868, 25872, 25874, 25877, 25880, + 25883, 25885, 25887, 25893, 25896, 25903, 25906, 25909, + 25911, 25913, 25915, 25917, 25920, 25922, 25924, 25940, + 25943, 25945, 25947, 25952, 25955, 25958, 25960, 25962, + 25965, 25968, 25970, 25974, 25979, 25983, 25986, 25992, + 25994, 25997, 25999, 26007, 26013, 26015, 26017, 26019, + 26025, 26027, 26047, 26050, 26052, 26056, 26059, 26061, + 26064, 26067, 26069, 26071, 26075, 26083, 26085, 26087, + 26090, 26093, 26095, 26097, 26099, 26104, 26106, 26108, + 26110, 26112, 26114, 26117, 26124, 
26127, 26131, 26133, + 26136, 26139, 26143, 26145, 26147, 26149, 26151, 26157, + 26160, 26162, 26163, 26165, 26168, 26169, 26172, 26174, + 26178, 26180, 26182, 26185, 26187, 26191, 26193, 26236, + 26258, 26260, 26301, 26342, 26382, 26423, 26460, 26501, + 26538, 26561, 26598, 26638, 26675, 26712, 26739, 26761, + 26786, 26827, 26871, 26898, 26920, 26963, 26998, 27039, + 27076, 27117, 27159, 27202, 27245, 27287, 27331, 27376, + 27399, 27422, 27466, 27507, 27548, 27616, 27677, 27723, + 27765, 27820, 27865, 27911, 27952, 27980, 28006, 28033, + 28056, 28079, 28102, 28126, 28151, 28180, 28206, 28232, + 28260, 28283, 28306, 28329, 28354, 28408, 28476, 28516, + 28543, 28594, 28638, 28679, 28722, 28744, 28785, 28830, + 28873, 28914, 28956, 28998, 29041, 29066, 29104, 29136, + 29159, 29203, 29244, 29285, 29327, 29368, 29411, 29452, + 29496, 29540, 29582, 29623, 29664, 29709, 29751, 29793, + 29835, 29876, 29917, 29958, 29999, 30040, 30081, 30123, + 30164, 30205, 30249, 30293, 30337, 30378, 30419, 30463, + 30508, 30549, 30592, 30636, 30679, 30721, 30762, 30804, + 30845, 30890, 30949, 30993, 31034, 31080, 31121, 31166, + 31208, 31253, 31298, 31345, 31394, 31440, 31483, 31528, + 31570, 31614, 31658, 31705, 31750, 31794, 31841, 31882, + 31927, 31969, 32010, 32053, 32094, 32139, 32180, 32224, + 32265, 32292, 32315, 32338, 32361, 32384, 32407, 32430, + 32454, 32479, 32504, 32528, 32554, 32579, 32602, 32625, + 32649, 32674, 32699, 32723, 32749, 32774, 32797, 32820, + 32843, 32893, 32936, 32964, 32988, 33025, 33052, 33080, + 33103, 33126, 33149, 33172, 33222, 33265, 33293, 33317, + 33354, 33381, 33409, 33432, 33460, 33487, 33511, 33537, + 33565, 33591, 33619, 33649, 33675, 33699, 33723, 33750, + 33804, 33875, 33915, 33939, 33984, 34006, 34043, 34083, + 34107, 34130, 34152, 34174, 34200, 34223, 34246, 34269, + 34292, 34315, 34338, 34361, 34384, 34408, 34433, 34458, + 34482, 34508, 34533, 34556, 34579, 34603, 34628, 34653, + 34677, 34703, 34728, 34751, 34774, 34797, 34847, 34890, + 
34918, 34942, 34979, 35006, 35034, 35057, 35080, 35103, + 35126, 35176, 35219, 35247, 35271, 35308, 35335, 35363, + 35386, 35410, 35435, 35460, 35487, 35515, 35541, 35566, + 35593, 35617, 35643, 35667, 35694, 35721, 35745, 35772, + 35795, 35822, 35846, 35869, 35894, 35917, 35944, 35967, + 35993, 36016, 36039, 36066, 36091, 36114, 36138, 36162, + 36185, 36209, 36232, 36255, 36279, 36302, 36327, 36350, + 36374, 36398, 36422, 36445, 36468, 36495, 36519, 36543, + 36567, 36590, 36613, 36636, 36659, 36682, 36705, 36729, + 36752, 36775, 36801, 36825, 36849, 36872, 36895, 36919, + 36943, 36966, 36991, 37017, 37042, 37066, 37089, 37113, + 37136, 37163, 37204, 37230, 37253, 37281, 37304, 37331, + 37356, 37378, 37402, 37454, 37494, 37538, 37579, 37622, + 37649, 37672, 37713, 37757, 37785, 37809, 37835, 37864, + 37890, 37918, 37948, 37974, 38017, 38058, 38100, 38143, + 38186, 38228, 38272, 38317, 38341, 38365, 38392, 38446, + 38517, 38557, 38586, 38638, 38679, 38724, 38767, 38808, + 38850, 38892, 38935, 38957, 38999, 39041, 39082, 39123, + 39164, 39205, 39246, 39287, 39329, 39370, 39411, 39455, + 39499, 39524, 39562, 39594, 39617, 39661, 39702, 39743, + 39787, 39832, 39873, 39916, 39960, 40003, 40045, 40086, + 40128, 40169, 40214, 40273, 40317, 40358, 40404, 40445, + 40490, 40534, 40575, 40616, 40658, 40699, 40742, 40783, + 40827, 40871, 40913, 40954, 40995, 41040, 41082, 41126, + 41167, 41208, 41276, 41337, 41383, 41425, 41480, 41525, + 41571, 41612, 41654, 41699, 41744, 41791, 41840, 41886, + 41929, 41974, 42016, 42060, 42104, 42151, 42196, 42240, + 42287, 42328, 42373, 42415, 42456, 42499, 42540, 42585, + 42626, 42670, 42711, 42753, 42791, +} + +var _s_indicies []int16 = []int16{ + 1, 0, 2, 1, 2, 1, 1, 0, + 1, 1, 0, 1, 1, 1, 0, 1, + 1, 1, 0, 1, 1, 0, 1, 1, + 1, 1, 0, 1, 1, 1, 0, 0, + 1, 1, 0, 1, 0, 3, 4, 5, + 6, 7, 9, 10, 11, 13, 14, 15, + 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 8, + 12, 0, 1, 1, 1, 1, 2, 1, + 2, 1, 2, 2, 2, 1, 2, 2, + 2, 1, 1, 1, 1, 
2, 2, 2, + 2, 2, 2, 1, 2, 2, 2, 2, + 2, 2, 1, 2, 2, 2, 2, 1, + 1, 1, 1, 2, 1, 1, 1, 1, + 1, 2, 1, 1, 2, 1, 1, 1, + 1, 2, 1, 1, 2, 2, 2, 2, + 2, 2, 1, 1, 1, 1, 1, 1, + 2, 1, 1, 2, 2, 2, 2, 2, + 2, 1, 1, 2, 1, 1, 1, 1, + 1, 2, 1, 1, 2, 1, 2, 1, + 1, 1, 2, 1, 2, 1, 1, 1, + 1, 1, 2, 1, 2, 1, 1, 1, + 1, 2, 1, 2, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, + 43, 44, 45, 46, 47, 48, 49, 50, + 51, 0, 1, 2, 1, 1, 1, 1, + 1, 2, 1, 1, 1, 2, 1, 2, + 1, 1, 2, 1, 1, 2, 1, 2, + 2, 2, 1, 1, 2, 1, 2, 1, + 1, 2, 1, 2, 1, 1, 1, 2, + 1, 2, 1, 1, 2, 2, 2, 1, + 1, 1, 2, 1, 2, 1, 2, 1, + 1, 1, 1, 1, 2, 1, 1, 2, + 52, 53, 54, 55, 56, 57, 0, 1, + 1, 2, 1, 1, 2, 1, 2, 1, + 2, 1, 2, 1, 2, 58, 59, 0, + 1, 2, 1, 2, 60, 61, 62, 63, + 64, 65, 66, 67, 68, 69, 70, 71, + 72, 73, 74, 0, 1, 1, 2, 1, + 2, 1, 2, 1, 1, 1, 1, 2, + 1, 1, 2, 2, 2, 1, 1, 2, + 1, 2, 1, 1, 2, 2, 2, 1, + 1, 2, 1, 1, 1, 2, 1, 1, + 1, 1, 2, 1, 1, 1, 2, 1, + 1, 2, 75, 76, 56, 61, 77, 0, + 1, 2, 1, 1, 2, 1, 2, 78, + 79, 80, 81, 82, 83, 0, 84, 85, + 86, 87, 88, 2, 1, 2, 1, 2, + 1, 2, 1, 1, 1, 1, 1, 2, + 1, 2, 89, 90, 91, 92, 93, 94, + 95, 96, 97, 98, 99, 100, 101, 102, + 103, 100, 104, 105, 106, 2, 1, 1, + 2, 2, 1, 2, 2, 2, 1, 1, + 1, 2, 1, 2, 1, 1, 2, 2, + 2, 1, 1, 2, 1, 2, 1, 1, + 1, 2, 1, 1, 1, 1, 1, 1, + 1, 2, 1, 2, 2, 1, 1, 1, + 2, 2, 2, 1, 2, 1, 1, 2, + 1, 2, 107, 108, 109, 110, 2, 1, + 2, 1, 2, 1, 2, 1, 2, 111, + 2, 1, 1, 2, 112, 113, 114, 115, + 116, 117, 2, 1, 1, 2, 2, 2, + 2, 1, 1, 2, 1, 1, 2, 2, + 2, 1, 1, 1, 1, 2, 118, 2, + 1, 2, 119, 0, 120, 121, 122, 124, + 123, 2, 1, 1, 2, 2, 1, 1, + 127, 128, 129, 130, 131, 132, 133, 134, + 135, 136, 137, 138, 139, 140, 141, 142, + 143, 144, 145, 146, 147, 126, 125, 149, + 150, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, + 167, 168, 169, 170, 171, 172, 173, 174, + 175, 177, 178, 179, 180, 148, 148, 151, + 176, 2, 148, 181, 148, 148, 2, 148, + 148, 148, 2, 148, 2, 148, 148, 2, + 2, 2, 2, 2, 2, 148, 2, 181, + 2, 2, 148, 2, 148, 2, 181, 148, + 
148, 148, 148, 148, 2, 2, 148, 2, + 181, 148, 2, 148, 2, 2, 148, 2, + 2, 181, 2, 148, 181, 181, 181, 148, + 148, 2, 181, 181, 181, 148, 2, 181, + 148, 181, 148, 2, 2, 2, 2, 181, + 181, 2, 2, 2, 148, 148, 181, 148, + 181, 2, 2, 148, 181, 181, 2, 148, + 148, 148, 181, 148, 2, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, + 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 2, 148, 148, + 148, 148, 181, 2, 148, 181, 2, 148, + 2, 181, 2, 148, 148, 181, 148, 148, + 2, 148, 181, 2, 2, 2, 181, 181, + 2, 2, 2, 2, 181, 148, 148, 2, + 2, 2, 2, 2, 148, 2, 148, 2, + 181, 181, 181, 148, 148, 148, 148, 148, + 148, 148, 181, 2, 2, 2, 2, 2, + 2, 148, 2, 148, 2, 181, 148, 181, + 148, 148, 148, 148, 148, 148, 181, 2, + 2, 2, 148, 148, 2, 148, 2, 181, + 148, 181, 148, 148, 148, 148, 148, 148, + 181, 2, 148, 181, 181, 181, 181, 148, + 148, 181, 2, 181, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 181, 2, + 148, 181, 181, 181, 181, 2, 148, 181, + 148, 148, 148, 148, 181, 2, 2, 2, + 2, 2, 148, 2, 148, 2, 181, 148, + 181, 148, 148, 148, 148, 148, 181, 2, + 148, 181, 181, 181, 181, 148, 181, 148, + 2, 148, 181, 148, 148, 148, 181, 2, + 2, 2, 148, 2, 2, 148, 2, 148, + 2, 181, 148, 181, 148, 148, 148, 2, + 181, 181, 148, 181, 181, 181, 2, 181, + 181, 2, 181, 2, 181, 181, 181, 2, + 181, 2, 148, 181, 181, 181, 181, 181, + 2, 148, 148, 181, 2, 181, 181, 148, + 181, 181, 2, 181, 2, 214, 215, 216, + 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, + 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 151, + 152, 248, 249, 250, 251, 252, 151, 153, + 151, 2, 181, 2, 181, 181, 181, 181, + 181, 2, 181, 181, 181, 148, 2, 2, + 2, 2, 2, 148, 2, 2, 2, 2, + 2, 148, 2, 2, 2, 2, 2, 148, + 148, 148, 148, 148, 2, 2, 2, 148, + 2, 181, 2, 148, 148, 148, 2, 2, + 2, 148, 148, 2, 2, 2, 148, 148, + 148, 2, 2, 2, 148, 148, 148, 181, + 148, 181, 2, 148, 181, 148, 148, 181, + 2, 181, 
2, 2, 2, 181, 181, 148, + 2, 2, 148, 181, 148, 148, 2, 2, + 148, 148, 181, 181, 2, 148, 181, 2, + 181, 181, 181, 2, 181, 2, 181, 148, + 181, 2, 148, 2, 2, 181, 181, 148, + 181, 148, 148, 2, 181, 2, 148, 148, + 181, 2, 148, 148, 2, 181, 181, 181, + 148, 181, 148, 181, 2, 181, 181, 2, + 2, 2, 148, 2, 2, 2, 2, 2, + 2, 2, 148, 148, 148, 148, 2, 148, + 148, 148, 148, 148, 148, 148, 2, 253, + 254, 255, 256, 257, 258, 259, 260, 261, + 262, 263, 151, 264, 265, 266, 267, 268, + 269, 2, 181, 181, 2, 148, 148, 181, + 181, 2, 148, 2, 181, 2, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 2, 148, 148, 148, 2, 2, 148, + 148, 2, 2, 148, 148, 148, 2, 2, + 148, 2, 181, 2, 148, 148, 148, 148, + 148, 2, 181, 2, 2, 148, 148, 148, + 148, 148, 148, 2, 148, 148, 148, 148, + 181, 2, 148, 2, 270, 271, 272, 151, + 273, 2, 148, 181, 148, 2, 181, 2, + 148, 148, 2, 2, 2, 148, 153, 274, + 275, 153, 276, 277, 278, 279, 280, 281, + 282, 283, 284, 285, 286, 287, 288, 289, + 290, 291, 292, 293, 294, 295, 296, 152, + 297, 153, 151, 2, 2, 148, 148, 2, + 2, 2, 2, 148, 148, 148, 181, 181, + 2, 148, 181, 148, 2, 181, 2, 148, + 148, 148, 2, 2, 2, 2, 148, 148, + 2, 181, 181, 181, 181, 2, 148, 148, + 2, 181, 148, 181, 2, 148, 148, 2, + 148, 2, 181, 148, 181, 148, 2, 181, + 2, 2, 148, 181, 148, 181, 2, 148, + 2, 2, 181, 148, 181, 2, 181, 148, + 181, 181, 2, 181, 181, 181, 181, 2, + 181, 148, 181, 148, 181, 2, 148, 148, + 148, 148, 148, 148, 2, 2, 2, 148, + 148, 181, 181, 2, 151, 298, 299, 2, + 151, 2, 148, 2, 2, 148, 300, 301, + 302, 303, 304, 305, 306, 307, 308, 309, + 310, 311, 312, 313, 314, 151, 2, 181, + 148, 148, 148, 148, 148, 148, 2, 148, + 148, 148, 2, 2, 148, 148, 2, 2, + 148, 148, 2, 2, 148, 2, 2, 148, + 181, 181, 2, 148, 148, 2, 181, 2, + 148, 148, 2, 148, 2, 181, 148, 2, + 148, 148, 148, 148, 181, 2, 315, 316, + 317, 318, 319, 320, 321, 322, 323, 324, + 2, 325, 326, 153, 327, 328, 329, 330, + 331, 332, 333, 334, 335, 153, 151, 336, + 337, 338, 153, 339, 340, 341, 342, 343, + 344, 
345, 346, 347, 348, 349, 350, 351, + 352, 353, 153, 259, 151, 354, 2, 148, + 148, 148, 148, 148, 2, 2, 2, 148, + 2, 148, 148, 2, 181, 2, 148, 148, + 2, 181, 2, 148, 148, 148, 2, 2, + 181, 2, 148, 148, 148, 2, 2, 2, + 2, 148, 2, 148, 148, 148, 2, 2, + 148, 2, 148, 148, 148, 2, 148, 148, + 148, 148, 148, 148, 2, 2, 2, 148, + 148, 2, 148, 148, 2, 148, 148, 2, + 148, 148, 2, 148, 181, 181, 181, 181, + 148, 148, 148, 181, 2, 148, 2, 148, + 2, 148, 148, 181, 2, 148, 2, 148, + 148, 2, 148, 2, 148, 2, 355, 356, + 357, 358, 359, 360, 361, 362, 363, 364, + 365, 366, 367, 368, 369, 370, 371, 368, + 372, 373, 374, 375, 376, 377, 2, 181, + 148, 181, 2, 2, 181, 148, 2, 2, + 181, 148, 2, 181, 148, 181, 2, 181, + 148, 148, 2, 181, 148, 181, 2, 148, + 148, 148, 2, 2, 181, 148, 148, 181, + 2, 148, 148, 148, 148, 148, 148, 2, + 181, 2, 148, 148, 181, 148, 148, 148, + 148, 148, 148, 181, 2, 148, 181, 181, + 181, 181, 148, 181, 181, 181, 2, 148, + 181, 2, 2, 148, 2, 181, 148, 181, + 181, 2, 2, 148, 2, 181, 148, 2, + 181, 148, 181, 2, 181, 2, 148, 2, + 2, 2, 148, 148, 2, 153, 378, 379, + 380, 151, 153, 151, 2, 2, 148, 2, + 148, 2, 148, 153, 379, 151, 2, 153, + 381, 151, 2, 2, 148, 153, 382, 343, + 383, 384, 385, 386, 153, 387, 388, 151, + 2, 2, 148, 148, 181, 2, 148, 181, + 2, 148, 148, 148, 2, 2, 148, 2, + 181, 2, 148, 181, 148, 2, 153, 389, + 390, 2, 2, 2, 148, 148, 148, 181, + 181, 2, 391, 392, 393, 153, 394, 395, + 396, 397, 398, 399, 400, 401, 402, 403, + 404, 405, 406, 407, 151, 2, 181, 181, + 2, 2, 2, 2, 181, 181, 2, 2, + 148, 2, 2, 2, 2, 2, 2, 2, + 148, 2, 148, 2, 2, 2, 2, 2, + 2, 148, 148, 148, 148, 148, 2, 2, + 148, 2, 2, 2, 148, 2, 2, 148, + 2, 2, 148, 2, 2, 148, 2, 2, + 148, 181, 181, 2, 2, 2, 181, 181, + 181, 181, 2, 153, 408, 409, 410, 411, + 151, 2, 2, 181, 2, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 2, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 2, 148, 148, 148, 148, + 148, 2, 412, 413, 414, 2, 148, 2, 
+ 2, 2, 148, 2, 148, 415, 2, 416, + 417, 155, 419, 418, 2, 181, 181, 2, + 2, 181, 181, 2, 181, 422, 423, 424, + 425, 426, 427, 428, 429, 430, 431, 432, + 433, 434, 435, 436, 437, 438, 439, 440, + 441, 442, 421, 420, 443, 420, 443, 420, + 420, 443, 443, 420, 443, 443, 420, 443, + 443, 443, 420, 443, 443, 443, 420, 421, + 443, 443, 421, 420, 443, 443, 443, 443, + 421, 420, 443, 443, 443, 420, 420, 443, + 443, 420, 421, 443, 420, 444, 445, 446, + 447, 448, 450, 451, 452, 454, 455, 456, + 457, 458, 459, 460, 461, 462, 463, 464, + 465, 466, 467, 468, 469, 470, 471, 449, + 453, 420, 443, 443, 443, 443, 420, 443, + 420, 443, 420, 420, 420, 443, 420, 420, + 420, 421, 420, 443, 443, 443, 443, 420, + 420, 420, 420, 420, 420, 421, 420, 443, + 420, 420, 420, 420, 421, 420, 420, 443, + 420, 420, 420, 420, 421, 420, 443, 443, + 443, 443, 420, 443, 443, 443, 443, 443, + 421, 420, 443, 443, 420, 443, 443, 443, + 443, 421, 420, 443, 443, 420, 420, 420, + 420, 420, 420, 421, 420, 443, 443, 443, + 443, 443, 443, 421, 420, 443, 443, 420, + 420, 420, 420, 420, 420, 421, 420, 443, + 443, 420, 443, 443, 443, 443, 421, 443, + 420, 443, 443, 420, 443, 421, 420, 443, + 443, 443, 420, 443, 421, 420, 443, 443, + 443, 443, 421, 443, 420, 443, 420, 443, + 443, 443, 443, 420, 443, 420, 472, 473, + 474, 475, 476, 477, 478, 479, 480, 481, + 482, 483, 484, 485, 486, 487, 488, 489, + 490, 491, 492, 493, 494, 495, 420, 443, + 420, 421, 443, 443, 443, 443, 443, 420, + 443, 443, 421, 443, 420, 443, 420, 443, + 443, 420, 443, 443, 420, 443, 420, 420, + 420, 421, 420, 443, 443, 421, 420, 443, + 420, 443, 443, 420, 421, 2, 421, 2, + 443, 420, 443, 443, 443, 420, 421, 421, + 443, 420, 443, 443, 420, 420, 421, 420, + 420, 443, 443, 443, 421, 420, 443, 420, + 443, 420, 421, 421, 420, 443, 443, 443, + 443, 443, 420, 443, 443, 420, 496, 497, + 498, 499, 500, 501, 420, 443, 443, 420, + 443, 443, 420, 443, 420, 443, 420, 443, + 420, 443, 420, 502, 503, 420, 443, 420, + 443, 420, 504, 505, 506, 507, 508, 509, + 
510, 511, 512, 513, 514, 515, 516, 517, + 518, 519, 420, 421, 420, 443, 443, 420, + 443, 420, 443, 420, 443, 443, 443, 443, + 420, 443, 443, 420, 420, 421, 420, 420, + 443, 421, 443, 420, 443, 420, 443, 443, + 420, 420, 421, 420, 420, 421, 420, 443, + 443, 420, 443, 443, 421, 443, 420, 443, + 443, 443, 443, 420, 443, 443, 443, 420, + 443, 443, 421, 420, 520, 521, 500, 506, + 522, 420, 443, 420, 443, 443, 420, 443, + 420, 523, 524, 525, 526, 527, 528, 420, + 529, 530, 531, 504, 532, 533, 420, 443, + 420, 443, 420, 443, 420, 443, 443, 443, + 443, 443, 420, 443, 420, 534, 535, 536, + 537, 538, 539, 540, 541, 542, 543, 544, + 545, 546, 547, 548, 549, 546, 550, 551, + 552, 553, 504, 420, 443, 443, 420, 420, + 421, 420, 443, 420, 420, 420, 443, 421, + 420, 443, 443, 421, 420, 443, 420, 443, + 443, 420, 420, 420, 421, 420, 443, 443, + 420, 443, 421, 420, 443, 443, 443, 420, + 443, 443, 443, 443, 443, 443, 443, 420, + 443, 420, 420, 421, 420, 443, 443, 443, + 420, 420, 420, 443, 420, 421, 420, 443, + 443, 420, 421, 2, 443, 421, 420, 504, + 554, 555, 484, 556, 557, 420, 443, 420, + 443, 420, 443, 420, 443, 420, 558, 420, + 443, 443, 420, 559, 560, 561, 562, 563, + 564, 565, 420, 443, 443, 420, 420, 420, + 420, 443, 443, 420, 421, 420, 443, 443, + 420, 420, 420, 443, 443, 443, 443, 420, + 566, 420, 443, 420, 567, 420, 568, 569, + 423, 424, 570, 420, 443, 443, 420, 420, + 443, 443, 148, 571, 148, 148, 420, 571, + 420, 420, 420, 148, 420, 148, 420, 571, + 420, 571, 148, 420, 420, 571, 420, 148, + 571, 148, 571, 571, 572, 572, 420, 573, + 574, 575, 576, 577, 578, 579, 580, 581, + 582, 583, 584, 585, 586, 587, 588, 589, + 590, 591, 592, 593, 420, 594, 420, 594, + 420, 420, 594, 594, 420, 594, 594, 420, + 594, 594, 594, 595, 595, 420, 148, 595, + 148, 148, 420, 420, 595, 420, 420, 148, + 420, 148, 420, 595, 420, 595, 148, 420, + 420, 595, 420, 148, 595, 148, 595, 595, + 572, 572, 420, 595, 595, 595, 148, 420, + 421, 595, 148, 595, 421, 148, 420, 420, + 420, 420, 595, 595, 421, 
420, 420, 148, + 148, 595, 148, 595, 420, 420, 148, 595, + 595, 420, 148, 148, 421, 148, 595, 148, + 420, 596, 597, 184, 598, 599, 600, 601, + 602, 603, 604, 605, 606, 607, 608, 609, + 610, 611, 612, 613, 614, 615, 616, 617, + 618, 619, 620, 621, 622, 623, 624, 625, + 626, 420, 148, 148, 148, 148, 595, 420, + 148, 595, 420, 595, 420, 148, 148, 595, + 148, 420, 148, 420, 421, 148, 595, 420, + 420, 420, 595, 595, 420, 420, 420, 420, + 595, 148, 148, 420, 420, 420, 420, 420, + 148, 420, 421, 148, 420, 595, 595, 595, + 148, 148, 148, 148, 148, 148, 148, 595, + 420, 420, 420, 420, 420, 420, 148, 420, + 421, 148, 420, 595, 148, 595, 148, 148, + 148, 148, 148, 148, 595, 420, 420, 420, + 148, 148, 420, 148, 420, 421, 420, 595, + 148, 595, 148, 148, 148, 148, 148, 148, + 595, 420, 148, 595, 595, 595, 595, 148, + 148, 595, 421, 420, 595, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 595, + 420, 148, 595, 595, 595, 595, 421, 420, + 148, 595, 148, 148, 148, 148, 595, 420, + 420, 420, 420, 420, 148, 420, 148, 420, + 421, 420, 595, 148, 595, 148, 148, 148, + 148, 148, 595, 420, 148, 595, 595, 595, + 595, 148, 595, 421, 148, 420, 148, 595, + 148, 148, 148, 595, 420, 420, 420, 148, + 420, 420, 148, 420, 421, 420, 148, 420, + 595, 148, 595, 148, 148, 148, 420, 595, + 595, 148, 595, 595, 421, 595, 420, 595, + 595, 420, 595, 421, 420, 595, 595, 595, + 420, 595, 421, 420, 148, 595, 595, 595, + 595, 421, 595, 420, 148, 148, 595, 420, + 595, 595, 148, 595, 595, 420, 595, 420, + 627, 628, 629, 217, 218, 219, 220, 221, + 630, 223, 224, 225, 226, 227, 228, 631, + 632, 633, 634, 635, 234, 636, 236, 637, + 483, 484, 638, 639, 640, 641, 642, 643, + 644, 645, 646, 647, 151, 152, 648, 249, + 250, 251, 252, 151, 153, 151, 420, 595, + 420, 421, 595, 595, 595, 595, 595, 420, + 595, 595, 421, 595, 148, 420, 420, 595, + 420, 148, 148, 148, 595, 148, 595, 420, + 148, 595, 148, 148, 595, 420, 595, 420, + 420, 420, 421, 420, 595, 595, 421, 148, + 420, 595, 148, 148, 420, 148, 595, 595, + 420, 
148, 595, 420, 595, 595, 595, 420, + 421, 421, 595, 420, 595, 148, 595, 420, + 148, 420, 421, 420, 420, 595, 595, 148, + 595, 148, 421, 148, 420, 595, 420, 148, + 148, 595, 420, 421, 148, 421, 148, 2, + 595, 595, 595, 148, 595, 148, 595, 420, + 595, 595, 420, 649, 650, 255, 651, 257, + 258, 259, 260, 261, 262, 263, 151, 652, + 265, 653, 267, 654, 269, 420, 595, 595, + 571, 420, 420, 148, 148, 420, 420, 595, + 420, 571, 595, 420, 420, 595, 420, 148, + 595, 420, 420, 148, 148, 148, 148, 148, + 595, 420, 655, 656, 272, 151, 273, 420, + 148, 595, 148, 420, 595, 420, 153, 274, + 275, 153, 657, 658, 659, 660, 280, 281, + 282, 661, 284, 662, 663, 664, 665, 666, + 667, 668, 669, 670, 671, 295, 296, 152, + 672, 153, 151, 420, 2, 421, 2, 148, + 148, 148, 595, 595, 420, 148, 595, 148, + 420, 595, 420, 148, 595, 595, 595, 595, + 420, 148, 595, 148, 595, 420, 148, 148, + 420, 421, 420, 148, 420, 595, 421, 148, + 595, 148, 420, 595, 420, 420, 148, 595, + 148, 595, 420, 148, 420, 421, 420, 420, + 421, 420, 595, 148, 595, 420, 595, 148, + 595, 421, 595, 420, 595, 595, 595, 595, + 420, 595, 148, 595, 148, 595, 420, 148, + 595, 595, 421, 420, 673, 674, 302, 303, + 304, 305, 306, 307, 675, 676, 677, 678, + 312, 679, 680, 151, 420, 595, 572, 148, + 148, 572, 572, 572, 420, 572, 572, 572, + 148, 2, 595, 595, 571, 420, 571, 148, + 148, 420, 595, 420, 148, 571, 148, 420, + 595, 148, 420, 148, 148, 148, 148, 595, + 420, 681, 682, 317, 318, 319, 683, 684, + 685, 686, 324, 420, 325, 326, 153, 327, + 328, 687, 330, 688, 332, 689, 334, 335, + 153, 151, 690, 337, 338, 153, 339, 340, + 341, 342, 343, 344, 345, 346, 691, 348, + 349, 692, 351, 352, 353, 153, 259, 151, + 354, 420, 595, 420, 595, 420, 148, 420, + 595, 420, 148, 2, 421, 2, 148, 148, + 595, 595, 595, 595, 148, 148, 148, 595, + 420, 148, 148, 595, 420, 693, 694, 695, + 696, 697, 698, 699, 700, 701, 364, 702, + 703, 704, 705, 706, 707, 708, 705, 709, + 710, 552, 711, 375, 712, 377, 420, 595, + 148, 595, 420, 420, 421, 420, 595, 148, + 
420, 420, 595, 148, 421, 2, 595, 148, + 595, 421, 420, 595, 148, 148, 420, 595, + 148, 595, 420, 420, 148, 420, 420, 421, + 148, 420, 595, 148, 148, 595, 420, 595, + 420, 421, 420, 148, 148, 595, 148, 148, + 148, 148, 148, 148, 595, 420, 148, 595, + 595, 595, 595, 148, 595, 595, 595, 420, + 148, 595, 420, 420, 148, 420, 421, 420, + 595, 148, 595, 595, 420, 420, 148, 420, + 595, 148, 420, 421, 420, 595, 148, 595, + 420, 595, 421, 420, 421, 2, 2, 148, + 153, 382, 713, 714, 715, 716, 386, 153, + 717, 718, 151, 420, 148, 421, 2, 148, + 595, 420, 148, 595, 420, 148, 421, 148, + 148, 2, 420, 595, 420, 148, 595, 148, + 420, 153, 389, 719, 420, 148, 148, 595, + 595, 420, 720, 721, 722, 153, 394, 395, + 396, 397, 398, 399, 400, 401, 402, 403, + 723, 724, 725, 726, 151, 420, 595, 595, + 420, 420, 420, 420, 595, 595, 420, 2, + 2, 421, 2, 148, 595, 595, 420, 420, + 420, 595, 595, 595, 595, 420, 153, 727, + 409, 410, 411, 151, 420, 420, 595, 420, + 148, 728, 420, 729, 730, 731, 733, 732, + 420, 595, 595, 420, 420, 595, 595, 420, + 595, 594, 594, 594, 420, 594, 594, 420, + 594, 594, 594, 594, 420, 594, 594, 594, + 420, 420, 594, 594, 420, 594, 420, 734, + 735, 736, 737, 738, 740, 741, 742, 744, + 745, 746, 747, 748, 749, 750, 751, 752, + 753, 754, 755, 756, 757, 758, 759, 760, + 761, 739, 743, 420, 594, 594, 594, 594, + 420, 594, 420, 594, 420, 420, 420, 594, + 420, 420, 420, 594, 594, 594, 594, 420, + 420, 420, 420, 420, 420, 594, 420, 420, + 420, 420, 420, 420, 594, 420, 420, 420, + 420, 594, 594, 594, 594, 420, 594, 594, + 594, 594, 594, 420, 594, 594, 420, 594, + 594, 594, 594, 420, 594, 594, 420, 420, + 420, 420, 420, 420, 594, 594, 594, 594, + 594, 594, 420, 594, 594, 420, 420, 420, + 420, 420, 420, 594, 594, 420, 594, 594, + 594, 594, 594, 420, 594, 594, 420, 594, + 420, 594, 594, 594, 420, 594, 420, 594, + 594, 594, 594, 594, 420, 594, 420, 594, + 594, 594, 594, 420, 594, 420, 762, 763, + 764, 765, 766, 767, 768, 769, 770, 771, + 772, 773, 774, 775, 776, 777, 778, 779, + 
780, 781, 782, 420, 594, 420, 594, 594, + 594, 594, 594, 420, 594, 594, 594, 420, + 594, 420, 594, 594, 420, 594, 594, 420, + 594, 420, 420, 420, 594, 594, 420, 594, + 420, 594, 594, 420, 594, 420, 594, 594, + 594, 420, 594, 420, 594, 594, 420, 420, + 420, 594, 594, 594, 420, 594, 420, 594, + 420, 594, 594, 594, 594, 594, 420, 594, + 594, 420, 783, 784, 785, 786, 787, 788, + 420, 594, 594, 420, 594, 594, 420, 594, + 420, 594, 420, 594, 420, 594, 420, 789, + 790, 420, 594, 420, 594, 420, 791, 792, + 793, 794, 795, 796, 797, 798, 799, 800, + 801, 802, 803, 804, 805, 420, 594, 594, + 420, 594, 420, 594, 420, 594, 594, 594, + 594, 420, 594, 594, 420, 420, 420, 594, + 594, 420, 594, 420, 594, 594, 420, 420, + 420, 594, 594, 420, 594, 594, 594, 420, + 594, 594, 594, 594, 420, 594, 594, 594, + 420, 594, 594, 420, 806, 807, 808, 787, + 792, 809, 420, 594, 595, 595, 595, 595, + 420, 595, 595, 595, 420, 594, 594, 420, + 594, 420, 810, 811, 812, 813, 814, 815, + 420, 816, 817, 818, 819, 820, 420, 594, + 420, 594, 420, 594, 420, 594, 594, 594, + 594, 594, 420, 594, 420, 821, 822, 823, + 824, 825, 826, 827, 828, 829, 830, 831, + 832, 833, 834, 835, 832, 836, 837, 838, + 420, 594, 594, 420, 420, 594, 420, 420, + 420, 594, 594, 594, 420, 594, 420, 594, + 594, 420, 420, 420, 594, 594, 420, 594, + 420, 594, 594, 594, 420, 594, 594, 594, + 594, 594, 594, 594, 420, 594, 420, 420, + 594, 594, 594, 420, 420, 420, 594, 420, + 594, 594, 420, 594, 420, 839, 840, 841, + 842, 420, 594, 420, 594, 420, 594, 420, + 594, 420, 843, 420, 594, 594, 420, 844, + 845, 846, 847, 848, 849, 420, 594, 594, + 420, 420, 420, 420, 594, 594, 420, 594, + 594, 420, 420, 420, 594, 594, 594, 594, + 420, 850, 420, 594, 420, 851, 420, 852, + 853, 574, 575, 854, 420, 594, 594, 420, + 420, 594, 594, 148, 572, 148, 181, 148, + 420, 181, 2, 2, 2, 2, 2, 148, + 572, 420, 420, 420, 148, 420, 148, 420, + 572, 181, 148, 148, 148, 148, 2, 420, + 572, 148, 420, 420, 572, 420, 148, 572, + 148, 181, 572, 572, 572, 572, 420, 
572, + 572, 572, 148, 420, 421, 572, 148, 572, + 421, 148, 420, 420, 420, 420, 572, 572, + 421, 420, 420, 148, 148, 572, 148, 572, + 420, 420, 148, 572, 572, 420, 148, 148, + 421, 148, 572, 148, 420, 855, 856, 184, + 857, 858, 859, 860, 861, 862, 863, 864, + 865, 866, 867, 868, 869, 870, 871, 872, + 873, 874, 875, 876, 877, 878, 879, 880, + 881, 882, 883, 884, 885, 420, 148, 148, + 148, 148, 572, 420, 148, 572, 420, 572, + 420, 148, 148, 572, 148, 420, 148, 420, + 421, 148, 572, 420, 420, 420, 572, 572, + 420, 420, 420, 420, 572, 148, 148, 420, + 420, 420, 420, 420, 148, 420, 421, 148, + 420, 572, 572, 572, 148, 148, 148, 148, + 148, 148, 148, 572, 420, 420, 420, 420, + 420, 420, 148, 420, 421, 148, 420, 572, + 148, 572, 148, 148, 148, 148, 148, 148, + 572, 420, 420, 420, 148, 148, 420, 148, + 420, 421, 420, 572, 148, 572, 148, 148, + 148, 148, 148, 148, 572, 420, 148, 572, + 572, 572, 572, 148, 148, 572, 421, 420, + 572, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 572, 420, 148, 572, 572, + 572, 572, 421, 420, 148, 572, 148, 148, + 148, 148, 572, 420, 420, 420, 420, 420, + 148, 420, 148, 420, 421, 420, 572, 148, + 572, 148, 148, 148, 148, 148, 572, 420, + 148, 572, 572, 572, 572, 148, 572, 421, + 148, 420, 148, 572, 148, 148, 148, 572, + 420, 420, 420, 148, 420, 420, 148, 420, + 421, 420, 148, 420, 572, 148, 572, 148, + 148, 148, 420, 572, 572, 148, 572, 572, + 421, 572, 420, 572, 572, 420, 572, 421, + 420, 572, 572, 572, 420, 572, 421, 420, + 148, 572, 572, 572, 572, 421, 572, 420, + 148, 148, 572, 420, 572, 572, 148, 572, + 572, 420, 572, 420, 886, 887, 888, 217, + 218, 219, 220, 221, 889, 223, 224, 225, + 226, 227, 228, 890, 891, 892, 893, 894, + 234, 895, 236, 896, 483, 484, 897, 898, + 899, 900, 901, 902, 903, 904, 646, 905, + 151, 152, 906, 249, 250, 251, 252, 151, + 153, 151, 420, 572, 420, 421, 572, 572, + 572, 572, 572, 420, 572, 572, 421, 572, + 148, 420, 420, 572, 420, 148, 148, 148, + 572, 148, 572, 420, 148, 572, 148, 148, + 572, 420, 572, 
420, 420, 420, 421, 420, + 572, 572, 421, 148, 420, 572, 148, 148, + 420, 148, 572, 572, 420, 148, 572, 420, + 572, 572, 572, 420, 421, 421, 572, 420, + 572, 148, 572, 420, 148, 420, 421, 420, + 420, 572, 572, 148, 572, 148, 421, 148, + 420, 572, 420, 148, 148, 572, 420, 572, + 572, 572, 148, 572, 148, 572, 420, 572, + 572, 420, 907, 908, 255, 909, 257, 258, + 259, 260, 261, 262, 263, 151, 910, 265, + 911, 267, 912, 269, 420, 181, 181, 572, + 181, 572, 571, 420, 420, 148, 148, 420, + 420, 572, 420, 571, 572, 420, 420, 572, + 420, 148, 572, 420, 420, 148, 148, 148, + 148, 148, 572, 420, 913, 914, 272, 151, + 273, 420, 148, 572, 148, 420, 572, 420, + 153, 274, 275, 153, 657, 915, 916, 917, + 280, 281, 282, 918, 284, 919, 920, 921, + 922, 923, 924, 925, 926, 927, 928, 295, + 296, 152, 929, 153, 151, 420, 148, 148, + 572, 572, 420, 148, 572, 148, 420, 572, + 420, 148, 572, 572, 572, 572, 420, 148, + 572, 148, 572, 420, 148, 148, 420, 421, + 420, 148, 420, 572, 421, 148, 572, 148, + 420, 572, 420, 420, 148, 572, 148, 572, + 420, 148, 420, 421, 420, 420, 421, 420, + 572, 148, 572, 420, 572, 148, 572, 421, + 572, 420, 572, 572, 572, 572, 420, 572, + 148, 572, 148, 572, 420, 148, 572, 572, + 421, 420, 930, 674, 302, 303, 304, 305, + 306, 307, 931, 932, 933, 934, 312, 935, + 936, 151, 420, 572, 148, 148, 572, 572, + 572, 420, 181, 572, 572, 571, 420, 181, + 181, 571, 148, 148, 2, 572, 420, 148, + 181, 181, 181, 571, 148, 2, 572, 148, + 420, 148, 148, 148, 148, 572, 420, 937, + 938, 317, 318, 319, 939, 940, 941, 942, + 324, 420, 325, 326, 153, 327, 328, 943, + 330, 944, 332, 945, 334, 335, 153, 151, + 690, 337, 338, 153, 339, 340, 341, 342, + 343, 344, 345, 346, 946, 348, 349, 947, + 351, 352, 353, 153, 259, 151, 354, 420, + 572, 420, 572, 420, 148, 420, 572, 420, + 148, 148, 572, 572, 572, 572, 148, 148, + 148, 572, 420, 148, 148, 572, 420, 948, + 949, 950, 696, 951, 952, 953, 954, 955, + 364, 956, 957, 958, 959, 960, 961, 962, + 959, 963, 964, 552, 965, 375, 712, 377, + 
420, 572, 148, 572, 420, 420, 421, 420, + 572, 148, 420, 420, 572, 572, 148, 572, + 421, 420, 572, 148, 148, 420, 572, 148, + 572, 420, 420, 148, 420, 420, 421, 148, + 420, 572, 148, 148, 572, 420, 572, 420, + 421, 420, 148, 148, 572, 148, 148, 148, + 148, 148, 148, 572, 420, 148, 572, 572, + 572, 572, 148, 572, 572, 572, 420, 148, + 572, 420, 420, 148, 420, 421, 420, 572, + 148, 572, 572, 420, 420, 148, 420, 572, + 148, 420, 421, 420, 572, 148, 572, 420, + 572, 421, 420, 153, 382, 713, 966, 967, + 716, 386, 153, 968, 969, 151, 420, 148, + 572, 420, 148, 572, 420, 420, 572, 420, + 148, 572, 148, 420, 153, 389, 970, 420, + 148, 148, 572, 572, 420, 971, 972, 973, + 153, 394, 395, 396, 397, 398, 399, 400, + 401, 402, 403, 723, 974, 975, 976, 151, + 420, 572, 572, 420, 420, 420, 420, 572, + 572, 420, 572, 572, 420, 420, 420, 572, + 572, 572, 572, 420, 153, 977, 409, 410, + 411, 151, 420, 420, 572, 420, 148, 978, + 420, 979, 980, 981, 983, 982, 420, 572, + 572, 420, 420, 572, 572, 420, 572, 571, + 571, 571, 148, 420, 421, 571, 148, 571, + 421, 148, 420, 420, 420, 420, 571, 571, + 421, 420, 420, 148, 148, 571, 148, 571, + 420, 420, 148, 571, 571, 420, 148, 148, + 421, 148, 571, 148, 420, 984, 985, 184, + 986, 987, 988, 989, 990, 991, 992, 993, + 994, 995, 996, 997, 998, 999, 1000, 1001, + 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, + 1010, 1011, 1012, 1013, 1014, 420, 148, 148, + 148, 148, 571, 420, 148, 571, 420, 571, + 420, 148, 148, 571, 148, 420, 148, 420, + 421, 148, 571, 420, 420, 420, 571, 571, + 420, 420, 420, 420, 571, 148, 148, 420, + 420, 420, 420, 420, 148, 420, 421, 148, + 420, 571, 571, 571, 148, 148, 148, 148, + 148, 148, 148, 571, 420, 420, 420, 420, + 420, 420, 148, 420, 421, 148, 420, 571, + 148, 571, 148, 148, 148, 148, 148, 148, + 571, 420, 420, 420, 148, 148, 420, 148, + 420, 421, 420, 571, 148, 571, 148, 148, + 148, 148, 148, 148, 571, 420, 148, 571, + 571, 571, 571, 148, 148, 571, 421, 420, + 571, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 
148, 571, 420, 148, 571, 571, + 571, 571, 421, 420, 148, 571, 148, 148, + 148, 148, 571, 420, 420, 420, 420, 420, + 148, 420, 148, 420, 421, 420, 571, 148, + 571, 148, 148, 148, 148, 148, 571, 420, + 148, 571, 571, 571, 571, 148, 571, 421, + 148, 420, 148, 571, 148, 148, 148, 571, + 420, 420, 420, 148, 420, 420, 148, 420, + 421, 420, 148, 420, 571, 148, 571, 148, + 148, 148, 420, 571, 571, 148, 571, 571, + 421, 571, 420, 571, 571, 420, 571, 421, + 420, 571, 571, 571, 420, 571, 421, 420, + 148, 571, 571, 571, 571, 421, 571, 420, + 148, 148, 571, 420, 571, 571, 148, 571, + 571, 420, 571, 420, 1015, 1016, 1017, 217, + 218, 219, 220, 221, 1018, 223, 224, 225, + 226, 227, 228, 1019, 1020, 1021, 1022, 1023, + 234, 1024, 236, 1025, 483, 484, 1026, 1027, + 1028, 1029, 1030, 1031, 1032, 1033, 646, 1034, + 151, 152, 1035, 249, 250, 251, 252, 151, + 153, 151, 420, 571, 420, 421, 571, 571, + 571, 571, 571, 420, 571, 571, 421, 571, + 148, 420, 420, 571, 420, 148, 148, 148, + 571, 148, 571, 420, 148, 571, 148, 148, + 571, 420, 571, 420, 420, 420, 421, 420, + 571, 571, 421, 148, 420, 571, 148, 148, + 420, 148, 571, 571, 420, 148, 571, 420, + 571, 571, 571, 420, 421, 421, 571, 420, + 571, 148, 571, 420, 148, 420, 421, 420, + 420, 571, 571, 148, 571, 148, 421, 148, + 420, 571, 420, 148, 148, 571, 420, 571, + 571, 571, 148, 571, 148, 571, 420, 571, + 571, 420, 1036, 1037, 255, 1038, 257, 258, + 259, 260, 261, 262, 263, 151, 1039, 265, + 1040, 267, 1041, 269, 420, 571, 571, 571, + 420, 420, 148, 148, 420, 420, 420, 571, + 571, 420, 420, 571, 420, 148, 571, 420, + 420, 148, 148, 148, 148, 148, 571, 420, + 1042, 1043, 1044, 272, 151, 273, 1045, 1046, + 1047, 1048, 420, 148, 571, 1049, 148, 420, + 1049, 420, 1049, 2, 420, 1049, 1049, 420, + 1049, 1049, 420, 1049, 1049, 1049, 420, 1049, + 1049, 1049, 420, 1049, 1049, 420, 1049, 1049, + 1049, 1049, 420, 1049, 1049, 1049, 420, 420, + 1049, 1049, 420, 1049, 420, 1050, 1051, 1052, + 1053, 1054, 1056, 1057, 1058, 1060, 1061, 1062, + 1063, 
1064, 1065, 1066, 1067, 1068, 1069, 1070, + 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1055, + 1059, 420, 1049, 1049, 1049, 1049, 420, 1049, + 420, 1049, 420, 420, 420, 1049, 420, 420, + 420, 1049, 1049, 1049, 1049, 420, 420, 420, + 420, 420, 420, 1049, 420, 420, 420, 420, + 420, 420, 1049, 420, 420, 420, 420, 1049, + 1049, 1049, 1049, 420, 1049, 1049, 1049, 1049, + 1049, 420, 1049, 1049, 420, 1049, 1049, 1049, + 1049, 420, 1049, 1049, 420, 420, 420, 420, + 420, 420, 1049, 1049, 1049, 1049, 1049, 1049, + 420, 1049, 1049, 420, 420, 420, 420, 420, + 420, 1049, 1049, 420, 1049, 1049, 1049, 1049, + 1049, 420, 1049, 1049, 420, 1049, 420, 1049, + 1049, 1049, 420, 1049, 420, 1049, 1049, 1049, + 1049, 1049, 420, 1049, 420, 1049, 1049, 1049, + 1049, 420, 1049, 420, 1078, 1079, 1080, 1081, + 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, + 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, + 1098, 420, 1049, 420, 1049, 1049, 1049, 1049, + 1049, 420, 1049, 1049, 1049, 420, 1049, 420, + 1049, 1049, 420, 1049, 1049, 420, 1049, 420, + 420, 420, 1049, 1049, 420, 1049, 420, 1049, + 1049, 420, 1049, 420, 1049, 1049, 1049, 420, + 1049, 420, 1049, 1049, 420, 420, 420, 1049, + 1049, 1049, 420, 1049, 420, 1049, 420, 1049, + 1049, 1049, 1049, 1049, 420, 1049, 1049, 420, + 1099, 1100, 1101, 1102, 1103, 1104, 420, 1049, + 1049, 571, 420, 420, 420, 420, 1049, 420, + 571, 1049, 420, 1049, 420, 1049, 420, 1049, + 420, 1105, 1106, 1044, 1045, 1046, 1047, 1048, + 420, 1049, 1049, 420, 1049, 1049, 420, 2, + 2, 1049, 1049, 2, 1049, 2, 2, 1049, + 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, + 1115, 1116, 1117, 1118, 1119, 1120, 1121, 420, + 1049, 1049, 420, 1049, 420, 1049, 420, 1049, + 1049, 1049, 1049, 420, 1049, 1049, 420, 420, + 420, 1049, 1049, 420, 1049, 420, 1049, 1049, + 420, 420, 420, 1049, 1049, 420, 1049, 1049, + 1049, 420, 1049, 1049, 1049, 1049, 420, 1049, + 1049, 1049, 420, 1049, 1049, 420, 1122, 1123, + 1124, 1103, 1125, 1126, 1127, 1128, 420, 1049, + 420, 1049, 1049, 571, 420, 
571, 2, 571, + 2, 1049, 420, 420, 1049, 1049, 420, 1129, + 1130, 1131, 1132, 1133, 1134, 420, 1135, 1136, + 1137, 1138, 1139, 420, 1049, 420, 1049, 420, + 1049, 420, 1049, 1049, 1049, 1049, 1049, 420, + 1049, 420, 1140, 1141, 1142, 1143, 1144, 1145, + 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, + 1154, 1151, 1155, 1156, 1157, 420, 1049, 1049, + 420, 420, 1049, 420, 420, 420, 1049, 1049, + 1049, 420, 1049, 420, 1049, 1049, 420, 420, + 420, 1049, 1049, 420, 1049, 420, 1049, 1049, + 1049, 420, 1049, 1049, 1049, 1049, 1049, 1049, + 1049, 420, 1049, 420, 420, 1049, 1049, 1049, + 420, 420, 420, 1049, 420, 1049, 1049, 420, + 1049, 420, 1158, 1159, 1160, 1161, 420, 1049, + 420, 1049, 420, 1049, 420, 1049, 420, 1162, + 1163, 420, 1049, 2, 1049, 1049, 420, 1164, + 1165, 1166, 1167, 1168, 1169, 420, 1049, 1049, + 420, 420, 420, 420, 1049, 1049, 420, 1049, + 1049, 420, 420, 420, 1049, 1049, 1049, 1049, + 420, 1170, 420, 1049, 420, 1171, 420, 1172, + 1173, 1047, 1175, 1174, 420, 1049, 1049, 420, + 420, 1049, 1049, 571, 1049, 1049, 420, 153, + 274, 275, 153, 657, 1176, 1177, 1178, 280, + 281, 282, 1179, 284, 1180, 1181, 1182, 1183, + 1184, 1185, 1186, 1187, 1188, 1189, 295, 296, + 152, 1190, 153, 151, 420, 148, 148, 571, + 571, 420, 148, 571, 148, 420, 571, 420, + 148, 571, 571, 571, 571, 420, 148, 571, + 148, 571, 420, 148, 148, 420, 421, 420, + 148, 420, 571, 421, 148, 571, 148, 420, + 571, 420, 420, 148, 571, 148, 571, 420, + 148, 420, 421, 420, 420, 421, 420, 571, + 148, 571, 420, 571, 148, 571, 421, 571, + 420, 571, 571, 571, 571, 420, 571, 148, + 571, 148, 571, 420, 148, 571, 571, 421, + 420, 1191, 674, 302, 303, 304, 305, 306, + 307, 1192, 676, 1193, 678, 1194, 1195, 1196, + 151, 420, 571, 572, 148, 148, 572, 572, + 572, 420, 571, 571, 571, 420, 571, 420, + 148, 148, 1049, 2, 571, 148, 420, 1049, + 148, 148, 148, 148, 571, 420, 1197, 1198, + 317, 318, 319, 1199, 1200, 1201, 1202, 324, + 420, 325, 326, 153, 327, 328, 1203, 330, + 1204, 332, 1205, 334, 335, 153, 151, 
690, + 337, 338, 153, 339, 340, 341, 342, 343, + 344, 345, 346, 1206, 348, 349, 1207, 351, + 352, 353, 153, 259, 151, 354, 420, 571, + 420, 571, 420, 148, 420, 571, 420, 148, + 148, 571, 571, 571, 571, 148, 148, 148, + 571, 420, 148, 148, 571, 420, 1208, 1209, + 1210, 696, 1211, 1212, 1213, 1214, 1215, 364, + 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1219, + 1223, 1224, 552, 1225, 375, 712, 377, 420, + 571, 148, 571, 420, 420, 421, 420, 571, + 148, 420, 420, 571, 571, 148, 571, 421, + 420, 571, 148, 148, 420, 571, 148, 571, + 420, 420, 148, 420, 420, 421, 148, 420, + 571, 148, 148, 571, 420, 571, 420, 421, + 420, 148, 148, 571, 148, 148, 148, 148, + 148, 148, 571, 420, 148, 571, 571, 571, + 571, 148, 571, 571, 571, 420, 148, 571, + 420, 420, 148, 420, 421, 420, 571, 148, + 571, 571, 420, 420, 148, 420, 571, 148, + 420, 421, 420, 571, 148, 571, 420, 571, + 421, 420, 153, 382, 713, 1226, 1227, 716, + 386, 153, 1228, 1229, 151, 420, 148, 571, + 420, 148, 571, 420, 420, 571, 420, 148, + 571, 148, 420, 1162, 153, 389, 1230, 420, + 148, 148, 571, 571, 420, 1231, 1232, 1233, + 153, 394, 395, 396, 397, 398, 399, 400, + 401, 402, 403, 723, 1234, 1235, 1236, 151, + 420, 571, 571, 420, 420, 420, 420, 571, + 571, 420, 571, 571, 420, 420, 420, 571, + 571, 571, 571, 420, 153, 1237, 409, 410, + 411, 151, 420, 420, 571, 420, 148, 1238, + 420, 1239, 1240, 1241, 1243, 1242, 420, 571, + 571, 420, 420, 571, 571, 420, 571, 148, + 421, 148, 148, 420, 420, 421, 420, 443, + 148, 420, 148, 420, 421, 420, 421, 148, + 443, 420, 420, 421, 420, 148, 421, 148, + 421, 421, 572, 572, 420, 421, 421, 443, + 421, 148, 420, 421, 443, 421, 148, 421, + 148, 420, 420, 420, 420, 421, 421, 421, + 420, 420, 148, 148, 421, 148, 421, 420, + 420, 148, 421, 421, 420, 148, 443, 148, + 421, 148, 421, 148, 420, 1244, 1245, 184, + 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, + 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, + 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, + 1270, 1271, 1272, 1273, 1274, 420, 
148, 148, + 148, 148, 421, 420, 148, 421, 420, 421, + 420, 148, 148, 421, 148, 420, 148, 420, + 148, 421, 420, 420, 420, 421, 421, 420, + 420, 420, 420, 421, 148, 148, 420, 420, + 420, 420, 420, 148, 420, 148, 420, 421, + 421, 421, 148, 148, 148, 148, 148, 148, + 148, 421, 420, 420, 420, 420, 420, 420, + 148, 420, 148, 420, 421, 148, 421, 148, + 148, 148, 148, 148, 148, 421, 420, 420, + 420, 148, 148, 420, 148, 420, 420, 421, + 148, 421, 148, 148, 148, 148, 148, 148, + 421, 420, 148, 421, 421, 421, 421, 148, + 148, 421, 421, 420, 421, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 421, + 420, 148, 421, 421, 421, 421, 421, 420, + 148, 421, 148, 148, 148, 148, 421, 420, + 420, 420, 420, 420, 148, 420, 148, 420, + 420, 421, 148, 421, 148, 148, 148, 148, + 148, 421, 420, 148, 421, 421, 421, 421, + 148, 421, 421, 148, 420, 148, 421, 148, + 148, 148, 421, 420, 420, 420, 148, 420, + 420, 148, 420, 420, 148, 420, 421, 148, + 421, 148, 148, 148, 420, 421, 421, 148, + 421, 421, 421, 421, 420, 421, 421, 420, + 421, 421, 420, 421, 421, 421, 420, 421, + 421, 420, 148, 421, 421, 421, 421, 421, + 421, 420, 148, 148, 421, 420, 421, 421, + 148, 421, 421, 420, 421, 420, 1275, 1276, + 1277, 217, 218, 219, 220, 221, 1278, 223, + 224, 225, 226, 227, 228, 1279, 1280, 1281, + 1282, 1283, 234, 1284, 236, 1285, 483, 484, + 1286, 1287, 1288, 1289, 1290, 1291, 1292, 1293, + 646, 1294, 151, 152, 1295, 249, 250, 251, + 252, 151, 153, 151, 420, 421, 420, 421, + 421, 421, 421, 421, 421, 420, 421, 421, + 148, 420, 420, 421, 420, 148, 148, 148, + 421, 148, 421, 420, 148, 421, 148, 148, + 421, 420, 421, 420, 420, 420, 420, 421, + 421, 421, 148, 420, 421, 148, 148, 420, + 148, 421, 421, 420, 148, 421, 420, 421, + 421, 421, 420, 421, 421, 421, 420, 421, + 148, 421, 420, 148, 420, 420, 420, 421, + 421, 148, 421, 148, 421, 148, 420, 421, + 420, 148, 148, 421, 420, 421, 421, 421, + 148, 421, 148, 421, 420, 421, 421, 420, + 1296, 1297, 255, 1298, 257, 258, 259, 260, + 261, 262, 263, 151, 1299, 265, 
1300, 267, + 1301, 269, 420, 443, 421, 443, 421, 571, + 420, 443, 420, 148, 148, 420, 420, 421, + 420, 571, 421, 420, 420, 421, 420, 148, + 421, 420, 420, 148, 148, 148, 148, 148, + 421, 420, 1302, 1303, 272, 151, 273, 420, + 148, 421, 148, 420, 421, 420, 153, 274, + 275, 153, 657, 1304, 1305, 1306, 280, 281, + 282, 1307, 284, 1308, 1309, 1310, 1311, 1312, + 1313, 1314, 1315, 1316, 1317, 295, 296, 152, + 1318, 153, 151, 420, 148, 148, 421, 421, + 420, 148, 421, 148, 420, 421, 420, 148, + 421, 421, 421, 421, 420, 148, 421, 148, + 421, 420, 148, 148, 420, 420, 148, 420, + 421, 421, 148, 421, 148, 420, 421, 420, + 420, 148, 421, 148, 421, 420, 148, 420, + 420, 420, 420, 421, 148, 421, 420, 421, + 148, 421, 421, 421, 420, 421, 421, 421, + 421, 420, 421, 148, 421, 148, 421, 420, + 148, 421, 421, 421, 420, 1319, 674, 302, + 303, 304, 305, 306, 307, 1320, 1321, 1322, + 1323, 312, 1324, 1325, 151, 420, 421, 572, + 148, 148, 572, 572, 572, 420, 443, 443, + 421, 421, 571, 420, 443, 443, 443, 571, + 148, 148, 420, 421, 420, 148, 443, 443, + 443, 443, 571, 148, 420, 421, 148, 420, + 148, 148, 148, 148, 421, 420, 1326, 1327, + 317, 318, 319, 1328, 1329, 1330, 1331, 324, + 420, 325, 326, 153, 327, 328, 1332, 330, + 1333, 332, 1334, 334, 335, 153, 151, 690, + 337, 338, 153, 339, 340, 341, 342, 343, + 344, 345, 346, 1335, 348, 349, 1336, 351, + 352, 353, 153, 259, 151, 354, 420, 421, + 420, 421, 420, 148, 420, 421, 420, 148, + 148, 421, 421, 421, 421, 148, 148, 148, + 421, 420, 148, 148, 421, 420, 1337, 1338, + 1339, 696, 1340, 1341, 1342, 1343, 1344, 364, + 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1348, + 1352, 1353, 552, 1354, 375, 712, 377, 420, + 421, 148, 421, 420, 420, 420, 421, 148, + 420, 420, 421, 421, 148, 421, 421, 420, + 421, 148, 148, 420, 421, 148, 421, 420, + 420, 148, 420, 420, 148, 420, 421, 148, + 148, 421, 420, 421, 420, 421, 420, 148, + 148, 421, 148, 148, 148, 148, 148, 148, + 421, 420, 148, 421, 421, 421, 421, 148, + 421, 421, 421, 420, 148, 421, 420, 420, + 
148, 420, 420, 421, 148, 421, 421, 420, + 420, 148, 420, 421, 148, 420, 420, 421, + 148, 421, 420, 421, 421, 420, 153, 382, + 713, 1355, 1356, 716, 386, 153, 1357, 1358, + 151, 420, 148, 421, 420, 148, 421, 420, + 420, 421, 420, 148, 421, 148, 420, 153, + 389, 1359, 420, 148, 148, 421, 421, 420, + 1360, 1361, 1362, 153, 394, 395, 396, 397, + 398, 399, 400, 401, 402, 403, 723, 1363, + 1364, 1365, 151, 420, 421, 421, 420, 420, + 420, 420, 421, 421, 420, 421, 421, 420, + 420, 420, 421, 421, 421, 421, 420, 153, + 1366, 409, 410, 411, 151, 420, 420, 421, + 420, 148, 1367, 420, 1368, 1369, 1370, 1372, + 1371, 420, 421, 421, 420, 420, 421, 421, + 420, 421, 148, 148, 148, 181, 148, 420, + 420, 420, 420, 420, 148, 420, 148, 420, + 420, 420, 148, 148, 148, 181, 148, 148, + 572, 572, 420, 148, 148, 148, 148, 420, + 421, 148, 421, 148, 420, 420, 420, 420, + 421, 420, 420, 148, 148, 420, 420, 148, + 148, 421, 148, 420, 1373, 1374, 184, 1375, + 151, 1376, 1377, 1378, 1379, 1380, 1381, 1382, + 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1390, + 1391, 1392, 1393, 1394, 1395, 1396, 1397, 1398, + 1399, 1400, 1401, 1402, 420, 148, 2, 148, + 2, 148, 2, 2, 2, 421, 148, 2, + 2, 2, 2, 2, 2, 2, 148, 2, + 2, 2, 2, 2, 2, 421, 2, 148, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 2, 2, 2, 2, 2, 2, + 2, 421, 2, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 2, 2, 2, 2, + 2, 2, 421, 2, 2, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 2, 148, + 148, 148, 148, 148, 148, 148, 421, 2, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 2, 148, 148, 148, 148, + 148, 421, 2, 148, 148, 148, 148, 148, + 148, 2, 2, 2, 2, 2, 2, 2, + 421, 2, 148, 148, 148, 148, 148, 148, + 148, 148, 2, 148, 148, 148, 148, 148, + 148, 421, 148, 2, 148, 148, 148, 148, + 148, 2, 2, 2, 2, 2, 2, 421, + 2, 2, 148, 148, 148, 148, 148, 148, + 2, 148, 148, 148, 148, 148, 421, 148, + 2, 148, 148, 2, 148, 421, 2, 148, + 148, 148, 2, 148, 421, 2, 148, 148, + 148, 148, 148, 421, 148, 2, 148, 148, + 148, 2, 148, 148, 
148, 2, 148, 2, + 1403, 1404, 1405, 217, 218, 219, 220, 221, + 1406, 223, 224, 225, 226, 227, 228, 1407, + 1408, 1409, 1410, 1411, 234, 1412, 236, 1413, + 483, 484, 1374, 1414, 1415, 153, 1416, 1417, + 1418, 1419, 646, 1420, 151, 152, 1421, 249, + 250, 251, 252, 151, 153, 151, 420, 148, + 2, 421, 148, 148, 148, 148, 148, 2, + 148, 148, 421, 148, 148, 2, 2, 2, + 148, 148, 148, 148, 2, 148, 148, 148, + 148, 2, 148, 2, 2, 2, 421, 2, + 148, 148, 421, 148, 2, 148, 148, 2, + 148, 148, 148, 2, 148, 148, 148, 2, + 421, 421, 148, 2, 2, 421, 2, 2, + 148, 148, 421, 148, 2, 2, 148, 148, + 2, 148, 148, 148, 2, 148, 148, 2, + 1422, 1423, 255, 1424, 257, 258, 259, 260, + 261, 262, 263, 151, 1425, 265, 1426, 267, + 1427, 269, 420, 181, 181, 148, 181, 148, + 571, 2, 2, 2, 148, 2, 2, 148, + 2, 571, 148, 2, 2, 2, 148, 2, + 2, 2, 148, 148, 148, 148, 148, 148, + 2, 1428, 1429, 272, 151, 273, 420, 148, + 148, 148, 2, 148, 2, 153, 274, 275, + 153, 657, 1430, 153, 302, 280, 281, 282, + 1431, 284, 153, 1432, 1433, 1434, 153, 1435, + 1436, 1437, 1438, 1439, 295, 296, 152, 1440, + 153, 151, 420, 148, 148, 148, 2, 2, + 148, 2, 2, 421, 2, 2, 2, 148, + 421, 148, 148, 2, 2, 2, 148, 2, + 421, 2, 2, 421, 2, 148, 148, 2, + 148, 421, 148, 2, 148, 148, 148, 148, + 2, 148, 148, 148, 2, 148, 148, 421, + 2, 1441, 674, 302, 303, 304, 305, 306, + 307, 1442, 932, 1443, 934, 312, 1444, 1445, + 151, 420, 148, 572, 148, 148, 572, 572, + 572, 2, 181, 148, 148, 571, 2, 2, + 2, 148, 148, 2, 148, 148, 148, 148, + 148, 2, 1446, 1447, 317, 318, 319, 1448, + 1449, 1450, 1451, 324, 420, 325, 326, 153, + 327, 328, 1452, 330, 1453, 332, 1454, 334, + 335, 153, 151, 690, 337, 338, 153, 339, + 340, 341, 342, 343, 344, 345, 346, 1455, + 348, 349, 1456, 351, 352, 353, 153, 259, + 151, 354, 2, 148, 2, 2, 2, 148, + 2, 2, 148, 148, 148, 148, 148, 148, + 148, 148, 2, 148, 148, 2, 1457, 1458, + 696, 1459, 1460, 1461, 1462, 364, 1463, 1464, + 1465, 153, 1466, 1467, 1468, 153, 1469, 1419, + 552, 1470, 375, 712, 377, 153, 2, 2, + 
421, 2, 148, 2, 2, 148, 148, 421, + 2, 148, 148, 2, 2, 2, 2, 421, + 2, 148, 148, 148, 2, 2, 421, 2, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 2, 148, 148, 148, 148, 148, 148, + 148, 148, 2, 2, 2, 421, 2, 148, + 148, 148, 2, 2, 2, 148, 2, 2, + 421, 2, 148, 148, 421, 2, 153, 382, + 713, 1471, 1436, 716, 386, 153, 1472, 1473, + 151, 2, 148, 148, 2, 2, 2, 148, + 148, 2, 153, 389, 1474, 2, 148, 148, + 148, 148, 2, 1475, 1476, 1477, 153, 394, + 395, 396, 397, 398, 399, 400, 401, 402, + 403, 723, 1478, 1479, 1480, 151, 2, 148, + 148, 2, 2, 2, 2, 148, 148, 2, + 148, 148, 2, 2, 2, 148, 148, 148, + 148, 2, 153, 1481, 409, 410, 411, 151, + 2, 2, 2, 148, 1482, 420, 1483, 152, + 153, 1484, 151, 2, 148, 148, 2, 2, + 148, 148, 1485, 148, 148, 125, 1485, 125, + 125, 125, 148, 125, 148, 125, 1485, 125, + 1485, 148, 125, 125, 1485, 125, 148, 1485, + 148, 1485, 1485, 572, 572, 125, 1485, 1485, + 1485, 148, 125, 126, 1485, 148, 1485, 126, + 148, 125, 125, 125, 125, 1485, 1485, 126, + 125, 125, 148, 148, 1485, 148, 1485, 125, + 125, 148, 1485, 1485, 125, 148, 148, 126, + 148, 1485, 148, 125, 1486, 1487, 184, 1488, + 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, + 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, + 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, + 1513, 1514, 1515, 1516, 125, 148, 148, 148, + 148, 1485, 125, 148, 1485, 125, 1485, 125, + 148, 148, 1485, 148, 125, 148, 125, 126, + 148, 1485, 125, 125, 125, 1485, 1485, 125, + 125, 125, 125, 1485, 148, 148, 125, 125, + 125, 125, 125, 148, 125, 126, 148, 125, + 1485, 1485, 1485, 148, 148, 148, 148, 148, + 148, 148, 1485, 125, 125, 125, 125, 125, + 125, 148, 125, 126, 148, 125, 1485, 148, + 1485, 148, 148, 148, 148, 148, 148, 1485, + 125, 125, 125, 148, 148, 125, 148, 125, + 126, 125, 1485, 148, 1485, 148, 148, 148, + 148, 148, 148, 1485, 125, 148, 1485, 1485, + 1485, 1485, 148, 148, 1485, 126, 125, 1485, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 1485, 125, 148, 1485, 1485, 1485, + 1485, 126, 125, 148, 
1485, 148, 148, 148, + 148, 1485, 125, 125, 125, 125, 125, 148, + 125, 148, 125, 126, 125, 1485, 148, 1485, + 148, 148, 148, 148, 148, 1485, 125, 148, + 1485, 1485, 1485, 1485, 148, 1485, 126, 148, + 125, 148, 1485, 148, 148, 148, 1485, 125, + 125, 125, 148, 125, 125, 148, 125, 126, + 125, 148, 125, 1485, 148, 1485, 148, 148, + 148, 125, 1485, 1485, 148, 1485, 1485, 126, + 1485, 125, 1485, 1485, 125, 1485, 126, 125, + 1485, 1485, 1485, 125, 1485, 126, 125, 148, + 1485, 1485, 1485, 1485, 126, 1485, 125, 148, + 148, 1485, 125, 1485, 1485, 148, 1485, 1485, + 125, 1485, 125, 1517, 1518, 1519, 217, 218, + 219, 220, 221, 1520, 223, 224, 225, 226, + 227, 228, 1521, 1522, 1523, 1524, 1525, 234, + 1526, 236, 1527, 1528, 1529, 1530, 1531, 1532, + 1533, 1534, 1535, 1536, 1537, 1538, 1539, 151, + 152, 1540, 249, 250, 251, 252, 151, 153, + 151, 125, 1485, 125, 126, 1485, 1485, 1485, + 1485, 1485, 125, 1485, 1485, 126, 1485, 148, + 125, 125, 1485, 125, 148, 148, 148, 1485, + 148, 1485, 125, 148, 1485, 148, 148, 1485, + 125, 1485, 125, 125, 125, 126, 125, 1485, + 1485, 126, 148, 125, 1485, 148, 148, 125, + 148, 1485, 1485, 125, 126, 2, 126, 2, + 148, 1485, 125, 1485, 1485, 1485, 125, 126, + 126, 1485, 125, 1485, 148, 1485, 125, 148, + 125, 126, 125, 125, 1485, 1485, 148, 1485, + 148, 126, 148, 125, 1485, 125, 148, 148, + 1485, 125, 126, 148, 126, 148, 2, 1485, + 1485, 1485, 148, 1485, 148, 1485, 125, 1485, + 1485, 125, 1541, 1542, 255, 1543, 257, 258, + 259, 260, 261, 262, 263, 151, 1544, 265, + 1545, 267, 1546, 269, 125, 1485, 1485, 1485, + 125, 125, 148, 148, 125, 125, 125, 1485, + 1485, 125, 125, 1485, 125, 148, 1485, 125, + 125, 148, 148, 148, 148, 148, 1485, 125, + 1547, 1548, 1044, 272, 151, 273, 1045, 1046, + 1047, 1048, 125, 148, 1485, 1049, 148, 125, + 1485, 1049, 1049, 125, 153, 274, 275, 153, + 1549, 1550, 1551, 1552, 280, 281, 282, 1553, + 284, 1554, 1555, 1556, 1557, 1558, 1559, 1560, + 1561, 1562, 1563, 295, 296, 152, 1564, 153, + 151, 125, 2, 126, 2, 148, 148, 148, 
+ 1485, 1485, 125, 148, 1485, 148, 125, 1485, + 125, 148, 1485, 1485, 1485, 1485, 125, 148, + 1485, 148, 1485, 125, 148, 148, 125, 126, + 125, 148, 125, 1485, 126, 148, 1485, 148, + 125, 1485, 125, 125, 148, 1485, 148, 1485, + 125, 148, 125, 126, 125, 125, 126, 125, + 1485, 148, 1485, 125, 1485, 148, 1485, 126, + 1485, 125, 1485, 1485, 1485, 1485, 125, 1485, + 148, 1485, 148, 1485, 125, 148, 1485, 1485, + 126, 125, 1565, 674, 302, 303, 304, 305, + 306, 307, 1566, 1567, 1568, 1569, 1194, 1570, + 1571, 151, 125, 1485, 572, 148, 148, 572, + 572, 572, 125, 1485, 1485, 1485, 125, 1485, + 148, 148, 125, 1485, 125, 148, 1485, 148, + 125, 1485, 148, 125, 1049, 148, 148, 148, + 148, 1485, 125, 1572, 1573, 317, 318, 319, + 1574, 1575, 1576, 1577, 324, 125, 325, 326, + 153, 327, 328, 1578, 330, 1579, 332, 1580, + 334, 335, 153, 151, 1581, 337, 338, 153, + 339, 340, 341, 342, 343, 344, 345, 346, + 1582, 348, 349, 1583, 351, 352, 353, 153, + 259, 151, 354, 125, 1485, 125, 1485, 125, + 148, 125, 1485, 125, 148, 2, 126, 2, + 148, 148, 1485, 1485, 1485, 1485, 148, 148, + 148, 1485, 125, 148, 148, 1485, 125, 1584, + 1585, 1586, 1587, 1588, 1589, 1590, 1591, 1592, + 364, 1593, 1594, 1595, 1596, 1597, 1598, 1599, + 1596, 1600, 1601, 1602, 1603, 375, 1604, 377, + 125, 1485, 148, 1485, 125, 125, 126, 125, + 1485, 148, 125, 125, 1485, 148, 126, 2, + 1485, 148, 1485, 126, 125, 1485, 148, 148, + 125, 1485, 148, 1485, 125, 125, 148, 125, + 125, 126, 148, 125, 1485, 148, 148, 1485, + 125, 1485, 125, 126, 125, 148, 148, 1485, + 148, 148, 148, 148, 148, 148, 1485, 125, + 148, 1485, 1485, 1485, 1485, 148, 1485, 1485, + 1485, 125, 148, 1485, 125, 125, 148, 125, + 126, 125, 1485, 148, 1485, 1485, 125, 125, + 148, 125, 1485, 148, 125, 126, 125, 1485, + 148, 1485, 125, 126, 2, 1485, 126, 125, + 126, 2, 2, 148, 153, 382, 1605, 1606, + 1607, 1608, 386, 153, 1609, 1610, 151, 125, + 148, 126, 2, 148, 1485, 125, 148, 1485, + 125, 148, 126, 148, 148, 2, 125, 1485, + 125, 148, 1485, 148, 125, 1162, 153, 
389, + 1611, 125, 148, 148, 1485, 1485, 125, 1612, + 1613, 1614, 153, 394, 395, 396, 397, 398, + 399, 400, 401, 402, 403, 1615, 1616, 1617, + 1618, 151, 125, 1485, 1485, 125, 125, 125, + 125, 1485, 1485, 125, 2, 2, 126, 2, + 148, 1485, 1485, 125, 125, 125, 1485, 1485, + 1485, 1485, 125, 153, 1619, 409, 410, 411, + 151, 125, 125, 1485, 125, 148, 1620, 125, + 1621, 1622, 1623, 1625, 1624, 125, 1485, 1485, + 125, 125, 1485, 1485, 125, 1485, 148, 126, + 148, 148, 125, 126, 125, 125, 1626, 148, + 125, 148, 125, 126, 125, 126, 148, 1626, + 125, 125, 126, 125, 148, 126, 148, 126, + 126, 572, 572, 125, 126, 126, 1626, 126, + 148, 125, 126, 1626, 126, 148, 126, 148, + 125, 125, 125, 125, 126, 126, 126, 125, + 125, 148, 148, 126, 148, 126, 125, 125, + 148, 126, 126, 125, 148, 1626, 148, 126, + 148, 126, 148, 125, 1627, 1628, 184, 1629, + 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, + 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, + 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, + 1654, 1655, 1656, 1657, 125, 148, 148, 148, + 148, 126, 125, 148, 126, 125, 126, 125, + 148, 148, 126, 148, 125, 148, 125, 148, + 126, 125, 125, 125, 126, 126, 125, 125, + 125, 125, 126, 148, 148, 125, 125, 125, + 125, 125, 148, 125, 148, 125, 126, 126, + 126, 148, 148, 148, 148, 148, 148, 148, + 126, 125, 125, 125, 125, 125, 125, 148, + 125, 148, 125, 126, 148, 126, 148, 148, + 148, 148, 148, 148, 126, 125, 125, 125, + 148, 148, 125, 148, 125, 125, 126, 148, + 126, 148, 148, 148, 148, 148, 148, 126, + 125, 148, 126, 126, 126, 126, 148, 148, + 126, 126, 125, 126, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 126, 125, + 148, 126, 126, 126, 126, 126, 125, 148, + 126, 148, 148, 148, 148, 126, 125, 125, + 125, 125, 125, 148, 125, 148, 125, 125, + 126, 148, 126, 148, 148, 148, 148, 148, + 126, 125, 148, 126, 126, 126, 126, 148, + 126, 126, 148, 125, 148, 126, 148, 148, + 148, 126, 125, 125, 125, 148, 125, 125, + 148, 125, 125, 148, 125, 126, 148, 126, + 148, 148, 148, 125, 126, 126, 148, 126, 
+ 126, 126, 126, 125, 126, 126, 125, 126, + 126, 125, 126, 126, 126, 125, 126, 126, + 125, 148, 126, 126, 126, 126, 126, 126, + 125, 148, 148, 126, 125, 126, 126, 148, + 126, 126, 125, 126, 125, 1658, 1659, 1660, + 217, 218, 219, 220, 221, 1661, 223, 224, + 225, 226, 227, 228, 1662, 1663, 1664, 1665, + 1666, 234, 1667, 236, 1668, 1528, 1529, 1669, + 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1538, + 1677, 151, 152, 1678, 249, 250, 251, 252, + 151, 153, 151, 125, 126, 125, 126, 126, + 126, 126, 126, 126, 125, 126, 126, 148, + 125, 125, 126, 125, 148, 148, 148, 126, + 148, 126, 125, 148, 126, 148, 148, 126, + 125, 126, 125, 125, 125, 125, 126, 126, + 126, 148, 125, 126, 148, 148, 125, 148, + 126, 126, 125, 148, 126, 125, 126, 126, + 126, 125, 126, 126, 126, 125, 126, 148, + 126, 125, 148, 125, 125, 125, 126, 126, + 148, 126, 148, 126, 148, 125, 126, 125, + 148, 148, 126, 125, 126, 126, 126, 148, + 126, 148, 126, 125, 126, 126, 125, 1679, + 1680, 255, 1681, 257, 258, 259, 260, 261, + 262, 263, 151, 1682, 265, 1683, 267, 1684, + 269, 125, 1626, 126, 1626, 126, 1485, 125, + 1626, 125, 148, 148, 125, 125, 126, 125, + 1485, 126, 125, 125, 126, 125, 148, 126, + 125, 125, 148, 148, 148, 148, 148, 126, + 125, 1685, 1686, 272, 151, 273, 125, 148, + 126, 148, 125, 126, 125, 153, 274, 275, + 153, 1549, 1687, 1688, 1689, 280, 281, 282, + 1690, 284, 1691, 1692, 1693, 1694, 1695, 1696, + 1697, 1698, 1699, 1700, 295, 296, 152, 1701, + 153, 151, 125, 148, 148, 126, 126, 125, + 148, 126, 148, 125, 126, 125, 148, 126, + 126, 126, 126, 125, 148, 126, 148, 126, + 125, 148, 148, 125, 125, 148, 125, 126, + 126, 148, 126, 148, 125, 126, 125, 125, + 148, 126, 148, 126, 125, 148, 125, 125, + 125, 125, 126, 148, 126, 125, 126, 148, + 126, 126, 126, 125, 126, 126, 126, 126, + 125, 126, 148, 126, 148, 126, 125, 148, + 126, 126, 126, 125, 1702, 674, 302, 303, + 304, 305, 306, 307, 1703, 1704, 1705, 1706, + 312, 1707, 1708, 151, 125, 126, 572, 148, + 148, 572, 572, 572, 125, 1626, 1626, 126, + 
126, 1485, 125, 1626, 1626, 1626, 1485, 148, + 148, 125, 126, 125, 148, 1626, 1626, 1626, + 1626, 1485, 148, 125, 126, 148, 125, 148, + 148, 148, 148, 126, 125, 1709, 1710, 317, + 318, 319, 1711, 1712, 1713, 1714, 324, 125, + 325, 326, 153, 327, 328, 1715, 330, 1716, + 332, 1717, 334, 335, 153, 151, 1581, 337, + 338, 153, 339, 340, 341, 342, 343, 344, + 345, 346, 1718, 348, 349, 1719, 351, 352, + 353, 153, 259, 151, 354, 125, 126, 125, + 126, 125, 148, 125, 126, 125, 148, 148, + 126, 126, 126, 126, 148, 148, 148, 126, + 125, 148, 148, 126, 125, 1720, 1721, 1722, + 1587, 1723, 1724, 1725, 1726, 1727, 364, 1728, + 1729, 1730, 1731, 1732, 1733, 1734, 1731, 1735, + 1736, 1602, 1737, 375, 1604, 377, 125, 126, + 148, 126, 125, 125, 125, 126, 148, 125, + 125, 126, 126, 148, 126, 126, 125, 126, + 148, 148, 125, 126, 148, 126, 125, 125, + 148, 125, 125, 148, 125, 126, 148, 148, + 126, 125, 126, 125, 126, 125, 148, 148, + 126, 148, 148, 148, 148, 148, 148, 126, + 125, 148, 126, 126, 126, 126, 148, 126, + 126, 126, 125, 148, 126, 125, 125, 148, + 125, 125, 126, 148, 126, 126, 125, 125, + 148, 125, 126, 148, 125, 125, 126, 148, + 126, 125, 126, 126, 125, 153, 382, 1605, + 1738, 1739, 1608, 386, 153, 1740, 1741, 151, + 125, 148, 126, 125, 148, 126, 125, 125, + 126, 125, 148, 126, 148, 125, 153, 389, + 1742, 125, 148, 148, 126, 126, 125, 1743, + 1744, 1745, 153, 394, 395, 396, 397, 398, + 399, 400, 401, 402, 403, 1615, 1746, 1747, + 1748, 151, 125, 126, 126, 125, 125, 125, + 125, 126, 126, 125, 126, 126, 125, 125, + 125, 126, 126, 126, 126, 125, 153, 1749, + 409, 410, 411, 151, 125, 125, 126, 125, + 148, 1750, 125, 1751, 1752, 1753, 1755, 1754, + 125, 126, 126, 125, 125, 126, 126, 125, + 126, 1626, 125, 1626, 125, 125, 1626, 1626, + 125, 1626, 1626, 125, 1626, 1626, 1626, 125, + 1626, 1626, 1626, 125, 126, 1626, 1626, 126, + 125, 1626, 1626, 1626, 1626, 126, 125, 1626, + 1626, 1626, 125, 125, 1626, 1626, 125, 126, + 1626, 125, 1756, 1757, 1758, 1759, 1760, 1762, + 1763, 1764, 
1766, 1767, 1768, 1769, 1770, 1771, + 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, + 1780, 1781, 1782, 1783, 1761, 1765, 125, 1626, + 1626, 1626, 1626, 125, 1626, 125, 1626, 125, + 125, 125, 1626, 125, 125, 125, 126, 125, + 1626, 1626, 1626, 1626, 125, 125, 125, 125, + 125, 125, 126, 125, 1626, 125, 125, 125, + 125, 126, 125, 125, 1626, 125, 125, 125, + 125, 126, 125, 1626, 1626, 1626, 1626, 125, + 1626, 1626, 1626, 1626, 1626, 126, 125, 1626, + 1626, 125, 1626, 1626, 1626, 1626, 126, 125, + 1626, 1626, 125, 125, 125, 125, 125, 125, + 126, 125, 1626, 1626, 1626, 1626, 1626, 1626, + 126, 125, 1626, 1626, 125, 125, 125, 125, + 125, 125, 126, 125, 1626, 1626, 125, 1626, + 1626, 1626, 1626, 126, 1626, 125, 1626, 1626, + 125, 1626, 126, 125, 1626, 1626, 1626, 125, + 1626, 126, 125, 1626, 1626, 1626, 1626, 126, + 1626, 125, 1626, 125, 1626, 1626, 1626, 1626, + 125, 1626, 125, 1784, 1785, 1786, 1787, 1788, + 1789, 1790, 1791, 1792, 1793, 1794, 1528, 1529, + 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, + 1803, 1804, 1805, 125, 1626, 125, 126, 1626, + 1626, 1626, 1626, 1626, 125, 1626, 1626, 126, + 1626, 125, 1626, 125, 1626, 1626, 125, 1626, + 1626, 125, 1626, 125, 125, 125, 126, 125, + 1626, 1626, 126, 125, 1626, 125, 1626, 1626, + 125, 1626, 125, 1626, 1626, 1626, 125, 126, + 126, 1626, 125, 1626, 1626, 125, 125, 126, + 125, 125, 1626, 1626, 1626, 126, 125, 1626, + 125, 1626, 125, 126, 126, 125, 1626, 1626, + 1626, 1626, 1626, 125, 1626, 1626, 125, 1806, + 1807, 1808, 1809, 1810, 1811, 125, 1626, 1626, + 125, 1626, 1626, 125, 1626, 125, 1626, 125, + 1626, 125, 1626, 125, 1812, 1813, 125, 1626, + 125, 1626, 125, 1814, 1815, 1816, 1817, 1818, + 1819, 1820, 1821, 1822, 1823, 1824, 1825, 1826, + 1827, 1828, 1829, 125, 126, 125, 1626, 1626, + 125, 1626, 125, 1626, 125, 1626, 1626, 1626, + 1626, 125, 1626, 1626, 125, 125, 126, 125, + 125, 1626, 126, 1626, 125, 1626, 125, 1626, + 1626, 125, 125, 126, 125, 125, 126, 125, + 1626, 1626, 125, 1626, 1626, 126, 1626, 125, + 
1626, 1626, 1626, 1626, 125, 1626, 1626, 1626, + 125, 1626, 1626, 126, 125, 1830, 1831, 1810, + 1816, 1832, 125, 1626, 125, 1626, 1626, 125, + 1626, 125, 1833, 1834, 1835, 1836, 1837, 1838, + 125, 1839, 1840, 1841, 1814, 1842, 1843, 125, + 1626, 125, 1626, 125, 1626, 125, 1626, 1626, + 1626, 1626, 1626, 125, 1626, 125, 1844, 1845, + 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, + 1854, 1855, 1856, 1857, 1858, 1859, 1856, 1860, + 1861, 1602, 1862, 1814, 125, 1626, 1626, 125, + 125, 126, 125, 1626, 125, 125, 125, 1626, + 126, 125, 1626, 1626, 126, 125, 1626, 125, + 1626, 1626, 125, 125, 125, 126, 125, 1626, + 1626, 125, 1626, 126, 125, 1626, 1626, 1626, + 125, 1626, 1626, 1626, 1626, 1626, 1626, 1626, + 125, 1626, 125, 125, 126, 125, 1626, 1626, + 1626, 125, 125, 125, 1626, 125, 126, 125, + 1626, 1626, 125, 1626, 126, 125, 1814, 1863, + 1864, 1529, 1865, 1866, 125, 1626, 125, 1626, + 125, 1626, 125, 1626, 125, 1867, 125, 1626, + 1626, 125, 1868, 1869, 1870, 1871, 1872, 1873, + 1874, 125, 1626, 1626, 125, 125, 125, 125, + 1626, 1626, 125, 126, 125, 1626, 1626, 125, + 125, 125, 1626, 1626, 1626, 1626, 125, 1875, + 125, 1626, 125, 1876, 125, 1877, 1878, 128, + 129, 1879, 125, 1626, 1626, 125, 125, 1626, + 1626, 148, 1880, 148, 148, 125, 1880, 125, + 125, 1626, 148, 125, 148, 125, 1880, 125, + 1880, 148, 1626, 125, 125, 1880, 125, 148, + 1880, 148, 1880, 1880, 572, 572, 125, 1880, + 1880, 1626, 1880, 148, 125, 126, 1626, 1880, + 148, 1880, 126, 148, 125, 125, 125, 125, + 1880, 1880, 126, 125, 125, 148, 148, 1880, + 148, 1880, 125, 125, 148, 1880, 1880, 125, + 148, 1626, 148, 126, 148, 1880, 148, 125, + 1881, 1882, 184, 1883, 1884, 1885, 1886, 1887, + 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, + 1896, 1897, 1898, 1899, 1900, 1901, 1902, 1903, + 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, + 125, 148, 148, 148, 148, 1880, 125, 148, + 1880, 125, 1880, 125, 148, 148, 1880, 148, + 125, 148, 125, 126, 148, 1880, 125, 125, + 125, 1880, 1880, 125, 125, 125, 125, 1880, 
+ 148, 148, 125, 125, 125, 125, 125, 148, + 125, 126, 148, 125, 1880, 1880, 1880, 148, + 148, 148, 148, 148, 148, 148, 1880, 125, + 125, 125, 125, 125, 125, 148, 125, 126, + 148, 125, 1880, 148, 1880, 148, 148, 148, + 148, 148, 148, 1880, 125, 125, 125, 148, + 148, 125, 148, 125, 126, 125, 1880, 148, + 1880, 148, 148, 148, 148, 148, 148, 1880, + 125, 148, 1880, 1880, 1880, 1880, 148, 148, + 1880, 126, 125, 1880, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 1880, 125, + 148, 1880, 1880, 1880, 1880, 126, 125, 148, + 1880, 148, 148, 148, 148, 1880, 125, 125, + 125, 125, 125, 148, 125, 148, 125, 126, + 125, 1880, 148, 1880, 148, 148, 148, 148, + 148, 1880, 125, 148, 1880, 1880, 1880, 1880, + 148, 1880, 126, 148, 125, 148, 1880, 148, + 148, 148, 1880, 125, 125, 125, 148, 125, + 125, 148, 125, 126, 125, 148, 125, 1880, + 148, 1880, 148, 148, 148, 125, 1880, 1880, + 148, 1880, 1880, 126, 1880, 125, 1880, 1880, + 125, 1880, 126, 125, 1880, 1880, 1880, 125, + 1880, 126, 125, 148, 1880, 1880, 1880, 1880, + 126, 1880, 125, 148, 148, 1880, 125, 1880, + 1880, 148, 1880, 1880, 125, 1880, 125, 1912, + 1913, 1914, 217, 218, 219, 220, 221, 1915, + 223, 224, 225, 226, 227, 228, 1916, 1917, + 1918, 1919, 1920, 234, 1921, 236, 1922, 1528, + 1529, 1923, 1924, 1925, 1926, 1927, 1928, 1929, + 1930, 1538, 1931, 151, 152, 1932, 249, 250, + 251, 252, 151, 153, 151, 125, 1880, 125, + 126, 1880, 1880, 1880, 1880, 1880, 125, 1880, + 1880, 126, 1880, 148, 125, 125, 1880, 125, + 148, 148, 148, 1880, 148, 1880, 125, 148, + 1880, 148, 148, 1880, 125, 1880, 125, 125, + 125, 126, 125, 1880, 1880, 126, 148, 125, + 1880, 148, 148, 125, 148, 1880, 1880, 125, + 148, 1880, 125, 1880, 1880, 1880, 125, 126, + 126, 1880, 125, 1880, 148, 1880, 125, 148, + 125, 126, 125, 125, 1880, 1880, 148, 1880, + 148, 126, 148, 125, 1880, 125, 148, 148, + 1880, 125, 1880, 1880, 1880, 148, 1880, 148, + 1880, 125, 1880, 1880, 125, 1933, 1934, 255, + 1935, 257, 258, 259, 260, 261, 262, 263, + 151, 1936, 265, 1937, 
267, 1938, 269, 125, + 1626, 1880, 1626, 1880, 1485, 125, 1626, 125, + 148, 148, 125, 125, 1880, 125, 1485, 1880, + 125, 125, 1880, 125, 148, 1880, 125, 125, + 148, 148, 148, 148, 148, 1880, 125, 1939, + 1940, 272, 151, 273, 125, 148, 1880, 148, + 125, 1880, 125, 153, 274, 275, 153, 1549, + 1941, 1942, 1943, 280, 281, 282, 1944, 284, + 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, + 1953, 1954, 295, 296, 152, 1955, 153, 151, + 125, 148, 148, 1880, 1880, 125, 148, 1880, + 148, 125, 1880, 125, 148, 1880, 1880, 1880, + 1880, 125, 148, 1880, 148, 1880, 125, 148, + 148, 125, 126, 125, 148, 125, 1880, 126, + 148, 1880, 148, 125, 1880, 125, 125, 148, + 1880, 148, 1880, 125, 148, 125, 126, 125, + 125, 126, 125, 1880, 148, 1880, 125, 1880, + 148, 1880, 126, 1880, 125, 1880, 1880, 1880, + 1880, 125, 1880, 148, 1880, 148, 1880, 125, + 148, 1880, 1880, 126, 125, 1956, 674, 302, + 303, 304, 305, 306, 307, 1957, 1704, 1958, + 1706, 312, 1959, 1960, 151, 125, 1880, 572, + 148, 148, 572, 572, 572, 125, 1626, 1626, + 1880, 1880, 1485, 125, 1880, 125, 148, 1880, + 148, 125, 148, 148, 148, 148, 1880, 125, + 1961, 1962, 317, 318, 319, 1963, 1964, 1965, + 1966, 324, 125, 325, 326, 153, 327, 328, + 1967, 330, 1968, 332, 1969, 334, 335, 153, + 151, 1581, 337, 338, 153, 339, 340, 341, + 342, 343, 344, 345, 346, 1970, 348, 349, + 1971, 351, 352, 353, 153, 259, 151, 354, + 125, 1880, 125, 1880, 125, 148, 125, 1880, + 125, 148, 148, 1880, 1880, 1880, 1880, 148, + 148, 148, 1880, 125, 148, 148, 1880, 125, + 1972, 1973, 1974, 1587, 1975, 1976, 1977, 1978, + 1979, 364, 1980, 1981, 1982, 1983, 1984, 1985, + 1986, 1983, 1987, 1988, 1602, 1989, 375, 1604, + 377, 125, 1880, 148, 1880, 125, 125, 126, + 125, 1880, 148, 125, 125, 1880, 1880, 148, + 1880, 126, 125, 1880, 148, 148, 125, 1880, + 148, 1880, 125, 125, 148, 125, 125, 126, + 148, 125, 1880, 148, 148, 1880, 125, 1880, + 125, 126, 125, 148, 148, 1880, 148, 148, + 148, 148, 148, 148, 1880, 125, 148, 1880, + 1880, 1880, 1880, 148, 1880, 1880, 
1880, 125, + 148, 1880, 125, 125, 148, 125, 126, 125, + 1880, 148, 1880, 1880, 125, 125, 148, 125, + 1880, 148, 125, 126, 125, 1880, 148, 1880, + 125, 1880, 126, 125, 153, 382, 1605, 1990, + 1991, 1608, 386, 153, 1992, 1993, 151, 125, + 148, 1880, 125, 148, 1880, 125, 125, 1880, + 125, 148, 1880, 148, 125, 153, 389, 1994, + 125, 148, 148, 1880, 1880, 125, 1995, 1996, + 1997, 153, 394, 395, 396, 397, 398, 399, + 400, 401, 402, 403, 1615, 1998, 1999, 2000, + 151, 125, 1880, 1880, 125, 125, 125, 125, + 1880, 1880, 125, 1880, 1880, 125, 125, 125, + 1880, 1880, 1880, 1880, 125, 153, 2001, 409, + 410, 411, 151, 125, 125, 1880, 125, 148, + 2002, 125, 2003, 2004, 2005, 2007, 2006, 125, + 1880, 1880, 125, 125, 1880, 1880, 125, 1880, + 148, 2008, 148, 181, 148, 420, 2008, 420, + 420, 420, 148, 420, 148, 420, 2008, 420, + 2008, 148, 420, 420, 2008, 420, 148, 2008, + 148, 181, 2008, 2008, 572, 572, 420, 2008, + 2008, 2008, 148, 420, 421, 2008, 148, 2008, + 421, 148, 420, 420, 420, 420, 2008, 2008, + 421, 420, 420, 148, 148, 2008, 148, 2008, + 420, 420, 148, 2008, 2008, 420, 148, 148, + 421, 148, 2008, 148, 420, 2009, 2010, 184, + 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, + 2019, 2020, 2021, 2022, 2023, 2024, 2025, 2026, + 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, + 2035, 2036, 2037, 2038, 2039, 420, 148, 148, + 148, 148, 2008, 420, 148, 2008, 420, 2008, + 420, 148, 148, 2008, 148, 420, 148, 420, + 421, 148, 2008, 420, 420, 420, 2008, 2008, + 420, 420, 420, 420, 2008, 148, 148, 420, + 420, 420, 420, 420, 148, 420, 421, 148, + 420, 2008, 2008, 2008, 148, 148, 148, 148, + 148, 148, 148, 2008, 420, 420, 420, 420, + 420, 420, 148, 420, 421, 148, 420, 2008, + 148, 2008, 148, 148, 148, 148, 148, 148, + 2008, 420, 420, 420, 148, 148, 420, 148, + 420, 421, 420, 2008, 148, 2008, 148, 148, + 148, 148, 148, 148, 2008, 420, 148, 2008, + 2008, 2008, 2008, 148, 148, 2008, 421, 420, + 2008, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 2008, 420, 148, 2008, 2008, + 2008, 
2008, 421, 420, 148, 2008, 148, 148, + 148, 148, 2008, 420, 420, 420, 420, 420, + 148, 420, 148, 420, 421, 420, 2008, 148, + 2008, 148, 148, 148, 148, 148, 2008, 420, + 148, 2008, 2008, 2008, 2008, 148, 2008, 421, + 148, 420, 148, 2008, 148, 148, 148, 2008, + 420, 420, 420, 148, 420, 420, 148, 420, + 421, 420, 148, 420, 2008, 148, 2008, 148, + 148, 148, 420, 2008, 2008, 148, 2008, 2008, + 421, 2008, 420, 2008, 2008, 420, 2008, 421, + 420, 2008, 2008, 2008, 420, 2008, 421, 420, + 148, 2008, 2008, 2008, 2008, 421, 2008, 420, + 148, 148, 2008, 420, 2008, 2008, 148, 2008, + 2008, 420, 2008, 420, 2040, 2041, 2042, 217, + 218, 219, 220, 221, 2043, 223, 224, 225, + 226, 227, 228, 2044, 2045, 2046, 2047, 2048, + 234, 2049, 236, 2050, 483, 484, 2051, 2052, + 2053, 2054, 2055, 2056, 2057, 2058, 646, 2059, + 151, 152, 2060, 249, 250, 251, 252, 151, + 153, 151, 420, 2008, 420, 421, 2008, 2008, + 2008, 2008, 2008, 420, 2008, 2008, 421, 2008, + 148, 420, 420, 2008, 420, 148, 148, 148, + 2008, 148, 2008, 420, 148, 2008, 148, 148, + 2008, 420, 2008, 420, 420, 420, 421, 420, + 2008, 2008, 421, 148, 420, 2008, 148, 148, + 420, 148, 2008, 2008, 420, 148, 2008, 420, + 2008, 2008, 2008, 420, 421, 421, 2008, 420, + 2008, 148, 2008, 420, 148, 420, 421, 420, + 420, 2008, 2008, 148, 2008, 148, 421, 148, + 420, 2008, 420, 148, 148, 2008, 420, 2008, + 2008, 2008, 148, 2008, 148, 2008, 420, 2008, + 2008, 420, 2061, 2062, 255, 2063, 257, 258, + 259, 260, 261, 262, 263, 151, 2064, 265, + 2065, 267, 2066, 269, 420, 181, 181, 2008, + 181, 2008, 571, 420, 420, 148, 148, 420, + 420, 2008, 420, 571, 2008, 420, 420, 2008, + 420, 148, 2008, 420, 420, 148, 148, 148, + 148, 148, 2008, 420, 2067, 2068, 272, 151, + 273, 420, 148, 2008, 148, 420, 2008, 420, + 153, 274, 275, 153, 657, 2069, 2070, 2071, + 280, 281, 282, 2072, 284, 2073, 2074, 2075, + 2076, 2077, 2078, 2079, 2080, 2081, 2082, 295, + 296, 152, 2083, 153, 151, 420, 148, 148, + 2008, 2008, 420, 148, 2008, 148, 420, 2008, + 420, 148, 2008, 2008, 
2008, 2008, 420, 148, + 2008, 148, 2008, 420, 148, 148, 420, 421, + 420, 148, 420, 2008, 421, 148, 2008, 148, + 420, 2008, 420, 420, 148, 2008, 148, 2008, + 420, 148, 420, 421, 420, 420, 421, 420, + 2008, 148, 2008, 420, 2008, 148, 2008, 421, + 2008, 420, 2008, 2008, 2008, 2008, 420, 2008, + 148, 2008, 148, 2008, 420, 148, 2008, 2008, + 421, 420, 2084, 674, 302, 303, 304, 305, + 306, 307, 2085, 932, 2086, 934, 312, 2087, + 2088, 151, 420, 2008, 572, 148, 148, 572, + 572, 572, 420, 181, 2008, 2008, 571, 420, + 2008, 420, 148, 2008, 148, 420, 148, 148, + 148, 148, 2008, 420, 2089, 2090, 317, 318, + 319, 2091, 2092, 2093, 2094, 324, 420, 325, + 326, 153, 327, 328, 2095, 330, 2096, 332, + 2097, 334, 335, 153, 151, 690, 337, 338, + 153, 339, 340, 341, 342, 343, 344, 345, + 346, 2098, 348, 349, 2099, 351, 352, 353, + 153, 259, 151, 354, 420, 2008, 420, 2008, + 420, 148, 420, 2008, 420, 148, 148, 2008, + 2008, 2008, 2008, 148, 148, 148, 2008, 420, + 148, 148, 2008, 420, 2100, 2101, 2102, 696, + 2103, 2104, 2105, 2106, 2107, 364, 2108, 2109, + 2110, 2111, 2112, 2113, 2114, 2111, 2115, 2116, + 552, 2117, 375, 712, 377, 420, 2008, 148, + 2008, 420, 420, 421, 420, 2008, 148, 420, + 420, 2008, 2008, 148, 2008, 421, 420, 2008, + 148, 148, 420, 2008, 148, 2008, 420, 420, + 148, 420, 420, 421, 148, 420, 2008, 148, + 148, 2008, 420, 2008, 420, 421, 420, 148, + 148, 2008, 148, 148, 148, 148, 148, 148, + 2008, 420, 148, 2008, 2008, 2008, 2008, 148, + 2008, 2008, 2008, 420, 148, 2008, 420, 420, + 148, 420, 421, 420, 2008, 148, 2008, 2008, + 420, 420, 148, 420, 2008, 148, 420, 421, + 420, 2008, 148, 2008, 420, 2008, 421, 420, + 153, 382, 713, 2118, 2119, 716, 386, 153, + 2120, 2121, 151, 420, 148, 2008, 420, 148, + 2008, 420, 420, 2008, 420, 148, 2008, 148, + 420, 153, 389, 2122, 420, 148, 148, 2008, + 2008, 420, 2123, 2124, 2125, 153, 394, 395, + 396, 397, 398, 399, 400, 401, 402, 403, + 723, 2126, 2127, 2128, 151, 420, 2008, 2008, + 420, 420, 420, 420, 2008, 2008, 420, 2008, + 2008, 
420, 420, 420, 2008, 2008, 2008, 2008, + 420, 153, 2129, 409, 410, 411, 151, 420, + 420, 2008, 420, 148, 2130, 420, 2131, 2132, + 2133, 2135, 2134, 420, 2008, 2008, 420, 420, + 2008, 2008, 420, 2008, 148, 2136, 148, 148, + 420, 2136, 420, 420, 420, 148, 420, 148, + 420, 2136, 420, 2136, 148, 420, 420, 2136, + 420, 148, 2136, 148, 2136, 2136, 572, 572, + 420, 2136, 2136, 2136, 148, 420, 126, 2136, + 148, 2136, 126, 148, 420, 420, 420, 420, + 2136, 2136, 126, 420, 420, 148, 148, 2136, + 148, 2136, 420, 420, 148, 2136, 2136, 420, + 148, 148, 126, 148, 2136, 148, 420, 2137, + 2138, 184, 2139, 2140, 2141, 2142, 2143, 2144, + 2145, 2146, 2147, 2148, 2149, 2150, 2151, 2152, + 2153, 2154, 2155, 2156, 2157, 2158, 2159, 2160, + 2161, 2162, 2163, 2164, 2165, 2166, 2167, 420, + 148, 148, 148, 148, 2136, 420, 148, 2136, + 420, 2136, 420, 148, 148, 2136, 148, 420, + 148, 420, 126, 148, 2136, 420, 420, 420, + 2136, 2136, 420, 420, 420, 420, 2136, 148, + 148, 420, 420, 420, 420, 420, 148, 420, + 126, 148, 420, 2136, 2136, 2136, 148, 148, + 148, 148, 148, 148, 148, 2136, 420, 420, + 420, 420, 420, 420, 148, 420, 126, 148, + 420, 2136, 148, 2136, 148, 148, 148, 148, + 148, 148, 2136, 420, 420, 420, 148, 148, + 420, 148, 420, 126, 420, 2136, 148, 2136, + 148, 148, 148, 148, 148, 148, 2136, 420, + 148, 2136, 2136, 2136, 2136, 148, 148, 2136, + 126, 420, 2136, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 2136, 420, 148, + 2136, 2136, 2136, 2136, 126, 420, 148, 2136, + 148, 148, 148, 148, 2136, 420, 420, 420, + 420, 420, 148, 420, 148, 420, 126, 420, + 2136, 148, 2136, 148, 148, 148, 148, 148, + 2136, 420, 148, 2136, 2136, 2136, 2136, 148, + 2136, 126, 148, 420, 148, 2136, 148, 148, + 148, 2136, 420, 420, 420, 148, 420, 420, + 148, 420, 126, 420, 148, 420, 2136, 148, + 2136, 148, 148, 148, 420, 2136, 2136, 148, + 2136, 2136, 126, 2136, 420, 2136, 2136, 420, + 2136, 126, 420, 2136, 2136, 2136, 420, 2136, + 126, 420, 148, 2136, 2136, 2136, 2136, 126, + 2136, 420, 148, 148, 2136, 
420, 2136, 2136, + 148, 2136, 2136, 420, 2136, 420, 2168, 2169, + 2170, 217, 218, 219, 220, 221, 2171, 223, + 224, 225, 226, 227, 228, 2172, 2173, 2174, + 2175, 2176, 234, 2177, 236, 2178, 1528, 1529, + 2179, 2180, 2181, 2182, 2183, 2184, 2185, 2186, + 1538, 2187, 151, 152, 2188, 249, 250, 251, + 252, 151, 153, 151, 420, 2136, 420, 126, + 2136, 2136, 2136, 2136, 2136, 420, 2136, 2136, + 126, 2136, 148, 420, 420, 2136, 420, 148, + 148, 148, 2136, 148, 2136, 420, 148, 2136, + 148, 148, 2136, 420, 2136, 420, 420, 420, + 126, 420, 2136, 2136, 126, 148, 420, 2136, + 148, 148, 420, 148, 2136, 2136, 420, 148, + 2136, 420, 2136, 2136, 2136, 420, 126, 126, + 2136, 420, 2136, 148, 2136, 420, 148, 420, + 126, 420, 420, 2136, 2136, 148, 2136, 148, + 126, 148, 420, 2136, 420, 148, 148, 2136, + 420, 2136, 2136, 2136, 148, 2136, 148, 2136, + 420, 2136, 2136, 420, 2189, 2190, 255, 2191, + 257, 258, 259, 260, 261, 262, 263, 151, + 2192, 265, 2193, 267, 2194, 269, 420, 2136, + 2136, 2136, 420, 420, 148, 148, 420, 420, + 420, 2136, 2136, 420, 420, 2136, 420, 148, + 2136, 420, 420, 148, 148, 148, 148, 148, + 2136, 420, 2195, 2196, 1044, 272, 151, 273, + 1045, 1046, 1047, 1048, 420, 148, 2136, 1049, + 148, 420, 2136, 1049, 1049, 420, 153, 274, + 275, 153, 1549, 2197, 2198, 2199, 280, 281, + 282, 2200, 284, 2201, 2202, 2203, 2204, 2205, + 2206, 2207, 2208, 2209, 2210, 295, 296, 152, + 2211, 153, 151, 420, 148, 148, 2136, 2136, + 420, 148, 2136, 148, 420, 2136, 420, 148, + 2136, 2136, 2136, 2136, 420, 148, 2136, 148, + 2136, 420, 148, 148, 420, 126, 420, 148, + 420, 2136, 126, 148, 2136, 148, 420, 2136, + 420, 420, 148, 2136, 148, 2136, 420, 148, + 420, 126, 420, 420, 126, 420, 2136, 148, + 2136, 420, 2136, 148, 2136, 126, 2136, 420, + 2136, 2136, 2136, 2136, 420, 2136, 148, 2136, + 148, 2136, 420, 148, 2136, 2136, 126, 420, + 2212, 674, 302, 303, 304, 305, 306, 307, + 2213, 2214, 2215, 2216, 1194, 2217, 2218, 151, + 420, 2136, 572, 148, 148, 572, 572, 572, + 420, 2136, 2136, 2136, 420, 
2136, 148, 148, + 2, 2136, 420, 148, 2136, 148, 2, 2136, + 148, 420, 1049, 148, 148, 148, 148, 2136, + 420, 2219, 2220, 317, 318, 319, 2221, 2222, + 2223, 2224, 324, 420, 325, 326, 153, 327, + 328, 2225, 330, 2226, 332, 2227, 334, 335, + 153, 151, 1581, 337, 338, 153, 339, 340, + 341, 342, 343, 344, 345, 346, 2228, 348, + 349, 2229, 351, 352, 353, 153, 259, 151, + 354, 420, 2136, 420, 2136, 420, 148, 420, + 2136, 420, 148, 148, 2136, 2136, 2136, 2136, + 148, 148, 148, 2136, 420, 148, 148, 2136, + 420, 2230, 2231, 2232, 1587, 2233, 2234, 2235, + 2236, 2237, 364, 2238, 2239, 2240, 2241, 2242, + 2243, 2244, 2241, 2245, 2246, 1602, 2247, 375, + 1604, 377, 420, 2136, 148, 2136, 420, 420, + 126, 420, 2136, 148, 420, 420, 2136, 2136, + 148, 2136, 126, 420, 2136, 148, 148, 420, + 2136, 148, 2136, 420, 420, 148, 420, 420, + 126, 148, 420, 2136, 148, 148, 2136, 420, + 2136, 420, 126, 420, 148, 148, 2136, 148, + 148, 148, 148, 148, 148, 2136, 420, 148, + 2136, 2136, 2136, 2136, 148, 2136, 2136, 2136, + 420, 148, 2136, 420, 420, 148, 420, 126, + 420, 2136, 148, 2136, 2136, 420, 420, 148, + 420, 2136, 148, 420, 126, 420, 2136, 148, + 2136, 420, 2136, 126, 420, 153, 382, 1605, + 2248, 2249, 1608, 386, 153, 2250, 2251, 151, + 420, 148, 2136, 420, 148, 2136, 420, 420, + 2136, 420, 148, 2136, 148, 420, 1162, 153, + 389, 2252, 420, 148, 148, 2136, 2136, 420, + 2253, 2254, 2255, 153, 394, 395, 396, 397, + 398, 399, 400, 401, 402, 403, 1615, 2256, + 2257, 2258, 151, 420, 2136, 2136, 420, 420, + 420, 420, 2136, 2136, 420, 2136, 2136, 420, + 420, 420, 2136, 2136, 2136, 2136, 420, 153, + 2259, 409, 410, 411, 151, 420, 420, 2136, + 420, 148, 2260, 420, 2261, 2262, 2263, 2265, + 2264, 420, 2136, 2136, 420, 420, 2136, 2136, + 420, 2136, 148, 2266, 148, 148, 420, 420, + 2266, 420, 420, 148, 420, 148, 420, 2266, + 420, 2266, 148, 420, 420, 2266, 420, 148, + 2266, 148, 2266, 2266, 572, 572, 420, 2266, + 2266, 2266, 148, 420, 126, 2266, 148, 2266, + 126, 148, 420, 420, 420, 420, 2266, 2266, + 
126, 420, 420, 148, 148, 2266, 148, 2266, + 420, 420, 148, 2266, 2266, 420, 148, 148, + 126, 148, 2266, 148, 420, 2267, 2268, 184, + 2269, 2270, 2271, 2272, 2273, 2274, 2275, 2276, + 2277, 2278, 2279, 2280, 2281, 2282, 2283, 2284, + 2285, 2286, 2287, 2288, 2289, 2290, 2291, 2292, + 2293, 2294, 2295, 2296, 2297, 420, 148, 148, + 148, 148, 2266, 420, 148, 2266, 420, 2266, + 420, 148, 148, 2266, 148, 420, 148, 420, + 126, 148, 2266, 420, 420, 420, 2266, 2266, + 420, 420, 420, 420, 2266, 148, 148, 420, + 420, 420, 420, 420, 148, 420, 126, 148, + 420, 2266, 2266, 2266, 148, 148, 148, 148, + 148, 148, 148, 2266, 420, 420, 420, 420, + 420, 420, 148, 420, 126, 148, 420, 2266, + 148, 2266, 148, 148, 148, 148, 148, 148, + 2266, 420, 420, 420, 148, 148, 420, 148, + 420, 126, 420, 2266, 148, 2266, 148, 148, + 148, 148, 148, 148, 2266, 420, 148, 2266, + 2266, 2266, 2266, 148, 148, 2266, 126, 420, + 2266, 148, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 2266, 420, 148, 2266, 2266, + 2266, 2266, 126, 420, 148, 2266, 148, 148, + 148, 148, 2266, 420, 420, 420, 420, 420, + 148, 420, 148, 420, 126, 420, 2266, 148, + 2266, 148, 148, 148, 148, 148, 2266, 420, + 148, 2266, 2266, 2266, 2266, 148, 2266, 126, + 148, 420, 148, 2266, 148, 148, 148, 2266, + 420, 420, 420, 148, 420, 420, 148, 420, + 126, 420, 148, 420, 2266, 148, 2266, 148, + 148, 148, 420, 2266, 2266, 148, 2266, 2266, + 126, 2266, 420, 2266, 2266, 420, 2266, 126, + 420, 2266, 2266, 2266, 420, 2266, 126, 420, + 148, 2266, 2266, 2266, 2266, 126, 2266, 420, + 148, 148, 2266, 420, 2266, 2266, 148, 2266, + 2266, 420, 2266, 420, 2298, 2299, 2300, 217, + 218, 219, 220, 221, 2301, 223, 224, 225, + 226, 227, 228, 2302, 2303, 2304, 2305, 2306, + 234, 2307, 236, 2308, 1528, 1529, 2309, 2310, + 2311, 2312, 2313, 2314, 2315, 2316, 1538, 2317, + 151, 152, 2318, 249, 250, 251, 252, 151, + 153, 151, 420, 2266, 420, 126, 2266, 2266, + 2266, 2266, 2266, 420, 2266, 2266, 126, 2266, + 148, 420, 420, 2266, 420, 148, 148, 148, + 2266, 148, 
2266, 420, 148, 2266, 148, 148, + 2266, 420, 2266, 420, 420, 420, 126, 420, + 2266, 2266, 126, 148, 420, 2266, 148, 148, + 420, 148, 2266, 2266, 420, 148, 2266, 420, + 2266, 2266, 2266, 420, 126, 126, 2266, 420, + 2266, 148, 2266, 420, 148, 420, 126, 420, + 420, 2266, 2266, 148, 2266, 148, 126, 148, + 420, 2266, 420, 148, 148, 2266, 420, 2266, + 2266, 2266, 148, 2266, 148, 2266, 420, 2266, + 2266, 420, 2319, 2320, 255, 2321, 257, 258, + 259, 260, 261, 262, 263, 151, 2322, 265, + 2323, 267, 2324, 269, 420, 2266, 2266, 2136, + 420, 420, 148, 148, 420, 420, 2266, 420, + 2136, 2266, 420, 420, 2266, 420, 148, 2266, + 420, 420, 148, 148, 148, 148, 148, 2266, + 420, 2325, 2326, 1044, 272, 151, 273, 1045, + 1046, 1047, 1048, 420, 148, 2266, 1049, 148, + 420, 2266, 1049, 1049, 420, 153, 274, 275, + 153, 1549, 2327, 2328, 2329, 280, 281, 282, + 2330, 284, 2331, 2332, 2333, 2334, 2335, 2336, + 2337, 2338, 2339, 2340, 295, 296, 152, 2341, + 153, 151, 420, 148, 148, 2266, 2266, 420, + 148, 2266, 148, 420, 2266, 420, 148, 2266, + 2266, 2266, 2266, 420, 148, 2266, 148, 2266, + 420, 148, 148, 420, 126, 420, 148, 420, + 2266, 126, 148, 2266, 148, 420, 2266, 420, + 420, 148, 2266, 148, 2266, 420, 148, 420, + 126, 420, 420, 126, 420, 2266, 148, 2266, + 420, 2266, 148, 2266, 126, 2266, 420, 2266, + 2266, 2266, 2266, 420, 2266, 148, 2266, 148, + 2266, 420, 148, 2266, 2266, 126, 420, 2342, + 674, 302, 303, 304, 305, 306, 307, 2343, + 2214, 2344, 2216, 1194, 2345, 2346, 151, 420, + 2266, 572, 148, 148, 572, 572, 572, 420, + 2266, 2266, 2136, 420, 2266, 420, 148, 2266, + 148, 420, 1049, 148, 148, 148, 148, 2266, + 420, 2347, 2348, 317, 318, 319, 2349, 2350, + 2351, 2352, 324, 420, 325, 326, 153, 327, + 328, 2353, 330, 2354, 332, 2355, 334, 335, + 153, 151, 1581, 337, 338, 153, 339, 340, + 341, 342, 343, 344, 345, 346, 2356, 348, + 349, 2357, 351, 352, 353, 153, 259, 151, + 354, 420, 2266, 420, 2266, 420, 148, 420, + 2266, 420, 148, 148, 2266, 2266, 2266, 2266, + 148, 148, 148, 2266, 420, 
148, 148, 2266, + 420, 2358, 2359, 2360, 1587, 2361, 2362, 2363, + 2364, 2365, 364, 2366, 2367, 2368, 2369, 2370, + 2371, 2372, 2369, 2373, 2374, 1602, 2375, 375, + 1604, 377, 420, 2266, 148, 2266, 420, 420, + 126, 420, 2266, 148, 420, 420, 2266, 2266, + 148, 2266, 126, 420, 2266, 148, 148, 420, + 2266, 148, 2266, 420, 420, 148, 420, 420, + 126, 148, 420, 2266, 148, 148, 2266, 420, + 2266, 420, 126, 420, 148, 148, 2266, 148, + 148, 148, 148, 148, 148, 2266, 420, 148, + 2266, 2266, 2266, 2266, 148, 2266, 2266, 2266, + 420, 148, 2266, 420, 420, 148, 420, 126, + 420, 2266, 148, 2266, 2266, 420, 420, 148, + 420, 2266, 148, 420, 126, 420, 2266, 148, + 2266, 420, 2266, 126, 420, 153, 382, 1605, + 2376, 2377, 1608, 386, 153, 2378, 2379, 151, + 420, 148, 2266, 420, 148, 2266, 420, 420, + 2266, 420, 148, 2266, 148, 420, 1162, 153, + 389, 2380, 420, 148, 148, 2266, 2266, 420, + 2381, 2382, 2383, 153, 394, 395, 396, 397, + 398, 399, 400, 401, 402, 403, 1615, 2384, + 2385, 2386, 151, 420, 2266, 2266, 420, 420, + 420, 420, 2266, 2266, 420, 2266, 2266, 420, + 420, 420, 2266, 2266, 2266, 2266, 420, 153, + 2387, 409, 410, 411, 151, 420, 420, 2266, + 420, 148, 2388, 420, 2389, 2390, 2391, 2393, + 2392, 420, 2266, 2266, 420, 420, 2266, 2266, + 420, 2266, 2395, 2394, 2, 2395, 2, 2395, + 2395, 2394, 2395, 2395, 2394, 2395, 2395, 2395, + 2394, 2395, 2395, 2395, 2394, 2395, 2395, 2394, + 2395, 2395, 2395, 2395, 2394, 2395, 2395, 2395, + 2394, 2394, 2395, 2395, 2394, 2395, 2394, 2396, + 2397, 2398, 2399, 2400, 2402, 2403, 2404, 2406, + 2407, 2408, 2409, 2410, 2411, 2412, 2413, 2414, + 2415, 2416, 2417, 2418, 2419, 2420, 2421, 2422, + 2423, 2401, 2405, 2394, 2395, 2395, 2395, 2395, + 2, 2395, 2, 2395, 2, 2, 2, 2395, + 2, 2, 2, 2395, 2395, 2395, 2395, 2, + 2, 2, 2, 2, 2, 2395, 2, 2, + 2, 2, 2, 2, 2395, 2, 2, 2, + 2, 2395, 2395, 2395, 2395, 2, 2395, 2395, + 2395, 2395, 2395, 2, 2395, 2395, 2, 2395, + 2395, 2395, 2395, 2, 2395, 2395, 2, 2, + 2, 2, 2, 2, 2395, 2395, 2395, 2395, + 2395, 2395, 
2, 2395, 2395, 2, 2, 2, + 2, 2, 2, 2395, 2395, 2, 2395, 2395, + 2395, 2395, 2395, 2, 2395, 2395, 2, 2395, + 2, 2395, 2395, 2395, 2, 2395, 2, 2395, + 2395, 2395, 2395, 2395, 2, 2395, 2, 2395, + 2395, 2395, 2395, 2, 2395, 2, 2424, 2425, + 2426, 2427, 2428, 2429, 2430, 2431, 2432, 2433, + 2434, 2435, 2436, 2437, 2438, 2439, 2440, 2441, + 2442, 2443, 2444, 2394, 2395, 2, 2395, 2395, + 2395, 2395, 2395, 2, 2395, 2395, 2395, 2, + 2395, 2, 2395, 2395, 2, 2395, 2395, 2, + 2395, 2, 2, 2, 2395, 2395, 2, 2395, + 2, 2395, 2395, 2, 2395, 2, 2395, 2395, + 2395, 2, 2395, 2, 2395, 2395, 2, 2, + 2, 2395, 2395, 2395, 2, 2395, 2, 2395, + 2, 2395, 2395, 2395, 2395, 2395, 2, 2395, + 2395, 2, 2445, 2446, 2447, 2448, 2449, 2450, + 2394, 2395, 2395, 2, 2395, 2395, 2, 2395, + 2, 2395, 2, 2395, 2, 2395, 2, 2451, + 2452, 2394, 2395, 2, 2395, 2, 2453, 2454, + 2455, 2456, 2457, 2458, 2459, 2460, 2461, 2462, + 2463, 2464, 2465, 2466, 2467, 2394, 2395, 2395, + 2, 2395, 2, 2395, 2, 2395, 2395, 2395, + 2395, 2, 2395, 2395, 2, 2, 2, 2395, + 2395, 2, 2395, 2, 2395, 2395, 2, 2, + 2, 2395, 2395, 2, 2395, 2395, 2395, 2, + 2395, 2395, 2395, 2395, 2, 2395, 2395, 2395, + 2, 2395, 2395, 2, 2468, 2469, 2449, 2454, + 2470, 2394, 2395, 2, 2395, 2395, 2, 2395, + 2, 2471, 2472, 2473, 2474, 2475, 2476, 2394, + 2477, 2478, 2479, 2480, 2481, 2, 2395, 2, + 2395, 2, 2395, 2, 2395, 2395, 2395, 2395, + 2395, 2, 2395, 2, 2482, 2483, 2484, 2485, + 2486, 2487, 2488, 2489, 2490, 2491, 2492, 2493, + 2494, 2495, 2496, 2493, 2497, 2498, 2499, 2, + 2395, 2395, 2, 2, 2395, 2, 2, 2, + 2395, 2395, 2395, 2, 2395, 2, 2395, 2395, + 2, 2, 2, 2395, 2395, 2, 2395, 2, + 2395, 2395, 2395, 2, 2395, 2395, 2395, 2395, + 2395, 2395, 2395, 2, 2395, 2, 2, 2395, + 2395, 2395, 2, 2, 2, 2395, 2, 2395, + 2395, 2, 2395, 2, 2500, 2501, 2502, 2503, + 2, 2395, 2, 2395, 2, 2395, 2, 2395, + 2, 2504, 2, 2395, 2395, 2, 2505, 2506, + 2507, 2508, 2509, 2510, 2, 2395, 2395, 2, + 2, 2, 2, 2395, 2395, 2, 2395, 2395, + 2, 2, 2, 2395, 2395, 2395, 2395, 2, + 
2511, 2, 2395, 2, 2512, 2394, 2513, 2514, + 2515, 2517, 2516, 2, 2395, 2395, 2, 2, + 2395, 2395, 148, 2518, 148, 148, 0, 0, + 2518, 0, 0, 148, 0, 148, 0, 2518, + 0, 2518, 148, 0, 0, 2518, 0, 148, + 2518, 2518, 2518, 148, 148, 0, 2518, 2518, + 2518, 148, 0, 2518, 148, 2518, 148, 0, + 0, 0, 0, 2518, 2518, 0, 0, 0, + 148, 148, 2518, 148, 2518, 0, 0, 148, + 2518, 2518, 0, 148, 148, 148, 2518, 148, + 0, 2519, 2520, 184, 2521, 2522, 2523, 2524, + 2525, 2526, 2527, 2528, 2529, 2530, 2531, 2532, + 2533, 2534, 2535, 2536, 2537, 2538, 2539, 2540, + 2541, 2542, 2543, 2544, 2545, 2546, 2547, 2548, + 2549, 0, 148, 148, 148, 148, 2518, 0, + 148, 2518, 0, 2518, 0, 148, 148, 2518, + 148, 148, 0, 148, 2518, 0, 0, 0, + 2518, 2518, 0, 0, 0, 0, 2518, 148, + 148, 0, 0, 0, 0, 0, 148, 0, + 148, 0, 2518, 2518, 2518, 148, 148, 148, + 148, 148, 148, 148, 2518, 0, 0, 0, + 0, 0, 0, 148, 0, 148, 0, 2518, + 148, 2518, 148, 148, 148, 148, 148, 148, + 2518, 0, 0, 0, 148, 148, 0, 148, + 0, 2518, 148, 2518, 148, 148, 148, 148, + 148, 148, 2518, 0, 148, 2518, 2518, 2518, + 2518, 148, 148, 2518, 0, 2518, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 148, + 2518, 0, 148, 2518, 2518, 2518, 2518, 0, + 148, 2518, 148, 148, 148, 148, 2518, 0, + 0, 0, 0, 0, 148, 0, 148, 0, + 2518, 148, 2518, 148, 148, 148, 148, 148, + 2518, 0, 148, 2518, 2518, 2518, 2518, 148, + 2518, 148, 0, 148, 2518, 148, 148, 148, + 2518, 0, 0, 0, 148, 0, 0, 148, + 0, 148, 0, 2518, 148, 2518, 148, 148, + 148, 0, 2518, 2518, 148, 2518, 2518, 2518, + 0, 2518, 2518, 0, 2518, 0, 2518, 2518, + 2518, 0, 2518, 0, 148, 2518, 2518, 2518, + 2518, 2518, 0, 148, 148, 2518, 0, 2518, + 2518, 148, 2518, 2518, 0, 2518, 0, 2550, + 2551, 2552, 217, 218, 219, 220, 221, 2553, + 223, 224, 225, 226, 227, 228, 2554, 2555, + 2556, 2557, 2558, 234, 2559, 236, 2560, 2561, + 2562, 2563, 2564, 2565, 2566, 2567, 2568, 246, + 2569, 151, 152, 2570, 249, 250, 251, 252, + 151, 153, 151, 0, 2518, 0, 2518, 2518, + 2518, 2518, 2518, 0, 2518, 2518, 2518, 148, + 0, 0, 
2518, 0, 148, 148, 148, 2518, + 148, 2518, 0, 148, 2518, 148, 148, 2518, + 0, 2518, 0, 0, 0, 2518, 2518, 148, + 0, 2518, 148, 148, 0, 148, 2518, 2518, + 0, 148, 2518, 0, 2518, 2518, 2518, 0, + 2518, 0, 2518, 148, 2518, 0, 148, 0, + 0, 2518, 2518, 148, 2518, 148, 148, 0, + 2518, 0, 148, 148, 2518, 0, 2518, 2518, + 2518, 148, 2518, 148, 2518, 0, 2518, 2518, + 0, 2571, 2572, 255, 2573, 257, 258, 259, + 260, 261, 262, 263, 151, 2574, 265, 2575, + 267, 2576, 269, 0, 2518, 2518, 0, 148, + 148, 2518, 2518, 0, 2518, 0, 0, 2518, + 0, 148, 2518, 0, 0, 148, 148, 148, + 148, 148, 2518, 0, 2577, 2578, 272, 151, + 273, 0, 148, 2518, 148, 0, 2518, 0, + 153, 274, 275, 153, 276, 2579, 2580, 2581, + 280, 281, 282, 2582, 284, 2583, 2584, 2585, + 2586, 2587, 2588, 2589, 2590, 2591, 2592, 295, + 296, 152, 2593, 153, 151, 0, 148, 148, + 2518, 2518, 0, 148, 2518, 148, 0, 2518, + 0, 148, 2518, 2518, 2518, 2518, 0, 148, + 2518, 148, 2518, 0, 148, 148, 0, 148, + 0, 2518, 148, 2518, 148, 0, 2518, 0, + 0, 148, 2518, 148, 2518, 0, 148, 0, + 0, 2518, 148, 2518, 0, 2518, 148, 2518, + 2518, 0, 2518, 2518, 2518, 2518, 0, 2518, + 148, 2518, 148, 2518, 0, 148, 2518, 2518, + 0, 2594, 301, 302, 303, 304, 305, 306, + 307, 2595, 309, 2596, 311, 312, 2597, 2598, + 151, 0, 2518, 148, 148, 148, 148, 148, + 148, 0, 2518, 2518, 0, 2518, 0, 148, + 2518, 148, 0, 148, 148, 148, 148, 2518, + 0, 2599, 2600, 317, 318, 319, 2601, 2602, + 2603, 2604, 324, 0, 325, 326, 153, 327, + 328, 2605, 330, 2606, 332, 2607, 334, 335, + 153, 151, 336, 337, 338, 153, 339, 340, + 341, 342, 343, 344, 345, 346, 2608, 348, + 349, 2609, 351, 352, 353, 153, 259, 151, + 354, 0, 2518, 0, 2518, 0, 148, 0, + 2518, 0, 148, 148, 2518, 2518, 2518, 2518, + 148, 148, 148, 2518, 0, 148, 148, 2518, + 0, 2610, 2611, 2612, 358, 2613, 2614, 2615, + 2616, 2617, 364, 2618, 2619, 2620, 2621, 2622, + 2623, 2624, 2621, 2625, 2626, 2627, 375, 376, + 377, 0, 2518, 148, 2518, 0, 0, 2518, + 148, 0, 0, 2518, 2518, 148, 2518, 0, + 2518, 148, 148, 0, 2518, 148, 
2518, 0, + 148, 148, 148, 0, 0, 2518, 148, 148, + 2518, 0, 2518, 0, 148, 148, 2518, 148, + 148, 148, 148, 148, 148, 2518, 0, 148, + 2518, 2518, 2518, 2518, 148, 2518, 2518, 2518, + 0, 148, 2518, 0, 0, 148, 0, 2518, + 148, 2518, 2518, 0, 0, 148, 0, 2518, + 148, 0, 2518, 148, 2518, 0, 2518, 0, + 153, 382, 343, 2628, 2629, 385, 386, 153, + 2630, 2631, 151, 0, 148, 2518, 0, 148, + 2518, 0, 0, 2518, 0, 148, 2518, 148, + 0, 153, 389, 2632, 0, 148, 148, 2518, + 2518, 0, 2633, 2634, 2635, 153, 394, 395, + 396, 397, 398, 399, 400, 401, 402, 403, + 404, 2636, 2637, 2638, 151, 0, 2518, 2518, + 0, 0, 0, 0, 2518, 2518, 0, 2518, + 2518, 0, 0, 0, 2518, 2518, 2518, 2518, + 0, 153, 2639, 409, 410, 411, 151, 0, + 0, 2518, 0, 148, 2640, 0, 2641, 2642, + 2643, 2645, 2644, 0, 2518, 2518, 0, 0, + 2518, 2518, 0, 2518, 148, 2646, 148, 148, + 420, 420, 2646, 420, 443, 148, 420, 148, + 420, 2646, 420, 2646, 148, 443, 420, 420, + 2646, 420, 148, 2646, 148, 2646, 2646, 572, + 572, 420, 2646, 2646, 443, 2646, 148, 420, + 421, 443, 2646, 148, 2646, 421, 148, 420, + 420, 420, 420, 2646, 2646, 421, 420, 420, + 148, 148, 2646, 148, 2646, 420, 420, 148, + 2646, 2646, 420, 148, 443, 148, 421, 148, + 2646, 148, 420, 2647, 2648, 184, 2649, 2650, + 2651, 2652, 2653, 2654, 2655, 2656, 2657, 2658, + 2659, 2660, 2661, 2662, 2663, 2664, 2665, 2666, + 2667, 2668, 2669, 2670, 2671, 2672, 2673, 2674, + 2675, 2676, 2677, 420, 148, 148, 148, 148, + 2646, 420, 148, 2646, 420, 2646, 420, 148, + 148, 2646, 148, 420, 148, 420, 421, 148, + 2646, 420, 420, 420, 2646, 2646, 420, 420, + 420, 420, 2646, 148, 148, 420, 420, 420, + 420, 420, 148, 420, 421, 148, 420, 2646, + 2646, 2646, 148, 148, 148, 148, 148, 148, + 148, 2646, 420, 420, 420, 420, 420, 420, + 148, 420, 421, 148, 420, 2646, 148, 2646, + 148, 148, 148, 148, 148, 148, 2646, 420, + 420, 420, 148, 148, 420, 148, 420, 421, + 420, 2646, 148, 2646, 148, 148, 148, 148, + 148, 148, 2646, 420, 148, 2646, 2646, 2646, + 2646, 148, 148, 2646, 421, 420, 2646, 148, + 
148, 148, 148, 148, 148, 148, 148, 148, + 148, 2646, 420, 148, 2646, 2646, 2646, 2646, + 421, 420, 148, 2646, 148, 148, 148, 148, + 2646, 420, 420, 420, 420, 420, 148, 420, + 148, 420, 421, 420, 2646, 148, 2646, 148, + 148, 148, 148, 148, 2646, 420, 148, 2646, + 2646, 2646, 2646, 148, 2646, 421, 148, 420, + 148, 2646, 148, 148, 148, 2646, 420, 420, + 420, 148, 420, 420, 148, 420, 421, 420, + 148, 420, 2646, 148, 2646, 148, 148, 148, + 420, 2646, 2646, 148, 2646, 2646, 421, 2646, + 420, 2646, 2646, 420, 2646, 421, 420, 2646, + 2646, 2646, 420, 2646, 421, 420, 148, 2646, + 2646, 2646, 2646, 421, 2646, 420, 148, 148, + 2646, 420, 2646, 2646, 148, 2646, 2646, 420, + 2646, 420, 2678, 2679, 2680, 217, 218, 219, + 220, 221, 2681, 223, 224, 225, 226, 227, + 228, 2682, 2683, 2684, 2685, 2686, 234, 2687, + 236, 2688, 483, 484, 2689, 2690, 2691, 2692, + 2693, 2694, 2695, 2696, 646, 2697, 151, 152, + 2698, 249, 250, 251, 252, 151, 153, 151, + 420, 2646, 420, 421, 2646, 2646, 2646, 2646, + 2646, 420, 2646, 2646, 421, 2646, 148, 420, + 420, 2646, 420, 148, 148, 148, 2646, 148, + 2646, 420, 148, 2646, 148, 148, 2646, 420, + 2646, 420, 420, 420, 421, 420, 2646, 2646, + 421, 148, 420, 2646, 148, 148, 420, 148, + 2646, 2646, 420, 148, 2646, 420, 2646, 2646, + 2646, 420, 421, 421, 2646, 420, 2646, 148, + 2646, 420, 148, 420, 421, 420, 420, 2646, + 2646, 148, 2646, 148, 421, 148, 420, 2646, + 420, 148, 148, 2646, 420, 2646, 2646, 2646, + 148, 2646, 148, 2646, 420, 2646, 2646, 420, + 2699, 2700, 255, 2701, 257, 258, 259, 260, + 261, 262, 263, 151, 2702, 265, 2703, 267, + 2704, 269, 420, 443, 2646, 443, 2646, 571, + 420, 443, 420, 148, 148, 420, 420, 2646, + 420, 571, 2646, 420, 420, 2646, 420, 148, + 2646, 420, 420, 148, 148, 148, 148, 148, + 2646, 420, 2705, 2706, 272, 151, 273, 420, + 148, 2646, 148, 420, 2646, 420, 153, 274, + 275, 153, 657, 2707, 2708, 2709, 280, 281, + 282, 2710, 284, 2711, 2712, 2713, 2714, 2715, + 2716, 2717, 2718, 2719, 2720, 295, 296, 152, + 2721, 153, 151, 
420, 148, 148, 2646, 2646, + 420, 148, 2646, 148, 420, 2646, 420, 148, + 2646, 2646, 2646, 2646, 420, 148, 2646, 148, + 2646, 420, 148, 148, 420, 421, 420, 148, + 420, 2646, 421, 148, 2646, 148, 420, 2646, + 420, 420, 148, 2646, 148, 2646, 420, 148, + 420, 421, 420, 420, 421, 420, 2646, 148, + 2646, 420, 2646, 148, 2646, 421, 2646, 420, + 2646, 2646, 2646, 2646, 420, 2646, 148, 2646, + 148, 2646, 420, 148, 2646, 2646, 421, 420, + 2722, 674, 302, 303, 304, 305, 306, 307, + 2723, 1321, 2724, 1323, 312, 2725, 2726, 151, + 420, 2646, 572, 148, 148, 572, 572, 572, + 420, 443, 443, 2646, 2646, 571, 420, 2646, + 420, 148, 2646, 148, 420, 148, 148, 148, + 148, 2646, 420, 2727, 2728, 317, 318, 319, + 2729, 2730, 2731, 2732, 324, 420, 325, 326, + 153, 327, 328, 2733, 330, 2734, 332, 2735, + 334, 335, 153, 151, 690, 337, 338, 153, + 339, 340, 341, 342, 343, 344, 345, 346, + 2736, 348, 349, 2737, 351, 352, 353, 153, + 259, 151, 354, 420, 2646, 420, 2646, 420, + 148, 420, 2646, 420, 148, 148, 2646, 2646, + 2646, 2646, 148, 148, 148, 2646, 420, 148, + 148, 2646, 420, 2738, 2739, 2740, 696, 2741, + 2742, 2743, 2744, 2745, 364, 2746, 2747, 2748, + 2749, 2750, 2751, 2752, 2749, 2753, 2754, 552, + 2755, 375, 712, 377, 420, 2646, 148, 2646, + 420, 420, 421, 420, 2646, 148, 420, 420, + 2646, 2646, 148, 2646, 421, 420, 2646, 148, + 148, 420, 2646, 148, 2646, 420, 420, 148, + 420, 420, 421, 148, 420, 2646, 148, 148, + 2646, 420, 2646, 420, 421, 420, 148, 148, + 2646, 148, 148, 148, 148, 148, 148, 2646, + 420, 148, 2646, 2646, 2646, 2646, 148, 2646, + 2646, 2646, 420, 148, 2646, 420, 420, 148, + 420, 421, 420, 2646, 148, 2646, 2646, 420, + 420, 148, 420, 2646, 148, 420, 421, 420, + 2646, 148, 2646, 420, 2646, 421, 420, 153, + 382, 713, 2756, 2757, 716, 386, 153, 2758, + 2759, 151, 420, 148, 2646, 420, 148, 2646, + 420, 420, 2646, 420, 148, 2646, 148, 420, + 153, 389, 2760, 420, 148, 148, 2646, 2646, + 420, 2761, 2762, 2763, 153, 394, 395, 396, + 397, 398, 399, 400, 401, 402, 403, 723, + 
2764, 2765, 2766, 151, 420, 2646, 2646, 420, + 420, 420, 420, 2646, 2646, 420, 2646, 2646, + 420, 420, 420, 2646, 2646, 2646, 2646, 420, + 153, 2767, 409, 410, 411, 151, 420, 420, + 2646, 420, 148, 2768, 420, 2769, 2770, 2771, + 2773, 2772, 420, 2646, 2646, 420, 420, 2646, + 2646, 420, 2646, 148, 2774, 148, 148, 420, + 420, 2774, 420, 420, 148, 420, 148, 420, + 2774, 420, 2774, 148, 420, 420, 2774, 420, + 148, 2774, 148, 2774, 2774, 572, 572, 420, + 2774, 2774, 2774, 148, 420, 421, 2774, 148, + 2774, 421, 148, 420, 420, 420, 420, 2774, + 2774, 421, 420, 420, 148, 148, 2774, 148, + 2774, 420, 420, 148, 2774, 2774, 420, 148, + 148, 421, 148, 2774, 148, 420, 2775, 2776, + 184, 2777, 2778, 2779, 2780, 2781, 2782, 2783, + 2784, 2785, 2786, 2787, 2788, 2789, 2790, 2791, + 2792, 2793, 2794, 2795, 2796, 2797, 2798, 2799, + 2800, 2801, 2802, 2803, 2804, 2805, 420, 148, + 148, 148, 148, 2774, 420, 148, 2774, 420, + 2774, 420, 148, 148, 2774, 148, 420, 148, + 420, 421, 148, 2774, 420, 420, 420, 2774, + 2774, 420, 420, 420, 420, 2774, 148, 148, + 420, 420, 420, 420, 420, 148, 420, 421, + 148, 420, 2774, 2774, 2774, 148, 148, 148, + 148, 148, 148, 148, 2774, 420, 420, 420, + 420, 420, 420, 148, 420, 421, 148, 420, + 2774, 148, 2774, 148, 148, 148, 148, 148, + 148, 2774, 420, 420, 420, 148, 148, 420, + 148, 420, 421, 420, 2774, 148, 2774, 148, + 148, 148, 148, 148, 148, 2774, 420, 148, + 2774, 2774, 2774, 2774, 148, 148, 2774, 421, + 420, 2774, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 2774, 420, 148, 2774, + 2774, 2774, 2774, 421, 420, 148, 2774, 148, + 148, 148, 148, 2774, 420, 420, 420, 420, + 420, 148, 420, 148, 420, 421, 420, 2774, + 148, 2774, 148, 148, 148, 148, 148, 2774, + 420, 148, 2774, 2774, 2774, 2774, 148, 2774, + 421, 148, 420, 148, 2774, 148, 148, 148, + 2774, 420, 420, 420, 148, 420, 420, 148, + 420, 421, 420, 148, 420, 2774, 148, 2774, + 148, 148, 148, 420, 2774, 2774, 148, 2774, + 2774, 421, 2774, 420, 2774, 2774, 420, 2774, + 421, 420, 2774, 2774, 
2774, 420, 2774, 421, + 420, 148, 2774, 2774, 2774, 2774, 421, 2774, + 420, 148, 148, 2774, 420, 2774, 2774, 148, + 2774, 2774, 420, 2774, 420, 2806, 2807, 2808, + 217, 218, 219, 220, 221, 2809, 223, 224, + 225, 226, 227, 228, 2810, 2811, 2812, 2813, + 2814, 234, 2815, 236, 2816, 483, 484, 2817, + 2818, 2819, 2820, 2821, 2822, 2823, 2824, 646, + 2825, 151, 152, 2826, 249, 250, 251, 252, + 151, 153, 151, 420, 2774, 420, 421, 2774, + 2774, 2774, 2774, 2774, 420, 2774, 2774, 421, + 2774, 148, 420, 420, 2774, 420, 148, 148, + 148, 2774, 148, 2774, 420, 148, 2774, 148, + 148, 2774, 420, 2774, 420, 420, 420, 421, + 420, 2774, 2774, 421, 148, 420, 2774, 148, + 148, 420, 148, 2774, 2774, 420, 148, 2774, + 420, 2774, 2774, 2774, 420, 421, 421, 2774, + 420, 2774, 148, 2774, 420, 148, 420, 421, + 420, 420, 2774, 2774, 148, 2774, 148, 421, + 148, 420, 2774, 420, 148, 148, 2774, 420, + 2774, 2774, 2774, 148, 2774, 148, 2774, 420, + 2774, 2774, 420, 2827, 2828, 255, 2829, 257, + 258, 259, 260, 261, 262, 263, 151, 2830, + 265, 2831, 267, 2832, 269, 420, 2774, 2774, + 571, 420, 420, 148, 148, 420, 420, 2774, + 420, 571, 2774, 420, 420, 2774, 420, 148, + 2774, 420, 420, 148, 148, 148, 148, 148, + 2774, 420, 2833, 2834, 1044, 272, 151, 273, + 1045, 1046, 1047, 1048, 420, 148, 2774, 1049, + 148, 420, 2774, 1049, 1049, 420, 153, 274, + 275, 153, 657, 2835, 2836, 2837, 280, 281, + 282, 2838, 284, 2839, 2840, 2841, 2842, 2843, + 2844, 2845, 2846, 2847, 2848, 295, 296, 152, + 2849, 153, 151, 420, 148, 148, 2774, 2774, + 420, 148, 2774, 148, 420, 2774, 420, 148, + 2774, 2774, 2774, 2774, 420, 148, 2774, 148, + 2774, 420, 148, 148, 420, 421, 420, 148, + 420, 2774, 421, 148, 2774, 148, 420, 2774, + 420, 420, 148, 2774, 148, 2774, 420, 148, + 420, 421, 420, 420, 421, 420, 2774, 148, + 2774, 420, 2774, 148, 2774, 421, 2774, 420, + 2774, 2774, 2774, 2774, 420, 2774, 148, 2774, + 148, 2774, 420, 148, 2774, 2774, 421, 420, + 2850, 674, 302, 303, 304, 305, 306, 307, + 2851, 676, 2852, 678, 1194, 
2853, 2854, 151, + 420, 2774, 572, 148, 148, 572, 572, 572, + 420, 2774, 2774, 571, 420, 2774, 420, 148, + 2774, 148, 420, 1049, 148, 148, 148, 148, + 2774, 420, 2855, 2856, 317, 318, 319, 2857, + 2858, 2859, 2860, 324, 420, 325, 326, 153, + 327, 328, 2861, 330, 2862, 332, 2863, 334, + 335, 153, 151, 690, 337, 338, 153, 339, + 340, 341, 342, 343, 344, 345, 346, 2864, + 348, 349, 2865, 351, 352, 353, 153, 259, + 151, 354, 420, 2774, 420, 2774, 420, 148, + 420, 2774, 420, 148, 148, 2774, 2774, 2774, + 2774, 148, 148, 148, 2774, 420, 148, 148, + 2774, 420, 2866, 2867, 2868, 696, 2869, 2870, + 2871, 2872, 2873, 364, 2874, 2875, 2876, 2877, + 2878, 2879, 2880, 2877, 2881, 2882, 552, 2883, + 375, 712, 377, 420, 2774, 148, 2774, 420, + 420, 421, 420, 2774, 148, 420, 420, 2774, + 2774, 148, 2774, 421, 420, 2774, 148, 148, + 420, 2774, 148, 2774, 420, 420, 148, 420, + 420, 421, 148, 420, 2774, 148, 148, 2774, + 420, 2774, 420, 421, 420, 148, 148, 2774, + 148, 148, 148, 148, 148, 148, 2774, 420, + 148, 2774, 2774, 2774, 2774, 148, 2774, 2774, + 2774, 420, 148, 2774, 420, 420, 148, 420, + 421, 420, 2774, 148, 2774, 2774, 420, 420, + 148, 420, 2774, 148, 420, 421, 420, 2774, + 148, 2774, 420, 2774, 421, 420, 153, 382, + 713, 2884, 2885, 716, 386, 153, 2886, 2887, + 151, 420, 148, 2774, 420, 148, 2774, 420, + 420, 2774, 420, 148, 2774, 148, 420, 1162, + 153, 389, 2888, 420, 148, 148, 2774, 2774, + 420, 2889, 2890, 2891, 153, 394, 395, 396, + 397, 398, 399, 400, 401, 402, 403, 723, + 2892, 2893, 2894, 151, 420, 2774, 2774, 420, + 420, 420, 420, 2774, 2774, 420, 2774, 2774, + 420, 420, 420, 2774, 2774, 2774, 2774, 420, + 153, 2895, 409, 410, 411, 151, 420, 420, + 2774, 420, 148, 2896, 420, 2897, 2898, 2899, + 2901, 2900, 420, 2774, 2774, 420, 420, 2774, + 2774, 420, 2774, 2008, 148, 148, 2008, 148, + 2008, 2008, 148, 2008, 2008, 148, 2008, 2008, + 2008, 148, 2008, 2008, 2008, 148, 2008, 2008, + 148, 2008, 2008, 2008, 2008, 148, 2008, 2008, + 2008, 148, 148, 2008, 2008, 148, 2008, 
148, + 2902, 2903, 2904, 2905, 2906, 2908, 2909, 2910, + 2912, 2913, 2914, 2915, 2916, 2917, 2918, 2919, + 2920, 2921, 2922, 2923, 2924, 2925, 2926, 2927, + 2928, 2929, 2907, 2911, 148, 2930, 2931, 2932, + 2933, 2934, 2935, 2936, 2937, 2938, 2939, 2940, + 2941, 2942, 2943, 2944, 2945, 2946, 2947, 2948, + 2949, 2950, 148, 2951, 2952, 2953, 2954, 2955, + 2956, 148, 2957, 2958, 148, 2959, 2960, 2961, + 2962, 2963, 2964, 2965, 2966, 2967, 2968, 2969, + 2970, 2971, 2972, 2973, 148, 2974, 2975, 2955, + 2960, 2976, 148, 2977, 2978, 2979, 2980, 2981, + 2982, 148, 2983, 148, 148, 148, 148, 148, + 2395, 0, 148, 2395, 0, 148, 148, 2395, + 148, 0, 148, 0, 126, 148, 2395, 0, + 0, 0, 2395, 2395, 0, 0, 0, 0, + 2395, 148, 148, 0, 0, 0, 0, 0, + 148, 0, 126, 148, 0, 2395, 2395, 2395, + 148, 148, 148, 148, 148, 148, 148, 2395, + 0, 0, 0, 0, 0, 0, 148, 0, + 126, 148, 0, 2395, 148, 2395, 148, 148, + 148, 148, 148, 148, 2395, 0, 0, 0, + 148, 148, 0, 148, 0, 126, 0, 2395, + 148, 2395, 148, 148, 148, 148, 148, 148, + 2395, 0, 148, 2395, 2395, 2395, 2395, 148, + 148, 2395, 126, 0, 2395, 148, 148, 148, + 148, 148, 148, 148, 148, 148, 148, 2395, + 0, 148, 2395, 2395, 2395, 2395, 126, 0, + 148, 2395, 148, 148, 148, 148, 2395, 0, + 0, 0, 0, 0, 148, 0, 148, 0, + 126, 0, 2395, 148, 2395, 148, 148, 148, + 148, 148, 2395, 0, 148, 2395, 2395, 2395, + 2395, 148, 2395, 126, 148, 0, 148, 2395, + 148, 148, 148, 2395, 0, 0, 0, 148, + 0, 0, 148, 0, 126, 0, 148, 0, + 2395, 148, 2395, 148, 148, 148, 0, 2395, + 2395, 148, 2395, 2395, 126, 2395, 0, 2395, + 126, 0, 2395, 126, 0, 148, 2395, 2395, + 2395, 2395, 126, 2395, 0, 148, 148, 2395, + 0, 2395, 2395, 148, 2395, 2395, 0, 126, + 2395, 2395, 2395, 2395, 2395, 0, 2395, 2395, + 126, 2395, 148, 0, 148, 2984, 148, 2984, + 148, 181, 148, 2985, 2, 2984, 2985, 2985, + 148, 2985, 148, 2985, 2984, 2985, 2984, 148, + 2985, 2985, 2984, 2985, 148, 2984, 148, 181, + 2984, 2984, 572, 572, 2985, 2984, 2984, 2984, + 148, 2985, 421, 2984, 148, 2984, 421, 148, + 2985, 2985, 
2985, 2985, 2984, 2984, 421, 2985, + 2985, 148, 148, 2984, 148, 2984, 2985, 2985, + 148, 2984, 2984, 2985, 148, 148, 421, 148, + 2984, 148, 2985, 2986, 2987, 184, 2988, 2989, + 2990, 2991, 2992, 2993, 2994, 2995, 2996, 2997, + 2998, 2999, 3000, 3001, 3002, 3003, 3004, 3005, + 3006, 3007, 3008, 3009, 3010, 3011, 3012, 3013, + 3014, 3015, 3016, 2985, 148, 148, 148, 148, + 2984, 2, 148, 2984, 2, 2984, 2, 148, + 148, 2984, 148, 2, 148, 2, 421, 148, + 2984, 2, 2, 2, 2984, 2984, 2, 2, + 2, 2, 2984, 148, 148, 2, 2, 2, + 2, 2, 148, 2, 421, 148, 2, 2984, + 2984, 2984, 148, 148, 148, 148, 148, 148, + 148, 2984, 2, 2, 2, 2, 2, 2, + 148, 2, 421, 148, 2, 2984, 148, 2984, + 148, 148, 148, 148, 148, 148, 2984, 2, + 2, 2, 148, 148, 2, 148, 2, 421, + 2, 2984, 148, 2984, 148, 148, 148, 148, + 148, 148, 2984, 2, 148, 2984, 2984, 2984, + 2984, 148, 148, 2984, 421, 2, 2984, 148, + 148, 148, 148, 148, 148, 148, 148, 148, + 148, 2984, 2, 148, 2984, 2984, 2984, 2984, + 421, 2, 148, 2984, 148, 148, 148, 148, + 2984, 2, 2, 2, 2, 2, 148, 2, + 148, 2, 421, 2, 2984, 148, 2984, 148, + 148, 148, 148, 148, 2984, 2, 148, 2984, + 2984, 2984, 2984, 148, 2984, 421, 148, 2, + 148, 2984, 148, 148, 148, 2984, 2, 2, + 2, 148, 2, 2, 148, 2, 421, 2, + 148, 2, 2984, 148, 2984, 148, 148, 148, + 2, 2984, 2984, 148, 2984, 2984, 421, 2984, + 2, 2984, 2984, 2, 2984, 421, 2, 2984, + 2984, 2984, 2, 2984, 421, 2, 148, 2984, + 2984, 2984, 2984, 421, 2984, 2, 148, 148, + 2984, 2, 2984, 2984, 148, 2984, 2984, 2, + 2984, 2, 3017, 3018, 3019, 217, 3020, 3022, + 218, 219, 220, 221, 3023, 223, 224, 225, + 226, 227, 228, 3024, 3025, 3026, 3027, 3028, + 234, 3029, 236, 3030, 483, 484, 3031, 3032, + 3033, 3034, 3035, 3036, 3037, 3038, 646, 3039, + 151, 152, 3040, 249, 250, 251, 252, 3021, + 151, 153, 151, 2985, 2984, 2, 421, 2984, + 2984, 2984, 2984, 2984, 2, 2984, 2984, 421, + 2984, 148, 2, 2984, 148, 2984, 2, 2984, + 2, 148, 148, 148, 2984, 148, 2984, 2, + 148, 2984, 148, 148, 2984, 2, 2984, 2, + 2, 2, 421, 2, 2984, 2984, 
421, 148, + 2, 2984, 148, 148, 2, 148, 2984, 2984, + 2, 148, 2984, 2, 2984, 2984, 2984, 2, + 421, 421, 2984, 2, 2984, 148, 2984, 2, + 148, 2, 421, 2, 2, 2984, 2984, 148, + 2984, 148, 421, 148, 2, 2984, 2, 148, + 148, 2984, 2, 2984, 2984, 2984, 148, 2984, + 148, 2984, 2, 2984, 2984, 2, 3041, 3042, + 255, 3043, 257, 258, 259, 260, 261, 262, + 263, 151, 3044, 265, 3045, 267, 3046, 269, + 2985, 181, 181, 2984, 181, 2984, 571, 2, + 2, 148, 148, 2, 2, 2984, 2, 571, + 2984, 2, 2, 2984, 2, 148, 2984, 2, + 2, 148, 148, 148, 148, 148, 2984, 2, + 3047, 3048, 3049, 3021, 3050, 3051, 3052, 2985, + 148, 2984, 148, 2, 2984, 2, 148, 2984, + 2, 2, 148, 2, 2984, 3053, 2, 3053, + 2985, 2985, 3053, 2985, 3053, 3053, 2985, 3053, + 3053, 2985, 3053, 3053, 3053, 2985, 3053, 3053, + 3053, 2985, 3053, 3053, 2985, 3053, 3053, 3053, + 3053, 2985, 3053, 3053, 3053, 2985, 2985, 3053, + 3053, 2985, 3053, 2985, 3054, 3055, 3056, 3057, + 3058, 3060, 3061, 3062, 3064, 3065, 3066, 3067, + 3068, 3069, 3070, 3071, 3072, 3073, 3074, 3075, + 3076, 3077, 3078, 3079, 3080, 3081, 3059, 3063, + 2985, 3053, 3053, 3053, 3053, 2985, 3053, 2985, + 3053, 2985, 2985, 2985, 3053, 2985, 2985, 2985, + 3053, 3053, 3053, 3053, 2985, 2985, 2985, 2985, + 2985, 2985, 3053, 2985, 2985, 2985, 2985, 2985, + 2985, 3053, 2985, 2985, 2985, 2985, 3053, 3053, + 3053, 3053, 2985, 3053, 3053, 3053, 3053, 3053, + 2985, 3053, 3053, 2985, 3053, 3053, 3053, 3053, + 2985, 3053, 3053, 2985, 2985, 2985, 2985, 2985, + 2985, 3053, 3053, 3053, 3053, 3053, 3053, 2985, + 3053, 3053, 2985, 2985, 2985, 2985, 2985, 2985, + 3053, 3053, 2985, 3053, 3053, 3053, 3053, 3053, + 2985, 3053, 3053, 2985, 3053, 2985, 3053, 3053, + 3053, 2985, 3053, 2985, 3053, 3053, 3053, 3053, + 3053, 2985, 3053, 2985, 3053, 3053, 3053, 3053, + 2985, 3053, 2985, 3082, 3083, 3084, 3085, 3087, + 3088, 3089, 3090, 3091, 3092, 3093, 3094, 3095, + 3096, 3097, 3098, 3099, 3100, 3101, 3102, 3103, + 3104, 3105, 3086, 2985, 3053, 2985, 3053, 3053, + 3053, 3053, 3053, 2985, 3053, 
3053, 3053, 2985, + 3053, 2985, 3053, 3053, 2985, 3053, 3053, 2985, + 3053, 3053, 2985, 3053, 2985, 2985, 2985, 3053, + 3053, 2985, 3053, 2985, 3053, 3053, 2985, 3053, + 2985, 3053, 3053, 3053, 2985, 3053, 2985, 3053, + 3053, 2985, 2985, 2985, 3053, 3053, 3053, 2985, + 3053, 2985, 3053, 2985, 3053, 3053, 3053, 3053, + 3053, 2985, 3053, 3053, 2985, 3106, 3107, 3108, + 3109, 3110, 3111, 2985, 3053, 3053, 2985, 3053, + 3053, 2985, 3053, 2985, 3053, 2985, 3053, 2985, + 3053, 2985, 3112, 3113, 3114, 3086, 3115, 3051, + 3052, 2985, 3053, 2985, 3053, 2985, 3053, 2985, + 2985, 3053, 3053, 2, 3116, 3117, 3118, 3119, + 3120, 3121, 3122, 3123, 3124, 3125, 3126, 3127, + 3128, 3129, 3130, 3085, 3086, 2985, 3053, 3053, + 2985, 3053, 2985, 3053, 2985, 3053, 3053, 3053, + 3053, 2985, 3053, 3053, 2985, 2985, 2985, 3053, + 3053, 2985, 3053, 3053, 2985, 3053, 3053, 2985, + 2985, 2985, 3053, 3053, 2985, 3053, 3053, 3053, + 2985, 3053, 3053, 3053, 3053, 2985, 3053, 3053, + 3053, 2985, 3053, 3053, 2985, 3086, 3131, 3132, + 2985, 3086, 2985, 3053, 2985, 2985, 3053, 3133, + 3134, 3110, 3135, 3136, 2985, 3053, 2985, 3053, + 3053, 2985, 3053, 2985, 3053, 3053, 3053, 3053, + 3053, 2985, 3137, 3138, 3139, 3140, 3141, 3142, + 2985, 3143, 3144, 3145, 3146, 3147, 2985, 3053, + 2985, 3053, 2985, 3053, 2985, 3053, 3053, 3053, + 3053, 3053, 2985, 3053, 2985, 3148, 3149, 3150, + 3151, 3152, 3153, 3154, 3155, 3156, 3157, 3158, + 3159, 3160, 3161, 3162, 3159, 3163, 3164, 3165, + 2985, 3053, 3053, 2985, 2985, 3053, 2985, 2985, + 2985, 3053, 3053, 3053, 2985, 3053, 2985, 3053, + 3053, 2985, 2985, 2985, 3053, 3053, 2985, 3053, + 2985, 3053, 3053, 3053, 2985, 3053, 3053, 3053, + 3053, 3053, 3053, 3053, 2985, 3053, 2985, 2985, + 3053, 3053, 3053, 2985, 2985, 2985, 3053, 2985, + 3053, 3053, 2985, 3053, 2985, 3166, 3167, 3168, + 3169, 2985, 3053, 2985, 3053, 2985, 3053, 2985, + 3053, 2985, 3170, 2985, 3053, 3053, 2985, 3171, + 3172, 3173, 3174, 3175, 3176, 2985, 3053, 3053, + 2985, 2985, 2985, 2985, 3053, 
3053, 2985, 3053, + 3053, 2985, 2985, 2985, 3053, 3053, 3053, 3053, + 2985, 3177, 2985, 3053, 2985, 3178, 2985, 3179, + 3087, 3085, 3180, 3086, 2985, 3053, 3053, 2985, + 153, 274, 275, 153, 657, 3181, 3182, 3183, + 280, 281, 282, 3184, 284, 3185, 3186, 3187, + 3188, 3189, 3190, 3191, 3192, 3193, 3194, 295, + 296, 152, 3195, 3196, 151, 3021, 2985, 148, + 148, 2984, 2984, 2, 148, 2984, 148, 2, + 2984, 2, 148, 2984, 2984, 2984, 2984, 2, + 148, 2984, 148, 2984, 2, 148, 148, 2, + 421, 2, 148, 2, 2984, 421, 148, 2984, + 148, 2, 2984, 2, 2984, 2, 148, 2984, + 148, 2984, 2, 148, 2, 421, 2, 2, + 421, 2, 2984, 148, 2984, 2, 2984, 148, + 2984, 421, 2984, 2, 2984, 2984, 2984, 2984, + 2, 2984, 148, 2984, 148, 2984, 2, 148, + 2984, 2984, 421, 2, 3021, 3197, 3198, 2985, + 3021, 2, 2984, 2, 2, 2984, 3199, 674, + 302, 303, 304, 305, 306, 307, 3200, 932, + 3201, 934, 312, 3202, 3203, 151, 2985, 2984, + 572, 148, 148, 572, 572, 572, 2, 181, + 2984, 2984, 571, 2, 2984, 2, 148, 2984, + 2, 2984, 2984, 2984, 2984, 2984, 2, 3204, + 3205, 317, 318, 319, 3206, 3207, 3208, 3209, + 324, 2985, 325, 326, 153, 327, 328, 3210, + 330, 3211, 332, 3212, 334, 335, 153, 151, + 690, 337, 338, 153, 339, 340, 341, 342, + 343, 344, 345, 346, 3213, 348, 349, 3214, + 351, 352, 353, 153, 259, 151, 354, 2, + 2984, 2, 2984, 2, 148, 2, 2984, 2, + 148, 148, 2984, 2984, 2984, 2984, 148, 148, + 148, 2984, 2, 148, 148, 2984, 2, 3215, + 3216, 3217, 696, 3218, 3219, 3220, 3221, 3222, + 364, 3223, 3224, 3225, 3226, 3227, 3228, 3229, + 3226, 3230, 3231, 552, 3232, 375, 712, 377, + 2, 2984, 148, 2984, 2, 2, 421, 2, + 2984, 148, 2, 2, 2984, 2984, 148, 2984, + 421, 2, 2984, 148, 148, 2, 2984, 148, + 2984, 2, 2, 148, 2, 2, 421, 148, + 2, 2984, 148, 148, 2984, 2, 2984, 2, + 421, 2, 148, 148, 2984, 148, 148, 148, + 148, 148, 148, 2984, 2, 148, 2984, 2984, + 2984, 2984, 148, 2984, 2984, 2984, 2, 148, + 2984, 2, 2, 148, 2, 421, 2, 2984, + 148, 2984, 2984, 2, 2, 148, 2, 2984, + 148, 2, 421, 2, 2984, 148, 2984, 2, + 2984, 421, 
2, 153, 382, 713, 3233, 3234, + 716, 386, 153, 3235, 3236, 151, 2, 148, + 2984, 2, 148, 2984, 2, 2, 2984, 2, + 148, 2984, 148, 2, 153, 389, 3237, 2, + 148, 148, 2984, 2984, 2, 3238, 3239, 3240, + 153, 394, 395, 396, 397, 398, 399, 400, + 401, 402, 403, 723, 3241, 3242, 3243, 151, + 2, 2984, 2984, 2, 2, 2, 2, 2984, + 2984, 2, 2984, 2984, 2, 2, 2, 2984, + 2984, 2984, 2984, 2, 153, 3244, 409, 410, + 411, 151, 2, 2, 2984, 2, 148, 3245, + 2985, 3246, 3247, 3196, 3248, 3021, 2, 2984, + 2984, 2, 2, 2984, 2, 2984, 0, 2395, + 0, 148, 148, 148, 2395, 148, 2395, 0, + 148, 2395, 148, 148, 2395, 0, 0, 0, + 126, 0, 2395, 2395, 126, 148, 0, 2395, + 148, 148, 0, 148, 2395, 2395, 0, 148, + 2395, 0, 126, 126, 2395, 0, 2395, 148, + 2395, 0, 148, 0, 126, 0, 0, 2395, + 2395, 148, 2395, 148, 126, 148, 0, 2395, + 0, 148, 148, 2395, 0, 2395, 2395, 2395, + 148, 2395, 148, 2395, 0, 2395, 3249, 2395, + 2136, 0, 0, 148, 148, 0, 0, 2395, + 0, 2136, 0, 2395, 0, 148, 2395, 0, + 0, 148, 148, 148, 148, 148, 2395, 0, + 3250, 3250, 0, 3250, 3251, 2, 3250, 2, + 3250, 3250, 3251, 3250, 3250, 3251, 3250, 3250, + 3250, 3251, 3250, 3250, 3250, 3251, 3250, 3250, + 3251, 3250, 3250, 3250, 3250, 3251, 3250, 3250, + 3250, 3251, 3251, 3250, 3250, 3251, 3250, 3251, + 3252, 3253, 3254, 3255, 3256, 3258, 3259, 3260, + 3262, 3263, 3264, 3265, 3266, 3267, 3268, 3269, + 3270, 3271, 3272, 3273, 3274, 3275, 3276, 3277, + 3278, 3279, 3257, 3261, 3251, 3250, 3250, 3250, + 3250, 2, 3250, 2, 3250, 2, 2, 2, + 3250, 2, 2, 2, 3250, 3250, 3250, 3250, + 2, 2, 2, 2, 2, 2, 3250, 2, + 2, 2, 2, 2, 2, 3250, 2, 2, + 2, 2, 3250, 3250, 3250, 3250, 2, 3250, + 3250, 3250, 3250, 3250, 2, 3250, 3250, 2, + 3250, 3250, 3250, 3250, 2, 3250, 3250, 2, + 2, 2, 2, 2, 2, 3250, 3250, 3250, + 3250, 3250, 3250, 2, 3250, 3250, 2, 2, + 2, 2, 2, 2, 3250, 3250, 2, 3250, + 3250, 3250, 3250, 3250, 2, 3250, 3250, 2, + 3250, 2, 3250, 3250, 3250, 2, 3250, 2, + 3250, 3250, 3250, 3250, 3250, 2, 3250, 2, + 3250, 3250, 3250, 3250, 2, 3250, 2, 3280, + 3281, 
3282, 3283, 3284, 3285, 3286, 3287, 3288, + 3289, 3290, 3291, 3292, 3293, 3294, 3295, 3296, + 3297, 3298, 3299, 3300, 3251, 3250, 2, 3250, + 3250, 3250, 3250, 3250, 2, 3250, 3250, 3250, + 2, 3250, 2, 3250, 3250, 2, 3250, 3250, + 2, 3250, 2, 2, 2, 3250, 3250, 2, + 3250, 2, 3250, 3250, 2, 3250, 2, 3250, + 3250, 3250, 2, 3250, 2, 3250, 3250, 2, + 2, 2, 3250, 3250, 3250, 2, 3250, 2, + 3250, 2, 3250, 3250, 3250, 3250, 3250, 2, + 3250, 3250, 2, 3301, 3302, 3303, 3304, 3305, + 3306, 3251, 3250, 3250, 2, 3250, 3250, 2, + 3250, 2, 3250, 2, 3250, 2, 3250, 2, + 3307, 3308, 3251, 3250, 2, 3250, 2, 3309, + 3310, 3311, 3312, 3313, 3314, 3315, 3316, 3317, + 3318, 3319, 3320, 3321, 3322, 3323, 3251, 3250, + 3250, 2, 3250, 2, 3250, 2, 3250, 3250, + 3250, 3250, 2, 3250, 3250, 2, 2, 2, + 3250, 3250, 2, 3250, 2, 3250, 3250, 2, + 2, 2, 3250, 3250, 2, 3250, 3250, 3250, + 2, 3250, 3250, 3250, 3250, 2, 3250, 3250, + 3250, 2, 3250, 3250, 2, 3324, 3325, 3305, + 3310, 3326, 3251, 3250, 2, 3250, 3250, 2, + 3250, 2, 3327, 3328, 3329, 3330, 3331, 3332, + 3251, 3333, 3334, 3335, 3336, 3337, 2, 3250, + 2, 3250, 2, 3250, 2, 3250, 3250, 3250, + 3250, 3250, 2, 3250, 2, 3338, 3339, 3340, + 3341, 3342, 3343, 3344, 3345, 3346, 3347, 3348, + 3349, 3350, 3351, 3352, 3349, 3353, 3354, 3355, + 2, 3250, 3250, 2, 2, 3250, 2, 2, + 2, 3250, 3250, 3250, 2, 3250, 2, 3250, + 3250, 2, 2, 2, 3250, 3250, 2, 3250, + 2, 3250, 3250, 3250, 2, 3250, 3250, 3250, + 3250, 3250, 3250, 3250, 2, 3250, 2, 2, + 3250, 3250, 3250, 2, 2, 2, 3250, 2, + 3250, 3250, 2, 3250, 2, 3356, 3357, 3358, + 3359, 2, 3250, 2, 3250, 2, 3250, 2, + 3250, 2, 3360, 2, 3250, 3250, 2, 3361, + 3362, 3363, 3364, 3365, 3366, 2, 3250, 3250, + 2, 2, 2, 2, 3250, 3250, 2, 3250, + 3250, 2, 2, 2, 3250, 3250, 3250, 3250, + 2, 3367, 2, 3250, 2, 3368, 3251, 3369, + 3370, 3371, 3373, 3372, 2, 3250, 3250, 2, + 2, 3250, 3250, 0, 3250, 0, 3250, 3374, + 3250, 3374, 148, 3250, 2395, 3375, 3376, 3250, + 0, 148, 3374, 148, 181, 148, 420, 420, + 3374, 420, 420, 148, 420, 
148, 420, 3374, + 420, 3374, 148, 420, 420, 3374, 420, 148, + 3374, 148, 181, 3374, 3374, 572, 572, 420, + 3374, 3374, 3374, 148, 420, 421, 3374, 148, + 3374, 421, 148, 420, 420, 420, 420, 3374, + 3374, 421, 420, 420, 148, 148, 3374, 148, + 3374, 420, 420, 148, 3374, 3374, 420, 148, + 148, 421, 148, 3374, 148, 420, 3377, 3378, + 184, 3379, 3380, 3381, 3382, 3383, 3384, 3385, + 3386, 3387, 3388, 3389, 3390, 3391, 3392, 3393, + 3394, 3395, 3396, 3397, 3398, 3399, 3400, 3401, + 3402, 3403, 3404, 3405, 3406, 3407, 420, 148, + 148, 148, 148, 3374, 420, 148, 3374, 420, + 3374, 420, 148, 148, 3374, 148, 420, 148, + 420, 421, 148, 3374, 420, 420, 420, 3374, + 3374, 420, 420, 420, 420, 3374, 148, 148, + 420, 420, 420, 420, 420, 148, 420, 421, + 148, 420, 3374, 3374, 3374, 148, 148, 148, + 148, 148, 148, 148, 3374, 420, 420, 420, + 420, 420, 420, 148, 420, 421, 148, 420, + 3374, 148, 3374, 148, 148, 148, 148, 148, + 148, 3374, 420, 420, 420, 148, 148, 420, + 148, 420, 421, 420, 3374, 148, 3374, 148, + 148, 148, 148, 148, 148, 3374, 420, 148, + 3374, 3374, 3374, 3374, 148, 148, 3374, 421, + 420, 3374, 148, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 3374, 420, 148, 3374, + 3374, 3374, 3374, 421, 420, 148, 3374, 148, + 148, 148, 148, 3374, 420, 420, 420, 420, + 420, 148, 420, 148, 420, 421, 420, 3374, + 148, 3374, 148, 148, 148, 148, 148, 3374, + 420, 148, 3374, 3374, 3374, 3374, 148, 3374, + 421, 148, 420, 148, 3374, 148, 148, 148, + 3374, 420, 420, 420, 148, 420, 420, 148, + 420, 421, 420, 148, 420, 3374, 148, 3374, + 148, 148, 148, 420, 3374, 3374, 148, 3374, + 3374, 421, 3374, 420, 3374, 3374, 420, 3374, + 421, 420, 3374, 3374, 3374, 420, 3374, 421, + 420, 148, 3374, 3374, 3374, 3374, 421, 3374, + 420, 148, 148, 3374, 420, 3374, 3374, 148, + 3374, 3374, 420, 3374, 420, 3408, 3409, 3410, + 217, 218, 219, 220, 221, 3411, 223, 224, + 225, 226, 227, 228, 3412, 3413, 3414, 3415, + 3416, 234, 3417, 236, 3418, 483, 484, 3419, + 3420, 3421, 3422, 3423, 3424, 3425, 3426, 646, + 
3427, 151, 152, 3428, 249, 250, 251, 252, + 151, 153, 151, 420, 3374, 420, 421, 3374, + 3374, 3374, 3374, 3374, 420, 3374, 3374, 421, + 3374, 148, 420, 420, 3374, 420, 148, 148, + 148, 3374, 148, 3374, 420, 148, 3374, 148, + 148, 3374, 420, 3374, 420, 420, 420, 421, + 420, 3374, 3374, 421, 148, 420, 3374, 148, + 148, 420, 148, 3374, 3374, 420, 148, 3374, + 420, 3374, 3374, 3374, 420, 421, 421, 3374, + 420, 3374, 148, 3374, 420, 148, 420, 421, + 420, 420, 3374, 3374, 148, 3374, 148, 421, + 148, 420, 3374, 420, 148, 148, 3374, 420, + 3374, 3374, 3374, 148, 3374, 148, 3374, 420, + 3374, 3374, 420, 3429, 3430, 255, 3431, 257, + 258, 259, 260, 261, 262, 263, 151, 3432, + 265, 3433, 267, 3434, 269, 420, 181, 181, + 3374, 181, 3374, 571, 420, 420, 148, 148, + 420, 420, 3374, 420, 571, 3374, 420, 420, + 3374, 420, 148, 3374, 420, 420, 148, 148, + 148, 148, 148, 3374, 420, 3435, 3436, 272, + 151, 273, 420, 148, 3374, 148, 420, 3374, + 420, 153, 274, 275, 153, 657, 3437, 3438, + 3439, 280, 281, 282, 3440, 284, 3441, 3442, + 3443, 3444, 3445, 3446, 3447, 3448, 3449, 3450, + 295, 296, 152, 3451, 153, 151, 420, 148, + 148, 3374, 3374, 420, 148, 3374, 148, 420, + 3374, 420, 148, 3374, 3374, 3374, 3374, 420, + 148, 3374, 148, 3374, 420, 148, 148, 420, + 421, 420, 148, 420, 3374, 421, 148, 3374, + 148, 420, 3374, 420, 420, 148, 3374, 148, + 3374, 420, 148, 420, 421, 420, 420, 421, + 420, 3374, 148, 3374, 420, 3374, 148, 3374, + 421, 3374, 420, 3374, 3374, 3374, 3374, 420, + 3374, 148, 3374, 148, 3374, 420, 148, 3374, + 3374, 421, 420, 3452, 674, 302, 303, 304, + 305, 306, 307, 3453, 932, 3454, 934, 312, + 3455, 3456, 151, 420, 3374, 572, 148, 148, + 572, 572, 572, 420, 181, 3374, 3374, 571, + 420, 3374, 420, 148, 3374, 148, 420, 148, + 148, 148, 148, 3374, 420, 3457, 3458, 317, + 318, 319, 3459, 3460, 3461, 3462, 324, 420, + 325, 326, 153, 327, 328, 3463, 330, 3464, + 332, 3465, 334, 335, 153, 151, 690, 337, + 338, 153, 339, 340, 341, 342, 343, 344, + 345, 346, 3466, 348, 349, 
3467, 351, 352, + 353, 153, 259, 151, 354, 420, 3374, 420, + 3374, 420, 148, 420, 3374, 420, 148, 148, + 3374, 3374, 3374, 3374, 148, 148, 148, 3374, + 420, 148, 148, 3374, 420, 3468, 3469, 3470, + 696, 3471, 3472, 3473, 3474, 3475, 364, 3476, + 3477, 3478, 3479, 3480, 3481, 3482, 3479, 3483, + 3484, 552, 3485, 375, 712, 377, 420, 3374, + 148, 3374, 420, 420, 421, 420, 3374, 148, + 420, 420, 3374, 3374, 148, 3374, 421, 420, + 3374, 148, 148, 420, 3374, 148, 3374, 420, + 420, 148, 420, 420, 421, 148, 420, 3374, + 148, 148, 3374, 420, 3374, 420, 421, 420, + 148, 148, 3374, 148, 148, 148, 148, 148, + 148, 3374, 420, 148, 3374, 3374, 3374, 3374, + 148, 3374, 3374, 3374, 420, 148, 3374, 420, + 420, 148, 420, 421, 420, 3374, 148, 3374, + 3374, 420, 420, 148, 420, 3374, 148, 420, + 421, 420, 3374, 148, 3374, 420, 3374, 421, + 420, 153, 382, 713, 3486, 3487, 716, 386, + 153, 3488, 3489, 151, 420, 148, 3374, 420, + 148, 3374, 420, 420, 3374, 420, 148, 3374, + 148, 420, 153, 389, 3490, 420, 148, 148, + 3374, 3374, 420, 3491, 3492, 3493, 153, 394, + 395, 396, 397, 398, 399, 400, 401, 402, + 403, 723, 3494, 3495, 3496, 151, 420, 3374, + 3374, 420, 420, 420, 420, 3374, 3374, 420, + 3374, 3374, 420, 420, 420, 3374, 3374, 3374, + 3374, 420, 153, 3497, 409, 410, 411, 151, + 420, 420, 3374, 420, 148, 3498, 420, 3499, + 3500, 3501, 3503, 3502, 420, 3374, 3374, 420, + 420, 3374, 3374, 420, 3374, 3375, 2985, 2985, + 3375, 2985, 3375, 3375, 2985, 3375, 3375, 2985, + 3375, 3375, 3375, 2985, 3375, 3375, 3375, 2985, + 3375, 3375, 2985, 3375, 3375, 3375, 3375, 2985, + 3375, 3375, 3375, 2985, 2985, 3375, 3375, 2985, + 3375, 2985, 3504, 3505, 3506, 3507, 3508, 3510, + 3511, 3512, 3514, 3515, 3516, 3517, 3518, 3519, + 3520, 3521, 3522, 3523, 3524, 3525, 3526, 3527, + 3528, 3529, 3530, 3531, 3509, 3513, 2985, 3375, + 3375, 3375, 3375, 2985, 3375, 2985, 3375, 2985, + 2985, 2985, 3375, 2985, 2985, 2985, 3375, 3375, + 3375, 3375, 2985, 2985, 2985, 2985, 2985, 2985, + 3375, 2985, 2985, 2985, 2985, 
2985, 2985, 3375, + 2985, 2985, 2985, 2985, 3375, 3375, 3375, 3375, + 2985, 3375, 3375, 3375, 3375, 3375, 2985, 3375, + 3375, 2985, 3375, 3375, 3375, 3375, 2985, 3375, + 3375, 2985, 2985, 2985, 2985, 2985, 2985, 3375, + 3375, 3375, 3375, 3375, 3375, 2985, 3375, 3375, + 2985, 2985, 2985, 2985, 2985, 2985, 3375, 3375, + 2985, 3375, 3375, 3375, 3375, 3375, 2985, 3375, + 3375, 2985, 3375, 2985, 3375, 3375, 3375, 2985, + 3375, 2985, 3375, 3375, 3375, 3375, 3375, 2985, + 3375, 2985, 3375, 3375, 3375, 3375, 2985, 3375, + 2985, 3532, 3533, 3534, 3085, 3087, 3535, 3536, + 3537, 3538, 3539, 3540, 3541, 3542, 3543, 3544, + 3545, 3546, 3547, 3548, 3549, 3550, 3551, 3552, + 3086, 2985, 3375, 2985, 3375, 3375, 3375, 3375, + 3375, 2985, 3375, 3375, 3375, 2985, 3375, 2985, + 3375, 3375, 2985, 3375, 3375, 2985, 3375, 2985, + 2985, 2985, 3375, 3375, 2985, 3375, 2985, 3375, + 3375, 2985, 3375, 2985, 3375, 3375, 3375, 2985, + 3375, 2985, 3375, 3375, 2985, 2985, 2985, 3375, + 3375, 3375, 2985, 3375, 2985, 3375, 2985, 3375, + 3375, 3375, 3375, 3375, 2985, 3375, 3375, 2985, + 3553, 3554, 3555, 3556, 3557, 3558, 2985, 3375, + 3375, 2985, 3375, 3375, 2985, 3375, 2985, 3375, + 2985, 3375, 2985, 3375, 2985, 3559, 3560, 3114, + 3086, 3115, 3051, 3052, 2985, 3375, 2985, 3375, + 2985, 3561, 3562, 3563, 3564, 3565, 3566, 3567, + 3568, 3569, 3570, 3571, 3572, 3573, 3574, 3575, + 3085, 3086, 2985, 3375, 3375, 2985, 3375, 2985, + 3375, 2985, 3375, 3375, 3375, 3375, 2985, 3375, + 3375, 2985, 2985, 2985, 3375, 3375, 2985, 3375, + 3053, 2985, 3375, 3375, 2985, 2985, 2985, 3375, + 3375, 2985, 3375, 3375, 3375, 2985, 3375, 3375, + 3375, 3375, 2985, 3375, 3375, 3375, 2985, 3375, + 3375, 2985, 3576, 3577, 3557, 3578, 3579, 2985, + 3375, 2985, 3375, 3375, 2985, 3375, 3053, 2985, + 3053, 3053, 3053, 3053, 3375, 2985, 3580, 3581, + 3582, 3583, 3584, 3585, 2985, 3586, 3587, 3588, + 3589, 3590, 2985, 3375, 2985, 3375, 2985, 3375, + 2985, 3375, 3375, 3375, 3375, 3375, 2985, 3375, + 2985, 3591, 3592, 3593, 3594, 
3595, 3596, 3597, + 3598, 3599, 3600, 3601, 3602, 3603, 3604, 3605, + 3602, 3606, 3607, 3608, 2985, 3375, 3375, 2985, + 2985, 3375, 2985, 2985, 2985, 3375, 3375, 3375, + 2985, 3375, 2985, 3375, 3375, 2985, 2985, 2985, + 3375, 3375, 2985, 3375, 2985, 3375, 3375, 3375, + 2985, 3375, 3375, 3375, 3375, 3375, 3375, 3375, + 2985, 3375, 2985, 2985, 3375, 3375, 3375, 2985, + 2985, 2985, 3375, 2985, 3375, 3375, 2985, 3375, + 2985, 3609, 3610, 3611, 3612, 2985, 3375, 2985, + 3375, 2985, 3375, 2985, 3375, 2985, 3613, 2985, + 3375, 3375, 2985, 3614, 3615, 3616, 3617, 3618, + 3619, 2985, 3375, 3375, 2985, 2985, 2985, 2985, + 3375, 3375, 2985, 3375, 3375, 2985, 2985, 2985, + 3375, 3375, 3375, 3375, 2985, 3620, 2985, 3375, + 2985, 3621, 2985, 3622, 3623, 3624, 3626, 3625, + 2985, 3375, 3375, 2985, 2985, 3375, 3375, 3376, + 3627, 2, 3376, 3627, 3376, 3376, 3627, 3376, + 3376, 3627, 3376, 3376, 3376, 3627, 3376, 3376, + 3376, 3627, 3376, 3376, 3627, 3376, 3376, 3376, + 3376, 3627, 3376, 3376, 3376, 3627, 3627, 3376, + 3376, 3627, 3376, 3627, 3628, 3629, 3630, 3631, + 3632, 3634, 3635, 3636, 3638, 3639, 3640, 3641, + 3642, 3643, 3644, 3645, 3646, 3647, 3648, 3649, + 3650, 3651, 3652, 3653, 3654, 3655, 3633, 3637, + 3627, 3376, 3376, 3376, 3376, 3627, 3376, 3627, + 3376, 3627, 3627, 3627, 3376, 3627, 3627, 3627, + 3376, 3376, 3376, 3376, 3627, 3627, 3627, 3627, + 3627, 3627, 3376, 3627, 3627, 3627, 3627, 3627, + 3627, 3376, 3627, 3627, 3627, 3627, 3376, 3376, + 3376, 3376, 3627, 3376, 3376, 3376, 3376, 3376, + 3627, 3376, 3376, 3627, 3376, 3376, 3376, 3376, + 3627, 3376, 3376, 3627, 3627, 3627, 3627, 3627, + 3627, 3376, 3376, 3376, 3376, 3376, 3376, 3627, + 3376, 3376, 3627, 3627, 3627, 3627, 3627, 3627, + 3376, 3376, 3627, 3376, 3376, 3376, 3376, 3376, + 3627, 3376, 3376, 3627, 3376, 3627, 3376, 3376, + 3376, 3627, 3376, 3627, 3376, 3376, 3376, 3376, + 3376, 3627, 3376, 3627, 3376, 3376, 3376, 3376, + 3627, 3376, 3627, 3656, 3657, 3658, 3659, 3660, + 3661, 3662, 3663, 3664, 3665, 
3666, 3667, 3668, + 3669, 3670, 3671, 3672, 3673, 3674, 3675, 3676, + 3627, 3376, 3627, 3376, 3376, 3376, 3376, 3376, + 3627, 3376, 3376, 3376, 3627, 3376, 3627, 3376, + 3376, 3627, 3376, 3376, 3627, 3376, 3627, 3627, + 3627, 3376, 3376, 3627, 3376, 3627, 3376, 3376, + 3627, 3376, 3627, 3376, 3376, 3376, 3627, 3376, + 3627, 3376, 3376, 3627, 3627, 3627, 3376, 3376, + 3376, 3627, 3376, 3627, 3376, 3627, 3376, 3376, + 3376, 3376, 3376, 3627, 3376, 3376, 3627, 3677, + 3678, 3679, 3680, 3681, 3682, 3627, 3376, 3376, + 571, 3627, 3627, 3627, 3627, 3376, 3627, 571, + 3376, 3627, 3376, 3627, 3376, 3627, 3376, 3627, + 3683, 3684, 3685, 3686, 3687, 3688, 3689, 3627, + 3376, 3376, 3627, 3376, 3376, 3627, 2, 2, + 3376, 3376, 2, 3376, 2, 2, 3376, 3690, + 3691, 3692, 3693, 3694, 3695, 3696, 3697, 3698, + 3699, 3700, 3701, 3702, 3703, 3704, 3627, 3376, + 3376, 3627, 3376, 3627, 3376, 3627, 3376, 3376, + 3376, 3376, 3627, 3376, 3376, 3627, 3627, 3627, + 3376, 3376, 3627, 3376, 3627, 3376, 3376, 3627, + 3627, 3627, 3376, 3376, 3627, 3376, 3376, 3376, + 3627, 3376, 3376, 3376, 3376, 3627, 3376, 3376, + 3376, 3627, 3376, 3376, 3627, 3705, 3706, 1124, + 3681, 1125, 3707, 3708, 3709, 3627, 3376, 3627, + 3376, 3376, 571, 3627, 3376, 3627, 3627, 3376, + 3376, 3627, 3710, 3711, 3712, 3713, 3714, 3715, + 3627, 3716, 3717, 3718, 3719, 3720, 3627, 3376, + 3627, 3376, 3627, 3376, 3627, 3376, 3376, 3376, + 3376, 3376, 3627, 3376, 3627, 3721, 3722, 3723, + 3724, 3725, 3726, 3727, 3728, 3729, 3730, 3731, + 3732, 3733, 3734, 3735, 3732, 3736, 3737, 3738, + 3627, 3376, 3376, 3627, 3627, 3376, 3627, 3627, + 3627, 3376, 3376, 3376, 3627, 3376, 3627, 3376, + 3376, 3627, 3627, 3627, 3376, 3376, 3627, 3376, + 3627, 3376, 3376, 3376, 3627, 3376, 3376, 3376, + 3376, 3376, 3376, 3376, 3627, 3376, 3627, 3627, + 3376, 3376, 3376, 3627, 3627, 3627, 3376, 3627, + 3376, 3376, 3627, 3376, 3627, 3739, 3740, 3741, + 3742, 3627, 3376, 3627, 3376, 3627, 3376, 3627, + 3376, 3627, 3743, 3744, 3627, 3376, 3627, 3376, 
+ 3376, 3627, 3745, 3746, 3747, 3748, 3749, 3750, + 3627, 3376, 3376, 3627, 3627, 3627, 3627, 3376, + 3376, 3627, 3376, 3376, 3627, 3627, 3627, 3376, + 3376, 3376, 3376, 3627, 3751, 3627, 3376, 3627, + 3752, 3627, 3753, 3754, 3688, 3756, 3755, 3627, + 3376, 3376, 3627, 3627, 3376, 3376, 3757, 0, + 3757, 3758, 3757, 3758, 3758, 3757, 3757, 3758, + 3757, 3757, 3758, 3757, 3757, 3757, 3758, 3757, + 3757, 3757, 3758, 3757, 3757, 3758, 3757, 3757, + 3757, 3757, 3758, 3757, 3757, 3757, 3758, 3758, + 3757, 3757, 3758, 3757, 3758, 3759, 3760, 3761, + 3762, 3763, 3765, 3766, 3767, 3769, 3770, 3771, + 3772, 3773, 3774, 3775, 3776, 3777, 3778, 3779, + 3780, 3781, 3782, 3783, 3784, 3785, 3786, 3764, + 3768, 3758, 3757, 3757, 3757, 3757, 3758, 3757, + 3758, 3757, 3758, 3758, 3758, 3757, 3758, 3758, + 3758, 3757, 3757, 3757, 3757, 3758, 3758, 3758, + 3758, 3758, 3758, 3757, 3758, 3758, 3758, 3758, + 3758, 3758, 3757, 3758, 3758, 3758, 3758, 3757, + 3757, 3757, 3757, 3758, 3757, 3757, 3757, 3757, + 3757, 3758, 3757, 3757, 3758, 3757, 3757, 3757, + 3757, 3758, 3757, 3757, 3758, 3758, 3758, 3758, + 3758, 3758, 3757, 3757, 3757, 3757, 3757, 3757, + 3758, 3757, 3757, 3758, 3758, 3758, 3758, 3758, + 3758, 3757, 3757, 3758, 3757, 3757, 3757, 3757, + 3757, 3758, 3757, 3757, 3758, 3757, 3758, 3757, + 3757, 3757, 3758, 3757, 3758, 3757, 3757, 3757, + 3757, 3757, 3758, 3757, 3758, 3757, 3757, 3757, + 3757, 3758, 3757, 3758, 3787, 3788, 3789, 3790, + 3791, 3792, 3793, 3794, 3795, 3796, 3797, 3798, + 3799, 3800, 3801, 3802, 3803, 3804, 3805, 3806, + 3807, 3758, 3757, 3758, 3757, 3757, 3757, 3757, + 3757, 3758, 3757, 3757, 3757, 3758, 3757, 3758, + 3757, 3757, 3758, 3757, 3757, 3758, 3757, 3758, + 3758, 3758, 3757, 3757, 3758, 3757, 3758, 3757, + 3757, 3758, 3757, 3758, 3757, 3757, 3757, 3758, + 3757, 3758, 3757, 3757, 3758, 3758, 3758, 3757, + 3757, 3757, 3758, 3757, 3758, 3757, 3758, 3757, + 3757, 3757, 3757, 3757, 3758, 3757, 3757, 3758, + 3808, 3809, 3810, 3811, 3812, 3813, 3758, 3757, + 
3757, 3758, 3757, 3757, 3758, 3757, 3758, 3757, + 3758, 3757, 3758, 3757, 3758, 3814, 3815, 3758, + 3757, 3758, 3757, 3758, 3816, 3817, 3818, 3819, + 3820, 3821, 3822, 3823, 3824, 3825, 3826, 3827, + 3828, 3829, 3830, 3758, 3757, 3757, 3758, 3757, + 3758, 3757, 3758, 3757, 3757, 3757, 3757, 3758, + 3757, 3757, 3758, 3758, 3758, 3757, 3757, 3758, + 3757, 3758, 3757, 3757, 3758, 3758, 3758, 3757, + 3757, 3758, 3757, 3757, 3757, 3758, 3757, 3757, + 3757, 3757, 3758, 3757, 3757, 3757, 3758, 3757, + 3757, 3758, 3831, 3832, 3812, 3817, 3833, 3758, + 3757, 3758, 3757, 3757, 3758, 3757, 3758, 3834, + 3835, 3836, 3837, 3838, 3839, 3758, 3840, 3841, + 3842, 3843, 3844, 3758, 3757, 3758, 3757, 3758, + 3757, 3758, 3757, 3757, 3757, 3757, 3757, 3758, + 3757, 3758, 3845, 3846, 3847, 3848, 3849, 3850, + 3851, 3852, 3853, 3854, 3855, 3856, 3857, 3858, + 3859, 3856, 3860, 3861, 3862, 3758, 3757, 3757, + 3758, 3758, 3757, 3758, 3758, 3758, 3757, 3757, + 3757, 3758, 3757, 3758, 3757, 3757, 3758, 3758, + 3758, 3757, 3757, 3758, 3757, 3758, 3757, 3757, + 3757, 3758, 3757, 3757, 3757, 3757, 3757, 3757, + 3757, 3758, 3757, 3758, 3758, 3757, 3757, 3757, + 3758, 3758, 3758, 3757, 3758, 3757, 3757, 3758, + 3757, 3758, 3863, 3864, 3865, 3866, 3758, 3757, + 3758, 3757, 3758, 3757, 3758, 3757, 3758, 3867, + 3758, 3757, 3757, 3758, 3868, 3869, 3870, 3871, + 3872, 3873, 3758, 3757, 3757, 3758, 3758, 3758, + 3758, 3757, 3757, 3758, 3757, 3757, 3758, 3758, + 3758, 3757, 3757, 3757, 3757, 3758, 3874, 3758, + 3757, 3758, 3875, 3758, 3876, 3877, 3878, 3880, + 3879, 3758, 3757, 3757, 3758, 3758, 3757, 3757, + 0, 2395, 3376, 3376, 3757, 3881, 3250, 3881, + 3251, 3881, 3251, 3251, 3881, 3881, 3251, 3881, + 3881, 3251, 3881, 3881, 3881, 3251, 3881, 3881, + 3881, 3251, 3881, 3881, 3251, 3881, 3881, 3881, + 3881, 3251, 3881, 3881, 3881, 3251, 3251, 3881, + 3881, 3251, 3881, 3251, 3882, 3883, 3884, 3885, + 3886, 3888, 3889, 3890, 3892, 3893, 3894, 3895, + 3896, 3897, 3898, 3899, 3900, 3901, 3902, 3903, + 
3904, 3905, 3906, 3907, 3908, 3909, 3887, 3891, + 3251, 3881, 3881, 3881, 3881, 3251, 3881, 3251, + 3881, 3251, 3251, 3251, 3881, 3251, 3251, 3251, + 3881, 3881, 3881, 3881, 3251, 3251, 3251, 3251, + 3251, 3251, 3881, 3251, 3251, 3251, 3251, 3251, + 3251, 3881, 3251, 3251, 3251, 3251, 3881, 3881, + 3881, 3881, 3251, 3881, 3881, 3881, 3881, 3881, + 3251, 3881, 3881, 3251, 3881, 3881, 3881, 3881, + 3251, 3881, 3881, 3251, 3251, 3251, 3251, 3251, + 3251, 3881, 3881, 3881, 3881, 3881, 3881, 3251, + 3881, 3881, 3251, 3251, 3251, 3251, 3251, 3251, + 3881, 3881, 3251, 3881, 3881, 3881, 3881, 3881, + 3251, 3881, 3881, 3251, 3881, 3251, 3881, 3881, + 3881, 3251, 3881, 3251, 3881, 3881, 3881, 3881, + 3881, 3251, 3881, 3251, 3881, 3881, 3881, 3881, + 3251, 3881, 3251, 3910, 3911, 3912, 3913, 3914, + 3915, 3916, 3917, 3918, 3919, 3920, 3921, 3922, + 3923, 3924, 3925, 3926, 3927, 3928, 3929, 3930, + 3251, 3881, 3251, 3881, 3881, 3881, 3881, 3881, + 3251, 3881, 3881, 3881, 3251, 3881, 3251, 3881, + 3881, 3251, 3881, 3881, 3251, 3881, 3251, 3251, + 3251, 3881, 3881, 3251, 3881, 3251, 3881, 3881, + 3251, 3881, 3251, 3881, 3881, 3881, 3251, 3881, + 3251, 3881, 3881, 3251, 3251, 3251, 3881, 3881, + 3881, 3251, 3881, 3251, 3881, 3251, 3881, 3881, + 3881, 3881, 3881, 3251, 3881, 3881, 3251, 3931, + 3932, 3933, 3934, 3935, 3936, 3251, 3881, 3881, + 3251, 3881, 3881, 3251, 3881, 3251, 3881, 3251, + 3881, 3251, 3881, 3251, 3937, 3938, 3251, 3881, + 3251, 3881, 3251, 3939, 3940, 3941, 3942, 3943, + 3944, 3945, 3946, 3947, 3948, 3949, 3950, 3951, + 3952, 3953, 3251, 3881, 3881, 3251, 3881, 3251, + 3881, 3251, 3881, 3881, 3881, 3881, 3251, 3881, + 3881, 3251, 3251, 3251, 3881, 3881, 3251, 3881, + 3251, 3881, 3881, 3251, 3251, 3251, 3881, 3881, + 3251, 3881, 3881, 3881, 3251, 3881, 3881, 3881, + 3881, 3251, 3881, 3881, 3881, 3251, 3881, 3881, + 3251, 3954, 3955, 3935, 3940, 3956, 3251, 3881, + 3251, 3881, 3881, 3251, 3881, 3251, 3957, 3958, + 3959, 3960, 3961, 3962, 3251, 3963, 3964, 3965, + 
3966, 3967, 3251, 3881, 3251, 3881, 3251, 3881, + 3251, 3881, 3881, 3881, 3881, 3881, 3251, 3881, + 3251, 3968, 3969, 3970, 3971, 3972, 3973, 3974, + 3975, 3976, 3977, 3978, 3979, 3980, 3981, 3982, + 3979, 3983, 3984, 3985, 3251, 3881, 3881, 3251, + 3251, 3881, 3251, 3251, 3251, 3881, 3881, 3881, + 3251, 3881, 3251, 3881, 3881, 3251, 3251, 3251, + 3881, 3881, 3251, 3881, 3251, 3881, 3881, 3881, + 3251, 3881, 3881, 3881, 3881, 3881, 3881, 3881, + 3251, 3881, 3251, 3251, 3881, 3881, 3881, 3251, + 3251, 3251, 3881, 3251, 3881, 3881, 3251, 3881, + 3251, 3986, 3987, 3988, 3989, 3251, 3881, 3251, + 3881, 3251, 3881, 3251, 3881, 3251, 3990, 3251, + 3881, 3881, 3251, 3991, 3992, 3993, 3994, 3995, + 3996, 3251, 3881, 3881, 3251, 3251, 3251, 3251, + 3881, 3881, 3251, 3881, 3881, 3251, 3251, 3251, + 3881, 3881, 3881, 3881, 3251, 3997, 3251, 3881, + 3251, 3998, 3251, 3999, 4000, 4001, 4003, 4002, + 3251, 3881, 3881, 3251, 3251, 3881, 3881, 3881, + 3250, 3250, 3881, 3881, 3250, 3881, 3881, 3250, + 3881, 3881, 3881, 3250, 3881, 3881, 3881, 3250, + 3881, 3881, 3250, 3881, 3881, 3881, 3881, 3250, + 3881, 3881, 3881, 3250, 3250, 3881, 3881, 3250, + 3881, 3250, 4004, 4005, 4006, 4007, 4008, 4010, + 4011, 4012, 4014, 4015, 4016, 4017, 4018, 4019, + 4020, 4021, 4022, 4023, 4024, 4025, 4026, 4027, + 4028, 4029, 4030, 4031, 4009, 4013, 3250, 4032, + 4033, 4034, 4035, 4036, 4037, 4038, 4039, 4040, + 4041, 4042, 4043, 4044, 4045, 4046, 4047, 4048, + 4049, 4050, 4051, 4052, 3250, 4053, 4054, 4055, + 4056, 4057, 4058, 3250, 4059, 4060, 3250, 4061, + 4062, 4063, 4064, 4065, 4066, 4067, 4068, 4069, + 4070, 4071, 4072, 4073, 4074, 4075, 3250, 4076, + 4077, 4057, 4062, 4078, 3250, 4079, 4080, 4081, + 4082, 4083, 4084, 3250, 4085, 3250, 0, 3250, + 148, 148, 2395, 2395, 0, 148, 2395, 148, + 0, 2395, 0, 148, 2395, 2395, 2395, 2395, + 0, 148, 2395, 148, 2395, 0, 148, 148, + 0, 126, 0, 148, 0, 2395, 126, 148, + 2395, 148, 0, 2395, 0, 2984, 0, 148, + 2395, 148, 2395, 0, 148, 0, 126, 0, + 0, 126, 0, 
2395, 148, 2395, 0, 2395, + 148, 2395, 126, 2395, 0, 2395, 148, 2395, + 148, 2395, 0, 148, 2395, 2395, 126, 0, + 4086, 2984, 148, 4086, 148, 181, 148, 2985, + 4086, 2985, 2985, 2985, 148, 2985, 148, 2985, + 4086, 2985, 4086, 148, 2985, 2985, 4086, 2985, + 148, 4086, 148, 181, 4086, 4086, 572, 572, + 2985, 4086, 4086, 4086, 148, 2985, 421, 4086, + 148, 4086, 421, 148, 2985, 2985, 2985, 2985, + 4086, 4086, 421, 2985, 2985, 148, 148, 4086, + 148, 4086, 2985, 2985, 148, 4086, 4086, 2985, + 148, 148, 421, 148, 4086, 148, 2985, 4087, + 4088, 184, 4089, 4090, 4091, 4092, 4093, 4094, + 4095, 4096, 4097, 4098, 4099, 4100, 4101, 4102, + 4103, 4104, 4105, 4106, 4107, 4108, 4109, 4110, + 4111, 4112, 4113, 4114, 4115, 4116, 4117, 2985, + 148, 148, 148, 148, 4086, 2985, 148, 4086, + 2985, 4086, 2985, 148, 148, 4086, 148, 2985, + 148, 2985, 421, 148, 4086, 2985, 2985, 2985, + 4086, 4086, 2985, 2985, 2985, 2985, 4086, 148, + 148, 2985, 2985, 2985, 2985, 2985, 148, 2985, + 421, 148, 2985, 4086, 4086, 4086, 148, 148, + 148, 148, 148, 148, 148, 4086, 2985, 2985, + 2985, 2985, 2985, 2985, 148, 2985, 421, 148, + 2985, 4086, 148, 4086, 148, 148, 148, 148, + 148, 148, 4086, 2985, 2985, 2985, 148, 148, + 2985, 148, 2985, 421, 2985, 4086, 148, 4086, + 148, 148, 148, 148, 148, 148, 4086, 2985, + 148, 4086, 4086, 4086, 4086, 148, 148, 4086, + 421, 2985, 4086, 148, 148, 148, 148, 148, + 148, 148, 148, 148, 148, 4086, 2985, 148, + 4086, 4086, 4086, 4086, 421, 2985, 148, 4086, + 148, 148, 148, 148, 4086, 2985, 2985, 2985, + 2985, 2985, 148, 2985, 148, 2985, 421, 2985, + 4086, 148, 4086, 148, 148, 148, 148, 148, + 4086, 2985, 148, 4086, 4086, 4086, 4086, 148, + 4086, 421, 148, 2985, 148, 4086, 148, 148, + 148, 4086, 2985, 2985, 2985, 148, 2985, 2985, + 148, 2985, 421, 2985, 148, 2985, 4086, 148, + 4086, 148, 148, 148, 2985, 4086, 4086, 148, + 4086, 4086, 421, 4086, 2985, 4086, 4086, 2985, + 4086, 421, 2985, 4086, 4086, 4086, 2985, 4086, + 421, 2985, 148, 4086, 4086, 4086, 4086, 421, + 4086, 
2985, 148, 148, 4086, 2985, 4086, 4086, + 148, 4086, 4086, 2985, 4086, 2985, 4118, 4119, + 4120, 217, 3020, 3022, 218, 219, 220, 221, + 4121, 223, 224, 225, 226, 227, 228, 4122, + 4123, 4124, 4125, 4126, 234, 4127, 236, 4128, + 483, 484, 4129, 4130, 4131, 4132, 4133, 4134, + 4135, 4136, 646, 4137, 151, 152, 4138, 249, + 250, 251, 252, 3021, 151, 153, 151, 2985, + 4086, 2985, 421, 4086, 4086, 4086, 4086, 4086, + 2985, 4086, 4086, 421, 4086, 148, 2985, 2985, + 4086, 2985, 148, 148, 148, 4086, 148, 4086, + 2985, 148, 4086, 148, 148, 4086, 2985, 4086, + 2985, 2985, 2985, 421, 2985, 4086, 4086, 421, + 148, 2985, 4086, 148, 148, 2985, 148, 4086, + 4086, 2985, 148, 4086, 2985, 4086, 4086, 4086, + 2985, 421, 421, 4086, 2985, 4086, 148, 4086, + 2985, 148, 2985, 421, 2985, 2985, 4086, 4086, + 148, 4086, 148, 421, 148, 2985, 4086, 2985, + 148, 148, 4086, 2985, 4086, 4086, 4086, 148, + 4086, 148, 4086, 2985, 4086, 4086, 2985, 4139, + 4140, 255, 4141, 257, 258, 259, 260, 261, + 262, 263, 151, 4142, 265, 4143, 267, 4144, + 269, 2985, 181, 181, 4086, 181, 4086, 571, + 2985, 2985, 148, 148, 2985, 2985, 4086, 2985, + 571, 4086, 2985, 2985, 4086, 2985, 148, 4086, + 2985, 2985, 148, 148, 148, 148, 148, 4086, + 2985, 4145, 4146, 3049, 3021, 3050, 3051, 3052, + 2985, 148, 4086, 148, 2985, 4086, 2985, 153, + 274, 275, 153, 657, 4147, 4148, 4149, 280, + 281, 282, 4150, 284, 4151, 4152, 4153, 4154, + 4155, 4156, 4157, 4158, 4159, 4160, 295, 296, + 152, 4161, 3196, 151, 3021, 2985, 148, 148, + 4086, 4086, 2985, 148, 4086, 148, 2985, 4086, + 2985, 148, 4086, 4086, 4086, 4086, 2985, 148, + 4086, 148, 4086, 2985, 148, 148, 2985, 421, + 2985, 148, 2985, 4086, 421, 148, 4086, 148, + 2985, 4086, 2985, 2984, 2985, 148, 4086, 148, + 4086, 2985, 148, 2985, 421, 2985, 2985, 421, + 2985, 4086, 148, 4086, 2985, 4086, 148, 4086, + 421, 4086, 2985, 4086, 4086, 4086, 4086, 2985, + 4086, 148, 4086, 148, 4086, 2985, 148, 4086, + 4086, 421, 2985, 4162, 674, 302, 303, 304, + 305, 306, 307, 4163, 932, 4164, 
934, 312, + 4165, 4166, 151, 2985, 4086, 572, 148, 148, + 572, 572, 572, 2985, 181, 4086, 4086, 571, + 2985, 4086, 2985, 148, 4086, 2984, 2985, 2984, + 2984, 2984, 2984, 4086, 2985, 4167, 4168, 317, + 318, 319, 4169, 4170, 4171, 4172, 324, 2985, + 325, 326, 153, 327, 328, 4173, 330, 4174, + 332, 4175, 334, 335, 153, 151, 690, 337, + 338, 153, 339, 340, 341, 342, 343, 344, + 345, 346, 4176, 348, 349, 4177, 351, 352, + 353, 153, 259, 151, 354, 2985, 4086, 2985, + 4086, 2985, 148, 2985, 4086, 2985, 148, 148, + 4086, 4086, 4086, 4086, 148, 148, 148, 4086, + 2985, 148, 148, 4086, 2985, 4178, 4179, 4180, + 696, 4181, 4182, 4183, 4184, 4185, 364, 4186, + 4187, 4188, 4189, 4190, 4191, 4192, 4189, 4193, + 4194, 552, 4195, 375, 712, 377, 2985, 4086, + 148, 4086, 2985, 2985, 421, 2985, 4086, 148, + 2985, 2985, 4086, 4086, 148, 4086, 421, 2985, + 4086, 148, 148, 2985, 4086, 148, 4086, 2985, + 2985, 148, 2985, 2985, 421, 148, 2985, 4086, + 148, 148, 4086, 2985, 4086, 2985, 421, 2985, + 148, 148, 4086, 148, 148, 148, 148, 148, + 148, 4086, 2985, 148, 4086, 4086, 4086, 4086, + 148, 4086, 4086, 4086, 2985, 148, 4086, 2985, + 2985, 148, 2985, 421, 2985, 4086, 148, 4086, + 4086, 2985, 2985, 148, 2985, 4086, 148, 2985, + 421, 2985, 4086, 148, 4086, 2985, 4086, 421, + 2985, 153, 382, 713, 4196, 4197, 716, 386, + 153, 4198, 4199, 151, 2985, 148, 4086, 2985, + 148, 4086, 2985, 2985, 4086, 2985, 148, 4086, + 148, 2985, 153, 389, 4200, 2985, 148, 148, + 4086, 4086, 2985, 4201, 4202, 4203, 153, 394, + 395, 396, 397, 398, 399, 400, 401, 402, + 403, 723, 4204, 4205, 4206, 151, 2985, 4086, + 4086, 2985, 2985, 2985, 2985, 4086, 4086, 2985, + 4086, 4086, 2985, 2985, 2985, 4086, 4086, 4086, + 4086, 2985, 153, 4207, 409, 410, 411, 151, + 2985, 2985, 4086, 2985, 148, 4208, 2985, 4209, + 4210, 4211, 4213, 4212, 2985, 4086, 4086, 2985, + 2985, 4086, 4086, 2985, 4086, 4086, 2984, 2984, + 4086, 4086, 2984, 4086, 4086, 2984, 4086, 4086, + 4086, 2984, 4086, 4086, 4086, 2984, 4086, 4086, + 2984, 4086, 
4086, 4086, 4086, 2984, 4086, 4086, + 4086, 2984, 2984, 4086, 4086, 2984, 4086, 2984, + 4214, 4215, 4216, 4217, 4218, 4220, 4221, 4222, + 4224, 4225, 4226, 4227, 4228, 4229, 4230, 4231, + 4232, 4233, 4234, 4235, 4236, 4237, 4238, 4239, + 4240, 4241, 4219, 4223, 2984, 4242, 4243, 4244, + 4245, 4246, 4247, 4248, 4249, 4250, 4251, 4252, + 4253, 4254, 4255, 4256, 4257, 4258, 4259, 4260, + 4261, 4262, 2984, 4263, 4264, 4265, 4266, 4267, + 4268, 2984, 4269, 4270, 2984, 4271, 4272, 4273, + 4274, 4275, 4276, 4277, 4278, 4279, 4280, 4281, + 4282, 4283, 4284, 4285, 2984, 4286, 4287, 4267, + 4272, 4288, 2984, 4289, 4290, 4291, 4292, 4293, + 4294, 2984, 4295, 2984, 0, 3250, 0, 3250, + 2395, 572, 148, 148, 572, 572, 572, 0, + 2395, 2395, 2136, 0, 2395, 0, 148, 148, + 3376, 0, 2395, 2984, 0, 3376, 2984, 2984, + 2984, 2984, 2395, 0, 325, 326, 153, 327, + 328, 2477, 330, 4296, 332, 4297, 334, 335, + 153, 151, 1581, 337, 338, 153, 339, 340, + 341, 342, 343, 344, 345, 346, 4298, 348, + 349, 4299, 351, 352, 353, 153, 259, 151, + 354, 0, 2395, 0, 148, 0, 2395, 0, + 148, 148, 2395, 2395, 2395, 2395, 148, 148, + 148, 2395, 0, 148, 148, 2395, 0, 4300, + 4301, 4302, 1587, 4303, 4304, 4305, 4306, 4307, + 364, 4308, 4309, 4310, 4311, 4312, 4313, 4314, + 4311, 4315, 4316, 1602, 4317, 375, 1604, 377, + 0, 2395, 148, 2395, 0, 0, 126, 0, + 2395, 148, 0, 0, 2395, 2395, 148, 2395, + 126, 0, 2395, 148, 148, 0, 2395, 148, + 2395, 0, 0, 148, 0, 0, 126, 148, + 0, 2395, 148, 148, 2395, 0, 2395, 0, + 126, 0, 148, 148, 2395, 148, 148, 148, + 148, 148, 148, 2395, 0, 148, 2395, 2395, + 2395, 2395, 148, 2395, 2395, 2395, 0, 148, + 2395, 0, 0, 148, 0, 126, 0, 2395, + 148, 2395, 2395, 0, 0, 148, 0, 2395, + 148, 0, 126, 0, 2395, 148, 2395, 0, + 2395, 126, 0, 153, 382, 1605, 4318, 4319, + 1608, 386, 153, 4320, 4321, 151, 0, 148, + 2395, 0, 148, 2395, 0, 0, 2395, 0, + 148, 2395, 148, 0, 4322, 153, 389, 4323, + 0, 3376, 3757, 0, 148, 148, 2395, 2395, + 0, 2505, 2506, 2507, 153, 394, 395, 396, + 397, 398, 399, 
400, 401, 402, 403, 1615, + 2508, 2509, 2510, 151, 0, 153, 4324, 409, + 410, 411, 151, 0, 0, 2395, 0, 148, + 412, 413, 414, 4325, 4326, 0, 4327, 2, + 4327, 4328, 4328, 4327, 4328, 4327, 4327, 4328, + 4327, 4327, 4328, 4327, 4327, 4327, 4328, 4327, + 4327, 4327, 4328, 4327, 4327, 4328, 4327, 4327, + 4327, 4327, 4328, 4327, 4327, 4327, 4328, 4328, + 4327, 4327, 4328, 4327, 4328, 4329, 4330, 4331, + 4332, 4333, 4335, 4336, 4337, 4339, 4340, 4341, + 4342, 4343, 4344, 4345, 4346, 4347, 4348, 4349, + 4350, 4351, 4352, 4353, 4354, 4355, 4356, 4334, + 4338, 4328, 4327, 4327, 4327, 4327, 4328, 4327, + 4328, 4327, 4328, 4328, 4328, 4327, 4328, 4328, + 4328, 4327, 4327, 4327, 4327, 4328, 4328, 4328, + 4328, 4328, 4328, 4327, 4328, 4328, 4328, 4328, + 4328, 4328, 4327, 4328, 4328, 4328, 4328, 4327, + 4327, 4327, 4327, 4328, 4327, 4327, 4327, 4327, + 4327, 4328, 4327, 4327, 4328, 4327, 4327, 4327, + 4327, 4328, 4327, 4327, 4328, 4328, 4328, 4328, + 4328, 4328, 4327, 4327, 4327, 4327, 4327, 4327, + 4328, 4327, 4327, 4328, 4328, 4328, 4328, 4328, + 4328, 4327, 4327, 4328, 4327, 4327, 4327, 4327, + 4327, 4328, 4327, 4327, 4328, 4327, 4328, 4327, + 4327, 4327, 4328, 4327, 4328, 4327, 4327, 4327, + 4327, 4327, 4328, 4327, 4328, 4327, 4327, 4327, + 4327, 4328, 4327, 4328, 4357, 4358, 4359, 4360, + 4361, 4362, 4363, 4364, 4365, 4366, 4367, 4368, + 4369, 4370, 4371, 4372, 4373, 4374, 4375, 4376, + 4377, 4328, 4327, 4328, 4327, 4327, 4327, 4327, + 4327, 4328, 4327, 4327, 4327, 4328, 4327, 4328, + 4327, 4327, 4328, 4327, 4327, 4328, 4327, 4328, + 4328, 4328, 4327, 4327, 4328, 4327, 4328, 4327, + 4327, 4328, 4327, 4328, 4327, 4327, 4327, 4328, + 4327, 4328, 4327, 4327, 4328, 4328, 4328, 4327, + 4327, 4327, 4328, 4327, 4328, 4327, 4328, 4327, + 4327, 4327, 4327, 4327, 4328, 4327, 4327, 4328, + 4378, 4379, 4380, 4381, 4382, 4383, 4328, 4327, + 4327, 4328, 4327, 4327, 4328, 4327, 4328, 4327, + 4328, 4327, 4328, 4327, 4328, 4384, 4385, 4328, + 4327, 4328, 4327, 4328, 4386, 4387, 4388, 4389, + 
4390, 4391, 4392, 4393, 4394, 4395, 4396, 4397, + 4398, 4399, 4400, 4328, 4327, 4327, 4328, 4327, + 4328, 4327, 4328, 4327, 4327, 4327, 4327, 4328, + 4327, 4327, 4328, 4328, 4328, 4327, 4327, 4328, + 4327, 4328, 4327, 4327, 4328, 4328, 4328, 4327, + 4327, 4328, 4327, 4327, 4327, 4328, 4327, 4327, + 4327, 4327, 4328, 4327, 4327, 4327, 4328, 4327, + 4327, 4328, 4401, 4402, 4382, 4387, 4403, 4328, + 4327, 4328, 4327, 4327, 4328, 4327, 4328, 4404, + 4405, 4406, 4407, 4408, 4409, 4410, 4328, 4411, + 4412, 4413, 4414, 4415, 4328, 4327, 4328, 4327, + 4328, 4327, 4328, 4327, 4327, 4327, 4327, 4327, + 4328, 4327, 4328, 4416, 4417, 4418, 4419, 4420, + 4421, 4422, 4423, 4424, 4425, 4426, 4427, 4428, + 4429, 4430, 4427, 4431, 4432, 4433, 4328, 4327, + 4327, 4328, 4328, 4327, 4328, 4328, 4328, 4327, + 4327, 4327, 4328, 4327, 4328, 4327, 4327, 4328, + 4328, 4328, 4327, 4327, 4328, 4327, 4328, 4327, + 4327, 4327, 4328, 4327, 4327, 4327, 4327, 4327, + 4327, 4327, 4328, 4327, 4328, 4328, 4327, 4327, + 4327, 4328, 4328, 4328, 4327, 4328, 4327, 4327, + 4328, 4327, 4328, 4434, 4435, 4436, 4437, 4328, + 4327, 4328, 4327, 4328, 4327, 4328, 4327, 4328, + 4438, 4328, 4327, 4327, 4328, 4439, 4440, 4441, + 4442, 4443, 4444, 4328, 4327, 4327, 4328, 4328, + 4328, 4328, 4327, 4327, 4328, 4327, 4327, 4328, + 4328, 4328, 4327, 4327, 4327, 4327, 4328, 4445, + 4328, 4327, 4328, 4325, 4328, 4446, 4328, 4447, + 4448, 4449, 4451, 4450, 4328, 4327, 4327, 4328, + 4328, 4327, 4327, 3757, 0, 3371, 3372, 0, + 3372, 4452, 3371, 3372, 0, 3250, 4453, 3371, + 4454, 3372, 0, 3250, 0, 3250, 4455, 0, + 3372, 0, 3250, 3371, 4456, 3372, 0, 0, + 3250, 4458, 4460, 4463, 4464, 4465, 4467, 4468, + 4469, 4470, 4471, 4472, 4473, 4474, 4475, 4476, + 4477, 4478, 4479, 4480, 4481, 4482, 4483, 4484, + 4485, 4486, 4487, 4488, 4489, 4490, 4492, 4493, + 4495, 4496, 4497, 4498, 4459, 4461, 4462, 4462, + 4466, 4491, 4494, 4457, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 
4514, 4515, 4516, 4517, + 4518, 4499, 4520, 4519, 1626, 1626, 1626, 1626, + 1485, 4522, 150, 152, 153, 154, 2005, 4523, + 157, 158, 4524, 160, 161, 4525, 4526, 4527, + 4528, 4529, 4530, 4531, 4532, 4533, 4534, 4535, + 4536, 4537, 4538, 177, 4539, 4540, 4541, 126, + 148, 148, 151, 176, 4521, 1626, 1626, 1626, + 1626, 1485, 4542, 150, 152, 153, 154, 1753, + 4543, 157, 158, 4544, 160, 161, 4545, 4546, + 4547, 4548, 4549, 4550, 4551, 4552, 4553, 4554, + 4555, 4556, 4557, 4558, 177, 4559, 4560, 4561, + 126, 148, 148, 151, 176, 4521, 181, 181, + 181, 571, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 151, 176, 4562, 443, 443, + 443, 443, 571, 4584, 150, 152, 153, 154, + 1370, 4585, 157, 158, 4586, 160, 161, 4587, + 4588, 4589, 4590, 4591, 4592, 4593, 4594, 4595, + 4596, 4597, 4598, 4599, 4600, 177, 4601, 4602, + 4603, 421, 148, 148, 151, 176, 4562, 571, + 4604, 150, 152, 153, 154, 1241, 4605, 157, + 158, 4606, 160, 161, 4607, 4608, 4609, 4610, + 4611, 4612, 4613, 4614, 4615, 4616, 4617, 4618, + 4619, 4620, 177, 4621, 4622, 4623, 421, 148, + 148, 151, 176, 4562, 594, 595, 181, 181, + 571, 4624, 150, 152, 153, 4564, 981, 4625, + 4566, 158, 4626, 160, 161, 4627, 4628, 4629, + 4630, 4631, 4632, 4633, 4634, 4635, 4636, 4637, + 4638, 4639, 4640, 177, 4641, 4642, 4643, 421, + 148, 148, 151, 176, 4562, 571, 4644, 150, + 152, 153, 154, 731, 4645, 157, 158, 4646, + 160, 161, 4647, 4648, 4649, 4650, 4651, 4652, + 4653, 4654, 4655, 4656, 4657, 4658, 4659, 4660, + 177, 4661, 4662, 4663, 421, 148, 148, 151, + 176, 4562, 571, 4664, 1047, 1175, 4665, 4666, + 4667, 4668, 4669, 4670, 4671, 4672, 4673, 4674, + 4675, 4676, 4677, 4678, 4679, 4680, 4681, 4682, + 4562, 1485, 4683, 150, 152, 153, 154, 1623, + 4684, 157, 158, 4685, 160, 161, 4686, 4687, + 4688, 4689, 4690, 4691, 4692, 4693, 4694, 4695, + 4696, 4697, 4698, 4699, 177, 4700, 4701, 4702, 
+ 126, 148, 148, 151, 176, 4521, 181, 181, + 181, 571, 4703, 150, 152, 153, 4564, 2133, + 4704, 4566, 158, 4705, 160, 161, 4706, 4707, + 4708, 4709, 4710, 4711, 4712, 4713, 4714, 4715, + 4716, 4717, 4718, 4719, 177, 4720, 4721, 4722, + 421, 148, 148, 151, 176, 4562, 2136, 4723, + 150, 152, 153, 154, 2391, 4724, 157, 158, + 4725, 160, 161, 4726, 4727, 4728, 4729, 4730, + 4731, 4732, 4733, 4734, 4735, 4736, 4737, 4738, + 4739, 177, 4740, 4741, 4742, 126, 148, 148, + 151, 176, 4562, 2136, 4743, 150, 152, 153, + 154, 2263, 4744, 157, 158, 4745, 160, 161, + 4746, 4747, 4748, 4749, 4750, 4751, 4752, 4753, + 4754, 4755, 4756, 4757, 4758, 4759, 177, 4760, + 4761, 4762, 126, 148, 148, 151, 176, 4562, + 3249, 148, 2395, 148, 148, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 4499, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 4763, 4783, 4784, 4785, 4786, 4787, 4788, 4789, + 4790, 4791, 4792, 4793, 4794, 4795, 4796, 4797, + 4798, 4799, 4800, 4801, 4802, 4803, 148, 148, + 148, 4499, 181, 181, 181, 571, 1, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 151, 176, 4562, 181, 181, 181, 571, 4804, + 4805, 4807, 4808, 4809, 4808, 4810, 4811, 4812, + 4813, 4814, 4815, 4816, 4817, 4818, 4819, 4820, + 4821, 4822, 4823, 4824, 4825, 4826, 4827, 4828, + 4829, 4831, 4832, 4833, 4834, 4562, 421, 4562, + 148, 4562, 148, 4562, 4806, 4830, 1, 148, + 148, 148, 181, 148, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 4499, 4783, 4784, 4785, 4786, 4787, 4788, + 4789, 4790, 4791, 4792, 4793, 4794, 4795, 4796, + 4797, 4798, 4799, 4800, 4801, 4802, 4803, 148, + 2518, 2518, 2518, 2774, 4563, 150, 152, 
153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 2646, 2008, 2008, 4562, 151, 4562, + 176, 4562, 1, 4835, 150, 152, 153, 154, + 2643, 4836, 157, 158, 4837, 160, 161, 4838, + 4839, 4840, 4841, 4842, 4843, 4844, 4845, 4846, + 4847, 4848, 4849, 4850, 4851, 177, 4852, 4853, + 4854, 148, 148, 151, 176, 4499, 443, 443, + 443, 443, 571, 4855, 150, 152, 153, 154, + 2771, 4856, 157, 158, 4857, 160, 161, 4858, + 4859, 4860, 4861, 4862, 4863, 4864, 4865, 4866, + 4867, 4868, 4869, 4870, 4871, 177, 4872, 4873, + 4874, 421, 148, 148, 151, 176, 4562, 571, + 4875, 150, 152, 153, 154, 2899, 4876, 157, + 158, 4877, 160, 161, 4878, 4879, 4880, 4881, + 4882, 4883, 4884, 4885, 4886, 4887, 4888, 4889, + 4890, 4891, 177, 4892, 4893, 4894, 421, 148, + 148, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 151, 176, 4562, 181, 181, 181, + 571, 1, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 151, 176, 4562, 181, + 181, 181, 571, 1, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 1, 1, 151, + 176, 4562, 181, 181, 181, 571, 1, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 1, 151, 176, 4562, 181, 181, 181, + 571, 1, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 
4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 1, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 1, + 1, 4804, 4805, 4807, 4808, 4809, 4808, 4810, + 4811, 4812, 4813, 4814, 4815, 4816, 4817, 4818, + 4819, 4820, 4821, 4822, 4823, 4824, 4825, 4826, + 4827, 4828, 4829, 4831, 4832, 4833, 4834, 421, + 148, 148, 1, 4806, 1, 4830, 1, 4562, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 148, 4783, + 4784, 4785, 4786, 4787, 4788, 4789, 4790, 4791, + 4792, 4793, 4794, 4795, 4796, 4797, 4798, 4799, + 4800, 4801, 4802, 4803, 148, 4499, 2518, 2518, + 2518, 2774, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 1, 2646, 1, 2008, 1, 2008, 1, 151, + 176, 4562, 181, 181, 181, 571, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 3, 4, 5, 6, 7, 9, 10, 11, + 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, + 29, 30, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 8, 12, 151, 176, 4562, + 181, 181, 181, 571, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, + 
43, 44, 45, 46, 47, 48, 49, 50, + 51, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 151, 176, 4562, 181, 181, 181, + 571, 52, 53, 54, 55, 56, 57, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 151, 176, 4562, 181, 181, 181, 571, 58, + 59, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 151, 176, 4562, 181, 181, 181, + 571, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 151, 176, 4562, 181, 181, 181, 571, + 75, 76, 56, 61, 77, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 151, 176, + 4562, 181, 181, 181, 571, 78, 79, 80, + 81, 82, 83, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 151, 176, 4562, 181, + 181, 181, 571, 119, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 151, 176, 4562, + 181, 4499, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 4499, 4499, 4499, 148, 4499, 4499, 4500, 122, + 124, 4501, 
4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 4499, 148, 181, 148, + 4783, 4784, 4785, 4786, 4787, 4788, 4789, 4790, + 4791, 4792, 4793, 4794, 4795, 4796, 4797, 4798, + 4799, 4800, 4801, 4802, 4803, 148, 148, 148, + 4499, 4499, 4783, 4784, 4785, 4786, 4787, 4788, + 4789, 4790, 4791, 4792, 4793, 4794, 4795, 4796, + 4797, 4798, 4799, 4800, 4801, 4802, 4803, 148, + 4499, 4783, 4784, 4785, 4786, 4787, 4788, 4789, + 4790, 4791, 4792, 4793, 4794, 4795, 4796, 4797, + 4798, 4799, 4800, 4801, 4802, 4803, 148, 4499, + 4783, 4784, 4785, 4786, 4787, 4788, 4789, 4790, + 4791, 4792, 4793, 4794, 4795, 4796, 4797, 4798, + 4799, 4800, 4801, 4802, 4803, 148, 4783, 4784, + 4785, 4786, 4787, 4788, 4789, 4790, 4791, 4792, + 4793, 4794, 4795, 4796, 4797, 4798, 4799, 4800, + 4801, 4802, 4803, 4499, 4499, 148, 4499, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 4499, 4499, 148, 148, + 148, 181, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 148, + 148, 572, 572, 4499, 148, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 148, 148, 148, 4499, 421, 4783, + 4784, 4785, 4786, 4787, 4788, 4789, 4790, 4791, + 4792, 4793, 4794, 4795, 4796, 4797, 4798, 4799, + 4800, 4801, 4802, 4803, 148, 421, 148, 4499, + 4499, 4499, 4499, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 421, 4499, 4499, 148, 4783, 4784, 4785, 4786, + 4787, 4788, 4789, 4790, 4791, 4792, 4793, 4794, + 4795, 4796, 4797, 4798, 4799, 4800, 4801, 4802, + 4803, 148, 4499, 4783, 4784, 4785, 4786, 4787, + 4788, 4789, 4790, 4791, 4792, 4793, 4794, 4795, + 4796, 4797, 4798, 4799, 4800, 4801, 4802, 4803, + 4499, 148, 4500, 122, 124, 4501, 4502, 4503, + 
4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 148, 148, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 421, + 148, 4499, 1373, 1374, 184, 1375, 151, 1376, + 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, + 1385, 1386, 1387, 1388, 1389, 1390, 1391, 1392, + 1393, 1394, 1395, 1396, 1397, 1398, 1399, 1400, + 1401, 1402, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 1403, 1404, 1405, 217, 218, 219, 220, 221, + 1406, 223, 224, 225, 226, 227, 228, 1407, + 1408, 1409, 1410, 1411, 234, 1412, 236, 1413, + 483, 484, 1374, 1414, 1415, 153, 1416, 1417, + 1418, 1419, 646, 1420, 151, 152, 1421, 249, + 250, 251, 252, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 151, 153, 151, 4499, 1422, 1423, 255, 1424, + 257, 258, 259, 260, 261, 262, 263, 151, + 1425, 265, 1426, 267, 1427, 269, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 1428, 1429, 272, 151, + 273, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 4499, 153, + 274, 275, 153, 657, 1430, 153, 302, 280, + 281, 282, 1431, 284, 153, 1432, 1433, 1434, + 153, 1435, 1436, 1437, 1438, 1439, 295, 296, + 152, 1440, 153, 4895, 4896, 4897, 4898, 4899, + 4900, 4901, 4902, 4903, 4904, 4905, 4906, 4907, + 4908, 4909, 4910, 4911, 4912, 4913, 4914, 4915, + 151, 4499, 181, 181, 181, 571, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 1, + 1, 1, 1, 151, 176, 4562, 181, 181, + 181, 571, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 
158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 151, 176, 4562, 181, + 181, 181, 571, 4804, 4805, 4807, 4808, 4809, + 4808, 4810, 4811, 4812, 4813, 4814, 4815, 4816, + 4817, 4818, 4819, 4820, 4821, 4822, 4823, 4824, + 4825, 4826, 4827, 4828, 4829, 4831, 4832, 4833, + 4834, 421, 148, 148, 1, 4806, 1, 4830, + 1, 4562, 4895, 4896, 4897, 4898, 4899, 4900, + 4901, 4902, 4903, 4904, 4905, 4906, 4907, 4908, + 4909, 4910, 4911, 4912, 4913, 4914, 4915, 151, + 181, 181, 181, 571, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 1, 151, 176, + 4562, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 1, 1, 1, 151, 176, 4562, 181, 181, + 181, 571, 1, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 1, 151, 176, + 4562, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 151, + 176, 4562, 181, 181, 181, 571, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 1, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 1, 151, 176, 4562, 181, 
181, + 181, 571, 4804, 4805, 4807, 4808, 4809, 4808, + 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, + 4818, 4819, 4820, 4821, 4822, 4823, 4824, 4825, + 4826, 4827, 4828, 4829, 4831, 4832, 4833, 4834, + 421, 148, 148, 1, 4806, 1, 4830, 1, + 4562, 298, 299, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 151, 1441, 674, 302, 303, 304, 305, + 306, 307, 1442, 932, 1443, 934, 312, 1444, + 1445, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 151, 4499, + 1446, 1447, 317, 318, 319, 1448, 1449, 1450, + 1451, 324, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 1482, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 4499, 2518, + 2518, 2518, 2774, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 2646, 2008, 2008, 4562, 4562, 151, 4562, + 176, 4562, 1, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 1, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 151, 176, 4562, 181, 181, 181, + 571, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 1, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 
158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 151, 176, 4562, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4804, 4805, 4807, 4808, 4809, 4808, 4810, 4811, + 4812, 4813, 4814, 4815, 4816, 4817, 4818, 4819, + 4820, 4821, 4822, 4823, 4824, 4825, 4826, 4827, + 4828, 4829, 4831, 4832, 4833, 4834, 421, 148, + 148, 1, 1, 4806, 1, 4830, 1, 4562, + 2518, 2518, 2518, 2774, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 2646, 2008, 2008, 4562, 4562, 151, + 4562, 176, 4562, 1, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 1, 151, 176, 4562, 181, 181, + 181, 571, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 151, 176, 4562, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 
4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 1, 1, 151, 176, 4562, 181, 181, 181, + 571, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 1, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 1, 151, 176, + 4562, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 151, 176, 4562, 181, 181, 181, + 571, 1, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 151, 176, 4562, 181, 181, + 181, 571, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 151, 
176, 4562, + 181, 181, 181, 571, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 1, 151, 176, + 4562, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 151, 176, 4562, 181, 181, 181, + 571, 1, 1, 1, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 1, 151, 176, + 4562, 181, 181, 181, 571, 4804, 4805, 4807, + 4808, 4809, 4808, 4810, 4811, 4812, 4813, 4814, + 4815, 4816, 4817, 4818, 4819, 4820, 4821, 4822, + 4823, 4824, 4825, 4826, 4827, 4828, 4829, 4831, + 4832, 4833, 4834, 421, 148, 148, 1, 1, + 4806, 1, 4830, 1, 4562, 2518, 2518, 2518, + 2774, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 2646, + 2008, 2008, 4562, 4562, 151, 4562, 176, 4562, + 1, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 151, + 176, 4562, 181, 181, 181, 571, 4563, 150, + 152, 153, 
4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 1, + 151, 176, 4562, 181, 181, 181, 571, 4804, + 4805, 4807, 4808, 4809, 4808, 4810, 4811, 4812, + 4813, 4814, 4815, 4816, 4817, 4818, 4819, 4820, + 4821, 4822, 4823, 4824, 4825, 4826, 4827, 4828, + 4829, 4831, 4832, 4833, 4834, 421, 148, 148, + 1, 1, 4806, 1, 4830, 1, 4562, 2518, + 2518, 2518, 2774, 1, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 1, 2646, 1, 2008, 1, 2008, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 151, 176, 4562, 181, 181, 181, + 571, 1, 4563, 150, 152, 153, 4564, 153, + 4565, 4566, 158, 4567, 160, 161, 4568, 4569, + 4570, 4571, 4572, 4573, 4574, 302, 4575, 4576, + 4577, 4578, 4579, 4580, 177, 4581, 4582, 4583, + 421, 148, 148, 1, 1, 151, 176, 4562, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 1, 151, 176, 4562, 181, 181, 181, 571, + 1, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 1, 1, 151, 176, 4562, 181, + 181, 181, 571, 4563, 150, 152, 153, 4564, + 153, 4565, 4566, 158, 4567, 160, 161, 4568, + 4569, 4570, 4571, 4572, 4573, 4574, 302, 4575, + 4576, 4577, 4578, 4579, 4580, 177, 4581, 4582, + 4583, 421, 148, 148, 1, 1, 151, 176, + 4562, 181, 181, 181, 571, 1, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 
4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 151, + 176, 4562, 181, 181, 181, 571, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 1, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 151, 176, 4562, 181, 181, 181, + 571, 84, 85, 86, 87, 88, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 151, + 176, 4562, 181, 181, 181, 571, 89, 90, + 91, 92, 93, 94, 95, 96, 97, 98, + 99, 100, 101, 102, 103, 100, 104, 105, + 106, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 151, 176, 4562, 181, 181, 181, + 571, 107, 108, 109, 110, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 151, 176, + 4562, 181, 181, 181, 571, 111, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 421, 148, 148, 151, + 176, 4562, 181, 181, 181, 571, 112, 113, + 114, 115, 116, 117, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 151, 176, 4562, + 181, 181, 181, 571, 118, 4563, 150, 152, + 153, 4564, 
153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 151, 176, + 4562, 181, 181, 181, 571, 120, 121, 122, + 124, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 123, 151, 176, 4562, 2518, 2518, + 2518, 2774, 4562, 4804, 4805, 4807, 4808, 4809, + 4808, 4810, 4811, 4812, 4813, 4814, 4815, 4816, + 4817, 4818, 4819, 4820, 4821, 4822, 4823, 4824, + 4825, 4826, 4827, 4828, 4829, 4831, 4832, 4833, + 4834, 2646, 2008, 2008, 4562, 4806, 4830, 1, + 2518, 2518, 2518, 2774, 4562, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 2646, 2008, 2008, 4562, 4562, + 151, 4562, 176, 4562, 1, 181, 181, 181, + 571, 1, 4804, 4805, 4807, 4808, 4809, 4808, + 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, + 4818, 4819, 4820, 4821, 4822, 4823, 4824, 4825, + 4826, 4827, 4828, 4829, 4831, 4832, 4833, 4834, + 421, 148, 148, 1, 1, 4806, 1, 4830, + 1, 4562, 2518, 2518, 2518, 2774, 4563, 150, + 152, 153, 4564, 153, 4565, 4566, 158, 4567, + 160, 161, 4568, 4569, 4570, 4571, 4572, 4573, + 4574, 302, 4575, 4576, 4577, 4578, 4579, 4580, + 177, 4581, 4582, 4583, 2646, 2008, 2008, 4562, + 4562, 4562, 4562, 4562, 151, 4562, 176, 4562, + 1, 2518, 2518, 2518, 2774, 1, 1, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 1, 2646, 1, + 2008, 1, 2008, 1, 1, 1, 1, 151, + 176, 4562, 2518, 2518, 2518, 2774, 4562, 4562, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 2646, 
2008, + 2008, 4562, 4562, 151, 4562, 176, 4562, 1, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 1, 1, 1, 1, 151, 176, 4562, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 151, + 176, 4562, 181, 181, 181, 571, 1, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 1, 1, 151, 176, 4562, 181, 181, + 181, 571, 4804, 4805, 4807, 4808, 4809, 4808, + 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, + 4818, 4819, 4820, 4821, 4822, 4823, 4824, 4825, + 4826, 4827, 4828, 4829, 4831, 4832, 4833, 4834, + 421, 148, 148, 1, 1, 4806, 1, 4830, + 1, 4562, 2518, 2518, 2518, 2774, 4562, 4562, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 2646, 2008, + 2008, 4562, 4562, 4562, 151, 4562, 176, 4562, + 1, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 1, + 1, 1, 1, 151, 176, 4562, 181, 181, + 181, 571, 4804, 4805, 4807, 4808, 4809, 4808, + 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, + 4818, 4819, 4820, 4821, 4822, 4823, 4824, 4825, + 4826, 4827, 4828, 4829, 4831, 
4832, 4833, 4834, + 421, 148, 148, 1, 1, 4806, 1, 4830, + 1, 4562, 2518, 2518, 2518, 2774, 4562, 4562, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 2646, 2008, + 2008, 4562, 4562, 4562, 151, 4562, 176, 4562, + 1, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 151, + 176, 4562, 181, 181, 181, 571, 1, 1, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 1, 1, 151, 176, 4562, 181, + 181, 181, 571, 1, 4563, 150, 152, 153, + 4564, 153, 4565, 4566, 158, 4567, 160, 161, + 4568, 4569, 4570, 4571, 4572, 4573, 4574, 302, + 4575, 4576, 4577, 4578, 4579, 4580, 177, 4581, + 4582, 4583, 421, 148, 148, 1, 151, 176, + 4562, 181, 181, 181, 571, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 1, 151, + 176, 4562, 181, 181, 181, 571, 1, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 1, 151, 176, 4562, 181, 181, 181, + 571, 4563, 150, 152, 153, 4564, 153, 4565, + 4566, 158, 4567, 160, 161, 4568, 4569, 4570, + 4571, 4572, 4573, 4574, 302, 4575, 4576, 4577, + 4578, 4579, 4580, 177, 4581, 4582, 4583, 421, + 148, 148, 1, 151, 176, 4562, 181, 181, + 181, 571, 1, 1, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 
148, 1, 1, + 151, 176, 4562, 181, 181, 181, 571, 4563, + 150, 152, 153, 4564, 153, 4565, 4566, 158, + 4567, 160, 161, 4568, 4569, 4570, 4571, 4572, + 4573, 4574, 302, 4575, 4576, 4577, 4578, 4579, + 4580, 177, 4581, 4582, 4583, 421, 148, 148, + 1, 151, 176, 4562, 181, 181, 181, 571, + 4563, 150, 152, 153, 4564, 153, 4565, 4566, + 158, 4567, 160, 161, 4568, 4569, 4570, 4571, + 4572, 4573, 4574, 302, 4575, 4576, 4577, 4578, + 4579, 4580, 177, 4581, 4582, 4583, 421, 148, + 148, 1, 1, 1, 1, 151, 176, 4562, + 181, 181, 181, 571, 1, 4563, 150, 152, + 153, 4564, 153, 4565, 4566, 158, 4567, 160, + 161, 4568, 4569, 4570, 4571, 4572, 4573, 4574, + 302, 4575, 4576, 4577, 4578, 4579, 4580, 177, + 4581, 4582, 4583, 421, 148, 148, 151, 176, + 4562, 4499, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 4499, 4499, 4499, 148, 4916, 4917, 4918, 4919, + 4920, 4921, 4922, 4923, 4924, 4925, 4926, 4927, + 4928, 4929, 4930, 4931, 4932, 4933, 4934, 4935, + 4936, 2395, 4499, 1, 4764, 2515, 2517, 4765, + 4766, 4767, 4768, 4769, 4770, 4771, 4772, 4773, + 4774, 4775, 4776, 4777, 4778, 4779, 4780, 4781, + 4782, 4763, 4937, 4938, 4939, 4940, 4941, 4942, + 4943, 4944, 4945, 4946, 4947, 4948, 4949, 4950, + 4951, 4952, 4953, 4954, 4955, 4956, 4957, 1, + 4763, 2395, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 2395, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 2395, 4499, 2395, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 2395, 4499, 2395, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 
4512, 4513, 4514, + 4515, 4516, 4517, 4518, 2395, 2395, 4499, 2395, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 2395, 2395, 4499, + 2395, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 2395, 4499, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 2395, 2395, 2395, + 2395, 4499, 2395, 2395, 4916, 4917, 4918, 4919, + 4920, 4921, 4922, 4923, 4924, 4925, 4926, 4927, + 4928, 4929, 4930, 4931, 4932, 4933, 4934, 4935, + 4936, 2395, 4499, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 4763, 1, 4764, 2515, 2517, 4765, 4766, 4767, + 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4775, + 4776, 4777, 4778, 4779, 4780, 4781, 4782, 1, + 4763, 1, 4764, 2515, 2517, 4765, 4766, 4767, + 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4775, + 4776, 4777, 4778, 4779, 4780, 4781, 4782, 1, + 4763, 1, 4764, 2515, 2517, 4765, 4766, 4767, + 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4775, + 4776, 4777, 4778, 4779, 4780, 4781, 4782, 1, + 1, 4763, 1, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 1, 1, 4763, 1, 4764, 2515, 2517, 4765, + 4766, 4767, 4768, 4769, 4770, 4771, 4772, 4773, + 4774, 4775, 4776, 4777, 4778, 4779, 4780, 4781, + 4782, 1, 4763, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 1, 1, 1, 1, 4763, 1, 1, 4937, + 4938, 4939, 4940, 4941, 4942, 4943, 4944, 4945, + 4946, 4947, 4948, 4949, 4950, 4951, 4952, 4953, + 4954, 4955, 4956, 4957, 1, 4763, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 2395, 4500, 122, 
124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 2395, 4499, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 2395, 4499, 2396, 2397, 2398, 2399, 2400, + 2402, 2403, 2404, 2406, 2407, 2408, 2409, 2410, + 2411, 2412, 2413, 2414, 2415, 2416, 2417, 2418, + 2419, 2420, 2421, 2422, 2423, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 2401, 2405, 4499, 2424, 2425, 2426, + 2427, 2428, 2429, 2430, 2431, 2432, 2433, 2434, + 2435, 2436, 2437, 2438, 2439, 2440, 2441, 2442, + 2443, 2444, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 2445, 2446, 2447, 2448, 2449, 2450, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 2451, 2452, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 2453, 2454, 2455, 2456, + 2457, 2458, 2459, 2460, 2461, 2462, 2463, 2464, + 2465, 2466, 2467, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 2468, 2469, 2449, 2454, 2470, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 4499, 2471, 2472, 2473, 2474, + 2475, 2476, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 2512, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 4499, 4764, + 2515, 2517, 4765, 4766, 4767, 4768, 4769, 4770, + 4771, 4772, 4773, 4774, 4775, 4776, 4777, 4778, + 4779, 
4780, 4781, 4782, 4763, 1, 4764, 2515, + 2517, 4765, 4766, 4767, 4768, 4769, 4770, 4771, + 4772, 4773, 4774, 4775, 4776, 4777, 4778, 4779, + 4780, 4781, 4782, 1, 4763, 4764, 2515, 2517, + 4765, 4766, 4767, 4768, 4769, 4770, 4771, 4772, + 4773, 4774, 4775, 4776, 4777, 4778, 4779, 4780, + 4781, 4782, 1, 4763, 3, 4, 5, 6, + 7, 9, 10, 11, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 4764, 2515, + 2517, 4765, 4766, 4767, 4768, 4769, 4770, 4771, + 4772, 4773, 4774, 4775, 4776, 4777, 4778, 4779, + 4780, 4781, 4782, 8, 12, 4763, 31, 32, + 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, + 49, 50, 51, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 4763, 52, 53, 54, 55, 56, 57, 4764, + 2515, 2517, 4765, 4766, 4767, 4768, 4769, 4770, + 4771, 4772, 4773, 4774, 4775, 4776, 4777, 4778, + 4779, 4780, 4781, 4782, 4763, 58, 59, 4764, + 2515, 2517, 4765, 4766, 4767, 4768, 4769, 4770, + 4771, 4772, 4773, 4774, 4775, 4776, 4777, 4778, + 4779, 4780, 4781, 4782, 4763, 60, 61, 62, + 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 4764, 2515, 2517, 4765, + 4766, 4767, 4768, 4769, 4770, 4771, 4772, 4773, + 4774, 4775, 4776, 4777, 4778, 4779, 4780, 4781, + 4782, 4763, 75, 76, 56, 61, 77, 4764, + 2515, 2517, 4765, 4766, 4767, 4768, 4769, 4770, + 4771, 4772, 4773, 4774, 4775, 4776, 4777, 4778, + 4779, 4780, 4781, 4782, 4763, 78, 79, 80, + 81, 82, 83, 4764, 2515, 2517, 4765, 4766, + 4767, 4768, 4769, 4770, 4771, 4772, 4773, 4774, + 4775, 4776, 4777, 4778, 4779, 4780, 4781, 4782, + 4763, 119, 4764, 2515, 2517, 4765, 4766, 4767, + 4768, 4769, 4770, 4771, 4772, 4773, 4774, 4775, + 4776, 4777, 4778, 4779, 4780, 4781, 4782, 4763, + 4499, 4499, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 148, + 4499, 148, 4499, 2395, 148, 148, 4783, 4784, + 4785, 4786, 4787, 4788, 4789, 4790, 4791, 
4792, + 4793, 4794, 4795, 4796, 4797, 4798, 4799, 4800, + 4801, 4802, 4803, 148, 148, 148, 4499, 4499, + 4783, 4784, 4785, 4786, 4787, 4788, 4789, 4790, + 4791, 4792, 4793, 4794, 4795, 4796, 4797, 4798, + 4799, 4800, 4801, 4802, 4803, 2395, 148, 4499, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 2395, 4499, + 148, 2395, 148, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 2395, 2395, 572, 572, 4499, 2395, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 2395, 2395, 148, 4499, 126, + 2395, 4783, 4784, 4785, 4786, 4787, 4788, 4789, + 4790, 4791, 4792, 4793, 4794, 4795, 4796, 4797, + 4798, 4799, 4800, 4801, 4802, 4803, 148, 2395, + 126, 148, 4499, 4499, 4499, 4499, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 2395, 2395, 126, 4499, 4499, + 148, 148, 4916, 4917, 4918, 4919, 4920, 4921, + 4922, 4923, 4924, 4925, 4926, 4927, 4928, 4929, + 4930, 4931, 4932, 4933, 4934, 4935, 4936, 2395, + 148, 2395, 4499, 4783, 4784, 4785, 4786, 4787, + 4788, 4789, 4790, 4791, 4792, 4793, 4794, 4795, + 4796, 4797, 4798, 4799, 4800, 4801, 4802, 4803, + 4499, 148, 2395, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 2395, 4499, 148, 148, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 126, 148, 2395, 148, 4499, 4958, 4959, + 184, 2398, 4960, 4961, 4962, 4963, 4964, 4965, + 4966, 4967, 4968, 4969, 4970, 4971, 4972, 4973, + 4974, 4975, 4976, 4977, 4978, 4979, 2416, 4980, + 2418, 4981, 4982, 4983, 4984, 2423, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 
4513, 4514, 4515, + 4516, 4517, 4518, 4499, 2424, 4985, 4986, 217, + 3020, 3022, 218, 219, 220, 221, 4987, 223, + 224, 225, 226, 227, 228, 4988, 4989, 2430, + 4990, 4991, 234, 4992, 236, 4993, 1528, 1529, + 4994, 2436, 4995, 4996, 4997, 4998, 4999, 5000, + 1538, 5001, 151, 152, 2444, 249, 250, 251, + 252, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3021, 151, + 153, 151, 4499, 181, 181, 181, 571, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 421, 148, 148, + 151, 5020, 5002, 5025, 3085, 3180, 5026, 5027, + 5028, 5029, 5030, 5031, 5032, 5033, 5034, 5035, + 5036, 5037, 5038, 5039, 5040, 5042, 5043, 5044, + 5045, 5041, 5002, 5046, 5047, 255, 2447, 257, + 258, 259, 260, 261, 262, 263, 151, 5048, + 265, 5049, 267, 5050, 269, 5051, 5052, 3371, + 5053, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3372, 4499, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 5054, 5074, 5075, + 5076, 3685, 3049, 3021, 3050, 3686, 3051, 3052, + 3687, 3688, 3689, 3371, 5077, 5078, 5079, 5080, + 5081, 5082, 5083, 5084, 5085, 5086, 5087, 5088, + 5089, 5090, 5091, 5092, 5093, 5094, 5095, 5096, + 5097, 3372, 4499, 181, 181, 181, 571, 5098, + 150, 152, 153, 4564, 3501, 5099, 4566, 158, + 5100, 160, 161, 5101, 5102, 5103, 5104, 5105, + 5106, 5107, 5108, 5109, 5110, 5111, 5112, 5113, + 5114, 177, 5115, 5116, 5117, 421, 148, 148, + 151, 176, 4562, 5118, 3624, 3626, 5119, 5120, + 5121, 5122, 5123, 5124, 5125, 5126, 5127, 5128, + 5129, 5130, 5131, 5132, 5133, 5042, 5134, 5135, + 5136, 5041, 5002, 571, 5138, 3688, 3756, 5139, + 5140, 5141, 5142, 5143, 5144, 5145, 5146, 5147, + 5148, 5149, 5150, 5151, 5152, 5153, 5154, 5155, 
+ 5156, 5137, 5158, 3878, 3880, 5159, 5160, 5161, + 5162, 5163, 5164, 5165, 5166, 5167, 5168, 5169, + 5170, 5171, 5172, 5173, 5174, 5175, 5176, 5157, + 5177, 4001, 4003, 5178, 5179, 5180, 5181, 5182, + 5183, 5184, 5185, 5186, 5187, 5188, 5189, 5190, + 5191, 5192, 5193, 5194, 5195, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 1, 1, 5054, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 5196, + 5197, 5198, 5199, 5200, 5201, 5202, 5203, 5204, + 5205, 5206, 5207, 5208, 5209, 5210, 5211, 5212, + 5213, 5214, 5215, 5216, 1, 5054, 3250, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 4499, 5217, 5218, 5219, + 5220, 5221, 5222, 5223, 5224, 5225, 5226, 5227, + 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5235, + 5236, 5237, 3250, 4499, 1, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 5196, 5197, 5198, 5199, 5200, + 5201, 5202, 5203, 5204, 5205, 5206, 5207, 5208, + 5209, 5210, 5211, 5212, 5213, 5214, 5215, 5216, + 1, 5054, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 3250, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3250, 4499, + 3250, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3250, 4499, + 3250, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3250, 3250, + 4499, 3250, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 
4517, 4518, 3250, + 3250, 4499, 3250, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 3250, 4499, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 3250, + 3250, 3250, 3250, 4499, 3250, 3250, 5217, 5218, + 5219, 5220, 5221, 5222, 5223, 5224, 5225, 5226, + 5227, 5228, 5229, 5230, 5231, 5232, 5233, 5234, + 5235, 5236, 5237, 3250, 4499, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 1, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 1, 5054, 1, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 1, 5054, 1, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 1, 1, 5054, 1, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 1, 1, 5054, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 5054, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 1, 1, 1, 1, 5054, 1, + 1, 5196, 5197, 5198, 5199, 5200, 5201, 5202, + 5203, 5204, 5205, 5206, 5207, 5208, 5209, 5210, + 5211, 5212, 5213, 5214, 5215, 5216, 1, 5054, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 3250, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 3250, 4499, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 
4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 3250, 4499, 3252, 3253, 3254, + 3255, 3256, 3258, 3259, 3260, 3262, 3263, 3264, + 3265, 3266, 3267, 3268, 3269, 3270, 3271, 3272, + 3273, 3274, 3275, 3276, 3277, 3278, 3279, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 3257, 3261, 4499, 3280, + 3281, 3282, 3283, 3284, 3285, 3286, 3287, 3288, + 3289, 3290, 3291, 3292, 3293, 3294, 3295, 3296, + 3297, 3298, 3299, 3300, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 4499, 3301, 3302, 3303, 3304, 3305, 3306, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 3307, 3308, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 3309, 3310, + 3311, 3312, 3313, 3314, 3315, 3316, 3317, 3318, + 3319, 3320, 3321, 3322, 3323, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 4499, 3324, 3325, 3305, 3310, 3326, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 3327, 3328, + 3329, 3330, 3331, 3332, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 4499, 3368, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 1, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 5055, + 3371, 3373, 
5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 3, 4, + 5, 6, 7, 9, 10, 11, 13, 14, + 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 8, 12, 5054, + 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 52, 53, 54, 55, 56, + 57, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 58, + 59, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 60, + 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 5054, 75, 76, 56, 61, + 77, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 78, + 79, 80, 81, 82, 83, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 119, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 5054, 5054, 5196, 5197, 5198, 5199, 5200, + 5201, 5202, 5203, 5204, 5205, 5206, 5207, 5208, + 5209, 5210, 5211, 5212, 5213, 5214, 5215, 5216, + 5054, 1, 5054, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 5054, 5054, 1, 1, 5196, 5197, 5198, 5199, + 5200, 5201, 5202, 5203, 5204, 5205, 5206, 5207, + 5208, 5209, 5210, 5211, 5212, 5213, 5214, 5215, 
+ 5216, 1, 1, 5054, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 5054, 5054, 5054, 5054, 5054, 1, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 5054, 5054, 5054, 5054, + 5054, 5054, 1, 5054, 5054, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 5054, 1, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 1, 1, 1, + 5054, 1, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 5054, 1, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 1, 1, 5054, 5196, 5197, 5198, 5199, 5200, + 5201, 5202, 5203, 5204, 5205, 5206, 5207, 5208, + 5209, 5210, 5211, 5212, 5213, 5214, 5215, 5216, + 1, 1, 5054, 5054, 5054, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 5054, 5054, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 1, 1, 1, + 5054, 5196, 5197, 5198, 5199, 5200, 5201, 5202, + 5203, 5204, 5205, 5206, 5207, 5208, 5209, 5210, + 5211, 5212, 5213, 5214, 5215, 5216, 1, 1, + 5054, 5054, 5054, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 5054, 5054, 5054, 1, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 
5071, 5072, + 5073, 1, 5054, 1, 1, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 1, 1, 1, 5054, 1, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 5054, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 5054, 1, 1, 1, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 1, 5054, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 1, 1, + 5054, 1, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 5054, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 1, 1, + 1, 5054, 1, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 1, 1, 5054, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 1, 5054, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 
5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 1, 5054, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 1, 5054, 5196, 5197, 5198, 5199, 5200, 5201, + 5202, 5203, 5204, 5205, 5206, 5207, 5208, 5209, + 5210, 5211, 5212, 5213, 5214, 5215, 5216, 1, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 5054, + 1, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 5054, + 1, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 1, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 5196, 5197, + 5198, 5199, 5200, 5201, 5202, 5203, 5204, 5205, + 5206, 5207, 5208, 5209, 5210, 5211, 5212, 5213, + 5214, 5215, 5216, 1, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 5054, 5054, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 5054, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, 
+ 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 1, 5054, 1, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 1, 1, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 1, 5054, 1, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 5054, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 1, 5054, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 1, 5054, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 1, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 5054, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 1, + 1, 1, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 
5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 5054, 5196, 5197, 5198, 5199, 5200, 5201, 5202, + 5203, 5204, 5205, 5206, 5207, 5208, 5209, 5210, + 5211, 5212, 5213, 5214, 5215, 5216, 1, 1, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 5054, 5054, + 1, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 5054, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 5054, 5196, + 5197, 5198, 5199, 5200, 5201, 5202, 5203, 5204, + 5205, 5206, 5207, 5208, 5209, 5210, 5211, 5212, + 5213, 5214, 5215, 5216, 1, 1, 5054, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 5054, 5054, 1, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 5054, 1, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 1, 1, 5054, 1, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 1, 1, 1, + 5054, 1, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 1, 5054, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 1, + 1, 5054, 1, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 1, + 5054, 5055, 3371, 3373, 5056, 5057, 5058, 
5059, + 5060, 5061, 5062, 5063, 5064, 5065, 5066, 5067, + 5068, 5069, 5070, 5071, 5072, 5073, 1, 5054, + 84, 85, 86, 87, 88, 5055, 3371, 3373, + 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5063, + 5064, 5065, 5066, 5067, 5068, 5069, 5070, 5071, + 5072, 5073, 5054, 89, 90, 91, 92, 93, + 94, 95, 96, 97, 98, 99, 100, 101, + 102, 103, 100, 104, 105, 106, 5055, 3371, + 3373, 5056, 5057, 5058, 5059, 5060, 5061, 5062, + 5063, 5064, 5065, 5066, 5067, 5068, 5069, 5070, + 5071, 5072, 5073, 5054, 107, 108, 109, 110, + 5055, 3371, 3373, 5056, 5057, 5058, 5059, 5060, + 5061, 5062, 5063, 5064, 5065, 5066, 5067, 5068, + 5069, 5070, 5071, 5072, 5073, 5054, 111, 5055, + 3371, 3373, 5056, 5057, 5058, 5059, 5060, 5061, + 5062, 5063, 5064, 5065, 5066, 5067, 5068, 5069, + 5070, 5071, 5072, 5073, 5054, 112, 113, 114, + 115, 116, 117, 5055, 3371, 3373, 5056, 5057, + 5058, 5059, 5060, 5061, 5062, 5063, 5064, 5065, + 5066, 5067, 5068, 5069, 5070, 5071, 5072, 5073, + 5054, 118, 5055, 3371, 3373, 5056, 5057, 5058, + 5059, 5060, 5061, 5062, 5063, 5064, 5065, 5066, + 5067, 5068, 5069, 5070, 5071, 5072, 5073, 5054, + 120, 121, 122, 124, 5055, 3371, 3373, 5056, + 5057, 5058, 5059, 5060, 5061, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5073, 123, 5054, 5238, 4499, 3371, 5077, 5078, + 5079, 5080, 5081, 5082, 5083, 5084, 5085, 5086, + 5087, 5088, 5089, 5090, 5091, 5092, 5093, 5094, + 5095, 5096, 5097, 3372, 5077, 5078, 5079, 5080, + 5081, 5082, 5083, 5084, 5085, 5086, 5087, 5088, + 5089, 5090, 5091, 5092, 5093, 5094, 5095, 5096, + 5097, 3372, 5053, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 3372, 153, 274, 275, 153, 1549, 5239, + 5240, 5241, 280, 281, 282, 5242, 284, 5243, + 5244, 5245, 5246, 5247, 5248, 5249, 5250, 2465, + 5251, 295, 296, 152, 5252, 3196, 5253, 5254, + 5255, 5256, 5257, 5258, 5259, 5260, 5261, 5262, + 5263, 5264, 5265, 5266, 5267, 5268, 5269, 5270, + 5271, 5272, 
5273, 151, 3021, 4499, 181, 181, + 181, 571, 5274, 150, 152, 153, 4564, 4211, + 5275, 4566, 158, 5276, 160, 161, 5277, 5278, + 5279, 5280, 5281, 5282, 5283, 5284, 5285, 5286, + 5287, 5288, 5289, 5290, 5021, 5291, 5292, 5293, + 421, 148, 148, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 1, 1, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 151, 5020, 5002, 181, 181, 181, 571, 5294, + 4805, 4807, 4808, 4809, 5295, 5296, 4811, 4812, + 5297, 4814, 4815, 5298, 5299, 5300, 5301, 5302, + 5303, 5304, 5305, 5306, 5307, 5308, 5309, 5310, + 5311, 5313, 5314, 5315, 5316, 421, 148, 148, + 1, 4806, 1, 5312, 1, 5002, 148, 2984, + 148, 181, 148, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 5317, 5318, 5319, 5320, 5321, 5322, 5323, + 5324, 5325, 5326, 5327, 5328, 5329, 5330, 5331, + 5332, 5333, 5334, 5335, 5336, 5337, 2984, 4499, + 181, 181, 181, 571, 1, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 151, 5020, + 5002, 181, 181, 181, 571, 5294, 4805, 4807, + 4808, 4809, 5295, 5296, 4811, 4812, 5297, 4814, + 4815, 5298, 5299, 5300, 5301, 5302, 5303, 5304, + 5305, 5306, 5307, 5308, 5309, 5310, 5311, 5313, + 5314, 5315, 5316, 5002, 421, 5002, 148, 5002, + 148, 5002, 4806, 5312, 1, 4499, 4499, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 148, 4499, 148, 4499, + 2984, 4499, 4783, 
4784, 4785, 4786, 4787, 4788, + 4789, 4790, 4791, 4792, 4793, 4794, 4795, 4796, + 4797, 4798, 4799, 4800, 4801, 4802, 4803, 2984, + 148, 4499, 4500, 122, 124, 4501, 4502, 4503, + 4504, 4505, 4506, 4507, 4508, 4509, 4510, 4511, + 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4499, + 2984, 4499, 148, 2984, 148, 181, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 2984, 2984, 572, 572, 4499, + 2984, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 2984, 2984, + 148, 4499, 421, 2984, 4783, 4784, 4785, 4786, + 4787, 4788, 4789, 4790, 4791, 4792, 4793, 4794, + 4795, 4796, 4797, 4798, 4799, 4800, 4801, 4802, + 4803, 148, 2984, 421, 148, 4499, 4499, 4499, + 4499, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 2984, 2984, + 421, 4499, 4499, 148, 148, 5317, 5318, 5319, + 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5327, + 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, + 5336, 5337, 2984, 148, 2984, 4499, 2518, 2518, + 2518, 2774, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 2646, 2008, 2008, 5002, 151, 5002, 5020, 5002, + 1, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 151, + 5020, 5002, 181, 181, 181, 571, 1, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 421, 148, 148, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 1, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 
5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 1, 151, 5020, 5002, 181, + 181, 181, 571, 1, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 1, 151, + 5020, 5002, 181, 181, 181, 571, 1, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 421, 148, 148, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 1, 1, 1, 1, 151, 5020, 5002, + 181, 181, 181, 571, 1, 1, 5294, 4805, + 4807, 4808, 4809, 5295, 5296, 4811, 4812, 5297, + 4814, 4815, 5298, 5299, 5300, 5301, 5302, 5303, + 5304, 5305, 5306, 5307, 5308, 5309, 5310, 5311, + 5313, 5314, 5315, 5316, 421, 148, 148, 1, + 4806, 1, 5312, 1, 5002, 4783, 4784, 4785, + 4786, 4787, 4788, 4789, 4790, 4791, 4792, 4793, + 4794, 4795, 4796, 4797, 4798, 4799, 4800, 4801, + 4802, 4803, 4499, 148, 2984, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 2984, 4499, 148, 148, 4500, 122, + 124, 4501, 4502, 4503, 4504, 4505, 4506, 4507, + 4508, 4509, 4510, 4511, 4512, 4513, 4514, 4515, + 4516, 4517, 4518, 421, 148, 2984, 148, 4499, + 2986, 2987, 184, 2988, 2989, 2990, 2991, 2992, + 2993, 2994, 2995, 2996, 2997, 2998, 2999, 3000, + 3001, 3002, 3003, 3004, 3005, 3006, 3007, 3008, + 3009, 3010, 3011, 3012, 3013, 3014, 3015, 3016, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 4499, 3017, 3018, + 3019, 217, 3020, 3022, 218, 219, 220, 221, + 3023, 223, 224, 
225, 226, 227, 228, 3024, + 3025, 3026, 3027, 3028, 234, 3029, 236, 3030, + 483, 484, 3031, 3032, 3033, 3034, 3035, 3036, + 3037, 3038, 646, 3039, 151, 152, 3040, 249, + 250, 251, 252, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 3021, 151, 153, 151, 4499, 3041, 3042, 255, + 3043, 257, 258, 259, 260, 261, 262, 263, + 151, 3044, 265, 3045, 267, 3046, 269, 4500, + 122, 124, 4501, 4502, 4503, 4504, 4505, 4506, + 4507, 4508, 4509, 4510, 4511, 4512, 4513, 4514, + 4515, 4516, 4517, 4518, 4499, 3047, 3048, 3049, + 3021, 3050, 3051, 3052, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 4499, 153, 274, 275, 153, 657, 3181, + 3182, 3183, 280, 281, 282, 3184, 284, 3185, + 3186, 3187, 3188, 3189, 3190, 3191, 3192, 3193, + 3194, 295, 296, 152, 3195, 3196, 5253, 5254, + 5255, 5256, 5257, 5258, 5259, 5260, 5261, 5262, + 5263, 5264, 5265, 5266, 5267, 5268, 5269, 5270, + 5271, 5272, 5273, 151, 3021, 4499, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 1, 1, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 1, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 
5023, + 5024, 421, 148, 148, 1, 151, 5020, 5002, + 181, 181, 181, 571, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 5294, 4805, 4807, 4808, 4809, 5295, 5296, 4811, + 4812, 5297, 4814, 4815, 5298, 5299, 5300, 5301, + 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5309, + 5310, 5311, 5313, 5314, 5315, 5316, 421, 148, + 148, 1, 4806, 1, 5312, 1, 5002, 5253, + 5254, 5255, 5256, 5257, 5258, 5259, 5260, 5261, + 5262, 5263, 5264, 5265, 5266, 5267, 5268, 5269, + 5270, 5271, 5272, 5273, 3021, 181, 181, 181, + 571, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 151, 5020, + 5002, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 151, 5020, 5002, 181, 181, 181, 571, 1, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 
5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 151, 5020, 5002, 181, 181, 181, 571, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 1, 151, 5020, 5002, 181, 181, 181, + 571, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 151, 5020, + 5002, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 151, 5020, 5002, 181, 181, 181, 571, 1, + 1, 1, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5294, 4805, 4807, 4808, 4809, + 5295, 5296, 4811, 4812, 5297, 4814, 4815, 5298, + 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, + 5307, 5308, 5309, 5310, 5311, 
5313, 5314, 5315, + 5316, 421, 148, 148, 1, 1, 4806, 1, + 5312, 1, 5002, 3197, 3198, 4500, 122, 124, + 4501, 4502, 4503, 4504, 4505, 4506, 4507, 4508, + 4509, 4510, 4511, 4512, 4513, 4514, 4515, 4516, + 4517, 4518, 4499, 3021, 3199, 674, 302, 303, + 304, 305, 306, 307, 3200, 932, 3201, 934, + 312, 3202, 3203, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 151, 4499, 3204, 3205, 317, 318, 319, 3206, + 3207, 3208, 3209, 324, 4500, 122, 124, 4501, + 4502, 4503, 4504, 4505, 4506, 4507, 4508, 4509, + 4510, 4511, 4512, 4513, 4514, 4515, 4516, 4517, + 4518, 4499, 3245, 4500, 122, 124, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4499, 2518, 2518, 2518, 2774, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 2646, 2008, 2008, 5002, 5002, + 151, 5002, 5020, 5002, 1, 181, 181, 181, + 571, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5294, 4805, 4807, 4808, 4809, + 5295, 5296, 4811, 4812, 5297, 4814, 4815, 5298, + 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, + 5307, 5308, 5309, 5310, 5311, 5313, 5314, 5315, + 5316, 421, 148, 148, 1, 1, 4806, 1, + 5312, 1, 5002, 2518, 2518, 2518, 2774, 1, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 1, 
2646, + 1, 2008, 1, 2008, 1, 151, 5020, 5002, + 181, 181, 181, 571, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 151, 5020, + 5002, 181, 181, 181, 571, 1, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 1, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 1, 1, 1, 151, 5020, 5002, + 181, 181, 181, 571, 1, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 1, + 151, 5020, 5002, 181, 181, 181, 571, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 421, 148, 148, + 1, 1, 151, 5020, 5002, 181, 181, 181, + 571, 1, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 1, 151, 5020, 5002, + 181, 181, 181, 571, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 151, 5020, + 5002, 181, 
181, 181, 571, 84, 85, 86, + 87, 88, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 151, 5020, 5002, 181, 181, + 181, 571, 89, 90, 91, 92, 93, 94, + 95, 96, 97, 98, 99, 100, 101, 102, + 103, 100, 104, 105, 106, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 151, 5020, + 5002, 181, 181, 181, 571, 107, 108, 109, + 110, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 151, 5020, 5002, 181, 181, 181, + 571, 111, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 151, 5020, 5002, 181, 181, + 181, 571, 112, 113, 114, 115, 116, 117, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 151, 5020, 5002, 181, 181, 181, 571, + 118, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 151, 5020, 5002, 181, 181, 181, + 571, 120, 121, 122, 124, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 123, 151, + 5020, 5002, 2518, 2518, 2518, 2774, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 
5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 2646, 2008, 2008, 5002, + 5002, 151, 5002, 5020, 5002, 1, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 1, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 151, 5020, 5002, + 181, 181, 181, 571, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 151, 5020, 5002, 181, 181, 181, 571, 1, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 1, 1, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5294, 4805, 4807, 4808, 4809, + 5295, 5296, 4811, 4812, 5297, 4814, 4815, 5298, + 5299, 5300, 5301, 5302, 5303, 5304, 5305, 5306, + 5307, 5308, 5309, 5310, 5311, 5313, 5314, 5315, + 5316, 421, 148, 148, 1, 1, 4806, 1, + 5312, 1, 5002, 2518, 2518, 2518, 2774, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 
5021, 5022, 5023, 5024, 2646, 2008, 2008, + 5002, 5002, 151, 5002, 5020, 5002, 1, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 151, 5020, + 5002, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 151, 5020, 5002, 181, 181, 181, 571, 1, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 1, 1, 1, 1, 151, 5020, 5002, + 181, 181, 181, 571, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 1, 1, 151, + 5020, 5002, 2518, 2518, 2518, 2774, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 1, 2646, 1, 2008, + 1, 2008, 1, 151, 5020, 5002, 181, 181, + 181, 571, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 
421, 148, 148, 1, 151, 5020, 5002, + 181, 181, 181, 571, 3, 4, 5, 6, + 7, 9, 10, 11, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, + 25, 26, 27, 28, 29, 30, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 8, + 12, 151, 5020, 5002, 181, 181, 181, 571, + 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, + 47, 48, 49, 50, 51, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 151, 5020, + 5002, 181, 181, 181, 571, 52, 53, 54, + 55, 56, 57, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 151, 5020, 5002, 181, + 181, 181, 571, 58, 59, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 151, 5020, + 5002, 181, 181, 181, 571, 60, 61, 62, + 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 421, 148, 148, 151, 5020, 5002, + 181, 181, 181, 571, 75, 76, 56, 61, + 77, 5003, 150, 152, 153, 4564, 3196, 5004, + 4566, 158, 5005, 160, 161, 5006, 5007, 5008, + 5009, 5010, 5011, 5012, 5013, 5014, 5015, 5016, + 5017, 5018, 5019, 5021, 5022, 5023, 5024, 421, + 148, 148, 151, 5020, 5002, 181, 181, 181, + 571, 78, 79, 80, 81, 82, 83, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 
421, 148, 148, + 151, 5020, 5002, 181, 181, 181, 571, 119, + 5003, 150, 152, 153, 4564, 3196, 5004, 4566, + 158, 5005, 160, 161, 5006, 5007, 5008, 5009, + 5010, 5011, 5012, 5013, 5014, 5015, 5016, 5017, + 5018, 5019, 5021, 5022, 5023, 5024, 421, 148, + 148, 151, 5020, 5002, 2518, 2518, 2518, 2774, + 5002, 5294, 4805, 4807, 4808, 4809, 5295, 5296, + 4811, 4812, 5297, 4814, 4815, 5298, 5299, 5300, + 5301, 5302, 5303, 5304, 5305, 5306, 5307, 5308, + 5309, 5310, 5311, 5313, 5314, 5315, 5316, 2646, + 2008, 2008, 5002, 4806, 5312, 1, 2518, 2518, + 2518, 2774, 5002, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 2646, 2008, 2008, 5002, 5002, 151, 5002, + 5020, 5002, 1, 181, 181, 181, 571, 1, + 5294, 4805, 4807, 4808, 4809, 5295, 5296, 4811, + 4812, 5297, 4814, 4815, 5298, 5299, 5300, 5301, + 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5309, + 5310, 5311, 5313, 5314, 5315, 5316, 421, 148, + 148, 1, 1, 4806, 1, 5312, 1, 5002, + 2518, 2518, 2518, 2774, 5003, 150, 152, 153, + 4564, 3196, 5004, 4566, 158, 5005, 160, 161, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 5016, 5017, 5018, 5019, 5021, 5022, + 5023, 5024, 2646, 2008, 2008, 5002, 5002, 5002, + 5002, 5002, 151, 5002, 5020, 5002, 1, 2518, + 2518, 2518, 2774, 1, 1, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 1, 2646, 1, 2008, 1, + 2008, 1, 1, 1, 1, 151, 5020, 5002, + 2518, 2518, 2518, 2774, 5002, 5002, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 2646, 2008, 2008, 5002, + 5002, 151, 5002, 5020, 5002, 1, 181, 181, + 181, 571, 1, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 
5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 151, 5020, + 5002, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 1, + 1, 1, 1, 151, 5020, 5002, 181, 181, + 181, 571, 1, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 151, 5020, 5002, + 181, 181, 181, 571, 1, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 1, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 5294, 4805, 4807, 4808, 4809, 5295, 5296, 4811, + 4812, 5297, 4814, 4815, 5298, 5299, 5300, 5301, + 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5309, + 5310, 5311, 5313, 5314, 5315, 5316, 421, 148, + 148, 1, 1, 4806, 1, 5312, 1, 5002, + 2518, 2518, 2518, 2774, 5002, 5002, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 2646, 2008, 2008, 5002, + 5002, 5002, 151, 5002, 5020, 5002, 1, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 1, 1, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 5294, 4805, 4807, 4808, 4809, 5295, 5296, 4811, + 4812, 5297, 4814, 4815, 5298, 5299, 5300, 5301, + 5302, 5303, 5304, 5305, 5306, 5307, 5308, 5309, + 5310, 5311, 5313, 5314, 5315, 5316, 421, 148, + 148, 1, 1, 4806, 1, 5312, 1, 5002, + 2518, 2518, 2518, 2774, 5002, 5002, 5003, 150, + 152, 153, 
4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 2646, 2008, 2008, 5002, + 5002, 5002, 151, 5002, 5020, 5002, 1, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 151, 5020, 5002, + 181, 181, 181, 571, 1, 1, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 1, 1, 151, 5020, 5002, 181, 181, 181, + 571, 1, 5003, 150, 152, 153, 4564, 3196, + 5004, 4566, 158, 5005, 160, 161, 5006, 5007, + 5008, 5009, 5010, 5011, 5012, 5013, 5014, 5015, + 5016, 5017, 5018, 5019, 5021, 5022, 5023, 5024, + 421, 148, 148, 1, 151, 5020, 5002, 181, + 181, 181, 571, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 151, 5020, 5002, + 181, 181, 181, 571, 1, 5003, 150, 152, + 153, 4564, 3196, 5004, 4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 1, + 151, 5020, 5002, 181, 181, 181, 571, 5003, + 150, 152, 153, 4564, 3196, 5004, 4566, 158, + 5005, 160, 161, 5006, 5007, 5008, 5009, 5010, + 5011, 5012, 5013, 5014, 5015, 5016, 5017, 5018, + 5019, 5021, 5022, 5023, 5024, 421, 148, 148, + 1, 151, 5020, 5002, 181, 181, 181, 571, + 1, 1, 1, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 1, 1, 151, 5020, + 5002, 181, 181, 181, 571, 5003, 150, 152, + 153, 4564, 3196, 5004, 
4566, 158, 5005, 160, + 161, 5006, 5007, 5008, 5009, 5010, 5011, 5012, + 5013, 5014, 5015, 5016, 5017, 5018, 5019, 5021, + 5022, 5023, 5024, 421, 148, 148, 1, 151, + 5020, 5002, 181, 181, 181, 571, 5003, 150, + 152, 153, 4564, 3196, 5004, 4566, 158, 5005, + 160, 161, 5006, 5007, 5008, 5009, 5010, 5011, + 5012, 5013, 5014, 5015, 5016, 5017, 5018, 5019, + 5021, 5022, 5023, 5024, 421, 148, 148, 1, + 1, 1, 1, 151, 5020, 5002, 181, 181, + 181, 571, 1, 5003, 150, 152, 153, 4564, + 3196, 5004, 4566, 158, 5005, 160, 161, 5006, + 5007, 5008, 5009, 5010, 5011, 5012, 5013, 5014, + 5015, 5016, 5017, 5018, 5019, 5021, 5022, 5023, + 5024, 421, 148, 148, 151, 5020, 5002, 3371, + 5338, 5339, 5340, 674, 302, 303, 304, 305, + 306, 307, 5341, 2214, 5342, 2216, 5343, 5344, + 5345, 4500, 122, 124, 4501, 4502, 4503, 4504, + 4505, 4506, 4507, 4508, 4509, 4510, 4511, 4512, + 4513, 4514, 4515, 4516, 4517, 4518, 3372, 151, + 4499, 5346, 5347, 317, 318, 319, 5348, 5349, + 5350, 5351, 5352, 5353, 5355, 5356, 5357, 5358, + 4500, 122, 124, 4501, 4502, 4503, 4504, 4505, + 4506, 4507, 4508, 4509, 4510, 4511, 4512, 4513, + 4514, 4515, 4516, 4517, 4518, 5354, 4499, 5360, + 4449, 4451, 5361, 5362, 5363, 5364, 5365, 5366, + 5367, 5368, 5369, 5370, 5371, 5372, 5373, 5374, + 5375, 5376, 5377, 5378, 5359, +} + +var _s_trans_targs []int16 = []int16{ + 4862, 4863, 4862, 14, 15, 16, 17, 18, + 19, 20, 21, 22, 23, 24, 25, 26, + 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 43, + 44, 45, 46, 47, 48, 49, 50, 51, + 52, 53, 54, 55, 56, 57, 58, 59, + 60, 61, 62, 63, 65, 66, 67, 68, + 69, 70, 72, 73, 75, 76, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 89, 91, 92, 93, 95, 101, + 120, 125, 127, 134, 96, 97, 98, 99, + 100, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 121, 122, 123, 124, 126, + 128, 129, 130, 131, 132, 133, 135, 137, + 138, 139, 1, 140, 2, 4862, 4866, 1901, + 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, + 1910, 1911, 1912, 
1913, 1914, 1943, 1966, 1973, + 1976, 1993, 1997, 2041, 4867, 143, 144, 145, + 146, 147, 148, 149, 150, 151, 152, 153, + 154, 155, 156, 157, 158, 159, 160, 161, + 162, 163, 164, 165, 198, 238, 256, 261, + 286, 287, 290, 306, 407, 142, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, + 192, 193, 194, 195, 196, 197, 199, 200, + 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 214, 215, 216, + 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, + 233, 234, 235, 236, 237, 239, 240, 241, + 242, 243, 244, 245, 246, 247, 248, 249, + 250, 251, 252, 253, 254, 255, 257, 258, + 259, 260, 262, 263, 264, 265, 266, 267, + 268, 269, 270, 271, 272, 273, 274, 275, + 276, 277, 278, 279, 280, 281, 282, 283, + 284, 285, 288, 289, 291, 292, 293, 294, + 295, 296, 297, 298, 299, 300, 301, 302, + 303, 304, 305, 307, 338, 362, 366, 367, + 369, 377, 380, 398, 403, 308, 309, 310, + 311, 312, 313, 314, 315, 316, 317, 318, + 319, 320, 321, 322, 323, 324, 325, 326, + 327, 328, 329, 330, 331, 332, 333, 334, + 335, 336, 337, 339, 340, 341, 342, 343, + 344, 345, 346, 347, 348, 349, 350, 351, + 352, 353, 354, 355, 356, 357, 358, 359, + 360, 361, 363, 364, 365, 368, 370, 371, + 372, 373, 374, 375, 376, 378, 379, 381, + 382, 383, 384, 385, 386, 387, 388, 389, + 390, 391, 392, 393, 394, 395, 396, 397, + 399, 400, 401, 402, 404, 405, 406, 408, + 409, 410, 411, 412, 4862, 4868, 414, 415, + 416, 417, 418, 419, 420, 421, 422, 423, + 424, 425, 426, 427, 456, 481, 488, 491, + 508, 512, 557, 413, 428, 429, 430, 431, + 432, 433, 434, 435, 436, 437, 438, 439, + 440, 441, 442, 443, 444, 445, 446, 447, + 448, 449, 450, 451, 452, 453, 454, 455, + 457, 458, 459, 460, 461, 462, 463, 464, + 465, 466, 467, 468, 469, 470, 471, 472, + 473, 474, 475, 476, 477, 478, 479, 480, + 482, 483, 484, 485, 486, 487, 489, 490, + 492, 493, 494, 495, 496, 497, 498, 499, + 500, 501, 502, 503, 504, 
505, 506, 507, + 509, 510, 511, 513, 519, 540, 545, 547, + 555, 514, 515, 516, 517, 518, 520, 521, + 522, 523, 524, 525, 526, 527, 528, 529, + 530, 531, 532, 533, 534, 535, 536, 537, + 538, 539, 541, 542, 543, 544, 546, 548, + 549, 550, 551, 552, 553, 554, 556, 558, + 559, 560, 561, 4869, 4870, 569, 570, 571, + 572, 573, 574, 733, 734, 735, 736, 737, + 738, 739, 740, 769, 791, 798, 801, 817, + 822, 864, 568, 4871, 589, 590, 591, 592, + 593, 594, 595, 596, 597, 598, 599, 600, + 601, 602, 603, 604, 605, 606, 607, 608, + 609, 610, 611, 612, 613, 614, 615, 616, + 617, 618, 619, 621, 622, 623, 624, 625, + 626, 627, 628, 629, 630, 631, 632, 633, + 634, 635, 636, 637, 638, 639, 640, 641, + 642, 644, 645, 646, 647, 648, 649, 651, + 652, 654, 655, 656, 657, 658, 659, 660, + 661, 662, 663, 664, 665, 666, 667, 668, + 669, 671, 672, 673, 674, 675, 676, 677, + 678, 680, 687, 708, 715, 717, 725, 681, + 682, 683, 684, 685, 686, 688, 689, 690, + 691, 692, 693, 694, 695, 696, 697, 698, + 699, 700, 701, 702, 703, 704, 705, 706, + 707, 709, 710, 711, 712, 713, 714, 716, + 718, 719, 720, 721, 722, 723, 724, 726, + 728, 729, 730, 576, 731, 732, 741, 742, + 743, 744, 745, 746, 747, 748, 749, 750, + 751, 752, 753, 754, 755, 756, 757, 758, + 759, 760, 761, 762, 763, 764, 765, 766, + 767, 768, 770, 771, 772, 773, 774, 775, + 776, 777, 778, 779, 780, 781, 782, 783, + 784, 785, 786, 787, 788, 789, 790, 792, + 793, 794, 795, 796, 797, 799, 800, 802, + 803, 804, 805, 806, 807, 808, 809, 810, + 811, 812, 813, 814, 815, 816, 818, 819, + 820, 821, 823, 829, 848, 853, 855, 862, + 824, 825, 826, 827, 828, 830, 831, 832, + 833, 834, 835, 836, 837, 838, 839, 840, + 841, 842, 843, 844, 845, 846, 847, 849, + 850, 851, 852, 854, 856, 857, 858, 859, + 860, 861, 863, 865, 866, 867, 868, 885, + 886, 887, 888, 889, 890, 891, 892, 893, + 894, 895, 896, 897, 898, 899, 900, 901, + 902, 903, 904, 905, 906, 907, 908, 909, + 910, 911, 912, 913, 914, 915, 917, 918, + 919, 920, 921, 922, 923, 924, 925, 926, + 927, 
928, 929, 930, 931, 932, 933, 934, + 935, 936, 937, 939, 940, 941, 942, 943, + 944, 946, 947, 949, 950, 951, 952, 953, + 954, 955, 956, 957, 958, 959, 960, 961, + 962, 963, 965, 966, 967, 968, 969, 970, + 971, 973, 979, 998, 1003, 1005, 1012, 974, + 975, 976, 977, 978, 980, 981, 982, 983, + 984, 985, 986, 987, 988, 989, 990, 991, + 992, 993, 994, 995, 996, 997, 999, 1000, + 1001, 1002, 1004, 1006, 1007, 1008, 1009, 1010, + 1011, 1013, 1015, 1016, 1017, 871, 1018, 1019, + 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, + 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, + 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, + 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1060, + 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, + 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, + 1077, 1078, 1079, 1080, 1082, 1083, 1084, 1085, + 1086, 1087, 1089, 1240, 1164, 1165, 1166, 1091, + 1167, 4872, 1104, 1105, 1106, 1107, 1108, 1109, + 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, + 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, + 1126, 1127, 1128, 1129, 1130, 1131, 1133, 1134, + 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, + 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, + 1151, 1152, 1153, 1155, 1156, 1157, 1158, 1159, + 1160, 1162, 1163, 1169, 1170, 1171, 1172, 1173, + 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181, + 1182, 1183, 1185, 1186, 1187, 1188, 1189, 1190, + 1191, 1193, 1199, 1218, 1223, 1226, 1233, 1194, + 1195, 1196, 1197, 1198, 1200, 1201, 1202, 1203, + 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, + 1212, 1213, 1214, 1215, 1216, 1217, 1219, 1220, + 1221, 1222, 1224, 1225, 1227, 1228, 1229, 1230, + 1231, 1232, 1234, 1236, 1237, 1238, 1239, 1092, + 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, + 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1258, + 1259, 1260, 1261, 1262, 1263, 1265, 1271, 1290, + 1295, 1297, 1304, 1266, 1267, 1268, 1269, 1270, + 1272, 1273, 1274, 1275, 1276, 1277, 1278, 1279, + 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1287, + 1288, 1289, 1291, 1292, 
1293, 1294, 1296, 1298, + 1299, 1300, 1301, 1302, 1303, 1305, 1307, 1308, + 1309, 563, 1310, 1311, 1326, 1327, 1328, 1329, + 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, + 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, + 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, + 1354, 1355, 1356, 1358, 1359, 1360, 1361, 1362, + 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370, + 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, + 1380, 1381, 1382, 1383, 1384, 1385, 1387, 1388, + 1390, 1391, 1392, 1393, 1394, 1395, 1396, 1397, + 1398, 1399, 1400, 1401, 1402, 1403, 1404, 1406, + 1407, 1408, 1409, 1410, 1411, 1412, 1414, 1420, + 1439, 1444, 1446, 1453, 1415, 1416, 1417, 1418, + 1419, 1421, 1422, 1423, 1424, 1425, 1426, 1427, + 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, + 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1445, + 1447, 1448, 1449, 1450, 1451, 1452, 1454, 1456, + 1457, 1458, 1313, 1459, 1460, 1473, 1474, 1475, + 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, + 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, + 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, + 1500, 1501, 1502, 1504, 1505, 1506, 1507, 1508, + 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, + 1517, 1518, 1519, 1520, 1521, 1522, 1524, 1525, + 1526, 1527, 1528, 1529, 1531, 1532, 1534, 1535, + 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, + 1544, 1546, 1547, 1548, 1549, 1550, 1552, 1558, + 1573, 1577, 1579, 1586, 1553, 1554, 1555, 1556, + 1557, 1559, 1560, 1561, 1562, 1563, 1564, 1565, + 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1574, + 1575, 1576, 1578, 1580, 1581, 1582, 1583, 1584, + 1585, 1587, 1589, 1590, 1591, 4873, 1606, 1607, + 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, + 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, + 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, + 1632, 1633, 1634, 1635, 1636, 1638, 1639, 1640, + 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, + 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, + 1657, 1658, 1659, 1660, 1661, 1663, 1664, 1665, + 1666, 1667, 1668, 1670, 
1671, 1673, 1674, 1675, + 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, + 1684, 1685, 1686, 1687, 1688, 1690, 1691, 1692, + 1693, 1694, 1695, 1696, 1698, 1705, 1727, 1734, + 1736, 1744, 1699, 1700, 1701, 1702, 1703, 1704, + 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, + 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, + 1722, 1723, 1724, 1725, 1726, 1728, 1729, 1730, + 1731, 1732, 1733, 1735, 1737, 1738, 1739, 1740, + 1741, 1742, 1743, 1745, 1747, 1748, 1749, 1593, + 1750, 1751, 141, 1766, 1767, 1768, 1769, 1770, + 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, + 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, + 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, + 1795, 1796, 1798, 1799, 1800, 1801, 1802, 1803, + 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, + 1812, 1813, 1814, 1815, 1816, 1817, 1818, 1820, + 1821, 1822, 1823, 1824, 1825, 1827, 1828, 1830, + 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, + 1839, 1840, 1841, 1842, 1843, 1844, 1846, 1847, + 1848, 1849, 1850, 1851, 1852, 1854, 1860, 1879, + 1884, 1886, 1893, 1855, 1856, 1857, 1858, 1859, + 1861, 1862, 1863, 1864, 1865, 1866, 1867, 1868, + 1869, 1870, 1871, 1872, 1873, 1874, 1875, 1876, + 1877, 1878, 1880, 1881, 1882, 1883, 1885, 1887, + 1888, 1889, 1890, 1891, 1892, 1894, 1896, 1897, + 1898, 1753, 1899, 1900, 1915, 1916, 1917, 1918, + 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, + 1927, 1928, 1929, 1930, 1931, 1932, 1933, 1934, + 1935, 1936, 1937, 1938, 1939, 1940, 1941, 1942, + 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, + 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1959, + 1960, 1961, 1962, 1963, 1964, 1965, 1967, 1968, + 1969, 1970, 1971, 1972, 1974, 1975, 1977, 1978, + 1979, 1980, 1981, 1982, 1983, 1984, 1985, 1986, + 1987, 1988, 1989, 1990, 1991, 1992, 1994, 1995, + 1996, 1998, 2004, 2024, 2029, 2031, 2039, 1999, + 2000, 2001, 2002, 2003, 2005, 2006, 2007, 2008, + 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, + 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2025, + 2026, 2027, 2028, 2030, 
2032, 2033, 2034, 2035, + 2036, 2037, 2038, 2040, 2042, 2043, 2044, 2045, + 4865, 2060, 2061, 2062, 2063, 2064, 2065, 2066, + 2067, 2068, 2069, 2070, 2071, 2072, 2073, 2074, + 2075, 2076, 2077, 2078, 2079, 2080, 2081, 2082, + 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, + 2092, 2093, 2094, 2095, 2096, 2097, 2098, 2099, + 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, + 2108, 2109, 2110, 2111, 2112, 2114, 2115, 2116, + 2117, 2118, 2119, 2121, 2122, 2124, 2125, 2126, + 2127, 2128, 2129, 2130, 2131, 2132, 2133, 2134, + 2135, 2136, 2137, 2138, 2140, 2141, 2142, 2143, + 2144, 2146, 2152, 2171, 2176, 2178, 2185, 2147, + 2148, 2149, 2150, 2151, 2153, 2154, 2155, 2156, + 2157, 2158, 2159, 2160, 2161, 2162, 2163, 2164, + 2165, 2166, 2167, 2168, 2169, 2170, 2172, 2173, + 2174, 2175, 2177, 2179, 2180, 2181, 2182, 2183, + 2184, 2186, 2188, 2189, 2190, 2047, 2191, 2192, + 4874, 2207, 2208, 2209, 2210, 2211, 2212, 2213, + 2214, 2215, 2216, 2217, 2218, 2219, 2220, 2221, + 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, + 2230, 2231, 2232, 2233, 2234, 2235, 2236, 2237, + 2239, 2240, 2241, 2242, 2243, 2244, 2245, 2246, + 2247, 2248, 2249, 2250, 2251, 2252, 2253, 2254, + 2255, 2256, 2257, 2258, 2259, 2261, 2262, 2263, + 2264, 2265, 2266, 2268, 2269, 2271, 2272, 2273, + 2274, 2275, 2276, 2277, 2278, 2279, 2280, 2281, + 2282, 2283, 2284, 2285, 2287, 2288, 2289, 2290, + 2291, 2293, 2299, 2318, 2323, 2325, 2332, 2294, + 2295, 2296, 2297, 2298, 2300, 2301, 2302, 2303, + 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, + 2312, 2313, 2314, 2315, 2316, 2317, 2319, 2320, + 2321, 2322, 2324, 2326, 2327, 2328, 2329, 2330, + 2331, 2333, 2335, 2336, 2337, 2194, 2338, 2339, + 4876, 2354, 2355, 2356, 2357, 2358, 2359, 2360, + 2361, 2362, 2363, 2364, 2365, 2366, 2367, 2368, + 2369, 2370, 2371, 2372, 2373, 2374, 2375, 2376, + 2377, 2378, 2379, 2380, 2381, 2382, 2383, 2384, + 2386, 2387, 2388, 2389, 2390, 2391, 2392, 2393, + 2394, 2395, 2396, 2397, 2398, 2399, 2400, 2401, + 2402, 2403, 2404, 2405, 
2406, 2408, 2409, 2410, + 2411, 2412, 2413, 2415, 2416, 2418, 2419, 2420, + 2421, 2422, 2423, 2424, 2425, 2426, 2427, 2428, + 2429, 2430, 2431, 2432, 2434, 2435, 2436, 2437, + 2438, 2439, 2440, 2442, 2448, 2467, 2472, 2474, + 2481, 2443, 2444, 2445, 2446, 2447, 2449, 2450, + 2451, 2452, 2453, 2454, 2455, 2456, 2457, 2458, + 2459, 2460, 2461, 2462, 2463, 2464, 2465, 2466, + 2468, 2469, 2470, 2471, 2473, 2475, 2476, 2477, + 2478, 2479, 2480, 2482, 2484, 2485, 2486, 2341, + 2487, 2488, 4875, 2503, 2504, 2505, 2506, 2507, + 2508, 2509, 2510, 2511, 2512, 2513, 2514, 2515, + 2516, 2517, 2518, 2519, 2520, 2521, 2522, 2523, + 2524, 2525, 2526, 2527, 2528, 2529, 2530, 2531, + 2532, 2533, 2535, 2536, 2537, 2538, 2539, 2540, + 2541, 2542, 2543, 2544, 2545, 2546, 2547, 2548, + 2549, 2550, 2551, 2552, 2553, 2554, 2555, 2557, + 2558, 2559, 2560, 2561, 2562, 2564, 2565, 2567, + 2568, 2569, 2570, 2571, 2572, 2573, 2574, 2575, + 2576, 2577, 2578, 2579, 2580, 2581, 2583, 2584, + 2585, 2586, 2587, 2589, 2595, 2614, 2619, 2621, + 2628, 2590, 2591, 2592, 2593, 2594, 2596, 2597, + 2598, 2599, 2600, 2601, 2602, 2603, 2604, 2605, + 2606, 2607, 2608, 2609, 2610, 2611, 2612, 2613, + 2615, 2616, 2617, 2618, 2620, 2622, 2623, 2624, + 2625, 2626, 2627, 2629, 2631, 2632, 2633, 2490, + 2634, 2635, 4862, 4878, 2650, 2651, 2652, 2653, + 2654, 2655, 2656, 2657, 2658, 2659, 2660, 2661, + 2662, 2663, 2664, 2665, 2666, 2667, 2668, 2669, + 2670, 2671, 2672, 2673, 2674, 2675, 2676, 2677, + 2679, 2680, 2681, 2682, 2683, 2684, 2685, 2686, + 2687, 2688, 2689, 2690, 2691, 2692, 2693, 2694, + 2695, 2696, 2697, 2698, 2699, 2701, 2702, 2703, + 2704, 2705, 2706, 2708, 2709, 2711, 2712, 2713, + 2714, 2715, 2716, 2717, 2718, 2719, 2720, 2721, + 2722, 2723, 2724, 2725, 2727, 2728, 2729, 2731, + 2737, 2756, 2761, 2763, 2770, 2732, 2733, 2734, + 2735, 2736, 2738, 2739, 2740, 2741, 2742, 2743, + 2744, 2745, 2746, 2747, 2748, 2749, 2750, 2751, + 2752, 2753, 2754, 2755, 2757, 2758, 2759, 2760, + 2762, 2764, 2765, 2766, 
2767, 2768, 2769, 2771, + 2773, 2774, 2775, 2637, 2776, 2638, 4885, 2791, + 2792, 2793, 2794, 2795, 2796, 2797, 2798, 2799, + 2800, 2801, 2802, 2803, 2804, 2805, 2806, 2807, + 2808, 2809, 2810, 2811, 2812, 2813, 2814, 2815, + 2816, 2817, 2818, 2819, 2820, 2821, 2823, 2824, + 2825, 2826, 2827, 2828, 2829, 2830, 2831, 2832, + 2833, 2834, 2835, 2836, 2837, 2838, 2839, 2840, + 2841, 2842, 2843, 2845, 2846, 2847, 2848, 2849, + 2850, 2852, 2853, 2855, 2856, 2857, 2858, 2859, + 2860, 2861, 2862, 2863, 2864, 2865, 2866, 2867, + 2868, 2869, 2871, 2872, 2873, 2874, 2875, 2877, + 2883, 2902, 2907, 2909, 2916, 2878, 2879, 2880, + 2881, 2882, 2884, 2885, 2886, 2887, 2888, 2889, + 2890, 2891, 2892, 2893, 2894, 2895, 2896, 2897, + 2898, 2899, 2900, 2901, 2903, 2904, 2905, 2906, + 2908, 2910, 2911, 2912, 2913, 2914, 2915, 2917, + 2919, 2920, 2921, 2778, 2922, 2923, 4886, 2938, + 2939, 2940, 2941, 2942, 2943, 2944, 2945, 2946, + 2947, 2948, 2949, 2950, 2951, 2952, 2953, 2954, + 2955, 2956, 2957, 2958, 2959, 2960, 2961, 2962, + 2963, 2964, 2965, 2966, 2967, 2968, 2970, 2971, + 2972, 2973, 2974, 2975, 2976, 2977, 2978, 2979, + 2980, 2981, 2982, 2983, 2984, 2985, 2986, 2987, + 2988, 2989, 2990, 2992, 2993, 2994, 2995, 2996, + 2997, 2999, 3000, 3002, 3003, 3004, 3005, 3006, + 3007, 3008, 3009, 3010, 3011, 3012, 3013, 3014, + 3015, 3016, 3018, 3019, 3020, 3021, 3022, 3024, + 3030, 3049, 3054, 3056, 3063, 3025, 3026, 3027, + 3028, 3029, 3031, 3032, 3033, 3034, 3035, 3036, + 3037, 3038, 3039, 3040, 3041, 3042, 3043, 3044, + 3045, 3046, 3047, 3048, 3050, 3051, 3052, 3053, + 3055, 3057, 3058, 3059, 3060, 3061, 3062, 3064, + 3066, 3067, 3068, 2925, 3069, 3070, 4887, 3085, + 3086, 3087, 3088, 3089, 3090, 3091, 3092, 3093, + 3094, 3095, 3096, 3097, 3098, 3099, 3100, 3101, + 3102, 3103, 3104, 3105, 3106, 3107, 3108, 3109, + 3110, 3111, 3112, 3113, 3114, 3115, 3117, 3118, + 3119, 3120, 3121, 3122, 3123, 3124, 3125, 3126, + 3127, 3128, 3129, 3130, 3131, 3132, 3133, 3134, + 3135, 3136, 3137, 3139, 
3140, 3141, 3142, 3143, + 3144, 3146, 3147, 3149, 3150, 3151, 3152, 3153, + 3154, 3155, 3156, 3157, 3158, 3159, 3160, 3161, + 3162, 3163, 3165, 3166, 3167, 3168, 3169, 3171, + 3177, 3196, 3201, 3203, 3210, 3172, 3173, 3174, + 3175, 3176, 3178, 3179, 3180, 3181, 3182, 3183, + 3184, 3185, 3186, 3187, 3188, 3189, 3190, 3191, + 3192, 3193, 3194, 3195, 3197, 3198, 3199, 3200, + 3202, 3204, 3205, 3206, 3207, 3208, 3209, 3211, + 3213, 3214, 3215, 3072, 3216, 3217, 4929, 4930, + 4931, 4991, 4992, 4993, 4994, 4995, 4996, 4997, + 4998, 4999, 5000, 5001, 5002, 5003, 5004, 5005, + 5006, 5007, 5008, 5009, 5010, 5011, 5012, 5013, + 5014, 5015, 4933, 4934, 4935, 4936, 4937, 4938, + 4939, 4944, 4945, 4946, 4947, 4948, 4949, 4950, + 4951, 4952, 4953, 4954, 4955, 4956, 4957, 4958, + 4959, 4960, 4961, 4962, 4963, 4964, 4965, 4966, + 4967, 4968, 4969, 4970, 4971, 4972, 4973, 4974, + 4975, 4976, 4977, 4978, 4979, 4980, 4981, 4982, + 4983, 4984, 4985, 4986, 4987, 4988, 4989, 4990, + 5073, 4862, 3283, 3284, 3285, 3286, 3287, 3288, + 3289, 3290, 3291, 3292, 3293, 3294, 3295, 3296, + 3297, 3298, 3299, 3300, 3301, 3302, 3303, 3304, + 3305, 3306, 3307, 3308, 3309, 3310, 3311, 3312, + 3313, 3315, 3316, 3317, 3268, 3318, 3319, 3320, + 3321, 3322, 3323, 3324, 3325, 3326, 3327, 3328, + 3329, 3330, 3331, 3332, 3333, 3334, 3335, 3336, + 3337, 3339, 3340, 3341, 3342, 3343, 3344, 3346, + 3347, 3348, 3349, 3350, 3429, 5074, 3365, 3366, + 3367, 3368, 3369, 3370, 3371, 3372, 3373, 3374, + 3375, 3376, 3377, 3378, 3379, 3380, 3381, 3382, + 3383, 3384, 3385, 3386, 3387, 3388, 3389, 3390, + 3391, 3392, 3394, 3395, 3396, 3352, 3397, 3398, + 3399, 3400, 3401, 3402, 3403, 3404, 3405, 3406, + 3407, 3408, 3409, 3410, 3411, 3412, 3413, 3414, + 3415, 3416, 3418, 3419, 3420, 3421, 3422, 3423, + 3425, 3426, 3427, 3428, 3431, 3432, 3433, 3434, + 3435, 3436, 3437, 3438, 3439, 3440, 3441, 3442, + 3443, 3444, 3445, 3448, 3449, 3451, 3452, 3453, + 3454, 3456, 3462, 3481, 3486, 3488, 3495, 3457, + 3458, 3459, 3460, 3461, 
3463, 3464, 3465, 3466, + 3467, 3468, 3469, 3470, 3471, 3472, 3473, 3474, + 3475, 3476, 3477, 3478, 3479, 3480, 3482, 3483, + 3484, 3485, 3487, 3489, 3490, 3491, 3492, 3493, + 3494, 3496, 3498, 3499, 3353, 3501, 3502, 3503, + 3504, 3505, 3506, 3507, 3508, 3509, 3510, 3511, + 3512, 3513, 3514, 3515, 3270, 3518, 3519, 3521, + 3522, 3523, 3524, 3525, 3527, 3533, 3552, 3557, + 3559, 3566, 3528, 3529, 3530, 3531, 3532, 3534, + 3535, 3536, 3537, 3538, 3539, 3540, 3541, 3542, + 3543, 3544, 3545, 3546, 3547, 3548, 3549, 3550, + 3551, 3553, 3554, 3555, 3556, 3558, 3560, 3561, + 3562, 3563, 3564, 3565, 3567, 3569, 3570, 3571, + 3572, 4862, 5076, 4862, 3608, 3609, 3610, 3611, + 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, + 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, + 3628, 3629, 3630, 3631, 3632, 3633, 3634, 3635, + 3637, 3638, 3639, 3640, 3641, 3642, 3643, 3644, + 3645, 3646, 3647, 3648, 3649, 3650, 3651, 3652, + 3653, 3654, 3655, 3656, 3657, 3659, 3660, 3661, + 3662, 3663, 3664, 3666, 3667, 3669, 3670, 3671, + 3672, 3673, 3674, 3675, 3676, 3677, 3678, 3679, + 3680, 3681, 3682, 3683, 3685, 3686, 3687, 3689, + 3695, 3714, 3719, 3721, 3728, 3690, 3691, 3692, + 3693, 3694, 3696, 3697, 3698, 3699, 3700, 3701, + 3702, 3703, 3704, 3705, 3706, 3707, 3708, 3709, + 3710, 3711, 3712, 3713, 3715, 3716, 3717, 3718, + 3720, 3722, 3723, 3724, 3725, 3726, 3727, 3729, + 3731, 3732, 3733, 3595, 3734, 3596, 5078, 5079, + 5080, 3752, 3753, 3754, 3755, 3756, 3757, 3758, + 3759, 3760, 3761, 3762, 3763, 3764, 3765, 3766, + 3767, 3768, 3769, 3770, 3771, 3772, 3773, 3774, + 3775, 3776, 3777, 3778, 3779, 3780, 3781, 3782, + 3784, 3785, 3786, 3787, 3788, 3789, 3790, 3791, + 3792, 3793, 3794, 3795, 3796, 3797, 3798, 3799, + 3800, 3801, 3802, 3803, 3804, 3806, 3807, 3808, + 3809, 3810, 3811, 3813, 3814, 3816, 3817, 3818, + 3819, 3820, 3821, 3822, 3823, 3824, 3825, 3826, + 3827, 3828, 3829, 3830, 3832, 3833, 3834, 3835, + 3836, 3838, 3844, 3863, 3868, 3870, 3877, 3839, + 3840, 3841, 3842, 3843, 
3845, 3846, 3847, 3848, + 3849, 3850, 3851, 3852, 3853, 3854, 3855, 3856, + 3857, 3858, 3859, 3860, 3861, 3862, 3864, 3865, + 3866, 3867, 3869, 3871, 3872, 3873, 3874, 3875, + 3876, 3878, 3880, 3881, 3882, 3739, 3883, 3884, + 3899, 3900, 3901, 3902, 3903, 3904, 3905, 3906, + 3907, 3908, 3909, 3910, 3911, 3912, 3913, 3914, + 3915, 3916, 3917, 3918, 3919, 3920, 3921, 3922, + 3923, 3924, 3925, 3926, 3928, 3929, 3930, 3931, + 3932, 3933, 3934, 3935, 3936, 3937, 3938, 3939, + 3940, 3941, 3942, 3943, 3944, 3945, 3946, 3947, + 3948, 3950, 3951, 3952, 3953, 3954, 3955, 3957, + 3958, 3960, 3961, 3962, 3963, 3964, 3965, 3966, + 3967, 3968, 3969, 3970, 3971, 3972, 3973, 3974, + 3976, 3977, 3978, 3979, 3981, 3987, 4006, 4011, + 4013, 4020, 3982, 3983, 3984, 3985, 3986, 3988, + 3989, 3990, 3991, 3992, 3993, 3994, 3995, 3996, + 3997, 3998, 3999, 4000, 4001, 4002, 4003, 4004, + 4005, 4007, 4008, 4009, 4010, 4012, 4014, 4015, + 4016, 4017, 4018, 4019, 4021, 4023, 4024, 4025, + 3886, 4026, 3887, 4862, 4041, 4042, 4043, 4044, + 4045, 4046, 4047, 4048, 4049, 4050, 4051, 4052, + 4053, 4054, 4055, 4056, 4057, 4058, 4059, 4060, + 4061, 4062, 4063, 4064, 4065, 4066, 4067, 4068, + 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, + 4078, 4079, 4080, 4081, 4082, 4083, 4084, 4085, + 4086, 4087, 4088, 4089, 4090, 4092, 4093, 4094, + 4095, 4096, 4097, 4099, 4100, 4101, 4102, 4103, + 4028, 4104, 4106, 4107, 4108, 4109, 4110, 4111, + 4112, 4113, 4114, 4115, 4116, 4117, 4118, 4119, + 4120, 4122, 4123, 4124, 4125, 4126, 4128, 4134, + 4153, 4158, 4161, 4168, 4129, 4130, 4131, 4132, + 4133, 4135, 4136, 4137, 4138, 4139, 4140, 4141, + 4142, 4143, 4144, 4145, 4146, 4147, 4148, 4149, + 4150, 4151, 4152, 4154, 4155, 4156, 4157, 4159, + 4160, 4162, 4163, 4164, 4165, 4166, 4167, 4169, + 4171, 4172, 4173, 4174, 4029, 5081, 4862, 4190, + 4191, 4192, 4193, 4194, 4195, 4196, 4197, 4198, + 4199, 4200, 4201, 4202, 4203, 4204, 4205, 4206, + 4207, 4208, 4209, 4210, 4211, 4212, 4213, 4214, + 4215, 4216, 4217, 4219, 
4220, 4221, 4222, 4223, + 4224, 4225, 4226, 4227, 4228, 4229, 4230, 4231, + 4232, 4233, 4234, 4235, 4236, 4237, 4238, 4239, + 4241, 4242, 4243, 4244, 4245, 4246, 4248, 4249, + 4251, 4252, 4253, 4254, 4255, 4256, 4257, 4258, + 4259, 4260, 4261, 4262, 4263, 4264, 4265, 4267, + 4268, 4269, 4271, 4277, 4296, 4301, 4303, 4310, + 4272, 4273, 4274, 4275, 4276, 4278, 4279, 4280, + 4281, 4282, 4283, 4284, 4285, 4286, 4287, 4288, + 4289, 4290, 4291, 4292, 4293, 4294, 4295, 4297, + 4298, 4299, 4300, 4302, 4304, 4305, 4306, 4307, + 4308, 4309, 4311, 4313, 4314, 4315, 4177, 4316, + 4178, 5082, 4333, 4334, 4335, 4336, 4337, 4338, + 4339, 4340, 4341, 4342, 4343, 4344, 4345, 4346, + 4347, 4348, 4349, 4350, 4351, 4352, 4353, 4354, + 4355, 4356, 4357, 4358, 4359, 4360, 4362, 4363, + 4364, 4365, 4366, 4367, 4368, 4369, 4370, 4371, + 4372, 4373, 4374, 4375, 4376, 4377, 4378, 4379, + 4380, 4381, 4382, 4384, 4385, 4386, 4387, 4388, + 4389, 4391, 4392, 4394, 4395, 4396, 4397, 4398, + 4399, 4400, 4401, 4402, 4403, 4404, 4405, 4406, + 4407, 4408, 4410, 4411, 4412, 4414, 4420, 4439, + 4444, 4446, 4453, 4415, 4416, 4417, 4418, 4419, + 4421, 4422, 4423, 4424, 4425, 4426, 4427, 4428, + 4429, 4430, 4431, 4432, 4433, 4434, 4435, 4436, + 4437, 4438, 4440, 4441, 4442, 4443, 4445, 4447, + 4448, 4449, 4450, 4451, 4452, 4454, 4456, 4457, + 4458, 4320, 4459, 4321, 5083, 5084, 5085, 5128, + 5129, 5130, 5131, 5132, 5133, 5134, 5135, 5136, + 5137, 5138, 5139, 5140, 5141, 5142, 5143, 5144, + 5145, 5146, 5147, 5148, 5149, 5150, 5151, 5152, + 5153, 5154, 5155, 5156, 5157, 5158, 5159, 5160, + 5161, 5162, 5163, 5164, 5165, 5166, 5167, 5168, + 5169, 5170, 5171, 5172, 5173, 5174, 5175, 5176, + 5177, 5178, 5179, 5180, 5181, 5182, 5183, 5184, + 5185, 5186, 5187, 5188, 5189, 5190, 5191, 5192, + 5193, 5194, 5195, 5196, 5197, 5198, 5199, 5200, + 5201, 5202, 5203, 5204, 5205, 5206, 5211, 4510, + 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, + 4519, 4520, 4521, 4522, 4523, 4524, 4525, 4526, + 4527, 4528, 4529, 4530, 
4531, 4532, 4533, 4534, + 4535, 4536, 4537, 4538, 4539, 4540, 4542, 4543, + 4544, 4545, 4546, 4547, 4548, 4549, 4550, 4551, + 4552, 4553, 4554, 4555, 4556, 4557, 4558, 4559, + 4560, 4561, 4562, 4564, 4565, 4566, 4567, 4568, + 4569, 4571, 4572, 4574, 4575, 4576, 4577, 4578, + 4579, 4580, 4581, 4582, 4583, 4584, 4585, 4586, + 4587, 4588, 4590, 4591, 4592, 4593, 4594, 4596, + 4602, 4621, 4626, 4628, 4635, 4597, 4598, 4599, + 4600, 4601, 4603, 4604, 4605, 4606, 4607, 4608, + 4609, 4610, 4611, 4612, 4613, 4614, 4615, 4616, + 4617, 4618, 4619, 4620, 4622, 4623, 4624, 4625, + 4627, 4629, 4630, 4631, 4632, 4633, 4634, 4636, + 4638, 4639, 4640, 4497, 4641, 4642, 5212, 5213, + 5214, 5313, 5314, 5315, 5316, 5317, 5318, 5319, + 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5327, + 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, + 5336, 5337, 5243, 5244, 5245, 5246, 5247, 5248, + 5249, 5288, 5289, 5290, 5291, 5292, 5293, 5294, + 5295, 5296, 5297, 5298, 5299, 5300, 5301, 5251, + 5252, 5253, 5254, 5255, 5256, 5257, 5258, 5259, + 5260, 5261, 5262, 5263, 5268, 5269, 5270, 5271, + 5272, 5273, 5274, 5275, 5276, 5277, 5278, 5279, + 5280, 5281, 5282, 5283, 5284, 5285, 5286, 5287, + 4672, 4673, 4674, 4675, 4677, 4678, 4679, 4680, + 4681, 4682, 4683, 4684, 4685, 4686, 4687, 4688, + 4689, 4690, 4691, 4692, 4693, 4694, 4696, 4697, + 4698, 4699, 4701, 4702, 4705, 4707, 4850, 5340, + 4862, 4722, 4723, 4724, 4725, 4726, 4727, 4728, + 4729, 4730, 4731, 4732, 4733, 4734, 4735, 4736, + 4737, 4738, 4739, 4740, 4741, 4742, 4743, 4744, + 4745, 4746, 4747, 4748, 4749, 4751, 4752, 4753, + 4754, 4755, 4756, 4757, 4758, 4759, 4760, 4761, + 4762, 4763, 4764, 4765, 4766, 4767, 4768, 4769, + 4770, 4771, 4773, 4774, 4775, 4776, 4777, 4778, + 4780, 4781, 4783, 4784, 4785, 4786, 4787, 4788, + 4789, 4790, 4791, 4792, 4793, 4794, 4795, 4796, + 4797, 4799, 4800, 4801, 4803, 4809, 4828, 4833, + 4835, 4842, 4844, 4804, 4805, 4806, 4807, 4808, + 4810, 4811, 4812, 4813, 4814, 4815, 4816, 4817, + 4818, 4819, 4820, 4821, 
4822, 4823, 4824, 4825, + 4826, 4827, 4829, 4830, 4831, 4832, 4834, 4836, + 4837, 4838, 4839, 4840, 4841, 4843, 4846, 4847, + 4848, 4709, 4849, 4710, 4854, 4856, 4857, 4859, + 4861, 4863, 4862, 4862, 4864, 4865, 4874, 4875, + 4877, 4879, 4883, 4895, 4896, 5016, 5017, 5059, + 5060, 4911, 5061, 4913, 4914, 5062, 5063, 5064, + 5065, 5066, 5067, 5068, 5069, 5070, 5071, 5072, + 5075, 5077, 5207, 5208, 5209, 5210, 5250, 5264, + 5338, 5339, 5047, 4862, 0, 3, 4, 5, + 6, 7, 8, 9, 10, 11, 12, 13, + 42, 64, 71, 74, 90, 94, 136, 4862, + 4862, 4862, 2046, 2048, 2049, 2050, 2051, 2052, + 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2091, + 2113, 2120, 2123, 2139, 2145, 2187, 1752, 1754, + 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1762, + 1763, 1764, 1765, 1797, 1819, 1826, 1829, 1845, + 1853, 1895, 4862, 1461, 870, 1462, 873, 1463, + 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, + 1472, 1503, 1523, 1530, 1533, 1545, 1551, 1588, + 1312, 1314, 1315, 1316, 1317, 1318, 1319, 1320, + 1321, 1322, 1323, 1324, 1325, 1357, 1379, 1386, + 1389, 1405, 1413, 1455, 562, 564, 565, 566, + 567, 1020, 1021, 1022, 1023, 1024, 1025, 1026, + 1027, 1059, 1081, 1088, 1241, 1257, 1264, 1306, + 869, 872, 874, 875, 876, 877, 878, 879, + 880, 881, 882, 883, 884, 916, 938, 945, + 948, 964, 972, 1014, 575, 577, 578, 579, + 580, 581, 582, 583, 584, 585, 586, 587, + 588, 620, 643, 650, 653, 670, 679, 727, + 1090, 1093, 1094, 1095, 1096, 1097, 1098, 1099, + 1100, 1101, 1102, 1103, 1132, 1154, 1161, 1168, + 1184, 1192, 1235, 1592, 1594, 1595, 1596, 1597, + 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, + 1637, 1662, 1669, 1672, 1689, 1697, 1746, 2193, + 2195, 2196, 2197, 2198, 2199, 2200, 2201, 2202, + 2203, 2204, 2205, 2206, 2238, 2260, 2267, 2270, + 2286, 2292, 2334, 2489, 2491, 2492, 2493, 2494, + 2495, 2496, 2497, 2498, 2499, 2500, 2501, 2502, + 2534, 2556, 2563, 2566, 2582, 2588, 2630, 2340, + 2342, 2343, 2344, 2345, 2346, 2347, 2348, 2349, + 2350, 2351, 2352, 2353, 2385, 2407, 2414, 2417, + 2433, 2441, 
2483, 4862, 2636, 2639, 2640, 2641, + 2642, 2643, 2644, 2645, 2646, 2647, 2648, 2649, + 2678, 2700, 2707, 2710, 2726, 2730, 2772, 4880, + 4881, 4884, 4888, 4889, 4890, 4891, 4892, 4893, + 4894, 4897, 4898, 4899, 4900, 4901, 4902, 4903, + 4904, 4905, 4906, 4907, 4882, 4879, 4883, 4895, + 4896, 4908, 4909, 4910, 4911, 4912, 4913, 4914, + 4915, 4916, 4917, 4918, 4919, 4920, 4921, 4922, + 4923, 4924, 4925, 4926, 4927, 4928, 4932, 4940, + 4941, 4942, 4943, 2777, 2779, 2780, 2781, 2782, + 2783, 2784, 2785, 2786, 2787, 2788, 2789, 2790, + 2822, 2844, 2851, 2854, 2870, 2876, 2918, 2924, + 2926, 2927, 2928, 2929, 2930, 2931, 2932, 2933, + 2934, 2935, 2936, 2937, 2969, 2991, 2998, 3001, + 3017, 3023, 3065, 3071, 3073, 3074, 3075, 3076, + 3077, 3078, 3079, 3080, 3081, 3082, 3083, 3084, + 3116, 3138, 3145, 3148, 3164, 3170, 3212, 3218, + 3219, 3220, 3221, 3222, 3223, 3224, 3225, 3226, + 3227, 3228, 3229, 3230, 3231, 3232, 3233, 3234, + 3235, 3236, 3237, 3238, 5018, 5019, 5029, 5030, + 5031, 5032, 5033, 5034, 5035, 5036, 5048, 5049, + 5050, 5051, 5052, 5053, 5054, 5055, 5056, 5057, + 5058, 5020, 5017, 5021, 5022, 5023, 5024, 5025, + 5026, 5027, 5028, 5037, 5038, 5039, 5040, 5041, + 5042, 5043, 5044, 5045, 5046, 5047, 3239, 3240, + 3241, 3242, 3243, 3244, 3245, 3246, 3247, 3248, + 3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, + 3257, 3258, 3259, 3260, 3261, 3262, 3263, 3264, + 3265, 3266, 3267, 3573, 3574, 3575, 3576, 3577, + 3578, 3579, 3580, 3581, 3582, 3583, 3584, 3585, + 3586, 3587, 4862, 3269, 3271, 3272, 3273, 3274, + 3275, 3276, 3277, 3278, 3279, 3280, 3281, 3282, + 3314, 3338, 3345, 3500, 3516, 3517, 3520, 3526, + 3568, 3351, 3354, 3355, 3356, 3357, 3358, 3359, + 3360, 3361, 3362, 3363, 3364, 3393, 3417, 3424, + 3430, 3446, 3447, 3450, 3455, 3497, 3588, 3589, + 3590, 3591, 3592, 3593, 3735, 3736, 4862, 3594, + 3597, 3598, 3599, 3600, 3601, 3602, 3603, 3604, + 3605, 3606, 3607, 3636, 3658, 3665, 3668, 3684, + 3688, 3730, 3737, 4175, 4317, 4318, 4460, 4461, + 4462, 4463, 
4464, 4465, 4466, 4467, 4468, 4469, + 4470, 4471, 4472, 4473, 4474, 4475, 4476, 4477, + 4478, 4479, 3738, 3740, 3741, 3742, 3743, 3744, + 3745, 3746, 3747, 3748, 3749, 3750, 3751, 3783, + 3805, 3812, 3815, 3831, 3837, 3879, 3885, 3888, + 3889, 3890, 3891, 3892, 3893, 3894, 3895, 3896, + 3897, 3898, 3927, 3949, 3956, 3959, 3975, 3980, + 4022, 4862, 4027, 4030, 4031, 4032, 4033, 4034, + 4035, 4036, 4037, 4038, 4039, 4040, 4069, 4091, + 4098, 4105, 4121, 4127, 4170, 4862, 4176, 4179, + 4180, 4181, 4182, 4183, 4184, 4185, 4186, 4187, + 4188, 4189, 4218, 4240, 4247, 4250, 4266, 4270, + 4312, 4319, 4322, 4323, 4324, 4325, 4326, 4327, + 4328, 4329, 4330, 4331, 4332, 4361, 4383, 4390, + 4393, 4409, 4413, 4455, 5086, 5087, 5090, 5091, + 5092, 5093, 5094, 5095, 5096, 5097, 5106, 5107, + 5108, 5109, 5110, 5111, 5112, 5113, 5114, 5115, + 5116, 5088, 5089, 5098, 5099, 5100, 5101, 5102, + 5103, 5104, 5105, 5117, 5118, 5119, 5120, 5121, + 5122, 5123, 5124, 5125, 5126, 5127, 4480, 4481, + 4482, 4483, 4484, 4485, 4486, 4487, 4488, 4489, + 4490, 4491, 4492, 4493, 4494, 4495, 4643, 4644, + 4645, 4646, 4647, 4648, 4649, 4650, 4651, 4652, + 4653, 4654, 4655, 4656, 4657, 4658, 4659, 4660, + 4661, 4662, 4496, 4498, 4499, 4500, 4501, 4502, + 4503, 4504, 4505, 4506, 4507, 4508, 4509, 4541, + 4563, 4570, 4573, 4589, 4595, 4637, 5215, 5216, + 5219, 5220, 5221, 5222, 5223, 5224, 5225, 5226, + 5235, 5236, 5237, 5238, 5239, 5240, 5241, 5242, + 5250, 5264, 5265, 5266, 5267, 5217, 5218, 5227, + 5228, 5229, 5230, 5231, 5232, 5233, 5234, 5302, + 5303, 5304, 5305, 5306, 5307, 5308, 5309, 5310, + 5311, 5312, 4663, 4664, 4665, 4666, 4667, 4668, + 4669, 4670, 4671, 4676, 4695, 4700, 4703, 4704, + 4706, 4851, 4852, 4853, 4855, 4858, 4860, 4862, + 4708, 4711, 4712, 4713, 4714, 4715, 4716, 4717, + 4718, 4719, 4720, 4721, 4750, 4772, 4779, 4782, + 4798, 4802, 4845, +} + +var _s_trans_actions []byte = []byte{ + 1, 2, 3, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 4, 5, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 6, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 7, 6, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 6, 6, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 6, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 6, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 5, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, + 5, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 6, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 6, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 6, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 8, 9, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 2, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 6, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 6, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 6, 6, + 6, 10, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 11, 12, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 11, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13, 14, 15, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 6, 11, + 16, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 17, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 18, 19, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 18, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14, 14, 10, 10, + 14, 10, 14, 14, 14, 14, 14, 14, + 14, 10, 14, 14, 10, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 10, 14, + 14, 14, 14, 14, 14, 14, 10, 14, + 14, 14, 14, 14, 14, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 14, 14, + 14, 10, 14, 14, 14, 10, 14, 14, + 14, 14, 14, 14, 14, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 11, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 11, 11, + 11, 10, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 
11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 20, + 21, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 25, 26, 27, 28, 29, 30, 30, + 25, 28, 28, 25, 28, 25, 28, 25, + 28, 28, 28, 28, 28, 25, 25, 25, + 28, 25, 28, 28, 25, 25, 25, 25, + 25, 25, 25, 25, 25, 25, 25, 25, + 25, 25, 25, 31, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 32, + 33, 34, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 35, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 36, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, + 6, 6, 6, 6, 2, 10, 10, 2, + 10, 2, 2, 2, 10, 10, 10, 10, + 2, 2, 2, 10, 2, 10, 10, 2, + 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 9, 10, 9, 9, + 9, 9, 9, 9, 9, 10, 9, 9, + 9, 9, 9, 9, 9, 9, 9, 9, + 9, 2, 10, 2, 2, 2, 2, 2, + 2, 2, 10, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 37, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 38, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 39, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 40, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 2, 10, 2, 2, + 2, 2, 2, 2, 2, 10, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, + 2, 14, 10, 14, 14, 14, 14, 14, + 14, 14, 10, 14, 14, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 2, 10, + 2, 10, 2, 2, 2, 10, 2, 10, + 10, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, 11, 11, 11, + 11, 11, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 41, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, +} + +var _s_to_state_actions []byte = []byte{ + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 22, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, +} + +var _s_from_state_actions []byte = []byte{ + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 23, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, +} + +var _s_eof_actions []byte = []byte{ + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 24, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, +} + +var _s_eof_trans []uint16 = []uint16{ + 1, 3, 3, 1, 1, 1, 1, 1, + 1, 
1, 1, 1, 1, 1, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 1, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 1, 3, 3, 3, 3, 3, 3, 1, + 3, 3, 1, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 1, 3, 3, 3, 1, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 1, 3, 3, 3, 3, 126, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 3, 3, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 3, 421, 421, 421, 421, 421, + 
421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 3, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 3, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 3, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 3, 421, 421, 421, + 421, 421, 421, 3, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 3, 421, 3, 421, 421, + 3, 421, 421, 421, 421, 421, 421, 421, + 421, 3, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 3, 421, + 421, 3, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 
421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 3, + 421, 3, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 3, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 3, 3, 3, 3, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 3, 3, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 3, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 3, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 
421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 421, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 421, 3, 3, 3, 3, + 3, 3, 421, 3, 3, 421, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 421, 3, 3, 3, 3, 3, 421, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 421, 3, 3, 3, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 3, 3, 126, 126, 126, 126, 126, + 126, 126, 126, 3, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 3, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 3, 126, + 126, 126, 126, 126, 126, 3, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 3, 126, 3, 126, + 3, 126, 126, 3, 126, 126, 126, 126, + 126, 126, 126, 126, 3, 126, 
126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 
126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 126, 126, 126, 126, 126, 126, 126, + 126, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 3, 421, 3, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, 
+ 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 2395, 3, 3, 2395, + 2395, 2395, 2395, 2395, 2395, 2395, 2395, 2395, + 2395, 2395, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 2395, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 2395, 3, 3, 3, + 3, 3, 3, 2395, 3, 3, 2395, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 2395, 3, + 3, 3, 2395, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 2395, 3, 3, 3, + 3, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 
421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 3, 2986, 3, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 2986, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 2986, 3, 3, 3, 3, 3, + 3, 2986, 3, 3, 3, 3, 3, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 
2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 3, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 2986, 2986, 3, 3, + 2986, 3, 3, 3, 3, 3, 2986, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 2986, 3, 3, 3, 3, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 3252, 3, 3, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3252, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3252, 3, 3, 3, 3, 3, + 3, 3252, 3, 3, 3252, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3252, 3, 3, 3, + 3252, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3252, 3, 3, 3, 3, 1, + 1, 1, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 
421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 421, 421, 421, + 421, 421, 421, 421, 421, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 3628, 3, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3, 3, 3, + 3, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 
3628, 3628, 3628, 3628, + 3628, 3628, 3628, 3628, 3628, 3628, 3628, 1, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 3759, 3759, 3759, + 3759, 3759, 3759, 3759, 3759, 1, 1, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 3252, 3252, 3252, 3252, + 3252, 3252, 3252, 3252, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 2986, 2986, 2986, 2986, 2986, 2986, 
2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 2986, 2986, 2986, 2986, 2986, + 2986, 2986, 2986, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 3, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 4329, 4329, 4329, 4329, 4329, 4329, + 4329, 4329, 1, 1, 1, 1, 1, 1, 
+ 1, 1, 1, 1, 1, 1, 0, 4500, + 4520, 4522, 4522, 4563, 4563, 4563, 4563, 4563, + 4563, 4522, 4563, 4563, 4563, 4500, 4764, 4500, + 4563, 4563, 4500, 4500, 4563, 4500, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4500, + 4500, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 4500, 4500, 4500, + 4500, 4563, 4563, 4563, 4500, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4500, 4500, 4500, 4500, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4563, 4563, 4563, 4563, 4563, 4563, 4563, 4563, + 4500, 4500, 4764, 4764, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 4764, 4764, 4764, + 4764, 4764, 4764, 4764, 4764, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 4500, 4500, 4500, + 4764, 4764, 4764, 4764, 4764, 4764, 4764, 4764, + 4764, 4764, 4764, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 4500, 4500, 4500, + 4500, 5003, 5003, 4500, 5055, 4500, 4563, 5003, + 5138, 5158, 5055, 5055, 5055, 5055, 4500, 4500, + 5055, 5055, 4500, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 4500, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 4500, 4500, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 
5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 5055, + 5055, 5055, 5055, 5055, 5055, 5055, 5055, 4500, + 4500, 4500, 4500, 5003, 5003, 5003, 5003, 4500, + 4500, 5003, 5003, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 4500, 4500, 4500, 4500, 4500, + 4500, 4500, 4500, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 4500, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 4500, 4500, 4500, 4500, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 5003, 5003, 5003, 5003, 5003, 5003, + 5003, 5003, 4500, 4500, 5360, +} + +const s_start int = 4862 +const s_first_final int = 4862 +const s_error int = -1 + +const s_en_main int = 4862 + + +//line segment_words.rl:35 + + +func segmentWords(data []byte, maxTokens int, atEOF bool, val [][]byte, types []int) ([][]byte, []int, int, error) { + cs, p, pe := 0, 0, len(data) + cap := maxTokens + if cap < 0 { + cap = 1000 + } + if val == nil { + val = make([][]byte, 0, cap) + } + if types == nil { + types = make([]int, 0, cap) + } + + // added for scanner + ts := 0 + te := 0 + act := 0 + eof := pe + _ = ts // compiler not happy + _ = te + _ = act + + // our state + startPos := 0 + endPos := 0 + totalConsumed := 0 + +//line segment_words.go:18574 + { + cs = s_start + ts = 0 + te = 0 + act = 0 + } + +//line segment_words.go:18582 + { + var _klen int + var _keys int + var _trans int + + if p == pe { + goto _test_eof + } +_resume: + switch _s_from_state_actions[cs] { + case 23: +//line NONE:1 +ts = p + + +//line segment_words.go:18598 + } + + _keys = 
int(_s_key_offsets[cs]) + _trans = int(_s_index_offsets[cs]) + + _klen = int(_s_single_lengths[cs]) + if _klen > 0 { + _lower := int(_keys) + var _mid int + _upper := int(_keys + _klen - 1) + for { + if _upper < _lower { + break + } + + _mid = _lower + ((_upper - _lower) >> 1) + switch { + case data[p] < _s_trans_keys[_mid]: + _upper = _mid - 1 + case data[p] > _s_trans_keys[_mid]: + _lower = _mid + 1 + default: + _trans += int(_mid - int(_keys)) + goto _match + } + } + _keys += _klen + _trans += _klen + } + + _klen = int(_s_range_lengths[cs]) + if _klen > 0 { + _lower := int(_keys) + var _mid int + _upper := int(_keys + (_klen << 1) - 2) + for { + if _upper < _lower { + break + } + + _mid = _lower + (((_upper - _lower) >> 1) & ^1) + switch { + case data[p] < _s_trans_keys[_mid]: + _upper = _mid - 2 + case data[p] > _s_trans_keys[_mid + 1]: + _lower = _mid + 2 + default: + _trans += int((_mid - int(_keys)) >> 1) + goto _match + } + } + _trans += _klen + } + +_match: + _trans = int(_s_indicies[_trans]) +_eof_trans: + cs = int(_s_trans_targs[_trans]) + + if _s_trans_actions[_trans] == 0 { + goto _again + } + + switch _s_trans_actions[_trans] { + case 10: +//line segment_words.rl:72 + + endPos = p + + + case 34: +//line segment_words.rl:76 +te = p +p-- +{ + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 37: +//line segment_words.rl:89 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + 
+ case 39: +//line segment_words.rl:104 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 35: +//line segment_words.rl:119 +te = p +p-- +{ + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 38: +//line segment_words.rl:131 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 40: +//line segment_words.rl:146 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 41: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } 
else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 32: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 36: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 31: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := 
utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 4: +//line segment_words.rl:76 +p = (te) - 1 +{ + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 12: +//line segment_words.rl:89 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 17: +//line segment_words.rl:104 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 7: +//line segment_words.rl:119 +p = (te) - 1 +{ + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 15: +//line segment_words.rl:131 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF 
{ + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 19: +//line segment_words.rl:146 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 21: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 8: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, 
data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 1: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 3: +//line NONE:1 + switch act { + case 1: + {p = (te) - 1 + + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 2: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 3: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, 
types, totalConsumed, nil + } + } + case 4: + {p = (te) - 1 + + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 5: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 7: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 12: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + 
} + case 13: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + } + + + case 28: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + + case 33: +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 13: +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, 
None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 18: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + + + case 26: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 27: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + case 5: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + + case 11: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + + case 16: +//line NONE:1 +te = p+1 + +//line 
segment_words.rl:72 + + endPos = p + +//line segment_words.rl:104 +act = 3; + + case 6: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + + case 14: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + + case 20: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 7; + + case 9: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + + case 2: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + + case 29: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + + case 30: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + + case 25: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + +//line segment_words.go:19500 + } + +_again: + switch _s_to_state_actions[cs] { + case 22: +//line NONE:1 +ts = 0 + + +//line segment_words.go:19510 + } + + if p++; p != pe { + goto _resume + } + _test_eof: {} + if p == eof { + if _s_eof_trans[cs] > 0 { + _trans = int(_s_eof_trans[cs] - 1) + goto _eof_trans + } + switch _s_eof_actions[cs] { + case 24: +//line segment_words.rl:68 + + startPos = p + + +//line segment_words.go:19529 + } + } + + } + +//line segment_words.rl:278 + + + if cs < s_first_final { + return val, types, totalConsumed, ParseError + } + + return val, types, totalConsumed, nil +} diff --git a/vendor/github.com/blevesearch/segment/segment_words.rl b/vendor/github.com/blevesearch/segment/segment_words.rl new file mode 100644 index 0000000..e69af8b --- /dev/null 
+++ b/vendor/github.com/blevesearch/segment/segment_words.rl @@ -0,0 +1,285 @@ +// Copyright (c) 2015 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. + +// +build BUILDTAGS + +package segment + +import ( + "fmt" + "unicode/utf8" +) + +var RagelFlags = "RAGELFLAGS" + +var ParseError = fmt.Errorf("unicode word segmentation parse error") + +// Word Types +const ( + None = iota + Number + Letter + Kana + Ideo +) + +%%{ + machine s; + write data; +}%% + +func segmentWords(data []byte, maxTokens int, atEOF bool, val [][]byte, types []int) ([][]byte, []int, int, error) { + cs, p, pe := 0, 0, len(data) + cap := maxTokens + if cap < 0 { + cap = 1000 + } + if val == nil { + val = make([][]byte, 0, cap) + } + if types == nil { + types = make([]int, 0, cap) + } + + // added for scanner + ts := 0 + te := 0 + act := 0 + eof := pe + _ = ts // compiler not happy + _ = te + _ = act + + // our state + startPos := 0 + endPos := 0 + totalConsumed := 0 + %%{ + + include SCRIPTS "ragel/uscript.rl"; + include WB "ragel/uwb.rl"; + + action startToken { + startPos = p + } + + action endToken { + endPos = p + } + + action finishNumericToken { + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishHangulToken { + if endPos+1 == pe && !atEOF { + return val, types, 
totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishKatakanaToken { + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishWordToken { + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishHanToken { + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishHiraganaToken { + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + action finishNoneToken { + lastPos 
:= startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + + HangulEx = Hangul ( Extend | Format )*; + HebrewOrALetterEx = ( Hebrew_Letter | ALetter ) ( Extend | Format )*; + NumericEx = Numeric ( Extend | Format )*; + KatakanaEx = Katakana ( Extend | Format )*; + MidLetterEx = ( MidLetter | MidNumLet | Single_Quote ) ( Extend | Format )*; + MidNumericEx = ( MidNum | MidNumLet | Single_Quote ) ( Extend | Format )*; + ExtendNumLetEx = ExtendNumLet ( Extend | Format )*; + HanEx = Han ( Extend | Format )*; + HiraganaEx = Hiragana ( Extend | Format )*; + SingleQuoteEx = Single_Quote ( Extend | Format )*; + DoubleQuoteEx = Double_Quote ( Extend | Format )*; + HebrewLetterEx = Hebrew_Letter ( Extend | Format )*; + RegionalIndicatorEx = Regional_Indicator ( Extend | Format )*; + NLCRLF = Newline | CR | LF; + OtherEx = ^(NLCRLF) ( Extend | Format )* ; + + # UAX#29 WB8. Numeric × Numeric + # WB11. Numeric (MidNum | MidNumLet | Single_Quote) × Numeric + # WB12. Numeric × (MidNum | MidNumLet | Single_Quote) Numeric + # WB13a. (ALetter | Hebrew_Letter | Numeric | Katakana | ExtendNumLet) × ExtendNumLet + # WB13b. ExtendNumLet × (ALetter | Hebrew_Letter | Numeric | Katakana) + # + WordNumeric = ( ( ExtendNumLetEx )* NumericEx ( ( ( ExtendNumLetEx )* | MidNumericEx ) NumericEx )* ( ExtendNumLetEx )* ) >startToken @endToken; + + # subset of the below for typing purposes only! 
+ WordHangul = ( HangulEx )+ >startToken @endToken; + WordKatakana = ( KatakanaEx )+ >startToken @endToken; + + # UAX#29 WB5. (ALetter | Hebrew_Letter) × (ALetter | Hebrew_Letter) + # WB6. (ALetter | Hebrew_Letter) × (MidLetter | MidNumLet | Single_Quote) (ALetter | Hebrew_Letter) + # WB7. (ALetter | Hebrew_Letter) (MidLetter | MidNumLet | Single_Quote) × (ALetter | Hebrew_Letter) + # WB7a. Hebrew_Letter × Single_Quote + # WB7b. Hebrew_Letter × Double_Quote Hebrew_Letter + # WB7c. Hebrew_Letter Double_Quote × Hebrew_Letter + # WB9. (ALetter | Hebrew_Letter) × Numeric + # WB10. Numeric × (ALetter | Hebrew_Letter) + # WB13. Katakana × Katakana + # WB13a. (ALetter | Hebrew_Letter | Numeric | Katakana | ExtendNumLet) × ExtendNumLet + # WB13b. ExtendNumLet × (ALetter | Hebrew_Letter | Numeric | Katakana) + # + # Marty -deviated here to allow for (ExtendNumLetEx x ExtendNumLetEx) part of 13a + # + Word = ( ( ExtendNumLetEx )* ( KatakanaEx ( ( ExtendNumLetEx )* KatakanaEx )* + | ( HebrewLetterEx ( SingleQuoteEx | DoubleQuoteEx HebrewLetterEx ) + | NumericEx ( ( ( ExtendNumLetEx )* | MidNumericEx ) NumericEx )* + | HebrewOrALetterEx ( ( ( ExtendNumLetEx )* | MidLetterEx ) HebrewOrALetterEx )* + |ExtendNumLetEx + )+ + ) + ( + ( ExtendNumLetEx )+ ( KatakanaEx ( ( ExtendNumLetEx )* KatakanaEx )* + | ( HebrewLetterEx ( SingleQuoteEx | DoubleQuoteEx HebrewLetterEx ) + | NumericEx ( ( ( ExtendNumLetEx )* | MidNumericEx ) NumericEx )* + | HebrewOrALetterEx ( ( ( ExtendNumLetEx )* | MidLetterEx ) HebrewOrALetterEx )* + )+ + ) + )* ExtendNumLetEx*) >startToken @endToken; + + # UAX#29 WB14. 
Any ÷ Any + WordHan = HanEx >startToken @endToken; + WordHiragana = HiraganaEx >startToken @endToken; + + WordExt = ( ( Extend | Format )* ) >startToken @endToken; # maybe plus not star + + WordCRLF = (CR LF) >startToken @endToken; + + WordCR = CR >startToken @endToken; + + WordLF = LF >startToken @endToken; + + WordNL = Newline >startToken @endToken; + + WordRegional = (RegionalIndicatorEx+) >startToken @endToken; + + Other = OtherEx >startToken @endToken; + + main := |* + WordNumeric => finishNumericToken; + WordHangul => finishHangulToken; + WordKatakana => finishKatakanaToken; + Word => finishWordToken; + WordHan => finishHanToken; + WordHiragana => finishHiraganaToken; + WordRegional =>finishNoneToken; + WordCRLF => finishNoneToken; + WordCR => finishNoneToken; + WordLF => finishNoneToken; + WordNL => finishNoneToken; + WordExt => finishNoneToken; + Other => finishNoneToken; + *|; + + write init; + write exec; + }%% + + if cs < s_first_final { + return val, types, totalConsumed, ParseError + } + + return val, types, totalConsumed, nil +} diff --git a/vendor/github.com/blevesearch/segment/segment_words_prod.go b/vendor/github.com/blevesearch/segment/segment_words_prod.go new file mode 100644 index 0000000..93b3b6e --- /dev/null +++ b/vendor/github.com/blevesearch/segment/segment_words_prod.go @@ -0,0 +1,173643 @@ + +//line segment_words.rl:1 +// Copyright (c) 2015 Couchbase, Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file +// except in compliance with the License. You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software distributed under the +// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, +// either express or implied. See the License for the specific language governing permissions +// and limitations under the License. 
+ +// +build prod + +package segment + +import ( + "fmt" + "unicode/utf8" +) + +var RagelFlags = "-G2" + +var ParseError = fmt.Errorf("unicode word segmentation parse error") + +// Word Types +const ( + None = iota + Number + Letter + Kana + Ideo +) + + +//line segment_words_prod.go:36 +const s_start int = 4862 +const s_first_final int = 4862 +const s_error int = -1 + +const s_en_main int = 4862 + + +//line segment_words.rl:35 + + +func segmentWords(data []byte, maxTokens int, atEOF bool, val [][]byte, types []int) ([][]byte, []int, int, error) { + cs, p, pe := 0, 0, len(data) + cap := maxTokens + if cap < 0 { + cap = 1000 + } + if val == nil { + val = make([][]byte, 0, cap) + } + if types == nil { + types = make([]int, 0, cap) + } + + // added for scanner + ts := 0 + te := 0 + act := 0 + eof := pe + _ = ts // compiler not happy + _ = te + _ = act + + // our state + startPos := 0 + endPos := 0 + totalConsumed := 0 + +//line segment_words_prod.go:74 + { + cs = s_start + ts = 0 + te = 0 + act = 0 + } + +//line segment_words_prod.go:82 + { + if p == pe { + goto _test_eof + } + switch cs { + case 4862: + goto st_case_4862 + case 4863: + goto st_case_4863 + case 0: + goto st_case_0 + case 1: + goto st_case_1 + case 2: + goto st_case_2 + case 3: + goto st_case_3 + case 4: + goto st_case_4 + case 5: + goto st_case_5 + case 6: + goto st_case_6 + case 7: + goto st_case_7 + case 8: + goto st_case_8 + case 9: + goto st_case_9 + case 10: + goto st_case_10 + case 11: + goto st_case_11 + case 12: + goto st_case_12 + case 13: + goto st_case_13 + case 14: + goto st_case_14 + case 15: + goto st_case_15 + case 16: + goto st_case_16 + case 17: + goto st_case_17 + case 18: + goto st_case_18 + case 19: + goto st_case_19 + case 20: + goto st_case_20 + case 21: + goto st_case_21 + case 22: + goto st_case_22 + case 23: + goto st_case_23 + case 24: + goto st_case_24 + case 25: + goto st_case_25 + case 26: + goto st_case_26 + case 27: + goto st_case_27 + case 28: + goto st_case_28 + case 
29: + goto st_case_29 + case 30: + goto st_case_30 + case 31: + goto st_case_31 + case 32: + goto st_case_32 + case 33: + goto st_case_33 + case 34: + goto st_case_34 + case 35: + goto st_case_35 + case 36: + goto st_case_36 + case 37: + goto st_case_37 + case 38: + goto st_case_38 + case 39: + goto st_case_39 + case 40: + goto st_case_40 + case 41: + goto st_case_41 + case 42: + goto st_case_42 + case 43: + goto st_case_43 + case 44: + goto st_case_44 + case 45: + goto st_case_45 + case 46: + goto st_case_46 + case 47: + goto st_case_47 + case 48: + goto st_case_48 + case 49: + goto st_case_49 + case 50: + goto st_case_50 + case 51: + goto st_case_51 + case 52: + goto st_case_52 + case 53: + goto st_case_53 + case 54: + goto st_case_54 + case 55: + goto st_case_55 + case 56: + goto st_case_56 + case 57: + goto st_case_57 + case 58: + goto st_case_58 + case 59: + goto st_case_59 + case 60: + goto st_case_60 + case 61: + goto st_case_61 + case 62: + goto st_case_62 + case 63: + goto st_case_63 + case 64: + goto st_case_64 + case 65: + goto st_case_65 + case 66: + goto st_case_66 + case 67: + goto st_case_67 + case 68: + goto st_case_68 + case 69: + goto st_case_69 + case 70: + goto st_case_70 + case 71: + goto st_case_71 + case 72: + goto st_case_72 + case 73: + goto st_case_73 + case 74: + goto st_case_74 + case 75: + goto st_case_75 + case 76: + goto st_case_76 + case 77: + goto st_case_77 + case 78: + goto st_case_78 + case 79: + goto st_case_79 + case 80: + goto st_case_80 + case 81: + goto st_case_81 + case 82: + goto st_case_82 + case 83: + goto st_case_83 + case 84: + goto st_case_84 + case 85: + goto st_case_85 + case 86: + goto st_case_86 + case 87: + goto st_case_87 + case 88: + goto st_case_88 + case 89: + goto st_case_89 + case 90: + goto st_case_90 + case 91: + goto st_case_91 + case 92: + goto st_case_92 + case 93: + goto st_case_93 + case 94: + goto st_case_94 + case 95: + goto st_case_95 + case 96: + goto st_case_96 + case 97: + goto st_case_97 + 
case 98: + goto st_case_98 + case 99: + goto st_case_99 + case 100: + goto st_case_100 + case 101: + goto st_case_101 + case 102: + goto st_case_102 + case 103: + goto st_case_103 + case 104: + goto st_case_104 + case 105: + goto st_case_105 + case 106: + goto st_case_106 + case 107: + goto st_case_107 + case 108: + goto st_case_108 + case 109: + goto st_case_109 + case 110: + goto st_case_110 + case 111: + goto st_case_111 + case 112: + goto st_case_112 + case 113: + goto st_case_113 + case 114: + goto st_case_114 + case 115: + goto st_case_115 + case 116: + goto st_case_116 + case 117: + goto st_case_117 + case 118: + goto st_case_118 + case 119: + goto st_case_119 + case 120: + goto st_case_120 + case 121: + goto st_case_121 + case 122: + goto st_case_122 + case 123: + goto st_case_123 + case 124: + goto st_case_124 + case 125: + goto st_case_125 + case 126: + goto st_case_126 + case 127: + goto st_case_127 + case 128: + goto st_case_128 + case 129: + goto st_case_129 + case 130: + goto st_case_130 + case 131: + goto st_case_131 + case 132: + goto st_case_132 + case 133: + goto st_case_133 + case 134: + goto st_case_134 + case 135: + goto st_case_135 + case 136: + goto st_case_136 + case 137: + goto st_case_137 + case 138: + goto st_case_138 + case 139: + goto st_case_139 + case 140: + goto st_case_140 + case 4864: + goto st_case_4864 + case 4865: + goto st_case_4865 + case 141: + goto st_case_141 + case 4866: + goto st_case_4866 + case 4867: + goto st_case_4867 + case 142: + goto st_case_142 + case 143: + goto st_case_143 + case 144: + goto st_case_144 + case 145: + goto st_case_145 + case 146: + goto st_case_146 + case 147: + goto st_case_147 + case 148: + goto st_case_148 + case 149: + goto st_case_149 + case 150: + goto st_case_150 + case 151: + goto st_case_151 + case 152: + goto st_case_152 + case 153: + goto st_case_153 + case 154: + goto st_case_154 + case 155: + goto st_case_155 + case 156: + goto st_case_156 + case 157: + goto st_case_157 + case 158: + 
goto st_case_158 + case 159: + goto st_case_159 + case 160: + goto st_case_160 + case 161: + goto st_case_161 + case 162: + goto st_case_162 + case 163: + goto st_case_163 + case 164: + goto st_case_164 + case 165: + goto st_case_165 + case 166: + goto st_case_166 + case 167: + goto st_case_167 + case 168: + goto st_case_168 + case 169: + goto st_case_169 + case 170: + goto st_case_170 + case 171: + goto st_case_171 + case 172: + goto st_case_172 + case 173: + goto st_case_173 + case 174: + goto st_case_174 + case 175: + goto st_case_175 + case 176: + goto st_case_176 + case 177: + goto st_case_177 + case 178: + goto st_case_178 + case 179: + goto st_case_179 + case 180: + goto st_case_180 + case 181: + goto st_case_181 + case 182: + goto st_case_182 + case 183: + goto st_case_183 + case 184: + goto st_case_184 + case 185: + goto st_case_185 + case 186: + goto st_case_186 + case 187: + goto st_case_187 + case 188: + goto st_case_188 + case 189: + goto st_case_189 + case 190: + goto st_case_190 + case 191: + goto st_case_191 + case 192: + goto st_case_192 + case 193: + goto st_case_193 + case 194: + goto st_case_194 + case 195: + goto st_case_195 + case 196: + goto st_case_196 + case 197: + goto st_case_197 + case 198: + goto st_case_198 + case 199: + goto st_case_199 + case 200: + goto st_case_200 + case 201: + goto st_case_201 + case 202: + goto st_case_202 + case 203: + goto st_case_203 + case 204: + goto st_case_204 + case 205: + goto st_case_205 + case 206: + goto st_case_206 + case 207: + goto st_case_207 + case 208: + goto st_case_208 + case 209: + goto st_case_209 + case 210: + goto st_case_210 + case 211: + goto st_case_211 + case 212: + goto st_case_212 + case 213: + goto st_case_213 + case 214: + goto st_case_214 + case 215: + goto st_case_215 + case 216: + goto st_case_216 + case 217: + goto st_case_217 + case 218: + goto st_case_218 + case 219: + goto st_case_219 + case 220: + goto st_case_220 + case 221: + goto st_case_221 + case 222: + goto 
st_case_222 + case 223: + goto st_case_223 + case 224: + goto st_case_224 + case 225: + goto st_case_225 + case 226: + goto st_case_226 + case 227: + goto st_case_227 + case 228: + goto st_case_228 + case 229: + goto st_case_229 + case 230: + goto st_case_230 + case 231: + goto st_case_231 + case 232: + goto st_case_232 + case 233: + goto st_case_233 + case 234: + goto st_case_234 + case 235: + goto st_case_235 + case 236: + goto st_case_236 + case 237: + goto st_case_237 + case 238: + goto st_case_238 + case 239: + goto st_case_239 + case 240: + goto st_case_240 + case 241: + goto st_case_241 + case 242: + goto st_case_242 + case 243: + goto st_case_243 + case 244: + goto st_case_244 + case 245: + goto st_case_245 + case 246: + goto st_case_246 + case 247: + goto st_case_247 + case 248: + goto st_case_248 + case 249: + goto st_case_249 + case 250: + goto st_case_250 + case 251: + goto st_case_251 + case 252: + goto st_case_252 + case 253: + goto st_case_253 + case 254: + goto st_case_254 + case 255: + goto st_case_255 + case 256: + goto st_case_256 + case 257: + goto st_case_257 + case 258: + goto st_case_258 + case 259: + goto st_case_259 + case 260: + goto st_case_260 + case 261: + goto st_case_261 + case 262: + goto st_case_262 + case 263: + goto st_case_263 + case 264: + goto st_case_264 + case 265: + goto st_case_265 + case 266: + goto st_case_266 + case 267: + goto st_case_267 + case 268: + goto st_case_268 + case 269: + goto st_case_269 + case 270: + goto st_case_270 + case 271: + goto st_case_271 + case 272: + goto st_case_272 + case 273: + goto st_case_273 + case 274: + goto st_case_274 + case 275: + goto st_case_275 + case 276: + goto st_case_276 + case 277: + goto st_case_277 + case 278: + goto st_case_278 + case 279: + goto st_case_279 + case 280: + goto st_case_280 + case 281: + goto st_case_281 + case 282: + goto st_case_282 + case 283: + goto st_case_283 + case 284: + goto st_case_284 + case 285: + goto st_case_285 + case 286: + goto st_case_286 + 
case 287: + goto st_case_287 + case 288: + goto st_case_288 + case 289: + goto st_case_289 + case 290: + goto st_case_290 + case 291: + goto st_case_291 + case 292: + goto st_case_292 + case 293: + goto st_case_293 + case 294: + goto st_case_294 + case 295: + goto st_case_295 + case 296: + goto st_case_296 + case 297: + goto st_case_297 + case 298: + goto st_case_298 + case 299: + goto st_case_299 + case 300: + goto st_case_300 + case 301: + goto st_case_301 + case 302: + goto st_case_302 + case 303: + goto st_case_303 + case 304: + goto st_case_304 + case 305: + goto st_case_305 + case 306: + goto st_case_306 + case 307: + goto st_case_307 + case 308: + goto st_case_308 + case 309: + goto st_case_309 + case 310: + goto st_case_310 + case 311: + goto st_case_311 + case 312: + goto st_case_312 + case 313: + goto st_case_313 + case 314: + goto st_case_314 + case 315: + goto st_case_315 + case 316: + goto st_case_316 + case 317: + goto st_case_317 + case 318: + goto st_case_318 + case 319: + goto st_case_319 + case 320: + goto st_case_320 + case 321: + goto st_case_321 + case 322: + goto st_case_322 + case 323: + goto st_case_323 + case 324: + goto st_case_324 + case 325: + goto st_case_325 + case 326: + goto st_case_326 + case 327: + goto st_case_327 + case 328: + goto st_case_328 + case 329: + goto st_case_329 + case 330: + goto st_case_330 + case 331: + goto st_case_331 + case 332: + goto st_case_332 + case 333: + goto st_case_333 + case 334: + goto st_case_334 + case 335: + goto st_case_335 + case 336: + goto st_case_336 + case 337: + goto st_case_337 + case 338: + goto st_case_338 + case 339: + goto st_case_339 + case 340: + goto st_case_340 + case 341: + goto st_case_341 + case 342: + goto st_case_342 + case 343: + goto st_case_343 + case 344: + goto st_case_344 + case 345: + goto st_case_345 + case 346: + goto st_case_346 + case 347: + goto st_case_347 + case 348: + goto st_case_348 + case 349: + goto st_case_349 + case 350: + goto st_case_350 + case 351: + 
goto st_case_351 + case 352: + goto st_case_352 + case 353: + goto st_case_353 + case 354: + goto st_case_354 + case 355: + goto st_case_355 + case 356: + goto st_case_356 + case 357: + goto st_case_357 + case 358: + goto st_case_358 + case 359: + goto st_case_359 + case 360: + goto st_case_360 + case 361: + goto st_case_361 + case 362: + goto st_case_362 + case 363: + goto st_case_363 + case 364: + goto st_case_364 + case 365: + goto st_case_365 + case 366: + goto st_case_366 + case 367: + goto st_case_367 + case 368: + goto st_case_368 + case 369: + goto st_case_369 + case 370: + goto st_case_370 + case 371: + goto st_case_371 + case 372: + goto st_case_372 + case 373: + goto st_case_373 + case 374: + goto st_case_374 + case 375: + goto st_case_375 + case 376: + goto st_case_376 + case 377: + goto st_case_377 + case 378: + goto st_case_378 + case 379: + goto st_case_379 + case 380: + goto st_case_380 + case 381: + goto st_case_381 + case 382: + goto st_case_382 + case 383: + goto st_case_383 + case 384: + goto st_case_384 + case 385: + goto st_case_385 + case 386: + goto st_case_386 + case 387: + goto st_case_387 + case 388: + goto st_case_388 + case 389: + goto st_case_389 + case 390: + goto st_case_390 + case 391: + goto st_case_391 + case 392: + goto st_case_392 + case 393: + goto st_case_393 + case 394: + goto st_case_394 + case 395: + goto st_case_395 + case 396: + goto st_case_396 + case 397: + goto st_case_397 + case 398: + goto st_case_398 + case 399: + goto st_case_399 + case 400: + goto st_case_400 + case 401: + goto st_case_401 + case 402: + goto st_case_402 + case 403: + goto st_case_403 + case 404: + goto st_case_404 + case 405: + goto st_case_405 + case 406: + goto st_case_406 + case 407: + goto st_case_407 + case 408: + goto st_case_408 + case 409: + goto st_case_409 + case 410: + goto st_case_410 + case 411: + goto st_case_411 + case 412: + goto st_case_412 + case 4868: + goto st_case_4868 + case 413: + goto st_case_413 + case 414: + goto 
st_case_414 + case 415: + goto st_case_415 + case 416: + goto st_case_416 + case 417: + goto st_case_417 + case 418: + goto st_case_418 + case 419: + goto st_case_419 + case 420: + goto st_case_420 + case 421: + goto st_case_421 + case 422: + goto st_case_422 + case 423: + goto st_case_423 + case 424: + goto st_case_424 + case 425: + goto st_case_425 + case 426: + goto st_case_426 + case 427: + goto st_case_427 + case 428: + goto st_case_428 + case 429: + goto st_case_429 + case 430: + goto st_case_430 + case 431: + goto st_case_431 + case 432: + goto st_case_432 + case 433: + goto st_case_433 + case 434: + goto st_case_434 + case 435: + goto st_case_435 + case 436: + goto st_case_436 + case 437: + goto st_case_437 + case 438: + goto st_case_438 + case 439: + goto st_case_439 + case 440: + goto st_case_440 + case 441: + goto st_case_441 + case 442: + goto st_case_442 + case 443: + goto st_case_443 + case 444: + goto st_case_444 + case 445: + goto st_case_445 + case 446: + goto st_case_446 + case 447: + goto st_case_447 + case 448: + goto st_case_448 + case 449: + goto st_case_449 + case 450: + goto st_case_450 + case 451: + goto st_case_451 + case 452: + goto st_case_452 + case 453: + goto st_case_453 + case 454: + goto st_case_454 + case 455: + goto st_case_455 + case 456: + goto st_case_456 + case 457: + goto st_case_457 + case 458: + goto st_case_458 + case 459: + goto st_case_459 + case 460: + goto st_case_460 + case 461: + goto st_case_461 + case 462: + goto st_case_462 + case 463: + goto st_case_463 + case 464: + goto st_case_464 + case 465: + goto st_case_465 + case 466: + goto st_case_466 + case 467: + goto st_case_467 + case 468: + goto st_case_468 + case 469: + goto st_case_469 + case 470: + goto st_case_470 + case 471: + goto st_case_471 + case 472: + goto st_case_472 + case 473: + goto st_case_473 + case 474: + goto st_case_474 + case 475: + goto st_case_475 + case 476: + goto st_case_476 + case 477: + goto st_case_477 + case 478: + goto st_case_478 + 
case 479: + goto st_case_479 + case 480: + goto st_case_480 + case 481: + goto st_case_481 + case 482: + goto st_case_482 + case 483: + goto st_case_483 + case 484: + goto st_case_484 + case 485: + goto st_case_485 + case 486: + goto st_case_486 + case 487: + goto st_case_487 + case 488: + goto st_case_488 + case 489: + goto st_case_489 + case 490: + goto st_case_490 + case 491: + goto st_case_491 + case 492: + goto st_case_492 + case 493: + goto st_case_493 + case 494: + goto st_case_494 + case 495: + goto st_case_495 + case 496: + goto st_case_496 + case 497: + goto st_case_497 + case 498: + goto st_case_498 + case 499: + goto st_case_499 + case 500: + goto st_case_500 + case 501: + goto st_case_501 + case 502: + goto st_case_502 + case 503: + goto st_case_503 + case 504: + goto st_case_504 + case 505: + goto st_case_505 + case 506: + goto st_case_506 + case 507: + goto st_case_507 + case 508: + goto st_case_508 + case 509: + goto st_case_509 + case 510: + goto st_case_510 + case 511: + goto st_case_511 + case 512: + goto st_case_512 + case 513: + goto st_case_513 + case 514: + goto st_case_514 + case 515: + goto st_case_515 + case 516: + goto st_case_516 + case 517: + goto st_case_517 + case 518: + goto st_case_518 + case 519: + goto st_case_519 + case 520: + goto st_case_520 + case 521: + goto st_case_521 + case 522: + goto st_case_522 + case 523: + goto st_case_523 + case 524: + goto st_case_524 + case 525: + goto st_case_525 + case 526: + goto st_case_526 + case 527: + goto st_case_527 + case 528: + goto st_case_528 + case 529: + goto st_case_529 + case 530: + goto st_case_530 + case 531: + goto st_case_531 + case 532: + goto st_case_532 + case 533: + goto st_case_533 + case 534: + goto st_case_534 + case 535: + goto st_case_535 + case 536: + goto st_case_536 + case 537: + goto st_case_537 + case 538: + goto st_case_538 + case 539: + goto st_case_539 + case 540: + goto st_case_540 + case 541: + goto st_case_541 + case 542: + goto st_case_542 + case 543: + 
goto st_case_543 + case 544: + goto st_case_544 + case 545: + goto st_case_545 + case 546: + goto st_case_546 + case 547: + goto st_case_547 + case 548: + goto st_case_548 + case 549: + goto st_case_549 + case 550: + goto st_case_550 + case 551: + goto st_case_551 + case 552: + goto st_case_552 + case 553: + goto st_case_553 + case 554: + goto st_case_554 + case 555: + goto st_case_555 + case 556: + goto st_case_556 + case 557: + goto st_case_557 + case 558: + goto st_case_558 + case 559: + goto st_case_559 + case 560: + goto st_case_560 + case 561: + goto st_case_561 + case 4869: + goto st_case_4869 + case 562: + goto st_case_562 + case 563: + goto st_case_563 + case 564: + goto st_case_564 + case 565: + goto st_case_565 + case 566: + goto st_case_566 + case 567: + goto st_case_567 + case 4870: + goto st_case_4870 + case 568: + goto st_case_568 + case 569: + goto st_case_569 + case 570: + goto st_case_570 + case 571: + goto st_case_571 + case 572: + goto st_case_572 + case 573: + goto st_case_573 + case 574: + goto st_case_574 + case 4871: + goto st_case_4871 + case 575: + goto st_case_575 + case 576: + goto st_case_576 + case 577: + goto st_case_577 + case 578: + goto st_case_578 + case 579: + goto st_case_579 + case 580: + goto st_case_580 + case 581: + goto st_case_581 + case 582: + goto st_case_582 + case 583: + goto st_case_583 + case 584: + goto st_case_584 + case 585: + goto st_case_585 + case 586: + goto st_case_586 + case 587: + goto st_case_587 + case 588: + goto st_case_588 + case 589: + goto st_case_589 + case 590: + goto st_case_590 + case 591: + goto st_case_591 + case 592: + goto st_case_592 + case 593: + goto st_case_593 + case 594: + goto st_case_594 + case 595: + goto st_case_595 + case 596: + goto st_case_596 + case 597: + goto st_case_597 + case 598: + goto st_case_598 + case 599: + goto st_case_599 + case 600: + goto st_case_600 + case 601: + goto st_case_601 + case 602: + goto st_case_602 + case 603: + goto st_case_603 + case 604: + goto 
st_case_604 + case 605: + goto st_case_605 + case 606: + goto st_case_606 + case 607: + goto st_case_607 + case 608: + goto st_case_608 + case 609: + goto st_case_609 + case 610: + goto st_case_610 + case 611: + goto st_case_611 + case 612: + goto st_case_612 + case 613: + goto st_case_613 + case 614: + goto st_case_614 + case 615: + goto st_case_615 + case 616: + goto st_case_616 + case 617: + goto st_case_617 + case 618: + goto st_case_618 + case 619: + goto st_case_619 + case 620: + goto st_case_620 + case 621: + goto st_case_621 + case 622: + goto st_case_622 + case 623: + goto st_case_623 + case 624: + goto st_case_624 + case 625: + goto st_case_625 + case 626: + goto st_case_626 + case 627: + goto st_case_627 + case 628: + goto st_case_628 + case 629: + goto st_case_629 + case 630: + goto st_case_630 + case 631: + goto st_case_631 + case 632: + goto st_case_632 + case 633: + goto st_case_633 + case 634: + goto st_case_634 + case 635: + goto st_case_635 + case 636: + goto st_case_636 + case 637: + goto st_case_637 + case 638: + goto st_case_638 + case 639: + goto st_case_639 + case 640: + goto st_case_640 + case 641: + goto st_case_641 + case 642: + goto st_case_642 + case 643: + goto st_case_643 + case 644: + goto st_case_644 + case 645: + goto st_case_645 + case 646: + goto st_case_646 + case 647: + goto st_case_647 + case 648: + goto st_case_648 + case 649: + goto st_case_649 + case 650: + goto st_case_650 + case 651: + goto st_case_651 + case 652: + goto st_case_652 + case 653: + goto st_case_653 + case 654: + goto st_case_654 + case 655: + goto st_case_655 + case 656: + goto st_case_656 + case 657: + goto st_case_657 + case 658: + goto st_case_658 + case 659: + goto st_case_659 + case 660: + goto st_case_660 + case 661: + goto st_case_661 + case 662: + goto st_case_662 + case 663: + goto st_case_663 + case 664: + goto st_case_664 + case 665: + goto st_case_665 + case 666: + goto st_case_666 + case 667: + goto st_case_667 + case 668: + goto st_case_668 + 
case 669: + goto st_case_669 + case 670: + goto st_case_670 + case 671: + goto st_case_671 + case 672: + goto st_case_672 + case 673: + goto st_case_673 + case 674: + goto st_case_674 + case 675: + goto st_case_675 + case 676: + goto st_case_676 + case 677: + goto st_case_677 + case 678: + goto st_case_678 + case 679: + goto st_case_679 + case 680: + goto st_case_680 + case 681: + goto st_case_681 + case 682: + goto st_case_682 + case 683: + goto st_case_683 + case 684: + goto st_case_684 + case 685: + goto st_case_685 + case 686: + goto st_case_686 + case 687: + goto st_case_687 + case 688: + goto st_case_688 + case 689: + goto st_case_689 + case 690: + goto st_case_690 + case 691: + goto st_case_691 + case 692: + goto st_case_692 + case 693: + goto st_case_693 + case 694: + goto st_case_694 + case 695: + goto st_case_695 + case 696: + goto st_case_696 + case 697: + goto st_case_697 + case 698: + goto st_case_698 + case 699: + goto st_case_699 + case 700: + goto st_case_700 + case 701: + goto st_case_701 + case 702: + goto st_case_702 + case 703: + goto st_case_703 + case 704: + goto st_case_704 + case 705: + goto st_case_705 + case 706: + goto st_case_706 + case 707: + goto st_case_707 + case 708: + goto st_case_708 + case 709: + goto st_case_709 + case 710: + goto st_case_710 + case 711: + goto st_case_711 + case 712: + goto st_case_712 + case 713: + goto st_case_713 + case 714: + goto st_case_714 + case 715: + goto st_case_715 + case 716: + goto st_case_716 + case 717: + goto st_case_717 + case 718: + goto st_case_718 + case 719: + goto st_case_719 + case 720: + goto st_case_720 + case 721: + goto st_case_721 + case 722: + goto st_case_722 + case 723: + goto st_case_723 + case 724: + goto st_case_724 + case 725: + goto st_case_725 + case 726: + goto st_case_726 + case 727: + goto st_case_727 + case 728: + goto st_case_728 + case 729: + goto st_case_729 + case 730: + goto st_case_730 + case 731: + goto st_case_731 + case 732: + goto st_case_732 + case 733: + 
goto st_case_733 + case 734: + goto st_case_734 + case 735: + goto st_case_735 + case 736: + goto st_case_736 + case 737: + goto st_case_737 + case 738: + goto st_case_738 + case 739: + goto st_case_739 + case 740: + goto st_case_740 + case 741: + goto st_case_741 + case 742: + goto st_case_742 + case 743: + goto st_case_743 + case 744: + goto st_case_744 + case 745: + goto st_case_745 + case 746: + goto st_case_746 + case 747: + goto st_case_747 + case 748: + goto st_case_748 + case 749: + goto st_case_749 + case 750: + goto st_case_750 + case 751: + goto st_case_751 + case 752: + goto st_case_752 + case 753: + goto st_case_753 + case 754: + goto st_case_754 + case 755: + goto st_case_755 + case 756: + goto st_case_756 + case 757: + goto st_case_757 + case 758: + goto st_case_758 + case 759: + goto st_case_759 + case 760: + goto st_case_760 + case 761: + goto st_case_761 + case 762: + goto st_case_762 + case 763: + goto st_case_763 + case 764: + goto st_case_764 + case 765: + goto st_case_765 + case 766: + goto st_case_766 + case 767: + goto st_case_767 + case 768: + goto st_case_768 + case 769: + goto st_case_769 + case 770: + goto st_case_770 + case 771: + goto st_case_771 + case 772: + goto st_case_772 + case 773: + goto st_case_773 + case 774: + goto st_case_774 + case 775: + goto st_case_775 + case 776: + goto st_case_776 + case 777: + goto st_case_777 + case 778: + goto st_case_778 + case 779: + goto st_case_779 + case 780: + goto st_case_780 + case 781: + goto st_case_781 + case 782: + goto st_case_782 + case 783: + goto st_case_783 + case 784: + goto st_case_784 + case 785: + goto st_case_785 + case 786: + goto st_case_786 + case 787: + goto st_case_787 + case 788: + goto st_case_788 + case 789: + goto st_case_789 + case 790: + goto st_case_790 + case 791: + goto st_case_791 + case 792: + goto st_case_792 + case 793: + goto st_case_793 + case 794: + goto st_case_794 + case 795: + goto st_case_795 + case 796: + goto st_case_796 + case 797: + goto 
st_case_797 + case 798: + goto st_case_798 + case 799: + goto st_case_799 + case 800: + goto st_case_800 + case 801: + goto st_case_801 + case 802: + goto st_case_802 + case 803: + goto st_case_803 + case 804: + goto st_case_804 + case 805: + goto st_case_805 + case 806: + goto st_case_806 + case 807: + goto st_case_807 + case 808: + goto st_case_808 + case 809: + goto st_case_809 + case 810: + goto st_case_810 + case 811: + goto st_case_811 + case 812: + goto st_case_812 + case 813: + goto st_case_813 + case 814: + goto st_case_814 + case 815: + goto st_case_815 + case 816: + goto st_case_816 + case 817: + goto st_case_817 + case 818: + goto st_case_818 + case 819: + goto st_case_819 + case 820: + goto st_case_820 + case 821: + goto st_case_821 + case 822: + goto st_case_822 + case 823: + goto st_case_823 + case 824: + goto st_case_824 + case 825: + goto st_case_825 + case 826: + goto st_case_826 + case 827: + goto st_case_827 + case 828: + goto st_case_828 + case 829: + goto st_case_829 + case 830: + goto st_case_830 + case 831: + goto st_case_831 + case 832: + goto st_case_832 + case 833: + goto st_case_833 + case 834: + goto st_case_834 + case 835: + goto st_case_835 + case 836: + goto st_case_836 + case 837: + goto st_case_837 + case 838: + goto st_case_838 + case 839: + goto st_case_839 + case 840: + goto st_case_840 + case 841: + goto st_case_841 + case 842: + goto st_case_842 + case 843: + goto st_case_843 + case 844: + goto st_case_844 + case 845: + goto st_case_845 + case 846: + goto st_case_846 + case 847: + goto st_case_847 + case 848: + goto st_case_848 + case 849: + goto st_case_849 + case 850: + goto st_case_850 + case 851: + goto st_case_851 + case 852: + goto st_case_852 + case 853: + goto st_case_853 + case 854: + goto st_case_854 + case 855: + goto st_case_855 + case 856: + goto st_case_856 + case 857: + goto st_case_857 + case 858: + goto st_case_858 + case 859: + goto st_case_859 + case 860: + goto st_case_860 + case 861: + goto st_case_861 + 
case 862: + goto st_case_862 + case 863: + goto st_case_863 + case 864: + goto st_case_864 + case 865: + goto st_case_865 + case 866: + goto st_case_866 + case 867: + goto st_case_867 + case 868: + goto st_case_868 + case 869: + goto st_case_869 + case 870: + goto st_case_870 + case 871: + goto st_case_871 + case 872: + goto st_case_872 + case 873: + goto st_case_873 + case 874: + goto st_case_874 + case 875: + goto st_case_875 + case 876: + goto st_case_876 + case 877: + goto st_case_877 + case 878: + goto st_case_878 + case 879: + goto st_case_879 + case 880: + goto st_case_880 + case 881: + goto st_case_881 + case 882: + goto st_case_882 + case 883: + goto st_case_883 + case 884: + goto st_case_884 + case 885: + goto st_case_885 + case 886: + goto st_case_886 + case 887: + goto st_case_887 + case 888: + goto st_case_888 + case 889: + goto st_case_889 + case 890: + goto st_case_890 + case 891: + goto st_case_891 + case 892: + goto st_case_892 + case 893: + goto st_case_893 + case 894: + goto st_case_894 + case 895: + goto st_case_895 + case 896: + goto st_case_896 + case 897: + goto st_case_897 + case 898: + goto st_case_898 + case 899: + goto st_case_899 + case 900: + goto st_case_900 + case 901: + goto st_case_901 + case 902: + goto st_case_902 + case 903: + goto st_case_903 + case 904: + goto st_case_904 + case 905: + goto st_case_905 + case 906: + goto st_case_906 + case 907: + goto st_case_907 + case 908: + goto st_case_908 + case 909: + goto st_case_909 + case 910: + goto st_case_910 + case 911: + goto st_case_911 + case 912: + goto st_case_912 + case 913: + goto st_case_913 + case 914: + goto st_case_914 + case 915: + goto st_case_915 + case 916: + goto st_case_916 + case 917: + goto st_case_917 + case 918: + goto st_case_918 + case 919: + goto st_case_919 + case 920: + goto st_case_920 + case 921: + goto st_case_921 + case 922: + goto st_case_922 + case 923: + goto st_case_923 + case 924: + goto st_case_924 + case 925: + goto st_case_925 + case 926: + 
goto st_case_926 + case 927: + goto st_case_927 + case 928: + goto st_case_928 + case 929: + goto st_case_929 + case 930: + goto st_case_930 + case 931: + goto st_case_931 + case 932: + goto st_case_932 + case 933: + goto st_case_933 + case 934: + goto st_case_934 + case 935: + goto st_case_935 + case 936: + goto st_case_936 + case 937: + goto st_case_937 + case 938: + goto st_case_938 + case 939: + goto st_case_939 + case 940: + goto st_case_940 + case 941: + goto st_case_941 + case 942: + goto st_case_942 + case 943: + goto st_case_943 + case 944: + goto st_case_944 + case 945: + goto st_case_945 + case 946: + goto st_case_946 + case 947: + goto st_case_947 + case 948: + goto st_case_948 + case 949: + goto st_case_949 + case 950: + goto st_case_950 + case 951: + goto st_case_951 + case 952: + goto st_case_952 + case 953: + goto st_case_953 + case 954: + goto st_case_954 + case 955: + goto st_case_955 + case 956: + goto st_case_956 + case 957: + goto st_case_957 + case 958: + goto st_case_958 + case 959: + goto st_case_959 + case 960: + goto st_case_960 + case 961: + goto st_case_961 + case 962: + goto st_case_962 + case 963: + goto st_case_963 + case 964: + goto st_case_964 + case 965: + goto st_case_965 + case 966: + goto st_case_966 + case 967: + goto st_case_967 + case 968: + goto st_case_968 + case 969: + goto st_case_969 + case 970: + goto st_case_970 + case 971: + goto st_case_971 + case 972: + goto st_case_972 + case 973: + goto st_case_973 + case 974: + goto st_case_974 + case 975: + goto st_case_975 + case 976: + goto st_case_976 + case 977: + goto st_case_977 + case 978: + goto st_case_978 + case 979: + goto st_case_979 + case 980: + goto st_case_980 + case 981: + goto st_case_981 + case 982: + goto st_case_982 + case 983: + goto st_case_983 + case 984: + goto st_case_984 + case 985: + goto st_case_985 + case 986: + goto st_case_986 + case 987: + goto st_case_987 + case 988: + goto st_case_988 + case 989: + goto st_case_989 + case 990: + goto 
st_case_990 + case 991: + goto st_case_991 + case 992: + goto st_case_992 + case 993: + goto st_case_993 + case 994: + goto st_case_994 + case 995: + goto st_case_995 + case 996: + goto st_case_996 + case 997: + goto st_case_997 + case 998: + goto st_case_998 + case 999: + goto st_case_999 + case 1000: + goto st_case_1000 + case 1001: + goto st_case_1001 + case 1002: + goto st_case_1002 + case 1003: + goto st_case_1003 + case 1004: + goto st_case_1004 + case 1005: + goto st_case_1005 + case 1006: + goto st_case_1006 + case 1007: + goto st_case_1007 + case 1008: + goto st_case_1008 + case 1009: + goto st_case_1009 + case 1010: + goto st_case_1010 + case 1011: + goto st_case_1011 + case 1012: + goto st_case_1012 + case 1013: + goto st_case_1013 + case 1014: + goto st_case_1014 + case 1015: + goto st_case_1015 + case 1016: + goto st_case_1016 + case 1017: + goto st_case_1017 + case 1018: + goto st_case_1018 + case 1019: + goto st_case_1019 + case 1020: + goto st_case_1020 + case 1021: + goto st_case_1021 + case 1022: + goto st_case_1022 + case 1023: + goto st_case_1023 + case 1024: + goto st_case_1024 + case 1025: + goto st_case_1025 + case 1026: + goto st_case_1026 + case 1027: + goto st_case_1027 + case 1028: + goto st_case_1028 + case 1029: + goto st_case_1029 + case 1030: + goto st_case_1030 + case 1031: + goto st_case_1031 + case 1032: + goto st_case_1032 + case 1033: + goto st_case_1033 + case 1034: + goto st_case_1034 + case 1035: + goto st_case_1035 + case 1036: + goto st_case_1036 + case 1037: + goto st_case_1037 + case 1038: + goto st_case_1038 + case 1039: + goto st_case_1039 + case 1040: + goto st_case_1040 + case 1041: + goto st_case_1041 + case 1042: + goto st_case_1042 + case 1043: + goto st_case_1043 + case 1044: + goto st_case_1044 + case 1045: + goto st_case_1045 + case 1046: + goto st_case_1046 + case 1047: + goto st_case_1047 + case 1048: + goto st_case_1048 + case 1049: + goto st_case_1049 + case 1050: + goto st_case_1050 + case 1051: + goto 
st_case_1051 + case 1052: + goto st_case_1052 + case 1053: + goto st_case_1053 + case 1054: + goto st_case_1054 + case 1055: + goto st_case_1055 + case 1056: + goto st_case_1056 + case 1057: + goto st_case_1057 + case 1058: + goto st_case_1058 + case 1059: + goto st_case_1059 + case 1060: + goto st_case_1060 + case 1061: + goto st_case_1061 + case 1062: + goto st_case_1062 + case 1063: + goto st_case_1063 + case 1064: + goto st_case_1064 + case 1065: + goto st_case_1065 + case 1066: + goto st_case_1066 + case 1067: + goto st_case_1067 + case 1068: + goto st_case_1068 + case 1069: + goto st_case_1069 + case 1070: + goto st_case_1070 + case 1071: + goto st_case_1071 + case 1072: + goto st_case_1072 + case 1073: + goto st_case_1073 + case 1074: + goto st_case_1074 + case 1075: + goto st_case_1075 + case 1076: + goto st_case_1076 + case 1077: + goto st_case_1077 + case 1078: + goto st_case_1078 + case 1079: + goto st_case_1079 + case 1080: + goto st_case_1080 + case 1081: + goto st_case_1081 + case 1082: + goto st_case_1082 + case 1083: + goto st_case_1083 + case 1084: + goto st_case_1084 + case 1085: + goto st_case_1085 + case 1086: + goto st_case_1086 + case 1087: + goto st_case_1087 + case 1088: + goto st_case_1088 + case 1089: + goto st_case_1089 + case 4872: + goto st_case_4872 + case 1090: + goto st_case_1090 + case 1091: + goto st_case_1091 + case 1092: + goto st_case_1092 + case 1093: + goto st_case_1093 + case 1094: + goto st_case_1094 + case 1095: + goto st_case_1095 + case 1096: + goto st_case_1096 + case 1097: + goto st_case_1097 + case 1098: + goto st_case_1098 + case 1099: + goto st_case_1099 + case 1100: + goto st_case_1100 + case 1101: + goto st_case_1101 + case 1102: + goto st_case_1102 + case 1103: + goto st_case_1103 + case 1104: + goto st_case_1104 + case 1105: + goto st_case_1105 + case 1106: + goto st_case_1106 + case 1107: + goto st_case_1107 + case 1108: + goto st_case_1108 + case 1109: + goto st_case_1109 + case 1110: + goto st_case_1110 + case 
1111: + goto st_case_1111 + case 1112: + goto st_case_1112 + case 1113: + goto st_case_1113 + case 1114: + goto st_case_1114 + case 1115: + goto st_case_1115 + case 1116: + goto st_case_1116 + case 1117: + goto st_case_1117 + case 1118: + goto st_case_1118 + case 1119: + goto st_case_1119 + case 1120: + goto st_case_1120 + case 1121: + goto st_case_1121 + case 1122: + goto st_case_1122 + case 1123: + goto st_case_1123 + case 1124: + goto st_case_1124 + case 1125: + goto st_case_1125 + case 1126: + goto st_case_1126 + case 1127: + goto st_case_1127 + case 1128: + goto st_case_1128 + case 1129: + goto st_case_1129 + case 1130: + goto st_case_1130 + case 1131: + goto st_case_1131 + case 1132: + goto st_case_1132 + case 1133: + goto st_case_1133 + case 1134: + goto st_case_1134 + case 1135: + goto st_case_1135 + case 1136: + goto st_case_1136 + case 1137: + goto st_case_1137 + case 1138: + goto st_case_1138 + case 1139: + goto st_case_1139 + case 1140: + goto st_case_1140 + case 1141: + goto st_case_1141 + case 1142: + goto st_case_1142 + case 1143: + goto st_case_1143 + case 1144: + goto st_case_1144 + case 1145: + goto st_case_1145 + case 1146: + goto st_case_1146 + case 1147: + goto st_case_1147 + case 1148: + goto st_case_1148 + case 1149: + goto st_case_1149 + case 1150: + goto st_case_1150 + case 1151: + goto st_case_1151 + case 1152: + goto st_case_1152 + case 1153: + goto st_case_1153 + case 1154: + goto st_case_1154 + case 1155: + goto st_case_1155 + case 1156: + goto st_case_1156 + case 1157: + goto st_case_1157 + case 1158: + goto st_case_1158 + case 1159: + goto st_case_1159 + case 1160: + goto st_case_1160 + case 1161: + goto st_case_1161 + case 1162: + goto st_case_1162 + case 1163: + goto st_case_1163 + case 1164: + goto st_case_1164 + case 1165: + goto st_case_1165 + case 1166: + goto st_case_1166 + case 1167: + goto st_case_1167 + case 1168: + goto st_case_1168 + case 1169: + goto st_case_1169 + case 1170: + goto st_case_1170 + case 1171: + goto 
st_case_1171 + case 1172: + goto st_case_1172 + case 1173: + goto st_case_1173 + case 1174: + goto st_case_1174 + case 1175: + goto st_case_1175 + case 1176: + goto st_case_1176 + case 1177: + goto st_case_1177 + case 1178: + goto st_case_1178 + case 1179: + goto st_case_1179 + case 1180: + goto st_case_1180 + case 1181: + goto st_case_1181 + case 1182: + goto st_case_1182 + case 1183: + goto st_case_1183 + case 1184: + goto st_case_1184 + case 1185: + goto st_case_1185 + case 1186: + goto st_case_1186 + case 1187: + goto st_case_1187 + case 1188: + goto st_case_1188 + case 1189: + goto st_case_1189 + case 1190: + goto st_case_1190 + case 1191: + goto st_case_1191 + case 1192: + goto st_case_1192 + case 1193: + goto st_case_1193 + case 1194: + goto st_case_1194 + case 1195: + goto st_case_1195 + case 1196: + goto st_case_1196 + case 1197: + goto st_case_1197 + case 1198: + goto st_case_1198 + case 1199: + goto st_case_1199 + case 1200: + goto st_case_1200 + case 1201: + goto st_case_1201 + case 1202: + goto st_case_1202 + case 1203: + goto st_case_1203 + case 1204: + goto st_case_1204 + case 1205: + goto st_case_1205 + case 1206: + goto st_case_1206 + case 1207: + goto st_case_1207 + case 1208: + goto st_case_1208 + case 1209: + goto st_case_1209 + case 1210: + goto st_case_1210 + case 1211: + goto st_case_1211 + case 1212: + goto st_case_1212 + case 1213: + goto st_case_1213 + case 1214: + goto st_case_1214 + case 1215: + goto st_case_1215 + case 1216: + goto st_case_1216 + case 1217: + goto st_case_1217 + case 1218: + goto st_case_1218 + case 1219: + goto st_case_1219 + case 1220: + goto st_case_1220 + case 1221: + goto st_case_1221 + case 1222: + goto st_case_1222 + case 1223: + goto st_case_1223 + case 1224: + goto st_case_1224 + case 1225: + goto st_case_1225 + case 1226: + goto st_case_1226 + case 1227: + goto st_case_1227 + case 1228: + goto st_case_1228 + case 1229: + goto st_case_1229 + case 1230: + goto st_case_1230 + case 1231: + goto st_case_1231 + case 
1232: + goto st_case_1232 + case 1233: + goto st_case_1233 + case 1234: + goto st_case_1234 + case 1235: + goto st_case_1235 + case 1236: + goto st_case_1236 + case 1237: + goto st_case_1237 + case 1238: + goto st_case_1238 + case 1239: + goto st_case_1239 + case 1240: + goto st_case_1240 + case 1241: + goto st_case_1241 + case 1242: + goto st_case_1242 + case 1243: + goto st_case_1243 + case 1244: + goto st_case_1244 + case 1245: + goto st_case_1245 + case 1246: + goto st_case_1246 + case 1247: + goto st_case_1247 + case 1248: + goto st_case_1248 + case 1249: + goto st_case_1249 + case 1250: + goto st_case_1250 + case 1251: + goto st_case_1251 + case 1252: + goto st_case_1252 + case 1253: + goto st_case_1253 + case 1254: + goto st_case_1254 + case 1255: + goto st_case_1255 + case 1256: + goto st_case_1256 + case 1257: + goto st_case_1257 + case 1258: + goto st_case_1258 + case 1259: + goto st_case_1259 + case 1260: + goto st_case_1260 + case 1261: + goto st_case_1261 + case 1262: + goto st_case_1262 + case 1263: + goto st_case_1263 + case 1264: + goto st_case_1264 + case 1265: + goto st_case_1265 + case 1266: + goto st_case_1266 + case 1267: + goto st_case_1267 + case 1268: + goto st_case_1268 + case 1269: + goto st_case_1269 + case 1270: + goto st_case_1270 + case 1271: + goto st_case_1271 + case 1272: + goto st_case_1272 + case 1273: + goto st_case_1273 + case 1274: + goto st_case_1274 + case 1275: + goto st_case_1275 + case 1276: + goto st_case_1276 + case 1277: + goto st_case_1277 + case 1278: + goto st_case_1278 + case 1279: + goto st_case_1279 + case 1280: + goto st_case_1280 + case 1281: + goto st_case_1281 + case 1282: + goto st_case_1282 + case 1283: + goto st_case_1283 + case 1284: + goto st_case_1284 + case 1285: + goto st_case_1285 + case 1286: + goto st_case_1286 + case 1287: + goto st_case_1287 + case 1288: + goto st_case_1288 + case 1289: + goto st_case_1289 + case 1290: + goto st_case_1290 + case 1291: + goto st_case_1291 + case 1292: + goto 
st_case_1292 + case 1293: + goto st_case_1293 + case 1294: + goto st_case_1294 + case 1295: + goto st_case_1295 + case 1296: + goto st_case_1296 + case 1297: + goto st_case_1297 + case 1298: + goto st_case_1298 + case 1299: + goto st_case_1299 + case 1300: + goto st_case_1300 + case 1301: + goto st_case_1301 + case 1302: + goto st_case_1302 + case 1303: + goto st_case_1303 + case 1304: + goto st_case_1304 + case 1305: + goto st_case_1305 + case 1306: + goto st_case_1306 + case 1307: + goto st_case_1307 + case 1308: + goto st_case_1308 + case 1309: + goto st_case_1309 + case 1310: + goto st_case_1310 + case 1311: + goto st_case_1311 + case 1312: + goto st_case_1312 + case 1313: + goto st_case_1313 + case 1314: + goto st_case_1314 + case 1315: + goto st_case_1315 + case 1316: + goto st_case_1316 + case 1317: + goto st_case_1317 + case 1318: + goto st_case_1318 + case 1319: + goto st_case_1319 + case 1320: + goto st_case_1320 + case 1321: + goto st_case_1321 + case 1322: + goto st_case_1322 + case 1323: + goto st_case_1323 + case 1324: + goto st_case_1324 + case 1325: + goto st_case_1325 + case 1326: + goto st_case_1326 + case 1327: + goto st_case_1327 + case 1328: + goto st_case_1328 + case 1329: + goto st_case_1329 + case 1330: + goto st_case_1330 + case 1331: + goto st_case_1331 + case 1332: + goto st_case_1332 + case 1333: + goto st_case_1333 + case 1334: + goto st_case_1334 + case 1335: + goto st_case_1335 + case 1336: + goto st_case_1336 + case 1337: + goto st_case_1337 + case 1338: + goto st_case_1338 + case 1339: + goto st_case_1339 + case 1340: + goto st_case_1340 + case 1341: + goto st_case_1341 + case 1342: + goto st_case_1342 + case 1343: + goto st_case_1343 + case 1344: + goto st_case_1344 + case 1345: + goto st_case_1345 + case 1346: + goto st_case_1346 + case 1347: + goto st_case_1347 + case 1348: + goto st_case_1348 + case 1349: + goto st_case_1349 + case 1350: + goto st_case_1350 + case 1351: + goto st_case_1351 + case 1352: + goto st_case_1352 + case 
1353: + goto st_case_1353 + case 1354: + goto st_case_1354 + case 1355: + goto st_case_1355 + case 1356: + goto st_case_1356 + case 1357: + goto st_case_1357 + case 1358: + goto st_case_1358 + case 1359: + goto st_case_1359 + case 1360: + goto st_case_1360 + case 1361: + goto st_case_1361 + case 1362: + goto st_case_1362 + case 1363: + goto st_case_1363 + case 1364: + goto st_case_1364 + case 1365: + goto st_case_1365 + case 1366: + goto st_case_1366 + case 1367: + goto st_case_1367 + case 1368: + goto st_case_1368 + case 1369: + goto st_case_1369 + case 1370: + goto st_case_1370 + case 1371: + goto st_case_1371 + case 1372: + goto st_case_1372 + case 1373: + goto st_case_1373 + case 1374: + goto st_case_1374 + case 1375: + goto st_case_1375 + case 1376: + goto st_case_1376 + case 1377: + goto st_case_1377 + case 1378: + goto st_case_1378 + case 1379: + goto st_case_1379 + case 1380: + goto st_case_1380 + case 1381: + goto st_case_1381 + case 1382: + goto st_case_1382 + case 1383: + goto st_case_1383 + case 1384: + goto st_case_1384 + case 1385: + goto st_case_1385 + case 1386: + goto st_case_1386 + case 1387: + goto st_case_1387 + case 1388: + goto st_case_1388 + case 1389: + goto st_case_1389 + case 1390: + goto st_case_1390 + case 1391: + goto st_case_1391 + case 1392: + goto st_case_1392 + case 1393: + goto st_case_1393 + case 1394: + goto st_case_1394 + case 1395: + goto st_case_1395 + case 1396: + goto st_case_1396 + case 1397: + goto st_case_1397 + case 1398: + goto st_case_1398 + case 1399: + goto st_case_1399 + case 1400: + goto st_case_1400 + case 1401: + goto st_case_1401 + case 1402: + goto st_case_1402 + case 1403: + goto st_case_1403 + case 1404: + goto st_case_1404 + case 1405: + goto st_case_1405 + case 1406: + goto st_case_1406 + case 1407: + goto st_case_1407 + case 1408: + goto st_case_1408 + case 1409: + goto st_case_1409 + case 1410: + goto st_case_1410 + case 1411: + goto st_case_1411 + case 1412: + goto st_case_1412 + case 1413: + goto 
st_case_1413 + case 1414: + goto st_case_1414 + case 1415: + goto st_case_1415 + case 1416: + goto st_case_1416 + case 1417: + goto st_case_1417 + case 1418: + goto st_case_1418 + case 1419: + goto st_case_1419 + case 1420: + goto st_case_1420 + case 1421: + goto st_case_1421 + case 1422: + goto st_case_1422 + case 1423: + goto st_case_1423 + case 1424: + goto st_case_1424 + case 1425: + goto st_case_1425 + case 1426: + goto st_case_1426 + case 1427: + goto st_case_1427 + case 1428: + goto st_case_1428 + case 1429: + goto st_case_1429 + case 1430: + goto st_case_1430 + case 1431: + goto st_case_1431 + case 1432: + goto st_case_1432 + case 1433: + goto st_case_1433 + case 1434: + goto st_case_1434 + case 1435: + goto st_case_1435 + case 1436: + goto st_case_1436 + case 1437: + goto st_case_1437 + case 1438: + goto st_case_1438 + case 1439: + goto st_case_1439 + case 1440: + goto st_case_1440 + case 1441: + goto st_case_1441 + case 1442: + goto st_case_1442 + case 1443: + goto st_case_1443 + case 1444: + goto st_case_1444 + case 1445: + goto st_case_1445 + case 1446: + goto st_case_1446 + case 1447: + goto st_case_1447 + case 1448: + goto st_case_1448 + case 1449: + goto st_case_1449 + case 1450: + goto st_case_1450 + case 1451: + goto st_case_1451 + case 1452: + goto st_case_1452 + case 1453: + goto st_case_1453 + case 1454: + goto st_case_1454 + case 1455: + goto st_case_1455 + case 1456: + goto st_case_1456 + case 1457: + goto st_case_1457 + case 1458: + goto st_case_1458 + case 1459: + goto st_case_1459 + case 1460: + goto st_case_1460 + case 1461: + goto st_case_1461 + case 1462: + goto st_case_1462 + case 1463: + goto st_case_1463 + case 1464: + goto st_case_1464 + case 1465: + goto st_case_1465 + case 1466: + goto st_case_1466 + case 1467: + goto st_case_1467 + case 1468: + goto st_case_1468 + case 1469: + goto st_case_1469 + case 1470: + goto st_case_1470 + case 1471: + goto st_case_1471 + case 1472: + goto st_case_1472 + case 1473: + goto st_case_1473 + case 
1474: + goto st_case_1474 + case 1475: + goto st_case_1475 + case 1476: + goto st_case_1476 + case 1477: + goto st_case_1477 + case 1478: + goto st_case_1478 + case 1479: + goto st_case_1479 + case 1480: + goto st_case_1480 + case 1481: + goto st_case_1481 + case 1482: + goto st_case_1482 + case 1483: + goto st_case_1483 + case 1484: + goto st_case_1484 + case 1485: + goto st_case_1485 + case 1486: + goto st_case_1486 + case 1487: + goto st_case_1487 + case 1488: + goto st_case_1488 + case 1489: + goto st_case_1489 + case 1490: + goto st_case_1490 + case 1491: + goto st_case_1491 + case 1492: + goto st_case_1492 + case 1493: + goto st_case_1493 + case 1494: + goto st_case_1494 + case 1495: + goto st_case_1495 + case 1496: + goto st_case_1496 + case 1497: + goto st_case_1497 + case 1498: + goto st_case_1498 + case 1499: + goto st_case_1499 + case 1500: + goto st_case_1500 + case 1501: + goto st_case_1501 + case 1502: + goto st_case_1502 + case 1503: + goto st_case_1503 + case 1504: + goto st_case_1504 + case 1505: + goto st_case_1505 + case 1506: + goto st_case_1506 + case 1507: + goto st_case_1507 + case 1508: + goto st_case_1508 + case 1509: + goto st_case_1509 + case 1510: + goto st_case_1510 + case 1511: + goto st_case_1511 + case 1512: + goto st_case_1512 + case 1513: + goto st_case_1513 + case 1514: + goto st_case_1514 + case 1515: + goto st_case_1515 + case 1516: + goto st_case_1516 + case 1517: + goto st_case_1517 + case 1518: + goto st_case_1518 + case 1519: + goto st_case_1519 + case 1520: + goto st_case_1520 + case 1521: + goto st_case_1521 + case 1522: + goto st_case_1522 + case 1523: + goto st_case_1523 + case 1524: + goto st_case_1524 + case 1525: + goto st_case_1525 + case 1526: + goto st_case_1526 + case 1527: + goto st_case_1527 + case 1528: + goto st_case_1528 + case 1529: + goto st_case_1529 + case 1530: + goto st_case_1530 + case 1531: + goto st_case_1531 + case 1532: + goto st_case_1532 + case 1533: + goto st_case_1533 + case 1534: + goto 
st_case_1534 + case 1535: + goto st_case_1535 + case 1536: + goto st_case_1536 + case 1537: + goto st_case_1537 + case 1538: + goto st_case_1538 + case 1539: + goto st_case_1539 + case 1540: + goto st_case_1540 + case 1541: + goto st_case_1541 + case 1542: + goto st_case_1542 + case 1543: + goto st_case_1543 + case 1544: + goto st_case_1544 + case 1545: + goto st_case_1545 + case 1546: + goto st_case_1546 + case 1547: + goto st_case_1547 + case 1548: + goto st_case_1548 + case 1549: + goto st_case_1549 + case 1550: + goto st_case_1550 + case 1551: + goto st_case_1551 + case 1552: + goto st_case_1552 + case 1553: + goto st_case_1553 + case 1554: + goto st_case_1554 + case 1555: + goto st_case_1555 + case 1556: + goto st_case_1556 + case 1557: + goto st_case_1557 + case 1558: + goto st_case_1558 + case 1559: + goto st_case_1559 + case 1560: + goto st_case_1560 + case 1561: + goto st_case_1561 + case 1562: + goto st_case_1562 + case 1563: + goto st_case_1563 + case 1564: + goto st_case_1564 + case 1565: + goto st_case_1565 + case 1566: + goto st_case_1566 + case 1567: + goto st_case_1567 + case 1568: + goto st_case_1568 + case 1569: + goto st_case_1569 + case 1570: + goto st_case_1570 + case 1571: + goto st_case_1571 + case 1572: + goto st_case_1572 + case 1573: + goto st_case_1573 + case 1574: + goto st_case_1574 + case 1575: + goto st_case_1575 + case 1576: + goto st_case_1576 + case 1577: + goto st_case_1577 + case 1578: + goto st_case_1578 + case 1579: + goto st_case_1579 + case 1580: + goto st_case_1580 + case 1581: + goto st_case_1581 + case 1582: + goto st_case_1582 + case 1583: + goto st_case_1583 + case 1584: + goto st_case_1584 + case 1585: + goto st_case_1585 + case 1586: + goto st_case_1586 + case 1587: + goto st_case_1587 + case 1588: + goto st_case_1588 + case 1589: + goto st_case_1589 + case 1590: + goto st_case_1590 + case 1591: + goto st_case_1591 + case 4873: + goto st_case_4873 + case 1592: + goto st_case_1592 + case 1593: + goto st_case_1593 + case 
1594: + goto st_case_1594 + case 1595: + goto st_case_1595 + case 1596: + goto st_case_1596 + case 1597: + goto st_case_1597 + case 1598: + goto st_case_1598 + case 1599: + goto st_case_1599 + case 1600: + goto st_case_1600 + case 1601: + goto st_case_1601 + case 1602: + goto st_case_1602 + case 1603: + goto st_case_1603 + case 1604: + goto st_case_1604 + case 1605: + goto st_case_1605 + case 1606: + goto st_case_1606 + case 1607: + goto st_case_1607 + case 1608: + goto st_case_1608 + case 1609: + goto st_case_1609 + case 1610: + goto st_case_1610 + case 1611: + goto st_case_1611 + case 1612: + goto st_case_1612 + case 1613: + goto st_case_1613 + case 1614: + goto st_case_1614 + case 1615: + goto st_case_1615 + case 1616: + goto st_case_1616 + case 1617: + goto st_case_1617 + case 1618: + goto st_case_1618 + case 1619: + goto st_case_1619 + case 1620: + goto st_case_1620 + case 1621: + goto st_case_1621 + case 1622: + goto st_case_1622 + case 1623: + goto st_case_1623 + case 1624: + goto st_case_1624 + case 1625: + goto st_case_1625 + case 1626: + goto st_case_1626 + case 1627: + goto st_case_1627 + case 1628: + goto st_case_1628 + case 1629: + goto st_case_1629 + case 1630: + goto st_case_1630 + case 1631: + goto st_case_1631 + case 1632: + goto st_case_1632 + case 1633: + goto st_case_1633 + case 1634: + goto st_case_1634 + case 1635: + goto st_case_1635 + case 1636: + goto st_case_1636 + case 1637: + goto st_case_1637 + case 1638: + goto st_case_1638 + case 1639: + goto st_case_1639 + case 1640: + goto st_case_1640 + case 1641: + goto st_case_1641 + case 1642: + goto st_case_1642 + case 1643: + goto st_case_1643 + case 1644: + goto st_case_1644 + case 1645: + goto st_case_1645 + case 1646: + goto st_case_1646 + case 1647: + goto st_case_1647 + case 1648: + goto st_case_1648 + case 1649: + goto st_case_1649 + case 1650: + goto st_case_1650 + case 1651: + goto st_case_1651 + case 1652: + goto st_case_1652 + case 1653: + goto st_case_1653 + case 1654: + goto 
st_case_1654 + case 1655: + goto st_case_1655 + case 1656: + goto st_case_1656 + case 1657: + goto st_case_1657 + case 1658: + goto st_case_1658 + case 1659: + goto st_case_1659 + case 1660: + goto st_case_1660 + case 1661: + goto st_case_1661 + case 1662: + goto st_case_1662 + case 1663: + goto st_case_1663 + case 1664: + goto st_case_1664 + case 1665: + goto st_case_1665 + case 1666: + goto st_case_1666 + case 1667: + goto st_case_1667 + case 1668: + goto st_case_1668 + case 1669: + goto st_case_1669 + case 1670: + goto st_case_1670 + case 1671: + goto st_case_1671 + case 1672: + goto st_case_1672 + case 1673: + goto st_case_1673 + case 1674: + goto st_case_1674 + case 1675: + goto st_case_1675 + case 1676: + goto st_case_1676 + case 1677: + goto st_case_1677 + case 1678: + goto st_case_1678 + case 1679: + goto st_case_1679 + case 1680: + goto st_case_1680 + case 1681: + goto st_case_1681 + case 1682: + goto st_case_1682 + case 1683: + goto st_case_1683 + case 1684: + goto st_case_1684 + case 1685: + goto st_case_1685 + case 1686: + goto st_case_1686 + case 1687: + goto st_case_1687 + case 1688: + goto st_case_1688 + case 1689: + goto st_case_1689 + case 1690: + goto st_case_1690 + case 1691: + goto st_case_1691 + case 1692: + goto st_case_1692 + case 1693: + goto st_case_1693 + case 1694: + goto st_case_1694 + case 1695: + goto st_case_1695 + case 1696: + goto st_case_1696 + case 1697: + goto st_case_1697 + case 1698: + goto st_case_1698 + case 1699: + goto st_case_1699 + case 1700: + goto st_case_1700 + case 1701: + goto st_case_1701 + case 1702: + goto st_case_1702 + case 1703: + goto st_case_1703 + case 1704: + goto st_case_1704 + case 1705: + goto st_case_1705 + case 1706: + goto st_case_1706 + case 1707: + goto st_case_1707 + case 1708: + goto st_case_1708 + case 1709: + goto st_case_1709 + case 1710: + goto st_case_1710 + case 1711: + goto st_case_1711 + case 1712: + goto st_case_1712 + case 1713: + goto st_case_1713 + case 1714: + goto st_case_1714 + case 
1715: + goto st_case_1715 + case 1716: + goto st_case_1716 + case 1717: + goto st_case_1717 + case 1718: + goto st_case_1718 + case 1719: + goto st_case_1719 + case 1720: + goto st_case_1720 + case 1721: + goto st_case_1721 + case 1722: + goto st_case_1722 + case 1723: + goto st_case_1723 + case 1724: + goto st_case_1724 + case 1725: + goto st_case_1725 + case 1726: + goto st_case_1726 + case 1727: + goto st_case_1727 + case 1728: + goto st_case_1728 + case 1729: + goto st_case_1729 + case 1730: + goto st_case_1730 + case 1731: + goto st_case_1731 + case 1732: + goto st_case_1732 + case 1733: + goto st_case_1733 + case 1734: + goto st_case_1734 + case 1735: + goto st_case_1735 + case 1736: + goto st_case_1736 + case 1737: + goto st_case_1737 + case 1738: + goto st_case_1738 + case 1739: + goto st_case_1739 + case 1740: + goto st_case_1740 + case 1741: + goto st_case_1741 + case 1742: + goto st_case_1742 + case 1743: + goto st_case_1743 + case 1744: + goto st_case_1744 + case 1745: + goto st_case_1745 + case 1746: + goto st_case_1746 + case 1747: + goto st_case_1747 + case 1748: + goto st_case_1748 + case 1749: + goto st_case_1749 + case 1750: + goto st_case_1750 + case 1751: + goto st_case_1751 + case 1752: + goto st_case_1752 + case 1753: + goto st_case_1753 + case 1754: + goto st_case_1754 + case 1755: + goto st_case_1755 + case 1756: + goto st_case_1756 + case 1757: + goto st_case_1757 + case 1758: + goto st_case_1758 + case 1759: + goto st_case_1759 + case 1760: + goto st_case_1760 + case 1761: + goto st_case_1761 + case 1762: + goto st_case_1762 + case 1763: + goto st_case_1763 + case 1764: + goto st_case_1764 + case 1765: + goto st_case_1765 + case 1766: + goto st_case_1766 + case 1767: + goto st_case_1767 + case 1768: + goto st_case_1768 + case 1769: + goto st_case_1769 + case 1770: + goto st_case_1770 + case 1771: + goto st_case_1771 + case 1772: + goto st_case_1772 + case 1773: + goto st_case_1773 + case 1774: + goto st_case_1774 + case 1775: + goto 
st_case_1775 + case 1776: + goto st_case_1776 + case 1777: + goto st_case_1777 + case 1778: + goto st_case_1778 + case 1779: + goto st_case_1779 + case 1780: + goto st_case_1780 + case 1781: + goto st_case_1781 + case 1782: + goto st_case_1782 + case 1783: + goto st_case_1783 + case 1784: + goto st_case_1784 + case 1785: + goto st_case_1785 + case 1786: + goto st_case_1786 + case 1787: + goto st_case_1787 + case 1788: + goto st_case_1788 + case 1789: + goto st_case_1789 + case 1790: + goto st_case_1790 + case 1791: + goto st_case_1791 + case 1792: + goto st_case_1792 + case 1793: + goto st_case_1793 + case 1794: + goto st_case_1794 + case 1795: + goto st_case_1795 + case 1796: + goto st_case_1796 + case 1797: + goto st_case_1797 + case 1798: + goto st_case_1798 + case 1799: + goto st_case_1799 + case 1800: + goto st_case_1800 + case 1801: + goto st_case_1801 + case 1802: + goto st_case_1802 + case 1803: + goto st_case_1803 + case 1804: + goto st_case_1804 + case 1805: + goto st_case_1805 + case 1806: + goto st_case_1806 + case 1807: + goto st_case_1807 + case 1808: + goto st_case_1808 + case 1809: + goto st_case_1809 + case 1810: + goto st_case_1810 + case 1811: + goto st_case_1811 + case 1812: + goto st_case_1812 + case 1813: + goto st_case_1813 + case 1814: + goto st_case_1814 + case 1815: + goto st_case_1815 + case 1816: + goto st_case_1816 + case 1817: + goto st_case_1817 + case 1818: + goto st_case_1818 + case 1819: + goto st_case_1819 + case 1820: + goto st_case_1820 + case 1821: + goto st_case_1821 + case 1822: + goto st_case_1822 + case 1823: + goto st_case_1823 + case 1824: + goto st_case_1824 + case 1825: + goto st_case_1825 + case 1826: + goto st_case_1826 + case 1827: + goto st_case_1827 + case 1828: + goto st_case_1828 + case 1829: + goto st_case_1829 + case 1830: + goto st_case_1830 + case 1831: + goto st_case_1831 + case 1832: + goto st_case_1832 + case 1833: + goto st_case_1833 + case 1834: + goto st_case_1834 + case 1835: + goto st_case_1835 + case 
1836: + goto st_case_1836 + case 1837: + goto st_case_1837 + case 1838: + goto st_case_1838 + case 1839: + goto st_case_1839 + case 1840: + goto st_case_1840 + case 1841: + goto st_case_1841 + case 1842: + goto st_case_1842 + case 1843: + goto st_case_1843 + case 1844: + goto st_case_1844 + case 1845: + goto st_case_1845 + case 1846: + goto st_case_1846 + case 1847: + goto st_case_1847 + case 1848: + goto st_case_1848 + case 1849: + goto st_case_1849 + case 1850: + goto st_case_1850 + case 1851: + goto st_case_1851 + case 1852: + goto st_case_1852 + case 1853: + goto st_case_1853 + case 1854: + goto st_case_1854 + case 1855: + goto st_case_1855 + case 1856: + goto st_case_1856 + case 1857: + goto st_case_1857 + case 1858: + goto st_case_1858 + case 1859: + goto st_case_1859 + case 1860: + goto st_case_1860 + case 1861: + goto st_case_1861 + case 1862: + goto st_case_1862 + case 1863: + goto st_case_1863 + case 1864: + goto st_case_1864 + case 1865: + goto st_case_1865 + case 1866: + goto st_case_1866 + case 1867: + goto st_case_1867 + case 1868: + goto st_case_1868 + case 1869: + goto st_case_1869 + case 1870: + goto st_case_1870 + case 1871: + goto st_case_1871 + case 1872: + goto st_case_1872 + case 1873: + goto st_case_1873 + case 1874: + goto st_case_1874 + case 1875: + goto st_case_1875 + case 1876: + goto st_case_1876 + case 1877: + goto st_case_1877 + case 1878: + goto st_case_1878 + case 1879: + goto st_case_1879 + case 1880: + goto st_case_1880 + case 1881: + goto st_case_1881 + case 1882: + goto st_case_1882 + case 1883: + goto st_case_1883 + case 1884: + goto st_case_1884 + case 1885: + goto st_case_1885 + case 1886: + goto st_case_1886 + case 1887: + goto st_case_1887 + case 1888: + goto st_case_1888 + case 1889: + goto st_case_1889 + case 1890: + goto st_case_1890 + case 1891: + goto st_case_1891 + case 1892: + goto st_case_1892 + case 1893: + goto st_case_1893 + case 1894: + goto st_case_1894 + case 1895: + goto st_case_1895 + case 1896: + goto 
st_case_1896 + case 1897: + goto st_case_1897 + case 1898: + goto st_case_1898 + case 1899: + goto st_case_1899 + case 1900: + goto st_case_1900 + case 1901: + goto st_case_1901 + case 1902: + goto st_case_1902 + case 1903: + goto st_case_1903 + case 1904: + goto st_case_1904 + case 1905: + goto st_case_1905 + case 1906: + goto st_case_1906 + case 1907: + goto st_case_1907 + case 1908: + goto st_case_1908 + case 1909: + goto st_case_1909 + case 1910: + goto st_case_1910 + case 1911: + goto st_case_1911 + case 1912: + goto st_case_1912 + case 1913: + goto st_case_1913 + case 1914: + goto st_case_1914 + case 1915: + goto st_case_1915 + case 1916: + goto st_case_1916 + case 1917: + goto st_case_1917 + case 1918: + goto st_case_1918 + case 1919: + goto st_case_1919 + case 1920: + goto st_case_1920 + case 1921: + goto st_case_1921 + case 1922: + goto st_case_1922 + case 1923: + goto st_case_1923 + case 1924: + goto st_case_1924 + case 1925: + goto st_case_1925 + case 1926: + goto st_case_1926 + case 1927: + goto st_case_1927 + case 1928: + goto st_case_1928 + case 1929: + goto st_case_1929 + case 1930: + goto st_case_1930 + case 1931: + goto st_case_1931 + case 1932: + goto st_case_1932 + case 1933: + goto st_case_1933 + case 1934: + goto st_case_1934 + case 1935: + goto st_case_1935 + case 1936: + goto st_case_1936 + case 1937: + goto st_case_1937 + case 1938: + goto st_case_1938 + case 1939: + goto st_case_1939 + case 1940: + goto st_case_1940 + case 1941: + goto st_case_1941 + case 1942: + goto st_case_1942 + case 1943: + goto st_case_1943 + case 1944: + goto st_case_1944 + case 1945: + goto st_case_1945 + case 1946: + goto st_case_1946 + case 1947: + goto st_case_1947 + case 1948: + goto st_case_1948 + case 1949: + goto st_case_1949 + case 1950: + goto st_case_1950 + case 1951: + goto st_case_1951 + case 1952: + goto st_case_1952 + case 1953: + goto st_case_1953 + case 1954: + goto st_case_1954 + case 1955: + goto st_case_1955 + case 1956: + goto st_case_1956 + case 
1957: + goto st_case_1957 + case 1958: + goto st_case_1958 + case 1959: + goto st_case_1959 + case 1960: + goto st_case_1960 + case 1961: + goto st_case_1961 + case 1962: + goto st_case_1962 + case 1963: + goto st_case_1963 + case 1964: + goto st_case_1964 + case 1965: + goto st_case_1965 + case 1966: + goto st_case_1966 + case 1967: + goto st_case_1967 + case 1968: + goto st_case_1968 + case 1969: + goto st_case_1969 + case 1970: + goto st_case_1970 + case 1971: + goto st_case_1971 + case 1972: + goto st_case_1972 + case 1973: + goto st_case_1973 + case 1974: + goto st_case_1974 + case 1975: + goto st_case_1975 + case 1976: + goto st_case_1976 + case 1977: + goto st_case_1977 + case 1978: + goto st_case_1978 + case 1979: + goto st_case_1979 + case 1980: + goto st_case_1980 + case 1981: + goto st_case_1981 + case 1982: + goto st_case_1982 + case 1983: + goto st_case_1983 + case 1984: + goto st_case_1984 + case 1985: + goto st_case_1985 + case 1986: + goto st_case_1986 + case 1987: + goto st_case_1987 + case 1988: + goto st_case_1988 + case 1989: + goto st_case_1989 + case 1990: + goto st_case_1990 + case 1991: + goto st_case_1991 + case 1992: + goto st_case_1992 + case 1993: + goto st_case_1993 + case 1994: + goto st_case_1994 + case 1995: + goto st_case_1995 + case 1996: + goto st_case_1996 + case 1997: + goto st_case_1997 + case 1998: + goto st_case_1998 + case 1999: + goto st_case_1999 + case 2000: + goto st_case_2000 + case 2001: + goto st_case_2001 + case 2002: + goto st_case_2002 + case 2003: + goto st_case_2003 + case 2004: + goto st_case_2004 + case 2005: + goto st_case_2005 + case 2006: + goto st_case_2006 + case 2007: + goto st_case_2007 + case 2008: + goto st_case_2008 + case 2009: + goto st_case_2009 + case 2010: + goto st_case_2010 + case 2011: + goto st_case_2011 + case 2012: + goto st_case_2012 + case 2013: + goto st_case_2013 + case 2014: + goto st_case_2014 + case 2015: + goto st_case_2015 + case 2016: + goto st_case_2016 + case 2017: + goto 
st_case_2017 + case 2018: + goto st_case_2018 + case 2019: + goto st_case_2019 + case 2020: + goto st_case_2020 + case 2021: + goto st_case_2021 + case 2022: + goto st_case_2022 + case 2023: + goto st_case_2023 + case 2024: + goto st_case_2024 + case 2025: + goto st_case_2025 + case 2026: + goto st_case_2026 + case 2027: + goto st_case_2027 + case 2028: + goto st_case_2028 + case 2029: + goto st_case_2029 + case 2030: + goto st_case_2030 + case 2031: + goto st_case_2031 + case 2032: + goto st_case_2032 + case 2033: + goto st_case_2033 + case 2034: + goto st_case_2034 + case 2035: + goto st_case_2035 + case 2036: + goto st_case_2036 + case 2037: + goto st_case_2037 + case 2038: + goto st_case_2038 + case 2039: + goto st_case_2039 + case 2040: + goto st_case_2040 + case 2041: + goto st_case_2041 + case 2042: + goto st_case_2042 + case 2043: + goto st_case_2043 + case 2044: + goto st_case_2044 + case 2045: + goto st_case_2045 + case 2046: + goto st_case_2046 + case 2047: + goto st_case_2047 + case 2048: + goto st_case_2048 + case 2049: + goto st_case_2049 + case 2050: + goto st_case_2050 + case 2051: + goto st_case_2051 + case 2052: + goto st_case_2052 + case 2053: + goto st_case_2053 + case 2054: + goto st_case_2054 + case 2055: + goto st_case_2055 + case 2056: + goto st_case_2056 + case 2057: + goto st_case_2057 + case 2058: + goto st_case_2058 + case 2059: + goto st_case_2059 + case 2060: + goto st_case_2060 + case 2061: + goto st_case_2061 + case 2062: + goto st_case_2062 + case 2063: + goto st_case_2063 + case 2064: + goto st_case_2064 + case 2065: + goto st_case_2065 + case 2066: + goto st_case_2066 + case 2067: + goto st_case_2067 + case 2068: + goto st_case_2068 + case 2069: + goto st_case_2069 + case 2070: + goto st_case_2070 + case 2071: + goto st_case_2071 + case 2072: + goto st_case_2072 + case 2073: + goto st_case_2073 + case 2074: + goto st_case_2074 + case 2075: + goto st_case_2075 + case 2076: + goto st_case_2076 + case 2077: + goto st_case_2077 + case 
2078: + goto st_case_2078 + case 2079: + goto st_case_2079 + case 2080: + goto st_case_2080 + case 2081: + goto st_case_2081 + case 2082: + goto st_case_2082 + case 2083: + goto st_case_2083 + case 2084: + goto st_case_2084 + case 2085: + goto st_case_2085 + case 2086: + goto st_case_2086 + case 2087: + goto st_case_2087 + case 2088: + goto st_case_2088 + case 2089: + goto st_case_2089 + case 2090: + goto st_case_2090 + case 2091: + goto st_case_2091 + case 2092: + goto st_case_2092 + case 2093: + goto st_case_2093 + case 2094: + goto st_case_2094 + case 2095: + goto st_case_2095 + case 2096: + goto st_case_2096 + case 2097: + goto st_case_2097 + case 2098: + goto st_case_2098 + case 2099: + goto st_case_2099 + case 2100: + goto st_case_2100 + case 2101: + goto st_case_2101 + case 2102: + goto st_case_2102 + case 2103: + goto st_case_2103 + case 2104: + goto st_case_2104 + case 2105: + goto st_case_2105 + case 2106: + goto st_case_2106 + case 2107: + goto st_case_2107 + case 2108: + goto st_case_2108 + case 2109: + goto st_case_2109 + case 2110: + goto st_case_2110 + case 2111: + goto st_case_2111 + case 2112: + goto st_case_2112 + case 2113: + goto st_case_2113 + case 2114: + goto st_case_2114 + case 2115: + goto st_case_2115 + case 2116: + goto st_case_2116 + case 2117: + goto st_case_2117 + case 2118: + goto st_case_2118 + case 2119: + goto st_case_2119 + case 2120: + goto st_case_2120 + case 2121: + goto st_case_2121 + case 2122: + goto st_case_2122 + case 2123: + goto st_case_2123 + case 2124: + goto st_case_2124 + case 2125: + goto st_case_2125 + case 2126: + goto st_case_2126 + case 2127: + goto st_case_2127 + case 2128: + goto st_case_2128 + case 2129: + goto st_case_2129 + case 2130: + goto st_case_2130 + case 2131: + goto st_case_2131 + case 2132: + goto st_case_2132 + case 2133: + goto st_case_2133 + case 2134: + goto st_case_2134 + case 2135: + goto st_case_2135 + case 2136: + goto st_case_2136 + case 2137: + goto st_case_2137 + case 2138: + goto 
st_case_2138 + case 2139: + goto st_case_2139 + case 2140: + goto st_case_2140 + case 2141: + goto st_case_2141 + case 2142: + goto st_case_2142 + case 2143: + goto st_case_2143 + case 2144: + goto st_case_2144 + case 2145: + goto st_case_2145 + case 2146: + goto st_case_2146 + case 2147: + goto st_case_2147 + case 2148: + goto st_case_2148 + case 2149: + goto st_case_2149 + case 2150: + goto st_case_2150 + case 2151: + goto st_case_2151 + case 2152: + goto st_case_2152 + case 2153: + goto st_case_2153 + case 2154: + goto st_case_2154 + case 2155: + goto st_case_2155 + case 2156: + goto st_case_2156 + case 2157: + goto st_case_2157 + case 2158: + goto st_case_2158 + case 2159: + goto st_case_2159 + case 2160: + goto st_case_2160 + case 2161: + goto st_case_2161 + case 2162: + goto st_case_2162 + case 2163: + goto st_case_2163 + case 2164: + goto st_case_2164 + case 2165: + goto st_case_2165 + case 2166: + goto st_case_2166 + case 2167: + goto st_case_2167 + case 2168: + goto st_case_2168 + case 2169: + goto st_case_2169 + case 2170: + goto st_case_2170 + case 2171: + goto st_case_2171 + case 2172: + goto st_case_2172 + case 2173: + goto st_case_2173 + case 2174: + goto st_case_2174 + case 2175: + goto st_case_2175 + case 2176: + goto st_case_2176 + case 2177: + goto st_case_2177 + case 2178: + goto st_case_2178 + case 2179: + goto st_case_2179 + case 2180: + goto st_case_2180 + case 2181: + goto st_case_2181 + case 2182: + goto st_case_2182 + case 2183: + goto st_case_2183 + case 2184: + goto st_case_2184 + case 2185: + goto st_case_2185 + case 2186: + goto st_case_2186 + case 2187: + goto st_case_2187 + case 2188: + goto st_case_2188 + case 2189: + goto st_case_2189 + case 2190: + goto st_case_2190 + case 2191: + goto st_case_2191 + case 2192: + goto st_case_2192 + case 4874: + goto st_case_4874 + case 2193: + goto st_case_2193 + case 2194: + goto st_case_2194 + case 2195: + goto st_case_2195 + case 2196: + goto st_case_2196 + case 2197: + goto st_case_2197 + case 
2198: + goto st_case_2198 + case 2199: + goto st_case_2199 + case 2200: + goto st_case_2200 + case 2201: + goto st_case_2201 + case 2202: + goto st_case_2202 + case 2203: + goto st_case_2203 + case 2204: + goto st_case_2204 + case 2205: + goto st_case_2205 + case 2206: + goto st_case_2206 + case 2207: + goto st_case_2207 + case 2208: + goto st_case_2208 + case 2209: + goto st_case_2209 + case 2210: + goto st_case_2210 + case 2211: + goto st_case_2211 + case 2212: + goto st_case_2212 + case 2213: + goto st_case_2213 + case 2214: + goto st_case_2214 + case 2215: + goto st_case_2215 + case 2216: + goto st_case_2216 + case 2217: + goto st_case_2217 + case 2218: + goto st_case_2218 + case 2219: + goto st_case_2219 + case 2220: + goto st_case_2220 + case 2221: + goto st_case_2221 + case 2222: + goto st_case_2222 + case 2223: + goto st_case_2223 + case 2224: + goto st_case_2224 + case 2225: + goto st_case_2225 + case 2226: + goto st_case_2226 + case 2227: + goto st_case_2227 + case 2228: + goto st_case_2228 + case 2229: + goto st_case_2229 + case 2230: + goto st_case_2230 + case 2231: + goto st_case_2231 + case 2232: + goto st_case_2232 + case 2233: + goto st_case_2233 + case 2234: + goto st_case_2234 + case 2235: + goto st_case_2235 + case 2236: + goto st_case_2236 + case 2237: + goto st_case_2237 + case 2238: + goto st_case_2238 + case 2239: + goto st_case_2239 + case 2240: + goto st_case_2240 + case 2241: + goto st_case_2241 + case 2242: + goto st_case_2242 + case 2243: + goto st_case_2243 + case 2244: + goto st_case_2244 + case 2245: + goto st_case_2245 + case 2246: + goto st_case_2246 + case 2247: + goto st_case_2247 + case 2248: + goto st_case_2248 + case 2249: + goto st_case_2249 + case 2250: + goto st_case_2250 + case 2251: + goto st_case_2251 + case 2252: + goto st_case_2252 + case 2253: + goto st_case_2253 + case 2254: + goto st_case_2254 + case 2255: + goto st_case_2255 + case 2256: + goto st_case_2256 + case 2257: + goto st_case_2257 + case 2258: + goto 
st_case_2258 + case 2259: + goto st_case_2259 + case 2260: + goto st_case_2260 + case 2261: + goto st_case_2261 + case 2262: + goto st_case_2262 + case 2263: + goto st_case_2263 + case 2264: + goto st_case_2264 + case 2265: + goto st_case_2265 + case 2266: + goto st_case_2266 + case 2267: + goto st_case_2267 + case 2268: + goto st_case_2268 + case 2269: + goto st_case_2269 + case 2270: + goto st_case_2270 + case 2271: + goto st_case_2271 + case 2272: + goto st_case_2272 + case 2273: + goto st_case_2273 + case 2274: + goto st_case_2274 + case 2275: + goto st_case_2275 + case 2276: + goto st_case_2276 + case 2277: + goto st_case_2277 + case 2278: + goto st_case_2278 + case 2279: + goto st_case_2279 + case 2280: + goto st_case_2280 + case 2281: + goto st_case_2281 + case 2282: + goto st_case_2282 + case 2283: + goto st_case_2283 + case 2284: + goto st_case_2284 + case 2285: + goto st_case_2285 + case 2286: + goto st_case_2286 + case 2287: + goto st_case_2287 + case 2288: + goto st_case_2288 + case 2289: + goto st_case_2289 + case 2290: + goto st_case_2290 + case 2291: + goto st_case_2291 + case 2292: + goto st_case_2292 + case 2293: + goto st_case_2293 + case 2294: + goto st_case_2294 + case 2295: + goto st_case_2295 + case 2296: + goto st_case_2296 + case 2297: + goto st_case_2297 + case 2298: + goto st_case_2298 + case 2299: + goto st_case_2299 + case 2300: + goto st_case_2300 + case 2301: + goto st_case_2301 + case 2302: + goto st_case_2302 + case 2303: + goto st_case_2303 + case 2304: + goto st_case_2304 + case 2305: + goto st_case_2305 + case 2306: + goto st_case_2306 + case 2307: + goto st_case_2307 + case 2308: + goto st_case_2308 + case 2309: + goto st_case_2309 + case 2310: + goto st_case_2310 + case 2311: + goto st_case_2311 + case 2312: + goto st_case_2312 + case 2313: + goto st_case_2313 + case 2314: + goto st_case_2314 + case 2315: + goto st_case_2315 + case 2316: + goto st_case_2316 + case 2317: + goto st_case_2317 + case 2318: + goto st_case_2318 + case 
2319: + goto st_case_2319 + case 2320: + goto st_case_2320 + case 2321: + goto st_case_2321 + case 2322: + goto st_case_2322 + case 2323: + goto st_case_2323 + case 2324: + goto st_case_2324 + case 2325: + goto st_case_2325 + case 2326: + goto st_case_2326 + case 2327: + goto st_case_2327 + case 2328: + goto st_case_2328 + case 2329: + goto st_case_2329 + case 2330: + goto st_case_2330 + case 2331: + goto st_case_2331 + case 2332: + goto st_case_2332 + case 2333: + goto st_case_2333 + case 2334: + goto st_case_2334 + case 2335: + goto st_case_2335 + case 2336: + goto st_case_2336 + case 2337: + goto st_case_2337 + case 2338: + goto st_case_2338 + case 2339: + goto st_case_2339 + case 4875: + goto st_case_4875 + case 4876: + goto st_case_4876 + case 2340: + goto st_case_2340 + case 2341: + goto st_case_2341 + case 2342: + goto st_case_2342 + case 2343: + goto st_case_2343 + case 2344: + goto st_case_2344 + case 2345: + goto st_case_2345 + case 2346: + goto st_case_2346 + case 2347: + goto st_case_2347 + case 2348: + goto st_case_2348 + case 2349: + goto st_case_2349 + case 2350: + goto st_case_2350 + case 2351: + goto st_case_2351 + case 2352: + goto st_case_2352 + case 2353: + goto st_case_2353 + case 2354: + goto st_case_2354 + case 2355: + goto st_case_2355 + case 2356: + goto st_case_2356 + case 2357: + goto st_case_2357 + case 2358: + goto st_case_2358 + case 2359: + goto st_case_2359 + case 2360: + goto st_case_2360 + case 2361: + goto st_case_2361 + case 2362: + goto st_case_2362 + case 2363: + goto st_case_2363 + case 2364: + goto st_case_2364 + case 2365: + goto st_case_2365 + case 2366: + goto st_case_2366 + case 2367: + goto st_case_2367 + case 2368: + goto st_case_2368 + case 2369: + goto st_case_2369 + case 2370: + goto st_case_2370 + case 2371: + goto st_case_2371 + case 2372: + goto st_case_2372 + case 2373: + goto st_case_2373 + case 2374: + goto st_case_2374 + case 2375: + goto st_case_2375 + case 2376: + goto st_case_2376 + case 2377: + goto 
st_case_2377 + case 2378: + goto st_case_2378 + case 2379: + goto st_case_2379 + case 2380: + goto st_case_2380 + case 2381: + goto st_case_2381 + case 2382: + goto st_case_2382 + case 2383: + goto st_case_2383 + case 2384: + goto st_case_2384 + case 2385: + goto st_case_2385 + case 2386: + goto st_case_2386 + case 2387: + goto st_case_2387 + case 2388: + goto st_case_2388 + case 2389: + goto st_case_2389 + case 2390: + goto st_case_2390 + case 2391: + goto st_case_2391 + case 2392: + goto st_case_2392 + case 2393: + goto st_case_2393 + case 2394: + goto st_case_2394 + case 2395: + goto st_case_2395 + case 2396: + goto st_case_2396 + case 2397: + goto st_case_2397 + case 2398: + goto st_case_2398 + case 2399: + goto st_case_2399 + case 2400: + goto st_case_2400 + case 2401: + goto st_case_2401 + case 2402: + goto st_case_2402 + case 2403: + goto st_case_2403 + case 2404: + goto st_case_2404 + case 2405: + goto st_case_2405 + case 2406: + goto st_case_2406 + case 2407: + goto st_case_2407 + case 2408: + goto st_case_2408 + case 2409: + goto st_case_2409 + case 2410: + goto st_case_2410 + case 2411: + goto st_case_2411 + case 2412: + goto st_case_2412 + case 2413: + goto st_case_2413 + case 2414: + goto st_case_2414 + case 2415: + goto st_case_2415 + case 2416: + goto st_case_2416 + case 2417: + goto st_case_2417 + case 2418: + goto st_case_2418 + case 2419: + goto st_case_2419 + case 2420: + goto st_case_2420 + case 2421: + goto st_case_2421 + case 2422: + goto st_case_2422 + case 2423: + goto st_case_2423 + case 2424: + goto st_case_2424 + case 2425: + goto st_case_2425 + case 2426: + goto st_case_2426 + case 2427: + goto st_case_2427 + case 2428: + goto st_case_2428 + case 2429: + goto st_case_2429 + case 2430: + goto st_case_2430 + case 2431: + goto st_case_2431 + case 2432: + goto st_case_2432 + case 2433: + goto st_case_2433 + case 2434: + goto st_case_2434 + case 2435: + goto st_case_2435 + case 2436: + goto st_case_2436 + case 2437: + goto st_case_2437 + case 
2438: + goto st_case_2438 + case 2439: + goto st_case_2439 + case 2440: + goto st_case_2440 + case 2441: + goto st_case_2441 + case 2442: + goto st_case_2442 + case 2443: + goto st_case_2443 + case 2444: + goto st_case_2444 + case 2445: + goto st_case_2445 + case 2446: + goto st_case_2446 + case 2447: + goto st_case_2447 + case 2448: + goto st_case_2448 + case 2449: + goto st_case_2449 + case 2450: + goto st_case_2450 + case 2451: + goto st_case_2451 + case 2452: + goto st_case_2452 + case 2453: + goto st_case_2453 + case 2454: + goto st_case_2454 + case 2455: + goto st_case_2455 + case 2456: + goto st_case_2456 + case 2457: + goto st_case_2457 + case 2458: + goto st_case_2458 + case 2459: + goto st_case_2459 + case 2460: + goto st_case_2460 + case 2461: + goto st_case_2461 + case 2462: + goto st_case_2462 + case 2463: + goto st_case_2463 + case 2464: + goto st_case_2464 + case 2465: + goto st_case_2465 + case 2466: + goto st_case_2466 + case 2467: + goto st_case_2467 + case 2468: + goto st_case_2468 + case 2469: + goto st_case_2469 + case 2470: + goto st_case_2470 + case 2471: + goto st_case_2471 + case 2472: + goto st_case_2472 + case 2473: + goto st_case_2473 + case 2474: + goto st_case_2474 + case 2475: + goto st_case_2475 + case 2476: + goto st_case_2476 + case 2477: + goto st_case_2477 + case 2478: + goto st_case_2478 + case 2479: + goto st_case_2479 + case 2480: + goto st_case_2480 + case 2481: + goto st_case_2481 + case 2482: + goto st_case_2482 + case 2483: + goto st_case_2483 + case 2484: + goto st_case_2484 + case 2485: + goto st_case_2485 + case 2486: + goto st_case_2486 + case 2487: + goto st_case_2487 + case 2488: + goto st_case_2488 + case 2489: + goto st_case_2489 + case 2490: + goto st_case_2490 + case 2491: + goto st_case_2491 + case 2492: + goto st_case_2492 + case 2493: + goto st_case_2493 + case 2494: + goto st_case_2494 + case 2495: + goto st_case_2495 + case 2496: + goto st_case_2496 + case 2497: + goto st_case_2497 + case 2498: + goto 
st_case_2498 + case 2499: + goto st_case_2499 + case 2500: + goto st_case_2500 + case 2501: + goto st_case_2501 + case 2502: + goto st_case_2502 + case 2503: + goto st_case_2503 + case 2504: + goto st_case_2504 + case 2505: + goto st_case_2505 + case 2506: + goto st_case_2506 + case 2507: + goto st_case_2507 + case 2508: + goto st_case_2508 + case 2509: + goto st_case_2509 + case 2510: + goto st_case_2510 + case 2511: + goto st_case_2511 + case 2512: + goto st_case_2512 + case 2513: + goto st_case_2513 + case 2514: + goto st_case_2514 + case 2515: + goto st_case_2515 + case 2516: + goto st_case_2516 + case 2517: + goto st_case_2517 + case 2518: + goto st_case_2518 + case 2519: + goto st_case_2519 + case 2520: + goto st_case_2520 + case 2521: + goto st_case_2521 + case 2522: + goto st_case_2522 + case 2523: + goto st_case_2523 + case 2524: + goto st_case_2524 + case 2525: + goto st_case_2525 + case 2526: + goto st_case_2526 + case 2527: + goto st_case_2527 + case 2528: + goto st_case_2528 + case 2529: + goto st_case_2529 + case 2530: + goto st_case_2530 + case 2531: + goto st_case_2531 + case 2532: + goto st_case_2532 + case 2533: + goto st_case_2533 + case 2534: + goto st_case_2534 + case 2535: + goto st_case_2535 + case 2536: + goto st_case_2536 + case 2537: + goto st_case_2537 + case 2538: + goto st_case_2538 + case 2539: + goto st_case_2539 + case 2540: + goto st_case_2540 + case 2541: + goto st_case_2541 + case 2542: + goto st_case_2542 + case 2543: + goto st_case_2543 + case 2544: + goto st_case_2544 + case 2545: + goto st_case_2545 + case 2546: + goto st_case_2546 + case 2547: + goto st_case_2547 + case 2548: + goto st_case_2548 + case 2549: + goto st_case_2549 + case 2550: + goto st_case_2550 + case 2551: + goto st_case_2551 + case 2552: + goto st_case_2552 + case 2553: + goto st_case_2553 + case 2554: + goto st_case_2554 + case 2555: + goto st_case_2555 + case 2556: + goto st_case_2556 + case 2557: + goto st_case_2557 + case 2558: + goto st_case_2558 + case 
2559: + goto st_case_2559 + case 2560: + goto st_case_2560 + case 2561: + goto st_case_2561 + case 2562: + goto st_case_2562 + case 2563: + goto st_case_2563 + case 2564: + goto st_case_2564 + case 2565: + goto st_case_2565 + case 2566: + goto st_case_2566 + case 2567: + goto st_case_2567 + case 2568: + goto st_case_2568 + case 2569: + goto st_case_2569 + case 2570: + goto st_case_2570 + case 2571: + goto st_case_2571 + case 2572: + goto st_case_2572 + case 2573: + goto st_case_2573 + case 2574: + goto st_case_2574 + case 2575: + goto st_case_2575 + case 2576: + goto st_case_2576 + case 2577: + goto st_case_2577 + case 2578: + goto st_case_2578 + case 2579: + goto st_case_2579 + case 2580: + goto st_case_2580 + case 2581: + goto st_case_2581 + case 2582: + goto st_case_2582 + case 2583: + goto st_case_2583 + case 2584: + goto st_case_2584 + case 2585: + goto st_case_2585 + case 2586: + goto st_case_2586 + case 2587: + goto st_case_2587 + case 2588: + goto st_case_2588 + case 2589: + goto st_case_2589 + case 2590: + goto st_case_2590 + case 2591: + goto st_case_2591 + case 2592: + goto st_case_2592 + case 2593: + goto st_case_2593 + case 2594: + goto st_case_2594 + case 2595: + goto st_case_2595 + case 2596: + goto st_case_2596 + case 2597: + goto st_case_2597 + case 2598: + goto st_case_2598 + case 2599: + goto st_case_2599 + case 2600: + goto st_case_2600 + case 2601: + goto st_case_2601 + case 2602: + goto st_case_2602 + case 2603: + goto st_case_2603 + case 2604: + goto st_case_2604 + case 2605: + goto st_case_2605 + case 2606: + goto st_case_2606 + case 2607: + goto st_case_2607 + case 2608: + goto st_case_2608 + case 2609: + goto st_case_2609 + case 2610: + goto st_case_2610 + case 2611: + goto st_case_2611 + case 2612: + goto st_case_2612 + case 2613: + goto st_case_2613 + case 2614: + goto st_case_2614 + case 2615: + goto st_case_2615 + case 2616: + goto st_case_2616 + case 2617: + goto st_case_2617 + case 2618: + goto st_case_2618 + case 2619: + goto 
st_case_2619 + case 2620: + goto st_case_2620 + case 2621: + goto st_case_2621 + case 2622: + goto st_case_2622 + case 2623: + goto st_case_2623 + case 2624: + goto st_case_2624 + case 2625: + goto st_case_2625 + case 2626: + goto st_case_2626 + case 2627: + goto st_case_2627 + case 2628: + goto st_case_2628 + case 2629: + goto st_case_2629 + case 2630: + goto st_case_2630 + case 2631: + goto st_case_2631 + case 2632: + goto st_case_2632 + case 2633: + goto st_case_2633 + case 2634: + goto st_case_2634 + case 2635: + goto st_case_2635 + case 4877: + goto st_case_4877 + case 4878: + goto st_case_4878 + case 2636: + goto st_case_2636 + case 2637: + goto st_case_2637 + case 2638: + goto st_case_2638 + case 2639: + goto st_case_2639 + case 2640: + goto st_case_2640 + case 2641: + goto st_case_2641 + case 2642: + goto st_case_2642 + case 2643: + goto st_case_2643 + case 2644: + goto st_case_2644 + case 2645: + goto st_case_2645 + case 2646: + goto st_case_2646 + case 2647: + goto st_case_2647 + case 2648: + goto st_case_2648 + case 2649: + goto st_case_2649 + case 2650: + goto st_case_2650 + case 2651: + goto st_case_2651 + case 2652: + goto st_case_2652 + case 2653: + goto st_case_2653 + case 2654: + goto st_case_2654 + case 2655: + goto st_case_2655 + case 2656: + goto st_case_2656 + case 2657: + goto st_case_2657 + case 2658: + goto st_case_2658 + case 2659: + goto st_case_2659 + case 2660: + goto st_case_2660 + case 2661: + goto st_case_2661 + case 2662: + goto st_case_2662 + case 2663: + goto st_case_2663 + case 2664: + goto st_case_2664 + case 2665: + goto st_case_2665 + case 2666: + goto st_case_2666 + case 2667: + goto st_case_2667 + case 2668: + goto st_case_2668 + case 2669: + goto st_case_2669 + case 2670: + goto st_case_2670 + case 2671: + goto st_case_2671 + case 2672: + goto st_case_2672 + case 2673: + goto st_case_2673 + case 2674: + goto st_case_2674 + case 2675: + goto st_case_2675 + case 2676: + goto st_case_2676 + case 2677: + goto st_case_2677 + case 
2678: + goto st_case_2678 + case 2679: + goto st_case_2679 + case 2680: + goto st_case_2680 + case 2681: + goto st_case_2681 + case 2682: + goto st_case_2682 + case 2683: + goto st_case_2683 + case 2684: + goto st_case_2684 + case 2685: + goto st_case_2685 + case 2686: + goto st_case_2686 + case 2687: + goto st_case_2687 + case 2688: + goto st_case_2688 + case 2689: + goto st_case_2689 + case 2690: + goto st_case_2690 + case 2691: + goto st_case_2691 + case 2692: + goto st_case_2692 + case 2693: + goto st_case_2693 + case 2694: + goto st_case_2694 + case 2695: + goto st_case_2695 + case 2696: + goto st_case_2696 + case 2697: + goto st_case_2697 + case 2698: + goto st_case_2698 + case 2699: + goto st_case_2699 + case 2700: + goto st_case_2700 + case 2701: + goto st_case_2701 + case 2702: + goto st_case_2702 + case 2703: + goto st_case_2703 + case 2704: + goto st_case_2704 + case 2705: + goto st_case_2705 + case 2706: + goto st_case_2706 + case 2707: + goto st_case_2707 + case 2708: + goto st_case_2708 + case 2709: + goto st_case_2709 + case 2710: + goto st_case_2710 + case 2711: + goto st_case_2711 + case 2712: + goto st_case_2712 + case 2713: + goto st_case_2713 + case 2714: + goto st_case_2714 + case 2715: + goto st_case_2715 + case 2716: + goto st_case_2716 + case 2717: + goto st_case_2717 + case 2718: + goto st_case_2718 + case 2719: + goto st_case_2719 + case 2720: + goto st_case_2720 + case 2721: + goto st_case_2721 + case 2722: + goto st_case_2722 + case 2723: + goto st_case_2723 + case 2724: + goto st_case_2724 + case 2725: + goto st_case_2725 + case 2726: + goto st_case_2726 + case 2727: + goto st_case_2727 + case 2728: + goto st_case_2728 + case 2729: + goto st_case_2729 + case 2730: + goto st_case_2730 + case 2731: + goto st_case_2731 + case 2732: + goto st_case_2732 + case 2733: + goto st_case_2733 + case 2734: + goto st_case_2734 + case 2735: + goto st_case_2735 + case 2736: + goto st_case_2736 + case 2737: + goto st_case_2737 + case 2738: + goto 
st_case_2738 + case 2739: + goto st_case_2739 + case 2740: + goto st_case_2740 + case 2741: + goto st_case_2741 + case 2742: + goto st_case_2742 + case 2743: + goto st_case_2743 + case 2744: + goto st_case_2744 + case 2745: + goto st_case_2745 + case 2746: + goto st_case_2746 + case 2747: + goto st_case_2747 + case 2748: + goto st_case_2748 + case 2749: + goto st_case_2749 + case 2750: + goto st_case_2750 + case 2751: + goto st_case_2751 + case 2752: + goto st_case_2752 + case 2753: + goto st_case_2753 + case 2754: + goto st_case_2754 + case 2755: + goto st_case_2755 + case 2756: + goto st_case_2756 + case 2757: + goto st_case_2757 + case 2758: + goto st_case_2758 + case 2759: + goto st_case_2759 + case 2760: + goto st_case_2760 + case 2761: + goto st_case_2761 + case 2762: + goto st_case_2762 + case 2763: + goto st_case_2763 + case 2764: + goto st_case_2764 + case 2765: + goto st_case_2765 + case 2766: + goto st_case_2766 + case 2767: + goto st_case_2767 + case 2768: + goto st_case_2768 + case 2769: + goto st_case_2769 + case 2770: + goto st_case_2770 + case 2771: + goto st_case_2771 + case 2772: + goto st_case_2772 + case 2773: + goto st_case_2773 + case 2774: + goto st_case_2774 + case 2775: + goto st_case_2775 + case 2776: + goto st_case_2776 + case 4879: + goto st_case_4879 + case 4880: + goto st_case_4880 + case 4881: + goto st_case_4881 + case 4882: + goto st_case_4882 + case 4883: + goto st_case_4883 + case 4884: + goto st_case_4884 + case 4885: + goto st_case_4885 + case 2777: + goto st_case_2777 + case 2778: + goto st_case_2778 + case 2779: + goto st_case_2779 + case 2780: + goto st_case_2780 + case 2781: + goto st_case_2781 + case 2782: + goto st_case_2782 + case 2783: + goto st_case_2783 + case 2784: + goto st_case_2784 + case 2785: + goto st_case_2785 + case 2786: + goto st_case_2786 + case 2787: + goto st_case_2787 + case 2788: + goto st_case_2788 + case 2789: + goto st_case_2789 + case 2790: + goto st_case_2790 + case 2791: + goto st_case_2791 + case 
2792: + goto st_case_2792 + case 2793: + goto st_case_2793 + case 2794: + goto st_case_2794 + case 2795: + goto st_case_2795 + case 2796: + goto st_case_2796 + case 2797: + goto st_case_2797 + case 2798: + goto st_case_2798 + case 2799: + goto st_case_2799 + case 2800: + goto st_case_2800 + case 2801: + goto st_case_2801 + case 2802: + goto st_case_2802 + case 2803: + goto st_case_2803 + case 2804: + goto st_case_2804 + case 2805: + goto st_case_2805 + case 2806: + goto st_case_2806 + case 2807: + goto st_case_2807 + case 2808: + goto st_case_2808 + case 2809: + goto st_case_2809 + case 2810: + goto st_case_2810 + case 2811: + goto st_case_2811 + case 2812: + goto st_case_2812 + case 2813: + goto st_case_2813 + case 2814: + goto st_case_2814 + case 2815: + goto st_case_2815 + case 2816: + goto st_case_2816 + case 2817: + goto st_case_2817 + case 2818: + goto st_case_2818 + case 2819: + goto st_case_2819 + case 2820: + goto st_case_2820 + case 2821: + goto st_case_2821 + case 2822: + goto st_case_2822 + case 2823: + goto st_case_2823 + case 2824: + goto st_case_2824 + case 2825: + goto st_case_2825 + case 2826: + goto st_case_2826 + case 2827: + goto st_case_2827 + case 2828: + goto st_case_2828 + case 2829: + goto st_case_2829 + case 2830: + goto st_case_2830 + case 2831: + goto st_case_2831 + case 2832: + goto st_case_2832 + case 2833: + goto st_case_2833 + case 2834: + goto st_case_2834 + case 2835: + goto st_case_2835 + case 2836: + goto st_case_2836 + case 2837: + goto st_case_2837 + case 2838: + goto st_case_2838 + case 2839: + goto st_case_2839 + case 2840: + goto st_case_2840 + case 2841: + goto st_case_2841 + case 2842: + goto st_case_2842 + case 2843: + goto st_case_2843 + case 2844: + goto st_case_2844 + case 2845: + goto st_case_2845 + case 2846: + goto st_case_2846 + case 2847: + goto st_case_2847 + case 2848: + goto st_case_2848 + case 2849: + goto st_case_2849 + case 2850: + goto st_case_2850 + case 2851: + goto st_case_2851 + case 2852: + goto 
st_case_2852 + case 2853: + goto st_case_2853 + case 2854: + goto st_case_2854 + case 2855: + goto st_case_2855 + case 2856: + goto st_case_2856 + case 2857: + goto st_case_2857 + case 2858: + goto st_case_2858 + case 2859: + goto st_case_2859 + case 2860: + goto st_case_2860 + case 2861: + goto st_case_2861 + case 2862: + goto st_case_2862 + case 2863: + goto st_case_2863 + case 2864: + goto st_case_2864 + case 2865: + goto st_case_2865 + case 2866: + goto st_case_2866 + case 2867: + goto st_case_2867 + case 2868: + goto st_case_2868 + case 2869: + goto st_case_2869 + case 2870: + goto st_case_2870 + case 2871: + goto st_case_2871 + case 2872: + goto st_case_2872 + case 2873: + goto st_case_2873 + case 2874: + goto st_case_2874 + case 2875: + goto st_case_2875 + case 2876: + goto st_case_2876 + case 2877: + goto st_case_2877 + case 2878: + goto st_case_2878 + case 2879: + goto st_case_2879 + case 2880: + goto st_case_2880 + case 2881: + goto st_case_2881 + case 2882: + goto st_case_2882 + case 2883: + goto st_case_2883 + case 2884: + goto st_case_2884 + case 2885: + goto st_case_2885 + case 2886: + goto st_case_2886 + case 2887: + goto st_case_2887 + case 2888: + goto st_case_2888 + case 2889: + goto st_case_2889 + case 2890: + goto st_case_2890 + case 2891: + goto st_case_2891 + case 2892: + goto st_case_2892 + case 2893: + goto st_case_2893 + case 2894: + goto st_case_2894 + case 2895: + goto st_case_2895 + case 2896: + goto st_case_2896 + case 2897: + goto st_case_2897 + case 2898: + goto st_case_2898 + case 2899: + goto st_case_2899 + case 2900: + goto st_case_2900 + case 2901: + goto st_case_2901 + case 2902: + goto st_case_2902 + case 2903: + goto st_case_2903 + case 2904: + goto st_case_2904 + case 2905: + goto st_case_2905 + case 2906: + goto st_case_2906 + case 2907: + goto st_case_2907 + case 2908: + goto st_case_2908 + case 2909: + goto st_case_2909 + case 2910: + goto st_case_2910 + case 2911: + goto st_case_2911 + case 2912: + goto st_case_2912 + case 
2913: + goto st_case_2913 + case 2914: + goto st_case_2914 + case 2915: + goto st_case_2915 + case 2916: + goto st_case_2916 + case 2917: + goto st_case_2917 + case 2918: + goto st_case_2918 + case 2919: + goto st_case_2919 + case 2920: + goto st_case_2920 + case 2921: + goto st_case_2921 + case 2922: + goto st_case_2922 + case 2923: + goto st_case_2923 + case 4886: + goto st_case_4886 + case 2924: + goto st_case_2924 + case 2925: + goto st_case_2925 + case 2926: + goto st_case_2926 + case 2927: + goto st_case_2927 + case 2928: + goto st_case_2928 + case 2929: + goto st_case_2929 + case 2930: + goto st_case_2930 + case 2931: + goto st_case_2931 + case 2932: + goto st_case_2932 + case 2933: + goto st_case_2933 + case 2934: + goto st_case_2934 + case 2935: + goto st_case_2935 + case 2936: + goto st_case_2936 + case 2937: + goto st_case_2937 + case 2938: + goto st_case_2938 + case 2939: + goto st_case_2939 + case 2940: + goto st_case_2940 + case 2941: + goto st_case_2941 + case 2942: + goto st_case_2942 + case 2943: + goto st_case_2943 + case 2944: + goto st_case_2944 + case 2945: + goto st_case_2945 + case 2946: + goto st_case_2946 + case 2947: + goto st_case_2947 + case 2948: + goto st_case_2948 + case 2949: + goto st_case_2949 + case 2950: + goto st_case_2950 + case 2951: + goto st_case_2951 + case 2952: + goto st_case_2952 + case 2953: + goto st_case_2953 + case 2954: + goto st_case_2954 + case 2955: + goto st_case_2955 + case 2956: + goto st_case_2956 + case 2957: + goto st_case_2957 + case 2958: + goto st_case_2958 + case 2959: + goto st_case_2959 + case 2960: + goto st_case_2960 + case 2961: + goto st_case_2961 + case 2962: + goto st_case_2962 + case 2963: + goto st_case_2963 + case 2964: + goto st_case_2964 + case 2965: + goto st_case_2965 + case 2966: + goto st_case_2966 + case 2967: + goto st_case_2967 + case 2968: + goto st_case_2968 + case 2969: + goto st_case_2969 + case 2970: + goto st_case_2970 + case 2971: + goto st_case_2971 + case 2972: + goto 
st_case_2972 + case 2973: + goto st_case_2973 + case 2974: + goto st_case_2974 + case 2975: + goto st_case_2975 + case 2976: + goto st_case_2976 + case 2977: + goto st_case_2977 + case 2978: + goto st_case_2978 + case 2979: + goto st_case_2979 + case 2980: + goto st_case_2980 + case 2981: + goto st_case_2981 + case 2982: + goto st_case_2982 + case 2983: + goto st_case_2983 + case 2984: + goto st_case_2984 + case 2985: + goto st_case_2985 + case 2986: + goto st_case_2986 + case 2987: + goto st_case_2987 + case 2988: + goto st_case_2988 + case 2989: + goto st_case_2989 + case 2990: + goto st_case_2990 + case 2991: + goto st_case_2991 + case 2992: + goto st_case_2992 + case 2993: + goto st_case_2993 + case 2994: + goto st_case_2994 + case 2995: + goto st_case_2995 + case 2996: + goto st_case_2996 + case 2997: + goto st_case_2997 + case 2998: + goto st_case_2998 + case 2999: + goto st_case_2999 + case 3000: + goto st_case_3000 + case 3001: + goto st_case_3001 + case 3002: + goto st_case_3002 + case 3003: + goto st_case_3003 + case 3004: + goto st_case_3004 + case 3005: + goto st_case_3005 + case 3006: + goto st_case_3006 + case 3007: + goto st_case_3007 + case 3008: + goto st_case_3008 + case 3009: + goto st_case_3009 + case 3010: + goto st_case_3010 + case 3011: + goto st_case_3011 + case 3012: + goto st_case_3012 + case 3013: + goto st_case_3013 + case 3014: + goto st_case_3014 + case 3015: + goto st_case_3015 + case 3016: + goto st_case_3016 + case 3017: + goto st_case_3017 + case 3018: + goto st_case_3018 + case 3019: + goto st_case_3019 + case 3020: + goto st_case_3020 + case 3021: + goto st_case_3021 + case 3022: + goto st_case_3022 + case 3023: + goto st_case_3023 + case 3024: + goto st_case_3024 + case 3025: + goto st_case_3025 + case 3026: + goto st_case_3026 + case 3027: + goto st_case_3027 + case 3028: + goto st_case_3028 + case 3029: + goto st_case_3029 + case 3030: + goto st_case_3030 + case 3031: + goto st_case_3031 + case 3032: + goto st_case_3032 + case 
3033: + goto st_case_3033 + case 3034: + goto st_case_3034 + case 3035: + goto st_case_3035 + case 3036: + goto st_case_3036 + case 3037: + goto st_case_3037 + case 3038: + goto st_case_3038 + case 3039: + goto st_case_3039 + case 3040: + goto st_case_3040 + case 3041: + goto st_case_3041 + case 3042: + goto st_case_3042 + case 3043: + goto st_case_3043 + case 3044: + goto st_case_3044 + case 3045: + goto st_case_3045 + case 3046: + goto st_case_3046 + case 3047: + goto st_case_3047 + case 3048: + goto st_case_3048 + case 3049: + goto st_case_3049 + case 3050: + goto st_case_3050 + case 3051: + goto st_case_3051 + case 3052: + goto st_case_3052 + case 3053: + goto st_case_3053 + case 3054: + goto st_case_3054 + case 3055: + goto st_case_3055 + case 3056: + goto st_case_3056 + case 3057: + goto st_case_3057 + case 3058: + goto st_case_3058 + case 3059: + goto st_case_3059 + case 3060: + goto st_case_3060 + case 3061: + goto st_case_3061 + case 3062: + goto st_case_3062 + case 3063: + goto st_case_3063 + case 3064: + goto st_case_3064 + case 3065: + goto st_case_3065 + case 3066: + goto st_case_3066 + case 3067: + goto st_case_3067 + case 3068: + goto st_case_3068 + case 3069: + goto st_case_3069 + case 3070: + goto st_case_3070 + case 4887: + goto st_case_4887 + case 3071: + goto st_case_3071 + case 3072: + goto st_case_3072 + case 3073: + goto st_case_3073 + case 3074: + goto st_case_3074 + case 3075: + goto st_case_3075 + case 3076: + goto st_case_3076 + case 3077: + goto st_case_3077 + case 3078: + goto st_case_3078 + case 3079: + goto st_case_3079 + case 3080: + goto st_case_3080 + case 3081: + goto st_case_3081 + case 3082: + goto st_case_3082 + case 3083: + goto st_case_3083 + case 3084: + goto st_case_3084 + case 3085: + goto st_case_3085 + case 3086: + goto st_case_3086 + case 3087: + goto st_case_3087 + case 3088: + goto st_case_3088 + case 3089: + goto st_case_3089 + case 3090: + goto st_case_3090 + case 3091: + goto st_case_3091 + case 3092: + goto 
st_case_3092 + case 3093: + goto st_case_3093 + case 3094: + goto st_case_3094 + case 3095: + goto st_case_3095 + case 3096: + goto st_case_3096 + case 3097: + goto st_case_3097 + case 3098: + goto st_case_3098 + case 3099: + goto st_case_3099 + case 3100: + goto st_case_3100 + case 3101: + goto st_case_3101 + case 3102: + goto st_case_3102 + case 3103: + goto st_case_3103 + case 3104: + goto st_case_3104 + case 3105: + goto st_case_3105 + case 3106: + goto st_case_3106 + case 3107: + goto st_case_3107 + case 3108: + goto st_case_3108 + case 3109: + goto st_case_3109 + case 3110: + goto st_case_3110 + case 3111: + goto st_case_3111 + case 3112: + goto st_case_3112 + case 3113: + goto st_case_3113 + case 3114: + goto st_case_3114 + case 3115: + goto st_case_3115 + case 3116: + goto st_case_3116 + case 3117: + goto st_case_3117 + case 3118: + goto st_case_3118 + case 3119: + goto st_case_3119 + case 3120: + goto st_case_3120 + case 3121: + goto st_case_3121 + case 3122: + goto st_case_3122 + case 3123: + goto st_case_3123 + case 3124: + goto st_case_3124 + case 3125: + goto st_case_3125 + case 3126: + goto st_case_3126 + case 3127: + goto st_case_3127 + case 3128: + goto st_case_3128 + case 3129: + goto st_case_3129 + case 3130: + goto st_case_3130 + case 3131: + goto st_case_3131 + case 3132: + goto st_case_3132 + case 3133: + goto st_case_3133 + case 3134: + goto st_case_3134 + case 3135: + goto st_case_3135 + case 3136: + goto st_case_3136 + case 3137: + goto st_case_3137 + case 3138: + goto st_case_3138 + case 3139: + goto st_case_3139 + case 3140: + goto st_case_3140 + case 3141: + goto st_case_3141 + case 3142: + goto st_case_3142 + case 3143: + goto st_case_3143 + case 3144: + goto st_case_3144 + case 3145: + goto st_case_3145 + case 3146: + goto st_case_3146 + case 3147: + goto st_case_3147 + case 3148: + goto st_case_3148 + case 3149: + goto st_case_3149 + case 3150: + goto st_case_3150 + case 3151: + goto st_case_3151 + case 3152: + goto st_case_3152 + case 
3153: + goto st_case_3153 + case 3154: + goto st_case_3154 + case 3155: + goto st_case_3155 + case 3156: + goto st_case_3156 + case 3157: + goto st_case_3157 + case 3158: + goto st_case_3158 + case 3159: + goto st_case_3159 + case 3160: + goto st_case_3160 + case 3161: + goto st_case_3161 + case 3162: + goto st_case_3162 + case 3163: + goto st_case_3163 + case 3164: + goto st_case_3164 + case 3165: + goto st_case_3165 + case 3166: + goto st_case_3166 + case 3167: + goto st_case_3167 + case 3168: + goto st_case_3168 + case 3169: + goto st_case_3169 + case 3170: + goto st_case_3170 + case 3171: + goto st_case_3171 + case 3172: + goto st_case_3172 + case 3173: + goto st_case_3173 + case 3174: + goto st_case_3174 + case 3175: + goto st_case_3175 + case 3176: + goto st_case_3176 + case 3177: + goto st_case_3177 + case 3178: + goto st_case_3178 + case 3179: + goto st_case_3179 + case 3180: + goto st_case_3180 + case 3181: + goto st_case_3181 + case 3182: + goto st_case_3182 + case 3183: + goto st_case_3183 + case 3184: + goto st_case_3184 + case 3185: + goto st_case_3185 + case 3186: + goto st_case_3186 + case 3187: + goto st_case_3187 + case 3188: + goto st_case_3188 + case 3189: + goto st_case_3189 + case 3190: + goto st_case_3190 + case 3191: + goto st_case_3191 + case 3192: + goto st_case_3192 + case 3193: + goto st_case_3193 + case 3194: + goto st_case_3194 + case 3195: + goto st_case_3195 + case 3196: + goto st_case_3196 + case 3197: + goto st_case_3197 + case 3198: + goto st_case_3198 + case 3199: + goto st_case_3199 + case 3200: + goto st_case_3200 + case 3201: + goto st_case_3201 + case 3202: + goto st_case_3202 + case 3203: + goto st_case_3203 + case 3204: + goto st_case_3204 + case 3205: + goto st_case_3205 + case 3206: + goto st_case_3206 + case 3207: + goto st_case_3207 + case 3208: + goto st_case_3208 + case 3209: + goto st_case_3209 + case 3210: + goto st_case_3210 + case 3211: + goto st_case_3211 + case 3212: + goto st_case_3212 + case 3213: + goto 
st_case_3213 + case 3214: + goto st_case_3214 + case 3215: + goto st_case_3215 + case 3216: + goto st_case_3216 + case 3217: + goto st_case_3217 + case 4888: + goto st_case_4888 + case 4889: + goto st_case_4889 + case 4890: + goto st_case_4890 + case 4891: + goto st_case_4891 + case 4892: + goto st_case_4892 + case 4893: + goto st_case_4893 + case 4894: + goto st_case_4894 + case 4895: + goto st_case_4895 + case 4896: + goto st_case_4896 + case 4897: + goto st_case_4897 + case 4898: + goto st_case_4898 + case 4899: + goto st_case_4899 + case 4900: + goto st_case_4900 + case 4901: + goto st_case_4901 + case 4902: + goto st_case_4902 + case 4903: + goto st_case_4903 + case 4904: + goto st_case_4904 + case 4905: + goto st_case_4905 + case 4906: + goto st_case_4906 + case 4907: + goto st_case_4907 + case 4908: + goto st_case_4908 + case 4909: + goto st_case_4909 + case 4910: + goto st_case_4910 + case 4911: + goto st_case_4911 + case 4912: + goto st_case_4912 + case 4913: + goto st_case_4913 + case 4914: + goto st_case_4914 + case 4915: + goto st_case_4915 + case 4916: + goto st_case_4916 + case 4917: + goto st_case_4917 + case 4918: + goto st_case_4918 + case 4919: + goto st_case_4919 + case 4920: + goto st_case_4920 + case 4921: + goto st_case_4921 + case 4922: + goto st_case_4922 + case 4923: + goto st_case_4923 + case 4924: + goto st_case_4924 + case 4925: + goto st_case_4925 + case 4926: + goto st_case_4926 + case 4927: + goto st_case_4927 + case 4928: + goto st_case_4928 + case 3218: + goto st_case_3218 + case 3219: + goto st_case_3219 + case 3220: + goto st_case_3220 + case 3221: + goto st_case_3221 + case 3222: + goto st_case_3222 + case 3223: + goto st_case_3223 + case 3224: + goto st_case_3224 + case 3225: + goto st_case_3225 + case 3226: + goto st_case_3226 + case 3227: + goto st_case_3227 + case 3228: + goto st_case_3228 + case 3229: + goto st_case_3229 + case 3230: + goto st_case_3230 + case 3231: + goto st_case_3231 + case 4929: + goto st_case_4929 + case 
4930: + goto st_case_4930 + case 4931: + goto st_case_4931 + case 4932: + goto st_case_4932 + case 3232: + goto st_case_3232 + case 4933: + goto st_case_4933 + case 4934: + goto st_case_4934 + case 4935: + goto st_case_4935 + case 4936: + goto st_case_4936 + case 4937: + goto st_case_4937 + case 4938: + goto st_case_4938 + case 4939: + goto st_case_4939 + case 4940: + goto st_case_4940 + case 4941: + goto st_case_4941 + case 4942: + goto st_case_4942 + case 4943: + goto st_case_4943 + case 4944: + goto st_case_4944 + case 4945: + goto st_case_4945 + case 4946: + goto st_case_4946 + case 4947: + goto st_case_4947 + case 4948: + goto st_case_4948 + case 4949: + goto st_case_4949 + case 4950: + goto st_case_4950 + case 4951: + goto st_case_4951 + case 4952: + goto st_case_4952 + case 4953: + goto st_case_4953 + case 4954: + goto st_case_4954 + case 4955: + goto st_case_4955 + case 4956: + goto st_case_4956 + case 4957: + goto st_case_4957 + case 3233: + goto st_case_3233 + case 4958: + goto st_case_4958 + case 4959: + goto st_case_4959 + case 4960: + goto st_case_4960 + case 4961: + goto st_case_4961 + case 4962: + goto st_case_4962 + case 4963: + goto st_case_4963 + case 3234: + goto st_case_3234 + case 4964: + goto st_case_4964 + case 4965: + goto st_case_4965 + case 3235: + goto st_case_3235 + case 4966: + goto st_case_4966 + case 4967: + goto st_case_4967 + case 4968: + goto st_case_4968 + case 4969: + goto st_case_4969 + case 4970: + goto st_case_4970 + case 4971: + goto st_case_4971 + case 4972: + goto st_case_4972 + case 4973: + goto st_case_4973 + case 4974: + goto st_case_4974 + case 4975: + goto st_case_4975 + case 4976: + goto st_case_4976 + case 4977: + goto st_case_4977 + case 4978: + goto st_case_4978 + case 4979: + goto st_case_4979 + case 4980: + goto st_case_4980 + case 3236: + goto st_case_3236 + case 4981: + goto st_case_4981 + case 4982: + goto st_case_4982 + case 4983: + goto st_case_4983 + case 3237: + goto st_case_3237 + case 4984: + goto 
st_case_4984 + case 4985: + goto st_case_4985 + case 4986: + goto st_case_4986 + case 4987: + goto st_case_4987 + case 4988: + goto st_case_4988 + case 4989: + goto st_case_4989 + case 3238: + goto st_case_3238 + case 4990: + goto st_case_4990 + case 4991: + goto st_case_4991 + case 4992: + goto st_case_4992 + case 4993: + goto st_case_4993 + case 4994: + goto st_case_4994 + case 4995: + goto st_case_4995 + case 4996: + goto st_case_4996 + case 4997: + goto st_case_4997 + case 4998: + goto st_case_4998 + case 4999: + goto st_case_4999 + case 5000: + goto st_case_5000 + case 5001: + goto st_case_5001 + case 5002: + goto st_case_5002 + case 5003: + goto st_case_5003 + case 5004: + goto st_case_5004 + case 5005: + goto st_case_5005 + case 5006: + goto st_case_5006 + case 5007: + goto st_case_5007 + case 5008: + goto st_case_5008 + case 5009: + goto st_case_5009 + case 5010: + goto st_case_5010 + case 5011: + goto st_case_5011 + case 5012: + goto st_case_5012 + case 5013: + goto st_case_5013 + case 5014: + goto st_case_5014 + case 5015: + goto st_case_5015 + case 5016: + goto st_case_5016 + case 5017: + goto st_case_5017 + case 5018: + goto st_case_5018 + case 5019: + goto st_case_5019 + case 5020: + goto st_case_5020 + case 5021: + goto st_case_5021 + case 5022: + goto st_case_5022 + case 5023: + goto st_case_5023 + case 5024: + goto st_case_5024 + case 5025: + goto st_case_5025 + case 5026: + goto st_case_5026 + case 5027: + goto st_case_5027 + case 5028: + goto st_case_5028 + case 5029: + goto st_case_5029 + case 5030: + goto st_case_5030 + case 5031: + goto st_case_5031 + case 5032: + goto st_case_5032 + case 5033: + goto st_case_5033 + case 5034: + goto st_case_5034 + case 5035: + goto st_case_5035 + case 5036: + goto st_case_5036 + case 5037: + goto st_case_5037 + case 5038: + goto st_case_5038 + case 5039: + goto st_case_5039 + case 5040: + goto st_case_5040 + case 5041: + goto st_case_5041 + case 5042: + goto st_case_5042 + case 5043: + goto st_case_5043 + case 
5044: + goto st_case_5044 + case 5045: + goto st_case_5045 + case 5046: + goto st_case_5046 + case 5047: + goto st_case_5047 + case 5048: + goto st_case_5048 + case 5049: + goto st_case_5049 + case 5050: + goto st_case_5050 + case 5051: + goto st_case_5051 + case 5052: + goto st_case_5052 + case 5053: + goto st_case_5053 + case 5054: + goto st_case_5054 + case 5055: + goto st_case_5055 + case 5056: + goto st_case_5056 + case 5057: + goto st_case_5057 + case 5058: + goto st_case_5058 + case 5059: + goto st_case_5059 + case 5060: + goto st_case_5060 + case 5061: + goto st_case_5061 + case 5062: + goto st_case_5062 + case 5063: + goto st_case_5063 + case 5064: + goto st_case_5064 + case 5065: + goto st_case_5065 + case 5066: + goto st_case_5066 + case 5067: + goto st_case_5067 + case 5068: + goto st_case_5068 + case 5069: + goto st_case_5069 + case 5070: + goto st_case_5070 + case 5071: + goto st_case_5071 + case 3239: + goto st_case_3239 + case 3240: + goto st_case_3240 + case 3241: + goto st_case_3241 + case 3242: + goto st_case_3242 + case 3243: + goto st_case_3243 + case 3244: + goto st_case_3244 + case 3245: + goto st_case_3245 + case 3246: + goto st_case_3246 + case 3247: + goto st_case_3247 + case 3248: + goto st_case_3248 + case 3249: + goto st_case_3249 + case 3250: + goto st_case_3250 + case 3251: + goto st_case_3251 + case 3252: + goto st_case_3252 + case 3253: + goto st_case_3253 + case 3254: + goto st_case_3254 + case 3255: + goto st_case_3255 + case 3256: + goto st_case_3256 + case 3257: + goto st_case_3257 + case 3258: + goto st_case_3258 + case 3259: + goto st_case_3259 + case 3260: + goto st_case_3260 + case 3261: + goto st_case_3261 + case 3262: + goto st_case_3262 + case 3263: + goto st_case_3263 + case 3264: + goto st_case_3264 + case 3265: + goto st_case_3265 + case 5072: + goto st_case_5072 + case 3266: + goto st_case_3266 + case 3267: + goto st_case_3267 + case 3268: + goto st_case_3268 + case 5073: + goto st_case_5073 + case 3269: + goto 
st_case_3269 + case 3270: + goto st_case_3270 + case 3271: + goto st_case_3271 + case 3272: + goto st_case_3272 + case 3273: + goto st_case_3273 + case 3274: + goto st_case_3274 + case 3275: + goto st_case_3275 + case 3276: + goto st_case_3276 + case 3277: + goto st_case_3277 + case 3278: + goto st_case_3278 + case 3279: + goto st_case_3279 + case 3280: + goto st_case_3280 + case 3281: + goto st_case_3281 + case 3282: + goto st_case_3282 + case 3283: + goto st_case_3283 + case 3284: + goto st_case_3284 + case 3285: + goto st_case_3285 + case 3286: + goto st_case_3286 + case 3287: + goto st_case_3287 + case 3288: + goto st_case_3288 + case 3289: + goto st_case_3289 + case 3290: + goto st_case_3290 + case 3291: + goto st_case_3291 + case 3292: + goto st_case_3292 + case 3293: + goto st_case_3293 + case 3294: + goto st_case_3294 + case 3295: + goto st_case_3295 + case 3296: + goto st_case_3296 + case 3297: + goto st_case_3297 + case 3298: + goto st_case_3298 + case 3299: + goto st_case_3299 + case 3300: + goto st_case_3300 + case 3301: + goto st_case_3301 + case 3302: + goto st_case_3302 + case 3303: + goto st_case_3303 + case 3304: + goto st_case_3304 + case 3305: + goto st_case_3305 + case 3306: + goto st_case_3306 + case 3307: + goto st_case_3307 + case 3308: + goto st_case_3308 + case 3309: + goto st_case_3309 + case 3310: + goto st_case_3310 + case 3311: + goto st_case_3311 + case 3312: + goto st_case_3312 + case 3313: + goto st_case_3313 + case 3314: + goto st_case_3314 + case 3315: + goto st_case_3315 + case 3316: + goto st_case_3316 + case 3317: + goto st_case_3317 + case 3318: + goto st_case_3318 + case 3319: + goto st_case_3319 + case 3320: + goto st_case_3320 + case 3321: + goto st_case_3321 + case 3322: + goto st_case_3322 + case 3323: + goto st_case_3323 + case 3324: + goto st_case_3324 + case 3325: + goto st_case_3325 + case 3326: + goto st_case_3326 + case 3327: + goto st_case_3327 + case 3328: + goto st_case_3328 + case 3329: + goto st_case_3329 + case 
3330: + goto st_case_3330 + case 3331: + goto st_case_3331 + case 3332: + goto st_case_3332 + case 3333: + goto st_case_3333 + case 3334: + goto st_case_3334 + case 3335: + goto st_case_3335 + case 3336: + goto st_case_3336 + case 3337: + goto st_case_3337 + case 3338: + goto st_case_3338 + case 3339: + goto st_case_3339 + case 3340: + goto st_case_3340 + case 3341: + goto st_case_3341 + case 3342: + goto st_case_3342 + case 3343: + goto st_case_3343 + case 3344: + goto st_case_3344 + case 3345: + goto st_case_3345 + case 3346: + goto st_case_3346 + case 3347: + goto st_case_3347 + case 3348: + goto st_case_3348 + case 3349: + goto st_case_3349 + case 3350: + goto st_case_3350 + case 5074: + goto st_case_5074 + case 3351: + goto st_case_3351 + case 3352: + goto st_case_3352 + case 3353: + goto st_case_3353 + case 3354: + goto st_case_3354 + case 3355: + goto st_case_3355 + case 3356: + goto st_case_3356 + case 3357: + goto st_case_3357 + case 3358: + goto st_case_3358 + case 3359: + goto st_case_3359 + case 3360: + goto st_case_3360 + case 3361: + goto st_case_3361 + case 3362: + goto st_case_3362 + case 3363: + goto st_case_3363 + case 3364: + goto st_case_3364 + case 3365: + goto st_case_3365 + case 3366: + goto st_case_3366 + case 3367: + goto st_case_3367 + case 3368: + goto st_case_3368 + case 3369: + goto st_case_3369 + case 3370: + goto st_case_3370 + case 3371: + goto st_case_3371 + case 3372: + goto st_case_3372 + case 3373: + goto st_case_3373 + case 3374: + goto st_case_3374 + case 3375: + goto st_case_3375 + case 3376: + goto st_case_3376 + case 3377: + goto st_case_3377 + case 3378: + goto st_case_3378 + case 3379: + goto st_case_3379 + case 3380: + goto st_case_3380 + case 3381: + goto st_case_3381 + case 3382: + goto st_case_3382 + case 3383: + goto st_case_3383 + case 3384: + goto st_case_3384 + case 3385: + goto st_case_3385 + case 3386: + goto st_case_3386 + case 3387: + goto st_case_3387 + case 3388: + goto st_case_3388 + case 3389: + goto 
st_case_3389 + case 3390: + goto st_case_3390 + case 3391: + goto st_case_3391 + case 3392: + goto st_case_3392 + case 3393: + goto st_case_3393 + case 3394: + goto st_case_3394 + case 3395: + goto st_case_3395 + case 3396: + goto st_case_3396 + case 3397: + goto st_case_3397 + case 3398: + goto st_case_3398 + case 3399: + goto st_case_3399 + case 3400: + goto st_case_3400 + case 3401: + goto st_case_3401 + case 3402: + goto st_case_3402 + case 3403: + goto st_case_3403 + case 3404: + goto st_case_3404 + case 3405: + goto st_case_3405 + case 3406: + goto st_case_3406 + case 3407: + goto st_case_3407 + case 3408: + goto st_case_3408 + case 3409: + goto st_case_3409 + case 3410: + goto st_case_3410 + case 3411: + goto st_case_3411 + case 3412: + goto st_case_3412 + case 3413: + goto st_case_3413 + case 3414: + goto st_case_3414 + case 3415: + goto st_case_3415 + case 3416: + goto st_case_3416 + case 3417: + goto st_case_3417 + case 3418: + goto st_case_3418 + case 3419: + goto st_case_3419 + case 3420: + goto st_case_3420 + case 3421: + goto st_case_3421 + case 3422: + goto st_case_3422 + case 3423: + goto st_case_3423 + case 3424: + goto st_case_3424 + case 3425: + goto st_case_3425 + case 3426: + goto st_case_3426 + case 3427: + goto st_case_3427 + case 3428: + goto st_case_3428 + case 3429: + goto st_case_3429 + case 3430: + goto st_case_3430 + case 3431: + goto st_case_3431 + case 3432: + goto st_case_3432 + case 3433: + goto st_case_3433 + case 3434: + goto st_case_3434 + case 3435: + goto st_case_3435 + case 3436: + goto st_case_3436 + case 3437: + goto st_case_3437 + case 3438: + goto st_case_3438 + case 3439: + goto st_case_3439 + case 3440: + goto st_case_3440 + case 3441: + goto st_case_3441 + case 3442: + goto st_case_3442 + case 3443: + goto st_case_3443 + case 3444: + goto st_case_3444 + case 3445: + goto st_case_3445 + case 3446: + goto st_case_3446 + case 3447: + goto st_case_3447 + case 3448: + goto st_case_3448 + case 3449: + goto st_case_3449 + case 
3450: + goto st_case_3450 + case 3451: + goto st_case_3451 + case 3452: + goto st_case_3452 + case 3453: + goto st_case_3453 + case 3454: + goto st_case_3454 + case 3455: + goto st_case_3455 + case 3456: + goto st_case_3456 + case 3457: + goto st_case_3457 + case 3458: + goto st_case_3458 + case 3459: + goto st_case_3459 + case 3460: + goto st_case_3460 + case 3461: + goto st_case_3461 + case 3462: + goto st_case_3462 + case 3463: + goto st_case_3463 + case 3464: + goto st_case_3464 + case 3465: + goto st_case_3465 + case 3466: + goto st_case_3466 + case 3467: + goto st_case_3467 + case 3468: + goto st_case_3468 + case 3469: + goto st_case_3469 + case 3470: + goto st_case_3470 + case 3471: + goto st_case_3471 + case 3472: + goto st_case_3472 + case 3473: + goto st_case_3473 + case 3474: + goto st_case_3474 + case 3475: + goto st_case_3475 + case 3476: + goto st_case_3476 + case 3477: + goto st_case_3477 + case 3478: + goto st_case_3478 + case 3479: + goto st_case_3479 + case 3480: + goto st_case_3480 + case 3481: + goto st_case_3481 + case 3482: + goto st_case_3482 + case 3483: + goto st_case_3483 + case 3484: + goto st_case_3484 + case 3485: + goto st_case_3485 + case 3486: + goto st_case_3486 + case 3487: + goto st_case_3487 + case 3488: + goto st_case_3488 + case 3489: + goto st_case_3489 + case 3490: + goto st_case_3490 + case 3491: + goto st_case_3491 + case 3492: + goto st_case_3492 + case 3493: + goto st_case_3493 + case 3494: + goto st_case_3494 + case 3495: + goto st_case_3495 + case 3496: + goto st_case_3496 + case 3497: + goto st_case_3497 + case 3498: + goto st_case_3498 + case 3499: + goto st_case_3499 + case 3500: + goto st_case_3500 + case 3501: + goto st_case_3501 + case 3502: + goto st_case_3502 + case 3503: + goto st_case_3503 + case 3504: + goto st_case_3504 + case 3505: + goto st_case_3505 + case 3506: + goto st_case_3506 + case 3507: + goto st_case_3507 + case 3508: + goto st_case_3508 + case 3509: + goto st_case_3509 + case 3510: + goto 
st_case_3510 + case 3511: + goto st_case_3511 + case 3512: + goto st_case_3512 + case 3513: + goto st_case_3513 + case 3514: + goto st_case_3514 + case 3515: + goto st_case_3515 + case 3516: + goto st_case_3516 + case 3517: + goto st_case_3517 + case 3518: + goto st_case_3518 + case 3519: + goto st_case_3519 + case 3520: + goto st_case_3520 + case 3521: + goto st_case_3521 + case 3522: + goto st_case_3522 + case 3523: + goto st_case_3523 + case 3524: + goto st_case_3524 + case 3525: + goto st_case_3525 + case 3526: + goto st_case_3526 + case 3527: + goto st_case_3527 + case 3528: + goto st_case_3528 + case 3529: + goto st_case_3529 + case 3530: + goto st_case_3530 + case 3531: + goto st_case_3531 + case 3532: + goto st_case_3532 + case 3533: + goto st_case_3533 + case 3534: + goto st_case_3534 + case 3535: + goto st_case_3535 + case 3536: + goto st_case_3536 + case 3537: + goto st_case_3537 + case 3538: + goto st_case_3538 + case 3539: + goto st_case_3539 + case 3540: + goto st_case_3540 + case 3541: + goto st_case_3541 + case 3542: + goto st_case_3542 + case 3543: + goto st_case_3543 + case 3544: + goto st_case_3544 + case 3545: + goto st_case_3545 + case 3546: + goto st_case_3546 + case 3547: + goto st_case_3547 + case 3548: + goto st_case_3548 + case 3549: + goto st_case_3549 + case 3550: + goto st_case_3550 + case 3551: + goto st_case_3551 + case 3552: + goto st_case_3552 + case 3553: + goto st_case_3553 + case 3554: + goto st_case_3554 + case 3555: + goto st_case_3555 + case 3556: + goto st_case_3556 + case 3557: + goto st_case_3557 + case 3558: + goto st_case_3558 + case 3559: + goto st_case_3559 + case 3560: + goto st_case_3560 + case 3561: + goto st_case_3561 + case 3562: + goto st_case_3562 + case 3563: + goto st_case_3563 + case 3564: + goto st_case_3564 + case 3565: + goto st_case_3565 + case 3566: + goto st_case_3566 + case 3567: + goto st_case_3567 + case 3568: + goto st_case_3568 + case 3569: + goto st_case_3569 + case 3570: + goto st_case_3570 + case 
3571: + goto st_case_3571 + case 3572: + goto st_case_3572 + case 3573: + goto st_case_3573 + case 3574: + goto st_case_3574 + case 3575: + goto st_case_3575 + case 3576: + goto st_case_3576 + case 3577: + goto st_case_3577 + case 3578: + goto st_case_3578 + case 3579: + goto st_case_3579 + case 3580: + goto st_case_3580 + case 3581: + goto st_case_3581 + case 3582: + goto st_case_3582 + case 3583: + goto st_case_3583 + case 3584: + goto st_case_3584 + case 3585: + goto st_case_3585 + case 3586: + goto st_case_3586 + case 3587: + goto st_case_3587 + case 5075: + goto st_case_5075 + case 3588: + goto st_case_3588 + case 3589: + goto st_case_3589 + case 3590: + goto st_case_3590 + case 3591: + goto st_case_3591 + case 3592: + goto st_case_3592 + case 3593: + goto st_case_3593 + case 5076: + goto st_case_5076 + case 3594: + goto st_case_3594 + case 3595: + goto st_case_3595 + case 3596: + goto st_case_3596 + case 3597: + goto st_case_3597 + case 3598: + goto st_case_3598 + case 3599: + goto st_case_3599 + case 3600: + goto st_case_3600 + case 3601: + goto st_case_3601 + case 3602: + goto st_case_3602 + case 3603: + goto st_case_3603 + case 3604: + goto st_case_3604 + case 3605: + goto st_case_3605 + case 3606: + goto st_case_3606 + case 3607: + goto st_case_3607 + case 3608: + goto st_case_3608 + case 3609: + goto st_case_3609 + case 3610: + goto st_case_3610 + case 3611: + goto st_case_3611 + case 3612: + goto st_case_3612 + case 3613: + goto st_case_3613 + case 3614: + goto st_case_3614 + case 3615: + goto st_case_3615 + case 3616: + goto st_case_3616 + case 3617: + goto st_case_3617 + case 3618: + goto st_case_3618 + case 3619: + goto st_case_3619 + case 3620: + goto st_case_3620 + case 3621: + goto st_case_3621 + case 3622: + goto st_case_3622 + case 3623: + goto st_case_3623 + case 3624: + goto st_case_3624 + case 3625: + goto st_case_3625 + case 3626: + goto st_case_3626 + case 3627: + goto st_case_3627 + case 3628: + goto st_case_3628 + case 3629: + goto 
st_case_3629 + case 3630: + goto st_case_3630 + case 3631: + goto st_case_3631 + case 3632: + goto st_case_3632 + case 3633: + goto st_case_3633 + case 3634: + goto st_case_3634 + case 3635: + goto st_case_3635 + case 3636: + goto st_case_3636 + case 3637: + goto st_case_3637 + case 3638: + goto st_case_3638 + case 3639: + goto st_case_3639 + case 3640: + goto st_case_3640 + case 3641: + goto st_case_3641 + case 3642: + goto st_case_3642 + case 3643: + goto st_case_3643 + case 3644: + goto st_case_3644 + case 3645: + goto st_case_3645 + case 3646: + goto st_case_3646 + case 3647: + goto st_case_3647 + case 3648: + goto st_case_3648 + case 3649: + goto st_case_3649 + case 3650: + goto st_case_3650 + case 3651: + goto st_case_3651 + case 3652: + goto st_case_3652 + case 3653: + goto st_case_3653 + case 3654: + goto st_case_3654 + case 3655: + goto st_case_3655 + case 3656: + goto st_case_3656 + case 3657: + goto st_case_3657 + case 3658: + goto st_case_3658 + case 3659: + goto st_case_3659 + case 3660: + goto st_case_3660 + case 3661: + goto st_case_3661 + case 3662: + goto st_case_3662 + case 3663: + goto st_case_3663 + case 3664: + goto st_case_3664 + case 3665: + goto st_case_3665 + case 3666: + goto st_case_3666 + case 3667: + goto st_case_3667 + case 3668: + goto st_case_3668 + case 3669: + goto st_case_3669 + case 3670: + goto st_case_3670 + case 3671: + goto st_case_3671 + case 3672: + goto st_case_3672 + case 3673: + goto st_case_3673 + case 3674: + goto st_case_3674 + case 3675: + goto st_case_3675 + case 3676: + goto st_case_3676 + case 3677: + goto st_case_3677 + case 3678: + goto st_case_3678 + case 3679: + goto st_case_3679 + case 3680: + goto st_case_3680 + case 3681: + goto st_case_3681 + case 3682: + goto st_case_3682 + case 3683: + goto st_case_3683 + case 3684: + goto st_case_3684 + case 3685: + goto st_case_3685 + case 3686: + goto st_case_3686 + case 3687: + goto st_case_3687 + case 3688: + goto st_case_3688 + case 3689: + goto st_case_3689 + case 
3690: + goto st_case_3690 + case 3691: + goto st_case_3691 + case 3692: + goto st_case_3692 + case 3693: + goto st_case_3693 + case 3694: + goto st_case_3694 + case 3695: + goto st_case_3695 + case 3696: + goto st_case_3696 + case 3697: + goto st_case_3697 + case 3698: + goto st_case_3698 + case 3699: + goto st_case_3699 + case 3700: + goto st_case_3700 + case 3701: + goto st_case_3701 + case 3702: + goto st_case_3702 + case 3703: + goto st_case_3703 + case 3704: + goto st_case_3704 + case 3705: + goto st_case_3705 + case 3706: + goto st_case_3706 + case 3707: + goto st_case_3707 + case 3708: + goto st_case_3708 + case 3709: + goto st_case_3709 + case 3710: + goto st_case_3710 + case 3711: + goto st_case_3711 + case 3712: + goto st_case_3712 + case 3713: + goto st_case_3713 + case 3714: + goto st_case_3714 + case 3715: + goto st_case_3715 + case 3716: + goto st_case_3716 + case 3717: + goto st_case_3717 + case 3718: + goto st_case_3718 + case 3719: + goto st_case_3719 + case 3720: + goto st_case_3720 + case 3721: + goto st_case_3721 + case 3722: + goto st_case_3722 + case 3723: + goto st_case_3723 + case 3724: + goto st_case_3724 + case 3725: + goto st_case_3725 + case 3726: + goto st_case_3726 + case 3727: + goto st_case_3727 + case 3728: + goto st_case_3728 + case 3729: + goto st_case_3729 + case 3730: + goto st_case_3730 + case 3731: + goto st_case_3731 + case 3732: + goto st_case_3732 + case 3733: + goto st_case_3733 + case 3734: + goto st_case_3734 + case 3735: + goto st_case_3735 + case 3736: + goto st_case_3736 + case 5077: + goto st_case_5077 + case 3737: + goto st_case_3737 + case 5078: + goto st_case_5078 + case 3738: + goto st_case_3738 + case 3739: + goto st_case_3739 + case 3740: + goto st_case_3740 + case 3741: + goto st_case_3741 + case 3742: + goto st_case_3742 + case 3743: + goto st_case_3743 + case 3744: + goto st_case_3744 + case 3745: + goto st_case_3745 + case 3746: + goto st_case_3746 + case 3747: + goto st_case_3747 + case 3748: + goto 
st_case_3748 + case 3749: + goto st_case_3749 + case 3750: + goto st_case_3750 + case 3751: + goto st_case_3751 + case 3752: + goto st_case_3752 + case 3753: + goto st_case_3753 + case 3754: + goto st_case_3754 + case 3755: + goto st_case_3755 + case 3756: + goto st_case_3756 + case 3757: + goto st_case_3757 + case 3758: + goto st_case_3758 + case 3759: + goto st_case_3759 + case 3760: + goto st_case_3760 + case 3761: + goto st_case_3761 + case 3762: + goto st_case_3762 + case 3763: + goto st_case_3763 + case 3764: + goto st_case_3764 + case 3765: + goto st_case_3765 + case 3766: + goto st_case_3766 + case 3767: + goto st_case_3767 + case 3768: + goto st_case_3768 + case 3769: + goto st_case_3769 + case 3770: + goto st_case_3770 + case 3771: + goto st_case_3771 + case 3772: + goto st_case_3772 + case 3773: + goto st_case_3773 + case 3774: + goto st_case_3774 + case 3775: + goto st_case_3775 + case 3776: + goto st_case_3776 + case 3777: + goto st_case_3777 + case 3778: + goto st_case_3778 + case 3779: + goto st_case_3779 + case 3780: + goto st_case_3780 + case 3781: + goto st_case_3781 + case 3782: + goto st_case_3782 + case 3783: + goto st_case_3783 + case 3784: + goto st_case_3784 + case 3785: + goto st_case_3785 + case 3786: + goto st_case_3786 + case 3787: + goto st_case_3787 + case 3788: + goto st_case_3788 + case 3789: + goto st_case_3789 + case 3790: + goto st_case_3790 + case 3791: + goto st_case_3791 + case 3792: + goto st_case_3792 + case 3793: + goto st_case_3793 + case 3794: + goto st_case_3794 + case 3795: + goto st_case_3795 + case 3796: + goto st_case_3796 + case 3797: + goto st_case_3797 + case 3798: + goto st_case_3798 + case 3799: + goto st_case_3799 + case 3800: + goto st_case_3800 + case 3801: + goto st_case_3801 + case 3802: + goto st_case_3802 + case 3803: + goto st_case_3803 + case 3804: + goto st_case_3804 + case 3805: + goto st_case_3805 + case 3806: + goto st_case_3806 + case 3807: + goto st_case_3807 + case 3808: + goto st_case_3808 + case 
3809: + goto st_case_3809 + case 3810: + goto st_case_3810 + case 3811: + goto st_case_3811 + case 3812: + goto st_case_3812 + case 3813: + goto st_case_3813 + case 3814: + goto st_case_3814 + case 3815: + goto st_case_3815 + case 3816: + goto st_case_3816 + case 3817: + goto st_case_3817 + case 3818: + goto st_case_3818 + case 3819: + goto st_case_3819 + case 3820: + goto st_case_3820 + case 3821: + goto st_case_3821 + case 3822: + goto st_case_3822 + case 3823: + goto st_case_3823 + case 3824: + goto st_case_3824 + case 3825: + goto st_case_3825 + case 3826: + goto st_case_3826 + case 3827: + goto st_case_3827 + case 3828: + goto st_case_3828 + case 3829: + goto st_case_3829 + case 3830: + goto st_case_3830 + case 3831: + goto st_case_3831 + case 3832: + goto st_case_3832 + case 3833: + goto st_case_3833 + case 3834: + goto st_case_3834 + case 3835: + goto st_case_3835 + case 3836: + goto st_case_3836 + case 3837: + goto st_case_3837 + case 3838: + goto st_case_3838 + case 3839: + goto st_case_3839 + case 3840: + goto st_case_3840 + case 3841: + goto st_case_3841 + case 3842: + goto st_case_3842 + case 3843: + goto st_case_3843 + case 3844: + goto st_case_3844 + case 3845: + goto st_case_3845 + case 3846: + goto st_case_3846 + case 3847: + goto st_case_3847 + case 3848: + goto st_case_3848 + case 3849: + goto st_case_3849 + case 3850: + goto st_case_3850 + case 3851: + goto st_case_3851 + case 3852: + goto st_case_3852 + case 3853: + goto st_case_3853 + case 3854: + goto st_case_3854 + case 3855: + goto st_case_3855 + case 3856: + goto st_case_3856 + case 3857: + goto st_case_3857 + case 3858: + goto st_case_3858 + case 3859: + goto st_case_3859 + case 3860: + goto st_case_3860 + case 3861: + goto st_case_3861 + case 3862: + goto st_case_3862 + case 3863: + goto st_case_3863 + case 3864: + goto st_case_3864 + case 3865: + goto st_case_3865 + case 3866: + goto st_case_3866 + case 3867: + goto st_case_3867 + case 3868: + goto st_case_3868 + case 3869: + goto 
st_case_3869 + case 3870: + goto st_case_3870 + case 3871: + goto st_case_3871 + case 3872: + goto st_case_3872 + case 3873: + goto st_case_3873 + case 3874: + goto st_case_3874 + case 3875: + goto st_case_3875 + case 3876: + goto st_case_3876 + case 3877: + goto st_case_3877 + case 3878: + goto st_case_3878 + case 3879: + goto st_case_3879 + case 3880: + goto st_case_3880 + case 3881: + goto st_case_3881 + case 3882: + goto st_case_3882 + case 3883: + goto st_case_3883 + case 3884: + goto st_case_3884 + case 5079: + goto st_case_5079 + case 3885: + goto st_case_3885 + case 3886: + goto st_case_3886 + case 3887: + goto st_case_3887 + case 3888: + goto st_case_3888 + case 3889: + goto st_case_3889 + case 3890: + goto st_case_3890 + case 3891: + goto st_case_3891 + case 3892: + goto st_case_3892 + case 3893: + goto st_case_3893 + case 3894: + goto st_case_3894 + case 3895: + goto st_case_3895 + case 3896: + goto st_case_3896 + case 3897: + goto st_case_3897 + case 3898: + goto st_case_3898 + case 3899: + goto st_case_3899 + case 3900: + goto st_case_3900 + case 3901: + goto st_case_3901 + case 3902: + goto st_case_3902 + case 3903: + goto st_case_3903 + case 3904: + goto st_case_3904 + case 3905: + goto st_case_3905 + case 3906: + goto st_case_3906 + case 3907: + goto st_case_3907 + case 3908: + goto st_case_3908 + case 3909: + goto st_case_3909 + case 3910: + goto st_case_3910 + case 3911: + goto st_case_3911 + case 3912: + goto st_case_3912 + case 3913: + goto st_case_3913 + case 3914: + goto st_case_3914 + case 3915: + goto st_case_3915 + case 3916: + goto st_case_3916 + case 3917: + goto st_case_3917 + case 3918: + goto st_case_3918 + case 3919: + goto st_case_3919 + case 3920: + goto st_case_3920 + case 3921: + goto st_case_3921 + case 3922: + goto st_case_3922 + case 3923: + goto st_case_3923 + case 3924: + goto st_case_3924 + case 3925: + goto st_case_3925 + case 3926: + goto st_case_3926 + case 3927: + goto st_case_3927 + case 3928: + goto st_case_3928 + case 
3929: + goto st_case_3929 + case 3930: + goto st_case_3930 + case 3931: + goto st_case_3931 + case 3932: + goto st_case_3932 + case 3933: + goto st_case_3933 + case 3934: + goto st_case_3934 + case 3935: + goto st_case_3935 + case 3936: + goto st_case_3936 + case 3937: + goto st_case_3937 + case 3938: + goto st_case_3938 + case 3939: + goto st_case_3939 + case 3940: + goto st_case_3940 + case 3941: + goto st_case_3941 + case 3942: + goto st_case_3942 + case 3943: + goto st_case_3943 + case 3944: + goto st_case_3944 + case 3945: + goto st_case_3945 + case 3946: + goto st_case_3946 + case 3947: + goto st_case_3947 + case 3948: + goto st_case_3948 + case 3949: + goto st_case_3949 + case 3950: + goto st_case_3950 + case 3951: + goto st_case_3951 + case 3952: + goto st_case_3952 + case 3953: + goto st_case_3953 + case 3954: + goto st_case_3954 + case 3955: + goto st_case_3955 + case 3956: + goto st_case_3956 + case 3957: + goto st_case_3957 + case 3958: + goto st_case_3958 + case 3959: + goto st_case_3959 + case 3960: + goto st_case_3960 + case 3961: + goto st_case_3961 + case 3962: + goto st_case_3962 + case 3963: + goto st_case_3963 + case 3964: + goto st_case_3964 + case 3965: + goto st_case_3965 + case 3966: + goto st_case_3966 + case 3967: + goto st_case_3967 + case 3968: + goto st_case_3968 + case 3969: + goto st_case_3969 + case 3970: + goto st_case_3970 + case 3971: + goto st_case_3971 + case 3972: + goto st_case_3972 + case 3973: + goto st_case_3973 + case 3974: + goto st_case_3974 + case 3975: + goto st_case_3975 + case 3976: + goto st_case_3976 + case 3977: + goto st_case_3977 + case 3978: + goto st_case_3978 + case 3979: + goto st_case_3979 + case 3980: + goto st_case_3980 + case 3981: + goto st_case_3981 + case 3982: + goto st_case_3982 + case 3983: + goto st_case_3983 + case 3984: + goto st_case_3984 + case 3985: + goto st_case_3985 + case 3986: + goto st_case_3986 + case 3987: + goto st_case_3987 + case 3988: + goto st_case_3988 + case 3989: + goto 
st_case_3989 + case 3990: + goto st_case_3990 + case 3991: + goto st_case_3991 + case 3992: + goto st_case_3992 + case 3993: + goto st_case_3993 + case 3994: + goto st_case_3994 + case 3995: + goto st_case_3995 + case 3996: + goto st_case_3996 + case 3997: + goto st_case_3997 + case 3998: + goto st_case_3998 + case 3999: + goto st_case_3999 + case 4000: + goto st_case_4000 + case 4001: + goto st_case_4001 + case 4002: + goto st_case_4002 + case 4003: + goto st_case_4003 + case 4004: + goto st_case_4004 + case 4005: + goto st_case_4005 + case 4006: + goto st_case_4006 + case 4007: + goto st_case_4007 + case 4008: + goto st_case_4008 + case 4009: + goto st_case_4009 + case 4010: + goto st_case_4010 + case 4011: + goto st_case_4011 + case 4012: + goto st_case_4012 + case 4013: + goto st_case_4013 + case 4014: + goto st_case_4014 + case 4015: + goto st_case_4015 + case 4016: + goto st_case_4016 + case 4017: + goto st_case_4017 + case 4018: + goto st_case_4018 + case 4019: + goto st_case_4019 + case 4020: + goto st_case_4020 + case 4021: + goto st_case_4021 + case 4022: + goto st_case_4022 + case 4023: + goto st_case_4023 + case 4024: + goto st_case_4024 + case 4025: + goto st_case_4025 + case 4026: + goto st_case_4026 + case 5080: + goto st_case_5080 + case 4027: + goto st_case_4027 + case 4028: + goto st_case_4028 + case 4029: + goto st_case_4029 + case 4030: + goto st_case_4030 + case 4031: + goto st_case_4031 + case 4032: + goto st_case_4032 + case 4033: + goto st_case_4033 + case 4034: + goto st_case_4034 + case 4035: + goto st_case_4035 + case 4036: + goto st_case_4036 + case 4037: + goto st_case_4037 + case 4038: + goto st_case_4038 + case 4039: + goto st_case_4039 + case 4040: + goto st_case_4040 + case 4041: + goto st_case_4041 + case 4042: + goto st_case_4042 + case 4043: + goto st_case_4043 + case 4044: + goto st_case_4044 + case 4045: + goto st_case_4045 + case 4046: + goto st_case_4046 + case 4047: + goto st_case_4047 + case 4048: + goto st_case_4048 + case 
4049: + goto st_case_4049 + case 4050: + goto st_case_4050 + case 4051: + goto st_case_4051 + case 4052: + goto st_case_4052 + case 4053: + goto st_case_4053 + case 4054: + goto st_case_4054 + case 4055: + goto st_case_4055 + case 4056: + goto st_case_4056 + case 4057: + goto st_case_4057 + case 4058: + goto st_case_4058 + case 4059: + goto st_case_4059 + case 4060: + goto st_case_4060 + case 4061: + goto st_case_4061 + case 4062: + goto st_case_4062 + case 4063: + goto st_case_4063 + case 4064: + goto st_case_4064 + case 4065: + goto st_case_4065 + case 4066: + goto st_case_4066 + case 4067: + goto st_case_4067 + case 4068: + goto st_case_4068 + case 4069: + goto st_case_4069 + case 4070: + goto st_case_4070 + case 4071: + goto st_case_4071 + case 4072: + goto st_case_4072 + case 4073: + goto st_case_4073 + case 4074: + goto st_case_4074 + case 4075: + goto st_case_4075 + case 4076: + goto st_case_4076 + case 4077: + goto st_case_4077 + case 4078: + goto st_case_4078 + case 4079: + goto st_case_4079 + case 4080: + goto st_case_4080 + case 4081: + goto st_case_4081 + case 4082: + goto st_case_4082 + case 4083: + goto st_case_4083 + case 4084: + goto st_case_4084 + case 4085: + goto st_case_4085 + case 4086: + goto st_case_4086 + case 4087: + goto st_case_4087 + case 4088: + goto st_case_4088 + case 4089: + goto st_case_4089 + case 4090: + goto st_case_4090 + case 4091: + goto st_case_4091 + case 4092: + goto st_case_4092 + case 4093: + goto st_case_4093 + case 4094: + goto st_case_4094 + case 4095: + goto st_case_4095 + case 4096: + goto st_case_4096 + case 4097: + goto st_case_4097 + case 4098: + goto st_case_4098 + case 4099: + goto st_case_4099 + case 4100: + goto st_case_4100 + case 4101: + goto st_case_4101 + case 4102: + goto st_case_4102 + case 4103: + goto st_case_4103 + case 4104: + goto st_case_4104 + case 4105: + goto st_case_4105 + case 4106: + goto st_case_4106 + case 4107: + goto st_case_4107 + case 4108: + goto st_case_4108 + case 4109: + goto 
st_case_4109 + case 4110: + goto st_case_4110 + case 4111: + goto st_case_4111 + case 4112: + goto st_case_4112 + case 4113: + goto st_case_4113 + case 4114: + goto st_case_4114 + case 4115: + goto st_case_4115 + case 4116: + goto st_case_4116 + case 4117: + goto st_case_4117 + case 4118: + goto st_case_4118 + case 4119: + goto st_case_4119 + case 4120: + goto st_case_4120 + case 4121: + goto st_case_4121 + case 4122: + goto st_case_4122 + case 4123: + goto st_case_4123 + case 4124: + goto st_case_4124 + case 4125: + goto st_case_4125 + case 4126: + goto st_case_4126 + case 4127: + goto st_case_4127 + case 4128: + goto st_case_4128 + case 4129: + goto st_case_4129 + case 4130: + goto st_case_4130 + case 4131: + goto st_case_4131 + case 4132: + goto st_case_4132 + case 4133: + goto st_case_4133 + case 4134: + goto st_case_4134 + case 4135: + goto st_case_4135 + case 4136: + goto st_case_4136 + case 4137: + goto st_case_4137 + case 4138: + goto st_case_4138 + case 4139: + goto st_case_4139 + case 4140: + goto st_case_4140 + case 4141: + goto st_case_4141 + case 4142: + goto st_case_4142 + case 4143: + goto st_case_4143 + case 4144: + goto st_case_4144 + case 4145: + goto st_case_4145 + case 4146: + goto st_case_4146 + case 4147: + goto st_case_4147 + case 4148: + goto st_case_4148 + case 4149: + goto st_case_4149 + case 4150: + goto st_case_4150 + case 4151: + goto st_case_4151 + case 4152: + goto st_case_4152 + case 4153: + goto st_case_4153 + case 4154: + goto st_case_4154 + case 4155: + goto st_case_4155 + case 4156: + goto st_case_4156 + case 4157: + goto st_case_4157 + case 4158: + goto st_case_4158 + case 4159: + goto st_case_4159 + case 4160: + goto st_case_4160 + case 4161: + goto st_case_4161 + case 4162: + goto st_case_4162 + case 4163: + goto st_case_4163 + case 4164: + goto st_case_4164 + case 4165: + goto st_case_4165 + case 4166: + goto st_case_4166 + case 4167: + goto st_case_4167 + case 4168: + goto st_case_4168 + case 4169: + goto st_case_4169 + case 
4170: + goto st_case_4170 + case 4171: + goto st_case_4171 + case 4172: + goto st_case_4172 + case 4173: + goto st_case_4173 + case 4174: + goto st_case_4174 + case 4175: + goto st_case_4175 + case 5081: + goto st_case_5081 + case 4176: + goto st_case_4176 + case 4177: + goto st_case_4177 + case 4178: + goto st_case_4178 + case 4179: + goto st_case_4179 + case 4180: + goto st_case_4180 + case 4181: + goto st_case_4181 + case 4182: + goto st_case_4182 + case 4183: + goto st_case_4183 + case 4184: + goto st_case_4184 + case 4185: + goto st_case_4185 + case 4186: + goto st_case_4186 + case 4187: + goto st_case_4187 + case 4188: + goto st_case_4188 + case 4189: + goto st_case_4189 + case 4190: + goto st_case_4190 + case 4191: + goto st_case_4191 + case 4192: + goto st_case_4192 + case 4193: + goto st_case_4193 + case 4194: + goto st_case_4194 + case 4195: + goto st_case_4195 + case 4196: + goto st_case_4196 + case 4197: + goto st_case_4197 + case 4198: + goto st_case_4198 + case 4199: + goto st_case_4199 + case 4200: + goto st_case_4200 + case 4201: + goto st_case_4201 + case 4202: + goto st_case_4202 + case 4203: + goto st_case_4203 + case 4204: + goto st_case_4204 + case 4205: + goto st_case_4205 + case 4206: + goto st_case_4206 + case 4207: + goto st_case_4207 + case 4208: + goto st_case_4208 + case 4209: + goto st_case_4209 + case 4210: + goto st_case_4210 + case 4211: + goto st_case_4211 + case 4212: + goto st_case_4212 + case 4213: + goto st_case_4213 + case 4214: + goto st_case_4214 + case 4215: + goto st_case_4215 + case 4216: + goto st_case_4216 + case 4217: + goto st_case_4217 + case 4218: + goto st_case_4218 + case 4219: + goto st_case_4219 + case 4220: + goto st_case_4220 + case 4221: + goto st_case_4221 + case 4222: + goto st_case_4222 + case 4223: + goto st_case_4223 + case 4224: + goto st_case_4224 + case 4225: + goto st_case_4225 + case 4226: + goto st_case_4226 + case 4227: + goto st_case_4227 + case 4228: + goto st_case_4228 + case 4229: + goto 
st_case_4229 + case 4230: + goto st_case_4230 + case 4231: + goto st_case_4231 + case 4232: + goto st_case_4232 + case 4233: + goto st_case_4233 + case 4234: + goto st_case_4234 + case 4235: + goto st_case_4235 + case 4236: + goto st_case_4236 + case 4237: + goto st_case_4237 + case 4238: + goto st_case_4238 + case 4239: + goto st_case_4239 + case 4240: + goto st_case_4240 + case 4241: + goto st_case_4241 + case 4242: + goto st_case_4242 + case 4243: + goto st_case_4243 + case 4244: + goto st_case_4244 + case 4245: + goto st_case_4245 + case 4246: + goto st_case_4246 + case 4247: + goto st_case_4247 + case 4248: + goto st_case_4248 + case 4249: + goto st_case_4249 + case 4250: + goto st_case_4250 + case 4251: + goto st_case_4251 + case 4252: + goto st_case_4252 + case 4253: + goto st_case_4253 + case 4254: + goto st_case_4254 + case 4255: + goto st_case_4255 + case 4256: + goto st_case_4256 + case 4257: + goto st_case_4257 + case 4258: + goto st_case_4258 + case 4259: + goto st_case_4259 + case 4260: + goto st_case_4260 + case 4261: + goto st_case_4261 + case 4262: + goto st_case_4262 + case 4263: + goto st_case_4263 + case 4264: + goto st_case_4264 + case 4265: + goto st_case_4265 + case 4266: + goto st_case_4266 + case 4267: + goto st_case_4267 + case 4268: + goto st_case_4268 + case 4269: + goto st_case_4269 + case 4270: + goto st_case_4270 + case 4271: + goto st_case_4271 + case 4272: + goto st_case_4272 + case 4273: + goto st_case_4273 + case 4274: + goto st_case_4274 + case 4275: + goto st_case_4275 + case 4276: + goto st_case_4276 + case 4277: + goto st_case_4277 + case 4278: + goto st_case_4278 + case 4279: + goto st_case_4279 + case 4280: + goto st_case_4280 + case 4281: + goto st_case_4281 + case 4282: + goto st_case_4282 + case 4283: + goto st_case_4283 + case 4284: + goto st_case_4284 + case 4285: + goto st_case_4285 + case 4286: + goto st_case_4286 + case 4287: + goto st_case_4287 + case 4288: + goto st_case_4288 + case 4289: + goto st_case_4289 + case 
4290: + goto st_case_4290 + case 4291: + goto st_case_4291 + case 4292: + goto st_case_4292 + case 4293: + goto st_case_4293 + case 4294: + goto st_case_4294 + case 4295: + goto st_case_4295 + case 4296: + goto st_case_4296 + case 4297: + goto st_case_4297 + case 4298: + goto st_case_4298 + case 4299: + goto st_case_4299 + case 4300: + goto st_case_4300 + case 4301: + goto st_case_4301 + case 4302: + goto st_case_4302 + case 4303: + goto st_case_4303 + case 4304: + goto st_case_4304 + case 4305: + goto st_case_4305 + case 4306: + goto st_case_4306 + case 4307: + goto st_case_4307 + case 4308: + goto st_case_4308 + case 4309: + goto st_case_4309 + case 4310: + goto st_case_4310 + case 4311: + goto st_case_4311 + case 4312: + goto st_case_4312 + case 4313: + goto st_case_4313 + case 4314: + goto st_case_4314 + case 4315: + goto st_case_4315 + case 4316: + goto st_case_4316 + case 4317: + goto st_case_4317 + case 4318: + goto st_case_4318 + case 5082: + goto st_case_5082 + case 4319: + goto st_case_4319 + case 4320: + goto st_case_4320 + case 4321: + goto st_case_4321 + case 4322: + goto st_case_4322 + case 4323: + goto st_case_4323 + case 4324: + goto st_case_4324 + case 4325: + goto st_case_4325 + case 4326: + goto st_case_4326 + case 4327: + goto st_case_4327 + case 4328: + goto st_case_4328 + case 4329: + goto st_case_4329 + case 4330: + goto st_case_4330 + case 4331: + goto st_case_4331 + case 4332: + goto st_case_4332 + case 4333: + goto st_case_4333 + case 4334: + goto st_case_4334 + case 4335: + goto st_case_4335 + case 4336: + goto st_case_4336 + case 4337: + goto st_case_4337 + case 4338: + goto st_case_4338 + case 4339: + goto st_case_4339 + case 4340: + goto st_case_4340 + case 4341: + goto st_case_4341 + case 4342: + goto st_case_4342 + case 4343: + goto st_case_4343 + case 4344: + goto st_case_4344 + case 4345: + goto st_case_4345 + case 4346: + goto st_case_4346 + case 4347: + goto st_case_4347 + case 4348: + goto st_case_4348 + case 4349: + goto 
st_case_4349 + case 4350: + goto st_case_4350 + case 4351: + goto st_case_4351 + case 4352: + goto st_case_4352 + case 4353: + goto st_case_4353 + case 4354: + goto st_case_4354 + case 4355: + goto st_case_4355 + case 4356: + goto st_case_4356 + case 4357: + goto st_case_4357 + case 4358: + goto st_case_4358 + case 4359: + goto st_case_4359 + case 4360: + goto st_case_4360 + case 4361: + goto st_case_4361 + case 4362: + goto st_case_4362 + case 4363: + goto st_case_4363 + case 4364: + goto st_case_4364 + case 4365: + goto st_case_4365 + case 4366: + goto st_case_4366 + case 4367: + goto st_case_4367 + case 4368: + goto st_case_4368 + case 4369: + goto st_case_4369 + case 4370: + goto st_case_4370 + case 4371: + goto st_case_4371 + case 4372: + goto st_case_4372 + case 4373: + goto st_case_4373 + case 4374: + goto st_case_4374 + case 4375: + goto st_case_4375 + case 4376: + goto st_case_4376 + case 4377: + goto st_case_4377 + case 4378: + goto st_case_4378 + case 4379: + goto st_case_4379 + case 4380: + goto st_case_4380 + case 4381: + goto st_case_4381 + case 4382: + goto st_case_4382 + case 4383: + goto st_case_4383 + case 4384: + goto st_case_4384 + case 4385: + goto st_case_4385 + case 4386: + goto st_case_4386 + case 4387: + goto st_case_4387 + case 4388: + goto st_case_4388 + case 4389: + goto st_case_4389 + case 4390: + goto st_case_4390 + case 4391: + goto st_case_4391 + case 4392: + goto st_case_4392 + case 4393: + goto st_case_4393 + case 4394: + goto st_case_4394 + case 4395: + goto st_case_4395 + case 4396: + goto st_case_4396 + case 4397: + goto st_case_4397 + case 4398: + goto st_case_4398 + case 4399: + goto st_case_4399 + case 4400: + goto st_case_4400 + case 4401: + goto st_case_4401 + case 4402: + goto st_case_4402 + case 4403: + goto st_case_4403 + case 4404: + goto st_case_4404 + case 4405: + goto st_case_4405 + case 4406: + goto st_case_4406 + case 4407: + goto st_case_4407 + case 4408: + goto st_case_4408 + case 4409: + goto st_case_4409 + case 
4410: + goto st_case_4410 + case 4411: + goto st_case_4411 + case 4412: + goto st_case_4412 + case 4413: + goto st_case_4413 + case 4414: + goto st_case_4414 + case 4415: + goto st_case_4415 + case 4416: + goto st_case_4416 + case 4417: + goto st_case_4417 + case 4418: + goto st_case_4418 + case 4419: + goto st_case_4419 + case 4420: + goto st_case_4420 + case 4421: + goto st_case_4421 + case 4422: + goto st_case_4422 + case 4423: + goto st_case_4423 + case 4424: + goto st_case_4424 + case 4425: + goto st_case_4425 + case 4426: + goto st_case_4426 + case 4427: + goto st_case_4427 + case 4428: + goto st_case_4428 + case 4429: + goto st_case_4429 + case 4430: + goto st_case_4430 + case 4431: + goto st_case_4431 + case 4432: + goto st_case_4432 + case 4433: + goto st_case_4433 + case 4434: + goto st_case_4434 + case 4435: + goto st_case_4435 + case 4436: + goto st_case_4436 + case 4437: + goto st_case_4437 + case 4438: + goto st_case_4438 + case 4439: + goto st_case_4439 + case 4440: + goto st_case_4440 + case 4441: + goto st_case_4441 + case 4442: + goto st_case_4442 + case 4443: + goto st_case_4443 + case 4444: + goto st_case_4444 + case 4445: + goto st_case_4445 + case 4446: + goto st_case_4446 + case 4447: + goto st_case_4447 + case 4448: + goto st_case_4448 + case 4449: + goto st_case_4449 + case 4450: + goto st_case_4450 + case 4451: + goto st_case_4451 + case 4452: + goto st_case_4452 + case 4453: + goto st_case_4453 + case 4454: + goto st_case_4454 + case 4455: + goto st_case_4455 + case 4456: + goto st_case_4456 + case 4457: + goto st_case_4457 + case 4458: + goto st_case_4458 + case 4459: + goto st_case_4459 + case 4460: + goto st_case_4460 + case 4461: + goto st_case_4461 + case 4462: + goto st_case_4462 + case 4463: + goto st_case_4463 + case 4464: + goto st_case_4464 + case 4465: + goto st_case_4465 + case 4466: + goto st_case_4466 + case 4467: + goto st_case_4467 + case 4468: + goto st_case_4468 + case 4469: + goto st_case_4469 + case 4470: + goto 
st_case_4470 + case 4471: + goto st_case_4471 + case 4472: + goto st_case_4472 + case 5083: + goto st_case_5083 + case 5084: + goto st_case_5084 + case 5085: + goto st_case_5085 + case 5086: + goto st_case_5086 + case 5087: + goto st_case_5087 + case 5088: + goto st_case_5088 + case 5089: + goto st_case_5089 + case 5090: + goto st_case_5090 + case 5091: + goto st_case_5091 + case 5092: + goto st_case_5092 + case 5093: + goto st_case_5093 + case 5094: + goto st_case_5094 + case 5095: + goto st_case_5095 + case 5096: + goto st_case_5096 + case 5097: + goto st_case_5097 + case 5098: + goto st_case_5098 + case 5099: + goto st_case_5099 + case 5100: + goto st_case_5100 + case 5101: + goto st_case_5101 + case 5102: + goto st_case_5102 + case 5103: + goto st_case_5103 + case 5104: + goto st_case_5104 + case 5105: + goto st_case_5105 + case 5106: + goto st_case_5106 + case 5107: + goto st_case_5107 + case 5108: + goto st_case_5108 + case 5109: + goto st_case_5109 + case 5110: + goto st_case_5110 + case 5111: + goto st_case_5111 + case 5112: + goto st_case_5112 + case 5113: + goto st_case_5113 + case 5114: + goto st_case_5114 + case 5115: + goto st_case_5115 + case 5116: + goto st_case_5116 + case 5117: + goto st_case_5117 + case 5118: + goto st_case_5118 + case 5119: + goto st_case_5119 + case 5120: + goto st_case_5120 + case 5121: + goto st_case_5121 + case 5122: + goto st_case_5122 + case 5123: + goto st_case_5123 + case 5124: + goto st_case_5124 + case 5125: + goto st_case_5125 + case 5126: + goto st_case_5126 + case 5127: + goto st_case_5127 + case 5128: + goto st_case_5128 + case 5129: + goto st_case_5129 + case 5130: + goto st_case_5130 + case 5131: + goto st_case_5131 + case 5132: + goto st_case_5132 + case 5133: + goto st_case_5133 + case 5134: + goto st_case_5134 + case 5135: + goto st_case_5135 + case 5136: + goto st_case_5136 + case 5137: + goto st_case_5137 + case 5138: + goto st_case_5138 + case 5139: + goto st_case_5139 + case 5140: + goto st_case_5140 + case 
5141: + goto st_case_5141 + case 5142: + goto st_case_5142 + case 5143: + goto st_case_5143 + case 5144: + goto st_case_5144 + case 5145: + goto st_case_5145 + case 5146: + goto st_case_5146 + case 5147: + goto st_case_5147 + case 5148: + goto st_case_5148 + case 5149: + goto st_case_5149 + case 5150: + goto st_case_5150 + case 5151: + goto st_case_5151 + case 5152: + goto st_case_5152 + case 4473: + goto st_case_4473 + case 5153: + goto st_case_5153 + case 5154: + goto st_case_5154 + case 5155: + goto st_case_5155 + case 5156: + goto st_case_5156 + case 5157: + goto st_case_5157 + case 5158: + goto st_case_5158 + case 5159: + goto st_case_5159 + case 5160: + goto st_case_5160 + case 5161: + goto st_case_5161 + case 5162: + goto st_case_5162 + case 5163: + goto st_case_5163 + case 5164: + goto st_case_5164 + case 5165: + goto st_case_5165 + case 5166: + goto st_case_5166 + case 5167: + goto st_case_5167 + case 5168: + goto st_case_5168 + case 5169: + goto st_case_5169 + case 5170: + goto st_case_5170 + case 5171: + goto st_case_5171 + case 5172: + goto st_case_5172 + case 5173: + goto st_case_5173 + case 4474: + goto st_case_4474 + case 5174: + goto st_case_5174 + case 5175: + goto st_case_5175 + case 5176: + goto st_case_5176 + case 5177: + goto st_case_5177 + case 5178: + goto st_case_5178 + case 5179: + goto st_case_5179 + case 4475: + goto st_case_4475 + case 5180: + goto st_case_5180 + case 5181: + goto st_case_5181 + case 4476: + goto st_case_4476 + case 5182: + goto st_case_5182 + case 5183: + goto st_case_5183 + case 5184: + goto st_case_5184 + case 5185: + goto st_case_5185 + case 5186: + goto st_case_5186 + case 5187: + goto st_case_5187 + case 5188: + goto st_case_5188 + case 5189: + goto st_case_5189 + case 5190: + goto st_case_5190 + case 5191: + goto st_case_5191 + case 5192: + goto st_case_5192 + case 5193: + goto st_case_5193 + case 5194: + goto st_case_5194 + case 5195: + goto st_case_5195 + case 5196: + goto st_case_5196 + case 4477: + goto 
st_case_4477 + case 5197: + goto st_case_5197 + case 5198: + goto st_case_5198 + case 5199: + goto st_case_5199 + case 4478: + goto st_case_4478 + case 5200: + goto st_case_5200 + case 5201: + goto st_case_5201 + case 5202: + goto st_case_5202 + case 5203: + goto st_case_5203 + case 5204: + goto st_case_5204 + case 5205: + goto st_case_5205 + case 4479: + goto st_case_4479 + case 5206: + goto st_case_5206 + case 5207: + goto st_case_5207 + case 4480: + goto st_case_4480 + case 5208: + goto st_case_5208 + case 5209: + goto st_case_5209 + case 5210: + goto st_case_5210 + case 4481: + goto st_case_4481 + case 4482: + goto st_case_4482 + case 4483: + goto st_case_4483 + case 4484: + goto st_case_4484 + case 4485: + goto st_case_4485 + case 4486: + goto st_case_4486 + case 4487: + goto st_case_4487 + case 4488: + goto st_case_4488 + case 4489: + goto st_case_4489 + case 4490: + goto st_case_4490 + case 4491: + goto st_case_4491 + case 4492: + goto st_case_4492 + case 4493: + goto st_case_4493 + case 4494: + goto st_case_4494 + case 4495: + goto st_case_4495 + case 5211: + goto st_case_5211 + case 4496: + goto st_case_4496 + case 4497: + goto st_case_4497 + case 4498: + goto st_case_4498 + case 4499: + goto st_case_4499 + case 4500: + goto st_case_4500 + case 4501: + goto st_case_4501 + case 4502: + goto st_case_4502 + case 4503: + goto st_case_4503 + case 4504: + goto st_case_4504 + case 4505: + goto st_case_4505 + case 4506: + goto st_case_4506 + case 4507: + goto st_case_4507 + case 4508: + goto st_case_4508 + case 4509: + goto st_case_4509 + case 4510: + goto st_case_4510 + case 4511: + goto st_case_4511 + case 4512: + goto st_case_4512 + case 4513: + goto st_case_4513 + case 4514: + goto st_case_4514 + case 4515: + goto st_case_4515 + case 4516: + goto st_case_4516 + case 4517: + goto st_case_4517 + case 4518: + goto st_case_4518 + case 4519: + goto st_case_4519 + case 4520: + goto st_case_4520 + case 4521: + goto st_case_4521 + case 4522: + goto st_case_4522 + case 
4523: + goto st_case_4523 + case 4524: + goto st_case_4524 + case 4525: + goto st_case_4525 + case 4526: + goto st_case_4526 + case 4527: + goto st_case_4527 + case 4528: + goto st_case_4528 + case 4529: + goto st_case_4529 + case 4530: + goto st_case_4530 + case 4531: + goto st_case_4531 + case 4532: + goto st_case_4532 + case 4533: + goto st_case_4533 + case 4534: + goto st_case_4534 + case 4535: + goto st_case_4535 + case 4536: + goto st_case_4536 + case 4537: + goto st_case_4537 + case 4538: + goto st_case_4538 + case 4539: + goto st_case_4539 + case 4540: + goto st_case_4540 + case 4541: + goto st_case_4541 + case 4542: + goto st_case_4542 + case 4543: + goto st_case_4543 + case 4544: + goto st_case_4544 + case 4545: + goto st_case_4545 + case 4546: + goto st_case_4546 + case 4547: + goto st_case_4547 + case 4548: + goto st_case_4548 + case 4549: + goto st_case_4549 + case 4550: + goto st_case_4550 + case 4551: + goto st_case_4551 + case 4552: + goto st_case_4552 + case 4553: + goto st_case_4553 + case 4554: + goto st_case_4554 + case 4555: + goto st_case_4555 + case 4556: + goto st_case_4556 + case 4557: + goto st_case_4557 + case 4558: + goto st_case_4558 + case 4559: + goto st_case_4559 + case 4560: + goto st_case_4560 + case 4561: + goto st_case_4561 + case 4562: + goto st_case_4562 + case 4563: + goto st_case_4563 + case 4564: + goto st_case_4564 + case 4565: + goto st_case_4565 + case 4566: + goto st_case_4566 + case 4567: + goto st_case_4567 + case 4568: + goto st_case_4568 + case 4569: + goto st_case_4569 + case 4570: + goto st_case_4570 + case 4571: + goto st_case_4571 + case 4572: + goto st_case_4572 + case 4573: + goto st_case_4573 + case 4574: + goto st_case_4574 + case 4575: + goto st_case_4575 + case 4576: + goto st_case_4576 + case 4577: + goto st_case_4577 + case 4578: + goto st_case_4578 + case 4579: + goto st_case_4579 + case 4580: + goto st_case_4580 + case 4581: + goto st_case_4581 + case 4582: + goto st_case_4582 + case 4583: + goto 
st_case_4583 + case 4584: + goto st_case_4584 + case 4585: + goto st_case_4585 + case 4586: + goto st_case_4586 + case 4587: + goto st_case_4587 + case 4588: + goto st_case_4588 + case 4589: + goto st_case_4589 + case 4590: + goto st_case_4590 + case 4591: + goto st_case_4591 + case 4592: + goto st_case_4592 + case 4593: + goto st_case_4593 + case 4594: + goto st_case_4594 + case 4595: + goto st_case_4595 + case 4596: + goto st_case_4596 + case 4597: + goto st_case_4597 + case 4598: + goto st_case_4598 + case 4599: + goto st_case_4599 + case 4600: + goto st_case_4600 + case 4601: + goto st_case_4601 + case 4602: + goto st_case_4602 + case 4603: + goto st_case_4603 + case 4604: + goto st_case_4604 + case 4605: + goto st_case_4605 + case 4606: + goto st_case_4606 + case 4607: + goto st_case_4607 + case 4608: + goto st_case_4608 + case 4609: + goto st_case_4609 + case 4610: + goto st_case_4610 + case 4611: + goto st_case_4611 + case 4612: + goto st_case_4612 + case 4613: + goto st_case_4613 + case 4614: + goto st_case_4614 + case 4615: + goto st_case_4615 + case 4616: + goto st_case_4616 + case 4617: + goto st_case_4617 + case 4618: + goto st_case_4618 + case 4619: + goto st_case_4619 + case 4620: + goto st_case_4620 + case 4621: + goto st_case_4621 + case 4622: + goto st_case_4622 + case 4623: + goto st_case_4623 + case 4624: + goto st_case_4624 + case 4625: + goto st_case_4625 + case 4626: + goto st_case_4626 + case 4627: + goto st_case_4627 + case 4628: + goto st_case_4628 + case 4629: + goto st_case_4629 + case 4630: + goto st_case_4630 + case 4631: + goto st_case_4631 + case 4632: + goto st_case_4632 + case 4633: + goto st_case_4633 + case 4634: + goto st_case_4634 + case 4635: + goto st_case_4635 + case 4636: + goto st_case_4636 + case 4637: + goto st_case_4637 + case 4638: + goto st_case_4638 + case 4639: + goto st_case_4639 + case 4640: + goto st_case_4640 + case 4641: + goto st_case_4641 + case 4642: + goto st_case_4642 + case 4643: + goto st_case_4643 + case 
4644: + goto st_case_4644 + case 4645: + goto st_case_4645 + case 4646: + goto st_case_4646 + case 4647: + goto st_case_4647 + case 4648: + goto st_case_4648 + case 4649: + goto st_case_4649 + case 4650: + goto st_case_4650 + case 4651: + goto st_case_4651 + case 4652: + goto st_case_4652 + case 4653: + goto st_case_4653 + case 4654: + goto st_case_4654 + case 4655: + goto st_case_4655 + case 5212: + goto st_case_5212 + case 5213: + goto st_case_5213 + case 5214: + goto st_case_5214 + case 5215: + goto st_case_5215 + case 5216: + goto st_case_5216 + case 5217: + goto st_case_5217 + case 5218: + goto st_case_5218 + case 5219: + goto st_case_5219 + case 5220: + goto st_case_5220 + case 5221: + goto st_case_5221 + case 5222: + goto st_case_5222 + case 5223: + goto st_case_5223 + case 5224: + goto st_case_5224 + case 5225: + goto st_case_5225 + case 5226: + goto st_case_5226 + case 5227: + goto st_case_5227 + case 5228: + goto st_case_5228 + case 5229: + goto st_case_5229 + case 5230: + goto st_case_5230 + case 5231: + goto st_case_5231 + case 5232: + goto st_case_5232 + case 5233: + goto st_case_5233 + case 5234: + goto st_case_5234 + case 5235: + goto st_case_5235 + case 5236: + goto st_case_5236 + case 5237: + goto st_case_5237 + case 5238: + goto st_case_5238 + case 5239: + goto st_case_5239 + case 5240: + goto st_case_5240 + case 5241: + goto st_case_5241 + case 5242: + goto st_case_5242 + case 4656: + goto st_case_4656 + case 5243: + goto st_case_5243 + case 5244: + goto st_case_5244 + case 5245: + goto st_case_5245 + case 5246: + goto st_case_5246 + case 5247: + goto st_case_5247 + case 5248: + goto st_case_5248 + case 5249: + goto st_case_5249 + case 5250: + goto st_case_5250 + case 4657: + goto st_case_4657 + case 5251: + goto st_case_5251 + case 5252: + goto st_case_5252 + case 5253: + goto st_case_5253 + case 5254: + goto st_case_5254 + case 5255: + goto st_case_5255 + case 5256: + goto st_case_5256 + case 4658: + goto st_case_4658 + case 5257: + goto 
st_case_5257 + case 5258: + goto st_case_5258 + case 4659: + goto st_case_4659 + case 5259: + goto st_case_5259 + case 5260: + goto st_case_5260 + case 5261: + goto st_case_5261 + case 5262: + goto st_case_5262 + case 5263: + goto st_case_5263 + case 5264: + goto st_case_5264 + case 5265: + goto st_case_5265 + case 5266: + goto st_case_5266 + case 5267: + goto st_case_5267 + case 5268: + goto st_case_5268 + case 5269: + goto st_case_5269 + case 5270: + goto st_case_5270 + case 5271: + goto st_case_5271 + case 5272: + goto st_case_5272 + case 5273: + goto st_case_5273 + case 5274: + goto st_case_5274 + case 5275: + goto st_case_5275 + case 5276: + goto st_case_5276 + case 5277: + goto st_case_5277 + case 4660: + goto st_case_4660 + case 5278: + goto st_case_5278 + case 5279: + goto st_case_5279 + case 5280: + goto st_case_5280 + case 4661: + goto st_case_4661 + case 5281: + goto st_case_5281 + case 5282: + goto st_case_5282 + case 5283: + goto st_case_5283 + case 5284: + goto st_case_5284 + case 5285: + goto st_case_5285 + case 5286: + goto st_case_5286 + case 4662: + goto st_case_4662 + case 5287: + goto st_case_5287 + case 5288: + goto st_case_5288 + case 5289: + goto st_case_5289 + case 5290: + goto st_case_5290 + case 5291: + goto st_case_5291 + case 5292: + goto st_case_5292 + case 5293: + goto st_case_5293 + case 5294: + goto st_case_5294 + case 5295: + goto st_case_5295 + case 5296: + goto st_case_5296 + case 5297: + goto st_case_5297 + case 5298: + goto st_case_5298 + case 5299: + goto st_case_5299 + case 5300: + goto st_case_5300 + case 5301: + goto st_case_5301 + case 5302: + goto st_case_5302 + case 5303: + goto st_case_5303 + case 5304: + goto st_case_5304 + case 5305: + goto st_case_5305 + case 5306: + goto st_case_5306 + case 5307: + goto st_case_5307 + case 5308: + goto st_case_5308 + case 5309: + goto st_case_5309 + case 5310: + goto st_case_5310 + case 5311: + goto st_case_5311 + case 5312: + goto st_case_5312 + case 5313: + goto st_case_5313 + case 
5314: + goto st_case_5314 + case 5315: + goto st_case_5315 + case 5316: + goto st_case_5316 + case 5317: + goto st_case_5317 + case 5318: + goto st_case_5318 + case 5319: + goto st_case_5319 + case 5320: + goto st_case_5320 + case 5321: + goto st_case_5321 + case 5322: + goto st_case_5322 + case 5323: + goto st_case_5323 + case 5324: + goto st_case_5324 + case 5325: + goto st_case_5325 + case 5326: + goto st_case_5326 + case 5327: + goto st_case_5327 + case 5328: + goto st_case_5328 + case 5329: + goto st_case_5329 + case 5330: + goto st_case_5330 + case 5331: + goto st_case_5331 + case 5332: + goto st_case_5332 + case 5333: + goto st_case_5333 + case 5334: + goto st_case_5334 + case 5335: + goto st_case_5335 + case 5336: + goto st_case_5336 + case 5337: + goto st_case_5337 + case 5338: + goto st_case_5338 + case 4663: + goto st_case_4663 + case 4664: + goto st_case_4664 + case 4665: + goto st_case_4665 + case 4666: + goto st_case_4666 + case 4667: + goto st_case_4667 + case 4668: + goto st_case_4668 + case 4669: + goto st_case_4669 + case 4670: + goto st_case_4670 + case 5339: + goto st_case_5339 + case 4671: + goto st_case_4671 + case 4672: + goto st_case_4672 + case 4673: + goto st_case_4673 + case 4674: + goto st_case_4674 + case 4675: + goto st_case_4675 + case 4676: + goto st_case_4676 + case 4677: + goto st_case_4677 + case 4678: + goto st_case_4678 + case 4679: + goto st_case_4679 + case 4680: + goto st_case_4680 + case 4681: + goto st_case_4681 + case 4682: + goto st_case_4682 + case 4683: + goto st_case_4683 + case 4684: + goto st_case_4684 + case 4685: + goto st_case_4685 + case 4686: + goto st_case_4686 + case 4687: + goto st_case_4687 + case 4688: + goto st_case_4688 + case 4689: + goto st_case_4689 + case 4690: + goto st_case_4690 + case 4691: + goto st_case_4691 + case 4692: + goto st_case_4692 + case 4693: + goto st_case_4693 + case 4694: + goto st_case_4694 + case 4695: + goto st_case_4695 + case 4696: + goto st_case_4696 + case 4697: + goto 
st_case_4697 + case 4698: + goto st_case_4698 + case 4699: + goto st_case_4699 + case 4700: + goto st_case_4700 + case 4701: + goto st_case_4701 + case 4702: + goto st_case_4702 + case 4703: + goto st_case_4703 + case 4704: + goto st_case_4704 + case 4705: + goto st_case_4705 + case 4706: + goto st_case_4706 + case 4707: + goto st_case_4707 + case 5340: + goto st_case_5340 + case 4708: + goto st_case_4708 + case 4709: + goto st_case_4709 + case 4710: + goto st_case_4710 + case 4711: + goto st_case_4711 + case 4712: + goto st_case_4712 + case 4713: + goto st_case_4713 + case 4714: + goto st_case_4714 + case 4715: + goto st_case_4715 + case 4716: + goto st_case_4716 + case 4717: + goto st_case_4717 + case 4718: + goto st_case_4718 + case 4719: + goto st_case_4719 + case 4720: + goto st_case_4720 + case 4721: + goto st_case_4721 + case 4722: + goto st_case_4722 + case 4723: + goto st_case_4723 + case 4724: + goto st_case_4724 + case 4725: + goto st_case_4725 + case 4726: + goto st_case_4726 + case 4727: + goto st_case_4727 + case 4728: + goto st_case_4728 + case 4729: + goto st_case_4729 + case 4730: + goto st_case_4730 + case 4731: + goto st_case_4731 + case 4732: + goto st_case_4732 + case 4733: + goto st_case_4733 + case 4734: + goto st_case_4734 + case 4735: + goto st_case_4735 + case 4736: + goto st_case_4736 + case 4737: + goto st_case_4737 + case 4738: + goto st_case_4738 + case 4739: + goto st_case_4739 + case 4740: + goto st_case_4740 + case 4741: + goto st_case_4741 + case 4742: + goto st_case_4742 + case 4743: + goto st_case_4743 + case 4744: + goto st_case_4744 + case 4745: + goto st_case_4745 + case 4746: + goto st_case_4746 + case 4747: + goto st_case_4747 + case 4748: + goto st_case_4748 + case 4749: + goto st_case_4749 + case 4750: + goto st_case_4750 + case 4751: + goto st_case_4751 + case 4752: + goto st_case_4752 + case 4753: + goto st_case_4753 + case 4754: + goto st_case_4754 + case 4755: + goto st_case_4755 + case 4756: + goto st_case_4756 + case 
4757: + goto st_case_4757 + case 4758: + goto st_case_4758 + case 4759: + goto st_case_4759 + case 4760: + goto st_case_4760 + case 4761: + goto st_case_4761 + case 4762: + goto st_case_4762 + case 4763: + goto st_case_4763 + case 4764: + goto st_case_4764 + case 4765: + goto st_case_4765 + case 4766: + goto st_case_4766 + case 4767: + goto st_case_4767 + case 4768: + goto st_case_4768 + case 4769: + goto st_case_4769 + case 4770: + goto st_case_4770 + case 4771: + goto st_case_4771 + case 4772: + goto st_case_4772 + case 4773: + goto st_case_4773 + case 4774: + goto st_case_4774 + case 4775: + goto st_case_4775 + case 4776: + goto st_case_4776 + case 4777: + goto st_case_4777 + case 4778: + goto st_case_4778 + case 4779: + goto st_case_4779 + case 4780: + goto st_case_4780 + case 4781: + goto st_case_4781 + case 4782: + goto st_case_4782 + case 4783: + goto st_case_4783 + case 4784: + goto st_case_4784 + case 4785: + goto st_case_4785 + case 4786: + goto st_case_4786 + case 4787: + goto st_case_4787 + case 4788: + goto st_case_4788 + case 4789: + goto st_case_4789 + case 4790: + goto st_case_4790 + case 4791: + goto st_case_4791 + case 4792: + goto st_case_4792 + case 4793: + goto st_case_4793 + case 4794: + goto st_case_4794 + case 4795: + goto st_case_4795 + case 4796: + goto st_case_4796 + case 4797: + goto st_case_4797 + case 4798: + goto st_case_4798 + case 4799: + goto st_case_4799 + case 4800: + goto st_case_4800 + case 4801: + goto st_case_4801 + case 4802: + goto st_case_4802 + case 4803: + goto st_case_4803 + case 4804: + goto st_case_4804 + case 4805: + goto st_case_4805 + case 4806: + goto st_case_4806 + case 4807: + goto st_case_4807 + case 4808: + goto st_case_4808 + case 4809: + goto st_case_4809 + case 4810: + goto st_case_4810 + case 4811: + goto st_case_4811 + case 4812: + goto st_case_4812 + case 4813: + goto st_case_4813 + case 4814: + goto st_case_4814 + case 4815: + goto st_case_4815 + case 4816: + goto st_case_4816 + case 4817: + goto 
st_case_4817 + case 4818: + goto st_case_4818 + case 4819: + goto st_case_4819 + case 4820: + goto st_case_4820 + case 4821: + goto st_case_4821 + case 4822: + goto st_case_4822 + case 4823: + goto st_case_4823 + case 4824: + goto st_case_4824 + case 4825: + goto st_case_4825 + case 4826: + goto st_case_4826 + case 4827: + goto st_case_4827 + case 4828: + goto st_case_4828 + case 4829: + goto st_case_4829 + case 4830: + goto st_case_4830 + case 4831: + goto st_case_4831 + case 4832: + goto st_case_4832 + case 4833: + goto st_case_4833 + case 4834: + goto st_case_4834 + case 4835: + goto st_case_4835 + case 4836: + goto st_case_4836 + case 4837: + goto st_case_4837 + case 4838: + goto st_case_4838 + case 4839: + goto st_case_4839 + case 4840: + goto st_case_4840 + case 4841: + goto st_case_4841 + case 4842: + goto st_case_4842 + case 4843: + goto st_case_4843 + case 4844: + goto st_case_4844 + case 4845: + goto st_case_4845 + case 4846: + goto st_case_4846 + case 4847: + goto st_case_4847 + case 4848: + goto st_case_4848 + case 4849: + goto st_case_4849 + case 4850: + goto st_case_4850 + case 4851: + goto st_case_4851 + case 4852: + goto st_case_4852 + case 4853: + goto st_case_4853 + case 4854: + goto st_case_4854 + case 4855: + goto st_case_4855 + case 4856: + goto st_case_4856 + case 4857: + goto st_case_4857 + case 4858: + goto st_case_4858 + case 4859: + goto st_case_4859 + case 4860: + goto st_case_4860 + case 4861: + goto st_case_4861 + } + goto st_out +tr0: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = 
append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr2: +//line NONE:1 + switch act { + case 1: + {p = (te) - 1 + + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 2: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 3: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 4: + {p = (te) - 1 + + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 5: + {p = (te) - 1 + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && 
len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 7: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 12: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + case 13: + {p = (te) - 1 + + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + } + + goto st4862 +tr125: +//line segment_words.rl:76 
+p = (te) - 1 +{ + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr420: +//line segment_words.rl:119 +p = (te) - 1 +{ + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr2394: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr2985: +//line segment_words.rl:89 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr3249: +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = 
endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr3251: +//line segment_words.rl:131 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr3627: +//line segment_words.rl:104 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr3758: +//line segment_words.rl:146 +p = (te) - 1 +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4328: +//line segment_words.rl:161 +p = (te) - 1 +{ + lastPos := startPos + for lastPos <= endPos { 
+ _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4458: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4459: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= 
maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4499: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4519: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4520: +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +te = p+1 +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + 
if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4521: +//line segment_words.rl:76 +te = p +p-- +{ + if !atEOF { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Number) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4562: +//line segment_words.rl:119 +te = p +p-- +{ + if !atEOF { + return val, types, totalConsumed, nil + } + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr4763: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr5002: +//line segment_words.rl:89 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Letter) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr5054: +//line segment_words.rl:131 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, 
types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr5137: +//line segment_words.rl:104 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr5157: +//line segment_words.rl:146 +te = p +p-- +{ + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + + val = append(val, data[startPos:endPos+1]) + types = append(types, Ideo) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 +tr5359: +//line segment_words.rl:161 +te = p +p-- +{ + lastPos := startPos + for lastPos <= endPos { + _, size := utf8.DecodeRune(data[lastPos:]) + lastPos += size + } + endPos = lastPos -1 + p = endPos + + if endPos+1 == pe && !atEOF { + return val, types, totalConsumed, nil + } else if dr, size := utf8.DecodeRune(data[endPos+1:]); dr == utf8.RuneError && size == 1 { + return val, types, totalConsumed, nil + } + // otherwise, consume this as well + val = append(val, data[startPos:endPos+1]) + types = append(types, None) + totalConsumed = endPos+1 + if maxTokens > 0 && len(val) >= maxTokens { + return val, types, totalConsumed, nil + } + } + goto st4862 + 
st4862: +//line NONE:1 +ts = 0 + + if p++; p == pe { + goto _test_eof4862 + } + st_case_4862: +//line NONE:1 +ts = p + +//line segment_words_prod.go:11462 + switch data[p] { + case 10: + goto tr4458 + case 13: + goto tr4460 + case 95: + goto tr4463 + case 194: + goto tr4464 + case 195: + goto tr4465 + case 198: + goto tr4467 + case 199: + goto tr4468 + case 203: + goto tr4469 + case 204: + goto tr4470 + case 205: + goto tr4471 + case 206: + goto tr4472 + case 207: + goto tr4473 + case 210: + goto tr4474 + case 212: + goto tr4475 + case 213: + goto tr4476 + case 214: + goto tr4477 + case 215: + goto tr4478 + case 216: + goto tr4479 + case 217: + goto tr4480 + case 219: + goto tr4481 + case 220: + goto tr4482 + case 221: + goto tr4483 + case 222: + goto tr4484 + case 223: + goto tr4485 + case 224: + goto tr4486 + case 225: + goto tr4487 + case 226: + goto tr4488 + case 227: + goto tr4489 + case 228: + goto tr4490 + case 233: + goto tr4492 + case 234: + goto tr4493 + case 237: + goto tr4495 + case 239: + goto tr4496 + case 240: + goto tr4497 + case 243: + goto tr4498 + } + switch { + case data[p] < 97: + switch { + case data[p] < 48: + if 11 <= data[p] && data[p] <= 12 { + goto tr4459 + } + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr4462 + } + default: + goto tr4461 + } + case data[p] > 122: + switch { + case data[p] < 229: + if 196 <= data[p] && data[p] <= 218 { + goto tr4466 + } + case data[p] > 232: + if 235 <= data[p] && data[p] <= 236 { + goto tr4494 + } + default: + goto tr4491 + } + default: + goto tr4462 + } + goto tr4457 +tr1: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4863 +tr4457: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4863 + st4863: + if p++; p == pe { + goto _test_eof4863 + } + st_case_4863: +//line segment_words_prod.go:11597 + 
switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 + st0: + if p++; p == pe { + goto _test_eof0 + } + st_case_0: + if data[p] == 173 { + goto tr1 + } + goto tr0 + st1: + if p++; p == pe { + goto _test_eof1 + } + st_case_1: + if data[p] <= 127 { + goto tr2 + } + goto tr1 + st2: + if p++; p == pe { + goto _test_eof2 + } + st_case_2: + if 176 <= data[p] { + goto tr2 + } + goto tr1 + st3: + if p++; p == pe { + goto _test_eof3 + } + st_case_3: + if 131 <= data[p] && data[p] <= 137 { + goto tr1 + } + goto tr0 + st4: + if p++; p == pe { + goto _test_eof4 + } + st_case_4: + if data[p] == 191 { + goto tr1 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr1 + } + goto tr0 + st5: + if p++; p == pe { + goto _test_eof5 + } + st_case_5: + if data[p] == 135 { + goto tr1 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr0 + st6: + if p++; p == pe { + goto _test_eof6 + } + st_case_6: + if data[p] == 156 { + goto tr1 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr0 + st7: + if p++; p == pe { + goto _test_eof7 + } + st_case_7: + if data[p] == 176 { + goto tr1 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr0 + st8: + if p++; p == pe { + goto _test_eof8 + } + st_case_8: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 
170 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 167: + goto tr1 + } + default: + goto tr1 + } + goto tr0 + st9: + if p++; p == pe { + goto _test_eof9 + } + st_case_9: + switch data[p] { + case 143: + goto tr1 + case 145: + goto tr1 + } + if 176 <= data[p] { + goto tr1 + } + goto tr0 + st10: + if p++; p == pe { + goto _test_eof10 + } + st_case_10: + if 139 <= data[p] { + goto tr0 + } + goto tr1 + st11: + if p++; p == pe { + goto _test_eof11 + } + st_case_11: + if 166 <= data[p] && data[p] <= 176 { + goto tr1 + } + goto tr0 + st12: + if p++; p == pe { + goto _test_eof12 + } + st_case_12: + if 171 <= data[p] && data[p] <= 179 { + goto tr1 + } + goto tr0 + st13: + if p++; p == pe { + goto _test_eof13 + } + st_case_13: + switch data[p] { + case 160: + goto st14 + case 161: + goto st15 + case 163: + goto st16 + case 164: + goto st17 + case 165: + goto st18 + case 167: + goto st20 + case 169: + goto st21 + case 171: + goto st22 + case 173: + goto st24 + case 174: + goto st25 + case 175: + goto st26 + case 176: + goto st27 + case 177: + goto st28 + case 179: + goto st29 + case 180: + goto st30 + case 181: + goto st31 + case 182: + goto st32 + case 183: + goto st33 + case 184: + goto st34 + case 185: + goto st35 + case 186: + goto st36 + case 187: + goto st37 + case 188: + goto st38 + case 189: + goto st39 + case 190: + goto st40 + case 191: + goto st41 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st23 + } + case data[p] >= 166: + goto st19 + } + goto tr0 + st14: + if p++; p == pe { + goto _test_eof14 + } + st_case_14: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 165: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st15: + if p++; p == pe { + goto _test_eof15 + } + st_case_15: + if 153 <= data[p] && data[p] <= 155 { + goto tr1 + } + goto tr2 
+ st16: + if p++; p == pe { + goto _test_eof16 + } + st_case_16: + if 163 <= data[p] { + goto tr1 + } + goto tr2 + st17: + if p++; p == pe { + goto _test_eof17 + } + st_case_17: + if data[p] == 189 { + goto tr2 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr2 + } + goto tr1 + st18: + if p++; p == pe { + goto _test_eof18 + } + st_case_18: + if data[p] == 144 { + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + goto tr1 + st19: + if p++; p == pe { + goto _test_eof19 + } + st_case_19: + if data[p] == 188 { + goto tr1 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr2 + st20: + if p++; p == pe { + goto _test_eof20 + } + st_case_20: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st21: + if p++; p == pe { + goto _test_eof21 + } + st_case_21: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 131: + goto tr2 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st22: + if p++; p == pe { + goto _test_eof22 + } + st_case_22: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + goto tr1 + st23: + if p++; p == pe { + goto _test_eof23 + } + st_case_23: + if data[p] == 188 { + goto tr1 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + 
goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr2 + st24: + if p++; p == pe { + goto _test_eof24 + } + st_case_24: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] >= 150: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st25: + if p++; p == pe { + goto _test_eof25 + } + st_case_25: + if data[p] == 130 { + goto tr1 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr2 + st26: + if p++; p == pe { + goto _test_eof26 + } + st_case_26: + if data[p] == 151 { + goto tr1 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr1 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st27: + if p++; p == pe { + goto _test_eof27 + } + st_case_27: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st28: + if p++; p == pe { + goto _test_eof28 + } + st_case_28: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st29: + if p++; p == pe { + goto _test_eof29 + } + st_case_29: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] >= 149: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st30: + if p++; p == pe { + goto _test_eof30 + } + st_case_30: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case 
data[p] >= 129: + goto tr1 + } + goto tr2 + st31: + if p++; p == pe { + goto _test_eof31 + } + st_case_31: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st32: + if p++; p == pe { + goto _test_eof32 + } + st_case_32: + if 130 <= data[p] && data[p] <= 131 { + goto tr1 + } + goto tr2 + st33: + if p++; p == pe { + goto _test_eof33 + } + st_case_33: + switch data[p] { + case 138: + goto tr1 + case 150: + goto tr1 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr1 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st34: + if p++; p == pe { + goto _test_eof34 + } + st_case_34: + if data[p] == 177 { + goto tr1 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr1 + } + goto tr2 + st35: + if p++; p == pe { + goto _test_eof35 + } + st_case_35: + if 135 <= data[p] && data[p] <= 142 { + goto tr1 + } + goto tr2 + st36: + if p++; p == pe { + goto _test_eof36 + } + st_case_36: + if data[p] == 177 { + goto tr1 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 180: + goto tr1 + } + goto tr2 + st37: + if p++; p == pe { + goto _test_eof37 + } + st_case_37: + if 136 <= data[p] && data[p] <= 141 { + goto tr1 + } + goto tr2 + st38: + if p++; p == pe { + goto _test_eof38 + } + st_case_38: + switch data[p] { + case 181: + goto tr1 + case 183: + goto tr1 + case 185: + goto tr1 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 152: + goto tr1 + } + goto tr2 + st39: + if p++; p == pe { + goto _test_eof39 + } + st_case_39: + if 177 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr2 + st40: + if p++; p == pe { + goto _test_eof40 + } + st_case_40: + switch { + 
case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 141: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st41: + if p++; p == pe { + goto _test_eof41 + } + st_case_41: + if data[p] == 134 { + goto tr1 + } + goto tr2 + st42: + if p++; p == pe { + goto _test_eof42 + } + st_case_42: + switch data[p] { + case 128: + goto st43 + case 129: + goto st44 + case 130: + goto st45 + case 141: + goto st46 + case 156: + goto st47 + case 157: + goto st48 + case 158: + goto st49 + case 159: + goto st50 + case 160: + goto st51 + case 162: + goto st52 + case 164: + goto st53 + case 168: + goto st54 + case 169: + goto st55 + case 170: + goto st56 + case 172: + goto st57 + case 173: + goto st58 + case 174: + goto st59 + case 175: + goto st60 + case 176: + goto st61 + case 179: + goto st62 + case 183: + goto st63 + } + goto tr0 + st43: + if p++; p == pe { + goto _test_eof43 + } + st_case_43: + if 171 <= data[p] && data[p] <= 190 { + goto tr1 + } + goto tr2 + st44: + if p++; p == pe { + goto _test_eof44 + } + st_case_44: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr1 + } + case data[p] >= 150: + goto tr1 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] >= 167: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st45: + if p++; p == pe { + goto _test_eof45 + } + st_case_45: + if data[p] == 143 { + goto tr1 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] >= 130: + goto tr1 + } + goto tr2 + st46: + if p++; p == pe { + goto _test_eof46 + } + st_case_46: + if 157 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr2 + st47: + if p++; p == pe { + goto _test_eof47 + } + st_case_47: + switch { + case data[p] > 148: + if 178 <= 
data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] >= 146: + goto tr1 + } + goto tr2 + st48: + if p++; p == pe { + goto _test_eof48 + } + st_case_48: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] >= 146: + goto tr1 + } + goto tr2 + st49: + if p++; p == pe { + goto _test_eof49 + } + st_case_49: + if 180 <= data[p] { + goto tr1 + } + goto tr2 + st50: + if p++; p == pe { + goto _test_eof50 + } + st_case_50: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 148: + goto tr2 + } + goto tr1 + st51: + if p++; p == pe { + goto _test_eof51 + } + st_case_51: + if 139 <= data[p] && data[p] <= 142 { + goto tr1 + } + goto tr2 + st52: + if p++; p == pe { + goto _test_eof52 + } + st_case_52: + if data[p] == 169 { + goto tr1 + } + goto tr2 + st53: + if p++; p == pe { + goto _test_eof53 + } + st_case_53: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] >= 160: + goto tr1 + } + goto tr2 + st54: + if p++; p == pe { + goto _test_eof54 + } + st_case_54: + if 151 <= data[p] && data[p] <= 155 { + goto tr1 + } + goto tr2 + st55: + if p++; p == pe { + goto _test_eof55 + } + st_case_55: + if data[p] == 191 { + goto tr1 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 149: + goto tr1 + } + goto tr2 + st56: + if p++; p == pe { + goto _test_eof56 + } + st_case_56: + if 176 <= data[p] && data[p] <= 190 { + goto tr1 + } + goto tr2 + st57: + if p++; p == pe { + goto _test_eof57 + } + st_case_57: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st58: + if p++; p == pe { + goto _test_eof58 + } + st_case_58: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr1 + st59: + if p++; p == pe { + goto _test_eof59 + } + st_case_59: + switch { + case data[p] > 
130: + if 161 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st60: + if p++; p == pe { + goto _test_eof60 + } + st_case_60: + if 166 <= data[p] && data[p] <= 179 { + goto tr1 + } + goto tr2 + st61: + if p++; p == pe { + goto _test_eof61 + } + st_case_61: + if 164 <= data[p] && data[p] <= 183 { + goto tr1 + } + goto tr2 + st62: + if p++; p == pe { + goto _test_eof62 + } + st_case_62: + if data[p] == 173 { + goto tr1 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr1 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr1 + } + case data[p] >= 178: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st63: + if p++; p == pe { + goto _test_eof63 + } + st_case_63: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st64: + if p++; p == pe { + goto _test_eof64 + } + st_case_64: + switch data[p] { + case 128: + goto st65 + case 129: + goto st66 + case 131: + goto st67 + case 179: + goto st68 + case 181: + goto st69 + case 183: + goto st70 + } + goto tr0 + st65: + if p++; p == pe { + goto _test_eof65 + } + st_case_65: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr1 + } + case data[p] >= 140: + goto tr1 + } + goto tr2 + st66: + if p++; p == pe { + goto _test_eof66 + } + st_case_66: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] >= 160: + goto tr1 + } + goto tr2 + st67: + if p++; p == pe { + goto _test_eof67 + } + st_case_67: + if 144 <= data[p] && data[p] <= 176 { + goto tr1 + } + goto tr2 + st68: + if p++; p == pe { + goto _test_eof68 + } + st_case_68: + if 175 <= data[p] && data[p] <= 177 { + goto tr1 + } + goto tr2 + st69: + if p++; p == pe { + goto _test_eof69 + } + st_case_69: + if data[p] == 191 { + goto tr1 + } + goto tr2 + st70: + if p++; p == pe { + 
goto _test_eof70 + } + st_case_70: + if 160 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr2 + st71: + if p++; p == pe { + goto _test_eof71 + } + st_case_71: + switch data[p] { + case 128: + goto st72 + case 130: + goto st73 + } + goto tr0 + st72: + if p++; p == pe { + goto _test_eof72 + } + st_case_72: + if 170 <= data[p] && data[p] <= 175 { + goto tr1 + } + goto tr2 + st73: + if p++; p == pe { + goto _test_eof73 + } + st_case_73: + if 153 <= data[p] && data[p] <= 154 { + goto tr1 + } + goto tr2 + st74: + if p++; p == pe { + goto _test_eof74 + } + st_case_74: + switch data[p] { + case 153: + goto st75 + case 154: + goto st76 + case 155: + goto st77 + case 160: + goto st78 + case 162: + goto st79 + case 163: + goto st80 + case 164: + goto st81 + case 165: + goto st82 + case 166: + goto st83 + case 167: + goto st84 + case 168: + goto st85 + case 169: + goto st86 + case 170: + goto st87 + case 171: + goto st88 + case 175: + goto st89 + } + goto tr0 + st75: + if p++; p == pe { + goto _test_eof75 + } + st_case_75: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] >= 175: + goto tr1 + } + goto tr2 + st76: + if p++; p == pe { + goto _test_eof76 + } + st_case_76: + if 158 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr2 + st77: + if p++; p == pe { + goto _test_eof77 + } + st_case_77: + if 176 <= data[p] && data[p] <= 177 { + goto tr1 + } + goto tr2 + st78: + if p++; p == pe { + goto _test_eof78 + } + st_case_78: + switch data[p] { + case 130: + goto tr1 + case 134: + goto tr1 + case 139: + goto tr1 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr1 + } + goto tr2 + st79: + if p++; p == pe { + goto _test_eof79 + } + st_case_79: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st80: + if p++; p == pe { + goto _test_eof80 + } + st_case_80: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 133: + 
goto tr2 + } + goto tr1 + st81: + if p++; p == pe { + goto _test_eof81 + } + st_case_81: + if 166 <= data[p] && data[p] <= 173 { + goto tr1 + } + goto tr2 + st82: + if p++; p == pe { + goto _test_eof82 + } + st_case_82: + if 135 <= data[p] && data[p] <= 147 { + goto tr1 + } + goto tr2 + st83: + if p++; p == pe { + goto _test_eof83 + } + st_case_83: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st84: + if p++; p == pe { + goto _test_eof84 + } + st_case_84: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr1 + st85: + if p++; p == pe { + goto _test_eof85 + } + st_case_85: + if 169 <= data[p] && data[p] <= 182 { + goto tr1 + } + goto tr2 + st86: + if p++; p == pe { + goto _test_eof86 + } + st_case_86: + if data[p] == 131 { + goto tr1 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] >= 140: + goto tr1 + } + goto tr2 + st87: + if p++; p == pe { + goto _test_eof87 + } + st_case_87: + if data[p] == 176 { + goto tr1 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st88: + if p++; p == pe { + goto _test_eof88 + } + st_case_88: + if data[p] == 129 { + goto tr1 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] >= 171: + goto tr1 + } + goto tr2 + st89: + if p++; p == pe { + goto _test_eof89 + } + st_case_89: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 163: + goto tr1 + } + goto tr2 + st90: + if p++; p == pe { + goto _test_eof90 + } + st_case_90: + switch data[p] { + case 172: + goto st91 + case 184: + goto st92 + case 187: + goto st69 + case 190: + goto st76 + case 191: + goto st93 + } + goto tr0 + st91: + 
if p++; p == pe { + goto _test_eof91 + } + st_case_91: + if data[p] == 158 { + goto tr1 + } + goto tr2 + st92: + if p++; p == pe { + goto _test_eof92 + } + st_case_92: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st93: + if p++; p == pe { + goto _test_eof93 + } + st_case_93: + if 185 <= data[p] && data[p] <= 187 { + goto tr1 + } + goto tr2 + st94: + if p++; p == pe { + goto _test_eof94 + } + st_case_94: + switch data[p] { + case 144: + goto st95 + case 145: + goto st101 + case 150: + goto st120 + case 155: + goto st125 + case 157: + goto st127 + case 158: + goto st134 + } + goto tr0 + st95: + if p++; p == pe { + goto _test_eof95 + } + st_case_95: + switch data[p] { + case 135: + goto st96 + case 139: + goto st97 + case 141: + goto st98 + case 168: + goto st99 + case 171: + goto st100 + } + goto tr2 + st96: + if p++; p == pe { + goto _test_eof96 + } + st_case_96: + if data[p] == 189 { + goto tr1 + } + goto tr2 + st97: + if p++; p == pe { + goto _test_eof97 + } + st_case_97: + if data[p] == 160 { + goto tr1 + } + goto tr2 + st98: + if p++; p == pe { + goto _test_eof98 + } + st_case_98: + if 182 <= data[p] && data[p] <= 186 { + goto tr1 + } + goto tr2 + st99: + if p++; p == pe { + goto _test_eof99 + } + st_case_99: + if data[p] == 191 { + goto tr1 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr1 + } + case data[p] >= 140: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st100: + if p++; p == pe { + goto _test_eof100 + } + st_case_100: + if 165 <= data[p] && data[p] <= 166 { + goto tr1 + } + goto tr2 + st101: + if p++; p == pe { + goto _test_eof101 + } + st_case_101: + switch data[p] { + case 128: + goto st102 + case 129: + goto st103 + case 130: + goto st104 + case 132: + goto st105 + case 133: + goto st106 + case 134: + 
goto st107 + case 135: + goto st108 + case 136: + goto st109 + case 139: + goto st110 + case 140: + goto st111 + case 141: + goto st112 + case 146: + goto st113 + case 147: + goto st114 + case 150: + goto st115 + case 151: + goto st116 + case 152: + goto st113 + case 153: + goto st117 + case 154: + goto st118 + case 156: + goto st119 + } + goto tr2 + st102: + if p++; p == pe { + goto _test_eof102 + } + st_case_102: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st103: + if p++; p == pe { + goto _test_eof103 + } + st_case_103: + if 135 <= data[p] && data[p] <= 190 { + goto tr2 + } + goto tr1 + st104: + if p++; p == pe { + goto _test_eof104 + } + st_case_104: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st105: + if p++; p == pe { + goto _test_eof105 + } + st_case_105: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st106: + if p++; p == pe { + goto _test_eof106 + } + st_case_106: + if data[p] == 179 { + goto tr1 + } + goto tr2 + st107: + if p++; p == pe { + goto _test_eof107 + } + st_case_107: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st108: + if p++; p == pe { + goto _test_eof108 + } + st_case_108: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr1 + st109: + if p++; p == pe { + goto _test_eof109 + } + st_case_109: + if 172 <= data[p] && data[p] <= 183 { + goto tr1 + } + goto tr2 + st110: + if p++; p == pe { + goto _test_eof110 + } + st_case_110: + if 159 <= data[p] && data[p] <= 170 { + goto tr1 + } + goto tr2 + st111: + if p++; p == pe { + goto _test_eof111 + } + st_case_111: + if data[p] == 188 { + goto tr1 + } + 
switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st112: + if p++; p == pe { + goto _test_eof112 + } + st_case_112: + if data[p] == 151 { + goto tr1 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr1 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr2 + st113: + if p++; p == pe { + goto _test_eof113 + } + st_case_113: + if 176 <= data[p] { + goto tr1 + } + goto tr2 + st114: + if p++; p == pe { + goto _test_eof114 + } + st_case_114: + if 132 <= data[p] { + goto tr2 + } + goto tr1 + st115: + if p++; p == pe { + goto _test_eof115 + } + st_case_115: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr1 + } + case data[p] >= 175: + goto tr1 + } + goto tr2 + st116: + if p++; p == pe { + goto _test_eof116 + } + st_case_116: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr1 + st117: + if p++; p == pe { + goto _test_eof117 + } + st_case_117: + if 129 <= data[p] { + goto tr2 + } + goto tr1 + st118: + if p++; p == pe { + goto _test_eof118 + } + st_case_118: + if 171 <= data[p] && data[p] <= 183 { + goto tr1 + } + goto tr2 + st119: + if p++; p == pe { + goto _test_eof119 + } + st_case_119: + if 157 <= data[p] && data[p] <= 171 { + goto tr1 + } + goto tr2 + st120: + if p++; p == pe { + goto _test_eof120 + } + st_case_120: + switch data[p] { + case 171: + goto st121 + case 172: + goto st122 + case 189: + goto st123 + case 190: + goto st124 + } + goto tr2 + st121: + if p++; p == pe { + goto _test_eof121 + } + st_case_121: + if 176 <= data[p] && data[p] <= 180 { + goto tr1 + } + goto tr2 + st122: + if p++; p == 
pe { + goto _test_eof122 + } + st_case_122: + if 176 <= data[p] && data[p] <= 182 { + goto tr1 + } + goto tr2 + st123: + if p++; p == pe { + goto _test_eof123 + } + st_case_123: + if 145 <= data[p] && data[p] <= 190 { + goto tr1 + } + goto tr2 + st124: + if p++; p == pe { + goto _test_eof124 + } + st_case_124: + if 143 <= data[p] && data[p] <= 146 { + goto tr1 + } + goto tr2 + st125: + if p++; p == pe { + goto _test_eof125 + } + st_case_125: + if data[p] == 178 { + goto st126 + } + goto tr2 + st126: + if p++; p == pe { + goto _test_eof126 + } + st_case_126: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] >= 157: + goto tr1 + } + goto tr2 + st127: + if p++; p == pe { + goto _test_eof127 + } + st_case_127: + switch data[p] { + case 133: + goto st128 + case 134: + goto st129 + case 137: + goto st130 + case 168: + goto st131 + case 169: + goto st132 + case 170: + goto st133 + } + goto tr2 + st128: + if p++; p == pe { + goto _test_eof128 + } + st_case_128: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr1 + } + case data[p] >= 165: + goto tr1 + } + goto tr2 + st129: + if p++; p == pe { + goto _test_eof129 + } + st_case_129: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr1 + st130: + if p++; p == pe { + goto _test_eof130 + } + st_case_130: + if 130 <= data[p] && data[p] <= 132 { + goto tr1 + } + goto tr2 + st131: + if p++; p == pe { + goto _test_eof131 + } + st_case_131: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr2 + st132: + if p++; p == pe { + goto _test_eof132 + } + st_case_132: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto tr1 + st133: + if p++; p == pe { + goto _test_eof133 + } + st_case_133: + if data[p] == 132 { + goto 
tr1 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] >= 155: + goto tr1 + } + goto tr2 + st134: + if p++; p == pe { + goto _test_eof134 + } + st_case_134: + if data[p] == 163 { + goto st135 + } + goto tr2 + st135: + if p++; p == pe { + goto _test_eof135 + } + st_case_135: + if 144 <= data[p] && data[p] <= 150 { + goto tr1 + } + goto tr2 + st136: + if p++; p == pe { + goto _test_eof136 + } + st_case_136: + if data[p] == 160 { + goto st137 + } + goto tr0 + st137: + if p++; p == pe { + goto _test_eof137 + } + st_case_137: + switch data[p] { + case 128: + goto st138 + case 129: + goto st139 + case 132: + goto st1 + case 135: + goto st2 + } + if 133 <= data[p] && data[p] <= 134 { + goto st140 + } + goto tr2 + st138: + if p++; p == pe { + goto _test_eof138 + } + st_case_138: + if data[p] == 129 { + goto tr1 + } + if 160 <= data[p] { + goto tr1 + } + goto tr2 + st139: + if p++; p == pe { + goto _test_eof139 + } + st_case_139: + if 192 <= data[p] { + goto tr2 + } + goto tr1 + st140: + if p++; p == pe { + goto _test_eof140 + } + st_case_140: + goto tr1 +tr4460: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4864 + st4864: + if p++; p == pe { + goto _test_eof4864 + } + st_case_4864: +//line segment_words_prod.go:13746 + if data[p] == 10 { + goto tr4520 + } + goto tr4519 +tr1880: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + goto st4865 +tr4461: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + goto st4865 + st4865: + if p++; p == pe { + goto _test_eof4865 + } + st_case_4865: +//line segment_words_prod.go:13782 + switch data[p] { + case 39: + goto st141 + case 44: + goto st141 + case 46: + goto st141 + case 59: + goto st141 + case 95: + goto tr1485 + case 194: + goto st2046 + case 195: + goto 
st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st2047 + case 205: + goto st2048 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st2049 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2050 + case 215: + goto st2051 + case 216: + goto st2052 + case 217: + goto st2053 + case 219: + goto st2054 + case 220: + goto st2055 + case 221: + goto st2056 + case 222: + goto st2057 + case 223: + goto st2058 + case 224: + goto st2059 + case 225: + goto st2091 + case 226: + goto st2113 + case 227: + goto st2120 + case 234: + goto st2123 + case 237: + goto st287 + case 239: + goto st2139 + case 240: + goto st2145 + case 243: + goto st2187 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr126 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4521 + st141: + if p++; p == pe { + goto _test_eof141 + } + st_case_141: + switch data[p] { + case 194: + goto st1901 + case 204: + goto st1902 + case 205: + goto st1903 + case 210: + goto st1904 + case 214: + goto st1905 + case 215: + goto st1906 + case 216: + goto st1907 + case 217: + goto st1908 + case 219: + goto st1909 + case 220: + goto st1910 + case 221: + goto st1911 + case 222: + goto st1912 + case 223: + goto st1913 + case 224: + goto st1914 + case 225: + goto st1943 + case 226: + goto st1966 + case 227: + goto st1973 + case 234: + goto st1976 + case 239: + goto st1993 + case 240: + goto st1997 + case 243: + goto st2041 + } + if 48 <= data[p] && data[p] <= 57 { + goto tr126 + } + goto tr125 +tr126: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + goto st4866 + st4866: + if p++; p == pe { + goto _test_eof4866 + } + st_case_4866: +//line 
segment_words_prod.go:13947 + switch data[p] { + case 39: + goto st141 + case 44: + goto st141 + case 46: + goto st141 + case 59: + goto st141 + case 95: + goto tr1485 + case 194: + goto st1752 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st1753 + case 205: + goto st1754 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st1755 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1756 + case 215: + goto st1757 + case 216: + goto st1758 + case 217: + goto st1759 + case 219: + goto st1760 + case 220: + goto st1761 + case 221: + goto st1762 + case 222: + goto st1763 + case 223: + goto st1764 + case 224: + goto st1765 + case 225: + goto st1797 + case 226: + goto st1819 + case 227: + goto st1826 + case 234: + goto st1829 + case 237: + goto st287 + case 239: + goto st1845 + case 240: + goto st1853 + case 243: + goto st1895 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr126 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4521 +tr148: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4867 + st4867: + if p++; p == pe { + goto _test_eof4867 + } + st_case_4867: +//line segment_words_prod.go:14059 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto 
st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st142: + if p++; p == pe { + goto _test_eof142 + } + st_case_142: + switch data[p] { + case 194: + goto st143 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st149 + case 205: + goto st150 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st153 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st156 + case 215: + goto st157 + case 216: + goto st158 + case 217: + goto st159 + case 219: + goto st160 + case 220: + goto st161 + case 221: + goto st162 + case 222: + goto st163 + case 223: + goto st164 + case 224: + goto st165 + case 225: + goto st198 + case 226: + goto st238 + case 227: + goto st256 + case 234: + goto st261 + case 237: + goto st287 + case 239: + goto st290 + case 240: + goto st306 + case 243: + goto st407 + } + switch { + case data[p] < 97: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr2 + st143: + if p++; p == pe { + goto _test_eof143 + } + st_case_143: + switch data[p] { + case 170: + 
goto tr148 + case 173: + goto st142 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr2 + st144: + if p++; p == pe { + goto _test_eof144 + } + st_case_144: + switch { + case data[p] < 152: + if 128 <= data[p] && data[p] <= 150 { + goto tr148 + } + case data[p] > 182: + if 184 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st145: + if p++; p == pe { + goto _test_eof145 + } + st_case_145: + goto tr148 + st146: + if p++; p == pe { + goto _test_eof146 + } + st_case_146: + if 192 <= data[p] { + goto tr2 + } + goto tr148 + st147: + if p++; p == pe { + goto _test_eof147 + } + st_case_147: + if data[p] <= 127 { + goto tr2 + } + goto tr148 + st148: + if p++; p == pe { + goto _test_eof148 + } + st_case_148: + if data[p] == 173 { + goto tr2 + } + switch { + case data[p] < 146: + if 130 <= data[p] && data[p] <= 133 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 171: + if 175 <= data[p] { + goto tr2 + } + case data[p] >= 165: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st149: + if p++; p == pe { + goto _test_eof149 + } + st_case_149: + if 128 <= data[p] { + goto st142 + } + goto tr2 + st150: + if p++; p == pe { + goto _test_eof150 + } + st_case_150: + switch data[p] { + case 181: + goto tr2 + case 190: + goto tr2 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr2 + } + goto st142 + st151: + if p++; p == pe { + goto _test_eof151 + } + st_case_151: + switch data[p] { + case 134: + goto tr148 + case 140: + goto tr148 + } + switch { + case data[p] < 142: + if 136 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 161: + if 163 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st152: + if p++; p == pe { + goto _test_eof152 + } + st_case_152: + if data[p] == 182 { + goto tr2 + } + goto 
tr148 + st153: + if p++; p == pe { + goto _test_eof153 + } + st_case_153: + if data[p] == 130 { + goto tr2 + } + if 131 <= data[p] && data[p] <= 137 { + goto st142 + } + goto tr148 + st154: + if p++; p == pe { + goto _test_eof154 + } + st_case_154: + if data[p] == 176 { + goto tr2 + } + goto tr148 + st155: + if p++; p == pe { + goto _test_eof155 + } + st_case_155: + switch { + case data[p] > 152: + if 154 <= data[p] && data[p] <= 160 { + goto tr2 + } + case data[p] >= 151: + goto tr2 + } + goto tr148 + st156: + if p++; p == pe { + goto _test_eof156 + } + st_case_156: + if data[p] == 190 { + goto tr2 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr2 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st157: + if p++; p == pe { + goto _test_eof157 + } + st_case_157: + if data[p] == 135 { + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto st142 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] >= 144: + goto tr148 + } + default: + goto st142 + } + goto tr2 + st158: + if p++; p == pe { + goto _test_eof158 + } + st_case_158: + if data[p] == 156 { + goto st142 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto st142 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto st142 + } + goto tr2 + st159: + if p++; p == pe { + goto _test_eof159 + } + st_case_159: + if data[p] == 176 { + goto st142 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + if 174 <= data[p] { + goto tr148 + } + default: + goto st142 + } + goto tr2 + st160: + if p++; p == pe { + goto _test_eof160 + } + st_case_160: + switch data[p] { + case 148: + goto tr2 + case 158: + goto tr2 + case 169: + goto tr2 + } + switch { + case data[p] < 176: + 
switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto st142 + } + case data[p] >= 150: + goto st142 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 189: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st161: + if p++; p == pe { + goto _test_eof161 + } + st_case_161: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto st142 + } + case data[p] > 175: + if 176 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st162: + if p++; p == pe { + goto _test_eof162 + } + st_case_162: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr2 + } + goto st142 + st163: + if p++; p == pe { + goto _test_eof163 + } + st_case_163: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 166: + goto st142 + } + goto tr148 + st164: + if p++; p == pe { + goto _test_eof164 + } + st_case_164: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 171: + if 138 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + default: + goto st142 + } + goto tr2 + st165: + if p++; p == pe { + goto _test_eof165 + } + st_case_165: + switch data[p] { + case 160: + goto st166 + case 161: + goto st167 + case 162: + goto st168 + case 163: + goto st169 + case 164: + goto st170 + case 165: + goto st171 + case 166: + goto st172 + case 167: + goto st173 + case 168: + goto st174 + case 169: + goto st175 + case 170: + goto st176 + case 171: + goto st177 + case 172: + goto st178 + case 173: + goto st179 + case 174: + goto st180 + case 175: + goto st181 + case 176: + goto st182 + case 177: + goto st183 + case 178: + goto st184 + case 179: + goto st185 + case 180: + goto st186 + case 181: + goto st187 + case 182: + goto st188 + case 183: + goto st189 + case 184: + goto 
st190 + case 185: + goto st191 + case 186: + goto st192 + case 187: + goto st193 + case 188: + goto st194 + case 189: + goto st195 + case 190: + goto st196 + case 191: + goto st197 + } + goto tr2 + st166: + if p++; p == pe { + goto _test_eof166 + } + st_case_166: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st167: + if p++; p == pe { + goto _test_eof167 + } + st_case_167: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st168: + if p++; p == pe { + goto _test_eof168 + } + st_case_168: + if 160 <= data[p] && data[p] <= 180 { + goto tr148 + } + goto tr2 + st169: + if p++; p == pe { + goto _test_eof169 + } + st_case_169: + if 163 <= data[p] { + goto st142 + } + goto tr2 + st170: + if p++; p == pe { + goto _test_eof170 + } + st_case_170: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto st142 + st171: + if p++; p == pe { + goto _test_eof171 + } + st_case_171: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 176: + if 177 <= data[p] { + goto tr148 + } + default: + goto tr2 + } + goto st142 + st172: + if p++; p == pe { + goto _test_eof172 + } + st_case_172: + switch data[p] { + case 132: + goto tr2 + case 169: + goto tr2 + case 177: + goto tr2 + case 188: + goto st142 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr2 + } + case data[p] >= 129: + goto st142 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr2 + } + case data[p] > 187: + if 190 <= data[p] { + goto st142 + } + default: + goto tr2 + } + default: + 
goto tr2 + } + goto tr148 + st173: + if p++; p == pe { + goto _test_eof173 + } + st_case_173: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr2 + } + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + if 143 <= data[p] && data[p] <= 150 { + goto tr2 + } + default: + goto tr2 + } + case data[p] > 155: + switch { + case data[p] < 164: + if 156 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto st142 + st174: + if p++; p == pe { + goto _test_eof174 + } + st_case_174: + if data[p] == 188 { + goto st142 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto st142 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st175: + if p++; p == pe { + goto _test_eof175 + } + st_case_175: + if data[p] == 157 { + goto tr2 + } + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr2 + } + default: + goto tr2 + } + case data[p] > 152: + switch { + case data[p] < 159: + if 153 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 178: + goto tr148 + } + 
default: + goto tr2 + } + default: + goto tr2 + } + goto st142 + st176: + if p++; p == pe { + goto _test_eof176 + } + st_case_176: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto st142 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st177: + if p++; p == pe { + goto _test_eof177 + } + st_case_177: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 160: + if 142 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + goto st142 + st178: + if p++; p == pe { + goto _test_eof178 + } + st_case_178: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st179: + if p++; p == pe { + goto _test_eof179 + } + st_case_179: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto st142 + } + case 
data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto st142 + } + default: + goto st142 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + if 162 <= data[p] && data[p] <= 163 { + goto st142 + } + default: + goto tr148 + } + default: + goto st142 + } + goto tr2 + st180: + if p++; p == pe { + goto _test_eof180 + } + st_case_180: + switch data[p] { + case 130: + goto st142 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st181: + if p++; p == pe { + goto _test_eof181 + } + st_case_181: + switch data[p] { + case 144: + goto tr148 + case 151: + goto st142 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto st142 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st182: + if p++; p == pe { + goto _test_eof182 + } + st_case_182: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto st142 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto st142 + } + default: + goto tr148 + } + default: + goto 
tr148 + } + goto tr2 + st183: + if p++; p == pe { + goto _test_eof183 + } + st_case_183: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + case 151: + goto tr2 + } + switch { + case data[p] < 155: + switch { + case data[p] > 148: + if 152 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 142: + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2 + } + goto st142 + st184: + if p++; p == pe { + goto _test_eof184 + } + st_case_184: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto st142 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st185: + if p++; p == pe { + goto _test_eof185 + } + st_case_185: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto st142 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto st142 + } + default: + goto st142 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + default: + goto st142 + } + default: + goto st142 + } + goto tr2 + st186: + if p++; p == pe { + goto _test_eof186 + } + st_case_186: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto st142 + } + case data[p] > 144: + 
switch { + case data[p] > 186: + if 190 <= data[p] { + goto st142 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st187: + if p++; p == pe { + goto _test_eof187 + } + st_case_187: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + case 142: + goto tr148 + } + switch { + case data[p] < 159: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 158 { + goto tr2 + } + case data[p] >= 143: + goto tr2 + } + case data[p] > 161: + switch { + case data[p] < 186: + if 164 <= data[p] && data[p] <= 185 { + goto tr2 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto st142 + st188: + if p++; p == pe { + goto _test_eof188 + } + st_case_188: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st189: + if p++; p == pe { + goto _test_eof189 + } + st_case_189: + switch data[p] { + case 138: + goto st142 + case 150: + goto st142 + } + switch { + case data[p] < 143: + if 128 <= data[p] && data[p] <= 134 { + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto st142 + } + case data[p] >= 152: + goto st142 + } + default: + goto st142 + } + goto tr2 + st190: + if p++; p == pe { + goto _test_eof190 + } + st_case_190: + if data[p] == 177 { + goto st142 + } + if 180 <= data[p] && data[p] <= 186 { + goto st142 + } + goto tr2 + st191: + if p++; p == pe { + goto _test_eof191 + } + st_case_191: + if 135 <= data[p] && data[p] <= 142 { + goto st142 + } + goto tr2 + st192: + if p++; p == pe { + goto _test_eof192 + } + st_case_192: + if data[p] == 177 { + goto st142 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] 
<= 188 { + goto st142 + } + case data[p] >= 180: + goto st142 + } + goto tr2 + st193: + if p++; p == pe { + goto _test_eof193 + } + st_case_193: + if 136 <= data[p] && data[p] <= 141 { + goto st142 + } + goto tr2 + st194: + if p++; p == pe { + goto _test_eof194 + } + st_case_194: + switch data[p] { + case 128: + goto tr148 + case 181: + goto st142 + case 183: + goto st142 + case 185: + goto st142 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 152: + goto st142 + } + goto tr2 + st195: + if p++; p == pe { + goto _test_eof195 + } + st_case_195: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st196: + if p++; p == pe { + goto _test_eof196 + } + st_case_196: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto st142 + } + case data[p] >= 128: + goto st142 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto st142 + } + case data[p] >= 141: + goto st142 + } + default: + goto tr148 + } + goto tr2 + st197: + if p++; p == pe { + goto _test_eof197 + } + st_case_197: + if data[p] == 134 { + goto st142 + } + goto tr2 + st198: + if p++; p == pe { + goto _test_eof198 + } + st_case_198: + switch data[p] { + case 128: + goto st199 + case 129: + goto st200 + case 130: + goto st201 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st207 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st214 + case 157: + goto st215 + case 158: + goto st216 + case 159: + goto st217 + case 160: + goto st218 + case 161: + goto st219 + case 162: + goto st220 + 
case 163: + goto st221 + case 164: + goto st222 + case 168: + goto st223 + case 169: + goto st224 + case 170: + goto st225 + case 172: + goto st226 + case 173: + goto st227 + case 174: + goto st228 + case 175: + goto st229 + case 176: + goto st230 + case 177: + goto st231 + case 179: + goto st232 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st233 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr2 + st199: + if p++; p == pe { + goto _test_eof199 + } + st_case_199: + if 171 <= data[p] && data[p] <= 190 { + goto st142 + } + goto tr2 + st200: + if p++; p == pe { + goto _test_eof200 + } + st_case_200: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto st142 + } + case data[p] >= 150: + goto st142 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto st142 + } + case data[p] >= 167: + goto st142 + } + default: + goto st142 + } + goto tr2 + st201: + if p++; p == pe { + goto _test_eof201 + } + st_case_201: + if data[p] == 143 { + goto st142 + } + switch { + case data[p] < 154: + if 130 <= data[p] && data[p] <= 141 { + goto st142 + } + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + default: + goto st142 + } + goto tr2 + st202: + if p++; p == pe { + goto _test_eof202 + } + st_case_202: + switch data[p] { + case 134: + goto tr2 + case 187: + goto tr2 + } + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] >= 136: + goto tr2 + } + goto tr148 + st203: + if p++; p == pe { + goto _test_eof203 + } + st_case_203: + switch data[p] { + case 137: + goto tr2 + case 151: + goto tr2 + case 153: + goto tr2 + } + switch { + case 
data[p] > 143: + if 158 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + goto tr148 + st204: + if p++; p == pe { + goto _test_eof204 + } + st_case_204: + switch data[p] { + case 137: + goto tr2 + case 177: + goto tr2 + } + switch { + case data[p] < 182: + if 142 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 183: + if 191 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st205: + if p++; p == pe { + goto _test_eof205 + } + st_case_205: + if data[p] == 128 { + goto tr148 + } + switch { + case data[p] < 136: + if 130 <= data[p] && data[p] <= 133 { + goto tr148 + } + case data[p] > 150: + if 152 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st206: + if p++; p == pe { + goto _test_eof206 + } + st_case_206: + if data[p] == 145 { + goto tr2 + } + if 150 <= data[p] && data[p] <= 151 { + goto tr2 + } + goto tr148 + st207: + if p++; p == pe { + goto _test_eof207 + } + st_case_207: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr2 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st208: + if p++; p == pe { + goto _test_eof208 + } + st_case_208: + switch { + case data[p] > 143: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st209: + if p++; p == pe { + goto _test_eof209 + } + st_case_209: + switch { + case data[p] > 183: + if 190 <= data[p] { + goto tr2 + } + case data[p] >= 182: + goto tr2 + } + goto tr148 + st210: + if p++; p == pe { + goto _test_eof210 + } + st_case_210: + if 129 <= data[p] { + goto tr148 + } + goto tr2 + st211: + if p++; p == pe { + goto _test_eof211 + } + st_case_211: + switch { + case data[p] > 174: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto tr148 + st212: + if p++; p == pe { + goto _test_eof212 + } + st_case_212: + switch { + case data[p] > 154: + if 160 <= data[p] { + goto tr148 + } + 
case data[p] >= 129: + goto tr148 + } + goto tr2 + st213: + if p++; p == pe { + goto _test_eof213 + } + st_case_213: + switch { + case data[p] > 173: + if 185 <= data[p] { + goto tr2 + } + case data[p] >= 171: + goto tr2 + } + goto tr148 + st214: + if p++; p == pe { + goto _test_eof214 + } + st_case_214: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto st142 + } + case data[p] >= 160: + goto tr148 + } + default: + goto st142 + } + goto tr2 + st215: + if p++; p == pe { + goto _test_eof215 + } + st_case_215: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto st142 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st216: + if p++; p == pe { + goto _test_eof216 + } + st_case_216: + if 180 <= data[p] { + goto st142 + } + goto tr2 + st217: + if p++; p == pe { + goto _test_eof217 + } + st_case_217: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 148: + goto tr2 + } + goto st142 + st218: + if p++; p == pe { + goto _test_eof218 + } + st_case_218: + switch { + case data[p] > 142: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto st142 + } + goto tr2 + st219: + if p++; p == pe { + goto _test_eof219 + } + st_case_219: + if 184 <= data[p] { + goto tr2 + } + goto tr148 + st220: + if p++; p == pe { + goto _test_eof220 + } + st_case_220: + if data[p] == 169 { + goto st142 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st221: + if p++; p == pe { + goto _test_eof221 + } + st_case_221: + 
if 182 <= data[p] { + goto tr2 + } + goto tr148 + st222: + if p++; p == pe { + goto _test_eof222 + } + st_case_222: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st223: + if p++; p == pe { + goto _test_eof223 + } + st_case_223: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st224: + if p++; p == pe { + goto _test_eof224 + } + st_case_224: + if data[p] == 191 { + goto st142 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto st142 + } + case data[p] >= 149: + goto st142 + } + goto tr2 + st225: + if p++; p == pe { + goto _test_eof225 + } + st_case_225: + if 176 <= data[p] && data[p] <= 190 { + goto st142 + } + goto tr2 + st226: + if p++; p == pe { + goto _test_eof226 + } + st_case_226: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto st142 + } + case data[p] > 179: + if 180 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st227: + if p++; p == pe { + goto _test_eof227 + } + st_case_227: + switch { + case data[p] < 140: + if 133 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto st142 + st228: + if p++; p == pe { + goto _test_eof228 + } + st_case_228: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto st142 + } + case data[p] > 173: + switch { + case data[p] > 175: + if 186 <= data[p] { + goto tr148 + } + case data[p] >= 174: + goto tr148 + } + default: + goto st142 + } + goto tr2 + st229: + if p++; p == pe { + goto _test_eof229 + } + st_case_229: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 166: + 
goto st142 + } + goto tr148 + st230: + if p++; p == pe { + goto _test_eof230 + } + st_case_230: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st231: + if p++; p == pe { + goto _test_eof231 + } + st_case_231: + switch { + case data[p] > 143: + if 154 <= data[p] && data[p] <= 189 { + goto tr148 + } + case data[p] >= 141: + goto tr148 + } + goto tr2 + st232: + if p++; p == pe { + goto _test_eof232 + } + st_case_232: + if data[p] == 173 { + goto st142 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto st142 + } + case data[p] >= 144: + goto st142 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto st142 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto st142 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st233: + if p++; p == pe { + goto _test_eof233 + } + st_case_233: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 128: + goto st142 + } + goto tr2 + st234: + if p++; p == pe { + goto _test_eof234 + } + st_case_234: + switch { + case data[p] > 151: + if 158 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] >= 150: + goto tr2 + } + goto tr148 + st235: + if p++; p == pe { + goto _test_eof235 + } + st_case_235: + switch data[p] { + case 152: + goto tr2 + case 154: + goto tr2 + case 156: + goto tr2 + case 158: + goto tr2 + } + switch { + case data[p] < 142: + if 134 <= data[p] && data[p] <= 135 { + goto tr2 + } + case data[p] > 143: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st236: + if p++; p == pe { + goto _test_eof236 + } + st_case_236: + if data[p] == 190 { + goto tr148 + } + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + 
goto tr2 + st237: + if p++; p == pe { + goto _test_eof237 + } + st_case_237: + switch { + case data[p] < 150: + switch { + case data[p] < 134: + if 130 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 140: + if 144 <= data[p] && data[p] <= 147 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 155: + switch { + case data[p] < 178: + if 160 <= data[p] && data[p] <= 172 { + goto tr148 + } + case data[p] > 180: + if 182 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st238: + if p++; p == pe { + goto _test_eof238 + } + st_case_238: + switch data[p] { + case 128: + goto st239 + case 129: + goto st240 + case 130: + goto st241 + case 131: + goto st242 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st250 + case 180: + goto st251 + case 181: + goto st252 + case 182: + goto st253 + case 183: + goto st254 + case 184: + goto st255 + } + goto tr2 + st239: + if p++; p == pe { + goto _test_eof239 + } + st_case_239: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto st142 + } + case data[p] >= 140: + goto st142 + } + goto tr2 + st240: + if p++; p == pe { + goto _test_eof240 + } + st_case_240: + switch data[p] { + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto st142 + } + case data[p] >= 160: + goto st142 + } + goto tr2 + st241: + if p++; p == pe { + goto _test_eof241 + } + st_case_241: + if 144 <= data[p] && data[p] <= 156 { + goto tr148 + } + goto tr2 + st242: + if p++; p == pe { + goto _test_eof242 + } + st_case_242: + if 144 <= data[p] && data[p] <= 176 { + goto st142 + } + goto tr2 + st243: + if p++; p == pe { + goto _test_eof243 + } + st_case_243: + switch data[p] { + case 130: + goto tr148 + case 
135: + goto tr148 + case 149: + goto tr148 + case 164: + goto tr148 + case 166: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] < 170: + switch { + case data[p] > 147: + if 153 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] >= 138: + goto tr148 + } + case data[p] > 173: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 175: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st244: + if p++; p == pe { + goto _test_eof244 + } + st_case_244: + if data[p] == 142 { + goto tr148 + } + switch { + case data[p] > 137: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 133: + goto tr148 + } + goto tr2 + st245: + if p++; p == pe { + goto _test_eof245 + } + st_case_245: + if 137 <= data[p] { + goto tr2 + } + goto tr148 + st246: + if p++; p == pe { + goto _test_eof246 + } + st_case_246: + if 182 <= data[p] { + goto tr148 + } + goto tr2 + st247: + if p++; p == pe { + goto _test_eof247 + } + st_case_247: + if 170 <= data[p] { + goto tr2 + } + goto tr148 + st248: + if p++; p == pe { + goto _test_eof248 + } + st_case_248: + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st249: + if p++; p == pe { + goto _test_eof249 + } + st_case_249: + if data[p] == 159 { + goto tr2 + } + goto tr148 + st250: + if p++; p == pe { + goto _test_eof250 + } + st_case_250: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr2 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st251: + if p++; p == pe { + goto _test_eof251 + } + st_case_251: + switch data[p] { + case 167: + goto tr148 + case 173: + goto tr148 + } + switch { + case data[p] > 165: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st252: + if p++; p == pe { + goto _test_eof252 + } + st_case_252: + if data[p] == 191 { + goto st142 + } + 
switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr2 + } + case data[p] >= 168: + goto tr2 + } + goto tr148 + st253: + if p++; p == pe { + goto _test_eof253 + } + st_case_253: + switch { + case data[p] < 168: + switch { + case data[p] > 150: + if 160 <= data[p] && data[p] <= 166 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 174: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st254: + if p++; p == pe { + goto _test_eof254 + } + st_case_254: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st255: + if p++; p == pe { + goto _test_eof255 + } + st_case_255: + if data[p] == 175 { + goto tr148 + } + goto tr2 + st256: + if p++; p == pe { + goto _test_eof256 + } + st_case_256: + switch data[p] { + case 128: + goto st257 + case 130: + goto st258 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr2 + st257: + if p++; p == pe { + goto _test_eof257 + } + st_case_257: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto st142 + } + goto tr2 + st258: + if p++; p == pe { + goto _test_eof258 + } + st_case_258: + if 153 <= data[p] && data[p] <= 154 { + goto st142 + } + goto tr2 + st259: + if p++; p == pe { + goto _test_eof259 + } + st_case_259: + switch { + case data[p] > 173: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 133: + goto tr148 + } + goto tr2 + st260: + if p++; p == pe { + goto _test_eof260 + } + st_case_260: + switch { + case data[p] > 159: + if 187 <= 
data[p] { + goto tr2 + } + case data[p] >= 143: + goto tr2 + } + goto tr148 + st261: + if p++; p == pe { + goto _test_eof261 + } + st_case_261: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st264 + case 153: + goto st265 + case 154: + goto st266 + case 155: + goto st267 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st271 + case 161: + goto st272 + case 162: + goto st273 + case 163: + goto st274 + case 164: + goto st275 + case 165: + goto st276 + case 166: + goto st277 + case 167: + goto st278 + case 168: + goto st279 + case 169: + goto st280 + case 170: + goto st281 + case 171: + goto st282 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st285 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr2 + st262: + if p++; p == pe { + goto _test_eof262 + } + st_case_262: + if 141 <= data[p] { + goto tr2 + } + goto tr148 + st263: + if p++; p == pe { + goto _test_eof263 + } + st_case_263: + if 144 <= data[p] && data[p] <= 189 { + goto tr148 + } + goto tr2 + st264: + if p++; p == pe { + goto _test_eof264 + } + st_case_264: + switch { + case data[p] < 160: + if 141 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 169: + if 172 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st265: + if p++; p == pe { + goto _test_eof265 + } + st_case_265: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st266: + if p++; p == pe { + goto _test_eof266 + } + st_case_266: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto st142 + } + goto tr2 + 
st267: + if p++; p == pe { + goto _test_eof267 + } + st_case_267: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 176: + goto st142 + } + goto tr148 + st268: + if p++; p == pe { + goto _test_eof268 + } + st_case_268: + switch { + case data[p] > 159: + if 162 <= data[p] { + goto tr148 + } + case data[p] >= 151: + goto tr148 + } + goto tr2 + st269: + if p++; p == pe { + goto _test_eof269 + } + st_case_269: + switch { + case data[p] < 174: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] > 175: + if 184 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st270: + if p++; p == pe { + goto _test_eof270 + } + st_case_270: + if 183 <= data[p] { + goto tr148 + } + goto tr2 + st271: + if p++; p == pe { + goto _test_eof271 + } + st_case_271: + switch data[p] { + case 130: + goto st142 + case 134: + goto st142 + case 139: + goto st142 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr2 + } + case data[p] >= 163: + goto st142 + } + goto tr148 + st272: + if p++; p == pe { + goto _test_eof272 + } + st_case_272: + if 128 <= data[p] && data[p] <= 179 { + goto tr148 + } + goto tr2 + st273: + if p++; p == pe { + goto _test_eof273 + } + st_case_273: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto st142 + } + case data[p] > 179: + if 180 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st274: + if p++; p == pe { + goto _test_eof274 + } + st_case_274: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 178: + if 133 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + goto st142 + st275: + if p++; p == pe { + goto _test_eof275 + } + st_case_275: + switch { + case data[p] < 166: + if 138 <= data[p] && data[p] <= 165 { + goto tr148 + } + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + default: + 
goto st142 + } + goto tr2 + st276: + if p++; p == pe { + goto _test_eof276 + } + st_case_276: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto st142 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st277: + if p++; p == pe { + goto _test_eof277 + } + st_case_277: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 178: + if 179 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st278: + if p++; p == pe { + goto _test_eof278 + } + st_case_278: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto st142 + st279: + if p++; p == pe { + goto _test_eof279 + } + st_case_279: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st280: + if p++; p == pe { + goto _test_eof280 + } + st_case_280: + if data[p] == 131 { + goto st142 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st281: + if p++; p == pe { + goto _test_eof281 + } + st_case_281: + if data[p] == 176 { + goto st142 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto st142 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st282: + if p++; p == pe { + goto _test_eof282 + } + st_case_282: + if data[p] == 129 { + goto st142 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto st142 + } + case data[p] >= 178: + goto tr148 + } + default: + goto st142 + } + goto tr2 + 
st283: + if p++; p == pe { + goto _test_eof283 + } + st_case_283: + switch { + case data[p] < 145: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] < 168: + if 160 <= data[p] && data[p] <= 166 { + goto tr148 + } + case data[p] > 174: + if 176 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st284: + if p++; p == pe { + goto _test_eof284 + } + st_case_284: + if data[p] == 155 { + goto tr2 + } + if 166 <= data[p] && data[p] <= 175 { + goto tr2 + } + goto tr148 + st285: + if p++; p == pe { + goto _test_eof285 + } + st_case_285: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto st142 + } + default: + goto st142 + } + goto tr2 + st286: + if p++; p == pe { + goto _test_eof286 + } + st_case_286: + goto st145 + st287: + if p++; p == pe { + goto _test_eof287 + } + st_case_287: + switch data[p] { + case 158: + goto st288 + case 159: + goto st289 + } + if 160 <= data[p] { + goto tr2 + } + goto st145 + st288: + if p++; p == pe { + goto _test_eof288 + } + st_case_288: + if 164 <= data[p] && data[p] <= 175 { + goto tr2 + } + goto tr148 + st289: + if p++; p == pe { + goto _test_eof289 + } + st_case_289: + switch { + case data[p] > 138: + if 188 <= data[p] { + goto tr2 + } + case data[p] >= 135: + goto tr2 + } + goto tr148 + st290: + if p++; p == pe { + goto _test_eof290 + } + st_case_290: + switch data[p] { + case 172: + goto st291 + case 173: + goto st292 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st299 + case 185: + goto st300 + case 187: + goto st301 + case 188: + goto st302 + case 189: + goto st303 + case 190: + goto st304 + case 191: + goto st305 + } + if 176 <= 
data[p] && data[p] <= 186 { + goto st145 + } + goto tr2 + st291: + if p++; p == pe { + goto _test_eof291 + } + st_case_291: + switch data[p] { + case 158: + goto st142 + case 190: + goto tr148 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st292: + if p++; p == pe { + goto _test_eof292 + } + st_case_292: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 129 { + goto tr148 + } + case data[p] > 132: + if 134 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st293: + if p++; p == pe { + goto _test_eof293 + } + st_case_293: + if 178 <= data[p] { + goto tr2 + } + goto tr148 + st294: + if p++; p == pe { + goto _test_eof294 + } + st_case_294: + if 147 <= data[p] { + goto tr148 + } + goto tr2 + st295: + if p++; p == pe { + goto _test_eof295 + } + st_case_295: + if 190 <= data[p] { + goto tr2 + } + goto tr148 + st296: + if p++; p == pe { + goto _test_eof296 + } + st_case_296: + if 144 <= data[p] { + goto tr148 + } + goto tr2 + st297: + if p++; p == pe { + goto _test_eof297 + } + st_case_297: + if 144 <= data[p] && data[p] <= 145 { + goto tr2 + } + goto tr148 + st298: + if p++; p == pe { + goto _test_eof298 + } + st_case_298: + switch { + case data[p] > 175: + if 188 <= data[p] { + goto tr2 + } + case data[p] >= 136: + goto tr2 + } + goto tr148 + st299: + if p++; p == pe { + goto _test_eof299 + } + st_case_299: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto st142 + } + case data[p] >= 128: + goto st142 + } + goto tr2 + st300: + if p++; p == pe { + goto _test_eof300 + } + st_case_300: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + case data[p] >= 176: + goto tr148 + } + 
goto tr2 + st301: + if p++; p == pe { + goto _test_eof301 + } + st_case_301: + if data[p] == 191 { + goto st142 + } + if 189 <= data[p] { + goto tr2 + } + goto tr148 + st302: + if p++; p == pe { + goto _test_eof302 + } + st_case_302: + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr2 + st303: + if p++; p == pe { + goto _test_eof303 + } + st_case_303: + if 129 <= data[p] && data[p] <= 154 { + goto tr148 + } + goto tr2 + st304: + if p++; p == pe { + goto _test_eof304 + } + st_case_304: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto st142 + } + goto tr2 + st305: + if p++; p == pe { + goto _test_eof305 + } + st_case_305: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto st142 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st306: + if p++; p == pe { + goto _test_eof306 + } + st_case_306: + switch data[p] { + case 144: + goto st307 + case 145: + goto st338 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st369 + case 155: + goto st377 + case 157: + goto st380 + case 158: + goto st398 + case 159: + goto st403 + } + goto tr2 + st307: + if p++; p == pe { + goto _test_eof307 + } + st_case_307: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st312 + case 138: + goto st313 + case 139: + goto st314 + case 140: + goto st315 + case 141: + goto st316 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st319 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + 
goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st330 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st333 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr2 + st308: + if p++; p == pe { + goto _test_eof308 + } + st_case_308: + switch { + case data[p] < 168: + switch { + case data[p] > 139: + if 141 <= data[p] && data[p] <= 166 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 186: + switch { + case data[p] > 189: + if 191 <= data[p] { + goto tr148 + } + case data[p] >= 188: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st309: + if p++; p == pe { + goto _test_eof309 + } + st_case_309: + switch { + case data[p] > 143: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + goto tr148 + st310: + if p++; p == pe { + goto _test_eof310 + } + st_case_310: + if 187 <= data[p] { + goto tr2 + } + goto tr148 + st311: + if p++; p == pe { + goto _test_eof311 + } + st_case_311: + if 128 <= data[p] && data[p] <= 180 { + goto tr148 + } + goto tr2 + st312: + if p++; p == pe { + goto _test_eof312 + } + st_case_312: + if data[p] == 189 { + goto st142 + } + goto tr2 + st313: + if p++; p == pe { + goto _test_eof313 + } + st_case_313: + switch { + case data[p] > 156: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st314: + if p++; p == pe { + goto _test_eof314 + } + st_case_314: + if data[p] == 160 { + goto st142 + } + if 145 <= data[p] { + goto tr2 + } + goto tr148 + st315: + if p++; p == pe { + goto _test_eof315 + } + st_case_315: + switch { + case data[p] > 159: + if 176 <= data[p] { + goto tr148 + } + case data[p] 
>= 128: + goto tr148 + } + goto tr2 + st316: + if p++; p == pe { + goto _test_eof316 + } + st_case_316: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st317: + if p++; p == pe { + goto _test_eof317 + } + st_case_317: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st318: + if p++; p == pe { + goto _test_eof318 + } + st_case_318: + if data[p] == 144 { + goto tr2 + } + switch { + case data[p] > 135: + if 150 <= data[p] { + goto tr2 + } + case data[p] >= 132: + goto tr2 + } + goto tr148 + st319: + if p++; p == pe { + goto _test_eof319 + } + st_case_319: + if 158 <= data[p] { + goto tr2 + } + goto tr148 + st320: + if p++; p == pe { + goto _test_eof320 + } + st_case_320: + switch { + case data[p] > 167: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st321: + if p++; p == pe { + goto _test_eof321 + } + st_case_321: + if 164 <= data[p] { + goto tr2 + } + goto tr148 + st322: + if p++; p == pe { + goto _test_eof322 + } + st_case_322: + if 183 <= data[p] { + goto tr2 + } + goto tr148 + st323: + if p++; p == pe { + goto _test_eof323 + } + st_case_323: + switch { + case data[p] > 149: + if 160 <= data[p] && data[p] <= 167 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st324: + if p++; p == pe { + goto _test_eof324 + } + st_case_324: + switch data[p] { + case 136: + goto tr148 + case 188: + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 133 { + goto tr148 + } + case data[p] > 181: + switch { + case data[p] > 184: + if 191 <= data[p] { + goto tr148 + } + case data[p] >= 183: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st325: + if p++; p == pe { + goto _test_eof325 + } + st_case_325: + switch { + case data[p] > 159: + if 183 <= data[p] { + goto 
tr2 + } + case data[p] >= 150: + goto tr2 + } + goto tr148 + st326: + if p++; p == pe { + goto _test_eof326 + } + st_case_326: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + goto tr2 + st327: + if p++; p == pe { + goto _test_eof327 + } + st_case_327: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 160: + goto tr148 + } + goto tr2 + st328: + if p++; p == pe { + goto _test_eof328 + } + st_case_328: + switch { + case data[p] > 149: + if 160 <= data[p] && data[p] <= 185 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st329: + if p++; p == pe { + goto _test_eof329 + } + st_case_329: + switch { + case data[p] > 183: + if 190 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st330: + if p++; p == pe { + goto _test_eof330 + } + st_case_330: + switch data[p] { + case 128: + goto tr148 + case 191: + goto st142 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto st142 + } + default: + goto st142 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto st142 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st331: + if p++; p == pe { + goto _test_eof331 + } + st_case_331: + if 160 <= data[p] && data[p] <= 188 { + goto tr148 + } + goto tr2 + st332: + if p++; p == pe { + goto _test_eof332 + } + st_case_332: + if 128 <= data[p] && data[p] <= 156 { + goto tr148 + } + goto tr2 + st333: + if p++; p == pe { + goto _test_eof333 + } + st_case_333: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto st142 + } + default: + goto tr148 + } + goto tr2 
+ st334: + if p++; p == pe { + goto _test_eof334 + } + st_case_334: + if 128 <= data[p] && data[p] <= 181 { + goto tr148 + } + goto tr2 + st335: + if p++; p == pe { + goto _test_eof335 + } + st_case_335: + switch { + case data[p] > 149: + if 160 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st336: + if p++; p == pe { + goto _test_eof336 + } + st_case_336: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + goto tr2 + st337: + if p++; p == pe { + goto _test_eof337 + } + st_case_337: + if 128 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr2 + st338: + if p++; p == pe { + goto _test_eof338 + } + st_case_338: + switch data[p] { + case 128: + goto st339 + case 129: + goto st340 + case 130: + goto st341 + case 131: + goto st342 + case 132: + goto st343 + case 133: + goto st344 + case 134: + goto st345 + case 135: + goto st346 + case 136: + goto st347 + case 138: + goto st348 + case 139: + goto st349 + case 140: + goto st350 + case 141: + goto st351 + case 146: + goto st352 + case 147: + goto st353 + case 150: + goto st354 + case 151: + goto st355 + case 152: + goto st352 + case 153: + goto st356 + case 154: + goto st357 + case 156: + goto st358 + case 162: + goto st359 + case 163: + goto st360 + case 171: + goto st361 + } + goto tr2 + st339: + if p++; p == pe { + goto _test_eof339 + } + st_case_339: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto st142 + } + case data[p] > 183: + if 184 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st340: + if p++; p == pe { + goto _test_eof340 + } + st_case_340: + if 135 <= data[p] && data[p] <= 190 { + goto tr2 + } + goto st142 + st341: + if p++; p == pe { + goto _test_eof341 + } + st_case_341: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto st142 + st342: + if p++; p == pe { + 
goto _test_eof342 + } + st_case_342: + if 144 <= data[p] && data[p] <= 168 { + goto tr148 + } + goto tr2 + st343: + if p++; p == pe { + goto _test_eof343 + } + st_case_343: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto st142 + } + case data[p] > 166: + if 167 <= data[p] && data[p] <= 180 { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st344: + if p++; p == pe { + goto _test_eof344 + } + st_case_344: + switch data[p] { + case 179: + goto st142 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr2 + st345: + if p++; p == pe { + goto _test_eof345 + } + st_case_345: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto st142 + } + case data[p] > 178: + if 179 <= data[p] { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st346: + if p++; p == pe { + goto _test_eof346 + } + st_case_346: + switch data[p] { + case 154: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 137: + if 141 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto st142 + st347: + if p++; p == pe { + goto _test_eof347 + } + st_case_347: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto st142 + } + default: + goto tr148 + } + goto tr2 + st348: + if p++; p == pe { + goto _test_eof348 + } + st_case_348: + if data[p] == 136 { + goto tr148 + } + switch { + case data[p] < 143: + switch { + case data[p] > 134: + if 138 <= data[p] && data[p] <= 141 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 157: + switch { + case data[p] > 168: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 159: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st349: + if p++; p == pe { + goto _test_eof349 + } + st_case_349: + switch { + case data[p] > 170: + 
if 171 <= data[p] { + goto tr2 + } + case data[p] >= 159: + goto st142 + } + goto tr148 + st350: + if p++; p == pe { + goto _test_eof350 + } + st_case_350: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto st142 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto st142 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st351: + if p++; p == pe { + goto _test_eof351 + } + st_case_351: + switch data[p] { + case 144: + goto tr148 + case 151: + goto st142 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto st142 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto st142 + } + default: + goto st142 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto st142 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto st142 + } + default: + goto st142 + } + default: + goto tr148 + } + goto tr2 + st352: + if p++; p == pe { + goto _test_eof352 + } + st_case_352: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st353: + if p++; p == pe { + goto _test_eof353 + } + st_case_353: + if data[p] == 134 { + goto tr2 + } + switch { + case data[p] > 135: + if 136 <= data[p] { + goto tr2 + } + case data[p] >= 132: + goto tr148 + } + goto st142 + st354: + if p++; p == pe { + goto _test_eof354 + } + st_case_354: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= 
data[p] { + goto st142 + } + default: + goto st142 + } + goto tr2 + st355: + if p++; p == pe { + goto _test_eof355 + } + st_case_355: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr2 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + goto st142 + st356: + if p++; p == pe { + goto _test_eof356 + } + st_case_356: + if data[p] == 132 { + goto tr148 + } + if 129 <= data[p] { + goto tr2 + } + goto st142 + st357: + if p++; p == pe { + goto _test_eof357 + } + st_case_357: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st358: + if p++; p == pe { + goto _test_eof358 + } + st_case_358: + if 157 <= data[p] && data[p] <= 171 { + goto st142 + } + goto tr2 + st359: + if p++; p == pe { + goto _test_eof359 + } + st_case_359: + if 160 <= data[p] { + goto tr148 + } + goto tr2 + st360: + if p++; p == pe { + goto _test_eof360 + } + st_case_360: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr2 + } + goto tr148 + st361: + if p++; p == pe { + goto _test_eof361 + } + st_case_361: + if 128 <= data[p] && data[p] <= 184 { + goto tr148 + } + goto tr2 + st362: + if p++; p == pe { + goto _test_eof362 + } + st_case_362: + switch data[p] { + case 128: + goto st147 + case 142: + goto st363 + case 145: + goto st364 + case 149: + goto st365 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 141 { + goto st145 + } + case data[p] > 146: + if 147 <= data[p] && data[p] <= 148 { + goto st145 + } + default: + goto st147 + } + goto tr2 + st363: + if p++; p == pe { + goto _test_eof363 + } + st_case_363: + if 154 <= data[p] { + goto tr2 + } + goto tr148 + st364: + if p++; p == pe { + goto _test_eof364 + } + st_case_364: + if 175 <= data[p] { + goto tr2 + } + goto tr148 + st365: + if p++; p == pe { + goto _test_eof365 + } + st_case_365: + if 132 <= data[p] { + goto 
tr2 + } + goto tr148 + st366: + if p++; p == pe { + goto _test_eof366 + } + st_case_366: + switch data[p] { + case 128: + goto st147 + case 144: + goto st364 + } + if 129 <= data[p] && data[p] <= 143 { + goto st145 + } + goto tr2 + st367: + if p++; p == pe { + goto _test_eof367 + } + st_case_367: + switch data[p] { + case 144: + goto st147 + case 153: + goto st368 + } + if 145 <= data[p] && data[p] <= 152 { + goto st145 + } + goto tr2 + st368: + if p++; p == pe { + goto _test_eof368 + } + st_case_368: + if 135 <= data[p] { + goto tr2 + } + goto tr148 + st369: + if p++; p == pe { + goto _test_eof369 + } + st_case_369: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st326 + case 171: + goto st371 + case 172: + goto st372 + case 173: + goto st373 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st375 + case 190: + goto st376 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr2 + st370: + if p++; p == pe { + goto _test_eof370 + } + st_case_370: + if 185 <= data[p] { + goto tr2 + } + goto tr148 + st371: + if p++; p == pe { + goto _test_eof371 + } + st_case_371: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto st142 + } + case data[p] >= 144: + goto tr148 + } + goto tr2 + st372: + if p++; p == pe { + goto _test_eof372 + } + st_case_372: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto st142 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st373: + if p++; p == pe { + goto _test_eof373 + } + st_case_373: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 183: + if 189 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st374: + if p++; p == pe { + goto _test_eof374 + } + st_case_374: + if 144 <= data[p] { + goto tr2 + } + goto tr148 + st375: + if p++; p == pe { + goto _test_eof375 + } + st_case_375: + switch { + case data[p] < 145: + if 133 <= data[p] 
&& data[p] <= 143 { + goto tr2 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st376: + if p++; p == pe { + goto _test_eof376 + } + st_case_376: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto st142 + } + goto tr2 + st377: + if p++; p == pe { + goto _test_eof377 + } + st_case_377: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st379 + } + goto tr2 + st378: + if p++; p == pe { + goto _test_eof378 + } + st_case_378: + switch { + case data[p] > 175: + if 189 <= data[p] { + goto tr2 + } + case data[p] >= 171: + goto tr2 + } + goto tr148 + st379: + if p++; p == pe { + goto _test_eof379 + } + st_case_379: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto st142 + } + case data[p] >= 157: + goto st142 + } + default: + goto tr148 + } + goto tr2 + st380: + if p++; p == pe { + goto _test_eof380 + } + st_case_380: + switch data[p] { + case 133: + goto st381 + case 134: + goto st382 + case 137: + goto st383 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st394 + case 168: + goto st395 + case 169: + goto st396 + case 170: + goto st397 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr2 + st381: + if p++; p == pe { + goto _test_eof381 + } + st_case_381: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto st142 + } + case data[p] >= 165: + goto st142 + } + goto tr2 + st382: + if p++; p == pe { + goto _test_eof382 + } + st_case_382: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { 
+ goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto st142 + st383: + if p++; p == pe { + goto _test_eof383 + } + st_case_383: + if 130 <= data[p] && data[p] <= 132 { + goto st142 + } + goto tr2 + st384: + if p++; p == pe { + goto _test_eof384 + } + st_case_384: + if data[p] == 149 { + goto tr2 + } + goto tr148 + st385: + if p++; p == pe { + goto _test_eof385 + } + st_case_385: + switch data[p] { + case 157: + goto tr2 + case 173: + goto tr2 + case 186: + goto tr2 + case 188: + goto tr2 + } + switch { + case data[p] < 163: + if 160 <= data[p] && data[p] <= 161 { + goto tr2 + } + case data[p] > 164: + if 167 <= data[p] && data[p] <= 168 { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st386: + if p++; p == pe { + goto _test_eof386 + } + st_case_386: + if data[p] == 132 { + goto tr2 + } + goto tr148 + st387: + if p++; p == pe { + goto _test_eof387 + } + st_case_387: + switch data[p] { + case 134: + goto tr2 + case 149: + goto tr2 + case 157: + goto tr2 + case 186: + goto tr2 + } + switch { + case data[p] > 140: + if 191 <= data[p] { + goto tr2 + } + case data[p] >= 139: + goto tr2 + } + goto tr148 + st388: + if p++; p == pe { + goto _test_eof388 + } + st_case_388: + if data[p] == 134 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 144: + if 146 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st389: + if p++; p == pe { + goto _test_eof389 + } + st_case_389: + if 166 <= data[p] && data[p] <= 167 { + goto tr2 + } + goto tr148 + st390: + if p++; p == pe { + goto _test_eof390 + } + st_case_390: + switch data[p] { + case 129: + goto tr2 + case 155: + goto tr2 + case 187: + goto tr2 + } + goto tr148 + st391: + if p++; p == pe { + goto _test_eof391 + } + st_case_391: + switch data[p] { + case 149: + goto tr2 + case 181: + goto tr2 + } + goto tr148 + st392: + if p++; p == pe { + goto _test_eof392 + } + st_case_392: 
+ switch data[p] { + case 143: + goto tr2 + case 175: + goto tr2 + } + goto tr148 + st393: + if p++; p == pe { + goto _test_eof393 + } + st_case_393: + switch data[p] { + case 137: + goto tr2 + case 169: + goto tr2 + } + goto tr148 + st394: + if p++; p == pe { + goto _test_eof394 + } + st_case_394: + if data[p] == 131 { + goto tr2 + } + if 140 <= data[p] { + goto tr2 + } + goto tr148 + st395: + if p++; p == pe { + goto _test_eof395 + } + st_case_395: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto st142 + } + case data[p] >= 128: + goto st142 + } + goto tr2 + st396: + if p++; p == pe { + goto _test_eof396 + } + st_case_396: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto st142 + st397: + if p++; p == pe { + goto _test_eof397 + } + st_case_397: + if data[p] == 132 { + goto st142 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto st142 + } + case data[p] >= 155: + goto st142 + } + goto tr2 + st398: + if p++; p == pe { + goto _test_eof398 + } + st_case_398: + switch data[p] { + case 160: + goto st147 + case 163: + goto st399 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr2 + st399: + if p++; p == pe { + goto _test_eof399 + } + st_case_399: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr2 + } + default: + goto st142 + } + goto tr148 + st400: + if p++; p == pe { + goto _test_eof400 + } + st_case_400: + switch data[p] { + case 164: + goto tr148 + case 167: + goto tr148 + case 185: + goto tr148 + case 187: + goto tr148 + } + switch { + case data[p] < 161: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 162: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 
183 { + goto tr148 + } + case data[p] >= 169: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st401: + if p++; p == pe { + goto _test_eof401 + } + st_case_401: + switch data[p] { + case 130: + goto tr148 + case 135: + goto tr148 + case 137: + goto tr148 + case 139: + goto tr148 + case 148: + goto tr148 + case 151: + goto tr148 + case 153: + goto tr148 + case 155: + goto tr148 + case 157: + goto tr148 + case 159: + goto tr148 + case 164: + goto tr148 + case 190: + goto tr148 + } + switch { + case data[p] < 167: + switch { + case data[p] < 145: + if 141 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] > 146: + if 161 <= data[p] && data[p] <= 162 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] < 180: + if 172 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] > 183: + if 185 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st402: + if p++; p == pe { + goto _test_eof402 + } + st_case_402: + switch { + case data[p] < 161: + switch { + case data[p] > 137: + if 139 <= data[p] && data[p] <= 155 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 169: + if 171 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 165: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st403: + if p++; p == pe { + goto _test_eof403 + } + st_case_403: + switch data[p] { + case 132: + goto st404 + case 133: + goto st405 + case 134: + goto st406 + } + goto tr2 + st404: + if p++; p == pe { + goto _test_eof404 + } + st_case_404: + if 176 <= data[p] { + goto tr148 + } + goto tr2 + st405: + if p++; p == pe { + goto _test_eof405 + } + st_case_405: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] >= 138: + goto tr2 + } + goto tr148 + st406: + if p++; p == pe { + goto _test_eof406 + } + st_case_406: + if 138 <= data[p] { + 
goto tr2 + } + goto tr148 + st407: + if p++; p == pe { + goto _test_eof407 + } + st_case_407: + if data[p] == 160 { + goto st408 + } + goto tr2 + st408: + if p++; p == pe { + goto _test_eof408 + } + st_case_408: + switch data[p] { + case 128: + goto st409 + case 129: + goto st410 + case 132: + goto st149 + case 135: + goto st412 + } + if 133 <= data[p] && data[p] <= 134 { + goto st411 + } + goto tr2 + st409: + if p++; p == pe { + goto _test_eof409 + } + st_case_409: + if data[p] == 129 { + goto st142 + } + if 160 <= data[p] { + goto st142 + } + goto tr2 + st410: + if p++; p == pe { + goto _test_eof410 + } + st_case_410: + if 192 <= data[p] { + goto tr2 + } + goto st142 + st411: + if p++; p == pe { + goto _test_eof411 + } + st_case_411: + goto st142 + st412: + if p++; p == pe { + goto _test_eof412 + } + st_case_412: + if 176 <= data[p] { + goto tr2 + } + goto st142 +tr421: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4868 + st4868: + if p++; p == pe { + goto _test_eof4868 + } + st_case_4868: +//line segment_words_prod.go:19436 + switch data[p] { + case 39: + goto st413 + case 44: + goto st413 + case 46: + goto st413 + case 59: + goto st413 + case 95: + goto tr571 + case 194: + goto st1312 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st1313 + case 205: + goto st1314 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st1315 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1316 + case 215: + goto st1317 + case 216: + goto st1318 + case 217: + goto st1319 + case 219: + goto st1320 + case 220: + goto st1321 + case 221: + goto st1322 + case 222: + goto st1323 + case 223: + goto st1324 + case 224: + goto st1325 + case 225: + goto st1357 + case 226: + goto st1379 + case 227: + goto st1386 + case 234: + goto st1389 + case 237: + goto st287 + case 239: + goto st1405 + case 240: + goto 
st1413 + case 243: + goto st1455 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st413: + if p++; p == pe { + goto _test_eof413 + } + st_case_413: + switch data[p] { + case 194: + goto st414 + case 204: + goto st415 + case 205: + goto st416 + case 210: + goto st417 + case 214: + goto st418 + case 215: + goto st419 + case 216: + goto st420 + case 217: + goto st421 + case 219: + goto st422 + case 220: + goto st423 + case 221: + goto st424 + case 222: + goto st425 + case 223: + goto st426 + case 224: + goto st427 + case 225: + goto st456 + case 226: + goto st481 + case 227: + goto st488 + case 234: + goto st491 + case 239: + goto st508 + case 240: + goto st512 + case 243: + goto st557 + } + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + goto tr420 + st414: + if p++; p == pe { + goto _test_eof414 + } + st_case_414: + if data[p] == 173 { + goto st413 + } + goto tr420 + st415: + if p++; p == pe { + goto _test_eof415 + } + st_case_415: + if data[p] <= 127 { + goto tr420 + } + goto st413 + st416: + if p++; p == pe { + goto _test_eof416 + } + st_case_416: + if 176 <= data[p] { + goto tr420 + } + goto st413 + st417: + if p++; p == pe { + goto _test_eof417 + } + st_case_417: + if 131 <= data[p] && data[p] <= 137 { + goto st413 + } + goto tr420 + st418: + if p++; p == pe { + goto _test_eof418 + } + st_case_418: + if data[p] == 191 { + goto st413 + } + if 145 <= data[p] && data[p] <= 189 { + goto st413 + } + goto tr420 + st419: + if p++; p == pe { + goto _test_eof419 + } + st_case_419: + if data[p] == 135 { + goto st413 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto st413 + } + case data[p] >= 129: + goto st413 + } + goto tr420 + 
st420: + if p++; p == pe { + goto _test_eof420 + } + st_case_420: + if data[p] == 156 { + goto st413 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st421: + if p++; p == pe { + goto _test_eof421 + } + st_case_421: + switch data[p] { + case 171: + goto tr421 + case 176: + goto st413 + } + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 169 { + goto tr421 + } + case data[p] >= 139: + goto st413 + } + goto tr420 + st422: + if p++; p == pe { + goto _test_eof422 + } + st_case_422: + switch { + case data[p] < 167: + switch { + case data[p] > 157: + if 159 <= data[p] && data[p] <= 164 { + goto st413 + } + case data[p] >= 150: + goto st413 + } + case data[p] > 168: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 170: + goto st413 + } + default: + goto st413 + } + goto tr420 + st423: + if p++; p == pe { + goto _test_eof423 + } + st_case_423: + switch data[p] { + case 143: + goto st413 + case 145: + goto st413 + } + if 176 <= data[p] { + goto st413 + } + goto tr420 + st424: + if p++; p == pe { + goto _test_eof424 + } + st_case_424: + if 139 <= data[p] { + goto tr420 + } + goto st413 + st425: + if p++; p == pe { + goto _test_eof425 + } + st_case_425: + if 166 <= data[p] && data[p] <= 176 { + goto st413 + } + goto tr420 + st426: + if p++; p == pe { + goto _test_eof426 + } + st_case_426: + switch { + case data[p] > 137: + if 171 <= data[p] && data[p] <= 179 { + goto st413 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st427: + if p++; p == pe { + goto _test_eof427 + } + st_case_427: + switch data[p] { + case 160: + goto st428 + case 161: + goto st429 + case 163: + goto st430 + case 164: + goto st431 + case 165: + goto st432 + case 167: + goto st434 + case 169: + goto st435 + case 171: + goto st436 + case 173: + goto st438 + case 174: + goto st439 + case 175: + goto st440 + case 176: + goto 
st441 + case 177: + goto st442 + case 179: + goto st443 + case 180: + goto st444 + case 181: + goto st445 + case 182: + goto st446 + case 183: + goto st447 + case 184: + goto st448 + case 185: + goto st449 + case 186: + goto st450 + case 187: + goto st451 + case 188: + goto st452 + case 189: + goto st453 + case 190: + goto st454 + case 191: + goto st455 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st437 + } + case data[p] >= 166: + goto st433 + } + goto tr420 + st428: + if p++; p == pe { + goto _test_eof428 + } + st_case_428: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto st413 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto st413 + } + case data[p] >= 165: + goto st413 + } + default: + goto st413 + } + goto tr420 + st429: + if p++; p == pe { + goto _test_eof429 + } + st_case_429: + if 153 <= data[p] && data[p] <= 155 { + goto st413 + } + goto tr420 + st430: + if p++; p == pe { + goto _test_eof430 + } + st_case_430: + if 163 <= data[p] { + goto st413 + } + goto tr420 + st431: + if p++; p == pe { + goto _test_eof431 + } + st_case_431: + if data[p] == 189 { + goto tr420 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr420 + } + goto st413 + st432: + if p++; p == pe { + goto _test_eof432 + } + st_case_432: + if data[p] == 144 { + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st433: + if p++; p == pe { + goto _test_eof433 + } + st_case_433: + if data[p] == 188 { + goto st413 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st413 + } + case data[p] >= 129: + goto st413 + } + goto tr420 + st434: + if p++; p == pe { + goto _test_eof434 + } + st_case_434: + switch { + case data[p] < 152: + 
switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 150 { + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st435: + if p++; p == pe { + goto _test_eof435 + } + st_case_435: + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr420 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 178: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto st413 + st436: + if p++; p == pe { + goto _test_eof436 + } + st_case_436: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + } + switch { + case data[p] < 164: + if 142 <= data[p] && data[p] <= 161 { + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st437: + if p++; p == pe { + goto _test_eof437 + } + st_case_437: + if data[p] == 188 { + goto st413 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st413 + } + case data[p] >= 129: + goto st413 + } + goto tr420 + st438: + if p++; p == pe { + goto _test_eof438 + } + st_case_438: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + case data[p] > 141: + switch { + case data[p] < 162: + if 150 <= data[p] && data[p] <= 151 { + goto st413 + } + case data[p] > 163: + 
if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + default: + goto st413 + } + default: + goto st413 + } + goto tr420 + st439: + if p++; p == pe { + goto _test_eof439 + } + st_case_439: + if data[p] == 130 { + goto st413 + } + if 190 <= data[p] && data[p] <= 191 { + goto st413 + } + goto tr420 + st440: + if p++; p == pe { + goto _test_eof440 + } + st_case_440: + if data[p] == 151 { + goto st413 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto st413 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto st413 + } + default: + goto st413 + } + goto tr420 + st441: + if p++; p == pe { + goto _test_eof441 + } + st_case_441: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st442: + if p++; p == pe { + goto _test_eof442 + } + st_case_442: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 164: + switch { + case data[p] > 148: + if 151 <= data[p] && data[p] <= 161 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st443: + if p++; p == pe { + goto _test_eof443 + } + st_case_443: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + case data[p] > 141: + switch { + case data[p] < 162: + if 149 <= data[p] && data[p] <= 150 { + goto st413 + } + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + default: + goto st413 + } + default: + goto st413 + } + goto tr420 + st444: + if p++; p == pe { + goto _test_eof444 + } + st_case_444: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st413 + } + 
case data[p] >= 129: + goto st413 + } + goto tr420 + st445: + if p++; p == pe { + goto _test_eof445 + } + st_case_445: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 164: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 161 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st446: + if p++; p == pe { + goto _test_eof446 + } + st_case_446: + if 130 <= data[p] && data[p] <= 131 { + goto st413 + } + goto tr420 + st447: + if p++; p == pe { + goto _test_eof447 + } + st_case_447: + switch data[p] { + case 138: + goto st413 + case 150: + goto st413 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto st413 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto st413 + } + case data[p] >= 166: + goto tr421 + } + default: + goto st413 + } + goto tr420 + st448: + if p++; p == pe { + goto _test_eof448 + } + st_case_448: + if data[p] == 177 { + goto st413 + } + if 180 <= data[p] && data[p] <= 186 { + goto st413 + } + goto tr420 + st449: + if p++; p == pe { + goto _test_eof449 + } + st_case_449: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto st413 + } + goto tr420 + st450: + if p++; p == pe { + goto _test_eof450 + } + st_case_450: + if data[p] == 177 { + goto st413 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto st413 + } + case data[p] >= 180: + goto st413 + } + goto tr420 + st451: + if p++; p == pe { + goto _test_eof451 + } + st_case_451: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto st413 + } + goto tr420 + st452: + if p++; p == pe { + goto _test_eof452 + } + 
st_case_452: + switch data[p] { + case 181: + goto st413 + case 183: + goto st413 + case 185: + goto st413 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto st413 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto st413 + } + default: + goto tr421 + } + goto tr420 + st453: + if p++; p == pe { + goto _test_eof453 + } + st_case_453: + if 177 <= data[p] && data[p] <= 191 { + goto st413 + } + goto tr420 + st454: + if p++; p == pe { + goto _test_eof454 + } + st_case_454: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto st413 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto st413 + } + case data[p] >= 141: + goto st413 + } + default: + goto st413 + } + goto tr420 + st455: + if p++; p == pe { + goto _test_eof455 + } + st_case_455: + if data[p] == 134 { + goto st413 + } + goto tr420 + st456: + if p++; p == pe { + goto _test_eof456 + } + st_case_456: + switch data[p] { + case 128: + goto st457 + case 129: + goto st458 + case 130: + goto st459 + case 141: + goto st460 + case 156: + goto st461 + case 157: + goto st462 + case 158: + goto st463 + case 159: + goto st464 + case 160: + goto st465 + case 162: + goto st466 + case 164: + goto st467 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st470 + case 169: + goto st471 + case 170: + goto st472 + case 172: + goto st473 + case 173: + goto st474 + case 174: + goto st475 + case 175: + goto st476 + case 176: + goto st477 + case 177: + goto st478 + case 179: + goto st479 + case 183: + goto st480 + } + goto tr420 + st457: + if p++; p == pe { + goto _test_eof457 + } + st_case_457: + if 171 <= data[p] && data[p] <= 190 { + goto st413 + } + goto tr420 + st458: + if p++; p == pe { + goto _test_eof458 + } + st_case_458: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto st413 + } + case data[p] >= 128: + goto tr421 
+ } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto st413 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto st413 + } + default: + goto st413 + } + default: + goto st413 + } + goto tr420 + st459: + if p++; p == pe { + goto _test_eof459 + } + st_case_459: + if data[p] == 143 { + goto st413 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto st413 + } + case data[p] > 153: + if 154 <= data[p] && data[p] <= 157 { + goto st413 + } + default: + goto tr421 + } + goto tr420 + st460: + if p++; p == pe { + goto _test_eof460 + } + st_case_460: + if 157 <= data[p] && data[p] <= 159 { + goto st413 + } + goto tr420 + st461: + if p++; p == pe { + goto _test_eof461 + } + st_case_461: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto st413 + } + case data[p] >= 146: + goto st413 + } + goto tr420 + st462: + if p++; p == pe { + goto _test_eof462 + } + st_case_462: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto st413 + } + case data[p] >= 146: + goto st413 + } + goto tr420 + st463: + if p++; p == pe { + goto _test_eof463 + } + st_case_463: + if 180 <= data[p] { + goto st413 + } + goto tr420 + st464: + if p++; p == pe { + goto _test_eof464 + } + st_case_464: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st465: + if p++; p == pe { + goto _test_eof465 + } + st_case_465: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 139: + goto st413 + } + goto tr420 + st466: + if p++; p == pe { + goto _test_eof466 + } + st_case_466: + if data[p] == 169 { + goto st413 + } + goto tr420 + st467: + if p++; p == pe { + goto _test_eof467 + } + st_case_467: + 
switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto st413 + } + case data[p] >= 160: + goto st413 + } + goto tr420 + st468: + if p++; p == pe { + goto _test_eof468 + } + st_case_468: + if 134 <= data[p] && data[p] <= 143 { + goto tr421 + } + goto tr2 + st469: + if p++; p == pe { + goto _test_eof469 + } + st_case_469: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + goto tr2 + st470: + if p++; p == pe { + goto _test_eof470 + } + st_case_470: + if 151 <= data[p] && data[p] <= 155 { + goto st413 + } + goto tr420 + st471: + if p++; p == pe { + goto _test_eof471 + } + st_case_471: + if data[p] == 191 { + goto st413 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto st413 + } + case data[p] >= 149: + goto st413 + } + goto tr420 + st472: + if p++; p == pe { + goto _test_eof472 + } + st_case_472: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto st413 + } + default: + goto tr421 + } + goto tr420 + st473: + if p++; p == pe { + goto _test_eof473 + } + st_case_473: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st474: + if p++; p == pe { + goto _test_eof474 + } + st_case_474: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto st413 + st475: + if p++; p == pe { + goto _test_eof475 + } + st_case_475: + switch { + case data[p] < 161: + if 128 <= data[p] && data[p] <= 130 { + goto st413 + } + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + default: + goto st413 + } + goto tr420 + st476: + if p++; p == pe { + goto _test_eof476 + } + st_case_476: + if 166 <= data[p] && data[p] <= 179 { + goto 
st413 + } + goto tr420 + st477: + if p++; p == pe { + goto _test_eof477 + } + st_case_477: + if 164 <= data[p] && data[p] <= 183 { + goto st413 + } + goto tr420 + st478: + if p++; p == pe { + goto _test_eof478 + } + st_case_478: + switch { + case data[p] > 137: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st479: + if p++; p == pe { + goto _test_eof479 + } + st_case_479: + if data[p] == 173 { + goto st413 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto st413 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto st413 + } + case data[p] >= 178: + goto st413 + } + default: + goto st413 + } + goto tr420 + st480: + if p++; p == pe { + goto _test_eof480 + } + st_case_480: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st481: + if p++; p == pe { + goto _test_eof481 + } + st_case_481: + switch data[p] { + case 128: + goto st482 + case 129: + goto st483 + case 131: + goto st484 + case 179: + goto st485 + case 181: + goto st486 + case 183: + goto st487 + } + goto tr420 + st482: + if p++; p == pe { + goto _test_eof482 + } + st_case_482: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto st413 + } + case data[p] >= 140: + goto st413 + } + goto tr420 + st483: + if p++; p == pe { + goto _test_eof483 + } + st_case_483: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto st413 + } + case data[p] >= 160: + goto st413 + } + goto tr420 + st484: + if p++; p == pe { + goto _test_eof484 + } + st_case_484: + if 144 <= data[p] && data[p] <= 176 { + goto st413 + } + goto tr420 + st485: + if p++; p == pe { + goto _test_eof485 + } + st_case_485: + if 175 <= data[p] && data[p] <= 177 { + goto st413 + } + goto tr420 + st486: + if p++; p == pe { + goto _test_eof486 + } + st_case_486: + if 
data[p] == 191 { + goto st413 + } + goto tr420 + st487: + if p++; p == pe { + goto _test_eof487 + } + st_case_487: + if 160 <= data[p] && data[p] <= 191 { + goto st413 + } + goto tr420 + st488: + if p++; p == pe { + goto _test_eof488 + } + st_case_488: + switch data[p] { + case 128: + goto st489 + case 130: + goto st490 + } + goto tr420 + st489: + if p++; p == pe { + goto _test_eof489 + } + st_case_489: + if 170 <= data[p] && data[p] <= 175 { + goto st413 + } + goto tr420 + st490: + if p++; p == pe { + goto _test_eof490 + } + st_case_490: + if 153 <= data[p] && data[p] <= 154 { + goto st413 + } + goto tr420 + st491: + if p++; p == pe { + goto _test_eof491 + } + st_case_491: + switch data[p] { + case 152: + goto st492 + case 153: + goto st493 + case 154: + goto st494 + case 155: + goto st495 + case 160: + goto st496 + case 162: + goto st497 + case 163: + goto st498 + case 164: + goto st499 + case 165: + goto st500 + case 166: + goto st501 + case 167: + goto st502 + case 168: + goto st503 + case 169: + goto st504 + case 170: + goto st505 + case 171: + goto st506 + case 175: + goto st507 + } + goto tr420 + st492: + if p++; p == pe { + goto _test_eof492 + } + st_case_492: + if 160 <= data[p] && data[p] <= 169 { + goto tr421 + } + goto tr420 + st493: + if p++; p == pe { + goto _test_eof493 + } + st_case_493: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto st413 + } + case data[p] >= 175: + goto st413 + } + goto tr420 + st494: + if p++; p == pe { + goto _test_eof494 + } + st_case_494: + if 158 <= data[p] && data[p] <= 159 { + goto st413 + } + goto tr420 + st495: + if p++; p == pe { + goto _test_eof495 + } + st_case_495: + if 176 <= data[p] && data[p] <= 177 { + goto st413 + } + goto tr420 + st496: + if p++; p == pe { + goto _test_eof496 + } + st_case_496: + switch data[p] { + case 130: + goto st413 + case 134: + goto st413 + case 139: + goto st413 + } + if 163 <= data[p] && data[p] <= 167 { + goto st413 + } + goto tr420 + st497: + if p++; 
p == pe { + goto _test_eof497 + } + st_case_497: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st498: + if p++; p == pe { + goto _test_eof498 + } + st_case_498: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto st413 + st499: + if p++; p == pe { + goto _test_eof499 + } + st_case_499: + switch { + case data[p] > 137: + if 166 <= data[p] && data[p] <= 173 { + goto st413 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st500: + if p++; p == pe { + goto _test_eof500 + } + st_case_500: + if 135 <= data[p] && data[p] <= 147 { + goto st413 + } + goto tr420 + st501: + if p++; p == pe { + goto _test_eof501 + } + st_case_501: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st502: + if p++; p == pe { + goto _test_eof502 + } + st_case_502: + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st503: + if p++; p == pe { + goto _test_eof503 + } + st_case_503: + if 169 <= data[p] && data[p] <= 182 { + goto st413 + } + goto tr420 + st504: + if p++; p == pe { + goto _test_eof504 + } + st_case_504: + if data[p] == 131 { + goto st413 + } + switch { + case data[p] < 144: + if 140 <= data[p] && data[p] <= 141 { + goto st413 + } + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto st413 + } + default: + goto tr421 + } + goto tr420 + st505: + if p++; p 
== pe { + goto _test_eof505 + } + st_case_505: + if data[p] == 176 { + goto st413 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto st413 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto st413 + } + default: + goto st413 + } + goto tr420 + st506: + if p++; p == pe { + goto _test_eof506 + } + st_case_506: + if data[p] == 129 { + goto st413 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto st413 + } + case data[p] >= 171: + goto st413 + } + goto tr420 + st507: + if p++; p == pe { + goto _test_eof507 + } + st_case_507: + switch { + case data[p] < 172: + if 163 <= data[p] && data[p] <= 170 { + goto st413 + } + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + default: + goto st413 + } + goto tr420 + st508: + if p++; p == pe { + goto _test_eof508 + } + st_case_508: + switch data[p] { + case 172: + goto st509 + case 184: + goto st510 + case 187: + goto st486 + case 190: + goto st494 + case 191: + goto st511 + } + goto tr420 + st509: + if p++; p == pe { + goto _test_eof509 + } + st_case_509: + if data[p] == 158 { + goto st413 + } + goto tr420 + st510: + if p++; p == pe { + goto _test_eof510 + } + st_case_510: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st511: + if p++; p == pe { + goto _test_eof511 + } + st_case_511: + if 185 <= data[p] && data[p] <= 187 { + goto st413 + } + goto tr420 + st512: + if p++; p == pe { + goto _test_eof512 + } + st_case_512: + switch data[p] { + case 144: + goto st513 + case 145: + goto st519 + case 150: + goto st540 + case 155: + goto st545 + case 157: + goto st547 + case 158: + goto st555 + } + goto tr420 + st513: + if p++; p == pe { + goto _test_eof513 + } + st_case_513: + switch data[p] { + case 135: + goto st514 + case 139: + goto st515 + case 141: + goto st516 + case 146: + goto st492 + case 168: + goto st517 + case 171: 
+ goto st518 + } + goto tr420 + st514: + if p++; p == pe { + goto _test_eof514 + } + st_case_514: + if data[p] == 189 { + goto st413 + } + goto tr420 + st515: + if p++; p == pe { + goto _test_eof515 + } + st_case_515: + if data[p] == 160 { + goto st413 + } + goto tr420 + st516: + if p++; p == pe { + goto _test_eof516 + } + st_case_516: + if 182 <= data[p] && data[p] <= 186 { + goto st413 + } + goto tr420 + st517: + if p++; p == pe { + goto _test_eof517 + } + st_case_517: + if data[p] == 191 { + goto st413 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st413 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto st413 + } + case data[p] >= 140: + goto st413 + } + default: + goto st413 + } + goto tr420 + st518: + if p++; p == pe { + goto _test_eof518 + } + st_case_518: + if 165 <= data[p] && data[p] <= 166 { + goto st413 + } + goto tr420 + st519: + if p++; p == pe { + goto _test_eof519 + } + st_case_519: + switch data[p] { + case 128: + goto st520 + case 129: + goto st521 + case 130: + goto st522 + case 131: + goto st523 + case 132: + goto st524 + case 133: + goto st525 + case 134: + goto st526 + case 135: + goto st527 + case 136: + goto st528 + case 139: + goto st529 + case 140: + goto st530 + case 141: + goto st531 + case 146: + goto st532 + case 147: + goto st533 + case 150: + goto st534 + case 151: + goto st535 + case 152: + goto st532 + case 153: + goto st536 + case 154: + goto st537 + case 155: + goto st538 + case 156: + goto st539 + case 163: + goto st492 + } + goto tr420 + st520: + if p++; p == pe { + goto _test_eof520 + } + st_case_520: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st521: + if p++; p == pe { + goto _test_eof521 + } + st_case_521: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 
190 { + goto tr420 + } + default: + goto tr421 + } + goto st413 + st522: + if p++; p == pe { + goto _test_eof522 + } + st_case_522: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st413 + st523: + if p++; p == pe { + goto _test_eof523 + } + st_case_523: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + goto tr420 + st524: + if p++; p == pe { + goto _test_eof524 + } + st_case_524: + switch { + case data[p] < 167: + if 128 <= data[p] && data[p] <= 130 { + goto st413 + } + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + default: + goto st413 + } + goto tr420 + st525: + if p++; p == pe { + goto _test_eof525 + } + st_case_525: + if data[p] == 179 { + goto st413 + } + goto tr420 + st526: + if p++; p == pe { + goto _test_eof526 + } + st_case_526: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st527: + if p++; p == pe { + goto _test_eof527 + } + st_case_527: + switch { + case data[p] < 141: + if 129 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto st413 + st528: + if p++; p == pe { + goto _test_eof528 + } + st_case_528: + if 172 <= data[p] && data[p] <= 183 { + goto st413 + } + goto tr420 + st529: + if p++; p == pe { + goto _test_eof529 + } + st_case_529: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 159: + goto st413 + } + goto tr420 + st530: + if p++; p == pe { + goto _test_eof530 + } + st_case_530: + if data[p] == 188 { + goto st413 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st531: + if 
p++; p == pe { + goto _test_eof531 + } + st_case_531: + if data[p] == 151 { + goto st413 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st413 + } + case data[p] >= 128: + goto st413 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto st413 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto st413 + } + default: + goto st413 + } + default: + goto st413 + } + goto tr420 + st532: + if p++; p == pe { + goto _test_eof532 + } + st_case_532: + if 176 <= data[p] { + goto st413 + } + goto tr420 + st533: + if p++; p == pe { + goto _test_eof533 + } + st_case_533: + switch { + case data[p] < 144: + if 132 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto st413 + st534: + if p++; p == pe { + goto _test_eof534 + } + st_case_534: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto st413 + } + case data[p] >= 175: + goto st413 + } + goto tr420 + st535: + if p++; p == pe { + goto _test_eof535 + } + st_case_535: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto st413 + st536: + if p++; p == pe { + goto _test_eof536 + } + st_case_536: + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto st413 + st537: + if p++; p == pe { + goto _test_eof537 + } + st_case_537: + if 171 <= data[p] && data[p] <= 183 { + goto st413 + } + goto tr420 + st538: + if p++; p == pe { + goto _test_eof538 + } + st_case_538: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + goto tr2 + st539: + if p++; p == pe { + goto _test_eof539 + } + st_case_539: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + 
goto st413 + } + goto tr420 + st540: + if p++; p == pe { + goto _test_eof540 + } + st_case_540: + switch data[p] { + case 169: + goto st492 + case 171: + goto st541 + case 172: + goto st542 + case 173: + goto st469 + case 189: + goto st543 + case 190: + goto st544 + } + goto tr420 + st541: + if p++; p == pe { + goto _test_eof541 + } + st_case_541: + if 176 <= data[p] && data[p] <= 180 { + goto st413 + } + goto tr420 + st542: + if p++; p == pe { + goto _test_eof542 + } + st_case_542: + if 176 <= data[p] && data[p] <= 182 { + goto st413 + } + goto tr420 + st543: + if p++; p == pe { + goto _test_eof543 + } + st_case_543: + if 145 <= data[p] && data[p] <= 190 { + goto st413 + } + goto tr420 + st544: + if p++; p == pe { + goto _test_eof544 + } + st_case_544: + if 143 <= data[p] && data[p] <= 146 { + goto st413 + } + goto tr420 + st545: + if p++; p == pe { + goto _test_eof545 + } + st_case_545: + if data[p] == 178 { + goto st546 + } + goto tr420 + st546: + if p++; p == pe { + goto _test_eof546 + } + st_case_546: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto st413 + } + case data[p] >= 157: + goto st413 + } + goto tr420 + st547: + if p++; p == pe { + goto _test_eof547 + } + st_case_547: + switch data[p] { + case 133: + goto st548 + case 134: + goto st549 + case 137: + goto st550 + case 159: + goto st551 + case 168: + goto st552 + case 169: + goto st553 + case 170: + goto st554 + } + goto tr420 + st548: + if p++; p == pe { + goto _test_eof548 + } + st_case_548: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto st413 + } + case data[p] >= 165: + goto st413 + } + goto tr420 + st549: + if p++; p == pe { + goto _test_eof549 + } + st_case_549: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st413 + st550: + if p++; p == pe { + goto _test_eof550 + } + st_case_550: + if 130 <= data[p] && data[p] <= 
132 { + goto st413 + } + goto tr420 + st551: + if p++; p == pe { + goto _test_eof551 + } + st_case_551: + if 142 <= data[p] && data[p] <= 191 { + goto tr421 + } + goto tr420 + st552: + if p++; p == pe { + goto _test_eof552 + } + st_case_552: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto st413 + } + case data[p] >= 128: + goto st413 + } + goto tr420 + st553: + if p++; p == pe { + goto _test_eof553 + } + st_case_553: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto st413 + st554: + if p++; p == pe { + goto _test_eof554 + } + st_case_554: + if data[p] == 132 { + goto st413 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto st413 + } + case data[p] >= 155: + goto st413 + } + goto tr420 + st555: + if p++; p == pe { + goto _test_eof555 + } + st_case_555: + if data[p] == 163 { + goto st556 + } + goto tr420 + st556: + if p++; p == pe { + goto _test_eof556 + } + st_case_556: + if 144 <= data[p] && data[p] <= 150 { + goto st413 + } + goto tr420 + st557: + if p++; p == pe { + goto _test_eof557 + } + st_case_557: + if data[p] == 160 { + goto st558 + } + goto tr420 + st558: + if p++; p == pe { + goto _test_eof558 + } + st_case_558: + switch data[p] { + case 128: + goto st559 + case 129: + goto st560 + case 132: + goto st415 + case 135: + goto st416 + } + if 133 <= data[p] && data[p] <= 134 { + goto st561 + } + goto tr420 + st559: + if p++; p == pe { + goto _test_eof559 + } + st_case_559: + if data[p] == 129 { + goto st413 + } + if 160 <= data[p] { + goto st413 + } + goto tr420 + st560: + if p++; p == pe { + goto _test_eof560 + } + st_case_560: + if 192 <= data[p] { + goto tr420 + } + goto st413 + st561: + if p++; p == pe { + goto _test_eof561 + } + st_case_561: + goto st413 +tr571: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4869 + st4869: + if p++; p == pe { + goto _test_eof4869 + } + 
st_case_4869: +//line segment_words_prod.go:22013 + switch data[p] { + case 95: + goto tr571 + case 194: + goto st562 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st563 + case 205: + goto st564 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st565 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st566 + case 215: + goto st567 + case 216: + goto st1020 + case 217: + goto st1021 + case 219: + goto st1022 + case 220: + goto st1023 + case 221: + goto st1024 + case 222: + goto st1025 + case 223: + goto st1026 + case 224: + goto st1027 + case 225: + goto st1059 + case 226: + goto st1081 + case 227: + goto st1088 + case 234: + goto st1241 + case 237: + goto st287 + case 239: + goto st1257 + case 240: + goto st1264 + case 243: + goto st1306 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st562: + if p++; p == pe { + goto _test_eof562 + } + st_case_562: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr571 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st563: + if p++; p == pe { + goto _test_eof563 + } + st_case_563: + if data[p] <= 127 { + goto tr420 + } + goto tr571 + st564: + if p++; p == pe { + goto _test_eof564 + } + st_case_564: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr571 + st565: + if p++; p == pe { + goto _test_eof565 + } + 
st_case_565: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr571 + } + goto tr148 + st566: + if p++; p == pe { + goto _test_eof566 + } + st_case_566: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st567: + if p++; p == pe { + goto _test_eof567 + } + st_case_567: + switch data[p] { + case 135: + goto tr571 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr571 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr571 + } + goto tr420 +tr572: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4870 + st4870: + if p++; p == pe { + goto _test_eof4870 + } + st_case_4870: +//line segment_words_prod.go:22233 + switch data[p] { + case 34: + goto st568 + case 39: + goto tr595 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st869 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st871 + case 205: + goto st872 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st874 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st875 + case 215: + goto st876 + case 216: + goto st877 + case 217: + goto st878 + case 219: + goto st879 + case 220: + goto st880 + case 221: + goto st881 + case 222: + goto st882 + case 223: + goto st883 + case 224: + goto st884 + case 225: + goto st916 + case 226: + goto st938 + case 227: + goto st945 + case 234: + goto st948 + case 237: + goto st287 + case 239: + goto st964 + case 240: + goto st972 + case 243: + goto st1014 + } + switch { + case 
data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st568: + if p++; p == pe { + goto _test_eof568 + } + st_case_568: + switch data[p] { + case 194: + goto st569 + case 204: + goto st570 + case 205: + goto st571 + case 210: + goto st572 + case 214: + goto st573 + case 215: + goto st574 + case 216: + goto st733 + case 217: + goto st734 + case 219: + goto st735 + case 220: + goto st736 + case 221: + goto st737 + case 222: + goto st738 + case 223: + goto st739 + case 224: + goto st740 + case 225: + goto st769 + case 226: + goto st791 + case 227: + goto st798 + case 234: + goto st801 + case 239: + goto st817 + case 240: + goto st822 + case 243: + goto st864 + } + goto tr420 + st569: + if p++; p == pe { + goto _test_eof569 + } + st_case_569: + if data[p] == 173 { + goto st568 + } + goto tr420 + st570: + if p++; p == pe { + goto _test_eof570 + } + st_case_570: + if data[p] <= 127 { + goto tr420 + } + goto st568 + st571: + if p++; p == pe { + goto _test_eof571 + } + st_case_571: + if 176 <= data[p] { + goto tr420 + } + goto st568 + st572: + if p++; p == pe { + goto _test_eof572 + } + st_case_572: + if 131 <= data[p] && data[p] <= 137 { + goto st568 + } + goto tr420 + st573: + if p++; p == pe { + goto _test_eof573 + } + st_case_573: + if data[p] == 191 { + goto st568 + } + if 145 <= data[p] && data[p] <= 189 { + goto st568 + } + goto tr420 + st574: + if p++; p == pe { + goto _test_eof574 + } + st_case_574: + if data[p] == 135 { + goto st568 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto st568 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr595 + } + case data[p] >= 144: + goto tr595 + } + default: + 
goto st568 + } + goto tr420 +tr595: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4871 + st4871: + if p++; p == pe { + goto _test_eof4871 + } + st_case_4871: +//line segment_words_prod.go:22469 + switch data[p] { + case 95: + goto tr571 + case 194: + goto st575 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st576 + case 205: + goto st577 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st578 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st579 + case 215: + goto st580 + case 216: + goto st581 + case 217: + goto st582 + case 219: + goto st583 + case 220: + goto st584 + case 221: + goto st585 + case 222: + goto st586 + case 223: + goto st587 + case 224: + goto st588 + case 225: + goto st620 + case 226: + goto st643 + case 227: + goto st650 + case 234: + goto st653 + case 237: + goto st287 + case 239: + goto st670 + case 240: + goto st679 + case 243: + goto st727 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st575: + if p++; p == pe { + goto _test_eof575 + } + st_case_575: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr595 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st576: + if p++; p == pe { + goto _test_eof576 + } + st_case_576: + if data[p] <= 127 { + goto tr420 + } + goto tr595 + st577: + if p++; p == pe { + goto _test_eof577 + } + st_case_577: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + 
switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr595 + st578: + if p++; p == pe { + goto _test_eof578 + } + st_case_578: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr595 + } + goto tr148 + st579: + if p++; p == pe { + goto _test_eof579 + } + st_case_579: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st580: + if p++; p == pe { + goto _test_eof580 + } + st_case_580: + switch data[p] { + case 135: + goto tr595 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr595 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr595 + } + goto tr420 + st581: + if p++; p == pe { + goto _test_eof581 + } + st_case_581: + if data[p] == 156 { + goto tr595 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr595 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr595 + } + goto tr420 + st582: + if p++; p == pe { + goto _test_eof582 + } + st_case_582: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr595 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr595 + } + goto tr420 + st583: + if p++; p == pe { + goto _test_eof583 + } + st_case_583: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] 
> 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr595 + } + case data[p] >= 150: + goto tr595 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st584: + if p++; p == pe { + goto _test_eof584 + } + st_case_584: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr595 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st585: + if p++; p == pe { + goto _test_eof585 + } + st_case_585: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr595 + st586: + if p++; p == pe { + goto _test_eof586 + } + st_case_586: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr595 + } + goto tr148 + st587: + if p++; p == pe { + goto _test_eof587 + } + st_case_587: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st588: + if p++; p == pe { + goto _test_eof588 + } + st_case_588: + switch data[p] { + case 160: + goto st589 + case 161: + goto st590 + case 162: + goto st168 + case 163: + goto st591 + case 164: + goto st592 + case 165: + goto st593 + case 166: + goto st594 + case 167: + goto st595 + case 168: + goto st596 + case 169: + goto st597 + case 170: + goto st598 + case 171: + goto st599 + case 172: + goto st600 + case 173: + goto st601 + case 174: + goto st602 + case 175: + goto st603 + case 176: + goto st604 + case 177: + goto st605 + case 178: + goto st606 + case 179: + goto st607 + case 180: + goto st608 + case 181: + goto st609 + case 
182: + goto st610 + case 183: + goto st611 + case 184: + goto st612 + case 185: + goto st613 + case 186: + goto st614 + case 187: + goto st615 + case 188: + goto st616 + case 189: + goto st617 + case 190: + goto st618 + case 191: + goto st619 + } + goto tr420 + st589: + if p++; p == pe { + goto _test_eof589 + } + st_case_589: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st590: + if p++; p == pe { + goto _test_eof590 + } + st_case_590: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st591: + if p++; p == pe { + goto _test_eof591 + } + st_case_591: + if 163 <= data[p] { + goto tr595 + } + goto tr420 + st592: + if p++; p == pe { + goto _test_eof592 + } + st_case_592: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr595 + st593: + if p++; p == pe { + goto _test_eof593 + } + st_case_593: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr595 + st594: + if p++; p == pe { + goto _test_eof594 + } + st_case_594: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr595 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr595 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto 
tr595 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st595: + if p++; p == pe { + goto _test_eof595 + } + st_case_595: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr595 + st596: + if p++; p == pe { + goto _test_eof596 + } + st_case_596: + if data[p] == 188 { + goto tr595 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr595 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st597: + if p++; p == pe { + goto _test_eof597 + } + st_case_597: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + 
switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr595 + st598: + if p++; p == pe { + goto _test_eof598 + } + st_case_598: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr595 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st599: + if p++; p == pe { + goto _test_eof599 + } + st_case_599: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr595 + st600: + if p++; p == pe { + goto _test_eof600 + } + st_case_600: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && 
data[p] <= 191 { + goto tr595 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st601: + if p++; p == pe { + goto _test_eof601 + } + st_case_601: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr595 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr595 + } + default: + goto tr595 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr595 + } + default: + goto tr148 + } + default: + goto tr595 + } + goto tr420 + st602: + if p++; p == pe { + goto _test_eof602 + } + st_case_602: + switch data[p] { + case 130: + goto tr595 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr595 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st603: + if p++; p == pe { + goto _test_eof603 + } + st_case_603: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr595 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr595 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr595 + } + 
default: + goto tr595 + } + goto tr420 + st604: + if p++; p == pe { + goto _test_eof604 + } + st_case_604: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr595 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st605: + if p++; p == pe { + goto _test_eof605 + } + st_case_605: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr595 + st606: + if p++; p == pe { + goto _test_eof606 + } + st_case_606: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr595 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st607: + if p++; p == pe { + goto _test_eof607 + } + st_case_607: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { 
+ goto tr595 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr595 + } + default: + goto tr595 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st608: + if p++; p == pe { + goto _test_eof608 + } + st_case_608: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr595 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr595 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st609: + if p++; p == pe { + goto _test_eof609 + } + st_case_609: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr595 + st610: + if p++; p == pe { + goto _test_eof610 + } + st_case_610: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto 
tr420 + st611: + if p++; p == pe { + goto _test_eof611 + } + st_case_611: + switch data[p] { + case 138: + goto tr595 + case 150: + goto tr595 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr595 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr595 + } + goto tr420 + st612: + if p++; p == pe { + goto _test_eof612 + } + st_case_612: + if data[p] == 177 { + goto tr595 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr595 + } + goto tr420 + st613: + if p++; p == pe { + goto _test_eof613 + } + st_case_613: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr595 + } + goto tr420 + st614: + if p++; p == pe { + goto _test_eof614 + } + st_case_614: + if data[p] == 177 { + goto tr595 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr595 + } + case data[p] >= 180: + goto tr595 + } + goto tr420 + st615: + if p++; p == pe { + goto _test_eof615 + } + st_case_615: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr595 + } + goto tr420 + st616: + if p++; p == pe { + goto _test_eof616 + } + st_case_616: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr595 + case 183: + goto tr595 + case 185: + goto tr595 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr595 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr595 + } + default: + goto tr421 + } + goto tr420 + st617: + if p++; p == pe { + goto _test_eof617 + } + st_case_617: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr595 + } + 
default: + goto tr148 + } + goto tr420 + st618: + if p++; p == pe { + goto _test_eof618 + } + st_case_618: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr595 + } + case data[p] >= 128: + goto tr595 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr595 + } + case data[p] >= 141: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st619: + if p++; p == pe { + goto _test_eof619 + } + st_case_619: + if data[p] == 134 { + goto tr595 + } + goto tr420 + st620: + if p++; p == pe { + goto _test_eof620 + } + st_case_620: + switch data[p] { + case 128: + goto st621 + case 129: + goto st622 + case 130: + goto st623 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st624 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st625 + case 157: + goto st626 + case 158: + goto st627 + case 159: + goto st628 + case 160: + goto st629 + case 161: + goto st219 + case 162: + goto st630 + case 163: + goto st221 + case 164: + goto st631 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st632 + case 169: + goto st633 + case 170: + goto st634 + case 172: + goto st635 + case 173: + goto st636 + case 174: + goto st637 + case 175: + goto st638 + case 176: + goto st639 + case 177: + goto st640 + case 179: + goto st641 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st642 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st621: + if p++; p == pe { + goto 
_test_eof621 + } + st_case_621: + if 171 <= data[p] && data[p] <= 190 { + goto tr595 + } + goto tr420 + st622: + if p++; p == pe { + goto _test_eof622 + } + st_case_622: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr595 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr595 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr595 + } + default: + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st623: + if p++; p == pe { + goto _test_eof623 + } + st_case_623: + if data[p] == 143 { + goto tr595 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr595 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr595 + } + default: + goto tr421 + } + goto tr420 + st624: + if p++; p == pe { + goto _test_eof624 + } + st_case_624: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st625: + if p++; p == pe { + goto _test_eof625 + } + st_case_625: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr595 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr595 + } + goto tr420 + st626: + if p++; p == pe { + goto _test_eof626 + } + st_case_626: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr595 + } + case 
data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st627: + if p++; p == pe { + goto _test_eof627 + } + st_case_627: + if 180 <= data[p] { + goto tr595 + } + goto tr420 + st628: + if p++; p == pe { + goto _test_eof628 + } + st_case_628: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr595 + st629: + if p++; p == pe { + goto _test_eof629 + } + st_case_629: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr595 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st630: + if p++; p == pe { + goto _test_eof630 + } + st_case_630: + if data[p] == 169 { + goto tr595 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st631: + if p++; p == pe { + goto _test_eof631 + } + st_case_631: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st632: + if p++; p == pe { + goto _test_eof632 + } + st_case_632: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st633: + if p++; p == pe { + goto _test_eof633 + } + st_case_633: + if data[p] == 191 { + goto tr595 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr595 + } + case data[p] >= 149: + goto tr595 + } + goto tr420 + st634: + if p++; p == pe { + goto _test_eof634 + } + st_case_634: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr595 + } + 
default: + goto tr421 + } + goto tr420 + st635: + if p++; p == pe { + goto _test_eof635 + } + st_case_635: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr595 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st636: + if p++; p == pe { + goto _test_eof636 + } + st_case_636: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr595 + st637: + if p++; p == pe { + goto _test_eof637 + } + st_case_637: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr595 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr595 + } + goto tr420 + st638: + if p++; p == pe { + goto _test_eof638 + } + st_case_638: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr595 + } + goto tr148 + st639: + if p++; p == pe { + goto _test_eof639 + } + st_case_639: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st640: + if p++; p == pe { + goto _test_eof640 + } + st_case_640: + switch { + case data[p] < 141: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] && data[p] <= 189 { + goto tr148 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr148 + } + goto tr2 + st641: + if p++; p == pe { + goto _test_eof641 + } + st_case_641: 
+ if data[p] == 173 { + goto tr595 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr595 + } + case data[p] >= 144: + goto tr595 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr595 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr595 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st642: + if p++; p == pe { + goto _test_eof642 + } + st_case_642: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr595 + } + case data[p] >= 128: + goto tr595 + } + goto tr420 + st643: + if p++; p == pe { + goto _test_eof643 + } + st_case_643: + switch data[p] { + case 128: + goto st644 + case 129: + goto st645 + case 130: + goto st241 + case 131: + goto st646 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st647 + case 180: + goto st251 + case 181: + goto st648 + case 182: + goto st253 + case 183: + goto st649 + case 184: + goto st255 + } + goto tr420 + st644: + if p++; p == pe { + goto _test_eof644 + } + st_case_644: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr595 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + default: + goto tr595 + } + goto tr420 + st645: + if p++; p == pe { + goto _test_eof645 + } + st_case_645: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr595 + } + default: + goto tr420 + } + goto tr571 + st646: + if p++; p == pe { + goto _test_eof646 + } + st_case_646: + if 144 <= 
data[p] && data[p] <= 176 { + goto tr595 + } + goto tr420 + st647: + if p++; p == pe { + goto _test_eof647 + } + st_case_647: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st648: + if p++; p == pe { + goto _test_eof648 + } + st_case_648: + if data[p] == 191 { + goto tr595 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st649: + if p++; p == pe { + goto _test_eof649 + } + st_case_649: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr595 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st650: + if p++; p == pe { + goto _test_eof650 + } + st_case_650: + switch data[p] { + case 128: + goto st651 + case 130: + goto st652 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st651: + if p++; p == pe { + goto _test_eof651 + } + st_case_651: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr595 + } + goto tr420 + st652: + if p++; p == pe { + goto _test_eof652 + } + st_case_652: + if 153 <= data[p] && data[p] <= 154 { + goto tr595 + } + goto tr420 + st653: + if p++; p == pe { + goto _test_eof653 + } + st_case_653: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st655 + case 154: + goto st656 + case 155: + goto st657 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st658 + case 161: + goto 
st272 + case 162: + goto st659 + case 163: + goto st660 + case 164: + goto st661 + case 165: + goto st662 + case 166: + goto st663 + case 167: + goto st664 + case 168: + goto st665 + case 169: + goto st666 + case 170: + goto st667 + case 171: + goto st668 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st669 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st654: + if p++; p == pe { + goto _test_eof654 + } + st_case_654: + switch { + case data[p] < 160: + if 141 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 169: + if 172 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st655: + if p++; p == pe { + goto _test_eof655 + } + st_case_655: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st656: + if p++; p == pe { + goto _test_eof656 + } + st_case_656: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr595 + } + goto tr420 + st657: + if p++; p == pe { + goto _test_eof657 + } + st_case_657: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr595 + } + goto tr148 + st658: + if p++; p == pe { + goto _test_eof658 + } + st_case_658: + switch data[p] { + case 130: + goto tr595 + case 134: + goto tr595 + case 139: + goto tr595 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr595 + } + goto tr148 + st659: + if p++; p == pe { + goto _test_eof659 + } + st_case_659: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr595 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr595 + } + default: + goto tr148 + 
} + goto tr420 + st660: + if p++; p == pe { + goto _test_eof660 + } + st_case_660: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr595 + st661: + if p++; p == pe { + goto _test_eof661 + } + st_case_661: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st662: + if p++; p == pe { + goto _test_eof662 + } + st_case_662: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr595 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st663: + if p++; p == pe { + goto _test_eof663 + } + st_case_663: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st664: + if p++; p == pe { + goto _test_eof664 + } + st_case_664: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr595 + st665: + if p++; p == pe { + goto _test_eof665 + } + st_case_665: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { 
+ goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st666: + if p++; p == pe { + goto _test_eof666 + } + st_case_666: + if data[p] == 131 { + goto tr595 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr595 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr595 + } + goto tr420 + st667: + if p++; p == pe { + goto _test_eof667 + } + st_case_667: + if data[p] == 176 { + goto tr595 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr595 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st668: + if p++; p == pe { + goto _test_eof668 + } + st_case_668: + if data[p] == 129 { + goto tr595 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr595 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr595 + } + goto tr420 + st669: + if p++; p == pe { + goto _test_eof669 + } + st_case_669: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st670: + if p++; p == pe { + goto _test_eof670 + } + st_case_670: + switch data[p] { + case 172: + goto st671 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st673 + case 185: + goto st674 + case 187: + goto st675 + case 188: + goto st676 + case 189: + goto st303 + case 190: + goto st677 + case 191: + goto st678 + } + if 176 <= 
data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st671: + if p++; p == pe { + goto _test_eof671 + } + st_case_671: + switch data[p] { + case 158: + goto tr595 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st672: + if p++; p == pe { + goto _test_eof672 + } + st_case_672: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 129 { + goto tr572 + } + case data[p] > 132: + switch { + case data[p] > 143: + if 144 <= data[p] { + goto tr148 + } + case data[p] >= 134: + goto tr572 + } + default: + goto tr572 + } + goto tr2 + st673: + if p++; p == pe { + goto _test_eof673 + } + st_case_673: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr595 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr595 + } + goto tr420 + st674: + if p++; p == pe { + goto _test_eof674 + } + st_case_674: + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr571 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st675: + if p++; p == pe { + goto _test_eof675 + } + st_case_675: + if data[p] == 191 { + goto tr595 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st676: + if p++; p == pe { + goto _test_eof676 + } + st_case_676: + if data[p] == 191 { + goto tr571 + } + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr420 + st677: + if p++; p == pe { + goto _test_eof677 + } + st_case_677: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr595 + } + goto tr420 + st678: + if p++; p == pe { 
+ goto _test_eof678 + } + st_case_678: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr595 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st679: + if p++; p == pe { + goto _test_eof679 + } + st_case_679: + switch data[p] { + case 144: + goto st680 + case 145: + goto st687 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st708 + case 155: + goto st715 + case 157: + goto st717 + case 158: + goto st725 + case 159: + goto st403 + } + goto tr420 + st680: + if p++; p == pe { + goto _test_eof680 + } + st_case_680: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st681 + case 138: + goto st313 + case 139: + goto st682 + case 140: + goto st315 + case 141: + goto st683 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st685 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st686 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st681: + if p++; p == pe { + goto _test_eof681 + } + st_case_681: + if data[p] == 189 { + goto tr595 + } + goto tr420 + st682: + if p++; p 
== pe { + goto _test_eof682 + } + st_case_682: + if data[p] == 160 { + goto tr595 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st683: + if p++; p == pe { + goto _test_eof683 + } + st_case_683: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st684: + if p++; p == pe { + goto _test_eof684 + } + st_case_684: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] > 169: + if 170 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st685: + if p++; p == pe { + goto _test_eof685 + } + st_case_685: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr595 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr595 + } + default: + goto tr595 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr595 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st686: + if p++; p == pe { + goto _test_eof686 + } + st_case_686: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st687: + if p++; p == pe { + goto _test_eof687 + } + st_case_687: + switch data[p] { + case 128: + goto st688 + case 129: + goto st689 + case 130: + goto st690 + case 131: + goto st691 + case 132: + goto st692 + case 133: + goto st693 + case 134: + goto st694 + case 135: + goto st695 + case 136: + goto st696 + case 138: + goto st348 + case 139: + goto st697 + case 140: + goto st698 + case 141: + goto st699 + case 146: + goto st700 
+ case 147: + goto st701 + case 150: + goto st702 + case 151: + goto st703 + case 152: + goto st700 + case 153: + goto st704 + case 154: + goto st705 + case 155: + goto st538 + case 156: + goto st706 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st688: + if p++; p == pe { + goto _test_eof688 + } + st_case_688: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr595 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st689: + if p++; p == pe { + goto _test_eof689 + } + st_case_689: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr595 + st690: + if p++; p == pe { + goto _test_eof690 + } + st_case_690: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr595 + st691: + if p++; p == pe { + goto _test_eof691 + } + st_case_691: + switch { + case data[p] > 168: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 144: + goto tr148 + } + goto tr2 + st692: + if p++; p == pe { + goto _test_eof692 + } + st_case_692: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr595 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st693: + if p++; p == pe { + goto _test_eof693 + } + st_case_693: + switch data[p] { + case 179: + goto tr595 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st694: + if p++; p == pe { + goto _test_eof694 + } + st_case_694: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 
{ + goto tr595 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st695: + if p++; p == pe { + goto _test_eof695 + } + st_case_695: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr595 + st696: + if p++; p == pe { + goto _test_eof696 + } + st_case_696: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st697: + if p++; p == pe { + goto _test_eof697 + } + st_case_697: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr595 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st698: + if p++; p == pe { + goto _test_eof698 + } + st_case_698: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr595 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr595 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st699: + if p++; p == pe { + goto _test_eof699 + } + st_case_699: + switch data[p] 
{ + case 144: + goto tr148 + case 151: + goto tr595 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr595 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr595 + } + default: + goto tr595 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr595 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr595 + } + default: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st700: + if p++; p == pe { + goto _test_eof700 + } + st_case_700: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st701: + if p++; p == pe { + goto _test_eof701 + } + st_case_701: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr595 + st702: + if p++; p == pe { + goto _test_eof702 + } + st_case_702: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st703: + if p++; p == pe { + goto _test_eof703 + } + st_case_703: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr595 + st704: + if p++; p == pe { + goto _test_eof704 + } + st_case_704: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr595 + st705: + if p++; p == pe { + goto _test_eof705 + } + st_case_705: + 
switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st706: + if p++; p == pe { + goto _test_eof706 + } + st_case_706: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr595 + } + goto tr420 + st707: + if p++; p == pe { + goto _test_eof707 + } + st_case_707: + switch { + case data[p] < 170: + if 160 <= data[p] && data[p] <= 169 { + goto tr421 + } + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st708: + if p++; p == pe { + goto _test_eof708 + } + st_case_708: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st710 + case 172: + goto st711 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st713 + case 190: + goto st714 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st709: + if p++; p == pe { + goto _test_eof709 + } + st_case_709: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 169 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st710: + if p++; p == pe { + goto _test_eof710 + } + st_case_710: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr595 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st711: + if p++; p == pe { + goto _test_eof711 + } + st_case_711: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr595 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st712: + if p++; p == pe { + goto _test_eof712 + } + st_case_712: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 183: + if 189 <= data[p] { + goto tr148 + } + case data[p] >= 163: + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st713: + if p++; 
p == pe { + goto _test_eof713 + } + st_case_713: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st714: + if p++; p == pe { + goto _test_eof714 + } + st_case_714: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr595 + } + goto tr420 + st715: + if p++; p == pe { + goto _test_eof715 + } + st_case_715: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st716 + } + goto tr420 + st716: + if p++; p == pe { + goto _test_eof716 + } + st_case_716: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr595 + } + case data[p] >= 157: + goto tr595 + } + default: + goto tr148 + } + goto tr420 + st717: + if p++; p == pe { + goto _test_eof717 + } + st_case_717: + switch data[p] { + case 133: + goto st718 + case 134: + goto st719 + case 137: + goto st720 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st722 + case 169: + goto st723 + case 170: + goto st724 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st718: + if p++; p == pe { + goto _test_eof718 + } + st_case_718: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr595 + } + case data[p] >= 165: + goto tr595 + } + goto tr420 + st719: + if p++; p == pe { + goto _test_eof719 + } + st_case_719: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto 
tr420 + } + default: + goto tr420 + } + goto tr595 + st720: + if p++; p == pe { + goto _test_eof720 + } + st_case_720: + if 130 <= data[p] && data[p] <= 132 { + goto tr595 + } + goto tr420 + st721: + if p++; p == pe { + goto _test_eof721 + } + st_case_721: + if data[p] == 131 { + goto tr2 + } + switch { + case data[p] < 142: + if 140 <= data[p] && data[p] <= 141 { + goto tr2 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st722: + if p++; p == pe { + goto _test_eof722 + } + st_case_722: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr595 + } + case data[p] >= 128: + goto tr595 + } + goto tr420 + st723: + if p++; p == pe { + goto _test_eof723 + } + st_case_723: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr595 + st724: + if p++; p == pe { + goto _test_eof724 + } + st_case_724: + if data[p] == 132 { + goto tr595 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr595 + } + case data[p] >= 155: + goto tr595 + } + goto tr420 + st725: + if p++; p == pe { + goto _test_eof725 + } + st_case_725: + switch data[p] { + case 160: + goto st147 + case 163: + goto st726 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st726: + if p++; p == pe { + goto _test_eof726 + } + st_case_726: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr595 + } + goto tr148 + st727: + if p++; p == pe { + goto _test_eof727 + } + st_case_727: + if data[p] == 160 { + goto st728 + } + goto tr420 + st728: + if p++; p == pe { + goto _test_eof728 + } + st_case_728: + switch data[p] { + case 128: + goto st729 + case 129: + goto st730 + case 132: + goto st576 + case 135: + goto st732 + } + if 133 <= data[p] && 
data[p] <= 134 { + goto st731 + } + goto tr420 + st729: + if p++; p == pe { + goto _test_eof729 + } + st_case_729: + if data[p] == 129 { + goto tr595 + } + if 160 <= data[p] { + goto tr595 + } + goto tr420 + st730: + if p++; p == pe { + goto _test_eof730 + } + st_case_730: + if 192 <= data[p] { + goto tr420 + } + goto tr595 + st731: + if p++; p == pe { + goto _test_eof731 + } + st_case_731: + goto tr595 + st732: + if p++; p == pe { + goto _test_eof732 + } + st_case_732: + if 176 <= data[p] { + goto tr420 + } + goto tr595 + st733: + if p++; p == pe { + goto _test_eof733 + } + st_case_733: + if data[p] == 156 { + goto st568 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st734: + if p++; p == pe { + goto _test_eof734 + } + st_case_734: + if data[p] == 176 { + goto st568 + } + if 139 <= data[p] && data[p] <= 159 { + goto st568 + } + goto tr420 + st735: + if p++; p == pe { + goto _test_eof735 + } + st_case_735: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto st568 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto st568 + } + case data[p] >= 167: + goto st568 + } + default: + goto st568 + } + goto tr420 + st736: + if p++; p == pe { + goto _test_eof736 + } + st_case_736: + switch data[p] { + case 143: + goto st568 + case 145: + goto st568 + } + if 176 <= data[p] { + goto st568 + } + goto tr420 + st737: + if p++; p == pe { + goto _test_eof737 + } + st_case_737: + if 139 <= data[p] { + goto tr420 + } + goto st568 + st738: + if p++; p == pe { + goto _test_eof738 + } + st_case_738: + if 166 <= data[p] && data[p] <= 176 { + goto st568 + } + goto tr420 + st739: + if p++; p == pe { + goto _test_eof739 + } + st_case_739: + if 171 <= data[p] && data[p] <= 179 { + goto st568 + } + goto tr420 + st740: + if p++; p == pe { + goto _test_eof740 + } + st_case_740: + switch data[p] { + case 160: + goto 
st741 + case 161: + goto st742 + case 163: + goto st743 + case 164: + goto st744 + case 165: + goto st745 + case 167: + goto st747 + case 169: + goto st748 + case 171: + goto st749 + case 173: + goto st751 + case 174: + goto st752 + case 175: + goto st753 + case 176: + goto st754 + case 177: + goto st755 + case 179: + goto st756 + case 180: + goto st757 + case 181: + goto st758 + case 182: + goto st759 + case 183: + goto st760 + case 184: + goto st761 + case 185: + goto st762 + case 186: + goto st763 + case 187: + goto st764 + case 188: + goto st765 + case 189: + goto st766 + case 190: + goto st767 + case 191: + goto st768 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st750 + } + case data[p] >= 166: + goto st746 + } + goto tr420 + st741: + if p++; p == pe { + goto _test_eof741 + } + st_case_741: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto st568 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto st568 + } + case data[p] >= 165: + goto st568 + } + default: + goto st568 + } + goto tr420 + st742: + if p++; p == pe { + goto _test_eof742 + } + st_case_742: + if 153 <= data[p] && data[p] <= 155 { + goto st568 + } + goto tr420 + st743: + if p++; p == pe { + goto _test_eof743 + } + st_case_743: + if 163 <= data[p] { + goto st568 + } + goto tr420 + st744: + if p++; p == pe { + goto _test_eof744 + } + st_case_744: + if data[p] == 189 { + goto tr420 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr420 + } + goto st568 + st745: + if p++; p == pe { + goto _test_eof745 + } + st_case_745: + if data[p] == 144 { + goto tr420 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 152: + goto tr420 + } + goto st568 + st746: + if p++; p == pe { + goto _test_eof746 + } + st_case_746: + if data[p] == 188 { + goto st568 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st568 + } + case data[p] >= 129: + 
goto st568 + } + goto tr420 + st747: + if p++; p == pe { + goto _test_eof747 + } + st_case_747: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 152: + goto tr420 + } + default: + goto tr420 + } + goto st568 + st748: + if p++; p == pe { + goto _test_eof748 + } + st_case_748: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr420 + } + case data[p] >= 131: + goto tr420 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto st568 + st749: + if p++; p == pe { + goto _test_eof749 + } + st_case_749: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + goto st568 + st750: + if p++; p == pe { + goto _test_eof750 + } + st_case_750: + if data[p] == 188 { + goto st568 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st568 + } + case data[p] >= 129: + goto st568 + } + goto tr420 + st751: + if p++; p == pe { + goto _test_eof751 + } + st_case_751: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto st568 + } + case data[p] >= 150: + goto st568 + } + default: + goto st568 + } + goto tr420 + st752: + if p++; p == pe { + goto _test_eof752 + } + st_case_752: + if data[p] == 130 { + goto st568 + } + if 190 <= data[p] && data[p] <= 191 { + 
goto st568 + } + goto tr420 + st753: + if p++; p == pe { + goto _test_eof753 + } + st_case_753: + if data[p] == 151 { + goto st568 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto st568 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto st568 + } + default: + goto st568 + } + goto tr420 + st754: + if p++; p == pe { + goto _test_eof754 + } + st_case_754: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st755: + if p++; p == pe { + goto _test_eof755 + } + st_case_755: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st568 + st756: + if p++; p == pe { + goto _test_eof756 + } + st_case_756: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto st568 + } + case data[p] >= 149: + goto st568 + } + default: + goto st568 + } + goto tr420 + st757: + if p++; p == pe { + goto _test_eof757 + } + st_case_757: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st568 + } + case data[p] >= 129: + goto st568 + } + goto tr420 + st758: + if p++; p == pe { + goto _test_eof758 + } + st_case_758: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st568 + st759: + if p++; p == pe { + goto _test_eof759 + } + st_case_759: + if 130 <= data[p] && data[p] <= 131 { + goto st568 + } + goto tr420 + st760: + if p++; p == pe { + goto 
_test_eof760 + } + st_case_760: + switch data[p] { + case 138: + goto st568 + case 150: + goto st568 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto st568 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto st568 + } + default: + goto st568 + } + goto tr420 + st761: + if p++; p == pe { + goto _test_eof761 + } + st_case_761: + if data[p] == 177 { + goto st568 + } + if 180 <= data[p] && data[p] <= 186 { + goto st568 + } + goto tr420 + st762: + if p++; p == pe { + goto _test_eof762 + } + st_case_762: + if 135 <= data[p] && data[p] <= 142 { + goto st568 + } + goto tr420 + st763: + if p++; p == pe { + goto _test_eof763 + } + st_case_763: + if data[p] == 177 { + goto st568 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto st568 + } + case data[p] >= 180: + goto st568 + } + goto tr420 + st764: + if p++; p == pe { + goto _test_eof764 + } + st_case_764: + if 136 <= data[p] && data[p] <= 141 { + goto st568 + } + goto tr420 + st765: + if p++; p == pe { + goto _test_eof765 + } + st_case_765: + switch data[p] { + case 181: + goto st568 + case 183: + goto st568 + case 185: + goto st568 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto st568 + } + case data[p] >= 152: + goto st568 + } + goto tr420 + st766: + if p++; p == pe { + goto _test_eof766 + } + st_case_766: + if 177 <= data[p] && data[p] <= 191 { + goto st568 + } + goto tr420 + st767: + if p++; p == pe { + goto _test_eof767 + } + st_case_767: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto st568 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto st568 + } + case data[p] >= 141: + goto st568 + } + default: + goto st568 + } + goto tr420 + st768: + if p++; p == pe { + goto _test_eof768 + } + st_case_768: + if data[p] == 134 { + goto st568 + } + goto tr420 + st769: + if p++; p == pe { + goto _test_eof769 + } + 
st_case_769: + switch data[p] { + case 128: + goto st770 + case 129: + goto st771 + case 130: + goto st772 + case 141: + goto st773 + case 156: + goto st774 + case 157: + goto st775 + case 158: + goto st776 + case 159: + goto st777 + case 160: + goto st778 + case 162: + goto st779 + case 164: + goto st780 + case 168: + goto st781 + case 169: + goto st782 + case 170: + goto st783 + case 172: + goto st784 + case 173: + goto st785 + case 174: + goto st786 + case 175: + goto st787 + case 176: + goto st788 + case 179: + goto st789 + case 183: + goto st790 + } + goto tr420 + st770: + if p++; p == pe { + goto _test_eof770 + } + st_case_770: + if 171 <= data[p] && data[p] <= 190 { + goto st568 + } + goto tr420 + st771: + if p++; p == pe { + goto _test_eof771 + } + st_case_771: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto st568 + } + case data[p] >= 150: + goto st568 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto st568 + } + case data[p] >= 167: + goto st568 + } + default: + goto st568 + } + goto tr420 + st772: + if p++; p == pe { + goto _test_eof772 + } + st_case_772: + if data[p] == 143 { + goto st568 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto st568 + } + case data[p] >= 130: + goto st568 + } + goto tr420 + st773: + if p++; p == pe { + goto _test_eof773 + } + st_case_773: + if 157 <= data[p] && data[p] <= 159 { + goto st568 + } + goto tr420 + st774: + if p++; p == pe { + goto _test_eof774 + } + st_case_774: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto st568 + } + case data[p] >= 146: + goto st568 + } + goto tr420 + st775: + if p++; p == pe { + goto _test_eof775 + } + st_case_775: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto st568 + } + case data[p] >= 146: + goto st568 + } + goto tr420 + st776: + if p++; p == pe { + goto _test_eof776 + } 
+ st_case_776: + if 180 <= data[p] { + goto st568 + } + goto tr420 + st777: + if p++; p == pe { + goto _test_eof777 + } + st_case_777: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr420 + } + case data[p] >= 148: + goto tr420 + } + goto st568 + st778: + if p++; p == pe { + goto _test_eof778 + } + st_case_778: + if 139 <= data[p] && data[p] <= 142 { + goto st568 + } + goto tr420 + st779: + if p++; p == pe { + goto _test_eof779 + } + st_case_779: + if data[p] == 169 { + goto st568 + } + goto tr420 + st780: + if p++; p == pe { + goto _test_eof780 + } + st_case_780: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto st568 + } + case data[p] >= 160: + goto st568 + } + goto tr420 + st781: + if p++; p == pe { + goto _test_eof781 + } + st_case_781: + if 151 <= data[p] && data[p] <= 155 { + goto st568 + } + goto tr420 + st782: + if p++; p == pe { + goto _test_eof782 + } + st_case_782: + if data[p] == 191 { + goto st568 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto st568 + } + case data[p] >= 149: + goto st568 + } + goto tr420 + st783: + if p++; p == pe { + goto _test_eof783 + } + st_case_783: + if 176 <= data[p] && data[p] <= 190 { + goto st568 + } + goto tr420 + st784: + if p++; p == pe { + goto _test_eof784 + } + st_case_784: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st785: + if p++; p == pe { + goto _test_eof785 + } + st_case_785: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + goto st568 + st786: + if p++; p == pe { + goto _test_eof786 + } + st_case_786: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st787: + if p++; p == pe { + goto _test_eof787 + } + st_case_787: + if 166 <= data[p] && data[p] <= 179 { + goto st568 + } + goto tr420 + st788: 
+ if p++; p == pe { + goto _test_eof788 + } + st_case_788: + if 164 <= data[p] && data[p] <= 183 { + goto st568 + } + goto tr420 + st789: + if p++; p == pe { + goto _test_eof789 + } + st_case_789: + if data[p] == 173 { + goto st568 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto st568 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto st568 + } + case data[p] >= 178: + goto st568 + } + default: + goto st568 + } + goto tr420 + st790: + if p++; p == pe { + goto _test_eof790 + } + st_case_790: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st791: + if p++; p == pe { + goto _test_eof791 + } + st_case_791: + switch data[p] { + case 128: + goto st792 + case 129: + goto st793 + case 131: + goto st794 + case 179: + goto st795 + case 181: + goto st796 + case 183: + goto st797 + } + goto tr420 + st792: + if p++; p == pe { + goto _test_eof792 + } + st_case_792: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto st568 + } + case data[p] >= 140: + goto st568 + } + goto tr420 + st793: + if p++; p == pe { + goto _test_eof793 + } + st_case_793: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto st568 + } + case data[p] >= 160: + goto st568 + } + goto tr420 + st794: + if p++; p == pe { + goto _test_eof794 + } + st_case_794: + if 144 <= data[p] && data[p] <= 176 { + goto st568 + } + goto tr420 + st795: + if p++; p == pe { + goto _test_eof795 + } + st_case_795: + if 175 <= data[p] && data[p] <= 177 { + goto st568 + } + goto tr420 + st796: + if p++; p == pe { + goto _test_eof796 + } + st_case_796: + if data[p] == 191 { + goto st568 + } + goto tr420 + st797: + if p++; p == pe { + goto _test_eof797 + } + st_case_797: + if 160 <= data[p] && data[p] <= 191 { + goto st568 + } + goto tr420 + st798: + if p++; p == pe { + goto _test_eof798 + } + 
st_case_798: + switch data[p] { + case 128: + goto st799 + case 130: + goto st800 + } + goto tr420 + st799: + if p++; p == pe { + goto _test_eof799 + } + st_case_799: + if 170 <= data[p] && data[p] <= 175 { + goto st568 + } + goto tr420 + st800: + if p++; p == pe { + goto _test_eof800 + } + st_case_800: + if 153 <= data[p] && data[p] <= 154 { + goto st568 + } + goto tr420 + st801: + if p++; p == pe { + goto _test_eof801 + } + st_case_801: + switch data[p] { + case 153: + goto st802 + case 154: + goto st803 + case 155: + goto st804 + case 160: + goto st805 + case 162: + goto st806 + case 163: + goto st807 + case 164: + goto st808 + case 165: + goto st809 + case 166: + goto st810 + case 167: + goto st811 + case 168: + goto st812 + case 169: + goto st813 + case 170: + goto st814 + case 171: + goto st815 + case 175: + goto st816 + } + goto tr420 + st802: + if p++; p == pe { + goto _test_eof802 + } + st_case_802: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto st568 + } + case data[p] >= 175: + goto st568 + } + goto tr420 + st803: + if p++; p == pe { + goto _test_eof803 + } + st_case_803: + if 158 <= data[p] && data[p] <= 159 { + goto st568 + } + goto tr420 + st804: + if p++; p == pe { + goto _test_eof804 + } + st_case_804: + if 176 <= data[p] && data[p] <= 177 { + goto st568 + } + goto tr420 + st805: + if p++; p == pe { + goto _test_eof805 + } + st_case_805: + switch data[p] { + case 130: + goto st568 + case 134: + goto st568 + case 139: + goto st568 + } + if 163 <= data[p] && data[p] <= 167 { + goto st568 + } + goto tr420 + st806: + if p++; p == pe { + goto _test_eof806 + } + st_case_806: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st807: + if p++; p == pe { + goto _test_eof807 + } + st_case_807: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + goto st568 + st808: + if p++; p == pe { + goto 
_test_eof808 + } + st_case_808: + if 166 <= data[p] && data[p] <= 173 { + goto st568 + } + goto tr420 + st809: + if p++; p == pe { + goto _test_eof809 + } + st_case_809: + if 135 <= data[p] && data[p] <= 147 { + goto st568 + } + goto tr420 + st810: + if p++; p == pe { + goto _test_eof810 + } + st_case_810: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st811: + if p++; p == pe { + goto _test_eof811 + } + st_case_811: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto st568 + st812: + if p++; p == pe { + goto _test_eof812 + } + st_case_812: + if 169 <= data[p] && data[p] <= 182 { + goto st568 + } + goto tr420 + st813: + if p++; p == pe { + goto _test_eof813 + } + st_case_813: + if data[p] == 131 { + goto st568 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto st568 + } + case data[p] >= 140: + goto st568 + } + goto tr420 + st814: + if p++; p == pe { + goto _test_eof814 + } + st_case_814: + if data[p] == 176 { + goto st568 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto st568 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto st568 + } + default: + goto st568 + } + goto tr420 + st815: + if p++; p == pe { + goto _test_eof815 + } + st_case_815: + if data[p] == 129 { + goto st568 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto st568 + } + case data[p] >= 171: + goto st568 + } + goto tr420 + st816: + if p++; p == pe { + goto _test_eof816 + } + st_case_816: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto st568 + } + case data[p] >= 163: + goto st568 + } + goto tr420 + st817: + if p++; p == pe { + goto _test_eof817 + } + st_case_817: + switch data[p] { + case 172: + goto st818 + case 173: + goto st819 + case 184: + goto st820 + case 187: + goto st796 + case 190: + goto 
st803 + case 191: + goto st821 + } + goto tr420 + st818: + if p++; p == pe { + goto _test_eof818 + } + st_case_818: + switch data[p] { + case 158: + goto st568 + case 190: + goto tr595 + } + switch { + case data[p] < 170: + if 157 <= data[p] && data[p] <= 168 { + goto tr595 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st819: + if p++; p == pe { + goto _test_eof819 + } + st_case_819: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 129 { + goto tr595 + } + case data[p] > 132: + if 134 <= data[p] && data[p] <= 143 { + goto tr595 + } + default: + goto tr595 + } + goto tr420 + st820: + if p++; p == pe { + goto _test_eof820 + } + st_case_820: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st821: + if p++; p == pe { + goto _test_eof821 + } + st_case_821: + if 185 <= data[p] && data[p] <= 187 { + goto st568 + } + goto tr420 + st822: + if p++; p == pe { + goto _test_eof822 + } + st_case_822: + switch data[p] { + case 144: + goto st823 + case 145: + goto st829 + case 150: + goto st848 + case 155: + goto st853 + case 157: + goto st855 + case 158: + goto st862 + } + goto tr420 + st823: + if p++; p == pe { + goto _test_eof823 + } + st_case_823: + switch data[p] { + case 135: + goto st824 + case 139: + goto st825 + case 141: + goto st826 + case 168: + goto st827 + case 171: + goto st828 + } + goto tr420 + st824: + if p++; p == pe { + goto _test_eof824 + } + st_case_824: + if data[p] == 189 { + goto st568 + } + goto tr420 + st825: + if p++; p == pe { + goto _test_eof825 + } + st_case_825: + if data[p] == 160 { + goto st568 + } + goto tr420 + st826: + if p++; p == pe { + goto _test_eof826 + } + st_case_826: + if 182 <= data[p] && data[p] <= 186 { + goto st568 + } + goto tr420 + st827: + if p++; p == pe { + goto _test_eof827 + } + st_case_827: + if data[p] == 191 { + goto st568 + } + 
switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st568 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto st568 + } + case data[p] >= 140: + goto st568 + } + default: + goto st568 + } + goto tr420 + st828: + if p++; p == pe { + goto _test_eof828 + } + st_case_828: + if 165 <= data[p] && data[p] <= 166 { + goto st568 + } + goto tr420 + st829: + if p++; p == pe { + goto _test_eof829 + } + st_case_829: + switch data[p] { + case 128: + goto st830 + case 129: + goto st831 + case 130: + goto st832 + case 132: + goto st833 + case 133: + goto st834 + case 134: + goto st835 + case 135: + goto st836 + case 136: + goto st837 + case 139: + goto st838 + case 140: + goto st839 + case 141: + goto st840 + case 146: + goto st841 + case 147: + goto st842 + case 150: + goto st843 + case 151: + goto st844 + case 152: + goto st841 + case 153: + goto st845 + case 154: + goto st846 + case 156: + goto st847 + } + goto tr420 + st830: + if p++; p == pe { + goto _test_eof830 + } + st_case_830: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st831: + if p++; p == pe { + goto _test_eof831 + } + st_case_831: + if 135 <= data[p] && data[p] <= 190 { + goto tr420 + } + goto st568 + st832: + if p++; p == pe { + goto _test_eof832 + } + st_case_832: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st568 + st833: + if p++; p == pe { + goto _test_eof833 + } + st_case_833: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st834: + if p++; p == pe { + goto _test_eof834 + } + st_case_834: + if data[p] == 179 { + goto st568 + } + goto tr420 + st835: + if p++; p == pe { + goto _test_eof835 + } + st_case_835: + switch { 
+ case data[p] > 130: + if 179 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st836: + if p++; p == pe { + goto _test_eof836 + } + st_case_836: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto st568 + st837: + if p++; p == pe { + goto _test_eof837 + } + st_case_837: + if 172 <= data[p] && data[p] <= 183 { + goto st568 + } + goto tr420 + st838: + if p++; p == pe { + goto _test_eof838 + } + st_case_838: + if 159 <= data[p] && data[p] <= 170 { + goto st568 + } + goto tr420 + st839: + if p++; p == pe { + goto _test_eof839 + } + st_case_839: + if data[p] == 188 { + goto st568 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st840: + if p++; p == pe { + goto _test_eof840 + } + st_case_840: + if data[p] == 151 { + goto st568 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st568 + } + case data[p] >= 128: + goto st568 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto st568 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto st568 + } + default: + goto st568 + } + default: + goto st568 + } + goto tr420 + st841: + if p++; p == pe { + goto _test_eof841 + } + st_case_841: + if 176 <= data[p] { + goto st568 + } + goto tr420 + st842: + if p++; p == pe { + goto _test_eof842 + } + st_case_842: + if 132 <= data[p] { + goto tr420 + } + goto st568 + st843: + if p++; p == pe { + goto _test_eof843 + } + st_case_843: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto st568 + } + case data[p] >= 175: + goto st568 + } + goto tr420 + st844: + if p++; p == pe { + goto _test_eof844 + } + st_case_844: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto st568 + st845: + 
if p++; p == pe { + goto _test_eof845 + } + st_case_845: + if 129 <= data[p] { + goto tr420 + } + goto st568 + st846: + if p++; p == pe { + goto _test_eof846 + } + st_case_846: + if 171 <= data[p] && data[p] <= 183 { + goto st568 + } + goto tr420 + st847: + if p++; p == pe { + goto _test_eof847 + } + st_case_847: + if 157 <= data[p] && data[p] <= 171 { + goto st568 + } + goto tr420 + st848: + if p++; p == pe { + goto _test_eof848 + } + st_case_848: + switch data[p] { + case 171: + goto st849 + case 172: + goto st850 + case 189: + goto st851 + case 190: + goto st852 + } + goto tr420 + st849: + if p++; p == pe { + goto _test_eof849 + } + st_case_849: + if 176 <= data[p] && data[p] <= 180 { + goto st568 + } + goto tr420 + st850: + if p++; p == pe { + goto _test_eof850 + } + st_case_850: + if 176 <= data[p] && data[p] <= 182 { + goto st568 + } + goto tr420 + st851: + if p++; p == pe { + goto _test_eof851 + } + st_case_851: + if 145 <= data[p] && data[p] <= 190 { + goto st568 + } + goto tr420 + st852: + if p++; p == pe { + goto _test_eof852 + } + st_case_852: + if 143 <= data[p] && data[p] <= 146 { + goto st568 + } + goto tr420 + st853: + if p++; p == pe { + goto _test_eof853 + } + st_case_853: + if data[p] == 178 { + goto st854 + } + goto tr420 + st854: + if p++; p == pe { + goto _test_eof854 + } + st_case_854: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto st568 + } + case data[p] >= 157: + goto st568 + } + goto tr420 + st855: + if p++; p == pe { + goto _test_eof855 + } + st_case_855: + switch data[p] { + case 133: + goto st856 + case 134: + goto st857 + case 137: + goto st858 + case 168: + goto st859 + case 169: + goto st860 + case 170: + goto st861 + } + goto tr420 + st856: + if p++; p == pe { + goto _test_eof856 + } + st_case_856: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto st568 + } + case data[p] >= 165: + goto st568 + } + goto tr420 + st857: + if p++; p == pe { + goto _test_eof857 + } + st_case_857: + switch { + 
case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto st568 + st858: + if p++; p == pe { + goto _test_eof858 + } + st_case_858: + if 130 <= data[p] && data[p] <= 132 { + goto st568 + } + goto tr420 + st859: + if p++; p == pe { + goto _test_eof859 + } + st_case_859: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto st568 + } + case data[p] >= 128: + goto st568 + } + goto tr420 + st860: + if p++; p == pe { + goto _test_eof860 + } + st_case_860: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto st568 + st861: + if p++; p == pe { + goto _test_eof861 + } + st_case_861: + if data[p] == 132 { + goto st568 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto st568 + } + case data[p] >= 155: + goto st568 + } + goto tr420 + st862: + if p++; p == pe { + goto _test_eof862 + } + st_case_862: + if data[p] == 163 { + goto st863 + } + goto tr420 + st863: + if p++; p == pe { + goto _test_eof863 + } + st_case_863: + if 144 <= data[p] && data[p] <= 150 { + goto st568 + } + goto tr420 + st864: + if p++; p == pe { + goto _test_eof864 + } + st_case_864: + if data[p] == 160 { + goto st865 + } + goto tr420 + st865: + if p++; p == pe { + goto _test_eof865 + } + st_case_865: + switch data[p] { + case 128: + goto st866 + case 129: + goto st867 + case 132: + goto st570 + case 135: + goto st571 + } + if 133 <= data[p] && data[p] <= 134 { + goto st868 + } + goto tr420 + st866: + if p++; p == pe { + goto _test_eof866 + } + st_case_866: + if data[p] == 129 { + goto st568 + } + if 160 <= data[p] { + goto st568 + } + goto tr420 + st867: + if p++; p == pe { + goto _test_eof867 + } + st_case_867: + if 192 <= data[p] { + goto tr420 + } + goto st568 + st868: + if p++; p == pe { + goto _test_eof868 + } + st_case_868: + goto st568 + st869: + if p++; p == pe { + goto 
_test_eof869 + } + st_case_869: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr572 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr420 + st870: + if p++; p == pe { + goto _test_eof870 + } + st_case_870: + switch data[p] { + case 151: + goto st142 + case 173: + goto tr2 + } + switch { + case data[p] < 146: + if 130 <= data[p] && data[p] <= 133 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 171: + if 175 <= data[p] { + goto tr2 + } + case data[p] >= 165: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st871: + if p++; p == pe { + goto _test_eof871 + } + st_case_871: + if data[p] <= 127 { + goto tr420 + } + goto tr572 + st872: + if p++; p == pe { + goto _test_eof872 + } + st_case_872: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr572 + st873: + if p++; p == pe { + goto _test_eof873 + } + st_case_873: + switch data[p] { + case 135: + goto st142 + case 140: + goto tr148 + } + switch { + case data[p] < 142: + if 134 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 161: + if 163 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st874: + if p++; p == pe { + goto _test_eof874 + } + st_case_874: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr572 + } + goto tr148 + st875: + if p++; p == pe { + goto _test_eof875 + } + st_case_875: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st876: + if p++; p == pe { + goto _test_eof876 + } + st_case_876: + switch 
data[p] { + case 135: + goto tr572 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr572 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st877: + if p++; p == pe { + goto _test_eof877 + } + st_case_877: + if data[p] == 156 { + goto tr572 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr572 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr572 + } + goto tr420 + st878: + if p++; p == pe { + goto _test_eof878 + } + st_case_878: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr572 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr572 + } + goto tr420 + st879: + if p++; p == pe { + goto _test_eof879 + } + st_case_879: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr572 + } + case data[p] >= 150: + goto tr572 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st880: + if p++; p == pe { + goto _test_eof880 + } + st_case_880: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr572 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st881: + if p++; p == pe { + goto _test_eof881 + } + st_case_881: + switch { + case data[p] > 140: 
+ if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr572 + st882: + if p++; p == pe { + goto _test_eof882 + } + st_case_882: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr572 + } + goto tr148 + st883: + if p++; p == pe { + goto _test_eof883 + } + st_case_883: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st884: + if p++; p == pe { + goto _test_eof884 + } + st_case_884: + switch data[p] { + case 160: + goto st885 + case 161: + goto st886 + case 162: + goto st168 + case 163: + goto st887 + case 164: + goto st888 + case 165: + goto st889 + case 166: + goto st890 + case 167: + goto st891 + case 168: + goto st892 + case 169: + goto st893 + case 170: + goto st894 + case 171: + goto st895 + case 172: + goto st896 + case 173: + goto st897 + case 174: + goto st898 + case 175: + goto st899 + case 176: + goto st900 + case 177: + goto st901 + case 178: + goto st902 + case 179: + goto st903 + case 180: + goto st904 + case 181: + goto st905 + case 182: + goto st906 + case 183: + goto st907 + case 184: + goto st908 + case 185: + goto st909 + case 186: + goto st910 + case 187: + goto st911 + case 188: + goto st912 + case 189: + goto st913 + case 190: + goto st914 + case 191: + goto st915 + } + goto tr420 + st885: + if p++; p == pe { + goto _test_eof885 + } + st_case_885: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st886: + if p++; p == pe { + goto _test_eof886 + } + st_case_886: + switch { + case data[p] > 152: + if 153 
<= data[p] && data[p] <= 155 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st887: + if p++; p == pe { + goto _test_eof887 + } + st_case_887: + if 163 <= data[p] { + goto tr572 + } + goto tr420 + st888: + if p++; p == pe { + goto _test_eof888 + } + st_case_888: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr572 + st889: + if p++; p == pe { + goto _test_eof889 + } + st_case_889: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr572 + st890: + if p++; p == pe { + goto _test_eof890 + } + st_case_890: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr572 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr572 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr572 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st891: + if p++; p == pe { + goto _test_eof891 + } + st_case_891: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case 
data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr572 + st892: + if p++; p == pe { + goto _test_eof892 + } + st_case_892: + if data[p] == 188 { + goto tr572 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr572 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st893: + if p++; p == pe { + goto _test_eof893 + } + st_case_893: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr572 + st894: + if p++; p == pe { + goto _test_eof894 + } + st_case_894: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 
178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr572 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st895: + if p++; p == pe { + goto _test_eof895 + } + st_case_895: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr572 + st896: + if p++; p == pe { + goto _test_eof896 + } + st_case_896: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr572 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st897: + if p++; p == pe { + goto _test_eof897 + } + st_case_897: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr572 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr572 + } + default: + goto tr572 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] 
<= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr572 + } + default: + goto tr148 + } + default: + goto tr572 + } + goto tr420 + st898: + if p++; p == pe { + goto _test_eof898 + } + st_case_898: + switch data[p] { + case 130: + goto tr572 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr572 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st899: + if p++; p == pe { + goto _test_eof899 + } + st_case_899: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr572 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr572 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st900: + if p++; p == pe { + goto _test_eof900 + } + st_case_900: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr572 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st901: + if p++; p == pe { + goto _test_eof901 + } + st_case_901: + switch data[p] { + case 133: + goto tr420 + case 137: + 
goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr572 + st902: + if p++; p == pe { + goto _test_eof902 + } + st_case_902: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr572 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st903: + if p++; p == pe { + goto _test_eof903 + } + st_case_903: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr572 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr572 + } + default: + goto tr572 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st904: + if p++; p == pe { + goto _test_eof904 + } + st_case_904: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto 
tr148 + } + case data[p] >= 129: + goto tr572 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr572 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st905: + if p++; p == pe { + goto _test_eof905 + } + st_case_905: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr572 + st906: + if p++; p == pe { + goto _test_eof906 + } + st_case_906: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st907: + if p++; p == pe { + goto _test_eof907 + } + st_case_907: + switch data[p] { + case 138: + goto tr572 + case 150: + goto tr572 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr572 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr572 + } + goto tr420 + st908: + if p++; p == pe { + goto _test_eof908 + } + st_case_908: + if data[p] == 177 { + goto tr572 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr572 + } + goto tr420 
+ st909: + if p++; p == pe { + goto _test_eof909 + } + st_case_909: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr572 + } + goto tr420 + st910: + if p++; p == pe { + goto _test_eof910 + } + st_case_910: + if data[p] == 177 { + goto tr572 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 180: + goto tr572 + } + goto tr420 + st911: + if p++; p == pe { + goto _test_eof911 + } + st_case_911: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr572 + } + goto tr420 + st912: + if p++; p == pe { + goto _test_eof912 + } + st_case_912: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr572 + case 183: + goto tr572 + case 185: + goto tr572 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr572 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr572 + } + default: + goto tr421 + } + goto tr420 + st913: + if p++; p == pe { + goto _test_eof913 + } + st_case_913: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st914: + if p++; p == pe { + goto _test_eof914 + } + st_case_914: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr572 + } + case data[p] >= 128: + goto tr572 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 141: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st915: + if p++; p == pe { + goto _test_eof915 + } + st_case_915: + if data[p] == 134 { + goto tr572 + } + goto tr420 + st916: + if p++; p == pe { + goto _test_eof916 + } + st_case_916: + switch data[p] { + case 128: + 
goto st917 + case 129: + goto st918 + case 130: + goto st919 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st920 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st921 + case 157: + goto st922 + case 158: + goto st923 + case 159: + goto st924 + case 160: + goto st925 + case 161: + goto st219 + case 162: + goto st926 + case 163: + goto st221 + case 164: + goto st927 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st928 + case 169: + goto st929 + case 170: + goto st930 + case 172: + goto st931 + case 173: + goto st932 + case 174: + goto st933 + case 175: + goto st934 + case 176: + goto st935 + case 177: + goto st640 + case 179: + goto st936 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st937 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st917: + if p++; p == pe { + goto _test_eof917 + } + st_case_917: + if 171 <= data[p] && data[p] <= 190 { + goto tr572 + } + goto tr420 + st918: + if p++; p == pe { + goto _test_eof918 + } + st_case_918: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr572 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr572 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr572 + } + default: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st919: + if p++; p == pe { + goto _test_eof919 + } + st_case_919: + if data[p] == 143 { + 
goto tr572 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr572 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr572 + } + default: + goto tr421 + } + goto tr420 + st920: + if p++; p == pe { + goto _test_eof920 + } + st_case_920: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st921: + if p++; p == pe { + goto _test_eof921 + } + st_case_921: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr572 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr572 + } + goto tr420 + st922: + if p++; p == pe { + goto _test_eof922 + } + st_case_922: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr572 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st923: + if p++; p == pe { + goto _test_eof923 + } + st_case_923: + if 180 <= data[p] { + goto tr572 + } + goto tr420 + st924: + if p++; p == pe { + goto _test_eof924 + } + st_case_924: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr572 + st925: + if p++; p == pe { + goto _test_eof925 + } + st_case_925: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr572 + } 
+ case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st926: + if p++; p == pe { + goto _test_eof926 + } + st_case_926: + if data[p] == 169 { + goto tr572 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st927: + if p++; p == pe { + goto _test_eof927 + } + st_case_927: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st928: + if p++; p == pe { + goto _test_eof928 + } + st_case_928: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st929: + if p++; p == pe { + goto _test_eof929 + } + st_case_929: + if data[p] == 191 { + goto tr572 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 149: + goto tr572 + } + goto tr420 + st930: + if p++; p == pe { + goto _test_eof930 + } + st_case_930: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr572 + } + default: + goto tr421 + } + goto tr420 + st931: + if p++; p == pe { + goto _test_eof931 + } + st_case_931: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr572 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st932: + if p++; p == pe { + goto _test_eof932 + } + st_case_932: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 
+ } + goto tr572 + st933: + if p++; p == pe { + goto _test_eof933 + } + st_case_933: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr572 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr572 + } + goto tr420 + st934: + if p++; p == pe { + goto _test_eof934 + } + st_case_934: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr572 + } + goto tr148 + st935: + if p++; p == pe { + goto _test_eof935 + } + st_case_935: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st936: + if p++; p == pe { + goto _test_eof936 + } + st_case_936: + if data[p] == 173 { + goto tr572 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr572 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr572 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st937: + if p++; p == pe { + goto _test_eof937 + } + st_case_937: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr572 + } + case data[p] >= 128: + goto tr572 + } + goto tr420 + st938: + if p++; p == pe { + goto _test_eof938 + } + st_case_938: + switch data[p] { + case 128: + goto st939 + case 129: + goto st940 + case 130: + goto st241 + case 131: + goto st941 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + 
case 178: + goto st145 + case 179: + goto st942 + case 180: + goto st251 + case 181: + goto st943 + case 182: + goto st253 + case 183: + goto st944 + case 184: + goto st255 + } + goto tr420 + st939: + if p++; p == pe { + goto _test_eof939 + } + st_case_939: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr572 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr572 + } + default: + goto st142 + } + goto tr420 + st940: + if p++; p == pe { + goto _test_eof940 + } + st_case_940: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr572 + } + default: + goto tr420 + } + goto tr571 + st941: + if p++; p == pe { + goto _test_eof941 + } + st_case_941: + if 144 <= data[p] && data[p] <= 176 { + goto tr572 + } + goto tr420 + st942: + if p++; p == pe { + goto _test_eof942 + } + st_case_942: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st943: + if p++; p == pe { + goto _test_eof943 + } + st_case_943: + if data[p] == 191 { + goto tr572 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st944: + if p++; p == pe { + goto _test_eof944 + } + st_case_944: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr572 + } 
+ case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st945: + if p++; p == pe { + goto _test_eof945 + } + st_case_945: + switch data[p] { + case 128: + goto st946 + case 130: + goto st947 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st946: + if p++; p == pe { + goto _test_eof946 + } + st_case_946: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr572 + } + goto tr420 + st947: + if p++; p == pe { + goto _test_eof947 + } + st_case_947: + if 153 <= data[p] && data[p] <= 154 { + goto tr572 + } + goto tr420 + st948: + if p++; p == pe { + goto _test_eof948 + } + st_case_948: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st949 + case 154: + goto st950 + case 155: + goto st951 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st952 + case 161: + goto st272 + case 162: + goto st953 + case 163: + goto st954 + case 164: + goto st955 + case 165: + goto st956 + case 166: + goto st957 + case 167: + goto st958 + case 168: + goto st959 + case 169: + goto st960 + case 170: + goto st961 + case 171: + goto st962 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st963 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st949: + if p++; p == pe { + goto _test_eof949 + } + st_case_949: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st950: + if p++; p == pe { + goto _test_eof950 + } + st_case_950: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { 
+ goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr572 + } + goto tr420 + st951: + if p++; p == pe { + goto _test_eof951 + } + st_case_951: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr572 + } + goto tr148 + st952: + if p++; p == pe { + goto _test_eof952 + } + st_case_952: + switch data[p] { + case 130: + goto tr572 + case 134: + goto tr572 + case 139: + goto tr572 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr572 + } + goto tr148 + st953: + if p++; p == pe { + goto _test_eof953 + } + st_case_953: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr572 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st954: + if p++; p == pe { + goto _test_eof954 + } + st_case_954: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr572 + st955: + if p++; p == pe { + goto _test_eof955 + } + st_case_955: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st956: + if p++; p == pe { + goto _test_eof956 + } + st_case_956: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr572 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st957: + if p++; p == pe { + goto _test_eof957 + } + 
st_case_957: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st958: + if p++; p == pe { + goto _test_eof958 + } + st_case_958: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr572 + st959: + if p++; p == pe { + goto _test_eof959 + } + st_case_959: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st960: + if p++; p == pe { + goto _test_eof960 + } + st_case_960: + if data[p] == 131 { + goto tr572 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr572 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr572 + } + goto tr420 + st961: + if p++; p == pe { + goto _test_eof961 + } + st_case_961: + if data[p] == 176 { + goto tr572 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr572 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st962: + if p++; p == pe { + goto _test_eof962 + } + st_case_962: + if data[p] == 129 { + goto tr572 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr572 + } + case data[p] >= 178: + goto tr148 + } + default: + 
goto tr572 + } + goto tr420 + st963: + if p++; p == pe { + goto _test_eof963 + } + st_case_963: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st964: + if p++; p == pe { + goto _test_eof964 + } + st_case_964: + switch data[p] { + case 172: + goto st965 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st966 + case 185: + goto st967 + case 187: + goto st968 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st970 + case 191: + goto st971 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st965: + if p++; p == pe { + goto _test_eof965 + } + st_case_965: + if data[p] == 190 { + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st966: + if p++; p == pe { + goto _test_eof966 + } + st_case_966: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr572 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr572 + } + goto tr420 + st967: + if p++; p == pe { + goto _test_eof967 + } + st_case_967: + switch data[p] { + case 146: + goto st142 + case 149: + goto st142 + } + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr571 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + 
goto tr148 + } + goto tr2 + st968: + if p++; p == pe { + goto _test_eof968 + } + st_case_968: + if data[p] == 191 { + goto tr572 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st969: + if p++; p == pe { + goto _test_eof969 + } + st_case_969: + switch data[p] { + case 135: + goto st142 + case 142: + goto st142 + case 154: + goto st142 + case 191: + goto tr571 + } + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr2 + st970: + if p++; p == pe { + goto _test_eof970 + } + st_case_970: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr572 + } + goto tr420 + st971: + if p++; p == pe { + goto _test_eof971 + } + st_case_971: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr572 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st972: + if p++; p == pe { + goto _test_eof972 + } + st_case_972: + switch data[p] { + case 144: + goto st973 + case 145: + goto st979 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st998 + case 155: + goto st1003 + case 157: + goto st1005 + case 158: + goto st1012 + case 159: + goto st403 + } + goto tr420 + st973: + if p++; p == pe { + goto _test_eof973 + } + st_case_973: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st974 + case 138: + goto st313 + case 139: + goto st975 + case 140: + goto st315 + case 141: + goto st976 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + 
case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st977 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st978 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st974: + if p++; p == pe { + goto _test_eof974 + } + st_case_974: + if data[p] == 189 { + goto tr572 + } + goto tr420 + st975: + if p++; p == pe { + goto _test_eof975 + } + st_case_975: + if data[p] == 160 { + goto tr572 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st976: + if p++; p == pe { + goto _test_eof976 + } + st_case_976: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st977: + if p++; p == pe { + goto _test_eof977 + } + st_case_977: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr572 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr572 + } + default: + goto tr572 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr572 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st978: + if p++; p == pe { + goto _test_eof978 + } + st_case_978: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + 
st979: + if p++; p == pe { + goto _test_eof979 + } + st_case_979: + switch data[p] { + case 128: + goto st980 + case 129: + goto st981 + case 130: + goto st982 + case 131: + goto st691 + case 132: + goto st983 + case 133: + goto st984 + case 134: + goto st985 + case 135: + goto st986 + case 136: + goto st987 + case 138: + goto st348 + case 139: + goto st988 + case 140: + goto st989 + case 141: + goto st990 + case 146: + goto st991 + case 147: + goto st992 + case 150: + goto st993 + case 151: + goto st994 + case 152: + goto st991 + case 153: + goto st995 + case 154: + goto st996 + case 155: + goto st538 + case 156: + goto st997 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st980: + if p++; p == pe { + goto _test_eof980 + } + st_case_980: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr572 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st981: + if p++; p == pe { + goto _test_eof981 + } + st_case_981: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr572 + st982: + if p++; p == pe { + goto _test_eof982 + } + st_case_982: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr572 + st983: + if p++; p == pe { + goto _test_eof983 + } + st_case_983: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr572 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st984: + if p++; p == pe { + goto _test_eof984 + } + st_case_984: + switch data[p] { + case 179: + goto tr572 + case 182: 
+ goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st985: + if p++; p == pe { + goto _test_eof985 + } + st_case_985: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr572 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st986: + if p++; p == pe { + goto _test_eof986 + } + st_case_986: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr572 + st987: + if p++; p == pe { + goto _test_eof987 + } + st_case_987: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st988: + if p++; p == pe { + goto _test_eof988 + } + st_case_988: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr572 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st989: + if p++; p == pe { + goto _test_eof989 + } + st_case_989: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr572 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && 
data[p] <= 191 { + goto tr572 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st990: + if p++; p == pe { + goto _test_eof990 + } + st_case_990: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr572 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr572 + } + default: + goto tr572 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr572 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr572 + } + default: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st991: + if p++; p == pe { + goto _test_eof991 + } + st_case_991: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st992: + if p++; p == pe { + goto _test_eof992 + } + st_case_992: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr572 + st993: + if p++; p == pe { + goto _test_eof993 + } + st_case_993: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st994: + if p++; p == pe { + goto _test_eof994 + } + st_case_994: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr572 + st995: + if p++; p == pe { + goto _test_eof995 + } + st_case_995: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: 
+ if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr572 + st996: + if p++; p == pe { + goto _test_eof996 + } + st_case_996: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st997: + if p++; p == pe { + goto _test_eof997 + } + st_case_997: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr572 + } + goto tr420 + st998: + if p++; p == pe { + goto _test_eof998 + } + st_case_998: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st999 + case 172: + goto st1000 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1001 + case 190: + goto st1002 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st999: + if p++; p == pe { + goto _test_eof999 + } + st_case_999: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr572 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st1000: + if p++; p == pe { + goto _test_eof1000 + } + st_case_1000: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr572 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1001: + if p++; p == pe { + goto _test_eof1001 + } + st_case_1001: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st1002: + if p++; p == pe { + goto _test_eof1002 + } + st_case_1002: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr572 + } + goto tr420 + st1003: + if p++; p == pe { + goto _test_eof1003 + } + st_case_1003: + switch data[p] { + case 176: 
+ goto st147 + case 177: + goto st378 + case 178: + goto st1004 + } + goto tr420 + st1004: + if p++; p == pe { + goto _test_eof1004 + } + st_case_1004: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr572 + } + case data[p] >= 157: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st1005: + if p++; p == pe { + goto _test_eof1005 + } + st_case_1005: + switch data[p] { + case 133: + goto st1006 + case 134: + goto st1007 + case 137: + goto st1008 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st1009 + case 169: + goto st1010 + case 170: + goto st1011 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st1006: + if p++; p == pe { + goto _test_eof1006 + } + st_case_1006: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr572 + } + case data[p] >= 165: + goto tr572 + } + goto tr420 + st1007: + if p++; p == pe { + goto _test_eof1007 + } + st_case_1007: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr572 + st1008: + if p++; p == pe { + goto _test_eof1008 + } + st_case_1008: + if 130 <= data[p] && data[p] <= 132 { + goto tr572 + } + goto tr420 + st1009: + if p++; p == pe { + goto _test_eof1009 + } + st_case_1009: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr572 + } + case data[p] >= 128: + goto tr572 + } + goto tr420 + st1010: + if p++; p == pe { + goto _test_eof1010 + } + st_case_1010: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + 
case data[p] >= 173: + goto tr420 + } + goto tr572 + st1011: + if p++; p == pe { + goto _test_eof1011 + } + st_case_1011: + if data[p] == 132 { + goto tr572 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr572 + } + case data[p] >= 155: + goto tr572 + } + goto tr420 + st1012: + if p++; p == pe { + goto _test_eof1012 + } + st_case_1012: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1013 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st1013: + if p++; p == pe { + goto _test_eof1013 + } + st_case_1013: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr572 + } + goto tr148 + st1014: + if p++; p == pe { + goto _test_eof1014 + } + st_case_1014: + if data[p] == 160 { + goto st1015 + } + goto tr420 + st1015: + if p++; p == pe { + goto _test_eof1015 + } + st_case_1015: + switch data[p] { + case 128: + goto st1016 + case 129: + goto st1017 + case 132: + goto st871 + case 135: + goto st1019 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1018 + } + goto tr420 + st1016: + if p++; p == pe { + goto _test_eof1016 + } + st_case_1016: + if data[p] == 129 { + goto tr572 + } + if 160 <= data[p] { + goto tr572 + } + goto tr420 + st1017: + if p++; p == pe { + goto _test_eof1017 + } + st_case_1017: + if 192 <= data[p] { + goto tr420 + } + goto tr572 + st1018: + if p++; p == pe { + goto _test_eof1018 + } + st_case_1018: + goto tr572 + st1019: + if p++; p == pe { + goto _test_eof1019 + } + st_case_1019: + if 176 <= data[p] { + goto tr420 + } + goto tr572 + st1020: + if p++; p == pe { + goto _test_eof1020 + } + st_case_1020: + if data[p] == 156 { + goto tr571 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr571 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 
191 { + goto tr148 + } + default: + goto tr571 + } + goto tr420 + st1021: + if p++; p == pe { + goto _test_eof1021 + } + st_case_1021: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr571 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr571 + } + goto tr420 + st1022: + if p++; p == pe { + goto _test_eof1022 + } + st_case_1022: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr571 + } + case data[p] >= 150: + goto tr571 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1023: + if p++; p == pe { + goto _test_eof1023 + } + st_case_1023: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr571 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1024: + if p++; p == pe { + goto _test_eof1024 + } + st_case_1024: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr571 + st1025: + if p++; p == pe { + goto _test_eof1025 + } + st_case_1025: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr571 + } + goto tr148 + st1026: + if p++; p == pe { + goto _test_eof1026 + } + st_case_1026: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case 
data[p] >= 171: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1027: + if p++; p == pe { + goto _test_eof1027 + } + st_case_1027: + switch data[p] { + case 160: + goto st1028 + case 161: + goto st1029 + case 162: + goto st168 + case 163: + goto st1030 + case 164: + goto st1031 + case 165: + goto st1032 + case 166: + goto st1033 + case 167: + goto st1034 + case 168: + goto st1035 + case 169: + goto st1036 + case 170: + goto st1037 + case 171: + goto st1038 + case 172: + goto st1039 + case 173: + goto st1040 + case 174: + goto st1041 + case 175: + goto st1042 + case 176: + goto st1043 + case 177: + goto st1044 + case 178: + goto st1045 + case 179: + goto st1046 + case 180: + goto st1047 + case 181: + goto st1048 + case 182: + goto st1049 + case 183: + goto st1050 + case 184: + goto st1051 + case 185: + goto st1052 + case 186: + goto st1053 + case 187: + goto st1054 + case 188: + goto st1055 + case 189: + goto st1056 + case 190: + goto st1057 + case 191: + goto st1058 + } + goto tr420 + st1028: + if p++; p == pe { + goto _test_eof1028 + } + st_case_1028: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1029: + if p++; p == pe { + goto _test_eof1029 + } + st_case_1029: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1030: + if p++; p == pe { + goto _test_eof1030 + } + st_case_1030: + if 163 <= data[p] { + goto tr571 + } + goto tr420 + st1031: + if p++; p == pe { + goto _test_eof1031 + } + st_case_1031: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr571 + st1032: + if p++; p == pe { + goto _test_eof1032 + } + st_case_1032: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + 
case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1033: + if p++; p == pe { + goto _test_eof1033 + } + st_case_1033: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr571 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr571 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr571 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st1034: + if p++; p == pe { + goto _test_eof1034 + } + st_case_1034: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr571 + st1035: + if p++; p == pe { + goto _test_eof1035 + } + st_case_1035: + if data[p] == 188 { + goto tr571 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + 
default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr571 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1036: + if p++; p == pe { + goto _test_eof1036 + } + st_case_1036: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr571 + st1037: + if p++; p == pe { + goto _test_eof1037 + } + st_case_1037: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr571 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1038: + if p++; p == pe { + goto _test_eof1038 + } + st_case_1038: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= 
data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1039: + if p++; p == pe { + goto _test_eof1039 + } + st_case_1039: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr571 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1040: + if p++; p == pe { + goto _test_eof1040 + } + st_case_1040: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr571 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr571 + } + default: + goto tr571 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr571 + } + default: + goto tr148 + } + default: + goto tr571 + } + goto tr420 + st1041: + if p++; p == pe { + goto _test_eof1041 + } + st_case_1041: + switch data[p] { + case 130: + goto tr571 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto 
tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr571 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1042: + if p++; p == pe { + goto _test_eof1042 + } + st_case_1042: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr571 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr571 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1043: + if p++; p == pe { + goto _test_eof1043 + } + st_case_1043: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr571 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1044: + if p++; p == pe { + goto _test_eof1044 + } + st_case_1044: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr571 + 
st1045: + if p++; p == pe { + goto _test_eof1045 + } + st_case_1045: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr571 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1046: + if p++; p == pe { + goto _test_eof1046 + } + st_case_1046: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr571 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr571 + } + default: + goto tr571 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1047: + if p++; p == pe { + goto _test_eof1047 + } + st_case_1047: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr571 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr571 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1048: + if p++; p == pe { + goto _test_eof1048 + } + st_case_1048: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + 
goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr571 + st1049: + if p++; p == pe { + goto _test_eof1049 + } + st_case_1049: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1050: + if p++; p == pe { + goto _test_eof1050 + } + st_case_1050: + switch data[p] { + case 138: + goto tr571 + case 150: + goto tr571 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr571 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr571 + } + goto tr420 + st1051: + if p++; p == pe { + goto _test_eof1051 + } + st_case_1051: + if data[p] == 177 { + goto tr571 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr571 + } + goto tr420 + st1052: + if p++; p == pe { + goto _test_eof1052 + } + st_case_1052: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr571 + } + goto tr420 + st1053: + if p++; p == pe { + goto _test_eof1053 + } + st_case_1053: + if data[p] == 177 { + goto tr571 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr571 + } + case data[p] >= 180: + goto tr571 + } + goto tr420 + st1054: + if p++; p 
== pe { + goto _test_eof1054 + } + st_case_1054: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr571 + } + goto tr420 + st1055: + if p++; p == pe { + goto _test_eof1055 + } + st_case_1055: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr571 + case 183: + goto tr571 + case 185: + goto tr571 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr571 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr571 + } + default: + goto tr421 + } + goto tr420 + st1056: + if p++; p == pe { + goto _test_eof1056 + } + st_case_1056: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1057: + if p++; p == pe { + goto _test_eof1057 + } + st_case_1057: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr571 + } + case data[p] >= 128: + goto tr571 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr571 + } + case data[p] >= 141: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1058: + if p++; p == pe { + goto _test_eof1058 + } + st_case_1058: + if data[p] == 134 { + goto tr571 + } + goto tr420 + st1059: + if p++; p == pe { + goto _test_eof1059 + } + st_case_1059: + switch data[p] { + case 128: + goto st1060 + case 129: + goto st1061 + case 130: + goto st1062 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1063 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st1064 + case 157: + goto st1065 + case 158: + goto st1066 + case 159: + goto st1067 
+ case 160: + goto st1068 + case 161: + goto st219 + case 162: + goto st1069 + case 163: + goto st221 + case 164: + goto st1070 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st1071 + case 169: + goto st1072 + case 170: + goto st1073 + case 172: + goto st1074 + case 173: + goto st1075 + case 174: + goto st1076 + case 175: + goto st1077 + case 176: + goto st1078 + case 177: + goto st640 + case 179: + goto st1079 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st1080 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st1060: + if p++; p == pe { + goto _test_eof1060 + } + st_case_1060: + if 171 <= data[p] && data[p] <= 190 { + goto tr571 + } + goto tr420 + st1061: + if p++; p == pe { + goto _test_eof1061 + } + st_case_1061: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr571 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr571 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1062: + if p++; p == pe { + goto _test_eof1062 + } + st_case_1062: + if data[p] == 143 { + goto tr571 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr571 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr571 + } + default: + goto tr421 + } + goto tr420 + st1063: + if p++; p == pe { + goto _test_eof1063 + } + st_case_1063: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + 
case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st1064: + if p++; p == pe { + goto _test_eof1064 + } + st_case_1064: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr571 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr571 + } + goto tr420 + st1065: + if p++; p == pe { + goto _test_eof1065 + } + st_case_1065: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr571 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1066: + if p++; p == pe { + goto _test_eof1066 + } + st_case_1066: + if 180 <= data[p] { + goto tr571 + } + goto tr420 + st1067: + if p++; p == pe { + goto _test_eof1067 + } + st_case_1067: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1068: + if p++; p == pe { + goto _test_eof1068 + } + st_case_1068: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr571 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1069: + if p++; p == pe { + goto _test_eof1069 + } + st_case_1069: + if data[p] == 169 { + goto tr571 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1070: + if p++; p == pe { + goto _test_eof1070 + } + st_case_1070: + switch { + case 
data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1071: + if p++; p == pe { + goto _test_eof1071 + } + st_case_1071: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1072: + if p++; p == pe { + goto _test_eof1072 + } + st_case_1072: + if data[p] == 191 { + goto tr571 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr571 + } + case data[p] >= 149: + goto tr571 + } + goto tr420 + st1073: + if p++; p == pe { + goto _test_eof1073 + } + st_case_1073: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr571 + } + default: + goto tr421 + } + goto tr420 + st1074: + if p++; p == pe { + goto _test_eof1074 + } + st_case_1074: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr571 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1075: + if p++; p == pe { + goto _test_eof1075 + } + st_case_1075: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr571 + st1076: + if p++; p == pe { + goto _test_eof1076 + } + st_case_1076: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr571 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] 
{ + goto tr148 + } + default: + goto tr421 + } + default: + goto tr571 + } + goto tr420 + st1077: + if p++; p == pe { + goto _test_eof1077 + } + st_case_1077: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr571 + } + goto tr148 + st1078: + if p++; p == pe { + goto _test_eof1078 + } + st_case_1078: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1079: + if p++; p == pe { + goto _test_eof1079 + } + st_case_1079: + if data[p] == 173 { + goto tr571 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr571 + } + case data[p] >= 144: + goto tr571 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr571 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr571 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1080: + if p++; p == pe { + goto _test_eof1080 + } + st_case_1080: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr571 + } + case data[p] >= 128: + goto tr571 + } + goto tr420 + st1081: + if p++; p == pe { + goto _test_eof1081 + } + st_case_1081: + switch data[p] { + case 128: + goto st1082 + case 129: + goto st1083 + case 130: + goto st241 + case 131: + goto st1084 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1085 + case 180: + goto st251 + case 181: + goto st1086 + case 182: + goto st253 + case 183: + goto st1087 + case 184: + goto st255 + } + goto tr420 + st1082: + if p++; p == pe { + goto _test_eof1082 + } + st_case_1082: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr571 + } + case data[p] > 174: + if 191 
<= data[p] { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1083: + if p++; p == pe { + goto _test_eof1083 + } + st_case_1083: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr571 + st1084: + if p++; p == pe { + goto _test_eof1084 + } + st_case_1084: + if 144 <= data[p] && data[p] <= 176 { + goto tr571 + } + goto tr420 + st1085: + if p++; p == pe { + goto _test_eof1085 + } + st_case_1085: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st1086: + if p++; p == pe { + goto _test_eof1086 + } + st_case_1086: + if data[p] == 191 { + goto tr571 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st1087: + if p++; p == pe { + goto _test_eof1087 + } + st_case_1087: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr571 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1088: + if p++; p == pe { + goto _test_eof1088 + } + st_case_1088: + switch data[p] { + case 128: + goto st1089 + case 130: + goto st1240 + case 131: + goto st1164 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr420 + st1089: + if p++; p == pe { + goto _test_eof1089 + } + st_case_1089: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] < 
177: + if 170 <= data[p] && data[p] <= 175 { + goto tr571 + } + case data[p] > 181: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr1049 + } + goto tr420 +tr1049: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4872 + st4872: + if p++; p == pe { + goto _test_eof4872 + } + st_case_4872: +//line segment_words_prod.go:34183 + switch data[p] { + case 95: + goto tr571 + case 194: + goto st1090 + case 204: + goto st1091 + case 205: + goto st1092 + case 210: + goto st1093 + case 214: + goto st1094 + case 215: + goto st1095 + case 216: + goto st1096 + case 217: + goto st1097 + case 219: + goto st1098 + case 220: + goto st1099 + case 221: + goto st1100 + case 222: + goto st1101 + case 223: + goto st1102 + case 224: + goto st1103 + case 225: + goto st1132 + case 226: + goto st1154 + case 227: + goto st1161 + case 234: + goto st1168 + case 239: + goto st1184 + case 240: + goto st1192 + case 243: + goto st1235 + } + goto tr4562 + st1090: + if p++; p == pe { + goto _test_eof1090 + } + st_case_1090: + if data[p] == 173 { + goto tr1049 + } + goto tr420 + st1091: + if p++; p == pe { + goto _test_eof1091 + } + st_case_1091: + if 128 <= data[p] { + goto tr1049 + } + goto tr2 + st1092: + if p++; p == pe { + goto _test_eof1092 + } + st_case_1092: + if 176 <= data[p] { + goto tr420 + } + goto tr1049 + st1093: + if p++; p == pe { + goto _test_eof1093 + } + st_case_1093: + if 131 <= data[p] && data[p] <= 137 { + goto tr1049 + } + goto tr420 + st1094: + if p++; p == pe { + goto _test_eof1094 + } + st_case_1094: + if data[p] == 191 { + goto tr1049 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr1049 + } + goto tr420 + st1095: + if p++; p == pe { + goto _test_eof1095 + } + st_case_1095: + if data[p] == 135 { + goto tr1049 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr1049 + } + case data[p] >= 129: + goto tr1049 + } + goto tr420 + st1096: + if 
p++; p == pe { + goto _test_eof1096 + } + st_case_1096: + if data[p] == 156 { + goto tr1049 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1097: + if p++; p == pe { + goto _test_eof1097 + } + st_case_1097: + if data[p] == 176 { + goto tr1049 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr1049 + } + goto tr420 + st1098: + if p++; p == pe { + goto _test_eof1098 + } + st_case_1098: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr1049 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr1049 + } + case data[p] >= 167: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1099: + if p++; p == pe { + goto _test_eof1099 + } + st_case_1099: + switch data[p] { + case 143: + goto tr1049 + case 145: + goto tr1049 + } + if 176 <= data[p] { + goto tr1049 + } + goto tr420 + st1100: + if p++; p == pe { + goto _test_eof1100 + } + st_case_1100: + if 139 <= data[p] { + goto tr420 + } + goto tr1049 + st1101: + if p++; p == pe { + goto _test_eof1101 + } + st_case_1101: + if 166 <= data[p] && data[p] <= 176 { + goto tr1049 + } + goto tr420 + st1102: + if p++; p == pe { + goto _test_eof1102 + } + st_case_1102: + if 171 <= data[p] && data[p] <= 179 { + goto tr1049 + } + goto tr420 + st1103: + if p++; p == pe { + goto _test_eof1103 + } + st_case_1103: + switch data[p] { + case 160: + goto st1104 + case 161: + goto st1105 + case 163: + goto st1106 + case 164: + goto st1107 + case 165: + goto st1108 + case 167: + goto st1110 + case 169: + goto st1111 + case 171: + goto st1112 + case 173: + goto st1114 + case 174: + goto st1115 + case 175: + goto st1116 + case 176: + goto st1117 + case 177: + goto st1118 + case 179: + goto st1119 + case 180: + goto st1120 + case 181: + goto st1121 + case 182: + goto st1122 + case 183: + goto st1123 + case 184: + goto st1124 + case 185: + goto st1125 + 
case 186: + goto st1126 + case 187: + goto st1127 + case 188: + goto st1128 + case 189: + goto st1129 + case 190: + goto st1130 + case 191: + goto st1131 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st1113 + } + case data[p] >= 166: + goto st1109 + } + goto tr420 + st1104: + if p++; p == pe { + goto _test_eof1104 + } + st_case_1104: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr1049 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr1049 + } + case data[p] >= 165: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1105: + if p++; p == pe { + goto _test_eof1105 + } + st_case_1105: + if 153 <= data[p] && data[p] <= 155 { + goto tr1049 + } + goto tr420 + st1106: + if p++; p == pe { + goto _test_eof1106 + } + st_case_1106: + if 163 <= data[p] { + goto tr1049 + } + goto tr420 + st1107: + if p++; p == pe { + goto _test_eof1107 + } + st_case_1107: + if data[p] == 189 { + goto tr420 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr420 + } + goto tr1049 + st1108: + if p++; p == pe { + goto _test_eof1108 + } + st_case_1108: + if data[p] == 144 { + goto tr420 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 152: + goto tr420 + } + goto tr1049 + st1109: + if p++; p == pe { + goto _test_eof1109 + } + st_case_1109: + if data[p] == 188 { + goto tr1049 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1049 + } + case data[p] >= 129: + goto tr1049 + } + goto tr420 + st1110: + if p++; p == pe { + goto _test_eof1110 + } + st_case_1110: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 152: + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + 
st1111: + if p++; p == pe { + goto _test_eof1111 + } + st_case_1111: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr420 + } + case data[p] >= 131: + goto tr420 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + st1112: + if p++; p == pe { + goto _test_eof1112 + } + st_case_1112: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + goto tr1049 + st1113: + if p++; p == pe { + goto _test_eof1113 + } + st_case_1113: + if data[p] == 188 { + goto tr1049 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr1049 + } + case data[p] >= 129: + goto tr1049 + } + goto tr420 + st1114: + if p++; p == pe { + goto _test_eof1114 + } + st_case_1114: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr1049 + } + case data[p] >= 150: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1115: + if p++; p == pe { + goto _test_eof1115 + } + st_case_1115: + if data[p] == 130 { + goto tr1049 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr1049 + } + goto tr420 + st1116: + if p++; p == pe { + goto _test_eof1116 + } + st_case_1116: + if data[p] == 151 { + goto tr1049 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr1049 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1117: + if p++; p == pe { + goto _test_eof1117 + } + 
st_case_1117: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1118: + if p++; p == pe { + goto _test_eof1118 + } + st_case_1118: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + st1119: + if p++; p == pe { + goto _test_eof1119 + } + st_case_1119: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr1049 + } + case data[p] >= 149: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1120: + if p++; p == pe { + goto _test_eof1120 + } + st_case_1120: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1049 + } + case data[p] >= 129: + goto tr1049 + } + goto tr420 + st1121: + if p++; p == pe { + goto _test_eof1121 + } + st_case_1121: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + st1122: + if p++; p == pe { + goto _test_eof1122 + } + st_case_1122: + if 130 <= data[p] && data[p] <= 131 { + goto tr1049 + } + goto tr420 + st1123: + if p++; p == pe { + goto _test_eof1123 + } + st_case_1123: + switch data[p] { + case 138: + goto tr1049 + case 150: + goto tr1049 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr1049 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1124: + if p++; p == pe { + goto _test_eof1124 + 
} + st_case_1124: + if data[p] == 177 { + goto tr1049 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr1049 + } + goto tr420 + st1125: + if p++; p == pe { + goto _test_eof1125 + } + st_case_1125: + if 135 <= data[p] && data[p] <= 142 { + goto tr1049 + } + goto tr420 + st1126: + if p++; p == pe { + goto _test_eof1126 + } + st_case_1126: + if data[p] == 177 { + goto tr1049 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr1049 + } + case data[p] >= 180: + goto tr1049 + } + goto tr420 + st1127: + if p++; p == pe { + goto _test_eof1127 + } + st_case_1127: + if 136 <= data[p] && data[p] <= 141 { + goto tr1049 + } + goto tr420 + st1128: + if p++; p == pe { + goto _test_eof1128 + } + st_case_1128: + switch data[p] { + case 181: + goto tr1049 + case 183: + goto tr1049 + case 185: + goto tr1049 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr1049 + } + case data[p] >= 152: + goto tr1049 + } + goto tr420 + st1129: + if p++; p == pe { + goto _test_eof1129 + } + st_case_1129: + if 177 <= data[p] && data[p] <= 191 { + goto tr1049 + } + goto tr420 + st1130: + if p++; p == pe { + goto _test_eof1130 + } + st_case_1130: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr1049 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr1049 + } + case data[p] >= 141: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1131: + if p++; p == pe { + goto _test_eof1131 + } + st_case_1131: + if data[p] == 134 { + goto tr1049 + } + goto tr420 + st1132: + if p++; p == pe { + goto _test_eof1132 + } + st_case_1132: + switch data[p] { + case 128: + goto st1133 + case 129: + goto st1134 + case 130: + goto st1135 + case 141: + goto st1136 + case 156: + goto st1137 + case 157: + goto st1138 + case 158: + goto st1139 + case 159: + goto st1140 + case 160: + goto st1141 + case 162: + goto st1142 + case 164: + goto st1143 + case 168: + 
goto st1144 + case 169: + goto st1145 + case 170: + goto st1146 + case 172: + goto st1147 + case 173: + goto st1148 + case 174: + goto st1149 + case 175: + goto st1150 + case 176: + goto st1151 + case 179: + goto st1152 + case 183: + goto st1153 + } + goto tr420 + st1133: + if p++; p == pe { + goto _test_eof1133 + } + st_case_1133: + if 171 <= data[p] && data[p] <= 190 { + goto tr1049 + } + goto tr420 + st1134: + if p++; p == pe { + goto _test_eof1134 + } + st_case_1134: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr1049 + } + case data[p] >= 150: + goto tr1049 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr1049 + } + case data[p] >= 167: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1135: + if p++; p == pe { + goto _test_eof1135 + } + st_case_1135: + if data[p] == 143 { + goto tr1049 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr1049 + } + case data[p] >= 130: + goto tr1049 + } + goto tr420 + st1136: + if p++; p == pe { + goto _test_eof1136 + } + st_case_1136: + if 157 <= data[p] && data[p] <= 159 { + goto tr1049 + } + goto tr420 + st1137: + if p++; p == pe { + goto _test_eof1137 + } + st_case_1137: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr1049 + } + case data[p] >= 146: + goto tr1049 + } + goto tr420 + st1138: + if p++; p == pe { + goto _test_eof1138 + } + st_case_1138: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr1049 + } + case data[p] >= 146: + goto tr1049 + } + goto tr420 + st1139: + if p++; p == pe { + goto _test_eof1139 + } + st_case_1139: + if 180 <= data[p] { + goto tr1049 + } + goto tr420 + st1140: + if p++; p == pe { + goto _test_eof1140 + } + st_case_1140: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr420 + } + case data[p] >= 148: + goto tr420 + } + goto tr1049 + st1141: + 
if p++; p == pe { + goto _test_eof1141 + } + st_case_1141: + if 139 <= data[p] && data[p] <= 142 { + goto tr1049 + } + goto tr420 + st1142: + if p++; p == pe { + goto _test_eof1142 + } + st_case_1142: + if data[p] == 169 { + goto tr1049 + } + goto tr420 + st1143: + if p++; p == pe { + goto _test_eof1143 + } + st_case_1143: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr1049 + } + case data[p] >= 160: + goto tr1049 + } + goto tr420 + st1144: + if p++; p == pe { + goto _test_eof1144 + } + st_case_1144: + if 151 <= data[p] && data[p] <= 155 { + goto tr1049 + } + goto tr420 + st1145: + if p++; p == pe { + goto _test_eof1145 + } + st_case_1145: + if data[p] == 191 { + goto tr1049 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr1049 + } + case data[p] >= 149: + goto tr1049 + } + goto tr420 + st1146: + if p++; p == pe { + goto _test_eof1146 + } + st_case_1146: + if 176 <= data[p] && data[p] <= 190 { + goto tr1049 + } + goto tr420 + st1147: + if p++; p == pe { + goto _test_eof1147 + } + st_case_1147: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1148: + if p++; p == pe { + goto _test_eof1148 + } + st_case_1148: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + goto tr1049 + st1149: + if p++; p == pe { + goto _test_eof1149 + } + st_case_1149: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1150: + if p++; p == pe { + goto _test_eof1150 + } + st_case_1150: + if 166 <= data[p] && data[p] <= 179 { + goto tr1049 + } + goto tr420 + st1151: + if p++; p == pe { + goto _test_eof1151 + } + st_case_1151: + if 164 <= data[p] && data[p] <= 183 { + goto tr1049 + } + goto tr420 + st1152: + if p++; p == pe { + goto _test_eof1152 + } + st_case_1152: + if data[p] == 173 { + 
goto tr1049 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr1049 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr1049 + } + case data[p] >= 178: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1153: + if p++; p == pe { + goto _test_eof1153 + } + st_case_1153: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1154: + if p++; p == pe { + goto _test_eof1154 + } + st_case_1154: + switch data[p] { + case 128: + goto st1155 + case 129: + goto st1156 + case 131: + goto st1157 + case 179: + goto st1158 + case 181: + goto st1159 + case 183: + goto st1160 + } + goto tr420 + st1155: + if p++; p == pe { + goto _test_eof1155 + } + st_case_1155: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr1049 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + default: + goto tr1049 + } + goto tr420 + st1156: + if p++; p == pe { + goto _test_eof1156 + } + st_case_1156: + if data[p] == 165 { + goto tr420 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr1049 + } + default: + goto tr420 + } + goto tr571 + st1157: + if p++; p == pe { + goto _test_eof1157 + } + st_case_1157: + if 144 <= data[p] && data[p] <= 176 { + goto tr1049 + } + goto tr420 + st1158: + if p++; p == pe { + goto _test_eof1158 + } + st_case_1158: + if 175 <= data[p] && data[p] <= 177 { + goto tr1049 + } + goto tr420 + st1159: + if p++; p == pe { + goto _test_eof1159 + } + st_case_1159: + if data[p] == 191 { + goto tr1049 + } + goto tr420 + st1160: + if p++; p == pe { + goto _test_eof1160 + } + st_case_1160: + if 160 <= data[p] && data[p] <= 191 { + goto tr1049 + } + goto tr420 + st1161: + if p++; p == 
pe { + goto _test_eof1161 + } + st_case_1161: + switch data[p] { + case 128: + goto st1162 + case 130: + goto st1163 + case 131: + goto st1164 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr420 + st1162: + if p++; p == pe { + goto _test_eof1162 + } + st_case_1162: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 181 { + goto tr1049 + } + case data[p] >= 170: + goto tr1049 + } + goto tr420 + st1163: + if p++; p == pe { + goto _test_eof1163 + } + st_case_1163: + switch { + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + case data[p] >= 153: + goto tr1049 + } + goto tr420 + st1164: + if p++; p == pe { + goto _test_eof1164 + } + st_case_1164: + if data[p] == 187 { + goto tr2 + } + if 192 <= data[p] { + goto tr2 + } + goto tr1049 + st1165: + if p++; p == pe { + goto _test_eof1165 + } + st_case_1165: + if 176 <= data[p] && data[p] <= 191 { + goto tr1049 + } + goto tr2 + st1166: + if p++; p == pe { + goto _test_eof1166 + } + st_case_1166: + if 144 <= data[p] && data[p] <= 190 { + goto tr1049 + } + goto tr2 + st1167: + if p++; p == pe { + goto _test_eof1167 + } + st_case_1167: + if 152 <= data[p] { + goto tr2 + } + goto tr1049 + st1168: + if p++; p == pe { + goto _test_eof1168 + } + st_case_1168: + switch data[p] { + case 153: + goto st1169 + case 154: + goto st1170 + case 155: + goto st1171 + case 160: + goto st1172 + case 162: + goto st1173 + case 163: + goto st1174 + case 164: + goto st1175 + case 165: + goto st1176 + case 166: + goto st1177 + case 167: + goto st1178 + case 168: + goto st1179 + case 169: + goto st1180 + case 170: + goto st1181 + case 171: + goto st1182 + case 175: + goto st1183 + } + goto tr420 + st1169: + if p++; p == pe { + goto _test_eof1169 + } + st_case_1169: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr1049 + } + case data[p] >= 175: + goto tr1049 + } + goto tr420 + st1170: + if p++; p == pe { + goto 
_test_eof1170 + } + st_case_1170: + if 158 <= data[p] && data[p] <= 159 { + goto tr1049 + } + goto tr420 + st1171: + if p++; p == pe { + goto _test_eof1171 + } + st_case_1171: + if 176 <= data[p] && data[p] <= 177 { + goto tr1049 + } + goto tr420 + st1172: + if p++; p == pe { + goto _test_eof1172 + } + st_case_1172: + switch data[p] { + case 130: + goto tr1049 + case 134: + goto tr1049 + case 139: + goto tr1049 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr1049 + } + goto tr420 + st1173: + if p++; p == pe { + goto _test_eof1173 + } + st_case_1173: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1174: + if p++; p == pe { + goto _test_eof1174 + } + st_case_1174: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 133: + goto tr420 + } + goto tr1049 + st1175: + if p++; p == pe { + goto _test_eof1175 + } + st_case_1175: + if 166 <= data[p] && data[p] <= 173 { + goto tr1049 + } + goto tr420 + st1176: + if p++; p == pe { + goto _test_eof1176 + } + st_case_1176: + if 135 <= data[p] && data[p] <= 147 { + goto tr1049 + } + goto tr420 + st1177: + if p++; p == pe { + goto _test_eof1177 + } + st_case_1177: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1178: + if p++; p == pe { + goto _test_eof1178 + } + st_case_1178: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto tr1049 + st1179: + if p++; p == pe { + goto _test_eof1179 + } + st_case_1179: + if 169 <= data[p] && data[p] <= 182 { + goto tr1049 + } + goto tr420 + st1180: + if p++; p == pe { + goto _test_eof1180 + } + st_case_1180: + if data[p] == 131 { + goto tr1049 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr1049 + } + case data[p] >= 140: + goto tr1049 + } + goto tr420 + st1181: + if p++; p == pe { + goto 
_test_eof1181 + } + st_case_1181: + if data[p] == 176 { + goto tr1049 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr1049 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1182: + if p++; p == pe { + goto _test_eof1182 + } + st_case_1182: + if data[p] == 129 { + goto tr1049 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr1049 + } + case data[p] >= 171: + goto tr1049 + } + goto tr420 + st1183: + if p++; p == pe { + goto _test_eof1183 + } + st_case_1183: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr1049 + } + case data[p] >= 163: + goto tr1049 + } + goto tr420 + st1184: + if p++; p == pe { + goto _test_eof1184 + } + st_case_1184: + switch data[p] { + case 172: + goto st1185 + case 184: + goto st1186 + case 185: + goto st1187 + case 187: + goto st1159 + case 188: + goto st1188 + case 189: + goto st1189 + case 190: + goto st1190 + case 191: + goto st1191 + } + goto tr420 + st1185: + if p++; p == pe { + goto _test_eof1185 + } + st_case_1185: + if data[p] == 158 { + goto tr1049 + } + goto tr420 + st1186: + if p++; p == pe { + goto _test_eof1186 + } + st_case_1186: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr1049 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr1049 + } + goto tr420 + st1187: + if p++; p == pe { + goto _test_eof1187 + } + st_case_1187: + if 141 <= data[p] && data[p] <= 143 { + goto tr571 + } + goto tr2 + st1188: + if p++; p == pe { + goto _test_eof1188 + } + st_case_1188: + if data[p] == 191 { + goto tr571 + } + goto tr2 + st1189: + if p++; p == pe { + goto _test_eof1189 + } + st_case_1189: + if 166 <= data[p] { + goto tr1049 + } + goto tr420 + st1190: + if p++; p == pe { + goto _test_eof1190 + } + st_case_1190: + if 160 <= data[p] { + goto tr420 + } + goto tr1049 + st1191: 
+ if p++; p == pe { + goto _test_eof1191 + } + st_case_1191: + if 185 <= data[p] && data[p] <= 187 { + goto tr1049 + } + goto tr420 + st1192: + if p++; p == pe { + goto _test_eof1192 + } + st_case_1192: + switch data[p] { + case 144: + goto st1193 + case 145: + goto st1199 + case 150: + goto st1218 + case 155: + goto st1223 + case 157: + goto st1226 + case 158: + goto st1233 + } + goto tr420 + st1193: + if p++; p == pe { + goto _test_eof1193 + } + st_case_1193: + switch data[p] { + case 135: + goto st1194 + case 139: + goto st1195 + case 141: + goto st1196 + case 168: + goto st1197 + case 171: + goto st1198 + } + goto tr420 + st1194: + if p++; p == pe { + goto _test_eof1194 + } + st_case_1194: + if data[p] == 189 { + goto tr1049 + } + goto tr420 + st1195: + if p++; p == pe { + goto _test_eof1195 + } + st_case_1195: + if data[p] == 160 { + goto tr1049 + } + goto tr420 + st1196: + if p++; p == pe { + goto _test_eof1196 + } + st_case_1196: + if 182 <= data[p] && data[p] <= 186 { + goto tr1049 + } + goto tr420 + st1197: + if p++; p == pe { + goto _test_eof1197 + } + st_case_1197: + if data[p] == 191 { + goto tr1049 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1049 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr1049 + } + case data[p] >= 140: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1198: + if p++; p == pe { + goto _test_eof1198 + } + st_case_1198: + if 165 <= data[p] && data[p] <= 166 { + goto tr1049 + } + goto tr420 + st1199: + if p++; p == pe { + goto _test_eof1199 + } + st_case_1199: + switch data[p] { + case 128: + goto st1200 + case 129: + goto st1201 + case 130: + goto st1202 + case 132: + goto st1203 + case 133: + goto st1204 + case 134: + goto st1205 + case 135: + goto st1206 + case 136: + goto st1207 + case 139: + goto st1208 + case 140: + goto st1209 + case 141: + goto st1210 + case 146: + goto st1211 + case 147: + goto st1212 + case 
150: + goto st1213 + case 151: + goto st1214 + case 152: + goto st1211 + case 153: + goto st1215 + case 154: + goto st1216 + case 156: + goto st1217 + } + goto tr420 + st1200: + if p++; p == pe { + goto _test_eof1200 + } + st_case_1200: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1201: + if p++; p == pe { + goto _test_eof1201 + } + st_case_1201: + if 135 <= data[p] && data[p] <= 190 { + goto tr420 + } + goto tr1049 + st1202: + if p++; p == pe { + goto _test_eof1202 + } + st_case_1202: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + st1203: + if p++; p == pe { + goto _test_eof1203 + } + st_case_1203: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1204: + if p++; p == pe { + goto _test_eof1204 + } + st_case_1204: + if data[p] == 179 { + goto tr1049 + } + goto tr420 + st1205: + if p++; p == pe { + goto _test_eof1205 + } + st_case_1205: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1206: + if p++; p == pe { + goto _test_eof1206 + } + st_case_1206: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto tr1049 + st1207: + if p++; p == pe { + goto _test_eof1207 + } + st_case_1207: + if 172 <= data[p] && data[p] <= 183 { + goto tr1049 + } + goto tr420 + st1208: + if p++; p == pe { + goto _test_eof1208 + } + st_case_1208: + if 159 <= data[p] && data[p] <= 170 { + goto tr1049 + } + goto tr420 + st1209: + if p++; p == pe { + goto _test_eof1209 + } + st_case_1209: + if data[p] == 188 { + goto tr1049 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr1049 + } + case 
data[p] >= 128: + goto tr1049 + } + goto tr420 + st1210: + if p++; p == pe { + goto _test_eof1210 + } + st_case_1210: + if data[p] == 151 { + goto tr1049 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr1049 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr1049 + } + default: + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1211: + if p++; p == pe { + goto _test_eof1211 + } + st_case_1211: + if 176 <= data[p] { + goto tr1049 + } + goto tr420 + st1212: + if p++; p == pe { + goto _test_eof1212 + } + st_case_1212: + if 132 <= data[p] { + goto tr420 + } + goto tr1049 + st1213: + if p++; p == pe { + goto _test_eof1213 + } + st_case_1213: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr1049 + } + case data[p] >= 175: + goto tr1049 + } + goto tr420 + st1214: + if p++; p == pe { + goto _test_eof1214 + } + st_case_1214: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto tr1049 + st1215: + if p++; p == pe { + goto _test_eof1215 + } + st_case_1215: + if 129 <= data[p] { + goto tr420 + } + goto tr1049 + st1216: + if p++; p == pe { + goto _test_eof1216 + } + st_case_1216: + if 171 <= data[p] && data[p] <= 183 { + goto tr1049 + } + goto tr420 + st1217: + if p++; p == pe { + goto _test_eof1217 + } + st_case_1217: + if 157 <= data[p] && data[p] <= 171 { + goto tr1049 + } + goto tr420 + st1218: + if p++; p == pe { + goto _test_eof1218 + } + st_case_1218: + switch data[p] { + case 171: + goto st1219 + case 172: + goto st1220 + case 189: + goto st1221 + case 190: + goto st1222 + } + goto tr420 + st1219: + if p++; p == pe { + goto _test_eof1219 + } + st_case_1219: + if 176 <= data[p] && data[p] <= 180 { + goto tr1049 + } + goto tr420 + 
st1220: + if p++; p == pe { + goto _test_eof1220 + } + st_case_1220: + if 176 <= data[p] && data[p] <= 182 { + goto tr1049 + } + goto tr420 + st1221: + if p++; p == pe { + goto _test_eof1221 + } + st_case_1221: + if 145 <= data[p] && data[p] <= 190 { + goto tr1049 + } + goto tr420 + st1222: + if p++; p == pe { + goto _test_eof1222 + } + st_case_1222: + if 143 <= data[p] && data[p] <= 146 { + goto tr1049 + } + goto tr420 + st1223: + if p++; p == pe { + goto _test_eof1223 + } + st_case_1223: + switch data[p] { + case 128: + goto st1224 + case 178: + goto st1225 + } + goto tr420 + st1224: + if p++; p == pe { + goto _test_eof1224 + } + st_case_1224: + if data[p] == 128 { + goto tr1049 + } + goto tr2 + st1225: + if p++; p == pe { + goto _test_eof1225 + } + st_case_1225: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr1049 + } + case data[p] >= 157: + goto tr1049 + } + goto tr420 + st1226: + if p++; p == pe { + goto _test_eof1226 + } + st_case_1226: + switch data[p] { + case 133: + goto st1227 + case 134: + goto st1228 + case 137: + goto st1229 + case 168: + goto st1230 + case 169: + goto st1231 + case 170: + goto st1232 + } + goto tr420 + st1227: + if p++; p == pe { + goto _test_eof1227 + } + st_case_1227: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr1049 + } + case data[p] >= 165: + goto tr1049 + } + goto tr420 + st1228: + if p++; p == pe { + goto _test_eof1228 + } + st_case_1228: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr1049 + st1229: + if p++; p == pe { + goto _test_eof1229 + } + st_case_1229: + if 130 <= data[p] && data[p] <= 132 { + goto tr1049 + } + goto tr420 + st1230: + if p++; p == pe { + goto _test_eof1230 + } + st_case_1230: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr1049 + } + case data[p] >= 128: + goto tr1049 + } + goto tr420 + st1231: + 
if p++; p == pe { + goto _test_eof1231 + } + st_case_1231: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr1049 + st1232: + if p++; p == pe { + goto _test_eof1232 + } + st_case_1232: + if data[p] == 132 { + goto tr1049 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr1049 + } + case data[p] >= 155: + goto tr1049 + } + goto tr420 + st1233: + if p++; p == pe { + goto _test_eof1233 + } + st_case_1233: + if data[p] == 163 { + goto st1234 + } + goto tr420 + st1234: + if p++; p == pe { + goto _test_eof1234 + } + st_case_1234: + if 144 <= data[p] && data[p] <= 150 { + goto tr1049 + } + goto tr420 + st1235: + if p++; p == pe { + goto _test_eof1235 + } + st_case_1235: + if data[p] == 160 { + goto st1236 + } + goto tr420 + st1236: + if p++; p == pe { + goto _test_eof1236 + } + st_case_1236: + switch data[p] { + case 128: + goto st1237 + case 129: + goto st1238 + case 132: + goto st1091 + case 135: + goto st1092 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1239 + } + goto tr420 + st1237: + if p++; p == pe { + goto _test_eof1237 + } + st_case_1237: + if data[p] == 129 { + goto tr1049 + } + if 160 <= data[p] { + goto tr1049 + } + goto tr420 + st1238: + if p++; p == pe { + goto _test_eof1238 + } + st_case_1238: + if 192 <= data[p] { + goto tr420 + } + goto tr1049 + st1239: + if p++; p == pe { + goto _test_eof1239 + } + st_case_1239: + goto tr1049 + st1240: + if p++; p == pe { + goto _test_eof1240 + } + st_case_1240: + switch { + case data[p] < 155: + if 153 <= data[p] && data[p] <= 154 { + goto tr571 + } + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st1241: + if p++; p == pe { + goto _test_eof1241 + } + st_case_1241: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st1242 + case 154: + goto 
st1243 + case 155: + goto st1244 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1245 + case 161: + goto st272 + case 162: + goto st1246 + case 163: + goto st1247 + case 164: + goto st1248 + case 165: + goto st1249 + case 166: + goto st1250 + case 167: + goto st1251 + case 168: + goto st1252 + case 169: + goto st1253 + case 170: + goto st1254 + case 171: + goto st1255 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1256 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st1242: + if p++; p == pe { + goto _test_eof1242 + } + st_case_1242: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1243: + if p++; p == pe { + goto _test_eof1243 + } + st_case_1243: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr571 + } + goto tr420 + st1244: + if p++; p == pe { + goto _test_eof1244 + } + st_case_1244: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr571 + } + goto tr148 + st1245: + if p++; p == pe { + goto _test_eof1245 + } + st_case_1245: + switch data[p] { + case 130: + goto tr571 + case 134: + goto tr571 + case 139: + goto tr571 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr571 + } + goto tr148 + st1246: + if p++; p == pe { + goto _test_eof1246 + } + st_case_1246: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr571 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1247: + if p++; p == pe { + goto _test_eof1247 + } + 
st_case_1247: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr571 + st1248: + if p++; p == pe { + goto _test_eof1248 + } + st_case_1248: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1249: + if p++; p == pe { + goto _test_eof1249 + } + st_case_1249: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr571 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st1250: + if p++; p == pe { + goto _test_eof1250 + } + st_case_1250: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1251: + if p++; p == pe { + goto _test_eof1251 + } + st_case_1251: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1252: + if p++; p == pe { + goto _test_eof1252 + } + st_case_1252: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr571 + } + case data[p] >= 128: + goto tr148 
+ } + goto tr420 + st1253: + if p++; p == pe { + goto _test_eof1253 + } + st_case_1253: + if data[p] == 131 { + goto tr571 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr571 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr571 + } + goto tr420 + st1254: + if p++; p == pe { + goto _test_eof1254 + } + st_case_1254: + if data[p] == 176 { + goto tr571 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr571 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1255: + if p++; p == pe { + goto _test_eof1255 + } + st_case_1255: + if data[p] == 129 { + goto tr571 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr571 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr571 + } + goto tr420 + st1256: + if p++; p == pe { + goto _test_eof1256 + } + st_case_1256: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1257: + if p++; p == pe { + goto _test_eof1257 + } + st_case_1257: + switch data[p] { + case 172: + goto st1258 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1259 + case 185: + goto st674 + case 187: + goto st1260 + case 188: + goto st676 + case 189: + goto st1261 + case 190: + goto st1262 + case 191: + goto st1263 + } + if 176 <= data[p] && data[p] <= 186 { + goto 
st145 + } + goto tr420 + st1258: + if p++; p == pe { + goto _test_eof1258 + } + st_case_1258: + switch data[p] { + case 158: + goto tr571 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st1259: + if p++; p == pe { + goto _test_eof1259 + } + st_case_1259: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr571 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1260: + if p++; p == pe { + goto _test_eof1260 + } + st_case_1260: + if data[p] == 191 { + goto tr571 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st1261: + if p++; p == pe { + goto _test_eof1261 + } + st_case_1261: + switch { + case data[p] > 154: + if 166 <= data[p] { + goto tr1049 + } + case data[p] >= 129: + goto tr148 + } + goto tr2 + st1262: + if p++; p == pe { + goto _test_eof1262 + } + st_case_1262: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr571 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr1049 + st1263: + if p++; p == pe { + goto _test_eof1263 + } + st_case_1263: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr571 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1264: + if p++; p == pe { + goto _test_eof1264 + } + st_case_1264: + switch data[p] { + case 144: + goto st1265 + case 145: + goto st1271 + 
case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1290 + case 155: + goto st1295 + case 157: + goto st1297 + case 158: + goto st1304 + case 159: + goto st403 + } + goto tr420 + st1265: + if p++; p == pe { + goto _test_eof1265 + } + st_case_1265: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st1266 + case 138: + goto st313 + case 139: + goto st1267 + case 140: + goto st315 + case 141: + goto st1268 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st1269 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st1270 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st1266: + if p++; p == pe { + goto _test_eof1266 + } + st_case_1266: + if data[p] == 189 { + goto tr571 + } + goto tr420 + st1267: + if p++; p == pe { + goto _test_eof1267 + } + st_case_1267: + if data[p] == 160 { + goto tr571 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st1268: + if p++; p == pe { + goto _test_eof1268 + } + st_case_1268: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st1269: + if p++; p == pe { + goto _test_eof1269 + } + st_case_1269: + switch data[p] { + case 128: + goto tr148 
+ case 191: + goto tr571 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr571 + } + default: + goto tr571 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr571 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1270: + if p++; p == pe { + goto _test_eof1270 + } + st_case_1270: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1271: + if p++; p == pe { + goto _test_eof1271 + } + st_case_1271: + switch data[p] { + case 128: + goto st1272 + case 129: + goto st1273 + case 130: + goto st1274 + case 131: + goto st691 + case 132: + goto st1275 + case 133: + goto st1276 + case 134: + goto st1277 + case 135: + goto st1278 + case 136: + goto st1279 + case 138: + goto st348 + case 139: + goto st1280 + case 140: + goto st1281 + case 141: + goto st1282 + case 146: + goto st1283 + case 147: + goto st1284 + case 150: + goto st1285 + case 151: + goto st1286 + case 152: + goto st1283 + case 153: + goto st1287 + case 154: + goto st1288 + case 155: + goto st538 + case 156: + goto st1289 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st1272: + if p++; p == pe { + goto _test_eof1272 + } + st_case_1272: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr571 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1273: + if p++; p == pe { + goto _test_eof1273 + } + st_case_1273: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + 
if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr571 + st1274: + if p++; p == pe { + goto _test_eof1274 + } + st_case_1274: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr571 + st1275: + if p++; p == pe { + goto _test_eof1275 + } + st_case_1275: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr571 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1276: + if p++; p == pe { + goto _test_eof1276 + } + st_case_1276: + switch data[p] { + case 179: + goto tr571 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st1277: + if p++; p == pe { + goto _test_eof1277 + } + st_case_1277: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr571 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1278: + if p++; p == pe { + goto _test_eof1278 + } + st_case_1278: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr571 + st1279: + if p++; p == pe { + goto _test_eof1279 + } + st_case_1279: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1280: + 
if p++; p == pe { + goto _test_eof1280 + } + st_case_1280: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr571 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st1281: + if p++; p == pe { + goto _test_eof1281 + } + st_case_1281: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr571 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr571 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1282: + if p++; p == pe { + goto _test_eof1282 + } + st_case_1282: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr571 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr571 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr571 + } + default: + goto tr571 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr571 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1283: + if p++; p == pe { + goto _test_eof1283 + } + st_case_1283: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1284: + if p++; p == pe { + goto _test_eof1284 + } + st_case_1284: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && 
data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1285: + if p++; p == pe { + goto _test_eof1285 + } + st_case_1285: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr571 + } + default: + goto tr571 + } + goto tr420 + st1286: + if p++; p == pe { + goto _test_eof1286 + } + st_case_1286: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr571 + st1287: + if p++; p == pe { + goto _test_eof1287 + } + st_case_1287: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr571 + st1288: + if p++; p == pe { + goto _test_eof1288 + } + st_case_1288: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1289: + if p++; p == pe { + goto _test_eof1289 + } + st_case_1289: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr571 + } + goto tr420 + st1290: + if p++; p == pe { + goto _test_eof1290 + } + st_case_1290: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st1291 + case 172: + goto st1292 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1293 + case 190: + goto st1294 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st1291: + if p++; p == pe { + goto _test_eof1291 + } + st_case_1291: + switch { + case data[p] > 173: + if 176 <= 
data[p] && data[p] <= 180 { + goto tr571 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st1292: + if p++; p == pe { + goto _test_eof1292 + } + st_case_1292: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr571 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1293: + if p++; p == pe { + goto _test_eof1293 + } + st_case_1293: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st1294: + if p++; p == pe { + goto _test_eof1294 + } + st_case_1294: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr571 + } + goto tr420 + st1295: + if p++; p == pe { + goto _test_eof1295 + } + st_case_1295: + switch data[p] { + case 128: + goto st1224 + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st1296 + } + goto tr420 + st1296: + if p++; p == pe { + goto _test_eof1296 + } + st_case_1296: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr571 + } + case data[p] >= 157: + goto tr571 + } + default: + goto tr148 + } + goto tr420 + st1297: + if p++; p == pe { + goto _test_eof1297 + } + st_case_1297: + switch data[p] { + case 133: + goto st1298 + case 134: + goto st1299 + case 137: + goto st1300 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st1301 + case 169: + goto st1302 + case 170: + goto st1303 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st1298: + if p++; p == pe 
{ + goto _test_eof1298 + } + st_case_1298: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr571 + } + case data[p] >= 165: + goto tr571 + } + goto tr420 + st1299: + if p++; p == pe { + goto _test_eof1299 + } + st_case_1299: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr571 + st1300: + if p++; p == pe { + goto _test_eof1300 + } + st_case_1300: + if 130 <= data[p] && data[p] <= 132 { + goto tr571 + } + goto tr420 + st1301: + if p++; p == pe { + goto _test_eof1301 + } + st_case_1301: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr571 + } + case data[p] >= 128: + goto tr571 + } + goto tr420 + st1302: + if p++; p == pe { + goto _test_eof1302 + } + st_case_1302: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr571 + st1303: + if p++; p == pe { + goto _test_eof1303 + } + st_case_1303: + if data[p] == 132 { + goto tr571 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr571 + } + case data[p] >= 155: + goto tr571 + } + goto tr420 + st1304: + if p++; p == pe { + goto _test_eof1304 + } + st_case_1304: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1305 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st1305: + if p++; p == pe { + goto _test_eof1305 + } + st_case_1305: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr571 + } + goto tr148 + st1306: + if p++; p == pe { + goto _test_eof1306 + } + st_case_1306: + if data[p] == 160 { + goto st1307 + } + goto tr420 + st1307: + if p++; p == pe { + goto _test_eof1307 + } + st_case_1307: + switch data[p] { + case 128: + 
goto st1308 + case 129: + goto st1309 + case 132: + goto st563 + case 135: + goto st1311 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1310 + } + goto tr420 + st1308: + if p++; p == pe { + goto _test_eof1308 + } + st_case_1308: + if data[p] == 129 { + goto tr571 + } + if 160 <= data[p] { + goto tr571 + } + goto tr420 + st1309: + if p++; p == pe { + goto _test_eof1309 + } + st_case_1309: + if 192 <= data[p] { + goto tr420 + } + goto tr571 + st1310: + if p++; p == pe { + goto _test_eof1310 + } + st_case_1310: + goto tr571 + st1311: + if p++; p == pe { + goto _test_eof1311 + } + st_case_1311: + if 176 <= data[p] { + goto tr420 + } + goto tr571 + st1312: + if p++; p == pe { + goto _test_eof1312 + } + st_case_1312: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr421 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st1313: + if p++; p == pe { + goto _test_eof1313 + } + st_case_1313: + if 128 <= data[p] { + goto tr421 + } + goto tr420 + st1314: + if p++; p == pe { + goto _test_eof1314 + } + st_case_1314: + switch data[p] { + case 181: + goto tr420 + case 190: + goto st413 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr421 + st1315: + if p++; p == pe { + goto _test_eof1315 + } + st_case_1315: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr421 + } + goto tr148 + st1316: + if p++; p == pe { + goto _test_eof1316 + } + st_case_1316: + switch data[p] { + case 137: + goto st413 + case 190: + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1317: + if p++; p == pe { + goto _test_eof1317 + } + st_case_1317: + switch data[p] 
{ + case 135: + goto tr421 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr421 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr421 + } + goto tr420 + st1318: + if p++; p == pe { + goto _test_eof1318 + } + st_case_1318: + if data[p] == 156 { + goto tr421 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 133 { + goto tr421 + } + case data[p] > 141: + switch { + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 144: + goto tr421 + } + default: + goto st413 + } + goto tr420 + st1319: + if p++; p == pe { + goto _test_eof1319 + } + st_case_1319: + switch data[p] { + case 171: + goto tr421 + case 172: + goto st413 + case 176: + goto tr421 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1320: + if p++; p == pe { + goto _test_eof1320 + } + st_case_1320: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr421 + } + case data[p] >= 150: + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1321: + if p++; p == pe { + goto _test_eof1321 + } + st_case_1321: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr421 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1322: + if p++; p == pe { + goto _test_eof1322 + } + st_case_1322: + switch { + case data[p] 
> 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr421 + st1323: + if p++; p == pe { + goto _test_eof1323 + } + st_case_1323: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + goto tr148 + st1324: + if p++; p == pe { + goto _test_eof1324 + } + st_case_1324: + switch data[p] { + case 184: + goto st413 + case 186: + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1325: + if p++; p == pe { + goto _test_eof1325 + } + st_case_1325: + switch data[p] { + case 160: + goto st1326 + case 161: + goto st1327 + case 162: + goto st168 + case 163: + goto st1328 + case 164: + goto st1329 + case 165: + goto st1330 + case 166: + goto st1331 + case 167: + goto st1332 + case 168: + goto st1333 + case 169: + goto st1334 + case 170: + goto st1335 + case 171: + goto st1336 + case 172: + goto st1337 + case 173: + goto st1338 + case 174: + goto st1339 + case 175: + goto st1340 + case 176: + goto st1341 + case 177: + goto st1342 + case 178: + goto st1343 + case 179: + goto st1344 + case 180: + goto st1345 + case 181: + goto st1346 + case 182: + goto st1347 + case 183: + goto st1348 + case 184: + goto st1349 + case 185: + goto st1350 + case 186: + goto st1351 + case 187: + goto st1352 + case 188: + goto st1353 + case 189: + goto st1354 + case 190: + goto st1355 + case 191: + goto st1356 + } + goto tr420 + st1326: + if p++; p == pe { + goto _test_eof1326 + } + st_case_1326: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1327: + if p++; p == pe 
{ + goto _test_eof1327 + } + st_case_1327: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1328: + if p++; p == pe { + goto _test_eof1328 + } + st_case_1328: + if 163 <= data[p] { + goto tr421 + } + goto tr420 + st1329: + if p++; p == pe { + goto _test_eof1329 + } + st_case_1329: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr421 + st1330: + if p++; p == pe { + goto _test_eof1330 + } + st_case_1330: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + if 177 <= data[p] { + goto tr148 + } + default: + goto tr420 + } + goto tr421 + st1331: + if p++; p == pe { + goto _test_eof1331 + } + st_case_1331: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr421 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr421 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr421 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st1332: + if p++; p == pe { + goto _test_eof1332 + } + st_case_1332: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + default: + goto tr420 + } + case data[p] > 155: + switch { + case data[p] < 164: + if 156 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 
+ } + case data[p] >= 176: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1333: + if p++; p == pe { + goto _test_eof1333 + } + st_case_1333: + if data[p] == 188 { + goto tr421 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr421 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1334: + if p++; p == pe { + goto _test_eof1334 + } + st_case_1334: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr420 + } + default: + goto tr420 + } + case data[p] > 152: + switch { + case data[p] < 159: + if 153 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1335: + if p++; p == pe { + goto _test_eof1335 + } + st_case_1335: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 
185: + if 188 <= data[p] { + goto tr421 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1336: + if p++; p == pe { + goto _test_eof1336 + } + st_case_1336: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 160: + if 142 <= data[p] && data[p] <= 159 { + goto tr420 + } + case data[p] > 161: + switch { + case data[p] > 165: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 164: + goto tr420 + } + default: + goto tr148 + } + goto tr421 + st1337: + if p++; p == pe { + goto _test_eof1337 + } + st_case_1337: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1338: + if p++; p == pe { + goto _test_eof1338 + } + st_case_1338: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr421 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr421 + } + default: + goto tr421 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr421 + } + default: + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1339: + if p++; p == pe { + goto 
_test_eof1339 + } + st_case_1339: + switch data[p] { + case 130: + goto tr421 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1340: + if p++; p == pe { + goto _test_eof1340 + } + st_case_1340: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr421 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr421 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1341: + if p++; p == pe { + goto _test_eof1341 + } + st_case_1341: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1342: + if p++; p == pe { + goto _test_eof1342 + } + st_case_1342: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 155: + switch { + case data[p] > 148: + if 152 <= data[p] && data[p] <= 154 { + goto tr148 + 
} + case data[p] >= 142: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] < 164: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1343: + if p++; p == pe { + goto _test_eof1343 + } + st_case_1343: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr421 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1344: + if p++; p == pe { + goto _test_eof1344 + } + st_case_1344: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr421 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr421 + } + default: + goto tr421 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1345: + if p++; p == pe { + goto _test_eof1345 + } + st_case_1345: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr421 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr421 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto 
tr420 + st1346: + if p++; p == pe { + goto _test_eof1346 + } + st_case_1346: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 186: + if 176 <= data[p] && data[p] <= 185 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr421 + st1347: + if p++; p == pe { + goto _test_eof1347 + } + st_case_1347: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1348: + if p++; p == pe { + goto _test_eof1348 + } + st_case_1348: + switch data[p] { + case 138: + goto tr421 + case 150: + goto tr421 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr421 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1349: + if p++; p == pe { + goto _test_eof1349 + } + st_case_1349: + if data[p] == 177 { + goto tr421 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr421 + } + goto tr420 + st1350: + if p++; p == pe { + goto _test_eof1350 + } + st_case_1350: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr421 + } + goto tr420 + st1351: + if p++; p == pe { + goto _test_eof1351 + } + 
st_case_1351: + if data[p] == 177 { + goto tr421 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr421 + } + case data[p] >= 180: + goto tr421 + } + goto tr420 + st1352: + if p++; p == pe { + goto _test_eof1352 + } + st_case_1352: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr421 + } + goto tr420 + st1353: + if p++; p == pe { + goto _test_eof1353 + } + st_case_1353: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr421 + case 183: + goto tr421 + case 185: + goto tr421 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1354: + if p++; p == pe { + goto _test_eof1354 + } + st_case_1354: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1355: + if p++; p == pe { + goto _test_eof1355 + } + st_case_1355: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr421 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr421 + } + case data[p] >= 141: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1356: + if p++; p == pe { + goto _test_eof1356 + } + st_case_1356: + if data[p] == 134 { + goto tr421 + } + goto tr420 + st1357: + if p++; p == pe { + goto _test_eof1357 + } + st_case_1357: + switch data[p] { + case 128: + goto st1358 + case 129: + goto st1359 + case 130: + goto st1360 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1361 + case 142: + goto st208 + case 
143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st1362 + case 157: + goto st1363 + case 158: + goto st1364 + case 159: + goto st1365 + case 160: + goto st1366 + case 161: + goto st219 + case 162: + goto st1367 + case 163: + goto st221 + case 164: + goto st1368 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st1369 + case 169: + goto st1370 + case 170: + goto st1371 + case 172: + goto st1372 + case 173: + goto st1373 + case 174: + goto st1374 + case 175: + goto st1375 + case 176: + goto st1376 + case 177: + goto st640 + case 179: + goto st1377 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st1378 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st1358: + if p++; p == pe { + goto _test_eof1358 + } + st_case_1358: + if 171 <= data[p] && data[p] <= 190 { + goto tr421 + } + goto tr420 + st1359: + if p++; p == pe { + goto _test_eof1359 + } + st_case_1359: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr421 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr421 + } + default: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1360: + if p++; p == pe { + goto _test_eof1360 + } + st_case_1360: + switch { + case data[p] < 143: + if 130 <= data[p] && data[p] <= 141 { + goto tr421 + } + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1361: + if p++; p == pe { + goto _test_eof1361 + 
} + st_case_1361: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1362: + if p++; p == pe { + goto _test_eof1362 + } + st_case_1362: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr421 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1363: + if p++; p == pe { + goto _test_eof1363 + } + st_case_1363: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr421 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1364: + if p++; p == pe { + goto _test_eof1364 + } + st_case_1364: + if 180 <= data[p] { + goto tr421 + } + goto tr420 + st1365: + if p++; p == pe { + goto _test_eof1365 + } + st_case_1365: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 170 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1366: + if p++; p == pe { + goto _test_eof1366 + } + st_case_1366: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr421 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1367: + if p++; p == pe { + goto _test_eof1367 + } + st_case_1367: + if data[p] == 169 { + goto tr421 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1368: + if p++; p == pe { + goto _test_eof1368 
+ } + st_case_1368: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1369: + if p++; p == pe { + goto _test_eof1369 + } + st_case_1369: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1370: + if p++; p == pe { + goto _test_eof1370 + } + st_case_1370: + if data[p] == 191 { + goto tr421 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr421 + } + case data[p] >= 149: + goto tr421 + } + goto tr420 + st1371: + if p++; p == pe { + goto _test_eof1371 + } + st_case_1371: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1372: + if p++; p == pe { + goto _test_eof1372 + } + st_case_1372: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr421 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1373: + if p++; p == pe { + goto _test_eof1373 + } + st_case_1373: + switch { + case data[p] < 140: + if 133 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1374: + if p++; p == pe { + goto _test_eof1374 + } + st_case_1374: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + 
goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1375: + if p++; p == pe { + goto _test_eof1375 + } + st_case_1375: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + goto tr148 + st1376: + if p++; p == pe { + goto _test_eof1376 + } + st_case_1376: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1377: + if p++; p == pe { + goto _test_eof1377 + } + st_case_1377: + if data[p] == 173 { + goto tr421 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr421 + } + case data[p] >= 144: + goto tr421 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr421 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr421 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1378: + if p++; p == pe { + goto _test_eof1378 + } + st_case_1378: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st1379: + if p++; p == pe { + goto _test_eof1379 + } + st_case_1379: + switch data[p] { + case 128: + goto st1380 + case 129: + goto st1381 + case 130: + goto st241 + case 131: + goto st1382 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1383 + case 180: + goto st251 + case 181: + goto st1384 + case 182: + goto st253 + case 183: + goto st1385 + case 184: + goto st255 + } + goto tr420 + st1380: + if p++; p == pe { + goto _test_eof1380 + } + st_case_1380: + if data[p] == 164 { + goto st413 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr421 + } + case data[p] > 153: + 
switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr421 + } + default: + goto st413 + } + goto tr420 + st1381: + if p++; p == pe { + goto _test_eof1381 + } + st_case_1381: + switch data[p] { + case 132: + goto st413 + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr571 + st1382: + if p++; p == pe { + goto _test_eof1382 + } + st_case_1382: + if 144 <= data[p] && data[p] <= 176 { + goto tr421 + } + goto tr420 + st1383: + if p++; p == pe { + goto _test_eof1383 + } + st_case_1383: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1384: + if p++; p == pe { + goto _test_eof1384 + } + st_case_1384: + if data[p] == 191 { + goto tr421 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st1385: + if p++; p == pe { + goto _test_eof1385 + } + st_case_1385: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1386: + if p++; p == pe { + goto _test_eof1386 + } + st_case_1386: + switch data[p] { + case 128: + goto st1387 + case 130: + goto st1388 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st1387: + if p++; p == pe { + goto _test_eof1387 + } + st_case_1387: + if data[p] == 
133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr421 + } + goto tr420 + st1388: + if p++; p == pe { + goto _test_eof1388 + } + st_case_1388: + if 153 <= data[p] && data[p] <= 154 { + goto tr421 + } + goto tr420 + st1389: + if p++; p == pe { + goto _test_eof1389 + } + st_case_1389: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st1390 + case 154: + goto st1391 + case 155: + goto st1392 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1393 + case 161: + goto st272 + case 162: + goto st1394 + case 163: + goto st1395 + case 164: + goto st1396 + case 165: + goto st1397 + case 166: + goto st1398 + case 167: + goto st1399 + case 168: + goto st1400 + case 169: + goto st1401 + case 170: + goto st1402 + case 171: + goto st1403 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1404 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st1390: + if p++; p == pe { + goto _test_eof1390 + } + st_case_1390: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1391: + if p++; p == pe { + goto _test_eof1391 + } + st_case_1391: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1392: + if p++; p == pe { + goto _test_eof1392 + } + st_case_1392: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + goto tr148 + st1393: + if p++; p == pe { + goto _test_eof1393 + 
} + st_case_1393: + switch data[p] { + case 130: + goto tr421 + case 134: + goto tr421 + case 139: + goto tr421 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr421 + } + goto tr148 + st1394: + if p++; p == pe { + goto _test_eof1394 + } + st_case_1394: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr421 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1395: + if p++; p == pe { + goto _test_eof1395 + } + st_case_1395: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr421 + st1396: + if p++; p == pe { + goto _test_eof1396 + } + st_case_1396: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1397: + if p++; p == pe { + goto _test_eof1397 + } + st_case_1397: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr421 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st1398: + if p++; p == pe { + goto _test_eof1398 + } + st_case_1398: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1399: + if p++; p == pe { + goto _test_eof1399 + } + st_case_1399: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + if 129 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] > 164: + 
switch { + case data[p] > 175: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1400: + if p++; p == pe { + goto _test_eof1400 + } + st_case_1400: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1401: + if p++; p == pe { + goto _test_eof1401 + } + st_case_1401: + if data[p] == 131 { + goto tr421 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr421 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1402: + if p++; p == pe { + goto _test_eof1402 + } + st_case_1402: + if data[p] == 176 { + goto tr421 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr421 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1403: + if p++; p == pe { + goto _test_eof1403 + } + st_case_1403: + if data[p] == 129 { + goto tr421 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr421 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1404: + if p++; p == pe { + goto _test_eof1404 + } + st_case_1404: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1405: + if p++; p == pe { + goto _test_eof1405 + } + st_case_1405: + switch data[p] { + case 172: + goto st1406 + case 173: + goto st672 + case 174: + goto st293 + case 
175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1407 + case 185: + goto st1408 + case 187: + goto st1409 + case 188: + goto st1410 + case 189: + goto st303 + case 190: + goto st1411 + case 191: + goto st1412 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st1406: + if p++; p == pe { + goto _test_eof1406 + } + st_case_1406: + switch data[p] { + case 158: + goto tr421 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st1407: + if p++; p == pe { + goto _test_eof1407 + } + st_case_1407: + switch data[p] { + case 144: + goto st413 + case 148: + goto st413 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr421 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr421 + } + goto tr420 + st1408: + if p++; p == pe { + goto _test_eof1408 + } + st_case_1408: + switch data[p] { + case 144: + goto st413 + case 146: + goto st413 + case 148: + goto st413 + } + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr571 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1409: + if p++; p == pe { + goto _test_eof1409 + } + st_case_1409: + if data[p] == 191 { + goto tr421 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st1410: + if p++; p == pe { + goto _test_eof1410 + } + st_case_1410: + switch data[p] { + case 135: + goto st413 + case 140: + goto st413 + case 142: + goto st413 + case 155: + goto st413 + case 191: + goto tr571 + } + if 161 <= data[p] && data[p] 
<= 186 { + goto tr148 + } + goto tr420 + st1411: + if p++; p == pe { + goto _test_eof1411 + } + st_case_1411: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr421 + } + goto tr420 + st1412: + if p++; p == pe { + goto _test_eof1412 + } + st_case_1412: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr421 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1413: + if p++; p == pe { + goto _test_eof1413 + } + st_case_1413: + switch data[p] { + case 144: + goto st1414 + case 145: + goto st1420 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1439 + case 155: + goto st1444 + case 157: + goto st1446 + case 158: + goto st1453 + case 159: + goto st403 + } + goto tr420 + st1414: + if p++; p == pe { + goto _test_eof1414 + } + st_case_1414: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st1415 + case 138: + goto st313 + case 139: + goto st1416 + case 140: + goto st315 + case 141: + goto st1417 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st1418 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st1419 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: 
+ goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st1415: + if p++; p == pe { + goto _test_eof1415 + } + st_case_1415: + if data[p] == 189 { + goto tr421 + } + goto tr420 + st1416: + if p++; p == pe { + goto _test_eof1416 + } + st_case_1416: + if data[p] == 160 { + goto tr421 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st1417: + if p++; p == pe { + goto _test_eof1417 + } + st_case_1417: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1418: + if p++; p == pe { + goto _test_eof1418 + } + st_case_1418: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr421 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr421 + } + default: + goto tr421 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr421 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1419: + if p++; p == pe { + goto _test_eof1419 + } + st_case_1419: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1420: + if p++; p == pe { + goto _test_eof1420 + } + st_case_1420: + switch data[p] { + case 128: + goto st1421 + case 129: + goto st1422 + case 130: + goto st1423 + case 131: + goto st691 + case 132: + goto st1424 + case 133: + goto st1425 + case 134: + goto st1426 + case 135: + goto st1427 + case 136: + goto st1428 + case 138: + goto st348 + case 139: + goto st1429 + 
case 140: + goto st1430 + case 141: + goto st1431 + case 146: + goto st1432 + case 147: + goto st1433 + case 150: + goto st1434 + case 151: + goto st1435 + case 152: + goto st1432 + case 153: + goto st1436 + case 154: + goto st1437 + case 155: + goto st538 + case 156: + goto st1438 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st1421: + if p++; p == pe { + goto _test_eof1421 + } + st_case_1421: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr421 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1422: + if p++; p == pe { + goto _test_eof1422 + } + st_case_1422: + switch { + case data[p] > 165: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + case data[p] >= 135: + goto tr420 + } + goto tr421 + st1423: + if p++; p == pe { + goto _test_eof1423 + } + st_case_1423: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1424: + if p++; p == pe { + goto _test_eof1424 + } + st_case_1424: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr421 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1425: + if p++; p == pe { + goto _test_eof1425 + } + st_case_1425: + switch data[p] { + case 179: + goto tr421 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st1426: + if p++; p == pe { + goto _test_eof1426 + } + st_case_1426: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr421 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1427: + if p++; p == pe { + goto _test_eof1427 + 
} + st_case_1427: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr420 + } + goto tr421 + st1428: + if p++; p == pe { + goto _test_eof1428 + } + st_case_1428: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1429: + if p++; p == pe { + goto _test_eof1429 + } + st_case_1429: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr421 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st1430: + if p++; p == pe { + goto _test_eof1430 + } + st_case_1430: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr421 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1431: + if p++; p == pe { + goto _test_eof1431 + } + st_case_1431: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr421 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr421 + } + case data[p] > 136: + if 139 <= data[p] && 
data[p] <= 141 { + goto tr421 + } + default: + goto tr421 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr421 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr421 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1432: + if p++; p == pe { + goto _test_eof1432 + } + st_case_1432: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1433: + if p++; p == pe { + goto _test_eof1433 + } + st_case_1433: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1434: + if p++; p == pe { + goto _test_eof1434 + } + st_case_1434: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr421 + } + default: + goto tr421 + } + goto tr420 + st1435: + if p++; p == pe { + goto _test_eof1435 + } + st_case_1435: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr421 + st1436: + if p++; p == pe { + goto _test_eof1436 + } + st_case_1436: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] > 143: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 129: + goto tr420 + } + goto tr421 + st1437: + if p++; p == pe { + goto _test_eof1437 + } + st_case_1437: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1438: + if p++; p == pe { + goto _test_eof1438 + } + st_case_1438: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr421 + } + 
goto tr420 + st1439: + if p++; p == pe { + goto _test_eof1439 + } + st_case_1439: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st1440 + case 172: + goto st1441 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1442 + case 190: + goto st1443 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st1440: + if p++; p == pe { + goto _test_eof1440 + } + st_case_1440: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr421 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st1441: + if p++; p == pe { + goto _test_eof1441 + } + st_case_1441: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st1442: + if p++; p == pe { + goto _test_eof1442 + } + st_case_1442: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1443: + if p++; p == pe { + goto _test_eof1443 + } + st_case_1443: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr421 + } + goto tr420 + st1444: + if p++; p == pe { + goto _test_eof1444 + } + st_case_1444: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st1445 + } + goto tr420 + st1445: + if p++; p == pe { + goto _test_eof1445 + } + st_case_1445: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr421 + } + case data[p] >= 157: + goto tr421 + } + default: + goto tr148 + } + goto tr420 + st1446: + if p++; p == pe { + goto _test_eof1446 + } + st_case_1446: + switch data[p] { + case 133: + goto st1447 + case 134: 
+ goto st1448 + case 137: + goto st1449 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st1450 + case 169: + goto st1451 + case 170: + goto st1452 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st1447: + if p++; p == pe { + goto _test_eof1447 + } + st_case_1447: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr421 + } + case data[p] >= 165: + goto tr421 + } + goto tr420 + st1448: + if p++; p == pe { + goto _test_eof1448 + } + st_case_1448: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr421 + st1449: + if p++; p == pe { + goto _test_eof1449 + } + st_case_1449: + if 130 <= data[p] && data[p] <= 132 { + goto tr421 + } + goto tr420 + st1450: + if p++; p == pe { + goto _test_eof1450 + } + st_case_1450: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr421 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st1451: + if p++; p == pe { + goto _test_eof1451 + } + st_case_1451: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr421 + st1452: + if p++; p == pe { + goto _test_eof1452 + } + st_case_1452: + if data[p] == 132 { + goto tr421 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 155: + goto tr421 + } + goto tr420 + st1453: + if p++; p == pe { + goto _test_eof1453 + } + st_case_1453: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1454 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + 
goto st145 + } + goto tr420 + st1454: + if p++; p == pe { + goto _test_eof1454 + } + st_case_1454: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st1455: + if p++; p == pe { + goto _test_eof1455 + } + st_case_1455: + if data[p] == 160 { + goto st1456 + } + goto tr420 + st1456: + if p++; p == pe { + goto _test_eof1456 + } + st_case_1456: + switch data[p] { + case 128: + goto st1457 + case 129: + goto st1458 + case 132: + goto st1313 + case 135: + goto st1460 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1459 + } + goto tr420 + st1457: + if p++; p == pe { + goto _test_eof1457 + } + st_case_1457: + if data[p] == 129 { + goto tr421 + } + if 160 <= data[p] { + goto tr421 + } + goto tr420 + st1458: + if p++; p == pe { + goto _test_eof1458 + } + st_case_1458: + if 192 <= data[p] { + goto tr420 + } + goto tr421 + st1459: + if p++; p == pe { + goto _test_eof1459 + } + st_case_1459: + goto tr421 + st1460: + if p++; p == pe { + goto _test_eof1460 + } + st_case_1460: + if 176 <= data[p] { + goto tr420 + } + goto tr421 + st1461: + if p++; p == pe { + goto _test_eof1461 + } + st_case_1461: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr148 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr420 + st1462: + if p++; p == pe { + goto _test_eof1462 + } + st_case_1462: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] > 185: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 184: + goto tr420 + } + goto tr148 + st1463: + if p++; p == pe { + goto _test_eof1463 + } + st_case_1463: + if data[p] == 130 { + goto tr420 + } + goto tr148 + st1464: + if p++; p == pe { + goto _test_eof1464 + } + st_case_1464: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] > 144: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 
136: + goto tr420 + } + goto tr148 + st1465: + if p++; p == pe { + goto _test_eof1465 + } + st_case_1465: + switch data[p] { + case 135: + goto tr148 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr148 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr148 + } + goto tr420 + st1466: + if p++; p == pe { + goto _test_eof1466 + } + st_case_1466: + if data[p] == 156 { + goto tr148 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr148 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st1467: + if p++; p == pe { + goto _test_eof1467 + } + st_case_1467: + if data[p] == 171 { + goto tr421 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st1468: + if p++; p == pe { + goto _test_eof1468 + } + st_case_1468: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 189: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st1469: + if p++; p == pe { + goto _test_eof1469 + } + st_case_1469: + if 143 <= data[p] { + goto tr148 + } + goto tr420 + st1470: + if p++; p == pe { + goto _test_eof1470 + } + st_case_1470: + if 139 <= data[p] && data[p] <= 140 { + goto tr420 + } + goto tr148 + st1471: + if p++; p == pe { + goto _test_eof1471 + } + st_case_1471: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] > 137: + if 138 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 128: + goto tr421 + } + goto tr420 + st1472: + 
if p++; p == pe { + goto _test_eof1472 + } + st_case_1472: + switch data[p] { + case 160: + goto st1473 + case 161: + goto st1474 + case 162: + goto st168 + case 163: + goto st1475 + case 164: + goto st145 + case 165: + goto st1476 + case 166: + goto st1477 + case 167: + goto st1478 + case 168: + goto st1479 + case 169: + goto st1480 + case 170: + goto st1481 + case 171: + goto st1482 + case 172: + goto st1483 + case 173: + goto st1484 + case 174: + goto st1485 + case 175: + goto st1486 + case 176: + goto st1487 + case 177: + goto st1488 + case 178: + goto st1489 + case 179: + goto st1490 + case 180: + goto st1491 + case 181: + goto st1492 + case 182: + goto st1493 + case 183: + goto st1494 + case 184: + goto st1495 + case 185: + goto st1496 + case 186: + goto st1497 + case 187: + goto st1498 + case 188: + goto st1499 + case 189: + goto st1500 + case 190: + goto st1501 + case 191: + goto st1502 + } + goto tr420 + st1473: + if p++; p == pe { + goto _test_eof1473 + } + st_case_1473: + if 128 <= data[p] && data[p] <= 173 { + goto tr148 + } + goto tr2 + st1474: + if p++; p == pe { + goto _test_eof1474 + } + st_case_1474: + if 128 <= data[p] && data[p] <= 155 { + goto tr148 + } + goto tr2 + st1475: + if p++; p == pe { + goto _test_eof1475 + } + st_case_1475: + if 163 <= data[p] { + goto tr148 + } + goto tr2 + st1476: + if p++; p == pe { + goto _test_eof1476 + } + st_case_1476: + if data[p] == 176 { + goto tr2 + } + switch { + case data[p] > 165: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 164: + goto tr2 + } + goto tr148 + st1477: + if p++; p == pe { + goto _test_eof1477 + } + st_case_1477: + switch data[p] { + case 132: + goto tr2 + case 169: + goto tr2 + case 177: + goto tr2 + } + switch { + case data[p] < 145: + if 141 <= data[p] && data[p] <= 142 { + goto tr2 + } + case data[p] > 146: + switch { + case data[p] > 181: + if 186 <= data[p] && data[p] <= 187 { + goto tr2 + } + case data[p] >= 179: + goto tr2 + } + default: + goto tr2 + } + 
goto tr148 + st1478: + if p++; p == pe { + goto _test_eof1478 + } + st_case_1478: + if data[p] == 158 { + goto tr2 + } + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + if 143 <= data[p] && data[p] <= 150 { + goto tr2 + } + default: + goto tr2 + } + case data[p] > 155: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + if 178 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1479: + if p++; p == pe { + goto _test_eof1479 + } + st_case_1479: + if data[p] == 188 { + goto tr148 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr148 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1480: + if p++; p == pe { + goto _test_eof1480 + } + st_case_1480: + if data[p] == 157 { + goto tr2 + } + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr2 + } + default: + goto tr2 + } + case data[p] > 152: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + if 182 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1481: + if p++; p == pe { + goto _test_eof1481 + } + st_case_1481: + switch { + case data[p] < 147: + switch { + case 
data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr148 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1482: + if p++; p == pe { + goto _test_eof1482 + } + st_case_1482: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + } + switch { + case data[p] < 164: + switch { + case data[p] > 143: + if 145 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 184: + if 186 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1483: + if p++; p == pe { + goto _test_eof1483 + } + st_case_1483: + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1484: + if p++; p == pe { + goto _test_eof1484 + } + st_case_1484: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr148 + } + default: + goto tr148 + } 
+ case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1485: + if p++; p == pe { + goto _test_eof1485 + } + st_case_1485: + if data[p] == 156 { + goto tr148 + } + switch { + case data[p] < 153: + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 149 { + goto tr148 + } + case data[p] >= 142: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 154: + switch { + case data[p] < 168: + switch { + case data[p] > 159: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] >= 158: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1486: + if p++; p == pe { + goto _test_eof1486 + } + st_case_1486: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr148 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr148 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1487: + if p++; p == pe { + goto _test_eof1487 + } + st_case_1487: + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 189 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1488: + if p++; p == 
pe { + goto _test_eof1488 + } + st_case_1488: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + case 151: + goto tr2 + } + switch { + case data[p] < 164: + switch { + case data[p] > 148: + if 155 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1489: + if p++; p == pe { + goto _test_eof1489 + } + st_case_1489: + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1490: + if p++; p == pe { + goto _test_eof1490 + } + st_case_1490: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] < 166: + if 160 <= data[p] && data[p] <= 163 { + goto tr148 + } + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2 + st1491: + if p++; p == pe { + goto _test_eof1491 + } + st_case_1491: + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 189 <= data[p] { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + 
default: + goto tr148 + } + goto tr2 + st1492: + if p++; p == pe { + goto _test_eof1492 + } + st_case_1492: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 164: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 158 { + goto tr2 + } + case data[p] >= 143: + goto tr2 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1493: + if p++; p == pe { + goto _test_eof1493 + } + st_case_1493: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1494: + if p++; p == pe { + goto _test_eof1494 + } + st_case_1494: + switch data[p] { + case 138: + goto tr148 + case 150: + goto tr148 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr148 + } + goto tr2 + st1495: + if p++; p == pe { + goto _test_eof1495 + } + st_case_1495: + if data[p] == 177 { + goto tr148 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr2 + st1496: + if p++; p == pe { + goto _test_eof1496 + } + st_case_1496: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr148 + } + goto tr2 + st1497: + if p++; p == pe { + goto _test_eof1497 + } + st_case_1497: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] > 185: + if 
187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 180: + goto tr148 + } + goto tr2 + st1498: + if p++; p == pe { + goto _test_eof1498 + } + st_case_1498: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr148 + } + goto tr2 + st1499: + if p++; p == pe { + goto _test_eof1499 + } + st_case_1499: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr148 + case 183: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr148 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1500: + if p++; p == pe { + goto _test_eof1500 + } + st_case_1500: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1501: + if p++; p == pe { + goto _test_eof1501 + } + st_case_1501: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1502: + if p++; p == pe { + goto _test_eof1502 + } + st_case_1502: + if data[p] == 134 { + goto tr148 + } + goto tr2 + st1503: + if p++; p == pe { + goto _test_eof1503 + } + st_case_1503: + switch data[p] { + case 128: + goto st1504 + case 129: + goto st1505 + case 130: + goto st1506 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1507 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st1508 + case 157: + goto st1509 + case 158: + goto st1510 + case 159: + goto st1511 + case 160: + goto st1512 + 
case 161: + goto st219 + case 162: + goto st1513 + case 163: + goto st221 + case 164: + goto st1514 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st1474 + case 169: + goto st1515 + case 170: + goto st1516 + case 172: + goto st147 + case 173: + goto st1517 + case 174: + goto st1518 + case 175: + goto st1519 + case 176: + goto st1520 + case 177: + goto st640 + case 179: + goto st1521 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st1522 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st1504: + if p++; p == pe { + goto _test_eof1504 + } + st_case_1504: + if 171 <= data[p] && data[p] <= 190 { + goto tr148 + } + goto tr2 + st1505: + if p++; p == pe { + goto _test_eof1505 + } + st_case_1505: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr148 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1506: + if p++; p == pe { + goto _test_eof1506 + } + st_case_1506: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1507: + if p++; p == pe { + goto _test_eof1507 + } + st_case_1507: + switch { + case data[p] > 156: + if 160 <= data[p] { + goto tr2 + } + case data[p] >= 155: + goto tr2 + } + goto tr148 + 
st1508: + if p++; p == pe { + goto _test_eof1508 + } + st_case_1508: + switch { + case data[p] < 142: + if 128 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] > 148: + if 160 <= data[p] && data[p] <= 180 { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1509: + if p++; p == pe { + goto _test_eof1509 + } + st_case_1509: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 147 { + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1510: + if p++; p == pe { + goto _test_eof1510 + } + st_case_1510: + if 180 <= data[p] { + goto tr148 + } + goto tr2 + st1511: + if p++; p == pe { + goto _test_eof1511 + } + st_case_1511: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1512: + if p++; p == pe { + goto _test_eof1512 + } + st_case_1512: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1513: + if p++; p == pe { + goto _test_eof1513 + } + st_case_1513: + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1514: + if p++; p == pe { + goto _test_eof1514 + } + st_case_1514: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1515: + if p++; p == pe { + goto _test_eof1515 + } + st_case_1515: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + 
goto tr148 + } + case data[p] >= 149: + goto tr148 + } + goto tr2 + st1516: + if p++; p == pe { + goto _test_eof1516 + } + st_case_1516: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1517: + if p++; p == pe { + goto _test_eof1517 + } + st_case_1517: + switch { + case data[p] < 144: + if 140 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 154: + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st1518: + if p++; p == pe { + goto _test_eof1518 + } + st_case_1518: + switch { + case data[p] < 176: + if 128 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1519: + if p++; p == pe { + goto _test_eof1519 + } + st_case_1519: + if 180 <= data[p] { + goto tr2 + } + goto tr148 + st1520: + if p++; p == pe { + goto _test_eof1520 + } + st_case_1520: + if 128 <= data[p] && data[p] <= 183 { + goto tr148 + } + goto tr2 + st1521: + if p++; p == pe { + goto _test_eof1521 + } + st_case_1521: + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr148 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1522: + if p++; p == pe { + goto _test_eof1522 + } + st_case_1522: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1523: + if p++; p == pe { + goto _test_eof1523 + } + st_case_1523: + switch data[p] { + case 128: + goto st1524 + case 129: + goto st1525 + case 130: + goto st241 + case 131: + goto st1526 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto 
st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1527 + case 180: + goto st251 + case 181: + goto st1528 + case 182: + goto st253 + case 183: + goto st1529 + case 184: + goto st255 + } + goto tr420 + st1524: + if p++; p == pe { + goto _test_eof1524 + } + st_case_1524: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr148 + } + default: + goto st142 + } + goto tr2 + st1525: + if p++; p == pe { + goto _test_eof1525 + } + st_case_1525: + switch data[p] { + case 165: + goto tr2 + case 176: + goto tr2 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2 + } + goto tr571 + st1526: + if p++; p == pe { + goto _test_eof1526 + } + st_case_1526: + if 144 <= data[p] && data[p] <= 176 { + goto tr148 + } + goto tr2 + st1527: + if p++; p == pe { + goto _test_eof1527 + } + st_case_1527: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 165: + goto tr2 + } + goto tr148 + st1528: + if p++; p == pe { + goto _test_eof1528 + } + st_case_1528: + switch { + case data[p] < 176: + if 168 <= data[p] && data[p] <= 174 { + goto tr2 + } + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1529: + if p++; p == pe { + goto _test_eof1529 + } + st_case_1529: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { 
+ goto tr148 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1530: + if p++; p == pe { + goto _test_eof1530 + } + st_case_1530: + switch data[p] { + case 128: + goto st1531 + case 130: + goto st1532 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st1531: + if p++; p == pe { + goto _test_eof1531 + } + st_case_1531: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr148 + } + goto tr2 + st1532: + if p++; p == pe { + goto _test_eof1532 + } + st_case_1532: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + goto tr2 + st1533: + if p++; p == pe { + goto _test_eof1533 + } + st_case_1533: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st1534 + case 154: + goto st147 + case 155: + goto st293 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1535 + case 161: + goto st272 + case 162: + goto st147 + case 163: + goto st1536 + case 164: + goto st1537 + case 165: + goto st1538 + case 166: + goto st147 + case 167: + goto st1539 + case 168: + goto st1540 + case 169: + goto st1541 + case 170: + goto st1542 + case 171: + goto st1543 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1544 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st1534: + if p++; p == pe { + goto _test_eof1534 + } + st_case_1534: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1535: + if p++; p == pe { + goto _test_eof1535 + } + st_case_1535: + if 168 <= data[p] { + goto tr2 + } + goto tr148 + st1536: + if p++; p == pe { + goto _test_eof1536 
+ } + st_case_1536: + if data[p] == 188 { + goto tr2 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2 + } + case data[p] >= 184: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1537: + if p++; p == pe { + goto _test_eof1537 + } + st_case_1537: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1538: + if p++; p == pe { + goto _test_eof1538 + } + st_case_1538: + switch { + case data[p] > 159: + if 189 <= data[p] { + goto tr2 + } + case data[p] >= 148: + goto tr2 + } + goto tr148 + st1539: + if p++; p == pe { + goto _test_eof1539 + } + st_case_1539: + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr2 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1540: + if p++; p == pe { + goto _test_eof1540 + } + st_case_1540: + if 128 <= data[p] && data[p] <= 182 { + goto tr148 + } + goto tr2 + st1541: + if p++; p == pe { + goto _test_eof1541 + } + st_case_1541: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 141 { + goto tr148 + } + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr148 + } + default: + goto tr421 + } + goto tr2 + st1542: + if p++; p == pe { + goto _test_eof1542 + } + st_case_1542: + if data[p] == 176 { + goto tr148 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr148 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto 
tr148 + } + default: + goto tr148 + } + goto tr2 + st1543: + if p++; p == pe { + goto _test_eof1543 + } + st_case_1543: + if data[p] == 129 { + goto tr148 + } + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 182 { + goto tr148 + } + case data[p] >= 160: + goto tr148 + } + goto tr2 + st1544: + if p++; p == pe { + goto _test_eof1544 + } + st_case_1544: + switch { + case data[p] < 172: + if 128 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + default: + goto tr148 + } + goto tr2 + st1545: + if p++; p == pe { + goto _test_eof1545 + } + st_case_1545: + switch data[p] { + case 172: + goto st1546 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1547 + case 185: + goto st967 + case 187: + goto st1548 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st1549 + case 191: + goto st1550 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st1546: + if p++; p == pe { + goto _test_eof1546 + } + st_case_1546: + switch data[p] { + case 158: + goto tr148 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr2 + st1547: + if p++; p == pe { + goto _test_eof1547 + } + st_case_1547: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr148 + } + goto tr2 + st1548: + if p++; p == pe { + goto _test_eof1548 + } + 
st_case_1548: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 189: + goto tr2 + } + goto tr148 + st1549: + if p++; p == pe { + goto _test_eof1549 + } + st_case_1549: + if 158 <= data[p] && data[p] <= 190 { + goto tr148 + } + goto tr2 + st1550: + if p++; p == pe { + goto _test_eof1550 + } + st_case_1550: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1551: + if p++; p == pe { + goto _test_eof1551 + } + st_case_1551: + switch data[p] { + case 144: + goto st1552 + case 145: + goto st1558 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1573 + case 155: + goto st1577 + case 157: + goto st1579 + case 158: + goto st1586 + case 159: + goto st403 + } + goto tr420 + st1552: + if p++; p == pe { + goto _test_eof1552 + } + st_case_1552: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st1553 + case 138: + goto st313 + case 139: + goto st1554 + case 140: + goto st315 + case 141: + goto st1555 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st1556 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st1557 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + 
goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr2 + st1553: + if p++; p == pe { + goto _test_eof1553 + } + st_case_1553: + if data[p] == 189 { + goto tr148 + } + goto tr2 + st1554: + if p++; p == pe { + goto _test_eof1554 + } + st_case_1554: + switch { + case data[p] > 159: + if 161 <= data[p] { + goto tr2 + } + case data[p] >= 145: + goto tr2 + } + goto tr148 + st1555: + if p++; p == pe { + goto _test_eof1555 + } + st_case_1555: + switch { + case data[p] > 143: + if 187 <= data[p] { + goto tr2 + } + case data[p] >= 139: + goto tr2 + } + goto tr148 + st1556: + if p++; p == pe { + goto _test_eof1556 + } + st_case_1556: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 140: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 134 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1557: + if p++; p == pe { + goto _test_eof1557 + } + st_case_1557: + switch { + case data[p] > 135: + if 137 <= data[p] && data[p] <= 166 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1558: + if p++; p == pe { + goto _test_eof1558 + } + st_case_1558: + switch data[p] { + case 129: + goto st1559 + case 130: + goto st1560 + case 131: + goto st691 + case 132: + goto st1561 + case 133: + goto st1562 + case 135: + goto st1563 + case 136: + goto st1564 + case 138: + goto st348 + case 139: + goto st1565 + case 140: + goto st1566 + case 141: + goto st1567 + case 146: + goto st147 + case 147: + goto st1568 + case 150: + goto st1569 + case 151: + goto st1570 + case 152: + goto st147 + case 153: + goto st1571 + case 154: + goto st1520 + case 155: + 
goto st538 + case 156: + goto st1572 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + if 128 <= data[p] && data[p] <= 134 { + goto st147 + } + goto tr2 + st1559: + if p++; p == pe { + goto _test_eof1559 + } + st_case_1559: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st1560: + if p++; p == pe { + goto _test_eof1560 + } + st_case_1560: + switch { + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + case data[p] >= 187: + goto tr2 + } + goto tr148 + st1561: + if p++; p == pe { + goto _test_eof1561 + } + st_case_1561: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1562: + if p++; p == pe { + goto _test_eof1562 + } + st_case_1562: + if data[p] == 182 { + goto tr148 + } + if 144 <= data[p] && data[p] <= 179 { + goto tr148 + } + goto tr2 + st1563: + if p++; p == pe { + goto _test_eof1563 + } + st_case_1563: + if data[p] == 155 { + goto tr2 + } + switch { + case data[p] < 141: + if 133 <= data[p] && data[p] <= 137 { + goto tr2 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 157 <= data[p] { + goto tr2 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1564: + if p++; p == pe { + goto _test_eof1564 + } + st_case_1564: + switch { + case data[p] > 145: + if 147 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1565: + if p++; p == pe { + goto _test_eof1565 + } + st_case_1565: + switch { + case data[p] < 176: + if 171 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st1566: + if p++; p == pe { + goto _test_eof1566 + } + st_case_1566: + switch { + case data[p] < 147: + switch { + 
case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1567: + if p++; p == pe { + goto _test_eof1567 + } + st_case_1567: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr148 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 157 <= data[p] && data[p] <= 163 { + goto tr148 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1568: + if p++; p == pe { + goto _test_eof1568 + } + st_case_1568: + if data[p] == 134 { + goto tr2 + } + switch { + case data[p] < 144: + if 136 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr148 + st1569: + if p++; p == pe { + goto _test_eof1569 + } + st_case_1569: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1570: + if p++; p == pe { + goto _test_eof1570 + } + st_case_1570: + switch { + case data[p] > 151: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr148 + st1571: + if p++; p == pe { + goto _test_eof1571 + } + st_case_1571: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + 
goto tr2 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st1572: + if p++; p == pe { + goto _test_eof1572 + } + st_case_1572: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr148 + } + goto tr2 + st1573: + if p++; p == pe { + goto _test_eof1573 + } + st_case_1573: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st1574 + case 172: + goto st1540 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1575 + case 190: + goto st1576 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr2 + st1574: + if p++; p == pe { + goto _test_eof1574 + } + st_case_1574: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr148 + } + case data[p] >= 144: + goto tr148 + } + goto tr2 + st1575: + if p++; p == pe { + goto _test_eof1575 + } + st_case_1575: + switch { + case data[p] > 143: + if 191 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr148 + st1576: + if p++; p == pe { + goto _test_eof1576 + } + st_case_1576: + if 143 <= data[p] && data[p] <= 159 { + goto tr148 + } + goto tr2 + st1577: + if p++; p == pe { + goto _test_eof1577 + } + st_case_1577: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st1578 + } + goto tr2 + st1578: + if p++; p == pe { + goto _test_eof1578 + } + st_case_1578: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr148 + } + case data[p] >= 157: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st1579: + if p++; p == pe { + goto _test_eof1579 + } + st_case_1579: + switch data[p] { + case 133: + goto st1580 + case 134: + goto st1581 + case 137: + goto st1582 + case 144: + goto st147 + case 
145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st1583 + case 169: + goto st1584 + case 170: + goto st1585 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr2 + st1580: + if p++; p == pe { + goto _test_eof1580 + } + st_case_1580: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr148 + } + case data[p] >= 165: + goto tr148 + } + goto tr2 + st1581: + if p++; p == pe { + goto _test_eof1581 + } + st_case_1581: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1582: + if p++; p == pe { + goto _test_eof1582 + } + st_case_1582: + if 130 <= data[p] && data[p] <= 132 { + goto tr148 + } + goto tr2 + st1583: + if p++; p == pe { + goto _test_eof1583 + } + st_case_1583: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1584: + if p++; p == pe { + goto _test_eof1584 + } + st_case_1584: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto tr148 + st1585: + if p++; p == pe { + goto _test_eof1585 + } + st_case_1585: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] >= 155: + goto tr148 + } + goto tr2 + st1586: + if p++; p == pe { + goto _test_eof1586 + } + st_case_1586: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1587 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr2 + st1587: + if p++; p == pe { + goto _test_eof1587 + } + 
st_case_1587: + switch { + case data[p] > 143: + if 151 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr148 + st1588: + if p++; p == pe { + goto _test_eof1588 + } + st_case_1588: + if data[p] == 160 { + goto st1589 + } + goto tr420 + st1589: + if p++; p == pe { + goto _test_eof1589 + } + st_case_1589: + switch data[p] { + case 128: + goto st1590 + case 129: + goto st146 + case 132: + goto st147 + case 135: + goto st1591 + } + if 133 <= data[p] && data[p] <= 134 { + goto st145 + } + goto tr2 + st1590: + if p++; p == pe { + goto _test_eof1590 + } + st_case_1590: + if data[p] == 129 { + goto tr148 + } + if 160 <= data[p] { + goto tr148 + } + goto tr2 + st1591: + if p++; p == pe { + goto _test_eof1591 + } + st_case_1591: + if 176 <= data[p] { + goto tr2 + } + goto tr148 +tr1485: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:76 +act = 1; + goto st4873 + st4873: + if p++; p == pe { + goto _test_eof4873 + } + st_case_4873: +//line segment_words_prod.go:44764 + switch data[p] { + case 95: + goto tr1485 + case 194: + goto st1592 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st1593 + case 205: + goto st1594 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st1595 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1596 + case 215: + goto st1597 + case 216: + goto st1598 + case 217: + goto st1599 + case 219: + goto st1600 + case 220: + goto st1601 + case 221: + goto st1602 + case 222: + goto st1603 + case 223: + goto st1604 + case 224: + goto st1605 + case 225: + goto st1637 + case 226: + goto st1662 + case 227: + goto st1669 + case 234: + goto st1672 + case 237: + goto st287 + case 239: + goto st1689 + case 240: + goto st1697 + case 243: + goto st1746 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 
48: + goto tr126 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4521 + st1592: + if p++; p == pe { + goto _test_eof1592 + } + st_case_1592: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr1485 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr125 + st1593: + if p++; p == pe { + goto _test_eof1593 + } + st_case_1593: + if data[p] <= 127 { + goto tr125 + } + goto tr1485 + st1594: + if p++; p == pe { + goto _test_eof1594 + } + st_case_1594: + switch data[p] { + case 181: + goto tr125 + case 190: + goto tr125 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr125 + } + goto tr1485 + st1595: + if p++; p == pe { + goto _test_eof1595 + } + st_case_1595: + if data[p] == 130 { + goto tr125 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr1485 + } + goto tr148 + st1596: + if p++; p == pe { + goto _test_eof1596 + } + st_case_1596: + if data[p] == 190 { + goto tr125 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr125 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + default: + goto tr1485 + } + goto tr148 + st1597: + if p++; p == pe { + goto _test_eof1597 + } + st_case_1597: + switch data[p] { + case 135: + goto tr1485 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr1485 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr1485 + } + goto tr125 + st1598: + if p++; p == pe { + goto _test_eof1598 + } + st_case_1598: + if data[p] == 156 { + goto tr1485 + } + switch { + case 
data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr1485 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr1485 + } + goto tr125 + st1599: + if p++; p == pe { + goto _test_eof1599 + } + st_case_1599: + switch data[p] { + case 171: + goto tr126 + case 176: + goto tr1485 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr1485 + } + goto tr125 + st1600: + if p++; p == pe { + goto _test_eof1600 + } + st_case_1600: + switch data[p] { + case 148: + goto tr125 + case 158: + goto tr125 + case 169: + goto tr125 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr1485 + } + case data[p] >= 150: + goto tr1485 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 189: + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1601: + if p++; p == pe { + goto _test_eof1601 + } + st_case_1601: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr1485 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1602: + if p++; p == pe { + goto _test_eof1602 + } + st_case_1602: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr125 + } + goto tr1485 + st1603: + if p++; p == pe { + goto _test_eof1603 + } + st_case_1603: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr1485 + } + goto tr148 + st1604: + if p++; p == pe { + goto _test_eof1604 + } + st_case_1604: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + 
goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1605: + if p++; p == pe { + goto _test_eof1605 + } + st_case_1605: + switch data[p] { + case 160: + goto st1606 + case 161: + goto st1607 + case 162: + goto st168 + case 163: + goto st1608 + case 164: + goto st1609 + case 165: + goto st1610 + case 166: + goto st1611 + case 167: + goto st1612 + case 168: + goto st1613 + case 169: + goto st1614 + case 170: + goto st1615 + case 171: + goto st1616 + case 172: + goto st1617 + case 173: + goto st1618 + case 174: + goto st1619 + case 175: + goto st1620 + case 176: + goto st1621 + case 177: + goto st1622 + case 178: + goto st1623 + case 179: + goto st1624 + case 180: + goto st1625 + case 181: + goto st1626 + case 182: + goto st1627 + case 183: + goto st1628 + case 184: + goto st1629 + case 185: + goto st1630 + case 186: + goto st1631 + case 187: + goto st1632 + case 188: + goto st1633 + case 189: + goto st1634 + case 190: + goto st1635 + case 191: + goto st1636 + } + goto tr125 + st1606: + if p++; p == pe { + goto _test_eof1606 + } + st_case_1606: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1607: + if p++; p == pe { + goto _test_eof1607 + } + st_case_1607: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1608: + if p++; p == pe { + goto _test_eof1608 + } + st_case_1608: + if 163 <= data[p] { + goto tr1485 + } + goto tr125 + st1609: + if p++; p == pe { + goto _test_eof1609 + } + st_case_1609: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr1485 + st1610: + if p++; 
p == pe { + goto _test_eof1610 + } + st_case_1610: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr125 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1611: + if p++; p == pe { + goto _test_eof1611 + } + st_case_1611: + switch data[p] { + case 132: + goto tr125 + case 169: + goto tr125 + case 177: + goto tr125 + case 188: + goto tr1485 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr125 + } + case data[p] >= 129: + goto tr1485 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr125 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr1485 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st1612: + if p++; p == pe { + goto _test_eof1612 + } + st_case_1612: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr125 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr125 + } + case data[p] >= 143: + goto tr125 + } + default: + goto tr125 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1485 + st1613: + if p++; p == pe { + goto _test_eof1613 + } + st_case_1613: + if data[p] == 188 { + goto tr1485 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case 
data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr1485 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1614: + if p++; p == pe { + goto _test_eof1614 + } + st_case_1614: + if data[p] == 157 { + goto tr125 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr125 + } + case data[p] >= 142: + goto tr125 + } + default: + goto tr125 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1485 + st1615: + if p++; p == pe { + goto _test_eof1615 + } + st_case_1615: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr1485 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1616: + if p++; p == pe { + goto _test_eof1616 + } + st_case_1616: + switch data[p] { + case 134: + goto tr125 + case 
138: + goto tr125 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1617: + if p++; p == pe { + goto _test_eof1617 + } + st_case_1617: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1485 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1618: + if p++; p == pe { + goto _test_eof1618 + } + st_case_1618: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr1485 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr1485 + } + default: + goto tr1485 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 162: + goto tr1485 + } + default: + goto tr148 + } + default: + goto tr1485 + } + goto tr125 + st1619: + if p++; p == pe { + goto _test_eof1619 + } + st_case_1619: + switch data[p] { + case 130: + goto tr1485 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: 
+ if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr1485 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1620: + if p++; p == pe { + goto _test_eof1620 + } + st_case_1620: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr1485 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr1485 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1621: + if p++; p == pe { + goto _test_eof1621 + } + st_case_1621: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr1485 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1622: + if p++; p == pe { + goto _test_eof1622 + } + st_case_1622: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + case 151: + goto tr125 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr125 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr125 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && 
data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1485 + st1623: + if p++; p == pe { + goto _test_eof1623 + } + st_case_1623: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1485 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1624: + if p++; p == pe { + goto _test_eof1624 + } + st_case_1624: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr1485 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr1485 + } + default: + goto tr1485 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1625: + if p++; p == pe { + goto _test_eof1625 + } + st_case_1625: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr1485 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr1485 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1626: + if p++; p == pe { + goto _test_eof1626 + } + st_case_1626: + switch data[p] { + case 133: + 
goto tr125 + case 137: + goto tr125 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr125 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr1485 + st1627: + if p++; p == pe { + goto _test_eof1627 + } + st_case_1627: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1628: + if p++; p == pe { + goto _test_eof1628 + } + st_case_1628: + switch data[p] { + case 138: + goto tr1485 + case 150: + goto tr1485 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr1485 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr1485 + } + goto tr125 + st1629: + if p++; p == pe { + goto _test_eof1629 + } + st_case_1629: + if data[p] == 177 { + goto tr1485 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr1485 + } + goto tr125 + st1630: + if p++; p == pe { + goto _test_eof1630 + } + st_case_1630: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr1485 + } + goto tr125 + st1631: + if p++; p == pe { + goto _test_eof1631 + } + st_case_1631: + if data[p] == 177 { 
+ goto tr1485 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr1485 + } + case data[p] >= 180: + goto tr1485 + } + goto tr125 + st1632: + if p++; p == pe { + goto _test_eof1632 + } + st_case_1632: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr1485 + } + goto tr125 + st1633: + if p++; p == pe { + goto _test_eof1633 + } + st_case_1633: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr1485 + case 183: + goto tr1485 + case 185: + goto tr1485 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr1485 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr1485 + } + default: + goto tr126 + } + goto tr125 + st1634: + if p++; p == pe { + goto _test_eof1634 + } + st_case_1634: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1635: + if p++; p == pe { + goto _test_eof1635 + } + st_case_1635: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr1485 + } + case data[p] >= 128: + goto tr1485 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr1485 + } + case data[p] >= 141: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1636: + if p++; p == pe { + goto _test_eof1636 + } + st_case_1636: + if data[p] == 134 { + goto tr1485 + } + goto tr125 + st1637: + if p++; p == pe { + goto _test_eof1637 + } + st_case_1637: + switch data[p] { + case 128: + goto st1638 + case 129: + goto st1639 + case 130: + goto st1640 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1641 + case 142: + goto st208 + case 143: + goto st209 + case 
144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st1642 + case 157: + goto st1643 + case 158: + goto st1644 + case 159: + goto st1645 + case 160: + goto st1646 + case 161: + goto st219 + case 162: + goto st1647 + case 163: + goto st221 + case 164: + goto st1648 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st1651 + case 169: + goto st1652 + case 170: + goto st1653 + case 172: + goto st1654 + case 173: + goto st1655 + case 174: + goto st1656 + case 175: + goto st1657 + case 176: + goto st1658 + case 177: + goto st1659 + case 179: + goto st1660 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st1661 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr125 + st1638: + if p++; p == pe { + goto _test_eof1638 + } + st_case_1638: + if 171 <= data[p] && data[p] <= 190 { + goto tr1485 + } + goto tr125 + st1639: + if p++; p == pe { + goto _test_eof1639 + } + st_case_1639: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr1485 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr1485 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr1485 + } + default: + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1640: + if p++; p == pe { + goto _test_eof1640 + } + st_case_1640: + if data[p] == 143 { + goto tr1485 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr1485 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto 
tr1485 + } + default: + goto tr126 + } + goto tr125 + st1641: + if p++; p == pe { + goto _test_eof1641 + } + st_case_1641: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr125 + } + default: + goto tr1485 + } + goto tr148 + st1642: + if p++; p == pe { + goto _test_eof1642 + } + st_case_1642: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr1485 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr1485 + } + goto tr125 + st1643: + if p++; p == pe { + goto _test_eof1643 + } + st_case_1643: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr1485 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1644: + if p++; p == pe { + goto _test_eof1644 + } + st_case_1644: + if 180 <= data[p] { + goto tr1485 + } + goto tr125 + st1645: + if p++; p == pe { + goto _test_eof1645 + } + st_case_1645: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr125 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1646: + if p++; p == pe { + goto _test_eof1646 + } + st_case_1646: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr1485 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1647: + if p++; p == pe { + goto _test_eof1647 + } + st_case_1647: + if data[p] == 169 { + goto 
tr1485 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1648: + if p++; p == pe { + goto _test_eof1648 + } + st_case_1648: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1649: + if p++; p == pe { + goto _test_eof1649 + } + st_case_1649: + if 134 <= data[p] && data[p] <= 143 { + goto tr126 + } + goto tr2 + st1650: + if p++; p == pe { + goto _test_eof1650 + } + st_case_1650: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + goto tr2 + st1651: + if p++; p == pe { + goto _test_eof1651 + } + st_case_1651: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1652: + if p++; p == pe { + goto _test_eof1652 + } + st_case_1652: + if data[p] == 191 { + goto tr1485 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr1485 + } + case data[p] >= 149: + goto tr1485 + } + goto tr125 + st1653: + if p++; p == pe { + goto _test_eof1653 + } + st_case_1653: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr1485 + } + default: + goto tr126 + } + goto tr125 + st1654: + if p++; p == pe { + goto _test_eof1654 + } + st_case_1654: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr1485 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1655: + if p++; p == pe { + goto _test_eof1655 + } + st_case_1655: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 
170: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr125 + } + default: + goto tr126 + } + goto tr1485 + st1656: + if p++; p == pe { + goto _test_eof1656 + } + st_case_1656: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr1485 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr1485 + } + goto tr125 + st1657: + if p++; p == pe { + goto _test_eof1657 + } + st_case_1657: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr1485 + } + goto tr148 + st1658: + if p++; p == pe { + goto _test_eof1658 + } + st_case_1658: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1659: + if p++; p == pe { + goto _test_eof1659 + } + st_case_1659: + switch { + case data[p] < 141: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] && data[p] <= 189 { + goto tr148 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr148 + } + goto tr2 + st1660: + if p++; p == pe { + goto _test_eof1660 + } + st_case_1660: + if data[p] == 173 { + goto tr1485 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr1485 + } + case data[p] >= 144: + goto tr1485 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr1485 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr1485 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1661: + if p++; p == pe { + goto _test_eof1661 + } + st_case_1661: + switch { + case 
data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr1485 + } + case data[p] >= 128: + goto tr1485 + } + goto tr125 + st1662: + if p++; p == pe { + goto _test_eof1662 + } + st_case_1662: + switch data[p] { + case 128: + goto st1663 + case 129: + goto st1664 + case 130: + goto st241 + case 131: + goto st1665 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1666 + case 180: + goto st251 + case 181: + goto st1667 + case 182: + goto st253 + case 183: + goto st1668 + case 184: + goto st255 + } + goto tr125 + st1663: + if p++; p == pe { + goto _test_eof1663 + } + st_case_1663: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr1485 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1664: + if p++; p == pe { + goto _test_eof1664 + } + st_case_1664: + switch data[p] { + case 165: + goto tr125 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr125 + } + case data[p] > 159: + if 176 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr1485 + st1665: + if p++; p == pe { + goto _test_eof1665 + } + st_case_1665: + if 144 <= data[p] && data[p] <= 176 { + goto tr1485 + } + goto tr125 + st1666: + if p++; p == pe { + goto _test_eof1666 + } + st_case_1666: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr125 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr125 + } + default: + goto tr1485 + } + goto tr148 + st1667: + if p++; p == pe { + goto _test_eof1667 + } + st_case_1667: + if data[p] == 191 { + goto tr1485 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 168: + goto tr125 + } + goto tr148 + st1668: + if p++; p == pe { + goto 
_test_eof1668 + } + st_case_1668: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr1485 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1669: + if p++; p == pe { + goto _test_eof1669 + } + st_case_1669: + switch data[p] { + case 128: + goto st1670 + case 130: + goto st1671 + case 131: + goto st1164 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr125 + st1670: + if p++; p == pe { + goto _test_eof1670 + } + st_case_1670: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] < 177: + if 170 <= data[p] && data[p] <= 175 { + goto tr1485 + } + case data[p] > 181: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr1049 + } + goto tr125 + st1671: + if p++; p == pe { + goto _test_eof1671 + } + st_case_1671: + switch { + case data[p] < 155: + if 153 <= data[p] && data[p] <= 154 { + goto tr1485 + } + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + default: + goto tr1049 + } + goto tr125 + st1672: + if p++; p == pe { + goto _test_eof1672 + } + st_case_1672: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st1673 + case 153: + goto st1674 + case 154: + goto st1675 + case 155: + goto st1676 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1677 + case 161: + goto st272 + case 162: + goto st1678 + case 163: + goto st1679 + case 164: + goto st1680 + case 165: + goto st1681 + case 166: + goto st1682 + case 167: + goto st1683 + case 168: + goto st1684 + case 169: + goto st1685 + case 170: + goto st1686 + 
case 171: + goto st1687 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1688 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr125 + st1673: + if p++; p == pe { + goto _test_eof1673 + } + st_case_1673: + switch { + case data[p] < 160: + if 141 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 169: + if 172 <= data[p] { + goto tr2 + } + default: + goto tr126 + } + goto tr148 + st1674: + if p++; p == pe { + goto _test_eof1674 + } + st_case_1674: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1675: + if p++; p == pe { + goto _test_eof1675 + } + st_case_1675: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr1485 + } + goto tr125 + st1676: + if p++; p == pe { + goto _test_eof1676 + } + st_case_1676: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr1485 + } + goto tr148 + st1677: + if p++; p == pe { + goto _test_eof1677 + } + st_case_1677: + switch data[p] { + case 130: + goto tr1485 + case 134: + goto tr1485 + case 139: + goto tr1485 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr125 + } + case data[p] >= 163: + goto tr1485 + } + goto tr148 + st1678: + if p++; p == pe { + goto _test_eof1678 + } + st_case_1678: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr1485 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1679: + if p++; p == pe { + goto _test_eof1679 + } + st_case_1679: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { 
+ case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 133: + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr125 + } + goto tr1485 + st1680: + if p++; p == pe { + goto _test_eof1680 + } + st_case_1680: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1681: + if p++; p == pe { + goto _test_eof1681 + } + st_case_1681: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr1485 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st1682: + if p++; p == pe { + goto _test_eof1682 + } + st_case_1682: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1683: + if p++; p == pe { + goto _test_eof1683 + } + st_case_1683: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 129: + goto tr125 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr125 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1684: + if p++; p == pe { + goto _test_eof1684 + } + st_case_1684: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1685: + if p++; p == pe { + goto _test_eof1685 + } + st_case_1685: + if data[p] == 131 { + goto tr1485 
+ } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr1485 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr1485 + } + goto tr125 + st1686: + if p++; p == pe { + goto _test_eof1686 + } + st_case_1686: + if data[p] == 176 { + goto tr1485 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr1485 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1687: + if p++; p == pe { + goto _test_eof1687 + } + st_case_1687: + if data[p] == 129 { + goto tr1485 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr1485 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr1485 + } + goto tr125 + st1688: + if p++; p == pe { + goto _test_eof1688 + } + st_case_1688: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1689: + if p++; p == pe { + goto _test_eof1689 + } + st_case_1689: + switch data[p] { + case 172: + goto st1690 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1691 + case 185: + goto st1692 + case 187: + goto st1693 + case 188: + goto st1694 + case 189: + goto st1261 + case 190: + goto st1695 + case 191: + goto st1696 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr125 + st1690: + if p++; p == pe { + goto _test_eof1690 + } + st_case_1690: + switch data[p] 
{ + case 158: + goto tr1485 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr125 + st1691: + if p++; p == pe { + goto _test_eof1691 + } + st_case_1691: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr1485 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1692: + if p++; p == pe { + goto _test_eof1692 + } + st_case_1692: + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr1485 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1693: + if p++; p == pe { + goto _test_eof1693 + } + st_case_1693: + if data[p] == 191 { + goto tr1485 + } + if 189 <= data[p] { + goto tr125 + } + goto tr148 + st1694: + if p++; p == pe { + goto _test_eof1694 + } + st_case_1694: + if data[p] == 191 { + goto tr1485 + } + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr125 + st1695: + if p++; p == pe { + goto _test_eof1695 + } + st_case_1695: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr1485 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + goto tr1049 + st1696: + if p++; p == pe { + goto _test_eof1696 + } + st_case_1696: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr1485 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + 
st1697: + if p++; p == pe { + goto _test_eof1697 + } + st_case_1697: + switch data[p] { + case 144: + goto st1698 + case 145: + goto st1705 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1727 + case 155: + goto st1734 + case 157: + goto st1736 + case 158: + goto st1744 + case 159: + goto st403 + } + goto tr125 + st1698: + if p++; p == pe { + goto _test_eof1698 + } + st_case_1698: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st1699 + case 138: + goto st313 + case 139: + goto st1700 + case 140: + goto st315 + case 141: + goto st1701 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st1703 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st1704 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr125 + st1699: + if p++; p == pe { + goto _test_eof1699 + } + st_case_1699: + if data[p] == 189 { + goto tr1485 + } + goto tr125 + st1700: + if p++; p == pe { + goto _test_eof1700 + } + st_case_1700: + if data[p] == 160 { + goto tr1485 + } + if 145 <= data[p] { + goto tr125 + } + goto tr148 + st1701: + if p++; p == pe { + goto _test_eof1701 + } + st_case_1701: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr125 + } + default: 
+ goto tr1485 + } + goto tr148 + st1702: + if p++; p == pe { + goto _test_eof1702 + } + st_case_1702: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] > 169: + if 170 <= data[p] { + goto tr2 + } + default: + goto tr126 + } + goto tr148 + st1703: + if p++; p == pe { + goto _test_eof1703 + } + st_case_1703: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr1485 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr1485 + } + default: + goto tr1485 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr1485 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1704: + if p++; p == pe { + goto _test_eof1704 + } + st_case_1704: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1705: + if p++; p == pe { + goto _test_eof1705 + } + st_case_1705: + switch data[p] { + case 128: + goto st1706 + case 129: + goto st1707 + case 130: + goto st1708 + case 131: + goto st1709 + case 132: + goto st1710 + case 133: + goto st1711 + case 134: + goto st1712 + case 135: + goto st1713 + case 136: + goto st1714 + case 138: + goto st348 + case 139: + goto st1715 + case 140: + goto st1716 + case 141: + goto st1717 + case 146: + goto st1718 + case 147: + goto st1719 + case 150: + goto st1720 + case 151: + goto st1721 + case 152: + goto st1718 + case 153: + goto st1722 + case 154: + goto st1723 + case 155: + goto st1724 + case 156: + goto st1725 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr125 + st1706: + if p++; p == pe { + 
goto _test_eof1706 + } + st_case_1706: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1485 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1707: + if p++; p == pe { + goto _test_eof1707 + } + st_case_1707: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr125 + } + default: + goto tr126 + } + goto tr1485 + st1708: + if p++; p == pe { + goto _test_eof1708 + } + st_case_1708: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr1485 + st1709: + if p++; p == pe { + goto _test_eof1709 + } + st_case_1709: + switch { + case data[p] > 168: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 144: + goto tr148 + } + goto tr2 + st1710: + if p++; p == pe { + goto _test_eof1710 + } + st_case_1710: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1485 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1711: + if p++; p == pe { + goto _test_eof1711 + } + st_case_1711: + switch data[p] { + case 179: + goto tr1485 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr125 + st1712: + if p++; p == pe { + goto _test_eof1712 + } + st_case_1712: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1485 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1713: + if p++; p == pe { + goto _test_eof1713 + } + st_case_1713: + if data[p] == 155 { + goto tr125 + } + switch { + case data[p] < 141: + switch { + case data[p] > 
132: + if 133 <= data[p] && data[p] <= 137 { + goto tr125 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + default: + goto tr125 + } + goto tr1485 + st1714: + if p++; p == pe { + goto _test_eof1714 + } + st_case_1714: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1715: + if p++; p == pe { + goto _test_eof1715 + } + st_case_1715: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr1485 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr125 + } + goto tr148 + st1716: + if p++; p == pe { + goto _test_eof1716 + } + st_case_1716: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr1485 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1485 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1717: + if p++; p == pe { + goto _test_eof1717 + } + st_case_1717: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr1485 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr1485 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr1485 + } + default: 
+ goto tr1485 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr1485 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr1485 + } + default: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1718: + if p++; p == pe { + goto _test_eof1718 + } + st_case_1718: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1719: + if p++; p == pe { + goto _test_eof1719 + } + st_case_1719: + if data[p] == 134 { + goto tr125 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1720: + if p++; p == pe { + goto _test_eof1720 + } + st_case_1720: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr1485 + } + default: + goto tr1485 + } + goto tr125 + st1721: + if p++; p == pe { + goto _test_eof1721 + } + st_case_1721: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr125 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + goto tr1485 + st1722: + if p++; p == pe { + goto _test_eof1722 + } + st_case_1722: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr1485 + st1723: + if p++; p == pe { + goto _test_eof1723 + } + st_case_1723: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1724: + if p++; p == pe { + goto _test_eof1724 + } + st_case_1724: + if 128 <= data[p] && 
data[p] <= 137 { + goto tr126 + } + goto tr2 + st1725: + if p++; p == pe { + goto _test_eof1725 + } + st_case_1725: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr1485 + } + goto tr125 + st1726: + if p++; p == pe { + goto _test_eof1726 + } + st_case_1726: + switch { + case data[p] < 170: + if 160 <= data[p] && data[p] <= 169 { + goto tr126 + } + case data[p] > 190: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st1727: + if p++; p == pe { + goto _test_eof1727 + } + st_case_1727: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st1728 + case 171: + goto st1729 + case 172: + goto st1730 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1732 + case 190: + goto st1733 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr125 + st1728: + if p++; p == pe { + goto _test_eof1728 + } + st_case_1728: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 169 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st1729: + if p++; p == pe { + goto _test_eof1729 + } + st_case_1729: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr1485 + } + case data[p] >= 144: + goto tr148 + } + goto tr125 + st1730: + if p++; p == pe { + goto _test_eof1730 + } + st_case_1730: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr1485 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1731: + if p++; p == pe { + goto _test_eof1731 + } + st_case_1731: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 131 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 183: + if 189 <= data[p] { + goto tr148 + } + case data[p] >= 163: + goto tr148 + } + default: + goto tr126 + } + goto tr2 + st1732: + if p++; p == pe { + goto _test_eof1732 + } + st_case_1732: + switch { + 
case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr125 + } + default: + goto tr1485 + } + goto tr148 + st1733: + if p++; p == pe { + goto _test_eof1733 + } + st_case_1733: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr1485 + } + goto tr125 + st1734: + if p++; p == pe { + goto _test_eof1734 + } + st_case_1734: + switch data[p] { + case 128: + goto st1224 + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st1735 + } + goto tr125 + st1735: + if p++; p == pe { + goto _test_eof1735 + } + st_case_1735: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr1485 + } + case data[p] >= 157: + goto tr1485 + } + default: + goto tr148 + } + goto tr125 + st1736: + if p++; p == pe { + goto _test_eof1736 + } + st_case_1736: + switch data[p] { + case 133: + goto st1737 + case 134: + goto st1738 + case 137: + goto st1739 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st1741 + case 169: + goto st1742 + case 170: + goto st1743 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr125 + st1737: + if p++; p == pe { + goto _test_eof1737 + } + st_case_1737: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr1485 + } + case data[p] >= 165: + goto tr1485 + } + goto tr125 + st1738: + if p++; p == pe { + goto _test_eof1738 + } + st_case_1738: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr125 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr125 + 
} + default: + goto tr125 + } + goto tr1485 + st1739: + if p++; p == pe { + goto _test_eof1739 + } + st_case_1739: + if 130 <= data[p] && data[p] <= 132 { + goto tr1485 + } + goto tr125 + st1740: + if p++; p == pe { + goto _test_eof1740 + } + st_case_1740: + if data[p] == 131 { + goto tr2 + } + switch { + case data[p] < 142: + if 140 <= data[p] && data[p] <= 141 { + goto tr2 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + default: + goto tr126 + } + goto tr148 + st1741: + if p++; p == pe { + goto _test_eof1741 + } + st_case_1741: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr1485 + } + case data[p] >= 128: + goto tr1485 + } + goto tr125 + st1742: + if p++; p == pe { + goto _test_eof1742 + } + st_case_1742: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 173: + goto tr125 + } + goto tr1485 + st1743: + if p++; p == pe { + goto _test_eof1743 + } + st_case_1743: + if data[p] == 132 { + goto tr1485 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr1485 + } + case data[p] >= 155: + goto tr1485 + } + goto tr125 + st1744: + if p++; p == pe { + goto _test_eof1744 + } + st_case_1744: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1745 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr125 + st1745: + if p++; p == pe { + goto _test_eof1745 + } + st_case_1745: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr125 + } + default: + goto tr1485 + } + goto tr148 + st1746: + if p++; p == pe { + goto _test_eof1746 + } + st_case_1746: + if data[p] == 160 { + goto st1747 + } + goto tr125 + st1747: + if p++; p == pe { + goto _test_eof1747 + } + st_case_1747: + switch data[p] { + case 128: + goto st1748 + case 129: + goto st1749 + case 132: + goto st1593 + case 135: + goto 
st1751 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1750 + } + goto tr125 + st1748: + if p++; p == pe { + goto _test_eof1748 + } + st_case_1748: + if data[p] == 129 { + goto tr1485 + } + if 160 <= data[p] { + goto tr1485 + } + goto tr125 + st1749: + if p++; p == pe { + goto _test_eof1749 + } + st_case_1749: + if 192 <= data[p] { + goto tr125 + } + goto tr1485 + st1750: + if p++; p == pe { + goto _test_eof1750 + } + st_case_1750: + goto tr1485 + st1751: + if p++; p == pe { + goto _test_eof1751 + } + st_case_1751: + if 176 <= data[p] { + goto tr125 + } + goto tr1485 + st1752: + if p++; p == pe { + goto _test_eof1752 + } + st_case_1752: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr126 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr125 + st1753: + if p++; p == pe { + goto _test_eof1753 + } + st_case_1753: + if 128 <= data[p] { + goto tr126 + } + goto tr125 + st1754: + if p++; p == pe { + goto _test_eof1754 + } + st_case_1754: + switch data[p] { + case 181: + goto tr125 + case 190: + goto st141 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr125 + } + goto tr126 + st1755: + if p++; p == pe { + goto _test_eof1755 + } + st_case_1755: + if data[p] == 130 { + goto tr125 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr126 + } + goto tr148 + st1756: + if p++; p == pe { + goto _test_eof1756 + } + st_case_1756: + switch data[p] { + case 137: + goto st141 + case 190: + goto tr125 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr125 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1757: + if p++; p == pe { + goto _test_eof1757 + } + st_case_1757: + switch data[p] { + case 135: + goto tr126 + case 179: + goto tr148 + } + switch { + case 
data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr126 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr126 + } + goto tr125 + st1758: + if p++; p == pe { + goto _test_eof1758 + } + st_case_1758: + if data[p] == 156 { + goto tr126 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 133 { + goto tr126 + } + case data[p] > 141: + switch { + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 144: + goto tr126 + } + default: + goto st141 + } + goto tr125 + st1759: + if p++; p == pe { + goto _test_eof1759 + } + st_case_1759: + switch data[p] { + case 171: + goto tr126 + case 172: + goto st141 + case 176: + goto tr126 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1760: + if p++; p == pe { + goto _test_eof1760 + } + st_case_1760: + switch data[p] { + case 148: + goto tr125 + case 158: + goto tr125 + case 169: + goto tr125 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr126 + } + case data[p] >= 150: + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 189: + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1761: + if p++; p == pe { + goto _test_eof1761 + } + st_case_1761: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr126 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1762: + if p++; p == pe { + goto _test_eof1762 + } + st_case_1762: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + 
goto tr125 + } + goto tr126 + st1763: + if p++; p == pe { + goto _test_eof1763 + } + st_case_1763: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + goto tr148 + st1764: + if p++; p == pe { + goto _test_eof1764 + } + st_case_1764: + switch data[p] { + case 184: + goto st141 + case 186: + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1765: + if p++; p == pe { + goto _test_eof1765 + } + st_case_1765: + switch data[p] { + case 160: + goto st1766 + case 161: + goto st1767 + case 162: + goto st168 + case 163: + goto st1768 + case 164: + goto st1769 + case 165: + goto st1770 + case 166: + goto st1771 + case 167: + goto st1772 + case 168: + goto st1773 + case 169: + goto st1774 + case 170: + goto st1775 + case 171: + goto st1776 + case 172: + goto st1777 + case 173: + goto st1778 + case 174: + goto st1779 + case 175: + goto st1780 + case 176: + goto st1781 + case 177: + goto st1782 + case 178: + goto st1783 + case 179: + goto st1784 + case 180: + goto st1785 + case 181: + goto st1786 + case 182: + goto st1787 + case 183: + goto st1788 + case 184: + goto st1789 + case 185: + goto st1790 + case 186: + goto st1791 + case 187: + goto st1792 + case 188: + goto st1793 + case 189: + goto st1794 + case 190: + goto st1795 + case 191: + goto st1796 + } + goto tr125 + st1766: + if p++; p == pe { + goto _test_eof1766 + } + st_case_1766: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1767: + if p++; p == pe { + goto _test_eof1767 + } + st_case_1767: + switch { + case data[p] > 
152: + if 153 <= data[p] && data[p] <= 155 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1768: + if p++; p == pe { + goto _test_eof1768 + } + st_case_1768: + if 163 <= data[p] { + goto tr126 + } + goto tr125 + st1769: + if p++; p == pe { + goto _test_eof1769 + } + st_case_1769: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr126 + st1770: + if p++; p == pe { + goto _test_eof1770 + } + st_case_1770: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr125 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + if 177 <= data[p] { + goto tr148 + } + default: + goto tr125 + } + goto tr126 + st1771: + if p++; p == pe { + goto _test_eof1771 + } + st_case_1771: + switch data[p] { + case 132: + goto tr125 + case 169: + goto tr125 + case 177: + goto tr125 + case 188: + goto tr126 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr125 + } + case data[p] >= 129: + goto tr126 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr125 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr126 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st1772: + if p++; p == pe { + goto _test_eof1772 + } + st_case_1772: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr125 + } + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + if 143 <= data[p] && data[p] <= 150 { + goto tr125 + } + default: + goto tr125 + } + case data[p] > 155: + switch { + case data[p] < 164: + if 156 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr125 + 
} + default: + goto tr125 + } + goto tr126 + st1773: + if p++; p == pe { + goto _test_eof1773 + } + st_case_1773: + if data[p] == 188 { + goto tr126 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr126 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1774: + if p++; p == pe { + goto _test_eof1774 + } + st_case_1774: + if data[p] == 157 { + goto tr125 + } + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr125 + } + default: + goto tr125 + } + case data[p] > 152: + switch { + case data[p] < 159: + if 153 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1775: + if p++; p == pe { + goto _test_eof1775 + } + st_case_1775: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr126 + } + case data[p] >= 181: + 
goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1776: + if p++; p == pe { + goto _test_eof1776 + } + st_case_1776: + switch data[p] { + case 134: + goto tr125 + case 138: + goto tr125 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 160: + if 142 <= data[p] && data[p] <= 159 { + goto tr125 + } + case data[p] > 161: + switch { + case data[p] > 165: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 164: + goto tr125 + } + default: + goto tr148 + } + goto tr126 + st1777: + if p++; p == pe { + goto _test_eof1777 + } + st_case_1777: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1778: + if p++; p == pe { + goto _test_eof1778 + } + st_case_1778: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr126 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr126 + } + default: + goto tr126 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 162: + goto tr126 + } + default: + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1779: + if p++; p == pe { + goto _test_eof1779 + } + st_case_1779: + switch data[p] { + case 130: + goto 
tr126 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1780: + if p++; p == pe { + goto _test_eof1780 + } + st_case_1780: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr126 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr126 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1781: + if p++; p == pe { + goto _test_eof1781 + } + st_case_1781: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1782: + if p++; p == pe { + goto _test_eof1782 + } + st_case_1782: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + case 151: + goto tr125 + } + switch { + case data[p] < 155: + switch { + case data[p] > 148: + if 152 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 142: + goto tr125 + } + case data[p] > 159: + 
switch { + case data[p] < 164: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + if 176 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1783: + if p++; p == pe { + goto _test_eof1783 + } + st_case_1783: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr126 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1784: + if p++; p == pe { + goto _test_eof1784 + } + st_case_1784: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr126 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr126 + } + default: + goto tr126 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1785: + if p++; p == pe { + goto _test_eof1785 + } + st_case_1785: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr126 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr126 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1786: + if p++; p == pe { + goto _test_eof1786 + } + 
st_case_1786: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr125 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] < 186: + if 176 <= data[p] && data[p] <= 185 { + goto tr125 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + default: + goto tr125 + } + goto tr126 + st1787: + if p++; p == pe { + goto _test_eof1787 + } + st_case_1787: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1788: + if p++; p == pe { + goto _test_eof1788 + } + st_case_1788: + switch data[p] { + case 138: + goto tr126 + case 150: + goto tr126 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr126 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1789: + if p++; p == pe { + goto _test_eof1789 + } + st_case_1789: + if data[p] == 177 { + goto tr126 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr126 + } + goto tr125 + st1790: + if p++; p == pe { + goto _test_eof1790 + } + st_case_1790: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr126 + } + goto tr125 + st1791: + if p++; p == pe { + goto _test_eof1791 + } + st_case_1791: + if data[p] == 177 { + goto tr126 + } + switch { + case 
data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr126 + } + case data[p] >= 180: + goto tr126 + } + goto tr125 + st1792: + if p++; p == pe { + goto _test_eof1792 + } + st_case_1792: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr126 + } + goto tr125 + st1793: + if p++; p == pe { + goto _test_eof1793 + } + st_case_1793: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr126 + case 183: + goto tr126 + case 185: + goto tr126 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1794: + if p++; p == pe { + goto _test_eof1794 + } + st_case_1794: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1795: + if p++; p == pe { + goto _test_eof1795 + } + st_case_1795: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr126 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr126 + } + case data[p] >= 141: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1796: + if p++; p == pe { + goto _test_eof1796 + } + st_case_1796: + if data[p] == 134 { + goto tr126 + } + goto tr125 + st1797: + if p++; p == pe { + goto _test_eof1797 + } + st_case_1797: + switch data[p] { + case 128: + goto st1798 + case 129: + goto st1799 + case 130: + goto st1800 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1801 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 
154: + goto st212 + case 155: + goto st213 + case 156: + goto st1802 + case 157: + goto st1803 + case 158: + goto st1804 + case 159: + goto st1805 + case 160: + goto st1806 + case 161: + goto st219 + case 162: + goto st1807 + case 163: + goto st221 + case 164: + goto st1808 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st1809 + case 169: + goto st1810 + case 170: + goto st1811 + case 172: + goto st1812 + case 173: + goto st1813 + case 174: + goto st1814 + case 175: + goto st1815 + case 176: + goto st1816 + case 177: + goto st1659 + case 179: + goto st1817 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st1818 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr125 + st1798: + if p++; p == pe { + goto _test_eof1798 + } + st_case_1798: + if 171 <= data[p] && data[p] <= 190 { + goto tr126 + } + goto tr125 + st1799: + if p++; p == pe { + goto _test_eof1799 + } + st_case_1799: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr126 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr126 + } + default: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1800: + if p++; p == pe { + goto _test_eof1800 + } + st_case_1800: + switch { + case data[p] < 143: + if 130 <= data[p] && data[p] <= 141 { + goto tr126 + } + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1801: + if p++; p == pe { + goto _test_eof1801 + } + st_case_1801: + switch { + case data[p] < 157: + if 155 <= data[p] 
&& data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1802: + if p++; p == pe { + goto _test_eof1802 + } + st_case_1802: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr126 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1803: + if p++; p == pe { + goto _test_eof1803 + } + st_case_1803: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr126 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1804: + if p++; p == pe { + goto _test_eof1804 + } + st_case_1804: + if 180 <= data[p] { + goto tr126 + } + goto tr125 + st1805: + if p++; p == pe { + goto _test_eof1805 + } + st_case_1805: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + if 170 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1806: + if p++; p == pe { + goto _test_eof1806 + } + st_case_1806: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr126 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1807: + if p++; p == pe { + goto _test_eof1807 + } + st_case_1807: + if data[p] == 169 { + goto tr126 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1808: + if p++; p == pe { + goto _test_eof1808 + } + st_case_1808: + switch { + case data[p] < 160: + if 128 <= 
data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1809: + if p++; p == pe { + goto _test_eof1809 + } + st_case_1809: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1810: + if p++; p == pe { + goto _test_eof1810 + } + st_case_1810: + if data[p] == 191 { + goto tr126 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr126 + } + case data[p] >= 149: + goto tr126 + } + goto tr125 + st1811: + if p++; p == pe { + goto _test_eof1811 + } + st_case_1811: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1812: + if p++; p == pe { + goto _test_eof1812 + } + st_case_1812: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr126 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1813: + if p++; p == pe { + goto _test_eof1813 + } + st_case_1813: + switch { + case data[p] < 140: + if 133 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1814: + if p++; p == pe { + goto _test_eof1814 + } + st_case_1814: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1815: + 
if p++; p == pe { + goto _test_eof1815 + } + st_case_1815: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + goto tr148 + st1816: + if p++; p == pe { + goto _test_eof1816 + } + st_case_1816: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1817: + if p++; p == pe { + goto _test_eof1817 + } + st_case_1817: + if data[p] == 173 { + goto tr126 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr126 + } + case data[p] >= 144: + goto tr126 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr126 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr126 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1818: + if p++; p == pe { + goto _test_eof1818 + } + st_case_1818: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 128: + goto tr126 + } + goto tr125 + st1819: + if p++; p == pe { + goto _test_eof1819 + } + st_case_1819: + switch data[p] { + case 128: + goto st1820 + case 129: + goto st1821 + case 130: + goto st241 + case 131: + goto st1822 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1823 + case 180: + goto st251 + case 181: + goto st1824 + case 182: + goto st253 + case 183: + goto st1825 + case 184: + goto st255 + } + goto tr125 + st1820: + if p++; p == pe { + goto _test_eof1820 + } + st_case_1820: + if data[p] == 164 { + goto st141 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr126 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr1485 
+ } + case data[p] >= 170: + goto tr126 + } + default: + goto st141 + } + goto tr125 + st1821: + if p++; p == pe { + goto _test_eof1821 + } + st_case_1821: + switch data[p] { + case 132: + goto st141 + case 165: + goto tr125 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr125 + } + goto tr1485 + st1822: + if p++; p == pe { + goto _test_eof1822 + } + st_case_1822: + if 144 <= data[p] && data[p] <= 176 { + goto tr126 + } + goto tr125 + st1823: + if p++; p == pe { + goto _test_eof1823 + } + st_case_1823: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr125 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1824: + if p++; p == pe { + goto _test_eof1824 + } + st_case_1824: + if data[p] == 191 { + goto tr126 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 168: + goto tr125 + } + goto tr148 + st1825: + if p++; p == pe { + goto _test_eof1825 + } + st_case_1825: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1826: + if p++; p == pe { + goto _test_eof1826 + } + st_case_1826: + switch data[p] { + case 128: + goto st1827 + case 130: + goto st1828 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr125 + st1827: + if p++; p == pe { + goto _test_eof1827 + } + st_case_1827: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= 
data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr126 + } + goto tr125 + st1828: + if p++; p == pe { + goto _test_eof1828 + } + st_case_1828: + if 153 <= data[p] && data[p] <= 154 { + goto tr126 + } + goto tr125 + st1829: + if p++; p == pe { + goto _test_eof1829 + } + st_case_1829: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st1673 + case 153: + goto st1830 + case 154: + goto st1831 + case 155: + goto st1832 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1833 + case 161: + goto st272 + case 162: + goto st1834 + case 163: + goto st1835 + case 164: + goto st1836 + case 165: + goto st1837 + case 166: + goto st1838 + case 167: + goto st1839 + case 168: + goto st1840 + case 169: + goto st1841 + case 170: + goto st1842 + case 171: + goto st1843 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1844 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr125 + st1830: + if p++; p == pe { + goto _test_eof1830 + } + st_case_1830: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1831: + if p++; p == pe { + goto _test_eof1831 + } + st_case_1831: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1832: + if p++; p == pe { + goto _test_eof1832 + } + st_case_1832: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr126 + } + goto tr148 + st1833: + if p++; p == pe { + goto _test_eof1833 + } + st_case_1833: + switch data[p] { + case 130: + goto tr126 + 
case 134: + goto tr126 + case 139: + goto tr126 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr125 + } + case data[p] >= 163: + goto tr126 + } + goto tr148 + st1834: + if p++; p == pe { + goto _test_eof1834 + } + st_case_1834: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr126 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1835: + if p++; p == pe { + goto _test_eof1835 + } + st_case_1835: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr125 + } + goto tr126 + st1836: + if p++; p == pe { + goto _test_eof1836 + } + st_case_1836: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1837: + if p++; p == pe { + goto _test_eof1837 + } + st_case_1837: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr126 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st1838: + if p++; p == pe { + goto _test_eof1838 + } + st_case_1838: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1839: + if p++; p == pe { + goto _test_eof1839 + } + st_case_1839: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + if 129 <= data[p] && data[p] <= 142 { + goto tr125 + } + case data[p] > 164: + switch { + case data[p] > 175: + if 186 <= data[p] { + goto 
tr125 + } + case data[p] >= 166: + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1840: + if p++; p == pe { + goto _test_eof1840 + } + st_case_1840: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1841: + if p++; p == pe { + goto _test_eof1841 + } + st_case_1841: + if data[p] == 131 { + goto tr126 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr126 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1842: + if p++; p == pe { + goto _test_eof1842 + } + st_case_1842: + if data[p] == 176 { + goto tr126 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr126 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1843: + if p++; p == pe { + goto _test_eof1843 + } + st_case_1843: + if data[p] == 129 { + goto tr126 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr126 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st1844: + if p++; p == pe { + goto _test_eof1844 + } + st_case_1844: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1845: + if p++; p == pe { + goto _test_eof1845 + } + st_case_1845: + switch data[p] { + case 172: + goto st1846 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto 
st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1847 + case 185: + goto st1848 + case 187: + goto st1849 + case 188: + goto st1850 + case 189: + goto st303 + case 190: + goto st1851 + case 191: + goto st1852 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr125 + st1846: + if p++; p == pe { + goto _test_eof1846 + } + st_case_1846: + switch data[p] { + case 158: + goto tr126 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr125 + st1847: + if p++; p == pe { + goto _test_eof1847 + } + st_case_1847: + switch data[p] { + case 144: + goto st141 + case 148: + goto st141 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr126 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr1485 + } + default: + goto tr126 + } + goto tr125 + st1848: + if p++; p == pe { + goto _test_eof1848 + } + st_case_1848: + switch data[p] { + case 144: + goto st141 + case 146: + goto st141 + case 148: + goto st141 + } + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr1485 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1849: + if p++; p == pe { + goto _test_eof1849 + } + st_case_1849: + if data[p] == 191 { + goto tr126 + } + if 189 <= data[p] { + goto tr125 + } + goto tr148 + st1850: + if p++; p == pe { + goto _test_eof1850 + } + st_case_1850: + switch data[p] { + case 135: + goto st141 + case 140: + goto st141 + case 142: + goto st141 + case 155: + goto st141 + case 191: + goto tr1485 + } + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr125 + st1851: + if p++; 
p == pe { + goto _test_eof1851 + } + st_case_1851: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr126 + } + goto tr125 + st1852: + if p++; p == pe { + goto _test_eof1852 + } + st_case_1852: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr126 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1853: + if p++; p == pe { + goto _test_eof1853 + } + st_case_1853: + switch data[p] { + case 144: + goto st1854 + case 145: + goto st1860 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1879 + case 155: + goto st1884 + case 157: + goto st1886 + case 158: + goto st1893 + case 159: + goto st403 + } + goto tr125 + st1854: + if p++; p == pe { + goto _test_eof1854 + } + st_case_1854: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st1855 + case 138: + goto st313 + case 139: + goto st1856 + case 140: + goto st315 + case 141: + goto st1857 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st1858 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st1859 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 
178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr125 + st1855: + if p++; p == pe { + goto _test_eof1855 + } + st_case_1855: + if data[p] == 189 { + goto tr126 + } + goto tr125 + st1856: + if p++; p == pe { + goto _test_eof1856 + } + st_case_1856: + if data[p] == 160 { + goto tr126 + } + if 145 <= data[p] { + goto tr125 + } + goto tr148 + st1857: + if p++; p == pe { + goto _test_eof1857 + } + st_case_1857: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1858: + if p++; p == pe { + goto _test_eof1858 + } + st_case_1858: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr126 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr126 + } + default: + goto tr126 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr126 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1859: + if p++; p == pe { + goto _test_eof1859 + } + st_case_1859: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1860: + if p++; p == pe { + goto _test_eof1860 + } + st_case_1860: + switch data[p] { + case 128: + goto st1861 + case 129: + goto st1862 + case 130: + goto st1863 + case 131: + goto st1709 + case 132: + goto st1864 + case 133: + goto st1865 + case 134: + goto st1866 + case 135: + goto st1867 + case 136: + goto st1868 + case 138: + goto st348 + case 139: + goto st1869 + case 140: + goto st1870 + case 141: + goto st1871 + 
case 146: + goto st1872 + case 147: + goto st1873 + case 150: + goto st1874 + case 151: + goto st1875 + case 152: + goto st1872 + case 153: + goto st1876 + case 154: + goto st1877 + case 155: + goto st1724 + case 156: + goto st1878 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr125 + st1861: + if p++; p == pe { + goto _test_eof1861 + } + st_case_1861: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr126 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1862: + if p++; p == pe { + goto _test_eof1862 + } + st_case_1862: + switch { + case data[p] > 165: + if 176 <= data[p] && data[p] <= 190 { + goto tr125 + } + case data[p] >= 135: + goto tr125 + } + goto tr126 + st1863: + if p++; p == pe { + goto _test_eof1863 + } + st_case_1863: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1864: + if p++; p == pe { + goto _test_eof1864 + } + st_case_1864: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr126 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1865: + if p++; p == pe { + goto _test_eof1865 + } + st_case_1865: + switch data[p] { + case 179: + goto tr126 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr125 + st1866: + if p++; p == pe { + goto _test_eof1866 + } + st_case_1866: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr126 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1867: + if p++; p == pe { + goto _test_eof1867 + } + st_case_1867: + if data[p] == 155 { + goto 
tr125 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr125 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 156: + if 157 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr125 + } + goto tr126 + st1868: + if p++; p == pe { + goto _test_eof1868 + } + st_case_1868: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1869: + if p++; p == pe { + goto _test_eof1869 + } + st_case_1869: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr126 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr125 + } + goto tr148 + st1870: + if p++; p == pe { + goto _test_eof1870 + } + st_case_1870: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr126 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st1871: + if p++; p == pe { + goto _test_eof1871 + } + st_case_1871: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr126 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr126 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr126 + } + default: + 
goto tr126 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr126 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr126 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1872: + if p++; p == pe { + goto _test_eof1872 + } + st_case_1872: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1873: + if p++; p == pe { + goto _test_eof1873 + } + st_case_1873: + if data[p] == 134 { + goto tr125 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + if 154 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1874: + if p++; p == pe { + goto _test_eof1874 + } + st_case_1874: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr126 + } + default: + goto tr126 + } + goto tr125 + st1875: + if p++; p == pe { + goto _test_eof1875 + } + st_case_1875: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr125 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + goto tr126 + st1876: + if p++; p == pe { + goto _test_eof1876 + } + st_case_1876: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] > 143: + if 154 <= data[p] { + goto tr125 + } + case data[p] >= 129: + goto tr125 + } + goto tr126 + st1877: + if p++; p == pe { + goto _test_eof1877 + } + st_case_1877: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1878: + if p++; p == pe { + goto _test_eof1878 + } + st_case_1878: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr126 + } + goto tr125 + st1879: + if p++; p == pe { + goto 
_test_eof1879 + } + st_case_1879: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st1728 + case 171: + goto st1880 + case 172: + goto st1881 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st1882 + case 190: + goto st1883 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr125 + st1880: + if p++; p == pe { + goto _test_eof1880 + } + st_case_1880: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr126 + } + case data[p] >= 144: + goto tr148 + } + goto tr125 + st1881: + if p++; p == pe { + goto _test_eof1881 + } + st_case_1881: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr126 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st1882: + if p++; p == pe { + goto _test_eof1882 + } + st_case_1882: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1883: + if p++; p == pe { + goto _test_eof1883 + } + st_case_1883: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr126 + } + goto tr125 + st1884: + if p++; p == pe { + goto _test_eof1884 + } + st_case_1884: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st1885 + } + goto tr125 + st1885: + if p++; p == pe { + goto _test_eof1885 + } + st_case_1885: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr126 + } + case data[p] >= 157: + goto tr126 + } + default: + goto tr148 + } + goto tr125 + st1886: + if p++; p == pe { + goto _test_eof1886 + } + st_case_1886: + switch data[p] { + case 133: + goto st1887 + case 134: + goto st1888 + case 137: + goto st1889 + case 
144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st1890 + case 169: + goto st1891 + case 170: + goto st1892 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr125 + st1887: + if p++; p == pe { + goto _test_eof1887 + } + st_case_1887: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr126 + } + case data[p] >= 165: + goto tr126 + } + goto tr125 + st1888: + if p++; p == pe { + goto _test_eof1888 + } + st_case_1888: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr125 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr126 + st1889: + if p++; p == pe { + goto _test_eof1889 + } + st_case_1889: + if 130 <= data[p] && data[p] <= 132 { + goto tr126 + } + goto tr125 + st1890: + if p++; p == pe { + goto _test_eof1890 + } + st_case_1890: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr126 + } + case data[p] >= 128: + goto tr126 + } + goto tr125 + st1891: + if p++; p == pe { + goto _test_eof1891 + } + st_case_1891: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 173: + goto tr125 + } + goto tr126 + st1892: + if p++; p == pe { + goto _test_eof1892 + } + st_case_1892: + if data[p] == 132 { + goto tr126 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 155: + goto tr126 + } + goto tr125 + st1893: + if p++; p == pe { + goto _test_eof1893 + } + st_case_1893: + switch data[p] { + case 160: + goto st147 + case 163: + goto st1894 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr125 + st1894: + if 
p++; p == pe { + goto _test_eof1894 + } + st_case_1894: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st1895: + if p++; p == pe { + goto _test_eof1895 + } + st_case_1895: + if data[p] == 160 { + goto st1896 + } + goto tr125 + st1896: + if p++; p == pe { + goto _test_eof1896 + } + st_case_1896: + switch data[p] { + case 128: + goto st1897 + case 129: + goto st1898 + case 132: + goto st1753 + case 135: + goto st1900 + } + if 133 <= data[p] && data[p] <= 134 { + goto st1899 + } + goto tr125 + st1897: + if p++; p == pe { + goto _test_eof1897 + } + st_case_1897: + if data[p] == 129 { + goto tr126 + } + if 160 <= data[p] { + goto tr126 + } + goto tr125 + st1898: + if p++; p == pe { + goto _test_eof1898 + } + st_case_1898: + if 192 <= data[p] { + goto tr125 + } + goto tr126 + st1899: + if p++; p == pe { + goto _test_eof1899 + } + st_case_1899: + goto tr126 + st1900: + if p++; p == pe { + goto _test_eof1900 + } + st_case_1900: + if 176 <= data[p] { + goto tr125 + } + goto tr126 + st1901: + if p++; p == pe { + goto _test_eof1901 + } + st_case_1901: + if data[p] == 173 { + goto st141 + } + goto tr125 + st1902: + if p++; p == pe { + goto _test_eof1902 + } + st_case_1902: + if 128 <= data[p] { + goto st141 + } + goto tr125 + st1903: + if p++; p == pe { + goto _test_eof1903 + } + st_case_1903: + if 176 <= data[p] { + goto tr125 + } + goto st141 + st1904: + if p++; p == pe { + goto _test_eof1904 + } + st_case_1904: + if 131 <= data[p] && data[p] <= 137 { + goto st141 + } + goto tr125 + st1905: + if p++; p == pe { + goto _test_eof1905 + } + st_case_1905: + if data[p] == 191 { + goto st141 + } + if 145 <= data[p] && data[p] <= 189 { + goto st141 + } + goto tr125 + st1906: + if p++; p == pe { + goto _test_eof1906 + } + st_case_1906: + if data[p] == 135 { + goto st141 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 
133 { + goto st141 + } + case data[p] >= 129: + goto st141 + } + goto tr125 + st1907: + if p++; p == pe { + goto _test_eof1907 + } + st_case_1907: + if data[p] == 156 { + goto st141 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1908: + if p++; p == pe { + goto _test_eof1908 + } + st_case_1908: + switch data[p] { + case 171: + goto tr126 + case 176: + goto st141 + } + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 169 { + goto tr126 + } + case data[p] >= 139: + goto st141 + } + goto tr125 + st1909: + if p++; p == pe { + goto _test_eof1909 + } + st_case_1909: + switch { + case data[p] < 167: + switch { + case data[p] > 157: + if 159 <= data[p] && data[p] <= 164 { + goto st141 + } + case data[p] >= 150: + goto st141 + } + case data[p] > 168: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 170: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1910: + if p++; p == pe { + goto _test_eof1910 + } + st_case_1910: + switch data[p] { + case 143: + goto st141 + case 145: + goto st141 + } + if 176 <= data[p] { + goto st141 + } + goto tr125 + st1911: + if p++; p == pe { + goto _test_eof1911 + } + st_case_1911: + if 139 <= data[p] { + goto tr125 + } + goto st141 + st1912: + if p++; p == pe { + goto _test_eof1912 + } + st_case_1912: + if 166 <= data[p] && data[p] <= 176 { + goto st141 + } + goto tr125 + st1913: + if p++; p == pe { + goto _test_eof1913 + } + st_case_1913: + switch { + case data[p] > 137: + if 171 <= data[p] && data[p] <= 179 { + goto st141 + } + case data[p] >= 128: + goto tr126 + } + goto tr125 + st1914: + if p++; p == pe { + goto _test_eof1914 + } + st_case_1914: + switch data[p] { + case 160: + goto st1915 + case 161: + goto st1916 + case 163: + goto st1917 + case 164: + goto st1918 + case 165: + goto st1919 + case 167: + goto st1921 + case 169: + goto st1922 + case 
171: + goto st1923 + case 173: + goto st1925 + case 174: + goto st1926 + case 175: + goto st1927 + case 176: + goto st1928 + case 177: + goto st1929 + case 179: + goto st1930 + case 180: + goto st1931 + case 181: + goto st1932 + case 182: + goto st1933 + case 183: + goto st1934 + case 184: + goto st1935 + case 185: + goto st1936 + case 186: + goto st1937 + case 187: + goto st1938 + case 188: + goto st1939 + case 189: + goto st1940 + case 190: + goto st1941 + case 191: + goto st1942 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st1924 + } + case data[p] >= 166: + goto st1920 + } + goto tr125 + st1915: + if p++; p == pe { + goto _test_eof1915 + } + st_case_1915: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto st141 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto st141 + } + case data[p] >= 165: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1916: + if p++; p == pe { + goto _test_eof1916 + } + st_case_1916: + if 153 <= data[p] && data[p] <= 155 { + goto st141 + } + goto tr125 + st1917: + if p++; p == pe { + goto _test_eof1917 + } + st_case_1917: + if 163 <= data[p] { + goto st141 + } + goto tr125 + st1918: + if p++; p == pe { + goto _test_eof1918 + } + st_case_1918: + if data[p] == 189 { + goto tr125 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr125 + } + goto st141 + st1919: + if p++; p == pe { + goto _test_eof1919 + } + st_case_1919: + if data[p] == 144 { + goto tr125 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1920: + if p++; p == pe { + goto _test_eof1920 + } + st_case_1920: + if data[p] == 188 { + goto st141 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st141 + } + case 
data[p] >= 129: + goto st141 + } + goto tr125 + st1921: + if p++; p == pe { + goto _test_eof1921 + } + st_case_1921: + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 150 { + goto tr125 + } + default: + goto tr125 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1922: + if p++; p == pe { + goto _test_eof1922 + } + st_case_1922: + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr125 + } + default: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] < 178: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto st141 + st1923: + if p++; p == pe { + goto _test_eof1923 + } + st_case_1923: + switch data[p] { + case 134: + goto tr125 + case 138: + goto tr125 + } + switch { + case data[p] < 164: + if 142 <= data[p] && data[p] <= 161 { + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1924: + if p++; p == pe { + goto _test_eof1924 + } + st_case_1924: + if data[p] == 188 { + goto st141 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st141 + } + case data[p] >= 129: + goto st141 + } + goto tr125 + st1925: + if p++; p == pe { + goto _test_eof1925 + } + st_case_1925: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st141 + } + case 
data[p] >= 128: + goto st141 + } + case data[p] > 141: + switch { + case data[p] < 162: + if 150 <= data[p] && data[p] <= 151 { + goto st141 + } + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + default: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1926: + if p++; p == pe { + goto _test_eof1926 + } + st_case_1926: + if data[p] == 130 { + goto st141 + } + if 190 <= data[p] && data[p] <= 191 { + goto st141 + } + goto tr125 + st1927: + if p++; p == pe { + goto _test_eof1927 + } + st_case_1927: + if data[p] == 151 { + goto st141 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto st141 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1928: + if p++; p == pe { + goto _test_eof1928 + } + st_case_1928: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1929: + if p++; p == pe { + goto _test_eof1929 + } + st_case_1929: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + } + switch { + case data[p] < 164: + switch { + case data[p] > 148: + if 151 <= data[p] && data[p] <= 161 { + goto tr125 + } + case data[p] >= 142: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1930: + if p++; p == pe { + goto _test_eof1930 + } + st_case_1930: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + case data[p] > 141: + switch { + case data[p] < 162: + if 149 <= data[p] && data[p] <= 150 { + goto st141 + } + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + default: + goto st141 + } + 
default: + goto st141 + } + goto tr125 + st1931: + if p++; p == pe { + goto _test_eof1931 + } + st_case_1931: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto st141 + } + case data[p] >= 129: + goto st141 + } + goto tr125 + st1932: + if p++; p == pe { + goto _test_eof1932 + } + st_case_1932: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + } + switch { + case data[p] < 164: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 161 { + goto tr125 + } + case data[p] >= 142: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1933: + if p++; p == pe { + goto _test_eof1933 + } + st_case_1933: + if 130 <= data[p] && data[p] <= 131 { + goto st141 + } + goto tr125 + st1934: + if p++; p == pe { + goto _test_eof1934 + } + st_case_1934: + switch data[p] { + case 138: + goto st141 + case 150: + goto st141 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto st141 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto st141 + } + case data[p] >= 166: + goto tr126 + } + default: + goto st141 + } + goto tr125 + st1935: + if p++; p == pe { + goto _test_eof1935 + } + st_case_1935: + if data[p] == 177 { + goto st141 + } + if 180 <= data[p] && data[p] <= 186 { + goto st141 + } + goto tr125 + st1936: + if p++; p == pe { + goto _test_eof1936 + } + st_case_1936: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto st141 + } + goto tr125 + st1937: + if p++; p == pe { + goto _test_eof1937 + } + st_case_1937: + if data[p] == 177 { + goto st141 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto st141 + } + case data[p] >= 180: + goto st141 + } + goto tr125 + st1938: + if p++; p == pe { + goto _test_eof1938 + } + 
st_case_1938: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto st141 + } + goto tr125 + st1939: + if p++; p == pe { + goto _test_eof1939 + } + st_case_1939: + switch data[p] { + case 181: + goto st141 + case 183: + goto st141 + case 185: + goto st141 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto st141 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto st141 + } + default: + goto tr126 + } + goto tr125 + st1940: + if p++; p == pe { + goto _test_eof1940 + } + st_case_1940: + if 177 <= data[p] && data[p] <= 191 { + goto st141 + } + goto tr125 + st1941: + if p++; p == pe { + goto _test_eof1941 + } + st_case_1941: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto st141 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto st141 + } + case data[p] >= 141: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1942: + if p++; p == pe { + goto _test_eof1942 + } + st_case_1942: + if data[p] == 134 { + goto st141 + } + goto tr125 + st1943: + if p++; p == pe { + goto _test_eof1943 + } + st_case_1943: + switch data[p] { + case 128: + goto st1944 + case 129: + goto st1945 + case 130: + goto st1946 + case 141: + goto st1947 + case 156: + goto st1948 + case 157: + goto st1949 + case 158: + goto st1950 + case 159: + goto st1951 + case 160: + goto st1952 + case 162: + goto st1953 + case 164: + goto st1954 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st1955 + case 169: + goto st1956 + case 170: + goto st1957 + case 172: + goto st1958 + case 173: + goto st1959 + case 174: + goto st1960 + case 175: + goto st1961 + case 176: + goto st1962 + case 177: + goto st1963 + case 179: + goto st1964 + case 183: + goto st1965 + } + goto tr125 + st1944: + if p++; p == pe { + goto _test_eof1944 + } + st_case_1944: + if 171 <= data[p] && data[p] <= 190 { + 
goto st141 + } + goto tr125 + st1945: + if p++; p == pe { + goto _test_eof1945 + } + st_case_1945: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto st141 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto st141 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto st141 + } + default: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1946: + if p++; p == pe { + goto _test_eof1946 + } + st_case_1946: + if data[p] == 143 { + goto st141 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto st141 + } + case data[p] > 153: + if 154 <= data[p] && data[p] <= 157 { + goto st141 + } + default: + goto tr126 + } + goto tr125 + st1947: + if p++; p == pe { + goto _test_eof1947 + } + st_case_1947: + if 157 <= data[p] && data[p] <= 159 { + goto st141 + } + goto tr125 + st1948: + if p++; p == pe { + goto _test_eof1948 + } + st_case_1948: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto st141 + } + case data[p] >= 146: + goto st141 + } + goto tr125 + st1949: + if p++; p == pe { + goto _test_eof1949 + } + st_case_1949: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto st141 + } + case data[p] >= 146: + goto st141 + } + goto tr125 + st1950: + if p++; p == pe { + goto _test_eof1950 + } + st_case_1950: + if 180 <= data[p] { + goto st141 + } + goto tr125 + st1951: + if p++; p == pe { + goto _test_eof1951 + } + st_case_1951: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr125 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1952: + if p++; p == pe { + goto _test_eof1952 + } + st_case_1952: + switch { + case data[p] > 142: + if 144 <= data[p] && 
data[p] <= 153 { + goto tr126 + } + case data[p] >= 139: + goto st141 + } + goto tr125 + st1953: + if p++; p == pe { + goto _test_eof1953 + } + st_case_1953: + if data[p] == 169 { + goto st141 + } + goto tr125 + st1954: + if p++; p == pe { + goto _test_eof1954 + } + st_case_1954: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto st141 + } + case data[p] >= 160: + goto st141 + } + goto tr125 + st1955: + if p++; p == pe { + goto _test_eof1955 + } + st_case_1955: + if 151 <= data[p] && data[p] <= 155 { + goto st141 + } + goto tr125 + st1956: + if p++; p == pe { + goto _test_eof1956 + } + st_case_1956: + if data[p] == 191 { + goto st141 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto st141 + } + case data[p] >= 149: + goto st141 + } + goto tr125 + st1957: + if p++; p == pe { + goto _test_eof1957 + } + st_case_1957: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto st141 + } + default: + goto tr126 + } + goto tr125 + st1958: + if p++; p == pe { + goto _test_eof1958 + } + st_case_1958: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1959: + if p++; p == pe { + goto _test_eof1959 + } + st_case_1959: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr125 + } + default: + goto tr126 + } + goto st141 + st1960: + if p++; p == pe { + goto _test_eof1960 + } + st_case_1960: + switch { + case data[p] < 161: + if 128 <= data[p] && data[p] <= 130 { + goto st141 + } + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + default: + goto st141 + } + goto tr125 + st1961: + if p++; p == pe { + goto _test_eof1961 + } + st_case_1961: + if 166 <= 
data[p] && data[p] <= 179 { + goto st141 + } + goto tr125 + st1962: + if p++; p == pe { + goto _test_eof1962 + } + st_case_1962: + if 164 <= data[p] && data[p] <= 183 { + goto st141 + } + goto tr125 + st1963: + if p++; p == pe { + goto _test_eof1963 + } + st_case_1963: + switch { + case data[p] > 137: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 128: + goto tr126 + } + goto tr125 + st1964: + if p++; p == pe { + goto _test_eof1964 + } + st_case_1964: + if data[p] == 173 { + goto st141 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto st141 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto st141 + } + case data[p] >= 178: + goto st141 + } + default: + goto st141 + } + goto tr125 + st1965: + if p++; p == pe { + goto _test_eof1965 + } + st_case_1965: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1966: + if p++; p == pe { + goto _test_eof1966 + } + st_case_1966: + switch data[p] { + case 128: + goto st1967 + case 129: + goto st1968 + case 131: + goto st1969 + case 179: + goto st1970 + case 181: + goto st1971 + case 183: + goto st1972 + } + goto tr125 + st1967: + if p++; p == pe { + goto _test_eof1967 + } + st_case_1967: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto st141 + } + case data[p] >= 140: + goto st141 + } + goto tr125 + st1968: + if p++; p == pe { + goto _test_eof1968 + } + st_case_1968: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto st141 + } + case data[p] >= 160: + goto st141 + } + goto tr125 + st1969: + if p++; p == pe { + goto _test_eof1969 + } + st_case_1969: + if 144 <= data[p] && data[p] <= 176 { + goto st141 + } + goto tr125 + st1970: + if p++; p == pe { + goto _test_eof1970 + } + st_case_1970: + if 175 <= data[p] && data[p] <= 177 { + goto st141 + } + goto tr125 + 
st1971: + if p++; p == pe { + goto _test_eof1971 + } + st_case_1971: + if data[p] == 191 { + goto st141 + } + goto tr125 + st1972: + if p++; p == pe { + goto _test_eof1972 + } + st_case_1972: + if 160 <= data[p] && data[p] <= 191 { + goto st141 + } + goto tr125 + st1973: + if p++; p == pe { + goto _test_eof1973 + } + st_case_1973: + switch data[p] { + case 128: + goto st1974 + case 130: + goto st1975 + } + goto tr125 + st1974: + if p++; p == pe { + goto _test_eof1974 + } + st_case_1974: + if 170 <= data[p] && data[p] <= 175 { + goto st141 + } + goto tr125 + st1975: + if p++; p == pe { + goto _test_eof1975 + } + st_case_1975: + if 153 <= data[p] && data[p] <= 154 { + goto st141 + } + goto tr125 + st1976: + if p++; p == pe { + goto _test_eof1976 + } + st_case_1976: + switch data[p] { + case 152: + goto st1977 + case 153: + goto st1978 + case 154: + goto st1979 + case 155: + goto st1980 + case 160: + goto st1981 + case 162: + goto st1982 + case 163: + goto st1983 + case 164: + goto st1984 + case 165: + goto st1985 + case 166: + goto st1986 + case 167: + goto st1987 + case 168: + goto st1988 + case 169: + goto st1989 + case 170: + goto st1990 + case 171: + goto st1991 + case 175: + goto st1992 + } + goto tr125 + st1977: + if p++; p == pe { + goto _test_eof1977 + } + st_case_1977: + if 160 <= data[p] && data[p] <= 169 { + goto tr126 + } + goto tr125 + st1978: + if p++; p == pe { + goto _test_eof1978 + } + st_case_1978: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto st141 + } + case data[p] >= 175: + goto st141 + } + goto tr125 + st1979: + if p++; p == pe { + goto _test_eof1979 + } + st_case_1979: + if 158 <= data[p] && data[p] <= 159 { + goto st141 + } + goto tr125 + st1980: + if p++; p == pe { + goto _test_eof1980 + } + st_case_1980: + if 176 <= data[p] && data[p] <= 177 { + goto st141 + } + goto tr125 + st1981: + if p++; p == pe { + goto _test_eof1981 + } + st_case_1981: + switch data[p] { + case 130: + goto st141 + case 134: + goto 
st141 + case 139: + goto st141 + } + if 163 <= data[p] && data[p] <= 167 { + goto st141 + } + goto tr125 + st1982: + if p++; p == pe { + goto _test_eof1982 + } + st_case_1982: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1983: + if p++; p == pe { + goto _test_eof1983 + } + st_case_1983: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr125 + } + default: + goto tr126 + } + goto st141 + st1984: + if p++; p == pe { + goto _test_eof1984 + } + st_case_1984: + switch { + case data[p] > 137: + if 166 <= data[p] && data[p] <= 173 { + goto st141 + } + case data[p] >= 128: + goto tr126 + } + goto tr125 + st1985: + if p++; p == pe { + goto _test_eof1985 + } + st_case_1985: + if 135 <= data[p] && data[p] <= 147 { + goto st141 + } + goto tr125 + st1986: + if p++; p == pe { + goto _test_eof1986 + } + st_case_1986: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1987: + if p++; p == pe { + goto _test_eof1987 + } + st_case_1987: + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 129: + goto tr125 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr125 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st1988: + if p++; p == pe { + goto _test_eof1988 + } + st_case_1988: + if 169 <= data[p] && data[p] <= 182 { + goto st141 + } + goto tr125 + st1989: + if p++; p == pe { + goto _test_eof1989 + } + st_case_1989: + if data[p] == 131 { + goto st141 + } + switch { + case data[p] < 144: + if 140 <= data[p] && data[p] <= 141 { + goto 
st141 + } + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto st141 + } + default: + goto tr126 + } + goto tr125 + st1990: + if p++; p == pe { + goto _test_eof1990 + } + st_case_1990: + if data[p] == 176 { + goto st141 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto st141 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto st141 + } + default: + goto st141 + } + goto tr125 + st1991: + if p++; p == pe { + goto _test_eof1991 + } + st_case_1991: + if data[p] == 129 { + goto st141 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto st141 + } + case data[p] >= 171: + goto st141 + } + goto tr125 + st1992: + if p++; p == pe { + goto _test_eof1992 + } + st_case_1992: + switch { + case data[p] < 172: + if 163 <= data[p] && data[p] <= 170 { + goto st141 + } + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + default: + goto st141 + } + goto tr125 + st1993: + if p++; p == pe { + goto _test_eof1993 + } + st_case_1993: + switch data[p] { + case 172: + goto st1994 + case 184: + goto st1995 + case 187: + goto st1971 + case 190: + goto st1979 + case 191: + goto st1996 + } + goto tr125 + st1994: + if p++; p == pe { + goto _test_eof1994 + } + st_case_1994: + if data[p] == 158 { + goto st141 + } + goto tr125 + st1995: + if p++; p == pe { + goto _test_eof1995 + } + st_case_1995: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st1996: + if p++; p == pe { + goto _test_eof1996 + } + st_case_1996: + if 185 <= data[p] && data[p] <= 187 { + goto st141 + } + goto tr125 + st1997: + if p++; p == pe { + goto _test_eof1997 + } + st_case_1997: + switch data[p] { + case 144: + goto st1998 + case 145: + goto st2004 + case 150: + goto st2024 + case 155: + goto st2029 + case 157: + goto st2031 + case 158: + goto st2039 + } + goto tr125 + st1998: + if p++; p == pe { + goto 
_test_eof1998 + } + st_case_1998: + switch data[p] { + case 135: + goto st1999 + case 139: + goto st2000 + case 141: + goto st2001 + case 146: + goto st1977 + case 168: + goto st2002 + case 171: + goto st2003 + } + goto tr125 + st1999: + if p++; p == pe { + goto _test_eof1999 + } + st_case_1999: + if data[p] == 189 { + goto st141 + } + goto tr125 + st2000: + if p++; p == pe { + goto _test_eof2000 + } + st_case_2000: + if data[p] == 160 { + goto st141 + } + goto tr125 + st2001: + if p++; p == pe { + goto _test_eof2001 + } + st_case_2001: + if 182 <= data[p] && data[p] <= 186 { + goto st141 + } + goto tr125 + st2002: + if p++; p == pe { + goto _test_eof2002 + } + st_case_2002: + if data[p] == 191 { + goto st141 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto st141 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto st141 + } + case data[p] >= 140: + goto st141 + } + default: + goto st141 + } + goto tr125 + st2003: + if p++; p == pe { + goto _test_eof2003 + } + st_case_2003: + if 165 <= data[p] && data[p] <= 166 { + goto st141 + } + goto tr125 + st2004: + if p++; p == pe { + goto _test_eof2004 + } + st_case_2004: + switch data[p] { + case 128: + goto st2005 + case 129: + goto st2006 + case 130: + goto st2007 + case 131: + goto st2008 + case 132: + goto st2009 + case 133: + goto st2010 + case 134: + goto st2011 + case 135: + goto st2012 + case 136: + goto st2013 + case 139: + goto st2014 + case 140: + goto st2015 + case 141: + goto st2016 + case 146: + goto st2017 + case 147: + goto st2018 + case 150: + goto st2019 + case 151: + goto st2020 + case 152: + goto st2017 + case 153: + goto st2021 + case 154: + goto st2022 + case 155: + goto st1724 + case 156: + goto st2023 + case 163: + goto st1977 + } + goto tr125 + st2005: + if p++; p == pe { + goto _test_eof2005 + } + st_case_2005: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto 
st141 + } + goto tr125 + st2006: + if p++; p == pe { + goto _test_eof2006 + } + st_case_2006: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr125 + } + default: + goto tr126 + } + goto st141 + st2007: + if p++; p == pe { + goto _test_eof2007 + } + st_case_2007: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr125 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto st141 + st2008: + if p++; p == pe { + goto _test_eof2008 + } + st_case_2008: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + goto tr125 + st2009: + if p++; p == pe { + goto _test_eof2009 + } + st_case_2009: + switch { + case data[p] < 167: + if 128 <= data[p] && data[p] <= 130 { + goto st141 + } + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + default: + goto st141 + } + goto tr125 + st2010: + if p++; p == pe { + goto _test_eof2010 + } + st_case_2010: + if data[p] == 179 { + goto st141 + } + goto tr125 + st2011: + if p++; p == pe { + goto _test_eof2011 + } + st_case_2011: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st2012: + if p++; p == pe { + goto _test_eof2012 + } + st_case_2012: + switch { + case data[p] < 141: + if 129 <= data[p] && data[p] <= 137 { + goto tr125 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr125 + } + goto st141 + st2013: + if p++; p == pe { + goto _test_eof2013 + } + st_case_2013: + if 172 <= data[p] && data[p] <= 183 { + goto st141 + } + goto tr125 + st2014: + if p++; p == pe { + goto _test_eof2014 + } + st_case_2014: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 159: + goto st141 + } + goto tr125 
+ st2015: + if p++; p == pe { + goto _test_eof2015 + } + st_case_2015: + if data[p] == 188 { + goto st141 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st2016: + if p++; p == pe { + goto _test_eof2016 + } + st_case_2016: + if data[p] == 151 { + goto st141 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto st141 + } + case data[p] >= 128: + goto st141 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto st141 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto st141 + } + default: + goto st141 + } + default: + goto st141 + } + goto tr125 + st2017: + if p++; p == pe { + goto _test_eof2017 + } + st_case_2017: + if 176 <= data[p] { + goto st141 + } + goto tr125 + st2018: + if p++; p == pe { + goto _test_eof2018 + } + st_case_2018: + switch { + case data[p] < 144: + if 132 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto st141 + st2019: + if p++; p == pe { + goto _test_eof2019 + } + st_case_2019: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto st141 + } + case data[p] >= 175: + goto st141 + } + goto tr125 + st2020: + if p++; p == pe { + goto _test_eof2020 + } + st_case_2020: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr125 + } + case data[p] >= 129: + goto tr125 + } + goto st141 + st2021: + if p++; p == pe { + goto _test_eof2021 + } + st_case_2021: + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto st141 + st2022: + if p++; p == pe { + goto _test_eof2022 + } + st_case_2022: + if 171 <= data[p] && data[p] <= 183 { + goto st141 + } + goto tr125 + st2023: + if p++; p == pe { + 
goto _test_eof2023 + } + st_case_2023: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto st141 + } + goto tr125 + st2024: + if p++; p == pe { + goto _test_eof2024 + } + st_case_2024: + switch data[p] { + case 169: + goto st1977 + case 171: + goto st2025 + case 172: + goto st2026 + case 173: + goto st1650 + case 189: + goto st2027 + case 190: + goto st2028 + } + goto tr125 + st2025: + if p++; p == pe { + goto _test_eof2025 + } + st_case_2025: + if 176 <= data[p] && data[p] <= 180 { + goto st141 + } + goto tr125 + st2026: + if p++; p == pe { + goto _test_eof2026 + } + st_case_2026: + if 176 <= data[p] && data[p] <= 182 { + goto st141 + } + goto tr125 + st2027: + if p++; p == pe { + goto _test_eof2027 + } + st_case_2027: + if 145 <= data[p] && data[p] <= 190 { + goto st141 + } + goto tr125 + st2028: + if p++; p == pe { + goto _test_eof2028 + } + st_case_2028: + if 143 <= data[p] && data[p] <= 146 { + goto st141 + } + goto tr125 + st2029: + if p++; p == pe { + goto _test_eof2029 + } + st_case_2029: + if data[p] == 178 { + goto st2030 + } + goto tr125 + st2030: + if p++; p == pe { + goto _test_eof2030 + } + st_case_2030: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto st141 + } + case data[p] >= 157: + goto st141 + } + goto tr125 + st2031: + if p++; p == pe { + goto _test_eof2031 + } + st_case_2031: + switch data[p] { + case 133: + goto st2032 + case 134: + goto st2033 + case 137: + goto st2034 + case 159: + goto st2035 + case 168: + goto st2036 + case 169: + goto st2037 + case 170: + goto st2038 + } + goto tr125 + st2032: + if p++; p == pe { + goto _test_eof2032 + } + st_case_2032: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto st141 + } + case data[p] >= 165: + goto st141 + } + goto tr125 + st2033: + if p++; p == pe { + goto _test_eof2033 + } + st_case_2033: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr125 + } + case 
data[p] > 169: + if 174 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto st141 + st2034: + if p++; p == pe { + goto _test_eof2034 + } + st_case_2034: + if 130 <= data[p] && data[p] <= 132 { + goto st141 + } + goto tr125 + st2035: + if p++; p == pe { + goto _test_eof2035 + } + st_case_2035: + if 142 <= data[p] && data[p] <= 191 { + goto tr126 + } + goto tr125 + st2036: + if p++; p == pe { + goto _test_eof2036 + } + st_case_2036: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto st141 + } + case data[p] >= 128: + goto st141 + } + goto tr125 + st2037: + if p++; p == pe { + goto _test_eof2037 + } + st_case_2037: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 173: + goto tr125 + } + goto st141 + st2038: + if p++; p == pe { + goto _test_eof2038 + } + st_case_2038: + if data[p] == 132 { + goto st141 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto st141 + } + case data[p] >= 155: + goto st141 + } + goto tr125 + st2039: + if p++; p == pe { + goto _test_eof2039 + } + st_case_2039: + if data[p] == 163 { + goto st2040 + } + goto tr125 + st2040: + if p++; p == pe { + goto _test_eof2040 + } + st_case_2040: + if 144 <= data[p] && data[p] <= 150 { + goto st141 + } + goto tr125 + st2041: + if p++; p == pe { + goto _test_eof2041 + } + st_case_2041: + if data[p] == 160 { + goto st2042 + } + goto tr125 + st2042: + if p++; p == pe { + goto _test_eof2042 + } + st_case_2042: + switch data[p] { + case 128: + goto st2043 + case 129: + goto st2044 + case 132: + goto st1902 + case 135: + goto st1903 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2045 + } + goto tr125 + st2043: + if p++; p == pe { + goto _test_eof2043 + } + st_case_2043: + if data[p] == 129 { + goto st141 + } + if 160 <= data[p] { + goto st141 + } + goto tr125 + st2044: + if p++; p == pe { + goto _test_eof2044 + } + st_case_2044: + if 192 <= data[p] { + goto tr125 + } + goto st141 + st2045: + if p++; p == pe { + 
goto _test_eof2045 + } + st_case_2045: + goto st141 + st2046: + if p++; p == pe { + goto _test_eof2046 + } + st_case_2046: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr1880 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr125 + st2047: + if p++; p == pe { + goto _test_eof2047 + } + st_case_2047: + if data[p] <= 127 { + goto tr125 + } + goto tr1880 + st2048: + if p++; p == pe { + goto _test_eof2048 + } + st_case_2048: + switch data[p] { + case 181: + goto tr125 + case 190: + goto st141 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr125 + } + goto tr1880 + st2049: + if p++; p == pe { + goto _test_eof2049 + } + st_case_2049: + if data[p] == 130 { + goto tr125 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr1880 + } + goto tr148 + st2050: + if p++; p == pe { + goto _test_eof2050 + } + st_case_2050: + switch data[p] { + case 137: + goto st141 + case 190: + goto tr125 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr125 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2051: + if p++; p == pe { + goto _test_eof2051 + } + st_case_2051: + switch data[p] { + case 135: + goto tr1880 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr1880 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr1880 + } + goto tr125 + st2052: + if p++; p == pe { + goto _test_eof2052 + } + st_case_2052: + if data[p] == 156 { + goto tr1880 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 133 { + goto tr1880 + } + case data[p] > 141: + switch { + case data[p] > 154: 
+ if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 144: + goto tr1880 + } + default: + goto st141 + } + goto tr125 + st2053: + if p++; p == pe { + goto _test_eof2053 + } + st_case_2053: + switch data[p] { + case 171: + goto tr126 + case 172: + goto st141 + case 176: + goto tr1880 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr1880 + } + goto tr125 + st2054: + if p++; p == pe { + goto _test_eof2054 + } + st_case_2054: + switch data[p] { + case 148: + goto tr125 + case 158: + goto tr125 + case 169: + goto tr125 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr1880 + } + case data[p] >= 150: + goto tr1880 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 189: + goto tr125 + } + default: + goto tr126 + } + goto tr148 + st2055: + if p++; p == pe { + goto _test_eof2055 + } + st_case_2055: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr1880 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2056: + if p++; p == pe { + goto _test_eof2056 + } + st_case_2056: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr125 + } + goto tr1880 + st2057: + if p++; p == pe { + goto _test_eof2057 + } + st_case_2057: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr1880 + } + goto tr148 + st2058: + if p++; p == pe { + goto _test_eof2058 + } + st_case_2058: + switch data[p] { + case 184: + goto st141 + case 186: + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { 
+ goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2059: + if p++; p == pe { + goto _test_eof2059 + } + st_case_2059: + switch data[p] { + case 160: + goto st2060 + case 161: + goto st2061 + case 162: + goto st168 + case 163: + goto st2062 + case 164: + goto st2063 + case 165: + goto st2064 + case 166: + goto st2065 + case 167: + goto st2066 + case 168: + goto st2067 + case 169: + goto st2068 + case 170: + goto st2069 + case 171: + goto st2070 + case 172: + goto st2071 + case 173: + goto st2072 + case 174: + goto st2073 + case 175: + goto st2074 + case 176: + goto st2075 + case 177: + goto st2076 + case 178: + goto st2077 + case 179: + goto st2078 + case 180: + goto st2079 + case 181: + goto st2080 + case 182: + goto st2081 + case 183: + goto st2082 + case 184: + goto st2083 + case 185: + goto st2084 + case 186: + goto st2085 + case 187: + goto st2086 + case 188: + goto st2087 + case 189: + goto st2088 + case 190: + goto st2089 + case 191: + goto st2090 + } + goto tr125 + st2060: + if p++; p == pe { + goto _test_eof2060 + } + st_case_2060: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2061: + if p++; p == pe { + goto _test_eof2061 + } + st_case_2061: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2062: + if p++; p == pe { + goto _test_eof2062 + } + st_case_2062: + if 163 <= data[p] { + goto tr1880 + } + goto tr125 + st2063: + if p++; p == pe { + goto _test_eof2063 + } + st_case_2063: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr1880 + st2064: + if 
p++; p == pe { + goto _test_eof2064 + } + st_case_2064: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr125 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto tr1880 + st2065: + if p++; p == pe { + goto _test_eof2065 + } + st_case_2065: + switch data[p] { + case 132: + goto tr125 + case 169: + goto tr125 + case 177: + goto tr125 + case 188: + goto tr1880 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr125 + } + case data[p] >= 129: + goto tr1880 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr125 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr1880 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st2066: + if p++; p == pe { + goto _test_eof2066 + } + st_case_2066: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr125 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr125 + } + case data[p] >= 143: + goto tr125 + } + default: + goto tr125 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1880 + st2067: + if p++; p == pe { + goto _test_eof2067 + } + st_case_2067: + if data[p] == 188 { + goto tr1880 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1880 + } + 
case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr1880 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2068: + if p++; p == pe { + goto _test_eof2068 + } + st_case_2068: + if data[p] == 157 { + goto tr125 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr125 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr125 + } + case data[p] >= 142: + goto tr125 + } + default: + goto tr125 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1880 + st2069: + if p++; p == pe { + goto _test_eof2069 + } + st_case_2069: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr1880 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2070: + if p++; p == pe { + goto _test_eof2070 + } + st_case_2070: + switch data[p] { + case 134: + goto tr125 + 
case 138: + goto tr125 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr125 + } + goto tr1880 + st2071: + if p++; p == pe { + goto _test_eof2071 + } + st_case_2071: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1880 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2072: + if p++; p == pe { + goto _test_eof2072 + } + st_case_2072: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr1880 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr1880 + } + default: + goto tr1880 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 162: + goto tr1880 + } + default: + goto tr148 + } + default: + goto tr1880 + } + goto tr125 + st2073: + if p++; p == pe { + goto _test_eof2073 + } + st_case_2073: + switch data[p] { + case 130: + goto tr1880 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 
142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr1880 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2074: + if p++; p == pe { + goto _test_eof2074 + } + st_case_2074: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr1880 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr1880 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2075: + if p++; p == pe { + goto _test_eof2075 + } + st_case_2075: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr1880 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2076: + if p++; p == pe { + goto _test_eof2076 + } + st_case_2076: + switch data[p] { + case 133: + goto tr125 + case 137: + goto tr125 + case 151: + goto tr125 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr125 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr125 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] 
&& data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr1880 + st2077: + if p++; p == pe { + goto _test_eof2077 + } + st_case_2077: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1880 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2078: + if p++; p == pe { + goto _test_eof2078 + } + st_case_2078: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr1880 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr1880 + } + default: + goto tr1880 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2079: + if p++; p == pe { + goto _test_eof2079 + } + st_case_2079: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr1880 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr1880 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2080: + if p++; p == pe { + goto _test_eof2080 + } + st_case_2080: + switch data[p] { + case 133: 
+ goto tr125 + case 137: + goto tr125 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr125 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr125 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr125 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr125 + } + default: + goto tr125 + } + goto tr1880 + st2081: + if p++; p == pe { + goto _test_eof2081 + } + st_case_2081: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2082: + if p++; p == pe { + goto _test_eof2082 + } + st_case_2082: + switch data[p] { + case 138: + goto tr1880 + case 150: + goto tr1880 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr1880 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr1880 + } + goto tr125 + st2083: + if p++; p == pe { + goto _test_eof2083 + } + st_case_2083: + if data[p] == 177 { + goto tr1880 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr1880 + } + goto tr125 + st2084: + if p++; p == pe { + goto _test_eof2084 + } + st_case_2084: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr1880 + } + goto tr125 + st2085: + if p++; p == pe { + goto _test_eof2085 + } + st_case_2085: + if data[p] == 177 
{ + goto tr1880 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr1880 + } + case data[p] >= 180: + goto tr1880 + } + goto tr125 + st2086: + if p++; p == pe { + goto _test_eof2086 + } + st_case_2086: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr1880 + } + goto tr125 + st2087: + if p++; p == pe { + goto _test_eof2087 + } + st_case_2087: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr1880 + case 183: + goto tr1880 + case 185: + goto tr1880 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr1880 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr1880 + } + default: + goto tr126 + } + goto tr125 + st2088: + if p++; p == pe { + goto _test_eof2088 + } + st_case_2088: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2089: + if p++; p == pe { + goto _test_eof2089 + } + st_case_2089: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr1880 + } + case data[p] >= 128: + goto tr1880 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr1880 + } + case data[p] >= 141: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2090: + if p++; p == pe { + goto _test_eof2090 + } + st_case_2090: + if data[p] == 134 { + goto tr1880 + } + goto tr125 + st2091: + if p++; p == pe { + goto _test_eof2091 + } + st_case_2091: + switch data[p] { + case 128: + goto st2092 + case 129: + goto st2093 + case 130: + goto st2094 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2095 + case 142: + goto st208 + case 143: + goto st209 + 
case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st2096 + case 157: + goto st2097 + case 158: + goto st2098 + case 159: + goto st2099 + case 160: + goto st2100 + case 161: + goto st219 + case 162: + goto st2101 + case 163: + goto st221 + case 164: + goto st2102 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st2103 + case 169: + goto st2104 + case 170: + goto st2105 + case 172: + goto st2106 + case 173: + goto st2107 + case 174: + goto st2108 + case 175: + goto st2109 + case 176: + goto st2110 + case 177: + goto st1659 + case 179: + goto st2111 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2112 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr125 + st2092: + if p++; p == pe { + goto _test_eof2092 + } + st_case_2092: + if 171 <= data[p] && data[p] <= 190 { + goto tr1880 + } + goto tr125 + st2093: + if p++; p == pe { + goto _test_eof2093 + } + st_case_2093: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr1880 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr1880 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr1880 + } + default: + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2094: + if p++; p == pe { + goto _test_eof2094 + } + st_case_2094: + if data[p] == 143 { + goto tr1880 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr1880 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto 
tr1880 + } + default: + goto tr126 + } + goto tr125 + st2095: + if p++; p == pe { + goto _test_eof2095 + } + st_case_2095: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2096: + if p++; p == pe { + goto _test_eof2096 + } + st_case_2096: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr1880 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr1880 + } + goto tr125 + st2097: + if p++; p == pe { + goto _test_eof2097 + } + st_case_2097: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr1880 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2098: + if p++; p == pe { + goto _test_eof2098 + } + st_case_2098: + if 180 <= data[p] { + goto tr1880 + } + goto tr125 + st2099: + if p++; p == pe { + goto _test_eof2099 + } + st_case_2099: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr125 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr125 + } + goto tr1880 + st2100: + if p++; p == pe { + goto _test_eof2100 + } + st_case_2100: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr1880 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr125 + st2101: + if p++; p == pe { + goto _test_eof2101 + } + st_case_2101: + if data[p] == 169 { + goto 
tr1880 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2102: + if p++; p == pe { + goto _test_eof2102 + } + st_case_2102: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2103: + if p++; p == pe { + goto _test_eof2103 + } + st_case_2103: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2104: + if p++; p == pe { + goto _test_eof2104 + } + st_case_2104: + if data[p] == 191 { + goto tr1880 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr1880 + } + case data[p] >= 149: + goto tr1880 + } + goto tr125 + st2105: + if p++; p == pe { + goto _test_eof2105 + } + st_case_2105: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr1880 + } + default: + goto tr126 + } + goto tr125 + st2106: + if p++; p == pe { + goto _test_eof2106 + } + st_case_2106: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr1880 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2107: + if p++; p == pe { + goto _test_eof2107 + } + st_case_2107: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 154: + goto tr125 + } + default: + goto tr126 + } + goto tr1880 + st2108: + if p++; p == pe { + goto _test_eof2108 + } + st_case_2108: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= 
data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr1880 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr1880 + } + goto tr125 + st2109: + if p++; p == pe { + goto _test_eof2109 + } + st_case_2109: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr125 + } + case data[p] >= 166: + goto tr1880 + } + goto tr148 + st2110: + if p++; p == pe { + goto _test_eof2110 + } + st_case_2110: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2111: + if p++; p == pe { + goto _test_eof2111 + } + st_case_2111: + if data[p] == 173 { + goto tr1880 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr1880 + } + case data[p] >= 144: + goto tr1880 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr1880 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr1880 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2112: + if p++; p == pe { + goto _test_eof2112 + } + st_case_2112: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr1880 + } + case data[p] >= 128: + goto tr1880 + } + goto tr125 + st2113: + if p++; p == pe { + goto _test_eof2113 + } + st_case_2113: + switch data[p] { + case 128: + goto st2114 + case 129: + goto st2115 + case 130: + goto st241 + case 131: + goto st2116 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2117 + case 180: + goto st251 + case 181: + goto st2118 + case 182: + goto st253 + case 
183: + goto st2119 + case 184: + goto st255 + } + goto tr125 + st2114: + if p++; p == pe { + goto _test_eof2114 + } + st_case_2114: + if data[p] == 164 { + goto st141 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr1880 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr1485 + } + case data[p] >= 170: + goto tr1880 + } + default: + goto st141 + } + goto tr125 + st2115: + if p++; p == pe { + goto _test_eof2115 + } + st_case_2115: + switch data[p] { + case 132: + goto st141 + case 165: + goto tr125 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 160: + goto tr1880 + } + default: + goto tr125 + } + goto tr1485 + st2116: + if p++; p == pe { + goto _test_eof2116 + } + st_case_2116: + if 144 <= data[p] && data[p] <= 176 { + goto tr1880 + } + goto tr125 + st2117: + if p++; p == pe { + goto _test_eof2117 + } + st_case_2117: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr125 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2118: + if p++; p == pe { + goto _test_eof2118 + } + st_case_2118: + if data[p] == 191 { + goto tr1880 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr125 + } + case data[p] >= 168: + goto tr125 + } + goto tr148 + st2119: + if p++; p == pe { + goto _test_eof2119 + } + st_case_2119: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr1880 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2120: + if p++; p == pe { 
+ goto _test_eof2120 + } + st_case_2120: + switch data[p] { + case 128: + goto st2121 + case 130: + goto st2122 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr125 + st2121: + if p++; p == pe { + goto _test_eof2121 + } + st_case_2121: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr1880 + } + goto tr125 + st2122: + if p++; p == pe { + goto _test_eof2122 + } + st_case_2122: + if 153 <= data[p] && data[p] <= 154 { + goto tr1880 + } + goto tr125 + st2123: + if p++; p == pe { + goto _test_eof2123 + } + st_case_2123: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st1673 + case 153: + goto st2124 + case 154: + goto st2125 + case 155: + goto st2126 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st2127 + case 161: + goto st272 + case 162: + goto st2128 + case 163: + goto st2129 + case 164: + goto st2130 + case 165: + goto st2131 + case 166: + goto st2132 + case 167: + goto st2133 + case 168: + goto st2134 + case 169: + goto st2135 + case 170: + goto st2136 + case 171: + goto st2137 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st2138 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr125 + st2124: + if p++; p == pe { + goto _test_eof2124 + } + st_case_2124: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2125: + if p++; p == pe { + goto _test_eof2125 + } + st_case_2125: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto 
tr148 + } + default: + goto tr1880 + } + goto tr125 + st2126: + if p++; p == pe { + goto _test_eof2126 + } + st_case_2126: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr1880 + } + goto tr148 + st2127: + if p++; p == pe { + goto _test_eof2127 + } + st_case_2127: + switch data[p] { + case 130: + goto tr1880 + case 134: + goto tr1880 + case 139: + goto tr1880 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr125 + } + case data[p] >= 163: + goto tr1880 + } + goto tr148 + st2128: + if p++; p == pe { + goto _test_eof2128 + } + st_case_2128: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr1880 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2129: + if p++; p == pe { + goto _test_eof2129 + } + st_case_2129: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 133: + goto tr125 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr125 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr125 + } + goto tr1880 + st2130: + if p++; p == pe { + goto _test_eof2130 + } + st_case_2130: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2131: + if p++; p == pe { + goto _test_eof2131 + } + st_case_2131: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr1880 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr148 + st2132: + if p++; p == pe { + goto _test_eof2132 + } + st_case_2132: + switch { + case data[p] < 132: 
+ if 128 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2133: + if p++; p == pe { + goto _test_eof2133 + } + st_case_2133: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 129: + goto tr125 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr125 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + default: + goto tr125 + } + goto tr1880 + st2134: + if p++; p == pe { + goto _test_eof2134 + } + st_case_2134: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2135: + if p++; p == pe { + goto _test_eof2135 + } + st_case_2135: + if data[p] == 131 { + goto tr1880 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr1880 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr1880 + } + goto tr125 + st2136: + if p++; p == pe { + goto _test_eof2136 + } + st_case_2136: + if data[p] == 176 { + goto tr1880 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr1880 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2137: + if p++; p == pe { + goto _test_eof2137 + } + st_case_2137: + if data[p] == 129 { + goto tr1880 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr1880 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr1880 + } + 
goto tr125 + st2138: + if p++; p == pe { + goto _test_eof2138 + } + st_case_2138: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2139: + if p++; p == pe { + goto _test_eof2139 + } + st_case_2139: + switch data[p] { + case 172: + goto st2140 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st2141 + case 185: + goto st1848 + case 187: + goto st2142 + case 188: + goto st1850 + case 189: + goto st303 + case 190: + goto st2143 + case 191: + goto st2144 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr125 + st2140: + if p++; p == pe { + goto _test_eof2140 + } + st_case_2140: + switch data[p] { + case 158: + goto tr1880 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr125 + st2141: + if p++; p == pe { + goto _test_eof2141 + } + st_case_2141: + switch data[p] { + case 144: + goto st141 + case 148: + goto st141 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr1880 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr1485 + } + default: + goto tr1880 + } + goto tr125 + st2142: + if p++; p == pe { + goto _test_eof2142 + } + st_case_2142: + if data[p] == 191 { + goto tr1880 + } + if 189 <= data[p] { + goto tr125 + } + goto tr148 + st2143: + if p++; p == pe { + goto _test_eof2143 + } + 
st_case_2143: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr1880 + } + goto tr125 + st2144: + if p++; p == pe { + goto _test_eof2144 + } + st_case_2144: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr1880 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2145: + if p++; p == pe { + goto _test_eof2145 + } + st_case_2145: + switch data[p] { + case 144: + goto st2146 + case 145: + goto st2152 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st2171 + case 155: + goto st2176 + case 157: + goto st2178 + case 158: + goto st2185 + case 159: + goto st403 + } + goto tr125 + st2146: + if p++; p == pe { + goto _test_eof2146 + } + st_case_2146: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2147 + case 138: + goto st313 + case 139: + goto st2148 + case 140: + goto st315 + case 141: + goto st2149 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st2150 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st2151 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + 
goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr125 + st2147: + if p++; p == pe { + goto _test_eof2147 + } + st_case_2147: + if data[p] == 189 { + goto tr1880 + } + goto tr125 + st2148: + if p++; p == pe { + goto _test_eof2148 + } + st_case_2148: + if data[p] == 160 { + goto tr1880 + } + if 145 <= data[p] { + goto tr125 + } + goto tr148 + st2149: + if p++; p == pe { + goto _test_eof2149 + } + st_case_2149: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2150: + if p++; p == pe { + goto _test_eof2150 + } + st_case_2150: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr1880 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr1880 + } + default: + goto tr1880 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr1880 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2151: + if p++; p == pe { + goto _test_eof2151 + } + st_case_2151: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2152: + if p++; p == pe { + goto _test_eof2152 + } + st_case_2152: + switch data[p] { + case 128: + goto st2153 + case 129: + goto st2154 + case 130: + goto st2155 + case 131: + goto st1709 + case 132: + goto st2156 + case 133: + goto st2157 + case 134: + goto st2158 + case 135: + goto st2159 + case 136: + goto st2160 + case 138: + goto st348 + case 139: + goto st2161 + case 140: + goto st2162 + case 141: + goto st2163 + case 146: + goto st2164 + case 
147: + goto st2165 + case 150: + goto st2166 + case 151: + goto st2167 + case 152: + goto st2164 + case 153: + goto st2168 + case 154: + goto st2169 + case 155: + goto st1724 + case 156: + goto st2170 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr125 + st2153: + if p++; p == pe { + goto _test_eof2153 + } + st_case_2153: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1880 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2154: + if p++; p == pe { + goto _test_eof2154 + } + st_case_2154: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr125 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr125 + } + default: + goto tr126 + } + goto tr1880 + st2155: + if p++; p == pe { + goto _test_eof2155 + } + st_case_2155: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr1880 + st2156: + if p++; p == pe { + goto _test_eof2156 + } + st_case_2156: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1880 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2157: + if p++; p == pe { + goto _test_eof2157 + } + st_case_2157: + switch data[p] { + case 179: + goto tr1880 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr125 + st2158: + if p++; p == pe { + goto _test_eof2158 + } + st_case_2158: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr1880 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2159: + if p++; p == pe { + goto _test_eof2159 + } + 
st_case_2159: + if data[p] == 155 { + goto tr125 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr125 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + default: + goto tr125 + } + goto tr1880 + st2160: + if p++; p == pe { + goto _test_eof2160 + } + st_case_2160: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2161: + if p++; p == pe { + goto _test_eof2161 + } + st_case_2161: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr1880 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr125 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr125 + } + goto tr148 + st2162: + if p++; p == pe { + goto _test_eof2162 + } + st_case_2162: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr1880 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr1880 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr125 + st2163: + if p++; p == pe { + goto _test_eof2163 + } + st_case_2163: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr1880 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 
132 { + goto tr1880 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr1880 + } + default: + goto tr1880 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr1880 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr1880 + } + default: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2164: + if p++; p == pe { + goto _test_eof2164 + } + st_case_2164: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2165: + if p++; p == pe { + goto _test_eof2165 + } + st_case_2165: + if data[p] == 134 { + goto tr125 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr125 + } + goto tr1880 + st2166: + if p++; p == pe { + goto _test_eof2166 + } + st_case_2166: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr1880 + } + default: + goto tr1880 + } + goto tr125 + st2167: + if p++; p == pe { + goto _test_eof2167 + } + st_case_2167: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr125 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr125 + } + default: + goto tr148 + } + goto tr1880 + st2168: + if p++; p == pe { + goto _test_eof2168 + } + st_case_2168: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr125 + } + default: + goto tr126 + } + goto tr1880 + st2169: + if p++; p == pe { + goto _test_eof2169 + } + st_case_2169: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 
+ } + goto tr125 + st2170: + if p++; p == pe { + goto _test_eof2170 + } + st_case_2170: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr1880 + } + goto tr125 + st2171: + if p++; p == pe { + goto _test_eof2171 + } + st_case_2171: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st1728 + case 171: + goto st2172 + case 172: + goto st2173 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st2174 + case 190: + goto st2175 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr125 + st2172: + if p++; p == pe { + goto _test_eof2172 + } + st_case_2172: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr1880 + } + case data[p] >= 144: + goto tr148 + } + goto tr125 + st2173: + if p++; p == pe { + goto _test_eof2173 + } + st_case_2173: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr1880 + } + case data[p] >= 128: + goto tr148 + } + goto tr125 + st2174: + if p++; p == pe { + goto _test_eof2174 + } + st_case_2174: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2175: + if p++; p == pe { + goto _test_eof2175 + } + st_case_2175: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr1880 + } + goto tr125 + st2176: + if p++; p == pe { + goto _test_eof2176 + } + st_case_2176: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st2177 + } + goto tr125 + st2177: + if p++; p == pe { + goto _test_eof2177 + } + st_case_2177: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { 
+ goto tr1880 + } + case data[p] >= 157: + goto tr1880 + } + default: + goto tr148 + } + goto tr125 + st2178: + if p++; p == pe { + goto _test_eof2178 + } + st_case_2178: + switch data[p] { + case 133: + goto st2179 + case 134: + goto st2180 + case 137: + goto st2181 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st2182 + case 169: + goto st2183 + case 170: + goto st2184 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr125 + st2179: + if p++; p == pe { + goto _test_eof2179 + } + st_case_2179: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr1880 + } + case data[p] >= 165: + goto tr1880 + } + goto tr125 + st2180: + if p++; p == pe { + goto _test_eof2180 + } + st_case_2180: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr125 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr125 + } + default: + goto tr125 + } + goto tr1880 + st2181: + if p++; p == pe { + goto _test_eof2181 + } + st_case_2181: + if 130 <= data[p] && data[p] <= 132 { + goto tr1880 + } + goto tr125 + st2182: + if p++; p == pe { + goto _test_eof2182 + } + st_case_2182: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr1880 + } + case data[p] >= 128: + goto tr1880 + } + goto tr125 + st2183: + if p++; p == pe { + goto _test_eof2183 + } + st_case_2183: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr125 + } + case data[p] >= 173: + goto tr125 + } + goto tr1880 + st2184: + if p++; p == pe { + goto _test_eof2184 + } + st_case_2184: + if data[p] == 132 { + goto tr1880 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr1880 + } + case data[p] >= 155: + goto tr1880 + } + goto tr125 + st2185: + if p++; p 
== pe { + goto _test_eof2185 + } + st_case_2185: + switch data[p] { + case 160: + goto st147 + case 163: + goto st2186 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr125 + st2186: + if p++; p == pe { + goto _test_eof2186 + } + st_case_2186: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr125 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr125 + } + default: + goto tr1880 + } + goto tr148 + st2187: + if p++; p == pe { + goto _test_eof2187 + } + st_case_2187: + if data[p] == 160 { + goto st2188 + } + goto tr125 + st2188: + if p++; p == pe { + goto _test_eof2188 + } + st_case_2188: + switch data[p] { + case 128: + goto st2189 + case 129: + goto st2190 + case 132: + goto st2047 + case 135: + goto st2192 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2191 + } + goto tr125 + st2189: + if p++; p == pe { + goto _test_eof2189 + } + st_case_2189: + if data[p] == 129 { + goto tr1880 + } + if 160 <= data[p] { + goto tr1880 + } + goto tr125 + st2190: + if p++; p == pe { + goto _test_eof2190 + } + st_case_2190: + if 192 <= data[p] { + goto tr125 + } + goto tr1880 + st2191: + if p++; p == pe { + goto _test_eof2191 + } + st_case_2191: + goto tr1880 + st2192: + if p++; p == pe { + goto _test_eof2192 + } + st_case_2192: + if 176 <= data[p] { + goto tr125 + } + goto tr1880 +tr2008: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4874 +tr4462: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4874 + st4874: + if p++; p == pe { + goto _test_eof4874 + } + st_case_4874: +//line segment_words_prod.go:58452 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st2193 + case 195: + 
goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st2194 + case 205: + goto st2195 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st2196 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2197 + case 215: + goto st2198 + case 216: + goto st2199 + case 217: + goto st2200 + case 219: + goto st2201 + case 220: + goto st2202 + case 221: + goto st2203 + case 222: + goto st2204 + case 223: + goto st2205 + case 224: + goto st2206 + case 225: + goto st2238 + case 226: + goto st2260 + case 227: + goto st2267 + case 234: + goto st2270 + case 237: + goto st287 + case 239: + goto st2286 + case 240: + goto st2292 + case 243: + goto st2334 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st2193: + if p++; p == pe { + goto _test_eof2193 + } + st_case_2193: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2008 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr420 + st2194: + if p++; p == pe { + goto _test_eof2194 + } + st_case_2194: + if data[p] <= 127 { + goto tr420 + } + goto tr2008 + st2195: + if p++; p == pe { + goto _test_eof2195 + } + st_case_2195: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr2008 + st2196: + if p++; p == pe { + goto _test_eof2196 + } + st_case_2196: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 
137 { + goto tr2008 + } + goto tr148 + st2197: + if p++; p == pe { + goto _test_eof2197 + } + st_case_2197: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2198: + if p++; p == pe { + goto _test_eof2198 + } + st_case_2198: + switch data[p] { + case 135: + goto tr2008 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2008 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2008 + } + goto tr420 + st2199: + if p++; p == pe { + goto _test_eof2199 + } + st_case_2199: + if data[p] == 156 { + goto tr2008 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2008 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2008 + } + goto tr420 + st2200: + if p++; p == pe { + goto _test_eof2200 + } + st_case_2200: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr2008 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2008 + } + goto tr420 + st2201: + if p++; p == pe { + goto _test_eof2201 + } + st_case_2201: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2008 + } + case data[p] >= 150: + goto tr2008 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + 
default: + goto tr421 + } + goto tr148 + st2202: + if p++; p == pe { + goto _test_eof2202 + } + st_case_2202: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2008 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2203: + if p++; p == pe { + goto _test_eof2203 + } + st_case_2203: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr2008 + st2204: + if p++; p == pe { + goto _test_eof2204 + } + st_case_2204: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2008 + } + goto tr148 + st2205: + if p++; p == pe { + goto _test_eof2205 + } + st_case_2205: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2206: + if p++; p == pe { + goto _test_eof2206 + } + st_case_2206: + switch data[p] { + case 160: + goto st2207 + case 161: + goto st2208 + case 162: + goto st168 + case 163: + goto st2209 + case 164: + goto st2210 + case 165: + goto st2211 + case 166: + goto st2212 + case 167: + goto st2213 + case 168: + goto st2214 + case 169: + goto st2215 + case 170: + goto st2216 + case 171: + goto st2217 + case 172: + goto st2218 + case 173: + goto st2219 + case 174: + goto st2220 + case 175: + goto st2221 + case 176: + goto st2222 + case 177: + goto st2223 + case 178: + goto st2224 + case 179: + goto st2225 + case 180: + goto st2226 + case 181: + goto st2227 + case 182: + goto st2228 + case 183: + goto st2229 + case 184: + goto st2230 + case 185: + goto st2231 + case 186: + goto st2232 + case 187: + goto st2233 + case 188: + goto st2234 + case 189: + goto st2235 
+ case 190: + goto st2236 + case 191: + goto st2237 + } + goto tr420 + st2207: + if p++; p == pe { + goto _test_eof2207 + } + st_case_2207: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2208: + if p++; p == pe { + goto _test_eof2208 + } + st_case_2208: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2209: + if p++; p == pe { + goto _test_eof2209 + } + st_case_2209: + if 163 <= data[p] { + goto tr2008 + } + goto tr420 + st2210: + if p++; p == pe { + goto _test_eof2210 + } + st_case_2210: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2008 + st2211: + if p++; p == pe { + goto _test_eof2211 + } + st_case_2211: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2008 + st2212: + if p++; p == pe { + goto _test_eof2212 + } + st_case_2212: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr2008 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr2008 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2008 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2213: + if p++; p == pe { + goto _test_eof2213 + } + st_case_2213: + switch 
data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2008 + st2214: + if p++; p == pe { + goto _test_eof2214 + } + st_case_2214: + if data[p] == 188 { + goto tr2008 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2008 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2215: + if p++; p == pe { + goto _test_eof2215 + } + st_case_2215: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 
182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2008 + st2216: + if p++; p == pe { + goto _test_eof2216 + } + st_case_2216: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2008 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2217: + if p++; p == pe { + goto _test_eof2217 + } + st_case_2217: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2008 + st2218: + if p++; p == pe { + goto _test_eof2218 + } + st_case_2218: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2008 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + 
goto tr420 + st2219: + if p++; p == pe { + goto _test_eof2219 + } + st_case_2219: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2008 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr2008 + } + default: + goto tr148 + } + default: + goto tr2008 + } + goto tr420 + st2220: + if p++; p == pe { + goto _test_eof2220 + } + st_case_2220: + switch data[p] { + case 130: + goto tr2008 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2008 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2221: + if p++; p == pe { + goto _test_eof2221 + } + st_case_2221: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2008 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2008 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2222: + if p++; p == pe { + goto _test_eof2222 + } + st_case_2222: + 
if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2008 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2223: + if p++; p == pe { + goto _test_eof2223 + } + st_case_2223: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2008 + st2224: + if p++; p == pe { + goto _test_eof2224 + } + st_case_2224: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2008 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2225: + if p++; p == pe { + goto _test_eof2225 + } + st_case_2225: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2008 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2008 + 
} + default: + goto tr2008 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2226: + if p++; p == pe { + goto _test_eof2226 + } + st_case_2226: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2008 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2008 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2227: + if p++; p == pe { + goto _test_eof2227 + } + st_case_2227: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr2008 + st2228: + if p++; p == pe { + goto _test_eof2228 + } + st_case_2228: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2229: + if p++; p == pe { + goto _test_eof2229 + } + st_case_2229: + 
switch data[p] { + case 138: + goto tr2008 + case 150: + goto tr2008 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2008 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2008 + } + goto tr420 + st2230: + if p++; p == pe { + goto _test_eof2230 + } + st_case_2230: + if data[p] == 177 { + goto tr2008 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2008 + } + goto tr420 + st2231: + if p++; p == pe { + goto _test_eof2231 + } + st_case_2231: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr2008 + } + goto tr420 + st2232: + if p++; p == pe { + goto _test_eof2232 + } + st_case_2232: + if data[p] == 177 { + goto tr2008 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2008 + } + case data[p] >= 180: + goto tr2008 + } + goto tr420 + st2233: + if p++; p == pe { + goto _test_eof2233 + } + st_case_2233: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr2008 + } + goto tr420 + st2234: + if p++; p == pe { + goto _test_eof2234 + } + st_case_2234: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2008 + case 183: + goto tr2008 + case 185: + goto tr2008 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2008 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2008 + } + default: + goto tr421 + } + goto tr420 + st2235: + if p++; p == pe { + goto _test_eof2235 + } + st_case_2235: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + 
st2236: + if p++; p == pe { + goto _test_eof2236 + } + st_case_2236: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2008 + } + case data[p] >= 128: + goto tr2008 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2008 + } + case data[p] >= 141: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2237: + if p++; p == pe { + goto _test_eof2237 + } + st_case_2237: + if data[p] == 134 { + goto tr2008 + } + goto tr420 + st2238: + if p++; p == pe { + goto _test_eof2238 + } + st_case_2238: + switch data[p] { + case 128: + goto st2239 + case 129: + goto st2240 + case 130: + goto st2241 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2242 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st2243 + case 157: + goto st2244 + case 158: + goto st2245 + case 159: + goto st2246 + case 160: + goto st2247 + case 161: + goto st219 + case 162: + goto st2248 + case 163: + goto st221 + case 164: + goto st2249 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st2250 + case 169: + goto st2251 + case 170: + goto st2252 + case 172: + goto st2253 + case 173: + goto st2254 + case 174: + goto st2255 + case 175: + goto st2256 + case 176: + goto st2257 + case 177: + goto st640 + case 179: + goto st2258 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2259 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st2239: + if p++; p == pe { + goto 
_test_eof2239 + } + st_case_2239: + if 171 <= data[p] && data[p] <= 190 { + goto tr2008 + } + goto tr420 + st2240: + if p++; p == pe { + goto _test_eof2240 + } + st_case_2240: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2008 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2008 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2008 + } + default: + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2241: + if p++; p == pe { + goto _test_eof2241 + } + st_case_2241: + if data[p] == 143 { + goto tr2008 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2008 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2008 + } + default: + goto tr421 + } + goto tr420 + st2242: + if p++; p == pe { + goto _test_eof2242 + } + st_case_2242: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2243: + if p++; p == pe { + goto _test_eof2243 + } + st_case_2243: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2008 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2008 + } + goto tr420 + st2244: + if p++; p == pe { + goto _test_eof2244 + } + st_case_2244: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 
179 { + goto tr2008 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2245: + if p++; p == pe { + goto _test_eof2245 + } + st_case_2245: + if 180 <= data[p] { + goto tr2008 + } + goto tr420 + st2246: + if p++; p == pe { + goto _test_eof2246 + } + st_case_2246: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr2008 + st2247: + if p++; p == pe { + goto _test_eof2247 + } + st_case_2247: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2008 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st2248: + if p++; p == pe { + goto _test_eof2248 + } + st_case_2248: + if data[p] == 169 { + goto tr2008 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2249: + if p++; p == pe { + goto _test_eof2249 + } + st_case_2249: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2250: + if p++; p == pe { + goto _test_eof2250 + } + st_case_2250: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2251: + if p++; p == pe { + goto _test_eof2251 + } + st_case_2251: + if data[p] == 191 { + goto tr2008 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2008 + } + case data[p] >= 149: + goto tr2008 + } + goto tr420 + st2252: + if p++; p == pe { + goto _test_eof2252 + } + st_case_2252: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 
153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2008 + } + default: + goto tr421 + } + goto tr420 + st2253: + if p++; p == pe { + goto _test_eof2253 + } + st_case_2253: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2008 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2254: + if p++; p == pe { + goto _test_eof2254 + } + st_case_2254: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr2008 + st2255: + if p++; p == pe { + goto _test_eof2255 + } + st_case_2255: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2008 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr2008 + } + goto tr420 + st2256: + if p++; p == pe { + goto _test_eof2256 + } + st_case_2256: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2008 + } + goto tr148 + st2257: + if p++; p == pe { + goto _test_eof2257 + } + st_case_2257: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2258: + if p++; p == pe { + goto _test_eof2258 + } + st_case_2258: + if data[p] == 173 { + goto tr2008 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2008 + } + case data[p] >= 144: + goto tr2008 + } + case data[p] > 177: + switch { + case data[p] < 181: 
+ if 178 <= data[p] && data[p] <= 180 { + goto tr2008 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2008 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2259: + if p++; p == pe { + goto _test_eof2259 + } + st_case_2259: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2008 + } + case data[p] >= 128: + goto tr2008 + } + goto tr420 + st2260: + if p++; p == pe { + goto _test_eof2260 + } + st_case_2260: + switch data[p] { + case 128: + goto st2261 + case 129: + goto st2262 + case 130: + goto st241 + case 131: + goto st2263 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2264 + case 180: + goto st251 + case 181: + goto st2265 + case 182: + goto st253 + case 183: + goto st2266 + case 184: + goto st255 + } + goto tr420 + st2261: + if p++; p == pe { + goto _test_eof2261 + } + st_case_2261: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr2008 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr2008 + } + default: + goto st142 + } + goto tr420 + st2262: + if p++; p == pe { + goto _test_eof2262 + } + st_case_2262: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr2008 + } + default: + goto tr420 + } + goto tr571 + st2263: + if p++; p == pe { + goto _test_eof2263 + } + st_case_2263: + if 144 <= data[p] && data[p] <= 176 { + goto tr2008 + } + goto tr420 + st2264: + if p++; p == 
pe { + goto _test_eof2264 + } + st_case_2264: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2265: + if p++; p == pe { + goto _test_eof2265 + } + st_case_2265: + if data[p] == 191 { + goto tr2008 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st2266: + if p++; p == pe { + goto _test_eof2266 + } + st_case_2266: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2008 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2267: + if p++; p == pe { + goto _test_eof2267 + } + st_case_2267: + switch data[p] { + case 128: + goto st2268 + case 130: + goto st2269 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st2268: + if p++; p == pe { + goto _test_eof2268 + } + st_case_2268: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr2008 + } + goto tr420 + st2269: + if p++; p == pe { + goto _test_eof2269 + } + st_case_2269: + if 153 <= data[p] && data[p] <= 154 { + goto tr2008 + } + goto tr420 + st2270: + if p++; p == pe { + goto _test_eof2270 + } + st_case_2270: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st2271 + case 154: + goto st2272 + case 155: + goto st2273 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st2274 + case 161: + goto st272 + case 162: + goto st2275 + case 163: + goto 
st2276 + case 164: + goto st2277 + case 165: + goto st2278 + case 166: + goto st2279 + case 167: + goto st2280 + case 168: + goto st2281 + case 169: + goto st2282 + case 170: + goto st2283 + case 171: + goto st2284 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st2285 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st2271: + if p++; p == pe { + goto _test_eof2271 + } + st_case_2271: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2272: + if p++; p == pe { + goto _test_eof2272 + } + st_case_2272: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2008 + } + goto tr420 + st2273: + if p++; p == pe { + goto _test_eof2273 + } + st_case_2273: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr2008 + } + goto tr148 + st2274: + if p++; p == pe { + goto _test_eof2274 + } + st_case_2274: + switch data[p] { + case 130: + goto tr2008 + case 134: + goto tr2008 + case 139: + goto tr2008 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr2008 + } + goto tr148 + st2275: + if p++; p == pe { + goto _test_eof2275 + } + st_case_2275: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2008 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2276: + if p++; p == pe { + goto _test_eof2276 + } + st_case_2276: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto 
tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr2008 + st2277: + if p++; p == pe { + goto _test_eof2277 + } + st_case_2277: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2278: + if p++; p == pe { + goto _test_eof2278 + } + st_case_2278: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2008 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2279: + if p++; p == pe { + goto _test_eof2279 + } + st_case_2279: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2280: + if p++; p == pe { + goto _test_eof2280 + } + st_case_2280: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr2008 + st2281: + if p++; p == pe { + goto _test_eof2281 + } + st_case_2281: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2282: + if p++; p == pe { + goto _test_eof2282 + } + st_case_2282: + if data[p] == 131 { + goto tr2008 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] 
<= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2008 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2008 + } + goto tr420 + st2283: + if p++; p == pe { + goto _test_eof2283 + } + st_case_2283: + if data[p] == 176 { + goto tr2008 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2008 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2284: + if p++; p == pe { + goto _test_eof2284 + } + st_case_2284: + if data[p] == 129 { + goto tr2008 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2008 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2008 + } + goto tr420 + st2285: + if p++; p == pe { + goto _test_eof2285 + } + st_case_2285: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2286: + if p++; p == pe { + goto _test_eof2286 + } + st_case_2286: + switch data[p] { + case 172: + goto st2287 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st2288 + case 185: + goto st967 + case 187: + goto st2289 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st2290 + case 191: + goto st2291 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st2287: + if p++; p == pe { + goto _test_eof2287 + } + st_case_2287: + switch data[p] { + case 158: + goto tr2008 + case 190: + goto tr572 + } + switch { + 
case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st2288: + if p++; p == pe { + goto _test_eof2288 + } + st_case_2288: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2008 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr2008 + } + goto tr420 + st2289: + if p++; p == pe { + goto _test_eof2289 + } + st_case_2289: + if data[p] == 191 { + goto tr2008 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st2290: + if p++; p == pe { + goto _test_eof2290 + } + st_case_2290: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr2008 + } + goto tr420 + st2291: + if p++; p == pe { + goto _test_eof2291 + } + st_case_2291: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2008 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2292: + if p++; p == pe { + goto _test_eof2292 + } + st_case_2292: + switch data[p] { + case 144: + goto st2293 + case 145: + goto st2299 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st2318 + case 155: + goto st2323 + case 157: + goto st2325 + case 158: + goto st2332 + case 159: + goto st403 + } + goto tr420 + st2293: + if p++; p == pe { + goto _test_eof2293 + } + st_case_2293: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 
+ case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2294 + case 138: + goto st313 + case 139: + goto st2295 + case 140: + goto st315 + case 141: + goto st2296 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st2297 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st2298 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st2294: + if p++; p == pe { + goto _test_eof2294 + } + st_case_2294: + if data[p] == 189 { + goto tr2008 + } + goto tr420 + st2295: + if p++; p == pe { + goto _test_eof2295 + } + st_case_2295: + if data[p] == 160 { + goto tr2008 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st2296: + if p++; p == pe { + goto _test_eof2296 + } + st_case_2296: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2297: + if p++; p == pe { + goto _test_eof2297 + } + st_case_2297: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2008 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case 
data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2008 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2298: + if p++; p == pe { + goto _test_eof2298 + } + st_case_2298: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2299: + if p++; p == pe { + goto _test_eof2299 + } + st_case_2299: + switch data[p] { + case 128: + goto st2300 + case 129: + goto st2301 + case 130: + goto st2302 + case 131: + goto st691 + case 132: + goto st2303 + case 133: + goto st2304 + case 134: + goto st2305 + case 135: + goto st2306 + case 136: + goto st2307 + case 138: + goto st348 + case 139: + goto st2308 + case 140: + goto st2309 + case 141: + goto st2310 + case 146: + goto st2311 + case 147: + goto st2312 + case 150: + goto st2313 + case 151: + goto st2314 + case 152: + goto st2311 + case 153: + goto st2315 + case 154: + goto st2316 + case 155: + goto st538 + case 156: + goto st2317 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st2300: + if p++; p == pe { + goto _test_eof2300 + } + st_case_2300: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2008 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2301: + if p++; p == pe { + goto _test_eof2301 + } + st_case_2301: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr2008 + st2302: + if p++; p == pe { + goto _test_eof2302 + } + st_case_2302: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2008 + st2303: + 
if p++; p == pe { + goto _test_eof2303 + } + st_case_2303: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2008 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2304: + if p++; p == pe { + goto _test_eof2304 + } + st_case_2304: + switch data[p] { + case 179: + goto tr2008 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st2305: + if p++; p == pe { + goto _test_eof2305 + } + st_case_2305: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2008 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2306: + if p++; p == pe { + goto _test_eof2306 + } + st_case_2306: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr2008 + st2307: + if p++; p == pe { + goto _test_eof2307 + } + st_case_2307: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2308: + if p++; p == pe { + goto _test_eof2308 + } + st_case_2308: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2008 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st2309: + if p++; p == pe { + goto 
_test_eof2309 + } + st_case_2309: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2008 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2008 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2310: + if p++; p == pe { + goto _test_eof2310 + } + st_case_2310: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2008 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2008 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2008 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2008 + } + default: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2311: + if p++; p == pe { + goto _test_eof2311 + } + st_case_2311: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2312: + if p++; p == pe { + goto _test_eof2312 + } + st_case_2312: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr2008 + st2313: + if p++; p == pe { + goto _test_eof2313 + } + st_case_2313: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + 
goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2008 + } + default: + goto tr2008 + } + goto tr420 + st2314: + if p++; p == pe { + goto _test_eof2314 + } + st_case_2314: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr2008 + st2315: + if p++; p == pe { + goto _test_eof2315 + } + st_case_2315: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr2008 + st2316: + if p++; p == pe { + goto _test_eof2316 + } + st_case_2316: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2317: + if p++; p == pe { + goto _test_eof2317 + } + st_case_2317: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr2008 + } + goto tr420 + st2318: + if p++; p == pe { + goto _test_eof2318 + } + st_case_2318: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st2319 + case 172: + goto st2320 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st2321 + case 190: + goto st2322 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st2319: + if p++; p == pe { + goto _test_eof2319 + } + st_case_2319: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2008 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st2320: + if p++; p == pe { + goto _test_eof2320 + } + st_case_2320: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2008 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2321: + if p++; p == pe { + 
goto _test_eof2321 + } + st_case_2321: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2322: + if p++; p == pe { + goto _test_eof2322 + } + st_case_2322: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2008 + } + goto tr420 + st2323: + if p++; p == pe { + goto _test_eof2323 + } + st_case_2323: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st2324 + } + goto tr420 + st2324: + if p++; p == pe { + goto _test_eof2324 + } + st_case_2324: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2008 + } + case data[p] >= 157: + goto tr2008 + } + default: + goto tr148 + } + goto tr420 + st2325: + if p++; p == pe { + goto _test_eof2325 + } + st_case_2325: + switch data[p] { + case 133: + goto st2326 + case 134: + goto st2327 + case 137: + goto st2328 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st2329 + case 169: + goto st2330 + case 170: + goto st2331 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st2326: + if p++; p == pe { + goto _test_eof2326 + } + st_case_2326: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2008 + } + case data[p] >= 165: + goto tr2008 + } + goto tr420 + st2327: + if p++; p == pe { + goto _test_eof2327 + } + st_case_2327: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= 
data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2008 + st2328: + if p++; p == pe { + goto _test_eof2328 + } + st_case_2328: + if 130 <= data[p] && data[p] <= 132 { + goto tr2008 + } + goto tr420 + st2329: + if p++; p == pe { + goto _test_eof2329 + } + st_case_2329: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2008 + } + case data[p] >= 128: + goto tr2008 + } + goto tr420 + st2330: + if p++; p == pe { + goto _test_eof2330 + } + st_case_2330: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr2008 + st2331: + if p++; p == pe { + goto _test_eof2331 + } + st_case_2331: + if data[p] == 132 { + goto tr2008 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2008 + } + case data[p] >= 155: + goto tr2008 + } + goto tr420 + st2332: + if p++; p == pe { + goto _test_eof2332 + } + st_case_2332: + switch data[p] { + case 160: + goto st147 + case 163: + goto st2333 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st2333: + if p++; p == pe { + goto _test_eof2333 + } + st_case_2333: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr2008 + } + goto tr148 + st2334: + if p++; p == pe { + goto _test_eof2334 + } + st_case_2334: + if data[p] == 160 { + goto st2335 + } + goto tr420 + st2335: + if p++; p == pe { + goto _test_eof2335 + } + st_case_2335: + switch data[p] { + case 128: + goto st2336 + case 129: + goto st2337 + case 132: + goto st2194 + case 135: + goto st2339 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2338 + } + goto tr420 + st2336: + if p++; p == pe { + goto _test_eof2336 + } + st_case_2336: + if data[p] == 129 { + goto tr2008 + } + if 160 <= data[p] { + goto tr2008 + } + goto tr420 + st2337: + if p++; p == 
pe { + goto _test_eof2337 + } + st_case_2337: + if 192 <= data[p] { + goto tr420 + } + goto tr2008 + st2338: + if p++; p == pe { + goto _test_eof2338 + } + st_case_2338: + goto tr2008 + st2339: + if p++; p == pe { + goto _test_eof2339 + } + st_case_2339: + if 176 <= data[p] { + goto tr420 + } + goto tr2008 +tr2266: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4875 +tr4463: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4875 + st4875: + if p++; p == pe { + goto _test_eof4875 + } + st_case_4875: +//line segment_words_prod.go:62239 + switch data[p] { + case 95: + goto tr2136 + case 194: + goto st2489 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st2490 + case 205: + goto st2491 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st2492 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2493 + case 215: + goto st2494 + case 216: + goto st2495 + case 217: + goto st2496 + case 219: + goto st2497 + case 220: + goto st2498 + case 221: + goto st2499 + case 222: + goto st2500 + case 223: + goto st2501 + case 224: + goto st2502 + case 225: + goto st2534 + case 226: + goto st2556 + case 227: + goto st2563 + case 234: + goto st2566 + case 237: + goto st287 + case 239: + goto st2582 + case 240: + goto st2588 + case 243: + goto st2630 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr126 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2136: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line 
segment_words.rl:119 +act = 4; + goto st4876 + st4876: + if p++; p == pe { + goto _test_eof4876 + } + st_case_4876: +//line segment_words_prod.go:62343 + switch data[p] { + case 95: + goto tr2136 + case 194: + goto st2340 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st2341 + case 205: + goto st2342 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st2343 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2344 + case 215: + goto st2345 + case 216: + goto st2346 + case 217: + goto st2347 + case 219: + goto st2348 + case 220: + goto st2349 + case 221: + goto st2350 + case 222: + goto st2351 + case 223: + goto st2352 + case 224: + goto st2353 + case 225: + goto st2385 + case 226: + goto st2407 + case 227: + goto st2414 + case 234: + goto st2417 + case 237: + goto st287 + case 239: + goto st2433 + case 240: + goto st2441 + case 243: + goto st2483 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr126 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st2340: + if p++; p == pe { + goto _test_eof2340 + } + st_case_2340: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2136 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st2341: + if p++; p == pe { + goto _test_eof2341 + } + st_case_2341: + if data[p] <= 127 { + goto tr420 + } + goto tr2136 + st2342: + if p++; p == pe { + goto _test_eof2342 + } + st_case_2342: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] 
>= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr2136 + st2343: + if p++; p == pe { + goto _test_eof2343 + } + st_case_2343: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2136 + } + goto tr148 + st2344: + if p++; p == pe { + goto _test_eof2344 + } + st_case_2344: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2345: + if p++; p == pe { + goto _test_eof2345 + } + st_case_2345: + switch data[p] { + case 135: + goto tr2136 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2136 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2136 + } + goto tr420 + st2346: + if p++; p == pe { + goto _test_eof2346 + } + st_case_2346: + if data[p] == 156 { + goto tr2136 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2136 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2136 + } + goto tr420 + st2347: + if p++; p == pe { + goto _test_eof2347 + } + st_case_2347: + switch data[p] { + case 171: + goto tr126 + case 176: + goto tr2136 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr2136 + } + goto tr420 + st2348: + if p++; p == pe { + goto _test_eof2348 + } + st_case_2348: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto 
tr2136 + } + case data[p] >= 150: + goto tr2136 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr126 + } + goto tr148 + st2349: + if p++; p == pe { + goto _test_eof2349 + } + st_case_2349: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2136 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2350: + if p++; p == pe { + goto _test_eof2350 + } + st_case_2350: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr2136 + st2351: + if p++; p == pe { + goto _test_eof2351 + } + st_case_2351: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2136 + } + goto tr148 + st2352: + if p++; p == pe { + goto _test_eof2352 + } + st_case_2352: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2353: + if p++; p == pe { + goto _test_eof2353 + } + st_case_2353: + switch data[p] { + case 160: + goto st2354 + case 161: + goto st2355 + case 162: + goto st168 + case 163: + goto st2356 + case 164: + goto st2357 + case 165: + goto st2358 + case 166: + goto st2359 + case 167: + goto st2360 + case 168: + goto st2361 + case 169: + goto st2362 + case 170: + goto st2363 + case 171: + goto st2364 + case 172: + goto st2365 + case 173: + goto st2366 + case 174: + goto st2367 + case 175: + goto st2368 + case 176: + goto st2369 + case 177: + goto st2370 + case 178: + goto st2371 + case 179: + goto st2372 + case 180: + goto st2373 + case 181: + goto st2374 + case 182: + goto 
st2375 + case 183: + goto st2376 + case 184: + goto st2377 + case 185: + goto st2378 + case 186: + goto st2379 + case 187: + goto st2380 + case 188: + goto st2381 + case 189: + goto st2382 + case 190: + goto st2383 + case 191: + goto st2384 + } + goto tr420 + st2354: + if p++; p == pe { + goto _test_eof2354 + } + st_case_2354: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2355: + if p++; p == pe { + goto _test_eof2355 + } + st_case_2355: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2356: + if p++; p == pe { + goto _test_eof2356 + } + st_case_2356: + if 163 <= data[p] { + goto tr2136 + } + goto tr420 + st2357: + if p++; p == pe { + goto _test_eof2357 + } + st_case_2357: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2136 + st2358: + if p++; p == pe { + goto _test_eof2358 + } + st_case_2358: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr420 + } + goto tr2136 + st2359: + if p++; p == pe { + goto _test_eof2359 + } + st_case_2359: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr2136 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr2136 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 
190 <= data[p] { + goto tr2136 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2360: + if p++; p == pe { + goto _test_eof2360 + } + st_case_2360: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2136 + st2361: + if p++; p == pe { + goto _test_eof2361 + } + st_case_2361: + if data[p] == 188 { + goto tr2136 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2136 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2362: + if p++; p == pe { + goto _test_eof2362 + } + st_case_2362: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + 
goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2136 + st2363: + if p++; p == pe { + goto _test_eof2363 + } + st_case_2363: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2136 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2364: + if p++; p == pe { + goto _test_eof2364 + } + st_case_2364: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr420 + } + goto tr2136 + st2365: + if p++; p == pe { + goto _test_eof2365 + } + st_case_2365: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + 
switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2136 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2366: + if p++; p == pe { + goto _test_eof2366 + } + st_case_2366: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2136 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2136 + } + default: + goto tr2136 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 162: + goto tr2136 + } + default: + goto tr148 + } + default: + goto tr2136 + } + goto tr420 + st2367: + if p++; p == pe { + goto _test_eof2367 + } + st_case_2367: + switch data[p] { + case 130: + goto tr2136 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2136 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2368: + if p++; p == pe { + goto _test_eof2368 + } + st_case_2368: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2136 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2136 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && 
data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2369: + if p++; p == pe { + goto _test_eof2369 + } + st_case_2369: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2136 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2370: + if p++; p == pe { + goto _test_eof2370 + } + st_case_2370: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2136 + st2371: + if p++; p == pe { + goto _test_eof2371 + } + st_case_2371: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2136 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2372: + if p++; p == pe { + goto _test_eof2372 + } + st_case_2372: + if data[p] == 158 { + goto tr148 + } + switch { 
+ case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2136 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2136 + } + default: + goto tr2136 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2373: + if p++; p == pe { + goto _test_eof2373 + } + st_case_2373: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2136 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2136 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2374: + if p++; p == pe { + goto _test_eof2374 + } + st_case_2374: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr2136 + st2375: + if p++; p == pe { + goto _test_eof2375 + } + st_case_2375: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] 
&& data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2376: + if p++; p == pe { + goto _test_eof2376 + } + st_case_2376: + switch data[p] { + case 138: + goto tr2136 + case 150: + goto tr2136 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2136 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2136 + } + goto tr420 + st2377: + if p++; p == pe { + goto _test_eof2377 + } + st_case_2377: + if data[p] == 177 { + goto tr2136 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2136 + } + goto tr420 + st2378: + if p++; p == pe { + goto _test_eof2378 + } + st_case_2378: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr2136 + } + goto tr420 + st2379: + if p++; p == pe { + goto _test_eof2379 + } + st_case_2379: + if data[p] == 177 { + goto tr2136 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2136 + } + case data[p] >= 180: + goto tr2136 + } + goto tr420 + st2380: + if p++; p == pe { + goto _test_eof2380 + } + st_case_2380: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr2136 + } + goto tr420 + st2381: + if p++; p == pe { + goto _test_eof2381 + } + st_case_2381: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2136 + case 183: + goto tr2136 + case 185: + goto tr2136 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2136 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2136 + } + default: + goto tr126 + } + goto tr420 + st2382: + if p++; p == pe { + goto _test_eof2382 + } + st_case_2382: + switch { + case data[p] < 
137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2383: + if p++; p == pe { + goto _test_eof2383 + } + st_case_2383: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2136 + } + case data[p] >= 128: + goto tr2136 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2136 + } + case data[p] >= 141: + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2384: + if p++; p == pe { + goto _test_eof2384 + } + st_case_2384: + if data[p] == 134 { + goto tr2136 + } + goto tr420 + st2385: + if p++; p == pe { + goto _test_eof2385 + } + st_case_2385: + switch data[p] { + case 128: + goto st2386 + case 129: + goto st2387 + case 130: + goto st2388 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2389 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st2390 + case 157: + goto st2391 + case 158: + goto st2392 + case 159: + goto st2393 + case 160: + goto st2394 + case 161: + goto st219 + case 162: + goto st2395 + case 163: + goto st221 + case 164: + goto st2396 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st2397 + case 169: + goto st2398 + case 170: + goto st2399 + case 172: + goto st2400 + case 173: + goto st2401 + case 174: + goto st2402 + case 175: + goto st2403 + case 176: + goto st2404 + case 177: + goto st1659 + case 179: + goto st2405 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2406 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 
152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st2386: + if p++; p == pe { + goto _test_eof2386 + } + st_case_2386: + if 171 <= data[p] && data[p] <= 190 { + goto tr2136 + } + goto tr420 + st2387: + if p++; p == pe { + goto _test_eof2387 + } + st_case_2387: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2136 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2136 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2136 + } + default: + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2388: + if p++; p == pe { + goto _test_eof2388 + } + st_case_2388: + if data[p] == 143 { + goto tr2136 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2136 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2136 + } + default: + goto tr126 + } + goto tr420 + st2389: + if p++; p == pe { + goto _test_eof2389 + } + st_case_2389: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2390: + if p++; p == pe { + goto _test_eof2390 + } + st_case_2390: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2136 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2136 + } + goto tr420 + st2391: + if p++; p == pe { + goto _test_eof2391 + } + st_case_2391: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 
146 <= data[p] && data[p] <= 147 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2136 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2392: + if p++; p == pe { + goto _test_eof2392 + } + st_case_2392: + if 180 <= data[p] { + goto tr2136 + } + goto tr420 + st2393: + if p++; p == pe { + goto _test_eof2393 + } + st_case_2393: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr420 + } + goto tr2136 + st2394: + if p++; p == pe { + goto _test_eof2394 + } + st_case_2394: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2136 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr420 + st2395: + if p++; p == pe { + goto _test_eof2395 + } + st_case_2395: + if data[p] == 169 { + goto tr2136 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2396: + if p++; p == pe { + goto _test_eof2396 + } + st_case_2396: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2397: + if p++; p == pe { + goto _test_eof2397 + } + st_case_2397: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2398: + if p++; p == pe { + goto _test_eof2398 + } + st_case_2398: + if data[p] == 191 { + goto tr2136 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2136 + } + case data[p] >= 149: + goto tr2136 + } + goto 
tr420 + st2399: + if p++; p == pe { + goto _test_eof2399 + } + st_case_2399: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2136 + } + default: + goto tr126 + } + goto tr420 + st2400: + if p++; p == pe { + goto _test_eof2400 + } + st_case_2400: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2136 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2401: + if p++; p == pe { + goto _test_eof2401 + } + st_case_2401: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr126 + } + goto tr2136 + st2402: + if p++; p == pe { + goto _test_eof2402 + } + st_case_2402: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2136 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr2136 + } + goto tr420 + st2403: + if p++; p == pe { + goto _test_eof2403 + } + st_case_2403: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2136 + } + goto tr148 + st2404: + if p++; p == pe { + goto _test_eof2404 + } + st_case_2404: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2405: + if p++; p == pe { + goto _test_eof2405 + } + st_case_2405: + if data[p] == 173 { + goto tr2136 + } + switch { + case data[p] < 169: + 
switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2136 + } + case data[p] >= 144: + goto tr2136 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2136 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2136 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2406: + if p++; p == pe { + goto _test_eof2406 + } + st_case_2406: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2136 + } + case data[p] >= 128: + goto tr2136 + } + goto tr420 + st2407: + if p++; p == pe { + goto _test_eof2407 + } + st_case_2407: + switch data[p] { + case 128: + goto st2408 + case 129: + goto st2409 + case 130: + goto st241 + case 131: + goto st2410 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2411 + case 180: + goto st251 + case 181: + goto st2412 + case 182: + goto st253 + case 183: + goto st2413 + case 184: + goto st255 + } + goto tr420 + st2408: + if p++; p == pe { + goto _test_eof2408 + } + st_case_2408: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr2136 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2409: + if p++; p == pe { + goto _test_eof2409 + } + st_case_2409: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2136 + st2410: + if p++; p == pe { + goto _test_eof2410 + } + st_case_2410: + if 144 <= data[p] && data[p] <= 176 { + goto tr2136 + } + goto tr420 + st2411: + if p++; p == pe { + goto _test_eof2411 + } + 
st_case_2411: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2412: + if p++; p == pe { + goto _test_eof2412 + } + st_case_2412: + if data[p] == 191 { + goto tr2136 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st2413: + if p++; p == pe { + goto _test_eof2413 + } + st_case_2413: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2136 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2414: + if p++; p == pe { + goto _test_eof2414 + } + st_case_2414: + switch data[p] { + case 128: + goto st2415 + case 130: + goto st2416 + case 131: + goto st1164 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr420 + st2415: + if p++; p == pe { + goto _test_eof2415 + } + st_case_2415: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] < 177: + if 170 <= data[p] && data[p] <= 175 { + goto tr2136 + } + case data[p] > 181: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr1049 + } + goto tr420 + st2416: + if p++; p == pe { + goto _test_eof2416 + } + st_case_2416: + switch { + case data[p] < 155: + if 153 <= data[p] && data[p] <= 154 { + goto tr2136 + } + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st2417: + if p++; p == pe { + goto _test_eof2417 + } + st_case_2417: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: 
+ goto st147 + case 152: + goto st1673 + case 153: + goto st2418 + case 154: + goto st2419 + case 155: + goto st2420 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st2421 + case 161: + goto st272 + case 162: + goto st2422 + case 163: + goto st2423 + case 164: + goto st2424 + case 165: + goto st2425 + case 166: + goto st2426 + case 167: + goto st2427 + case 168: + goto st2428 + case 169: + goto st2429 + case 170: + goto st2430 + case 171: + goto st2431 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st2432 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st2418: + if p++; p == pe { + goto _test_eof2418 + } + st_case_2418: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2419: + if p++; p == pe { + goto _test_eof2419 + } + st_case_2419: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2136 + } + goto tr420 + st2420: + if p++; p == pe { + goto _test_eof2420 + } + st_case_2420: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr2136 + } + goto tr148 + st2421: + if p++; p == pe { + goto _test_eof2421 + } + st_case_2421: + switch data[p] { + case 130: + goto tr2136 + case 134: + goto tr2136 + case 139: + goto tr2136 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr2136 + } + goto tr148 + st2422: + if p++; p == pe { + goto _test_eof2422 + } + st_case_2422: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2136 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2136 + } + 
default: + goto tr148 + } + goto tr420 + st2423: + if p++; p == pe { + goto _test_eof2423 + } + st_case_2423: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr2136 + st2424: + if p++; p == pe { + goto _test_eof2424 + } + st_case_2424: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2425: + if p++; p == pe { + goto _test_eof2425 + } + st_case_2425: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2136 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2426: + if p++; p == pe { + goto _test_eof2426 + } + st_case_2426: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2427: + if p++; p == pe { + goto _test_eof2427 + } + st_case_2427: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + default: + goto tr420 + } + goto tr2136 + st2428: + if p++; p == pe { + goto _test_eof2428 + } + st_case_2428: + switch { + case 
data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2429: + if p++; p == pe { + goto _test_eof2429 + } + st_case_2429: + if data[p] == 131 { + goto tr2136 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2136 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr2136 + } + goto tr420 + st2430: + if p++; p == pe { + goto _test_eof2430 + } + st_case_2430: + if data[p] == 176 { + goto tr2136 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2136 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2431: + if p++; p == pe { + goto _test_eof2431 + } + st_case_2431: + if data[p] == 129 { + goto tr2136 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2136 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2136 + } + goto tr420 + st2432: + if p++; p == pe { + goto _test_eof2432 + } + st_case_2432: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2433: + if p++; p == pe { + goto _test_eof2433 + } + st_case_2433: + switch data[p] { + case 172: + goto st2434 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st2435 + case 185: + goto st2436 + case 187: + goto st2437 + case 188: + goto st2438 + case 
189: + goto st1261 + case 190: + goto st2439 + case 191: + goto st2440 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st2434: + if p++; p == pe { + goto _test_eof2434 + } + st_case_2434: + switch data[p] { + case 158: + goto tr2136 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st2435: + if p++; p == pe { + goto _test_eof2435 + } + st_case_2435: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2136 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2436: + if p++; p == pe { + goto _test_eof2436 + } + st_case_2436: + switch { + case data[p] < 176: + if 141 <= data[p] && data[p] <= 143 { + goto tr2136 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st2437: + if p++; p == pe { + goto _test_eof2437 + } + st_case_2437: + if data[p] == 191 { + goto tr2136 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st2438: + if p++; p == pe { + goto _test_eof2438 + } + st_case_2438: + if data[p] == 191 { + goto tr2136 + } + if 161 <= data[p] && data[p] <= 186 { + goto tr148 + } + goto tr2 + st2439: + if p++; p == pe { + goto _test_eof2439 + } + st_case_2439: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2136 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr1049 + st2440: + if p++; p == pe { + goto _test_eof2440 + } + st_case_2440: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case 
data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2136 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2441: + if p++; p == pe { + goto _test_eof2441 + } + st_case_2441: + switch data[p] { + case 144: + goto st2442 + case 145: + goto st2448 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st2467 + case 155: + goto st2472 + case 157: + goto st2474 + case 158: + goto st2481 + case 159: + goto st403 + } + goto tr420 + st2442: + if p++; p == pe { + goto _test_eof2442 + } + st_case_2442: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2443 + case 138: + goto st313 + case 139: + goto st2444 + case 140: + goto st315 + case 141: + goto st2445 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st2446 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st2447 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st2443: + if p++; p == pe { + goto _test_eof2443 + } + st_case_2443: + if data[p] == 189 { + goto tr2136 + } + goto tr420 + st2444: + if p++; p == pe { + goto _test_eof2444 + } + st_case_2444: + if data[p] == 160 { + goto tr2136 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + 
st2445: + if p++; p == pe { + goto _test_eof2445 + } + st_case_2445: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2446: + if p++; p == pe { + goto _test_eof2446 + } + st_case_2446: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2136 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2136 + } + default: + goto tr2136 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2136 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2447: + if p++; p == pe { + goto _test_eof2447 + } + st_case_2447: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2448: + if p++; p == pe { + goto _test_eof2448 + } + st_case_2448: + switch data[p] { + case 128: + goto st2449 + case 129: + goto st2450 + case 130: + goto st2451 + case 131: + goto st1709 + case 132: + goto st2452 + case 133: + goto st2453 + case 134: + goto st2454 + case 135: + goto st2455 + case 136: + goto st2456 + case 138: + goto st348 + case 139: + goto st2457 + case 140: + goto st2458 + case 141: + goto st2459 + case 146: + goto st2460 + case 147: + goto st2461 + case 150: + goto st2462 + case 151: + goto st2463 + case 152: + goto st2460 + case 153: + goto st2464 + case 154: + goto st2465 + case 155: + goto st1724 + case 156: + goto st2466 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr420 + st2449: + if p++; p == pe { + goto _test_eof2449 + } + 
st_case_2449: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2136 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2450: + if p++; p == pe { + goto _test_eof2450 + } + st_case_2450: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr126 + } + goto tr2136 + st2451: + if p++; p == pe { + goto _test_eof2451 + } + st_case_2451: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2136 + st2452: + if p++; p == pe { + goto _test_eof2452 + } + st_case_2452: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2136 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2453: + if p++; p == pe { + goto _test_eof2453 + } + st_case_2453: + switch data[p] { + case 179: + goto tr2136 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st2454: + if p++; p == pe { + goto _test_eof2454 + } + st_case_2454: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2136 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2455: + if p++; p == pe { + goto _test_eof2455 + } + st_case_2455: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 156: + 
if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr2136 + st2456: + if p++; p == pe { + goto _test_eof2456 + } + st_case_2456: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2457: + if p++; p == pe { + goto _test_eof2457 + } + st_case_2457: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2136 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr420 + } + goto tr148 + st2458: + if p++; p == pe { + goto _test_eof2458 + } + st_case_2458: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2136 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2136 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2459: + if p++; p == pe { + goto _test_eof2459 + } + st_case_2459: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2136 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2136 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2136 + } + default: + goto tr2136 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2136 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2136 + } + default: + goto tr2136 + 
} + default: + goto tr148 + } + goto tr420 + st2460: + if p++; p == pe { + goto _test_eof2460 + } + st_case_2460: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2461: + if p++; p == pe { + goto _test_eof2461 + } + st_case_2461: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr420 + } + goto tr2136 + st2462: + if p++; p == pe { + goto _test_eof2462 + } + st_case_2462: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2136 + } + default: + goto tr2136 + } + goto tr420 + st2463: + if p++; p == pe { + goto _test_eof2463 + } + st_case_2463: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr2136 + st2464: + if p++; p == pe { + goto _test_eof2464 + } + st_case_2464: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + goto tr2136 + st2465: + if p++; p == pe { + goto _test_eof2465 + } + st_case_2465: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2466: + if p++; p == pe { + goto _test_eof2466 + } + st_case_2466: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr2136 + } + goto tr420 + st2467: + if p++; p == pe { + goto _test_eof2467 + } + st_case_2467: + switch data[p] { + case 160: + goto st147 + case 
168: + goto st370 + case 169: + goto st1728 + case 171: + goto st2468 + case 172: + goto st2469 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st2470 + case 190: + goto st2471 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st2468: + if p++; p == pe { + goto _test_eof2468 + } + st_case_2468: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2136 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st2469: + if p++; p == pe { + goto _test_eof2469 + } + st_case_2469: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2136 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2470: + if p++; p == pe { + goto _test_eof2470 + } + st_case_2470: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2471: + if p++; p == pe { + goto _test_eof2471 + } + st_case_2471: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2136 + } + goto tr420 + st2472: + if p++; p == pe { + goto _test_eof2472 + } + st_case_2472: + switch data[p] { + case 128: + goto st1224 + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st2473 + } + goto tr420 + st2473: + if p++; p == pe { + goto _test_eof2473 + } + st_case_2473: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2136 + } + case data[p] >= 157: + goto tr2136 + } + default: + goto tr148 + } + goto tr420 + st2474: + if p++; p == pe { + goto _test_eof2474 + } + st_case_2474: + switch data[p] { + case 133: + goto st2475 + case 134: + goto st2476 + case 137: + goto st2477 + case 144: + goto st147 + case 145: + goto st384 + case 
146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st2478 + case 169: + goto st2479 + case 170: + goto st2480 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st2475: + if p++; p == pe { + goto _test_eof2475 + } + st_case_2475: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2136 + } + case data[p] >= 165: + goto tr2136 + } + goto tr420 + st2476: + if p++; p == pe { + goto _test_eof2476 + } + st_case_2476: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2136 + st2477: + if p++; p == pe { + goto _test_eof2477 + } + st_case_2477: + if 130 <= data[p] && data[p] <= 132 { + goto tr2136 + } + goto tr420 + st2478: + if p++; p == pe { + goto _test_eof2478 + } + st_case_2478: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2136 + } + case data[p] >= 128: + goto tr2136 + } + goto tr420 + st2479: + if p++; p == pe { + goto _test_eof2479 + } + st_case_2479: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr2136 + st2480: + if p++; p == pe { + goto _test_eof2480 + } + st_case_2480: + if data[p] == 132 { + goto tr2136 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2136 + } + case data[p] >= 155: + goto tr2136 + } + goto tr420 + st2481: + if p++; p == pe { + goto _test_eof2481 + } + st_case_2481: + switch data[p] { + case 160: + goto st147 + case 163: + goto st2482 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st2482: + if p++; p == pe { + goto _test_eof2482 + } + 
st_case_2482: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr2136 + } + goto tr148 + st2483: + if p++; p == pe { + goto _test_eof2483 + } + st_case_2483: + if data[p] == 160 { + goto st2484 + } + goto tr420 + st2484: + if p++; p == pe { + goto _test_eof2484 + } + st_case_2484: + switch data[p] { + case 128: + goto st2485 + case 129: + goto st2486 + case 132: + goto st2341 + case 135: + goto st2488 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2487 + } + goto tr420 + st2485: + if p++; p == pe { + goto _test_eof2485 + } + st_case_2485: + if data[p] == 129 { + goto tr2136 + } + if 160 <= data[p] { + goto tr2136 + } + goto tr420 + st2486: + if p++; p == pe { + goto _test_eof2486 + } + st_case_2486: + if 192 <= data[p] { + goto tr420 + } + goto tr2136 + st2487: + if p++; p == pe { + goto _test_eof2487 + } + st_case_2487: + goto tr2136 + st2488: + if p++; p == pe { + goto _test_eof2488 + } + st_case_2488: + if 176 <= data[p] { + goto tr420 + } + goto tr2136 + st2489: + if p++; p == pe { + goto _test_eof2489 + } + st_case_2489: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2266 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st2490: + if p++; p == pe { + goto _test_eof2490 + } + st_case_2490: + if data[p] <= 127 { + goto tr420 + } + goto tr2266 + st2491: + if p++; p == pe { + goto _test_eof2491 + } + st_case_2491: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr2266 + st2492: + if p++; p == pe { + goto _test_eof2492 + } + st_case_2492: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto 
tr2266 + } + goto tr148 + st2493: + if p++; p == pe { + goto _test_eof2493 + } + st_case_2493: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2494: + if p++; p == pe { + goto _test_eof2494 + } + st_case_2494: + switch data[p] { + case 135: + goto tr2266 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2266 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2266 + } + goto tr420 + st2495: + if p++; p == pe { + goto _test_eof2495 + } + st_case_2495: + if data[p] == 156 { + goto tr2266 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2266 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2266 + } + goto tr420 + st2496: + if p++; p == pe { + goto _test_eof2496 + } + st_case_2496: + switch data[p] { + case 171: + goto tr126 + case 176: + goto tr2266 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr2266 + } + goto tr420 + st2497: + if p++; p == pe { + goto _test_eof2497 + } + st_case_2497: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2266 + } + case data[p] >= 150: + goto tr2266 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr126 + } + goto tr148 
+ st2498: + if p++; p == pe { + goto _test_eof2498 + } + st_case_2498: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2266 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2499: + if p++; p == pe { + goto _test_eof2499 + } + st_case_2499: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr2266 + st2500: + if p++; p == pe { + goto _test_eof2500 + } + st_case_2500: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2266 + } + goto tr148 + st2501: + if p++; p == pe { + goto _test_eof2501 + } + st_case_2501: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2502: + if p++; p == pe { + goto _test_eof2502 + } + st_case_2502: + switch data[p] { + case 160: + goto st2503 + case 161: + goto st2504 + case 162: + goto st168 + case 163: + goto st2505 + case 164: + goto st2506 + case 165: + goto st2507 + case 166: + goto st2508 + case 167: + goto st2509 + case 168: + goto st2510 + case 169: + goto st2511 + case 170: + goto st2512 + case 171: + goto st2513 + case 172: + goto st2514 + case 173: + goto st2515 + case 174: + goto st2516 + case 175: + goto st2517 + case 176: + goto st2518 + case 177: + goto st2519 + case 178: + goto st2520 + case 179: + goto st2521 + case 180: + goto st2522 + case 181: + goto st2523 + case 182: + goto st2524 + case 183: + goto st2525 + case 184: + goto st2526 + case 185: + goto st2527 + case 186: + goto st2528 + case 187: + goto st2529 + case 188: + goto st2530 + case 189: + goto st2531 + case 190: + goto st2532 + case 191: 
+ goto st2533 + } + goto tr420 + st2503: + if p++; p == pe { + goto _test_eof2503 + } + st_case_2503: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2504: + if p++; p == pe { + goto _test_eof2504 + } + st_case_2504: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2505: + if p++; p == pe { + goto _test_eof2505 + } + st_case_2505: + if 163 <= data[p] { + goto tr2266 + } + goto tr420 + st2506: + if p++; p == pe { + goto _test_eof2506 + } + st_case_2506: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2266 + st2507: + if p++; p == pe { + goto _test_eof2507 + } + st_case_2507: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr420 + } + goto tr2266 + st2508: + if p++; p == pe { + goto _test_eof2508 + } + st_case_2508: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr2266 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr2266 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2266 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2509: + if p++; p == pe { + goto _test_eof2509 + } + st_case_2509: + switch data[p] { + case 142: + goto tr148 + case 
158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2266 + st2510: + if p++; p == pe { + goto _test_eof2510 + } + st_case_2510: + if data[p] == 188 { + goto tr2266 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2266 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2511: + if p++; p == pe { + goto _test_eof2511 + } + st_case_2511: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case 
data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2266 + st2512: + if p++; p == pe { + goto _test_eof2512 + } + st_case_2512: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2266 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2513: + if p++; p == pe { + goto _test_eof2513 + } + st_case_2513: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr420 + } + goto tr2266 + st2514: + if p++; p == pe { + goto _test_eof2514 + } + st_case_2514: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2266 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2515: + if p++; p == pe { + 
goto _test_eof2515 + } + st_case_2515: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2266 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2266 + } + default: + goto tr2266 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 162: + goto tr2266 + } + default: + goto tr148 + } + default: + goto tr2266 + } + goto tr420 + st2516: + if p++; p == pe { + goto _test_eof2516 + } + st_case_2516: + switch data[p] { + case 130: + goto tr2266 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2266 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2517: + if p++; p == pe { + goto _test_eof2517 + } + st_case_2517: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2266 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2266 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2518: + if p++; p == pe { + goto _test_eof2518 + } + st_case_2518: + if data[p] == 189 { + goto tr148 + } + 
switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2266 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2519: + if p++; p == pe { + goto _test_eof2519 + } + st_case_2519: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2266 + st2520: + if p++; p == pe { + goto _test_eof2520 + } + st_case_2520: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2266 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2521: + if p++; p == pe { + goto _test_eof2521 + } + st_case_2521: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2266 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2266 + } + default: + goto tr2266 + } + case 
data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2522: + if p++; p == pe { + goto _test_eof2522 + } + st_case_2522: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2266 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2266 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2523: + if p++; p == pe { + goto _test_eof2523 + } + st_case_2523: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr2266 + st2524: + if p++; p == pe { + goto _test_eof2524 + } + st_case_2524: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2525: + if p++; p == pe { + goto _test_eof2525 + } + st_case_2525: + switch data[p] { + case 138: + goto 
tr2266 + case 150: + goto tr2266 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2266 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2266 + } + goto tr420 + st2526: + if p++; p == pe { + goto _test_eof2526 + } + st_case_2526: + if data[p] == 177 { + goto tr2266 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2266 + } + goto tr420 + st2527: + if p++; p == pe { + goto _test_eof2527 + } + st_case_2527: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr2266 + } + goto tr420 + st2528: + if p++; p == pe { + goto _test_eof2528 + } + st_case_2528: + if data[p] == 177 { + goto tr2266 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2266 + } + case data[p] >= 180: + goto tr2266 + } + goto tr420 + st2529: + if p++; p == pe { + goto _test_eof2529 + } + st_case_2529: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr2266 + } + goto tr420 + st2530: + if p++; p == pe { + goto _test_eof2530 + } + st_case_2530: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2266 + case 183: + goto tr2266 + case 185: + goto tr2266 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2266 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2266 + } + default: + goto tr126 + } + goto tr420 + st2531: + if p++; p == pe { + goto _test_eof2531 + } + st_case_2531: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2532: + if p++; p == pe { + goto 
_test_eof2532 + } + st_case_2532: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2266 + } + case data[p] >= 128: + goto tr2266 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2266 + } + case data[p] >= 141: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2533: + if p++; p == pe { + goto _test_eof2533 + } + st_case_2533: + if data[p] == 134 { + goto tr2266 + } + goto tr420 + st2534: + if p++; p == pe { + goto _test_eof2534 + } + st_case_2534: + switch data[p] { + case 128: + goto st2535 + case 129: + goto st2536 + case 130: + goto st2537 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2538 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st2539 + case 157: + goto st2540 + case 158: + goto st2541 + case 159: + goto st2542 + case 160: + goto st2543 + case 161: + goto st219 + case 162: + goto st2544 + case 163: + goto st221 + case 164: + goto st2545 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st2546 + case 169: + goto st2547 + case 170: + goto st2548 + case 172: + goto st2549 + case 173: + goto st2550 + case 174: + goto st2551 + case 175: + goto st2552 + case 176: + goto st2553 + case 177: + goto st1659 + case 179: + goto st2554 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2555 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st2535: + if p++; p == pe { + goto _test_eof2535 + } + st_case_2535: + if 
171 <= data[p] && data[p] <= 190 { + goto tr2266 + } + goto tr420 + st2536: + if p++; p == pe { + goto _test_eof2536 + } + st_case_2536: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2266 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2266 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2266 + } + default: + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2537: + if p++; p == pe { + goto _test_eof2537 + } + st_case_2537: + if data[p] == 143 { + goto tr2266 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2266 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2266 + } + default: + goto tr126 + } + goto tr420 + st2538: + if p++; p == pe { + goto _test_eof2538 + } + st_case_2538: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2539: + if p++; p == pe { + goto _test_eof2539 + } + st_case_2539: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2266 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2266 + } + goto tr420 + st2540: + if p++; p == pe { + goto _test_eof2540 + } + st_case_2540: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2266 + } + case data[p] 
>= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2541: + if p++; p == pe { + goto _test_eof2541 + } + st_case_2541: + if 180 <= data[p] { + goto tr2266 + } + goto tr420 + st2542: + if p++; p == pe { + goto _test_eof2542 + } + st_case_2542: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr420 + } + goto tr2266 + st2543: + if p++; p == pe { + goto _test_eof2543 + } + st_case_2543: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2266 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr420 + st2544: + if p++; p == pe { + goto _test_eof2544 + } + st_case_2544: + if data[p] == 169 { + goto tr2266 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2545: + if p++; p == pe { + goto _test_eof2545 + } + st_case_2545: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2546: + if p++; p == pe { + goto _test_eof2546 + } + st_case_2546: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2547: + if p++; p == pe { + goto _test_eof2547 + } + st_case_2547: + if data[p] == 191 { + goto tr2266 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2266 + } + case data[p] >= 149: + goto tr2266 + } + goto tr420 + st2548: + if p++; p == pe { + goto _test_eof2548 + } + st_case_2548: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 
190 { + goto tr2266 + } + default: + goto tr126 + } + goto tr420 + st2549: + if p++; p == pe { + goto _test_eof2549 + } + st_case_2549: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2266 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2550: + if p++; p == pe { + goto _test_eof2550 + } + st_case_2550: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr126 + } + goto tr2266 + st2551: + if p++; p == pe { + goto _test_eof2551 + } + st_case_2551: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2266 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr2266 + } + goto tr420 + st2552: + if p++; p == pe { + goto _test_eof2552 + } + st_case_2552: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2266 + } + goto tr148 + st2553: + if p++; p == pe { + goto _test_eof2553 + } + st_case_2553: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2554: + if p++; p == pe { + goto _test_eof2554 + } + st_case_2554: + if data[p] == 173 { + goto tr2266 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2266 + } + case data[p] >= 144: + goto tr2266 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 
{ + goto tr2266 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2266 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2555: + if p++; p == pe { + goto _test_eof2555 + } + st_case_2555: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2266 + } + case data[p] >= 128: + goto tr2266 + } + goto tr420 + st2556: + if p++; p == pe { + goto _test_eof2556 + } + st_case_2556: + switch data[p] { + case 128: + goto st2557 + case 129: + goto st2558 + case 130: + goto st241 + case 131: + goto st2559 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2560 + case 180: + goto st251 + case 181: + goto st2561 + case 182: + goto st253 + case 183: + goto st2562 + case 184: + goto st255 + } + goto tr420 + st2557: + if p++; p == pe { + goto _test_eof2557 + } + st_case_2557: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr2266 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr2136 + } + default: + goto tr2266 + } + goto tr420 + st2558: + if p++; p == pe { + goto _test_eof2558 + } + st_case_2558: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr2266 + } + default: + goto tr420 + } + goto tr2136 + st2559: + if p++; p == pe { + goto _test_eof2559 + } + st_case_2559: + if 144 <= data[p] && data[p] <= 176 { + goto tr2266 + } + goto tr420 + st2560: + if p++; p == pe { + goto _test_eof2560 + } + st_case_2560: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + 
goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2561: + if p++; p == pe { + goto _test_eof2561 + } + st_case_2561: + if data[p] == 191 { + goto tr2266 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st2562: + if p++; p == pe { + goto _test_eof2562 + } + st_case_2562: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2266 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2563: + if p++; p == pe { + goto _test_eof2563 + } + st_case_2563: + switch data[p] { + case 128: + goto st2564 + case 130: + goto st2565 + case 131: + goto st1164 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr420 + st2564: + if p++; p == pe { + goto _test_eof2564 + } + st_case_2564: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] < 177: + if 170 <= data[p] && data[p] <= 175 { + goto tr2266 + } + case data[p] > 181: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr1049 + } + goto tr420 + st2565: + if p++; p == pe { + goto _test_eof2565 + } + st_case_2565: + switch { + case data[p] < 155: + if 153 <= data[p] && data[p] <= 154 { + goto tr2266 + } + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st2566: + if p++; p == pe { + goto _test_eof2566 + } + st_case_2566: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st1673 + case 153: + goto st2567 + case 154: + goto st2568 + case 155: + goto st2569 + case 156: + goto st268 + case 
158: + goto st269 + case 159: + goto st270 + case 160: + goto st2570 + case 161: + goto st272 + case 162: + goto st2571 + case 163: + goto st2572 + case 164: + goto st2573 + case 165: + goto st2574 + case 166: + goto st2575 + case 167: + goto st2576 + case 168: + goto st2577 + case 169: + goto st2578 + case 170: + goto st2579 + case 171: + goto st2580 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st2581 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st2567: + if p++; p == pe { + goto _test_eof2567 + } + st_case_2567: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2568: + if p++; p == pe { + goto _test_eof2568 + } + st_case_2568: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2266 + } + goto tr420 + st2569: + if p++; p == pe { + goto _test_eof2569 + } + st_case_2569: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr2266 + } + goto tr148 + st2570: + if p++; p == pe { + goto _test_eof2570 + } + st_case_2570: + switch data[p] { + case 130: + goto tr2266 + case 134: + goto tr2266 + case 139: + goto tr2266 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr2266 + } + goto tr148 + st2571: + if p++; p == pe { + goto _test_eof2571 + } + st_case_2571: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2266 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2572: + if p++; p == pe { + goto _test_eof2572 + } + st_case_2572: + switch data[p] { + case 187: + goto tr148 + 
case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr2266 + st2573: + if p++; p == pe { + goto _test_eof2573 + } + st_case_2573: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2574: + if p++; p == pe { + goto _test_eof2574 + } + st_case_2574: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2266 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2575: + if p++; p == pe { + goto _test_eof2575 + } + st_case_2575: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2576: + if p++; p == pe { + goto _test_eof2576 + } + st_case_2576: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + default: + goto tr420 + } + goto tr2266 + st2577: + if p++; p == pe { + goto _test_eof2577 + } + st_case_2577: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2578: + if p++; p == pe { + goto 
_test_eof2578 + } + st_case_2578: + if data[p] == 131 { + goto tr2266 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2266 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr2266 + } + goto tr420 + st2579: + if p++; p == pe { + goto _test_eof2579 + } + st_case_2579: + if data[p] == 176 { + goto tr2266 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2266 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2580: + if p++; p == pe { + goto _test_eof2580 + } + st_case_2580: + if data[p] == 129 { + goto tr2266 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2266 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2266 + } + goto tr420 + st2581: + if p++; p == pe { + goto _test_eof2581 + } + st_case_2581: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2582: + if p++; p == pe { + goto _test_eof2582 + } + st_case_2582: + switch data[p] { + case 172: + goto st2583 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st2584 + case 185: + goto st2436 + case 187: + goto st2585 + case 188: + goto st2438 + case 189: + goto st1261 + case 190: + goto st2586 + case 191: + goto st2587 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st2583: + if 
p++; p == pe { + goto _test_eof2583 + } + st_case_2583: + switch data[p] { + case 158: + goto tr2266 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st2584: + if p++; p == pe { + goto _test_eof2584 + } + st_case_2584: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2266 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr2136 + } + default: + goto tr2266 + } + goto tr420 + st2585: + if p++; p == pe { + goto _test_eof2585 + } + st_case_2585: + if data[p] == 191 { + goto tr2266 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st2586: + if p++; p == pe { + goto _test_eof2586 + } + st_case_2586: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2266 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr1049 + st2587: + if p++; p == pe { + goto _test_eof2587 + } + st_case_2587: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2266 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2588: + if p++; p == pe { + goto _test_eof2588 + } + st_case_2588: + switch data[p] { + case 144: + goto st2589 + case 145: + goto st2595 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st2614 + case 155: + goto st2619 + case 157: + goto st2621 + case 158: + goto st2628 + case 159: + goto st403 + } + goto tr420 + st2589: 
+ if p++; p == pe { + goto _test_eof2589 + } + st_case_2589: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2590 + case 138: + goto st313 + case 139: + goto st2591 + case 140: + goto st315 + case 141: + goto st2592 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st2593 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st2594 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st2590: + if p++; p == pe { + goto _test_eof2590 + } + st_case_2590: + if data[p] == 189 { + goto tr2266 + } + goto tr420 + st2591: + if p++; p == pe { + goto _test_eof2591 + } + st_case_2591: + if data[p] == 160 { + goto tr2266 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st2592: + if p++; p == pe { + goto _test_eof2592 + } + st_case_2592: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2593: + if p++; p == pe { + goto _test_eof2593 + } + st_case_2593: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2266 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto 
tr2266 + } + default: + goto tr2266 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2266 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2594: + if p++; p == pe { + goto _test_eof2594 + } + st_case_2594: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2595: + if p++; p == pe { + goto _test_eof2595 + } + st_case_2595: + switch data[p] { + case 128: + goto st2596 + case 129: + goto st2597 + case 130: + goto st2598 + case 131: + goto st1709 + case 132: + goto st2599 + case 133: + goto st2600 + case 134: + goto st2601 + case 135: + goto st2602 + case 136: + goto st2603 + case 138: + goto st348 + case 139: + goto st2604 + case 140: + goto st2605 + case 141: + goto st2606 + case 146: + goto st2607 + case 147: + goto st2608 + case 150: + goto st2609 + case 151: + goto st2610 + case 152: + goto st2607 + case 153: + goto st2611 + case 154: + goto st2612 + case 155: + goto st1724 + case 156: + goto st2613 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr420 + st2596: + if p++; p == pe { + goto _test_eof2596 + } + st_case_2596: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2266 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2597: + if p++; p == pe { + goto _test_eof2597 + } + st_case_2597: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr126 + } + goto tr2266 + st2598: + if p++; p == pe { + goto _test_eof2598 + } + st_case_2598: + switch { + case data[p] < 187: + if 131 <= 
data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2266 + st2599: + if p++; p == pe { + goto _test_eof2599 + } + st_case_2599: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2266 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2600: + if p++; p == pe { + goto _test_eof2600 + } + st_case_2600: + switch data[p] { + case 179: + goto tr2266 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st2601: + if p++; p == pe { + goto _test_eof2601 + } + st_case_2601: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2266 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2602: + if p++; p == pe { + goto _test_eof2602 + } + st_case_2602: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr2266 + st2603: + if p++; p == pe { + goto _test_eof2603 + } + st_case_2603: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2604: + if p++; p == pe { + goto _test_eof2604 + } + st_case_2604: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2266 + } + case data[p] > 175: + switch { + case data[p] > 185: 
+ if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr420 + } + goto tr148 + st2605: + if p++; p == pe { + goto _test_eof2605 + } + st_case_2605: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2266 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2266 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2606: + if p++; p == pe { + goto _test_eof2606 + } + st_case_2606: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2266 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2266 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2266 + } + default: + goto tr2266 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2266 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2266 + } + default: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2607: + if p++; p == pe { + goto _test_eof2607 + } + st_case_2607: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2608: + if p++; p == pe { + goto _test_eof2608 + } + st_case_2608: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr420 + } + 
goto tr2266 + st2609: + if p++; p == pe { + goto _test_eof2609 + } + st_case_2609: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2266 + } + default: + goto tr2266 + } + goto tr420 + st2610: + if p++; p == pe { + goto _test_eof2610 + } + st_case_2610: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr2266 + st2611: + if p++; p == pe { + goto _test_eof2611 + } + st_case_2611: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr126 + } + goto tr2266 + st2612: + if p++; p == pe { + goto _test_eof2612 + } + st_case_2612: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2613: + if p++; p == pe { + goto _test_eof2613 + } + st_case_2613: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr2266 + } + goto tr420 + st2614: + if p++; p == pe { + goto _test_eof2614 + } + st_case_2614: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st1728 + case 171: + goto st2615 + case 172: + goto st2616 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st2617 + case 190: + goto st2618 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st2615: + if p++; p == pe { + goto _test_eof2615 + } + st_case_2615: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2266 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st2616: + if p++; p == pe { + goto _test_eof2616 + } + st_case_2616: + switch { + case 
data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2266 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2617: + if p++; p == pe { + goto _test_eof2617 + } + st_case_2617: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2618: + if p++; p == pe { + goto _test_eof2618 + } + st_case_2618: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2266 + } + goto tr420 + st2619: + if p++; p == pe { + goto _test_eof2619 + } + st_case_2619: + switch data[p] { + case 128: + goto st1224 + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st2620 + } + goto tr420 + st2620: + if p++; p == pe { + goto _test_eof2620 + } + st_case_2620: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2266 + } + case data[p] >= 157: + goto tr2266 + } + default: + goto tr148 + } + goto tr420 + st2621: + if p++; p == pe { + goto _test_eof2621 + } + st_case_2621: + switch data[p] { + case 133: + goto st2622 + case 134: + goto st2623 + case 137: + goto st2624 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st2625 + case 169: + goto st2626 + case 170: + goto st2627 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st2622: + if p++; p == pe { + goto _test_eof2622 + } + st_case_2622: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2266 + } + case data[p] >= 165: + goto tr2266 + } + goto tr420 + st2623: 
+ if p++; p == pe { + goto _test_eof2623 + } + st_case_2623: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2266 + st2624: + if p++; p == pe { + goto _test_eof2624 + } + st_case_2624: + if 130 <= data[p] && data[p] <= 132 { + goto tr2266 + } + goto tr420 + st2625: + if p++; p == pe { + goto _test_eof2625 + } + st_case_2625: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2266 + } + case data[p] >= 128: + goto tr2266 + } + goto tr420 + st2626: + if p++; p == pe { + goto _test_eof2626 + } + st_case_2626: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr2266 + st2627: + if p++; p == pe { + goto _test_eof2627 + } + st_case_2627: + if data[p] == 132 { + goto tr2266 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2266 + } + case data[p] >= 155: + goto tr2266 + } + goto tr420 + st2628: + if p++; p == pe { + goto _test_eof2628 + } + st_case_2628: + switch data[p] { + case 160: + goto st147 + case 163: + goto st2629 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st2629: + if p++; p == pe { + goto _test_eof2629 + } + st_case_2629: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr2266 + } + goto tr148 + st2630: + if p++; p == pe { + goto _test_eof2630 + } + st_case_2630: + if data[p] == 160 { + goto st2631 + } + goto tr420 + st2631: + if p++; p == pe { + goto _test_eof2631 + } + st_case_2631: + switch data[p] { + case 128: + goto st2632 + case 129: + goto st2633 + case 132: + goto st2490 + case 135: + goto st2635 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2634 + } + goto tr420 + 
st2632: + if p++; p == pe { + goto _test_eof2632 + } + st_case_2632: + if data[p] == 129 { + goto tr2266 + } + if 160 <= data[p] { + goto tr2266 + } + goto tr420 + st2633: + if p++; p == pe { + goto _test_eof2633 + } + st_case_2633: + if 192 <= data[p] { + goto tr420 + } + goto tr2266 + st2634: + if p++; p == pe { + goto _test_eof2634 + } + st_case_2634: + goto tr2266 + st2635: + if p++; p == pe { + goto _test_eof2635 + } + st_case_2635: + if 176 <= data[p] { + goto tr420 + } + goto tr2266 +tr4464: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4877 + st4877: + if p++; p == pe { + goto _test_eof4877 + } + st_case_4877: +//line segment_words_prod.go:69822 + switch data[p] { + case 133: + goto tr3249 + case 170: + goto tr148 + case 173: + goto tr2395 + case 181: + goto tr148 + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr2395: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st4878 + st4878: + if p++; p == pe { + goto _test_eof4878 + } + st_case_4878: +//line segment_words_prod.go:69894 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 
222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 + st2636: + if p++; p == pe { + goto _test_eof2636 + } + st_case_2636: + if data[p] == 173 { + goto tr2395 + } + goto tr2394 + st2637: + if p++; p == pe { + goto _test_eof2637 + } + st_case_2637: + if 128 <= data[p] { + goto tr2395 + } + goto tr2 + st2638: + if p++; p == pe { + goto _test_eof2638 + } + st_case_2638: + if 176 <= data[p] { + goto tr2 + } + goto tr2395 + st2639: + if p++; p == pe { + goto _test_eof2639 + } + st_case_2639: + if 131 <= data[p] && data[p] <= 137 { + goto tr2395 + } + goto tr2394 + st2640: + if p++; p == pe { + goto _test_eof2640 + } + st_case_2640: + if data[p] == 191 { + goto tr2395 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr2395 + } + goto tr2394 + st2641: + if p++; p == pe { + goto _test_eof2641 + } + st_case_2641: + if data[p] == 135 { + goto tr2395 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr2395 + } + case data[p] >= 129: + goto tr2395 + } + goto tr2394 + st2642: + if p++; p == pe { + goto _test_eof2642 + } + st_case_2642: + if data[p] == 156 { + goto tr2395 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2394 + st2643: + if p++; p == pe { + goto _test_eof2643 + } + st_case_2643: + if data[p] == 176 { + goto tr2395 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr2395 + } + goto tr2394 + st2644: + if p++; p == pe { + goto _test_eof2644 + } + st_case_2644: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr2395 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 167: + goto tr2395 + } + default: + goto 
tr2395 + } + goto tr2394 + st2645: + if p++; p == pe { + goto _test_eof2645 + } + st_case_2645: + switch data[p] { + case 143: + goto tr2395 + case 145: + goto tr2395 + } + if 176 <= data[p] { + goto tr2395 + } + goto tr2394 + st2646: + if p++; p == pe { + goto _test_eof2646 + } + st_case_2646: + if 139 <= data[p] { + goto tr2394 + } + goto tr2395 + st2647: + if p++; p == pe { + goto _test_eof2647 + } + st_case_2647: + if 166 <= data[p] && data[p] <= 176 { + goto tr2395 + } + goto tr2394 + st2648: + if p++; p == pe { + goto _test_eof2648 + } + st_case_2648: + if 171 <= data[p] && data[p] <= 179 { + goto tr2395 + } + goto tr2394 + st2649: + if p++; p == pe { + goto _test_eof2649 + } + st_case_2649: + switch data[p] { + case 160: + goto st2650 + case 161: + goto st2651 + case 163: + goto st2652 + case 164: + goto st2653 + case 165: + goto st2654 + case 167: + goto st2656 + case 169: + goto st2657 + case 171: + goto st2658 + case 173: + goto st2660 + case 174: + goto st2661 + case 175: + goto st2662 + case 176: + goto st2663 + case 177: + goto st2664 + case 179: + goto st2665 + case 180: + goto st2666 + case 181: + goto st2667 + case 182: + goto st2668 + case 183: + goto st2669 + case 184: + goto st2670 + case 185: + goto st2671 + case 186: + goto st2672 + case 187: + goto st2673 + case 188: + goto st2674 + case 189: + goto st2675 + case 190: + goto st2676 + case 191: + goto st2677 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st2659 + } + case data[p] >= 166: + goto st2655 + } + goto tr2394 + st2650: + if p++; p == pe { + goto _test_eof2650 + } + st_case_2650: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr2395 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 165: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2651: + if p++; p == pe { + goto _test_eof2651 + } + st_case_2651: + if 153 <= data[p] 
&& data[p] <= 155 { + goto tr2395 + } + goto tr2 + st2652: + if p++; p == pe { + goto _test_eof2652 + } + st_case_2652: + if 163 <= data[p] { + goto tr2395 + } + goto tr2 + st2653: + if p++; p == pe { + goto _test_eof2653 + } + st_case_2653: + if data[p] == 189 { + goto tr2 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr2 + } + goto tr2395 + st2654: + if p++; p == pe { + goto _test_eof2654 + } + st_case_2654: + if data[p] == 144 { + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + goto tr2395 + st2655: + if p++; p == pe { + goto _test_eof2655 + } + st_case_2655: + if data[p] == 188 { + goto tr2395 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr2395 + } + case data[p] >= 129: + goto tr2395 + } + goto tr2 + st2656: + if p++; p == pe { + goto _test_eof2656 + } + st_case_2656: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2657: + if p++; p == pe { + goto _test_eof2657 + } + st_case_2657: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 131: + goto tr2 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2658: + if p++; p == pe { + goto _test_eof2658 + } + st_case_2658: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + goto tr2395 + st2659: + if p++; p == pe { + goto _test_eof2659 + } + 
st_case_2659: + if data[p] == 188 { + goto tr2395 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 129: + goto tr2395 + } + goto tr2 + st2660: + if p++; p == pe { + goto _test_eof2660 + } + st_case_2660: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr2395 + } + case data[p] >= 150: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2661: + if p++; p == pe { + goto _test_eof2661 + } + st_case_2661: + if data[p] == 130 { + goto tr2395 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + goto tr2 + st2662: + if p++; p == pe { + goto _test_eof2662 + } + st_case_2662: + if data[p] == 151 { + goto tr2395 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2663: + if p++; p == pe { + goto _test_eof2663 + } + st_case_2663: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2664: + if p++; p == pe { + goto _test_eof2664 + } + st_case_2664: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2665: + if p++; p == pe { + goto _test_eof2665 + } + st_case_2665: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr2395 + 
} + case data[p] >= 149: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2666: + if p++; p == pe { + goto _test_eof2666 + } + st_case_2666: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr2395 + } + case data[p] >= 129: + goto tr2395 + } + goto tr2 + st2667: + if p++; p == pe { + goto _test_eof2667 + } + st_case_2667: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2668: + if p++; p == pe { + goto _test_eof2668 + } + st_case_2668: + if 130 <= data[p] && data[p] <= 131 { + goto tr2395 + } + goto tr2 + st2669: + if p++; p == pe { + goto _test_eof2669 + } + st_case_2669: + switch data[p] { + case 138: + goto tr2395 + case 150: + goto tr2395 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr2395 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2670: + if p++; p == pe { + goto _test_eof2670 + } + st_case_2670: + if data[p] == 177 { + goto tr2395 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2395 + } + goto tr2 + st2671: + if p++; p == pe { + goto _test_eof2671 + } + st_case_2671: + if 135 <= data[p] && data[p] <= 142 { + goto tr2395 + } + goto tr2 + st2672: + if p++; p == pe { + goto _test_eof2672 + } + st_case_2672: + if data[p] == 177 { + goto tr2395 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2395 + } + case data[p] >= 180: + goto tr2395 + } + goto tr2 + st2673: + if p++; p == pe { + goto _test_eof2673 + } + st_case_2673: + if 136 <= data[p] && data[p] <= 141 { + goto tr2395 + } + goto tr2 + st2674: + if p++; p == pe { + goto _test_eof2674 + } + st_case_2674: + switch data[p] { + case 181: + goto tr2395 + case 183: + goto tr2395 + case 185: + goto tr2395 + } + switch { 
+ case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 152: + goto tr2395 + } + goto tr2 + st2675: + if p++; p == pe { + goto _test_eof2675 + } + st_case_2675: + if 177 <= data[p] && data[p] <= 191 { + goto tr2395 + } + goto tr2 + st2676: + if p++; p == pe { + goto _test_eof2676 + } + st_case_2676: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2395 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2395 + } + case data[p] >= 141: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2677: + if p++; p == pe { + goto _test_eof2677 + } + st_case_2677: + if data[p] == 134 { + goto tr2395 + } + goto tr2 + st2678: + if p++; p == pe { + goto _test_eof2678 + } + st_case_2678: + switch data[p] { + case 128: + goto st2679 + case 129: + goto st2680 + case 130: + goto st2681 + case 141: + goto st2682 + case 156: + goto st2683 + case 157: + goto st2684 + case 158: + goto st2685 + case 159: + goto st2686 + case 160: + goto st2687 + case 162: + goto st2688 + case 164: + goto st2689 + case 168: + goto st2690 + case 169: + goto st2691 + case 170: + goto st2692 + case 172: + goto st2693 + case 173: + goto st2694 + case 174: + goto st2695 + case 175: + goto st2696 + case 176: + goto st2697 + case 179: + goto st2698 + case 183: + goto st2699 + } + goto tr2394 + st2679: + if p++; p == pe { + goto _test_eof2679 + } + st_case_2679: + if 171 <= data[p] && data[p] <= 190 { + goto tr2395 + } + goto tr2 + st2680: + if p++; p == pe { + goto _test_eof2680 + } + st_case_2680: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr2395 + } + case data[p] >= 150: + goto tr2395 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] >= 167: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2681: + if p++; p == 
pe { + goto _test_eof2681 + } + st_case_2681: + if data[p] == 143 { + goto tr2395 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr2395 + } + case data[p] >= 130: + goto tr2395 + } + goto tr2 + st2682: + if p++; p == pe { + goto _test_eof2682 + } + st_case_2682: + if 157 <= data[p] && data[p] <= 159 { + goto tr2395 + } + goto tr2 + st2683: + if p++; p == pe { + goto _test_eof2683 + } + st_case_2683: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] >= 146: + goto tr2395 + } + goto tr2 + st2684: + if p++; p == pe { + goto _test_eof2684 + } + st_case_2684: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr2395 + } + case data[p] >= 146: + goto tr2395 + } + goto tr2 + st2685: + if p++; p == pe { + goto _test_eof2685 + } + st_case_2685: + if 180 <= data[p] { + goto tr2395 + } + goto tr2 + st2686: + if p++; p == pe { + goto _test_eof2686 + } + st_case_2686: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 148: + goto tr2 + } + goto tr2395 + st2687: + if p++; p == pe { + goto _test_eof2687 + } + st_case_2687: + if 139 <= data[p] && data[p] <= 142 { + goto tr2395 + } + goto tr2 + st2688: + if p++; p == pe { + goto _test_eof2688 + } + st_case_2688: + if data[p] == 169 { + goto tr2395 + } + goto tr2 + st2689: + if p++; p == pe { + goto _test_eof2689 + } + st_case_2689: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2395 + } + case data[p] >= 160: + goto tr2395 + } + goto tr2 + st2690: + if p++; p == pe { + goto _test_eof2690 + } + st_case_2690: + if 151 <= data[p] && data[p] <= 155 { + goto tr2395 + } + goto tr2 + st2691: + if p++; p == pe { + goto _test_eof2691 + } + st_case_2691: + if data[p] == 191 { + goto tr2395 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2395 + } + case data[p] >= 149: + goto tr2395 + } + goto tr2 + st2692: + if 
p++; p == pe { + goto _test_eof2692 + } + st_case_2692: + if 176 <= data[p] && data[p] <= 190 { + goto tr2395 + } + goto tr2 + st2693: + if p++; p == pe { + goto _test_eof2693 + } + st_case_2693: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2694: + if p++; p == pe { + goto _test_eof2694 + } + st_case_2694: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr2395 + st2695: + if p++; p == pe { + goto _test_eof2695 + } + st_case_2695: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2696: + if p++; p == pe { + goto _test_eof2696 + } + st_case_2696: + if 166 <= data[p] && data[p] <= 179 { + goto tr2395 + } + goto tr2 + st2697: + if p++; p == pe { + goto _test_eof2697 + } + st_case_2697: + if 164 <= data[p] && data[p] <= 183 { + goto tr2395 + } + goto tr2 + st2698: + if p++; p == pe { + goto _test_eof2698 + } + st_case_2698: + if data[p] == 173 { + goto tr2395 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr2395 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr2395 + } + case data[p] >= 178: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2699: + if p++; p == pe { + goto _test_eof2699 + } + st_case_2699: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2700: + if p++; p == pe { + goto _test_eof2700 + } + st_case_2700: + switch data[p] { + case 128: + goto st2701 + case 129: + goto st2702 + case 131: + goto st2703 + case 179: + goto st2704 + case 181: + goto st2705 + case 183: + goto st2706 + } + goto tr2394 + st2701: + if p++; p == pe { + goto _test_eof2701 + } + st_case_2701: + switch { + case data[p] > 143: + if 170 <= 
data[p] && data[p] <= 174 { + goto tr2395 + } + case data[p] >= 140: + goto tr2395 + } + goto tr2 + st2702: + if p++; p == pe { + goto _test_eof2702 + } + st_case_2702: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr2395 + } + case data[p] >= 160: + goto tr2395 + } + goto tr2 + st2703: + if p++; p == pe { + goto _test_eof2703 + } + st_case_2703: + if 144 <= data[p] && data[p] <= 176 { + goto tr2395 + } + goto tr2 + st2704: + if p++; p == pe { + goto _test_eof2704 + } + st_case_2704: + if 175 <= data[p] && data[p] <= 177 { + goto tr2395 + } + goto tr2 + st2705: + if p++; p == pe { + goto _test_eof2705 + } + st_case_2705: + if data[p] == 191 { + goto tr2395 + } + goto tr2 + st2706: + if p++; p == pe { + goto _test_eof2706 + } + st_case_2706: + if 160 <= data[p] && data[p] <= 191 { + goto tr2395 + } + goto tr2 + st2707: + if p++; p == pe { + goto _test_eof2707 + } + st_case_2707: + switch data[p] { + case 128: + goto st2708 + case 130: + goto st2709 + } + goto tr2394 + st2708: + if p++; p == pe { + goto _test_eof2708 + } + st_case_2708: + if 170 <= data[p] && data[p] <= 175 { + goto tr2395 + } + goto tr2 + st2709: + if p++; p == pe { + goto _test_eof2709 + } + st_case_2709: + if 153 <= data[p] && data[p] <= 154 { + goto tr2395 + } + goto tr2 + st2710: + if p++; p == pe { + goto _test_eof2710 + } + st_case_2710: + switch data[p] { + case 153: + goto st2711 + case 154: + goto st2712 + case 155: + goto st2713 + case 160: + goto st2714 + case 162: + goto st2715 + case 163: + goto st2716 + case 164: + goto st2717 + case 165: + goto st2718 + case 166: + goto st2719 + case 167: + goto st2720 + case 168: + goto st2721 + case 169: + goto st2722 + case 170: + goto st2723 + case 171: + goto st2724 + case 175: + goto st2725 + } + goto tr2394 + st2711: + if p++; p == pe { + goto _test_eof2711 + } + st_case_2711: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2395 + } + case data[p] >= 175: + goto tr2395 + } + 
goto tr2 + st2712: + if p++; p == pe { + goto _test_eof2712 + } + st_case_2712: + if 158 <= data[p] && data[p] <= 159 { + goto tr2395 + } + goto tr2 + st2713: + if p++; p == pe { + goto _test_eof2713 + } + st_case_2713: + if 176 <= data[p] && data[p] <= 177 { + goto tr2395 + } + goto tr2 + st2714: + if p++; p == pe { + goto _test_eof2714 + } + st_case_2714: + switch data[p] { + case 130: + goto tr2395 + case 134: + goto tr2395 + case 139: + goto tr2395 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr2395 + } + goto tr2 + st2715: + if p++; p == pe { + goto _test_eof2715 + } + st_case_2715: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2716: + if p++; p == pe { + goto _test_eof2716 + } + st_case_2716: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr2395 + st2717: + if p++; p == pe { + goto _test_eof2717 + } + st_case_2717: + if 166 <= data[p] && data[p] <= 173 { + goto tr2395 + } + goto tr2 + st2718: + if p++; p == pe { + goto _test_eof2718 + } + st_case_2718: + if 135 <= data[p] && data[p] <= 147 { + goto tr2395 + } + goto tr2 + st2719: + if p++; p == pe { + goto _test_eof2719 + } + st_case_2719: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2720: + if p++; p == pe { + goto _test_eof2720 + } + st_case_2720: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr2395 + st2721: + if p++; p == pe { + goto _test_eof2721 + } + st_case_2721: + if 169 <= data[p] && data[p] <= 182 { + goto tr2395 + } + goto tr2 + st2722: + if p++; p == pe { + goto _test_eof2722 + } + st_case_2722: + if data[p] == 131 { + goto tr2395 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr2395 + } + case data[p] >= 140: + goto tr2395 + } + goto tr2 + st2723: + if 
p++; p == pe { + goto _test_eof2723 + } + st_case_2723: + if data[p] == 176 { + goto tr2395 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2724: + if p++; p == pe { + goto _test_eof2724 + } + st_case_2724: + if data[p] == 129 { + goto tr2395 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr2395 + } + case data[p] >= 171: + goto tr2395 + } + goto tr2 + st2725: + if p++; p == pe { + goto _test_eof2725 + } + st_case_2725: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 163: + goto tr2395 + } + goto tr2 + st2726: + if p++; p == pe { + goto _test_eof2726 + } + st_case_2726: + switch data[p] { + case 172: + goto st2727 + case 184: + goto st2728 + case 187: + goto st2705 + case 190: + goto st2712 + case 191: + goto st2729 + } + goto tr2394 + st2727: + if p++; p == pe { + goto _test_eof2727 + } + st_case_2727: + if data[p] == 158 { + goto tr2395 + } + goto tr2 + st2728: + if p++; p == pe { + goto _test_eof2728 + } + st_case_2728: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2729: + if p++; p == pe { + goto _test_eof2729 + } + st_case_2729: + if 185 <= data[p] && data[p] <= 187 { + goto tr2395 + } + goto tr2 + st2730: + if p++; p == pe { + goto _test_eof2730 + } + st_case_2730: + switch data[p] { + case 144: + goto st2731 + case 145: + goto st2737 + case 150: + goto st2756 + case 155: + goto st2761 + case 157: + goto st2763 + case 158: + goto st2770 + } + goto tr2394 + st2731: + if p++; p == pe { + goto _test_eof2731 + } + st_case_2731: + switch data[p] { + case 135: + goto st2732 + case 139: + goto st2733 + case 141: + goto st2734 + case 168: + goto st2735 + case 171: + goto st2736 + } + goto tr2 + st2732: + 
if p++; p == pe { + goto _test_eof2732 + } + st_case_2732: + if data[p] == 189 { + goto tr2395 + } + goto tr2 + st2733: + if p++; p == pe { + goto _test_eof2733 + } + st_case_2733: + if data[p] == 160 { + goto tr2395 + } + goto tr2 + st2734: + if p++; p == pe { + goto _test_eof2734 + } + st_case_2734: + if 182 <= data[p] && data[p] <= 186 { + goto tr2395 + } + goto tr2 + st2735: + if p++; p == pe { + goto _test_eof2735 + } + st_case_2735: + if data[p] == 191 { + goto tr2395 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr2395 + } + case data[p] >= 140: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2736: + if p++; p == pe { + goto _test_eof2736 + } + st_case_2736: + if 165 <= data[p] && data[p] <= 166 { + goto tr2395 + } + goto tr2 + st2737: + if p++; p == pe { + goto _test_eof2737 + } + st_case_2737: + switch data[p] { + case 128: + goto st2738 + case 129: + goto st2739 + case 130: + goto st2740 + case 132: + goto st2741 + case 133: + goto st2742 + case 134: + goto st2743 + case 135: + goto st2744 + case 136: + goto st2745 + case 139: + goto st2746 + case 140: + goto st2747 + case 141: + goto st2748 + case 146: + goto st2749 + case 147: + goto st2750 + case 150: + goto st2751 + case 151: + goto st2752 + case 152: + goto st2749 + case 153: + goto st2753 + case 154: + goto st2754 + case 156: + goto st2755 + } + goto tr2 + st2738: + if p++; p == pe { + goto _test_eof2738 + } + st_case_2738: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2739: + if p++; p == pe { + goto _test_eof2739 + } + st_case_2739: + if 135 <= data[p] && data[p] <= 190 { + goto tr2 + } + goto tr2395 + st2740: + if p++; p == pe { + goto _test_eof2740 + } + st_case_2740: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr2 + 
} + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2741: + if p++; p == pe { + goto _test_eof2741 + } + st_case_2741: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2742: + if p++; p == pe { + goto _test_eof2742 + } + st_case_2742: + if data[p] == 179 { + goto tr2395 + } + goto tr2 + st2743: + if p++; p == pe { + goto _test_eof2743 + } + st_case_2743: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2744: + if p++; p == pe { + goto _test_eof2744 + } + st_case_2744: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr2395 + st2745: + if p++; p == pe { + goto _test_eof2745 + } + st_case_2745: + if 172 <= data[p] && data[p] <= 183 { + goto tr2395 + } + goto tr2 + st2746: + if p++; p == pe { + goto _test_eof2746 + } + st_case_2746: + if 159 <= data[p] && data[p] <= 170 { + goto tr2395 + } + goto tr2 + st2747: + if p++; p == pe { + goto _test_eof2747 + } + st_case_2747: + if data[p] == 188 { + goto tr2395 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2748: + if p++; p == pe { + goto _test_eof2748 + } + st_case_2748: + if data[p] == 151 { + goto tr2395 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2395 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2395 + } + default: + goto tr2395 + } + default: + goto tr2395 + } + goto tr2 + st2749: + if p++; p == pe { + goto _test_eof2749 + } + st_case_2749: + if 176 <= data[p] { + goto 
tr2395 + } + goto tr2 + st2750: + if p++; p == pe { + goto _test_eof2750 + } + st_case_2750: + if 132 <= data[p] { + goto tr2 + } + goto tr2395 + st2751: + if p++; p == pe { + goto _test_eof2751 + } + st_case_2751: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr2395 + } + case data[p] >= 175: + goto tr2395 + } + goto tr2 + st2752: + if p++; p == pe { + goto _test_eof2752 + } + st_case_2752: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr2395 + st2753: + if p++; p == pe { + goto _test_eof2753 + } + st_case_2753: + if 129 <= data[p] { + goto tr2 + } + goto tr2395 + st2754: + if p++; p == pe { + goto _test_eof2754 + } + st_case_2754: + if 171 <= data[p] && data[p] <= 183 { + goto tr2395 + } + goto tr2 + st2755: + if p++; p == pe { + goto _test_eof2755 + } + st_case_2755: + if 157 <= data[p] && data[p] <= 171 { + goto tr2395 + } + goto tr2 + st2756: + if p++; p == pe { + goto _test_eof2756 + } + st_case_2756: + switch data[p] { + case 171: + goto st2757 + case 172: + goto st2758 + case 189: + goto st2759 + case 190: + goto st2760 + } + goto tr2 + st2757: + if p++; p == pe { + goto _test_eof2757 + } + st_case_2757: + if 176 <= data[p] && data[p] <= 180 { + goto tr2395 + } + goto tr2 + st2758: + if p++; p == pe { + goto _test_eof2758 + } + st_case_2758: + if 176 <= data[p] && data[p] <= 182 { + goto tr2395 + } + goto tr2 + st2759: + if p++; p == pe { + goto _test_eof2759 + } + st_case_2759: + if 145 <= data[p] && data[p] <= 190 { + goto tr2395 + } + goto tr2 + st2760: + if p++; p == pe { + goto _test_eof2760 + } + st_case_2760: + if 143 <= data[p] && data[p] <= 146 { + goto tr2395 + } + goto tr2 + st2761: + if p++; p == pe { + goto _test_eof2761 + } + st_case_2761: + if data[p] == 178 { + goto st2762 + } + goto tr2 + st2762: + if p++; p == pe { + goto _test_eof2762 + } + st_case_2762: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2395 + } + case 
data[p] >= 157: + goto tr2395 + } + goto tr2 + st2763: + if p++; p == pe { + goto _test_eof2763 + } + st_case_2763: + switch data[p] { + case 133: + goto st2764 + case 134: + goto st2765 + case 137: + goto st2766 + case 168: + goto st2767 + case 169: + goto st2768 + case 170: + goto st2769 + } + goto tr2 + st2764: + if p++; p == pe { + goto _test_eof2764 + } + st_case_2764: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2395 + } + case data[p] >= 165: + goto tr2395 + } + goto tr2 + st2765: + if p++; p == pe { + goto _test_eof2765 + } + st_case_2765: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2395 + st2766: + if p++; p == pe { + goto _test_eof2766 + } + st_case_2766: + if 130 <= data[p] && data[p] <= 132 { + goto tr2395 + } + goto tr2 + st2767: + if p++; p == pe { + goto _test_eof2767 + } + st_case_2767: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr2 + st2768: + if p++; p == pe { + goto _test_eof2768 + } + st_case_2768: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto tr2395 + st2769: + if p++; p == pe { + goto _test_eof2769 + } + st_case_2769: + if data[p] == 132 { + goto tr2395 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2395 + } + case data[p] >= 155: + goto tr2395 + } + goto tr2 + st2770: + if p++; p == pe { + goto _test_eof2770 + } + st_case_2770: + if data[p] == 163 { + goto st2771 + } + goto tr2 + st2771: + if p++; p == pe { + goto _test_eof2771 + } + st_case_2771: + if 144 <= data[p] && data[p] <= 150 { + goto tr2395 + } + goto tr2 + st2772: + if p++; p == pe { + goto _test_eof2772 + } + st_case_2772: + if data[p] == 160 { + goto st2773 + } + goto tr2394 + st2773: + if p++; p == pe { + goto _test_eof2773 + } + 
st_case_2773: + switch data[p] { + case 128: + goto st2774 + case 129: + goto st2775 + case 132: + goto st2637 + case 135: + goto st2638 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2776 + } + goto tr2 + st2774: + if p++; p == pe { + goto _test_eof2774 + } + st_case_2774: + if data[p] == 129 { + goto tr2395 + } + if 160 <= data[p] { + goto tr2395 + } + goto tr2 + st2775: + if p++; p == pe { + goto _test_eof2775 + } + st_case_2775: + if 192 <= data[p] { + goto tr2 + } + goto tr2395 + st2776: + if p++; p == pe { + goto _test_eof2776 + } + st_case_2776: + goto tr2395 +tr4805: +//line segment_words.rl:72 + + endPos = p + + goto st4879 +tr4465: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4879 + st4879: + if p++; p == pe { + goto _test_eof4879 + } + st_case_4879: +//line segment_words_prod.go:72049 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] < 152: + if 128 <= data[p] && data[p] <= 150 { + goto tr148 + } + case data[p] > 182: + if 184 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr4499 +tr4783: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4880 + st4880: + if p++; p == pe { + goto _test_eof4880 + } + st_case_4880: +//line segment_words_prod.go:72123 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 173: 
+ goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4784: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4881 + st4881: + if p++; p == pe { + goto _test_eof4881 + } + st_case_4881: +//line segment_words_prod.go:72235 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + 
goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr4562 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 59: + goto tr4562 + } + default: + goto tr421 + } + case data[p] > 96: + switch { + case data[p] < 123: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + case data[p] > 127: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto tr4830 + } + case data[p] >= 196: + goto tr4806 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr4804: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4882 + st4882: + if p++; p == pe { + goto _test_eof4882 + } + st_case_4882: +//line segment_words_prod.go:72363 + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr148 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4806: +//line segment_words.rl:72 + + endPos = p + + goto st4883 +tr4466: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4883 + st4883: + if p++; p == pe { 
+ goto _test_eof4883 + } + st_case_4883: +//line segment_words_prod.go:72440 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + goto tr148 +tr4785: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4884 + st4884: + if p++; p == pe { + goto _test_eof4884 + } + st_case_4884: +//line segment_words_prod.go:72502 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 176: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + 
default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2518: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4885 + st4885: + if p++; p == pe { + goto _test_eof4885 + } + st_case_4885: +//line segment_words_prod.go:72625 + switch data[p] { + case 194: + goto st2777 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st2778 + case 205: + goto st2779 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st2780 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2781 + case 215: + goto st2782 + case 216: + goto st2783 + case 217: + goto st2784 + case 219: + goto st2785 + case 220: + goto st2786 + case 221: + goto st2787 + case 222: + goto st2788 + case 223: + goto st2789 + case 224: + goto st2790 + case 225: + goto st2822 + case 226: + goto st2844 + case 227: + goto st2851 + case 234: + goto st2854 + case 237: + goto st287 + case 239: + goto st2870 + case 240: + goto st2876 + case 243: + goto st2918 + } + switch { + case data[p] < 97: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4499 + st2777: + if p++; p == pe { + goto _test_eof2777 + } + st_case_2777: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2518 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr0 + st2778: + if p++; p == pe { + goto _test_eof2778 + } + st_case_2778: + if 128 <= data[p] { + goto tr2518 + } + goto tr0 + st2779: + 
if p++; p == pe { + goto _test_eof2779 + } + st_case_2779: + switch data[p] { + case 181: + goto tr0 + case 190: + goto tr0 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr0 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr0 + } + goto tr2518 + st2780: + if p++; p == pe { + goto _test_eof2780 + } + st_case_2780: + if data[p] == 130 { + goto tr0 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2518 + } + goto tr148 + st2781: + if p++; p == pe { + goto _test_eof2781 + } + st_case_2781: + if data[p] == 190 { + goto tr0 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr0 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr0 + } + default: + goto tr2518 + } + goto tr148 + st2782: + if p++; p == pe { + goto _test_eof2782 + } + st_case_2782: + if data[p] == 135 { + goto tr2518 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2518 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] >= 144: + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2783: + if p++; p == pe { + goto _test_eof2783 + } + st_case_2783: + if data[p] == 156 { + goto tr2518 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2518 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2784: + if p++; p == pe { + goto _test_eof2784 + } + st_case_2784: + if data[p] == 176 { + goto tr2518 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + if 174 <= data[p] { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2785: + if p++; p == pe { + goto _test_eof2785 + } + st_case_2785: + switch data[p] { + case 148: + goto tr0 + 
case 158: + goto tr0 + case 169: + goto tr0 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2518 + } + case data[p] >= 150: + goto tr2518 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr0 + } + case data[p] >= 189: + goto tr0 + } + default: + goto tr0 + } + goto tr148 + st2786: + if p++; p == pe { + goto _test_eof2786 + } + st_case_2786: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2518 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2787: + if p++; p == pe { + goto _test_eof2787 + } + st_case_2787: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr0 + } + goto tr2518 + st2788: + if p++; p == pe { + goto _test_eof2788 + } + st_case_2788: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr0 + } + case data[p] >= 166: + goto tr2518 + } + goto tr148 + st2789: + if p++; p == pe { + goto _test_eof2789 + } + st_case_2789: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 171: + if 138 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2790: + if p++; p == pe { + goto _test_eof2790 + } + st_case_2790: + switch data[p] { + case 160: + goto st2791 + case 161: + goto st2792 + case 162: + goto st168 + case 163: + goto st2793 + case 164: + goto st2794 + case 165: + goto st2795 + case 166: + goto st2796 + case 167: + goto st2797 + case 168: + goto st2798 + case 169: + goto st2799 + case 170: + goto st2800 + case 171: + goto st2801 + case 172: + goto st2802 + case 173: + goto st2803 + case 174: + goto st2804 + case 175: + goto st2805 + case 176: + goto st2806 + case 177: + goto st2807 + case 178: + goto st2808 + case 179: + goto 
st2809 + case 180: + goto st2810 + case 181: + goto st2811 + case 182: + goto st2812 + case 183: + goto st2813 + case 184: + goto st2814 + case 185: + goto st2815 + case 186: + goto st2816 + case 187: + goto st2817 + case 188: + goto st2818 + case 189: + goto st2819 + case 190: + goto st2820 + case 191: + goto st2821 + } + goto tr0 + st2791: + if p++; p == pe { + goto _test_eof2791 + } + st_case_2791: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2792: + if p++; p == pe { + goto _test_eof2792 + } + st_case_2792: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2793: + if p++; p == pe { + goto _test_eof2793 + } + st_case_2793: + if 163 <= data[p] { + goto tr2518 + } + goto tr0 + st2794: + if p++; p == pe { + goto _test_eof2794 + } + st_case_2794: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2518 + st2795: + if p++; p == pe { + goto _test_eof2795 + } + st_case_2795: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 176: + if 177 <= data[p] { + goto tr148 + } + default: + goto tr0 + } + goto tr2518 + st2796: + if p++; p == pe { + goto _test_eof2796 + } + st_case_2796: + switch data[p] { + case 132: + goto tr0 + case 169: + goto tr0 + case 177: + goto tr0 + case 188: + goto tr2518 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr0 + } + case data[p] >= 129: + goto tr2518 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr0 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2518 + } + default: + 
goto tr0 + } + default: + goto tr0 + } + goto tr148 + st2797: + if p++; p == pe { + goto _test_eof2797 + } + st_case_2797: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr0 + } + switch { + case data[p] < 152: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr0 + } + case data[p] > 138: + if 143 <= data[p] && data[p] <= 150 { + goto tr0 + } + default: + goto tr0 + } + case data[p] > 155: + switch { + case data[p] < 164: + if 156 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr0 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2798: + if p++; p == pe { + goto _test_eof2798 + } + st_case_2798: + if data[p] == 188 { + goto tr2518 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2518 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2799: + if p++; p == pe { + goto _test_eof2799 + } + st_case_2799: + if data[p] == 157 { + goto tr0 + } + switch { + case data[p] < 146: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr0 + } + case data[p] > 138: + if 142 <= data[p] && data[p] <= 144 { + goto tr0 + } + default: + goto tr0 + } + case data[p] > 152: + switch { + case data[p] < 159: + if 153 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr0 + } + case 
data[p] >= 178: + goto tr148 + } + default: + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2800: + if p++; p == pe { + goto _test_eof2800 + } + st_case_2800: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2518 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2801: + if p++; p == pe { + goto _test_eof2801 + } + st_case_2801: + switch data[p] { + case 134: + goto tr0 + case 138: + goto tr0 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 160: + if 142 <= data[p] && data[p] <= 159 { + goto tr0 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + goto tr2518 + st2802: + if p++; p == pe { + goto _test_eof2802 + } + st_case_2802: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2803: + if p++; p == pe { + goto _test_eof2803 + } + st_case_2803: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= 
data[p] && data[p] <= 132 { + goto tr2518 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2518 + } + default: + goto tr2518 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + if 162 <= data[p] && data[p] <= 163 { + goto tr2518 + } + default: + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2804: + if p++; p == pe { + goto _test_eof2804 + } + st_case_2804: + switch data[p] { + case 130: + goto tr2518 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2805: + if p++; p == pe { + goto _test_eof2805 + } + st_case_2805: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2518 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2518 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2806: + if p++; p == pe { + goto _test_eof2806 + } + st_case_2806: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2518 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= 
data[p] { + goto tr2518 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2807: + if p++; p == pe { + goto _test_eof2807 + } + st_case_2807: + switch data[p] { + case 133: + goto tr0 + case 137: + goto tr0 + case 151: + goto tr0 + } + switch { + case data[p] < 155: + switch { + case data[p] > 148: + if 152 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 142: + goto tr0 + } + case data[p] > 159: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr0 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr0 + } + goto tr2518 + st2808: + if p++; p == pe { + goto _test_eof2808 + } + st_case_2808: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2518 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2809: + if p++; p == pe { + goto _test_eof2809 + } + st_case_2809: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2518 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2518 + } + default: + goto tr2518 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + default: + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2810: + if p++; p == pe { + goto _test_eof2810 + } + st_case_2810: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && 
data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2518 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2518 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2811: + if p++; p == pe { + goto _test_eof2811 + } + st_case_2811: + switch data[p] { + case 133: + goto tr0 + case 137: + goto tr0 + case 142: + goto tr148 + } + switch { + case data[p] < 159: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 158 { + goto tr0 + } + case data[p] >= 143: + goto tr0 + } + case data[p] > 161: + switch { + case data[p] < 186: + if 164 <= data[p] && data[p] <= 185 { + goto tr0 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2518 + st2812: + if p++; p == pe { + goto _test_eof2812 + } + st_case_2812: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2813: + if p++; p == pe { + goto _test_eof2813 + } + st_case_2813: + switch data[p] { + case 138: + goto tr2518 + case 150: + goto tr2518 + } + switch { + case data[p] < 143: + if 128 <= data[p] && data[p] <= 134 { + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr2518 + } + case data[p] >= 152: + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2814: + if p++; p == pe { + goto _test_eof2814 + } + st_case_2814: + if data[p] == 177 { + goto tr2518 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2518 + } + goto tr0 + st2815: + if p++; p == pe { + goto _test_eof2815 + } + st_case_2815: + if 135 <= data[p] && data[p] <= 142 { + goto tr2518 + } + goto tr0 + st2816: + if p++; p == pe { + goto 
_test_eof2816 + } + st_case_2816: + if data[p] == 177 { + goto tr2518 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2518 + } + case data[p] >= 180: + goto tr2518 + } + goto tr0 + st2817: + if p++; p == pe { + goto _test_eof2817 + } + st_case_2817: + if 136 <= data[p] && data[p] <= 141 { + goto tr2518 + } + goto tr0 + st2818: + if p++; p == pe { + goto _test_eof2818 + } + st_case_2818: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2518 + case 183: + goto tr2518 + case 185: + goto tr2518 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 152: + goto tr2518 + } + goto tr0 + st2819: + if p++; p == pe { + goto _test_eof2819 + } + st_case_2819: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2820: + if p++; p == pe { + goto _test_eof2820 + } + st_case_2820: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2518 + } + case data[p] >= 128: + goto tr2518 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2518 + } + case data[p] >= 141: + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2821: + if p++; p == pe { + goto _test_eof2821 + } + st_case_2821: + if data[p] == 134 { + goto tr2518 + } + goto tr0 + st2822: + if p++; p == pe { + goto _test_eof2822 + } + st_case_2822: + switch data[p] { + case 128: + goto st2823 + case 129: + goto st2824 + case 130: + goto st2825 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2826 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + 
case 156: + goto st2827 + case 157: + goto st2828 + case 158: + goto st2829 + case 159: + goto st2830 + case 160: + goto st2831 + case 161: + goto st219 + case 162: + goto st2832 + case 163: + goto st221 + case 164: + goto st2833 + case 168: + goto st2834 + case 169: + goto st2835 + case 170: + goto st2836 + case 172: + goto st2837 + case 173: + goto st2838 + case 174: + goto st2839 + case 175: + goto st2840 + case 176: + goto st2841 + case 177: + goto st231 + case 179: + goto st2842 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2843 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr0 + st2823: + if p++; p == pe { + goto _test_eof2823 + } + st_case_2823: + if 171 <= data[p] && data[p] <= 190 { + goto tr2518 + } + goto tr0 + st2824: + if p++; p == pe { + goto _test_eof2824 + } + st_case_2824: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr2518 + } + case data[p] >= 150: + goto tr2518 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2518 + } + case data[p] >= 167: + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2825: + if p++; p == pe { + goto _test_eof2825 + } + st_case_2825: + if data[p] == 143 { + goto tr2518 + } + switch { + case data[p] < 154: + if 130 <= data[p] && data[p] <= 141 { + goto tr2518 + } + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2826: + if p++; p == pe { + goto _test_eof2826 + } + st_case_2826: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr0 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr0 + } + default: + goto tr2518 + } + 
goto tr148 + st2827: + if p++; p == pe { + goto _test_eof2827 + } + st_case_2827: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2518 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2828: + if p++; p == pe { + goto _test_eof2828 + } + st_case_2828: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2518 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2829: + if p++; p == pe { + goto _test_eof2829 + } + st_case_2829: + if 180 <= data[p] { + goto tr2518 + } + goto tr0 + st2830: + if p++; p == pe { + goto _test_eof2830 + } + st_case_2830: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr0 + } + case data[p] >= 148: + goto tr0 + } + goto tr2518 + st2831: + if p++; p == pe { + goto _test_eof2831 + } + st_case_2831: + switch { + case data[p] > 142: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr2518 + } + goto tr0 + st2832: + if p++; p == pe { + goto _test_eof2832 + } + st_case_2832: + if data[p] == 169 { + goto tr2518 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2833: + if p++; p == pe { + goto _test_eof2833 + } + st_case_2833: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2834: + if p++; p == pe { + goto _test_eof2834 + } + st_case_2834: + switch { + case data[p] > 150: + if 
151 <= data[p] && data[p] <= 155 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2835: + if p++; p == pe { + goto _test_eof2835 + } + st_case_2835: + if data[p] == 191 { + goto tr2518 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2518 + } + case data[p] >= 149: + goto tr2518 + } + goto tr0 + st2836: + if p++; p == pe { + goto _test_eof2836 + } + st_case_2836: + if 176 <= data[p] && data[p] <= 190 { + goto tr2518 + } + goto tr0 + st2837: + if p++; p == pe { + goto _test_eof2837 + } + st_case_2837: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2518 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2838: + if p++; p == pe { + goto _test_eof2838 + } + st_case_2838: + switch { + case data[p] < 140: + if 133 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 170: + if 180 <= data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2839: + if p++; p == pe { + goto _test_eof2839 + } + st_case_2839: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2518 + } + case data[p] > 173: + switch { + case data[p] > 175: + if 186 <= data[p] { + goto tr148 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2840: + if p++; p == pe { + goto _test_eof2840 + } + st_case_2840: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr0 + } + case data[p] >= 166: + goto tr2518 + } + goto tr148 + st2841: + if p++; p == pe { + goto _test_eof2841 + } + st_case_2841: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2842: + if p++; p == pe { + goto _test_eof2842 + } + st_case_2842: + if data[p] == 173 { + goto tr2518 + } + switch { + case data[p] < 169: + switch { + case 
data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2518 + } + case data[p] >= 144: + goto tr2518 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2518 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2518 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2843: + if p++; p == pe { + goto _test_eof2843 + } + st_case_2843: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 128: + goto tr2518 + } + goto tr0 + st2844: + if p++; p == pe { + goto _test_eof2844 + } + st_case_2844: + switch data[p] { + case 128: + goto st2845 + case 129: + goto st2846 + case 130: + goto st241 + case 131: + goto st2847 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2848 + case 180: + goto st251 + case 181: + goto st2849 + case 182: + goto st253 + case 183: + goto st2850 + case 184: + goto st255 + } + goto tr0 + st2845: + if p++; p == pe { + goto _test_eof2845 + } + st_case_2845: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr2518 + } + case data[p] >= 140: + goto tr2518 + } + goto tr0 + st2846: + if p++; p == pe { + goto _test_eof2846 + } + st_case_2846: + switch data[p] { + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr2518 + } + case data[p] >= 160: + goto tr2518 + } + goto tr0 + st2847: + if p++; p == pe { + goto _test_eof2847 + } + st_case_2847: + if 144 <= data[p] && data[p] <= 176 { + goto tr2518 + } + goto tr0 + st2848: + if p++; p == pe { + goto _test_eof2848 + } + st_case_2848: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr0 + } + case data[p] > 177: + if 180 <= data[p] { + goto 
tr0 + } + default: + goto tr2518 + } + goto tr148 + st2849: + if p++; p == pe { + goto _test_eof2849 + } + st_case_2849: + if data[p] == 191 { + goto tr2518 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr0 + } + case data[p] >= 168: + goto tr0 + } + goto tr148 + st2850: + if p++; p == pe { + goto _test_eof2850 + } + st_case_2850: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2851: + if p++; p == pe { + goto _test_eof2851 + } + st_case_2851: + switch data[p] { + case 128: + goto st2852 + case 130: + goto st2853 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr0 + st2852: + if p++; p == pe { + goto _test_eof2852 + } + st_case_2852: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr2518 + } + goto tr0 + st2853: + if p++; p == pe { + goto _test_eof2853 + } + st_case_2853: + if 153 <= data[p] && data[p] <= 154 { + goto tr2518 + } + goto tr0 + st2854: + if p++; p == pe { + goto _test_eof2854 + } + st_case_2854: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st264 + case 153: + goto st2855 + case 154: + goto st2856 + case 155: + goto st2857 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st2858 + case 161: + goto st272 + case 162: + goto st2859 + case 163: + goto st2860 + case 164: + goto st2861 + case 165: + goto st2862 + case 166: + goto st2863 + case 167: + goto st2864 + case 168: + goto st2865 + case 169: + goto st2866 + case 170: + goto st2867 + case 171: + 
goto st2868 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st2869 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr0 + st2855: + if p++; p == pe { + goto _test_eof2855 + } + st_case_2855: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2856: + if p++; p == pe { + goto _test_eof2856 + } + st_case_2856: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2857: + if p++; p == pe { + goto _test_eof2857 + } + st_case_2857: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr0 + } + case data[p] >= 176: + goto tr2518 + } + goto tr148 + st2858: + if p++; p == pe { + goto _test_eof2858 + } + st_case_2858: + switch data[p] { + case 130: + goto tr2518 + case 134: + goto tr2518 + case 139: + goto tr2518 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr0 + } + case data[p] >= 163: + goto tr2518 + } + goto tr148 + st2859: + if p++; p == pe { + goto _test_eof2859 + } + st_case_2859: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2518 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2860: + if p++; p == pe { + goto _test_eof2860 + } + st_case_2860: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 178: + if 133 <= data[p] && data[p] <= 159 { + goto tr0 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + goto tr2518 + st2861: + if p++; p == pe { + goto _test_eof2861 + } + st_case_2861: + switch { + case data[p] < 166: + if 138 <= data[p] && data[p] <= 
165 { + goto tr148 + } + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2862: + if p++; p == pe { + goto _test_eof2862 + } + st_case_2862: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2518 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr148 + st2863: + if p++; p == pe { + goto _test_eof2863 + } + st_case_2863: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2864: + if p++; p == pe { + goto _test_eof2864 + } + st_case_2864: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr0 + } + case data[p] >= 129: + goto tr0 + } + goto tr2518 + st2865: + if p++; p == pe { + goto _test_eof2865 + } + st_case_2865: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2866: + if p++; p == pe { + goto _test_eof2866 + } + st_case_2866: + if data[p] == 131 { + goto tr2518 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2867: + if p++; p == pe { + goto _test_eof2867 + } + st_case_2867: + if data[p] == 176 { + goto tr2518 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2518 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2868: + if p++; p == pe { + goto _test_eof2868 + } + st_case_2868: + if data[p] == 129 { + goto tr2518 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 
<= data[p] && data[p] <= 182 { + goto tr2518 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2518 + } + goto tr0 + st2869: + if p++; p == pe { + goto _test_eof2869 + } + st_case_2869: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2870: + if p++; p == pe { + goto _test_eof2870 + } + st_case_2870: + switch data[p] { + case 172: + goto st2871 + case 173: + goto st292 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st2872 + case 185: + goto st300 + case 187: + goto st2873 + case 188: + goto st302 + case 189: + goto st303 + case 190: + goto st2874 + case 191: + goto st2875 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr0 + st2871: + if p++; p == pe { + goto _test_eof2871 + } + st_case_2871: + switch data[p] { + case 158: + goto tr2518 + case 190: + goto tr148 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2872: + if p++; p == pe { + goto _test_eof2872 + } + st_case_2872: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr2518 + } + case data[p] >= 128: + goto tr2518 + } + goto tr0 + st2873: + if p++; p == pe { + goto _test_eof2873 + } + st_case_2873: + if data[p] == 191 { + goto tr2518 + } + if 189 <= data[p] { + goto tr0 + } + goto tr148 + st2874: + if p++; p == pe { + goto _test_eof2874 + } + st_case_2874: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case 
data[p] >= 158: + goto tr2518 + } + goto tr0 + st2875: + if p++; p == pe { + goto _test_eof2875 + } + st_case_2875: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2518 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2876: + if p++; p == pe { + goto _test_eof2876 + } + st_case_2876: + switch data[p] { + case 144: + goto st2877 + case 145: + goto st2883 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st2902 + case 155: + goto st2907 + case 157: + goto st2909 + case 158: + goto st2916 + case 159: + goto st403 + } + goto tr0 + st2877: + if p++; p == pe { + goto _test_eof2877 + } + st_case_2877: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2878 + case 138: + goto st313 + case 139: + goto st2879 + case 140: + goto st315 + case 141: + goto st2880 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st319 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st2881 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st2882 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr0 + st2878: + if p++; p == pe { + goto _test_eof2878 + 
} + st_case_2878: + if data[p] == 189 { + goto tr2518 + } + goto tr0 + st2879: + if p++; p == pe { + goto _test_eof2879 + } + st_case_2879: + if data[p] == 160 { + goto tr2518 + } + if 145 <= data[p] { + goto tr0 + } + goto tr148 + st2880: + if p++; p == pe { + goto _test_eof2880 + } + st_case_2880: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr0 + } + default: + goto tr2518 + } + goto tr148 + st2881: + if p++; p == pe { + goto _test_eof2881 + } + st_case_2881: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2518 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2518 + } + default: + goto tr2518 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2518 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2882: + if p++; p == pe { + goto _test_eof2882 + } + st_case_2882: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2883: + if p++; p == pe { + goto _test_eof2883 + } + st_case_2883: + switch data[p] { + case 128: + goto st2884 + case 129: + goto st2885 + case 130: + goto st2886 + case 131: + goto st342 + case 132: + goto st2887 + case 133: + goto st2888 + case 134: + goto st2889 + case 135: + goto st2890 + case 136: + goto st2891 + case 138: + goto st348 + case 139: + goto st2892 + case 140: + goto st2893 + case 141: + goto st2894 + case 146: + goto st2895 + case 147: + goto st2896 + case 150: + goto st2897 + case 151: + goto st2898 + case 152: + goto st2895 + case 153: + goto st2899 + case 
154: + goto st2900 + case 156: + goto st2901 + case 162: + goto st359 + case 163: + goto st360 + case 171: + goto st361 + } + goto tr0 + st2884: + if p++; p == pe { + goto _test_eof2884 + } + st_case_2884: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2518 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2885: + if p++; p == pe { + goto _test_eof2885 + } + st_case_2885: + if 135 <= data[p] && data[p] <= 190 { + goto tr0 + } + goto tr2518 + st2886: + if p++; p == pe { + goto _test_eof2886 + } + st_case_2886: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2887: + if p++; p == pe { + goto _test_eof2887 + } + st_case_2887: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2518 + } + case data[p] > 166: + if 167 <= data[p] && data[p] <= 180 { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2888: + if p++; p == pe { + goto _test_eof2888 + } + st_case_2888: + switch data[p] { + case 179: + goto tr2518 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr0 + st2889: + if p++; p == pe { + goto _test_eof2889 + } + st_case_2889: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2518 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2890: + if p++; p == pe { + goto _test_eof2890 + } + st_case_2890: + switch data[p] { + case 154: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 132 { + goto tr148 + } + case data[p] > 137: + if 141 <= data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2891: + if p++; p == pe { + goto _test_eof2891 + } + st_case_2891: + switch { + case data[p] < 147: + if 
128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2892: + if p++; p == pe { + goto _test_eof2892 + } + st_case_2892: + switch { + case data[p] > 170: + if 171 <= data[p] { + goto tr0 + } + case data[p] >= 159: + goto tr2518 + } + goto tr148 + st2893: + if p++; p == pe { + goto _test_eof2893 + } + st_case_2893: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2518 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2518 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st2894: + if p++; p == pe { + goto _test_eof2894 + } + st_case_2894: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2518 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2518 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2518 + } + default: + goto tr2518 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2518 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2518 + } + default: + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2895: + if p++; p == pe { + goto _test_eof2895 + } + st_case_2895: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2896: + if p++; p == pe { + goto _test_eof2896 + } + st_case_2896: + if data[p] == 134 { + goto tr0 + } + switch { + case 
data[p] > 135: + if 136 <= data[p] { + goto tr0 + } + case data[p] >= 132: + goto tr148 + } + goto tr2518 + st2897: + if p++; p == pe { + goto _test_eof2897 + } + st_case_2897: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2518 + } + default: + goto tr2518 + } + goto tr0 + st2898: + if p++; p == pe { + goto _test_eof2898 + } + st_case_2898: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr0 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + goto tr2518 + st2899: + if p++; p == pe { + goto _test_eof2899 + } + st_case_2899: + if data[p] == 132 { + goto tr148 + } + if 129 <= data[p] { + goto tr0 + } + goto tr2518 + st2900: + if p++; p == pe { + goto _test_eof2900 + } + st_case_2900: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2901: + if p++; p == pe { + goto _test_eof2901 + } + st_case_2901: + if 157 <= data[p] && data[p] <= 171 { + goto tr2518 + } + goto tr0 + st2902: + if p++; p == pe { + goto _test_eof2902 + } + st_case_2902: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st326 + case 171: + goto st2903 + case 172: + goto st2904 + case 173: + goto st373 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st2905 + case 190: + goto st2906 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr0 + st2903: + if p++; p == pe { + goto _test_eof2903 + } + st_case_2903: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2518 + } + case data[p] >= 144: + goto tr148 + } + goto tr0 + st2904: + if p++; p == pe { + goto _test_eof2904 + } + st_case_2904: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2518 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st2905: + if 
p++; p == pe { + goto _test_eof2905 + } + st_case_2905: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr0 + } + default: + goto tr2518 + } + goto tr148 + st2906: + if p++; p == pe { + goto _test_eof2906 + } + st_case_2906: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2518 + } + goto tr0 + st2907: + if p++; p == pe { + goto _test_eof2907 + } + st_case_2907: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st2908 + } + goto tr0 + st2908: + if p++; p == pe { + goto _test_eof2908 + } + st_case_2908: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2518 + } + case data[p] >= 157: + goto tr2518 + } + default: + goto tr148 + } + goto tr0 + st2909: + if p++; p == pe { + goto _test_eof2909 + } + st_case_2909: + switch data[p] { + case 133: + goto st2910 + case 134: + goto st2911 + case 137: + goto st2912 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st394 + case 168: + goto st2913 + case 169: + goto st2914 + case 170: + goto st2915 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr0 + st2910: + if p++; p == pe { + goto _test_eof2910 + } + st_case_2910: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2518 + } + case data[p] >= 165: + goto tr2518 + } + goto tr0 + st2911: + if p++; p == pe { + goto _test_eof2911 + } + st_case_2911: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr0 + } + case data[p] > 169: + if 174 <= 
data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr2518 + st2912: + if p++; p == pe { + goto _test_eof2912 + } + st_case_2912: + if 130 <= data[p] && data[p] <= 132 { + goto tr2518 + } + goto tr0 + st2913: + if p++; p == pe { + goto _test_eof2913 + } + st_case_2913: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2518 + } + case data[p] >= 128: + goto tr2518 + } + goto tr0 + st2914: + if p++; p == pe { + goto _test_eof2914 + } + st_case_2914: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr0 + } + case data[p] >= 173: + goto tr0 + } + goto tr2518 + st2915: + if p++; p == pe { + goto _test_eof2915 + } + st_case_2915: + if data[p] == 132 { + goto tr2518 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2518 + } + case data[p] >= 155: + goto tr2518 + } + goto tr0 + st2916: + if p++; p == pe { + goto _test_eof2916 + } + st_case_2916: + switch data[p] { + case 160: + goto st147 + case 163: + goto st2917 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr0 + st2917: + if p++; p == pe { + goto _test_eof2917 + } + st_case_2917: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr0 + } + default: + goto tr2518 + } + goto tr148 + st2918: + if p++; p == pe { + goto _test_eof2918 + } + st_case_2918: + if data[p] == 160 { + goto st2919 + } + goto tr0 + st2919: + if p++; p == pe { + goto _test_eof2919 + } + st_case_2919: + switch data[p] { + case 128: + goto st2920 + case 129: + goto st2921 + case 132: + goto st2778 + case 135: + goto st2923 + } + if 133 <= data[p] && data[p] <= 134 { + goto st2922 + } + goto tr0 + st2920: + if p++; p == pe { + goto _test_eof2920 + } + st_case_2920: + if data[p] == 129 { + goto tr2518 + } + if 160 <= data[p] { + goto tr2518 + } + goto tr0 + st2921: + if p++; p == pe { + goto _test_eof2921 + 
} + st_case_2921: + if 192 <= data[p] { + goto tr0 + } + goto tr2518 + st2922: + if p++; p == pe { + goto _test_eof2922 + } + st_case_2922: + goto tr2518 + st2923: + if p++; p == pe { + goto _test_eof2923 + } + st_case_2923: + if 176 <= data[p] { + goto tr0 + } + goto tr2518 +tr2646: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4886 + st4886: + if p++; p == pe { + goto _test_eof4886 + } + st_case_4886: +//line segment_words_prod.go:76101 + switch data[p] { + case 39: + goto st413 + case 44: + goto st413 + case 46: + goto st413 + case 59: + goto st413 + case 95: + goto tr571 + case 194: + goto st2924 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st2925 + case 205: + goto st2926 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st2927 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st2928 + case 215: + goto st2929 + case 216: + goto st2930 + case 217: + goto st2931 + case 219: + goto st2932 + case 220: + goto st2933 + case 221: + goto st2934 + case 222: + goto st2935 + case 223: + goto st2936 + case 224: + goto st2937 + case 225: + goto st2969 + case 226: + goto st2991 + case 227: + goto st2998 + case 234: + goto st3001 + case 237: + goto st287 + case 239: + goto st3017 + case 240: + goto st3023 + case 243: + goto st3065 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st2924: + if p++; p == pe { + goto _test_eof2924 + } + st_case_2924: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2646 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + 
st2925: + if p++; p == pe { + goto _test_eof2925 + } + st_case_2925: + if data[p] <= 127 { + goto tr420 + } + goto tr2646 + st2926: + if p++; p == pe { + goto _test_eof2926 + } + st_case_2926: + switch data[p] { + case 181: + goto tr420 + case 190: + goto st413 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr2646 + st2927: + if p++; p == pe { + goto _test_eof2927 + } + st_case_2927: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2646 + } + goto tr148 + st2928: + if p++; p == pe { + goto _test_eof2928 + } + st_case_2928: + switch data[p] { + case 137: + goto st413 + case 190: + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st2929: + if p++; p == pe { + goto _test_eof2929 + } + st_case_2929: + switch data[p] { + case 135: + goto tr2646 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2646 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2646 + } + goto tr420 + st2930: + if p++; p == pe { + goto _test_eof2930 + } + st_case_2930: + if data[p] == 156 { + goto tr2646 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 133 { + goto tr2646 + } + case data[p] > 141: + switch { + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + case data[p] >= 144: + goto tr2646 + } + default: + goto st413 + } + goto tr420 + st2931: + if p++; p == pe { + goto _test_eof2931 + } + st_case_2931: + switch data[p] { + case 171: + goto tr421 + case 
172: + goto st413 + case 176: + goto tr2646 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2646 + } + goto tr420 + st2932: + if p++; p == pe { + goto _test_eof2932 + } + st_case_2932: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2646 + } + case data[p] >= 150: + goto tr2646 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st2933: + if p++; p == pe { + goto _test_eof2933 + } + st_case_2933: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2646 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st2934: + if p++; p == pe { + goto _test_eof2934 + } + st_case_2934: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr2646 + st2935: + if p++; p == pe { + goto _test_eof2935 + } + st_case_2935: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2646 + } + goto tr148 + st2936: + if p++; p == pe { + goto _test_eof2936 + } + st_case_2936: + switch data[p] { + case 184: + goto st413 + case 186: + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st2937: + if p++; p == pe { + goto _test_eof2937 
+ } + st_case_2937: + switch data[p] { + case 160: + goto st2938 + case 161: + goto st2939 + case 162: + goto st168 + case 163: + goto st2940 + case 164: + goto st2941 + case 165: + goto st2942 + case 166: + goto st2943 + case 167: + goto st2944 + case 168: + goto st2945 + case 169: + goto st2946 + case 170: + goto st2947 + case 171: + goto st2948 + case 172: + goto st2949 + case 173: + goto st2950 + case 174: + goto st2951 + case 175: + goto st2952 + case 176: + goto st2953 + case 177: + goto st2954 + case 178: + goto st2955 + case 179: + goto st2956 + case 180: + goto st2957 + case 181: + goto st2958 + case 182: + goto st2959 + case 183: + goto st2960 + case 184: + goto st2961 + case 185: + goto st2962 + case 186: + goto st2963 + case 187: + goto st2964 + case 188: + goto st2965 + case 189: + goto st2966 + case 190: + goto st2967 + case 191: + goto st2968 + } + goto tr420 + st2938: + if p++; p == pe { + goto _test_eof2938 + } + st_case_2938: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2939: + if p++; p == pe { + goto _test_eof2939 + } + st_case_2939: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2940: + if p++; p == pe { + goto _test_eof2940 + } + st_case_2940: + if 163 <= data[p] { + goto tr2646 + } + goto tr420 + st2941: + if p++; p == pe { + goto _test_eof2941 + } + st_case_2941: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2646 + st2942: + if p++; p == pe { + goto _test_eof2942 + } + st_case_2942: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case 
data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2646 + st2943: + if p++; p == pe { + goto _test_eof2943 + } + st_case_2943: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr2646 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr2646 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2646 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st2944: + if p++; p == pe { + goto _test_eof2944 + } + st_case_2944: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2646 + st2945: + if p++; p == pe { + goto _test_eof2945 + } + st_case_2945: + if data[p] == 188 { + goto tr2646 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && 
data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2646 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2946: + if p++; p == pe { + goto _test_eof2946 + } + st_case_2946: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2646 + st2947: + if p++; p == pe { + goto _test_eof2947 + } + st_case_2947: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2646 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2948: + if p++; p == pe { + goto _test_eof2948 + } + st_case_2948: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 
165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2646 + st2949: + if p++; p == pe { + goto _test_eof2949 + } + st_case_2949: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2646 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2950: + if p++; p == pe { + goto _test_eof2950 + } + st_case_2950: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2646 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2646 + } + default: + goto tr2646 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr2646 + } + default: + goto tr148 + } + default: + goto tr2646 + } + goto tr420 + st2951: + if p++; p == pe { + goto _test_eof2951 + } + st_case_2951: + switch data[p] { + case 130: + goto tr2646 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] 
> 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2646 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2952: + if p++; p == pe { + goto _test_eof2952 + } + st_case_2952: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2646 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2646 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st2953: + if p++; p == pe { + goto _test_eof2953 + } + st_case_2953: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2646 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2954: + if p++; p == pe { + goto _test_eof2954 + } + st_case_2954: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2646 + st2955: + if p++; p == pe { + goto _test_eof2955 + } + st_case_2955: + if data[p] == 189 
{ + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2646 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2956: + if p++; p == pe { + goto _test_eof2956 + } + st_case_2956: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2646 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2646 + } + default: + goto tr2646 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st2957: + if p++; p == pe { + goto _test_eof2957 + } + st_case_2957: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2646 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2646 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2958: + if p++; p == pe { + goto _test_eof2958 + } + st_case_2958: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto 
tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr2646 + st2959: + if p++; p == pe { + goto _test_eof2959 + } + st_case_2959: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2960: + if p++; p == pe { + goto _test_eof2960 + } + st_case_2960: + switch data[p] { + case 138: + goto tr2646 + case 150: + goto tr2646 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2646 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2646 + } + goto tr420 + st2961: + if p++; p == pe { + goto _test_eof2961 + } + st_case_2961: + if data[p] == 177 { + goto tr2646 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2646 + } + goto tr420 + st2962: + if p++; p == pe { + goto _test_eof2962 + } + st_case_2962: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr2646 + } + goto tr420 + st2963: + if p++; p == pe { + goto _test_eof2963 + } + st_case_2963: + if data[p] == 177 { + goto tr2646 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2646 + } + case data[p] >= 180: + goto tr2646 + } + goto tr420 + st2964: + if p++; p == pe { + goto _test_eof2964 + } + st_case_2964: + switch { + case 
data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr2646 + } + goto tr420 + st2965: + if p++; p == pe { + goto _test_eof2965 + } + st_case_2965: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2646 + case 183: + goto tr2646 + case 185: + goto tr2646 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2646 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2646 + } + default: + goto tr421 + } + goto tr420 + st2966: + if p++; p == pe { + goto _test_eof2966 + } + st_case_2966: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st2967: + if p++; p == pe { + goto _test_eof2967 + } + st_case_2967: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2646 + } + case data[p] >= 128: + goto tr2646 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2646 + } + case data[p] >= 141: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st2968: + if p++; p == pe { + goto _test_eof2968 + } + st_case_2968: + if data[p] == 134 { + goto tr2646 + } + goto tr420 + st2969: + if p++; p == pe { + goto _test_eof2969 + } + st_case_2969: + switch data[p] { + case 128: + goto st2970 + case 129: + goto st2971 + case 130: + goto st2972 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st2973 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st2974 + case 157: + goto st2975 + case 158: + goto st2976 + case 159: + goto st2977 + case 160: + goto st2978 + case 161: + goto st219 + 
case 162: + goto st2979 + case 163: + goto st221 + case 164: + goto st2980 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st2981 + case 169: + goto st2982 + case 170: + goto st2983 + case 172: + goto st2984 + case 173: + goto st2985 + case 174: + goto st2986 + case 175: + goto st2987 + case 176: + goto st2988 + case 177: + goto st640 + case 179: + goto st2989 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2990 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st2970: + if p++; p == pe { + goto _test_eof2970 + } + st_case_2970: + if 171 <= data[p] && data[p] <= 190 { + goto tr2646 + } + goto tr420 + st2971: + if p++; p == pe { + goto _test_eof2971 + } + st_case_2971: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2646 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2646 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2646 + } + default: + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st2972: + if p++; p == pe { + goto _test_eof2972 + } + st_case_2972: + if data[p] == 143 { + goto tr2646 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2646 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2646 + } + default: + goto tr421 + } + goto tr420 + st2973: + if p++; p == pe { + goto _test_eof2973 + } + st_case_2973: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + 
goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st2974: + if p++; p == pe { + goto _test_eof2974 + } + st_case_2974: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2646 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2646 + } + goto tr420 + st2975: + if p++; p == pe { + goto _test_eof2975 + } + st_case_2975: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2646 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2976: + if p++; p == pe { + goto _test_eof2976 + } + st_case_2976: + if 180 <= data[p] { + goto tr2646 + } + goto tr420 + st2977: + if p++; p == pe { + goto _test_eof2977 + } + st_case_2977: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr2646 + st2978: + if p++; p == pe { + goto _test_eof2978 + } + st_case_2978: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2646 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st2979: + if p++; p == pe { + goto _test_eof2979 + } + st_case_2979: + if data[p] == 169 { + goto tr2646 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2980: + if p++; p == pe { + goto _test_eof2980 + } + st_case_2980: + switch { + case data[p] < 160: + if 128 <= data[p] 
&& data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st2981: + if p++; p == pe { + goto _test_eof2981 + } + st_case_2981: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2982: + if p++; p == pe { + goto _test_eof2982 + } + st_case_2982: + if data[p] == 191 { + goto tr2646 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2646 + } + case data[p] >= 149: + goto tr2646 + } + goto tr420 + st2983: + if p++; p == pe { + goto _test_eof2983 + } + st_case_2983: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2646 + } + default: + goto tr421 + } + goto tr420 + st2984: + if p++; p == pe { + goto _test_eof2984 + } + st_case_2984: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2646 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st2985: + if p++; p == pe { + goto _test_eof2985 + } + st_case_2985: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr2646 + st2986: + if p++; p == pe { + goto _test_eof2986 + } + st_case_2986: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2646 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + 
default: + goto tr421 + } + default: + goto tr2646 + } + goto tr420 + st2987: + if p++; p == pe { + goto _test_eof2987 + } + st_case_2987: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2646 + } + goto tr148 + st2988: + if p++; p == pe { + goto _test_eof2988 + } + st_case_2988: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st2989: + if p++; p == pe { + goto _test_eof2989 + } + st_case_2989: + if data[p] == 173 { + goto tr2646 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2646 + } + case data[p] >= 144: + goto tr2646 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2646 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2646 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2990: + if p++; p == pe { + goto _test_eof2990 + } + st_case_2990: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2646 + } + case data[p] >= 128: + goto tr2646 + } + goto tr420 + st2991: + if p++; p == pe { + goto _test_eof2991 + } + st_case_2991: + switch data[p] { + case 128: + goto st2992 + case 129: + goto st2993 + case 130: + goto st241 + case 131: + goto st2994 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st2995 + case 180: + goto st251 + case 181: + goto st2996 + case 182: + goto st253 + case 183: + goto st2997 + case 184: + goto st255 + } + goto tr420 + st2992: + if p++; p == pe { + goto _test_eof2992 + } + st_case_2992: + if data[p] == 164 { + goto st413 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr2646 + } + 
case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr2646 + } + default: + goto st413 + } + goto tr420 + st2993: + if p++; p == pe { + goto _test_eof2993 + } + st_case_2993: + switch data[p] { + case 132: + goto st413 + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr2646 + } + default: + goto tr420 + } + goto tr571 + st2994: + if p++; p == pe { + goto _test_eof2994 + } + st_case_2994: + if 144 <= data[p] && data[p] <= 176 { + goto tr2646 + } + goto tr420 + st2995: + if p++; p == pe { + goto _test_eof2995 + } + st_case_2995: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st2996: + if p++; p == pe { + goto _test_eof2996 + } + st_case_2996: + if data[p] == 191 { + goto tr2646 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st2997: + if p++; p == pe { + goto _test_eof2997 + } + st_case_2997: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2646 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st2998: + if p++; p == pe { + goto _test_eof2998 + } + st_case_2998: + switch data[p] { + case 128: + goto st2999 + case 130: + goto st3000 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st2999: + if p++; p == pe { + goto _test_eof2999 + } + 
st_case_2999: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr2646 + } + goto tr420 + st3000: + if p++; p == pe { + goto _test_eof3000 + } + st_case_3000: + if 153 <= data[p] && data[p] <= 154 { + goto tr2646 + } + goto tr420 + st3001: + if p++; p == pe { + goto _test_eof3001 + } + st_case_3001: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st3002 + case 154: + goto st3003 + case 155: + goto st3004 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st3005 + case 161: + goto st272 + case 162: + goto st3006 + case 163: + goto st3007 + case 164: + goto st3008 + case 165: + goto st3009 + case 166: + goto st3010 + case 167: + goto st3011 + case 168: + goto st3012 + case 169: + goto st3013 + case 170: + goto st3014 + case 171: + goto st3015 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st3016 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st3002: + if p++; p == pe { + goto _test_eof3002 + } + st_case_3002: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st3003: + if p++; p == pe { + goto _test_eof3003 + } + st_case_3003: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2646 + } + goto tr420 + st3004: + if p++; p == pe { + goto _test_eof3004 + } + st_case_3004: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr2646 + } + goto tr148 + st3005: + if 
p++; p == pe { + goto _test_eof3005 + } + st_case_3005: + switch data[p] { + case 130: + goto tr2646 + case 134: + goto tr2646 + case 139: + goto tr2646 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr2646 + } + goto tr148 + st3006: + if p++; p == pe { + goto _test_eof3006 + } + st_case_3006: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2646 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3007: + if p++; p == pe { + goto _test_eof3007 + } + st_case_3007: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr2646 + st3008: + if p++; p == pe { + goto _test_eof3008 + } + st_case_3008: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3009: + if p++; p == pe { + goto _test_eof3009 + } + st_case_3009: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2646 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st3010: + if p++; p == pe { + goto _test_eof3010 + } + st_case_3010: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3011: + if p++; p == pe { + goto _test_eof3011 + } + st_case_3011: + if data[p] == 143 { + goto 
tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr2646 + st3012: + if p++; p == pe { + goto _test_eof3012 + } + st_case_3012: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3013: + if p++; p == pe { + goto _test_eof3013 + } + st_case_3013: + if data[p] == 131 { + goto tr2646 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2646 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2646 + } + goto tr420 + st3014: + if p++; p == pe { + goto _test_eof3014 + } + st_case_3014: + if data[p] == 176 { + goto tr2646 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2646 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st3015: + if p++; p == pe { + goto _test_eof3015 + } + st_case_3015: + if data[p] == 129 { + goto tr2646 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2646 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2646 + } + goto tr420 + st3016: + if p++; p == pe { + goto _test_eof3016 + } + st_case_3016: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 
185 { + goto tr421 + } + case data[p] >= 172: + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st3017: + if p++; p == pe { + goto _test_eof3017 + } + st_case_3017: + switch data[p] { + case 172: + goto st3018 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st3019 + case 185: + goto st1408 + case 187: + goto st3020 + case 188: + goto st1410 + case 189: + goto st303 + case 190: + goto st3021 + case 191: + goto st3022 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st3018: + if p++; p == pe { + goto _test_eof3018 + } + st_case_3018: + switch data[p] { + case 158: + goto tr2646 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st3019: + if p++; p == pe { + goto _test_eof3019 + } + st_case_3019: + switch data[p] { + case 144: + goto st413 + case 148: + goto st413 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2646 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr2646 + } + goto tr420 + st3020: + if p++; p == pe { + goto _test_eof3020 + } + st_case_3020: + if data[p] == 191 { + goto tr2646 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st3021: + if p++; p == pe { + goto _test_eof3021 + } + st_case_3021: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr2646 + } + goto tr420 + st3022: + if p++; p == pe { + goto _test_eof3022 + } + st_case_3022: + switch { + case data[p] < 146: + switch 
{ + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2646 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3023: + if p++; p == pe { + goto _test_eof3023 + } + st_case_3023: + switch data[p] { + case 144: + goto st3024 + case 145: + goto st3030 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st3049 + case 155: + goto st3054 + case 157: + goto st3056 + case 158: + goto st3063 + case 159: + goto st403 + } + goto tr420 + st3024: + if p++; p == pe { + goto _test_eof3024 + } + st_case_3024: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st3025 + case 138: + goto st313 + case 139: + goto st3026 + case 140: + goto st315 + case 141: + goto st3027 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st3028 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st3029 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st3025: + if p++; p == pe { + goto _test_eof3025 + } + st_case_3025: + if data[p] == 189 { + goto tr2646 + } + goto tr420 + st3026: + if p++; p == pe { + goto _test_eof3026 + } + st_case_3026: + if 
data[p] == 160 { + goto tr2646 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st3027: + if p++; p == pe { + goto _test_eof3027 + } + st_case_3027: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st3028: + if p++; p == pe { + goto _test_eof3028 + } + st_case_3028: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2646 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2646 + } + default: + goto tr2646 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2646 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3029: + if p++; p == pe { + goto _test_eof3029 + } + st_case_3029: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3030: + if p++; p == pe { + goto _test_eof3030 + } + st_case_3030: + switch data[p] { + case 128: + goto st3031 + case 129: + goto st3032 + case 130: + goto st3033 + case 131: + goto st691 + case 132: + goto st3034 + case 133: + goto st3035 + case 134: + goto st3036 + case 135: + goto st3037 + case 136: + goto st3038 + case 138: + goto st348 + case 139: + goto st3039 + case 140: + goto st3040 + case 141: + goto st3041 + case 146: + goto st3042 + case 147: + goto st3043 + case 150: + goto st3044 + case 151: + goto st3045 + case 152: + goto st3042 + case 153: + goto st3046 + case 154: + goto st3047 + case 155: + goto st538 + case 156: + goto st3048 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto 
st361 + } + goto tr420 + st3031: + if p++; p == pe { + goto _test_eof3031 + } + st_case_3031: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2646 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3032: + if p++; p == pe { + goto _test_eof3032 + } + st_case_3032: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr2646 + st3033: + if p++; p == pe { + goto _test_eof3033 + } + st_case_3033: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2646 + st3034: + if p++; p == pe { + goto _test_eof3034 + } + st_case_3034: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2646 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3035: + if p++; p == pe { + goto _test_eof3035 + } + st_case_3035: + switch data[p] { + case 179: + goto tr2646 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st3036: + if p++; p == pe { + goto _test_eof3036 + } + st_case_3036: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2646 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3037: + if p++; p == pe { + goto _test_eof3037 + } + st_case_3037: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + 
if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr2646 + st3038: + if p++; p == pe { + goto _test_eof3038 + } + st_case_3038: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3039: + if p++; p == pe { + goto _test_eof3039 + } + st_case_3039: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2646 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st3040: + if p++; p == pe { + goto _test_eof3040 + } + st_case_3040: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2646 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2646 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3041: + if p++; p == pe { + goto _test_eof3041 + } + st_case_3041: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2646 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2646 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2646 + } + default: + goto tr2646 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2646 + } + case data[p] > 172: + if 
176 <= data[p] && data[p] <= 180 { + goto tr2646 + } + default: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3042: + if p++; p == pe { + goto _test_eof3042 + } + st_case_3042: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3043: + if p++; p == pe { + goto _test_eof3043 + } + st_case_3043: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr2646 + st3044: + if p++; p == pe { + goto _test_eof3044 + } + st_case_3044: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2646 + } + default: + goto tr2646 + } + goto tr420 + st3045: + if p++; p == pe { + goto _test_eof3045 + } + st_case_3045: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr2646 + st3046: + if p++; p == pe { + goto _test_eof3046 + } + st_case_3046: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr2646 + st3047: + if p++; p == pe { + goto _test_eof3047 + } + st_case_3047: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3048: + if p++; p == pe { + goto _test_eof3048 + } + st_case_3048: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr2646 + } + goto tr420 + st3049: + if p++; p == pe { + goto 
_test_eof3049 + } + st_case_3049: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st3050 + case 172: + goto st3051 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st3052 + case 190: + goto st3053 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st3050: + if p++; p == pe { + goto _test_eof3050 + } + st_case_3050: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2646 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st3051: + if p++; p == pe { + goto _test_eof3051 + } + st_case_3051: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2646 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3052: + if p++; p == pe { + goto _test_eof3052 + } + st_case_3052: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st3053: + if p++; p == pe { + goto _test_eof3053 + } + st_case_3053: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2646 + } + goto tr420 + st3054: + if p++; p == pe { + goto _test_eof3054 + } + st_case_3054: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st3055 + } + goto tr420 + st3055: + if p++; p == pe { + goto _test_eof3055 + } + st_case_3055: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2646 + } + case data[p] >= 157: + goto tr2646 + } + default: + goto tr148 + } + goto tr420 + st3056: + if p++; p == pe { + goto _test_eof3056 + } + st_case_3056: + switch data[p] { + case 133: + goto st3057 + case 134: + goto st3058 + case 137: + goto st3059 + 
case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st3060 + case 169: + goto st3061 + case 170: + goto st3062 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st3057: + if p++; p == pe { + goto _test_eof3057 + } + st_case_3057: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2646 + } + case data[p] >= 165: + goto tr2646 + } + goto tr420 + st3058: + if p++; p == pe { + goto _test_eof3058 + } + st_case_3058: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2646 + st3059: + if p++; p == pe { + goto _test_eof3059 + } + st_case_3059: + if 130 <= data[p] && data[p] <= 132 { + goto tr2646 + } + goto tr420 + st3060: + if p++; p == pe { + goto _test_eof3060 + } + st_case_3060: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2646 + } + case data[p] >= 128: + goto tr2646 + } + goto tr420 + st3061: + if p++; p == pe { + goto _test_eof3061 + } + st_case_3061: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr2646 + st3062: + if p++; p == pe { + goto _test_eof3062 + } + st_case_3062: + if data[p] == 132 { + goto tr2646 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2646 + } + case data[p] >= 155: + goto tr2646 + } + goto tr420 + st3063: + if p++; p == pe { + goto _test_eof3063 + } + st_case_3063: + switch data[p] { + case 160: + goto st147 + case 163: + goto st3064 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + 
st3064: + if p++; p == pe { + goto _test_eof3064 + } + st_case_3064: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr2646 + } + goto tr148 + st3065: + if p++; p == pe { + goto _test_eof3065 + } + st_case_3065: + if data[p] == 160 { + goto st3066 + } + goto tr420 + st3066: + if p++; p == pe { + goto _test_eof3066 + } + st_case_3066: + switch data[p] { + case 128: + goto st3067 + case 129: + goto st3068 + case 132: + goto st2925 + case 135: + goto st3070 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3069 + } + goto tr420 + st3067: + if p++; p == pe { + goto _test_eof3067 + } + st_case_3067: + if data[p] == 129 { + goto tr2646 + } + if 160 <= data[p] { + goto tr2646 + } + goto tr420 + st3068: + if p++; p == pe { + goto _test_eof3068 + } + st_case_3068: + if 192 <= data[p] { + goto tr420 + } + goto tr2646 + st3069: + if p++; p == pe { + goto _test_eof3069 + } + st_case_3069: + goto tr2646 + st3070: + if p++; p == pe { + goto _test_eof3070 + } + st_case_3070: + if 176 <= data[p] { + goto tr420 + } + goto tr2646 +tr2774: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4887 + st4887: + if p++; p == pe { + goto _test_eof4887 + } + st_case_4887: +//line segment_words_prod.go:79886 + switch data[p] { + case 95: + goto tr571 + case 194: + goto st3071 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st148 + case 204: + goto st3072 + case 205: + goto st3073 + case 206: + goto st151 + case 207: + goto st152 + case 210: + goto st3074 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3075 + case 215: + goto st3076 + case 216: + goto st3077 + case 217: + goto st3078 + case 219: + goto st3079 + case 220: + goto st3080 + case 221: + goto st3081 + case 222: + goto st3082 + case 223: + goto st3083 + case 224: + goto st3084 
+ case 225: + goto st3116 + case 226: + goto st3138 + case 227: + goto st3145 + case 234: + goto st3148 + case 237: + goto st287 + case 239: + goto st3164 + case 240: + goto st3170 + case 243: + goto st3212 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3071: + if p++; p == pe { + goto _test_eof3071 + } + st_case_3071: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2774 + case 181: + goto tr148 + case 186: + goto tr148 + } + goto tr420 + st3072: + if p++; p == pe { + goto _test_eof3072 + } + st_case_3072: + if data[p] <= 127 { + goto tr420 + } + goto tr2774 + st3073: + if p++; p == pe { + goto _test_eof3073 + } + st_case_3073: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr2774 + st3074: + if p++; p == pe { + goto _test_eof3074 + } + st_case_3074: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2774 + } + goto tr148 + st3075: + if p++; p == pe { + goto _test_eof3075 + } + st_case_3075: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3076: + if p++; p == pe { + goto _test_eof3076 + } + st_case_3076: + switch data[p] { + case 135: + goto tr2774 + case 179: + goto tr148 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] 
<= 130 { + goto tr2774 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2774 + } + goto tr420 + st3077: + if p++; p == pe { + goto _test_eof3077 + } + st_case_3077: + if data[p] == 156 { + goto tr2774 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2774 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2774 + } + goto tr420 + st3078: + if p++; p == pe { + goto _test_eof3078 + } + st_case_3078: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr2774 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2774 + } + goto tr420 + st3079: + if p++; p == pe { + goto _test_eof3079 + } + st_case_3079: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2774 + } + case data[p] >= 150: + goto tr2774 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st3080: + if p++; p == pe { + goto _test_eof3080 + } + st_case_3080: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2774 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3081: + if p++; p == pe { + goto _test_eof3081 + } + st_case_3081: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr2774 + st3082: + if p++; p == pe { + 
goto _test_eof3082 + } + st_case_3082: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2774 + } + goto tr148 + st3083: + if p++; p == pe { + goto _test_eof3083 + } + st_case_3083: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3084: + if p++; p == pe { + goto _test_eof3084 + } + st_case_3084: + switch data[p] { + case 160: + goto st3085 + case 161: + goto st3086 + case 162: + goto st168 + case 163: + goto st3087 + case 164: + goto st3088 + case 165: + goto st3089 + case 166: + goto st3090 + case 167: + goto st3091 + case 168: + goto st3092 + case 169: + goto st3093 + case 170: + goto st3094 + case 171: + goto st3095 + case 172: + goto st3096 + case 173: + goto st3097 + case 174: + goto st3098 + case 175: + goto st3099 + case 176: + goto st3100 + case 177: + goto st3101 + case 178: + goto st3102 + case 179: + goto st3103 + case 180: + goto st3104 + case 181: + goto st3105 + case 182: + goto st3106 + case 183: + goto st3107 + case 184: + goto st3108 + case 185: + goto st3109 + case 186: + goto st3110 + case 187: + goto st3111 + case 188: + goto st3112 + case 189: + goto st3113 + case 190: + goto st3114 + case 191: + goto st3115 + } + goto tr420 + st3085: + if p++; p == pe { + goto _test_eof3085 + } + st_case_3085: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3086: + if p++; p == pe { + goto _test_eof3086 + } + st_case_3086: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2774 + } + case data[p] >= 128: + 
goto tr148 + } + goto tr420 + st3087: + if p++; p == pe { + goto _test_eof3087 + } + st_case_3087: + if 163 <= data[p] { + goto tr2774 + } + goto tr420 + st3088: + if p++; p == pe { + goto _test_eof3088 + } + st_case_3088: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2774 + st3089: + if p++; p == pe { + goto _test_eof3089 + } + st_case_3089: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2774 + st3090: + if p++; p == pe { + goto _test_eof3090 + } + st_case_3090: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr2774 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr2774 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2774 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st3091: + if p++; p == pe { + goto _test_eof3091 + } + st_case_3091: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + 
case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2774 + st3092: + if p++; p == pe { + goto _test_eof3092 + } + st_case_3092: + if data[p] == 188 { + goto tr2774 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2774 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3093: + if p++; p == pe { + goto _test_eof3093 + } + st_case_3093: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2774 + st3094: + if p++; p == pe { + goto _test_eof3094 + } + st_case_3094: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 
176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2774 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3095: + if p++; p == pe { + goto _test_eof3095 + } + st_case_3095: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr2774 + st3096: + if p++; p == pe { + goto _test_eof3096 + } + st_case_3096: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2774 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3097: + if p++; p == pe { + goto _test_eof3097 + } + st_case_3097: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2774 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2774 + } + default: + goto tr2774 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + 
} + case data[p] >= 162: + goto tr2774 + } + default: + goto tr148 + } + default: + goto tr2774 + } + goto tr420 + st3098: + if p++; p == pe { + goto _test_eof3098 + } + st_case_3098: + switch data[p] { + case 130: + goto tr2774 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2774 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3099: + if p++; p == pe { + goto _test_eof3099 + } + st_case_3099: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2774 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2774 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3100: + if p++; p == pe { + goto _test_eof3100 + } + st_case_3100: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2774 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3101: + if p++; p == pe { + goto _test_eof3101 + } + st_case_3101: + switch data[p] { + case 133: + goto tr420 + case 137: + 
goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2774 + st3102: + if p++; p == pe { + goto _test_eof3102 + } + st_case_3102: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2774 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3103: + if p++; p == pe { + goto _test_eof3103 + } + st_case_3103: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2774 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2774 + } + default: + goto tr2774 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3104: + if p++; p == pe { + goto _test_eof3104 + } + st_case_3104: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] 
<= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2774 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2774 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3105: + if p++; p == pe { + goto _test_eof3105 + } + st_case_3105: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr2774 + st3106: + if p++; p == pe { + goto _test_eof3106 + } + st_case_3106: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3107: + if p++; p == pe { + goto _test_eof3107 + } + st_case_3107: + switch data[p] { + case 138: + goto tr2774 + case 150: + goto tr2774 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2774 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2774 + } + goto tr420 + st3108: + if p++; p == pe { + goto _test_eof3108 + } + st_case_3108: + if data[p] == 177 { + goto tr2774 + } + if 180 <= data[p] && data[p] 
<= 186 { + goto tr2774 + } + goto tr420 + st3109: + if p++; p == pe { + goto _test_eof3109 + } + st_case_3109: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr2774 + } + goto tr420 + st3110: + if p++; p == pe { + goto _test_eof3110 + } + st_case_3110: + if data[p] == 177 { + goto tr2774 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2774 + } + case data[p] >= 180: + goto tr2774 + } + goto tr420 + st3111: + if p++; p == pe { + goto _test_eof3111 + } + st_case_3111: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr2774 + } + goto tr420 + st3112: + if p++; p == pe { + goto _test_eof3112 + } + st_case_3112: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2774 + case 183: + goto tr2774 + case 185: + goto tr2774 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2774 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2774 + } + default: + goto tr421 + } + goto tr420 + st3113: + if p++; p == pe { + goto _test_eof3113 + } + st_case_3113: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3114: + if p++; p == pe { + goto _test_eof3114 + } + st_case_3114: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2774 + } + case data[p] >= 128: + goto tr2774 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2774 + } + case data[p] >= 141: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3115: + if p++; p == pe { + goto _test_eof3115 + } + st_case_3115: + if data[p] == 134 { + goto tr2774 + } + goto tr420 + st3116: + if p++; p == pe 
{ + goto _test_eof3116 + } + st_case_3116: + switch data[p] { + case 128: + goto st3117 + case 129: + goto st3118 + case 130: + goto st3119 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st3120 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st3121 + case 157: + goto st3122 + case 158: + goto st3123 + case 159: + goto st3124 + case 160: + goto st3125 + case 161: + goto st219 + case 162: + goto st3126 + case 163: + goto st221 + case 164: + goto st3127 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st3128 + case 169: + goto st3129 + case 170: + goto st3130 + case 172: + goto st3131 + case 173: + goto st3132 + case 174: + goto st3133 + case 175: + goto st3134 + case 176: + goto st3135 + case 177: + goto st640 + case 179: + goto st3136 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st3137 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st3117: + if p++; p == pe { + goto _test_eof3117 + } + st_case_3117: + if 171 <= data[p] && data[p] <= 190 { + goto tr2774 + } + goto tr420 + st3118: + if p++; p == pe { + goto _test_eof3118 + } + st_case_3118: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2774 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2774 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2774 + } + default: + goto tr2774 + } + default: + goto tr2774 + } 
+ goto tr420 + st3119: + if p++; p == pe { + goto _test_eof3119 + } + st_case_3119: + if data[p] == 143 { + goto tr2774 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2774 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2774 + } + default: + goto tr421 + } + goto tr420 + st3120: + if p++; p == pe { + goto _test_eof3120 + } + st_case_3120: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3121: + if p++; p == pe { + goto _test_eof3121 + } + st_case_3121: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2774 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2774 + } + goto tr420 + st3122: + if p++; p == pe { + goto _test_eof3122 + } + st_case_3122: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2774 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3123: + if p++; p == pe { + goto _test_eof3123 + } + st_case_3123: + if 180 <= data[p] { + goto tr2774 + } + goto tr420 + st3124: + if p++; p == pe { + goto _test_eof3124 + } + st_case_3124: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr2774 + st3125: + if p++; p == 
pe { + goto _test_eof3125 + } + st_case_3125: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2774 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st3126: + if p++; p == pe { + goto _test_eof3126 + } + st_case_3126: + if data[p] == 169 { + goto tr2774 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3127: + if p++; p == pe { + goto _test_eof3127 + } + st_case_3127: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3128: + if p++; p == pe { + goto _test_eof3128 + } + st_case_3128: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3129: + if p++; p == pe { + goto _test_eof3129 + } + st_case_3129: + if data[p] == 191 { + goto tr2774 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2774 + } + case data[p] >= 149: + goto tr2774 + } + goto tr420 + st3130: + if p++; p == pe { + goto _test_eof3130 + } + st_case_3130: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2774 + } + default: + goto tr421 + } + goto tr420 + st3131: + if p++; p == pe { + goto _test_eof3131 + } + st_case_3131: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2774 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3132: + if p++; p == pe { + goto _test_eof3132 + } + st_case_3132: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto 
tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr2774 + st3133: + if p++; p == pe { + goto _test_eof3133 + } + st_case_3133: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2774 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr2774 + } + goto tr420 + st3134: + if p++; p == pe { + goto _test_eof3134 + } + st_case_3134: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr2774 + } + goto tr148 + st3135: + if p++; p == pe { + goto _test_eof3135 + } + st_case_3135: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3136: + if p++; p == pe { + goto _test_eof3136 + } + st_case_3136: + if data[p] == 173 { + goto tr2774 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2774 + } + case data[p] >= 144: + goto tr2774 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2774 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2774 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3137: + if p++; p == pe { + goto _test_eof3137 + } + st_case_3137: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2774 + } + case data[p] >= 128: + goto tr2774 + } + goto tr420 + st3138: + if p++; p == pe { + goto _test_eof3138 + } + st_case_3138: + switch data[p] { + case 128: + goto st3139 + case 129: + goto st3140 + case 130: + goto st241 + 
case 131: + goto st3141 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st3142 + case 180: + goto st251 + case 181: + goto st3143 + case 182: + goto st253 + case 183: + goto st3144 + case 184: + goto st255 + } + goto tr420 + st3139: + if p++; p == pe { + goto _test_eof3139 + } + st_case_3139: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr2774 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + default: + goto tr2774 + } + goto tr420 + st3140: + if p++; p == pe { + goto _test_eof3140 + } + st_case_3140: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr2774 + } + default: + goto tr420 + } + goto tr571 + st3141: + if p++; p == pe { + goto _test_eof3141 + } + st_case_3141: + if 144 <= data[p] && data[p] <= 176 { + goto tr2774 + } + goto tr420 + st3142: + if p++; p == pe { + goto _test_eof3142 + } + st_case_3142: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3143: + if p++; p == pe { + goto _test_eof3143 + } + st_case_3143: + if data[p] == 191 { + goto tr2774 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st3144: + if p++; p == pe { + goto _test_eof3144 + } + st_case_3144: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + 
switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2774 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3145: + if p++; p == pe { + goto _test_eof3145 + } + st_case_3145: + switch data[p] { + case 128: + goto st3146 + case 130: + goto st3147 + case 131: + goto st1164 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 135: + goto st1165 + case 139: + goto st1166 + case 140: + goto st1091 + case 141: + goto st1167 + } + goto tr420 + st3146: + if p++; p == pe { + goto _test_eof3146 + } + st_case_3146: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] < 177: + if 170 <= data[p] && data[p] <= 175 { + goto tr2774 + } + case data[p] > 181: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + default: + goto tr1049 + } + goto tr420 + st3147: + if p++; p == pe { + goto _test_eof3147 + } + st_case_3147: + switch { + case data[p] < 155: + if 153 <= data[p] && data[p] <= 154 { + goto tr2774 + } + case data[p] > 156: + if 160 <= data[p] { + goto tr1049 + } + default: + goto tr1049 + } + goto tr420 + st3148: + if p++; p == pe { + goto _test_eof3148 + } + st_case_3148: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st3149 + case 154: + goto st3150 + case 155: + goto st3151 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st3152 + case 161: + goto st272 + case 162: + goto st3153 + case 163: + goto st3154 + case 164: + goto st3155 + case 165: + goto st3156 + case 166: + goto st3157 + case 167: + goto st3158 + case 168: + goto st3159 + case 169: + goto st3160 + case 170: + goto st3161 + case 171: + goto st3162 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st3163 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st3149: + if 
p++; p == pe { + goto _test_eof3149 + } + st_case_3149: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3150: + if p++; p == pe { + goto _test_eof3150 + } + st_case_3150: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2774 + } + goto tr420 + st3151: + if p++; p == pe { + goto _test_eof3151 + } + st_case_3151: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr2774 + } + goto tr148 + st3152: + if p++; p == pe { + goto _test_eof3152 + } + st_case_3152: + switch data[p] { + case 130: + goto tr2774 + case 134: + goto tr2774 + case 139: + goto tr2774 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr2774 + } + goto tr148 + st3153: + if p++; p == pe { + goto _test_eof3153 + } + st_case_3153: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2774 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3154: + if p++; p == pe { + goto _test_eof3154 + } + st_case_3154: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr420 + } + goto tr2774 + st3155: + if p++; p == pe { + goto _test_eof3155 + } + st_case_3155: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + 
switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3156: + if p++; p == pe { + goto _test_eof3156 + } + st_case_3156: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2774 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st3157: + if p++; p == pe { + goto _test_eof3157 + } + st_case_3157: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3158: + if p++; p == pe { + goto _test_eof3158 + } + st_case_3158: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr2774 + st3159: + if p++; p == pe { + goto _test_eof3159 + } + st_case_3159: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3160: + if p++; p == pe { + goto _test_eof3160 + } + st_case_3160: + if data[p] == 131 { + goto tr2774 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2774 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2774 + } + goto tr420 + st3161: + if p++; p == pe { + goto _test_eof3161 + } + st_case_3161: + if data[p] == 176 { + goto tr2774 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto 
tr2774 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3162: + if p++; p == pe { + goto _test_eof3162 + } + st_case_3162: + if data[p] == 129 { + goto tr2774 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2774 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2774 + } + goto tr420 + st3163: + if p++; p == pe { + goto _test_eof3163 + } + st_case_3163: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3164: + if p++; p == pe { + goto _test_eof3164 + } + st_case_3164: + switch data[p] { + case 172: + goto st3165 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st3166 + case 185: + goto st674 + case 187: + goto st3167 + case 188: + goto st676 + case 189: + goto st1261 + case 190: + goto st3168 + case 191: + goto st3169 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st3165: + if p++; p == pe { + goto _test_eof3165 + } + st_case_3165: + switch data[p] { + case 158: + goto tr2774 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st3166: + if p++; p == pe { + goto _test_eof3166 + } + 
st_case_3166: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2774 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr2774 + } + goto tr420 + st3167: + if p++; p == pe { + goto _test_eof3167 + } + st_case_3167: + if data[p] == 191 { + goto tr2774 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st3168: + if p++; p == pe { + goto _test_eof3168 + } + st_case_3168: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2774 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr1049 + st3169: + if p++; p == pe { + goto _test_eof3169 + } + st_case_3169: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2774 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3170: + if p++; p == pe { + goto _test_eof3170 + } + st_case_3170: + switch data[p] { + case 144: + goto st3171 + case 145: + goto st3177 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st3196 + case 155: + goto st3201 + case 157: + goto st3203 + case 158: + goto st3210 + case 159: + goto st403 + } + goto tr420 + st3171: + if p++; p == pe { + goto _test_eof3171 + } + st_case_3171: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st3172 + case 138: + goto st313 + case 139: + goto st3173 + case 140: + goto st315 + case 141: + goto st3174 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: 
+ goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st3175 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st3176 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st3172: + if p++; p == pe { + goto _test_eof3172 + } + st_case_3172: + if data[p] == 189 { + goto tr2774 + } + goto tr420 + st3173: + if p++; p == pe { + goto _test_eof3173 + } + st_case_3173: + if data[p] == 160 { + goto tr2774 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st3174: + if p++; p == pe { + goto _test_eof3174 + } + st_case_3174: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3175: + if p++; p == pe { + goto _test_eof3175 + } + st_case_3175: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2774 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2774 + } + default: + goto tr2774 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2774 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3176: + if p++; p == pe { + goto _test_eof3176 + } + st_case_3176: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2774 + } + 
default: + goto tr148 + } + goto tr420 + st3177: + if p++; p == pe { + goto _test_eof3177 + } + st_case_3177: + switch data[p] { + case 128: + goto st3178 + case 129: + goto st3179 + case 130: + goto st3180 + case 131: + goto st691 + case 132: + goto st3181 + case 133: + goto st3182 + case 134: + goto st3183 + case 135: + goto st3184 + case 136: + goto st3185 + case 138: + goto st348 + case 139: + goto st3186 + case 140: + goto st3187 + case 141: + goto st3188 + case 146: + goto st3189 + case 147: + goto st3190 + case 150: + goto st3191 + case 151: + goto st3192 + case 152: + goto st3189 + case 153: + goto st3193 + case 154: + goto st3194 + case 155: + goto st538 + case 156: + goto st3195 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st3178: + if p++; p == pe { + goto _test_eof3178 + } + st_case_3178: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2774 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3179: + if p++; p == pe { + goto _test_eof3179 + } + st_case_3179: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr2774 + st3180: + if p++; p == pe { + goto _test_eof3180 + } + st_case_3180: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2774 + st3181: + if p++; p == pe { + goto _test_eof3181 + } + st_case_3181: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2774 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3182: + if p++; p == pe { + goto 
_test_eof3182 + } + st_case_3182: + switch data[p] { + case 179: + goto tr2774 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st3183: + if p++; p == pe { + goto _test_eof3183 + } + st_case_3183: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2774 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3184: + if p++; p == pe { + goto _test_eof3184 + } + st_case_3184: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr2774 + st3185: + if p++; p == pe { + goto _test_eof3185 + } + st_case_3185: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3186: + if p++; p == pe { + goto _test_eof3186 + } + st_case_3186: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2774 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st3187: + if p++; p == pe { + goto _test_eof3187 + } + st_case_3187: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2774 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && 
data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2774 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3188: + if p++; p == pe { + goto _test_eof3188 + } + st_case_3188: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2774 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2774 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2774 + } + default: + goto tr2774 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2774 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2774 + } + default: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3189: + if p++; p == pe { + goto _test_eof3189 + } + st_case_3189: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3190: + if p++; p == pe { + goto _test_eof3190 + } + st_case_3190: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr2774 + st3191: + if p++; p == pe { + goto _test_eof3191 + } + st_case_3191: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2774 + } + default: + goto tr2774 + } + goto tr420 + st3192: + if p++; p == pe { + goto _test_eof3192 + } + st_case_3192: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr2774 
+ st3193: + if p++; p == pe { + goto _test_eof3193 + } + st_case_3193: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr2774 + st3194: + if p++; p == pe { + goto _test_eof3194 + } + st_case_3194: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3195: + if p++; p == pe { + goto _test_eof3195 + } + st_case_3195: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr2774 + } + goto tr420 + st3196: + if p++; p == pe { + goto _test_eof3196 + } + st_case_3196: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st3197 + case 172: + goto st3198 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st3199 + case 190: + goto st3200 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st3197: + if p++; p == pe { + goto _test_eof3197 + } + st_case_3197: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2774 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st3198: + if p++; p == pe { + goto _test_eof3198 + } + st_case_3198: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2774 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3199: + if p++; p == pe { + goto _test_eof3199 + } + st_case_3199: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3200: + if p++; p == pe { + goto _test_eof3200 + } + st_case_3200: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto 
tr148 + } + case data[p] >= 143: + goto tr2774 + } + goto tr420 + st3201: + if p++; p == pe { + goto _test_eof3201 + } + st_case_3201: + switch data[p] { + case 128: + goto st1224 + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st3202 + } + goto tr420 + st3202: + if p++; p == pe { + goto _test_eof3202 + } + st_case_3202: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2774 + } + case data[p] >= 157: + goto tr2774 + } + default: + goto tr148 + } + goto tr420 + st3203: + if p++; p == pe { + goto _test_eof3203 + } + st_case_3203: + switch data[p] { + case 133: + goto st3204 + case 134: + goto st3205 + case 137: + goto st3206 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st3207 + case 169: + goto st3208 + case 170: + goto st3209 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st3204: + if p++; p == pe { + goto _test_eof3204 + } + st_case_3204: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2774 + } + case data[p] >= 165: + goto tr2774 + } + goto tr420 + st3205: + if p++; p == pe { + goto _test_eof3205 + } + st_case_3205: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr2774 + st3206: + if p++; p == pe { + goto _test_eof3206 + } + st_case_3206: + if 130 <= data[p] && data[p] <= 132 { + goto tr2774 + } + goto tr420 + st3207: + if p++; p == pe { + goto _test_eof3207 + } + st_case_3207: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2774 + } + 
case data[p] >= 128: + goto tr2774 + } + goto tr420 + st3208: + if p++; p == pe { + goto _test_eof3208 + } + st_case_3208: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr2774 + st3209: + if p++; p == pe { + goto _test_eof3209 + } + st_case_3209: + if data[p] == 132 { + goto tr2774 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2774 + } + case data[p] >= 155: + goto tr2774 + } + goto tr420 + st3210: + if p++; p == pe { + goto _test_eof3210 + } + st_case_3210: + switch data[p] { + case 160: + goto st147 + case 163: + goto st3211 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st3211: + if p++; p == pe { + goto _test_eof3211 + } + st_case_3211: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr2774 + } + goto tr148 + st3212: + if p++; p == pe { + goto _test_eof3212 + } + st_case_3212: + if data[p] == 160 { + goto st3213 + } + goto tr420 + st3213: + if p++; p == pe { + goto _test_eof3213 + } + st_case_3213: + switch data[p] { + case 128: + goto st3214 + case 129: + goto st3215 + case 132: + goto st3072 + case 135: + goto st3217 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3216 + } + goto tr420 + st3214: + if p++; p == pe { + goto _test_eof3214 + } + st_case_3214: + if data[p] == 129 { + goto tr2774 + } + if 160 <= data[p] { + goto tr2774 + } + goto tr420 + st3215: + if p++; p == pe { + goto _test_eof3215 + } + st_case_3215: + if 192 <= data[p] { + goto tr420 + } + goto tr2774 + st3216: + if p++; p == pe { + goto _test_eof3216 + } + st_case_3216: + goto tr2774 + st3217: + if p++; p == pe { + goto _test_eof3217 + } + st_case_3217: + if 176 <= data[p] { + goto tr420 + } + goto tr2774 +tr4786: +//line NONE:1 +te = p+1 + +//line 
segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4888 + st4888: + if p++; p == pe { + goto _test_eof4888 + } + st_case_4888: +//line segment_words_prod.go:83663 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 131 <= data[p] && data[p] <= 137 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4787: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4889 + st4889: + if p++; p == pe { + goto _test_eof4889 + } + st_case_4889: +//line segment_words_prod.go:83777 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 191: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + 
goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 145 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4788: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4890 + st4890: + if p++; p == pe { + goto _test_eof4890 + } + st_case_4890: +//line segment_words_prod.go:83893 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 135: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 
224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 129: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 196: + if 132 <= data[p] && data[p] <= 133 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr4789: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4891 + st4891: + if p++; p == pe { + goto _test_eof4891 + } + st_case_4891: +//line segment_words_prod.go:84013 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 156: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] 
> 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 133: + switch { + case data[p] < 196: + if 144 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr4790: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4892 + st4892: + if p++; p == pe { + goto _test_eof4892 + } + st_case_4892: +//line segment_words_prod.go:84133 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 176: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 139 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4791: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + 
+//line segment_words.rl:119 +act = 4; + goto st4893 + st4893: + if p++; p == pe { + goto _test_eof4893 + } + st_case_4893: +//line segment_words_prod.go:84249 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 159: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 164: + switch { + case data[p] < 170: + if 167 <= data[p] && data[p] <= 168 { + goto tr1 + } + case data[p] > 173: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr4792: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4894 + st4894: + if p++; p == pe { + goto _test_eof4894 + } + st_case_4894: +//line segment_words_prod.go:84377 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto 
st142 + case 95: + goto tr571 + case 143: + goto tr1 + case 145: + goto tr1 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 176: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr4807: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4895 +tr4467: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4895 + st4895: + if p++; p == pe { + goto _test_eof4895 + } + st_case_4895: +//line segment_words_prod.go:84519 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto 
st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 192 <= data[p] { + goto tr4499 + } + goto tr148 +tr4808: +//line segment_words.rl:72 + + endPos = p + + goto st4896 +tr4468: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4896 + st4896: + if p++; p == pe { + goto _test_eof4896 + } + st_case_4896: +//line segment_words_prod.go:84589 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + if 128 <= data[p] { + goto tr148 + } + goto tr4499 +tr4793: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4897 + st4897: + if p++; p == pe { + goto _test_eof4897 + } + st_case_4897: +//line segment_words_prod.go:84654 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + 
case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr1 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 59: + goto tr1 + } + default: + goto tr2646 + } + case data[p] > 96: + switch { + case data[p] < 123: + if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + case data[p] > 138: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr4794: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4898 + st4898: + if p++; p == pe { + goto _test_eof4898 + } + st_case_4898: +//line segment_words_prod.go:84782 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 
226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 176 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4795: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4899 + st4899: + if p++; p == pe { + goto _test_eof4899 + } + st_case_4899: +//line segment_words_prod.go:84896 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 171 <= data[p] && data[p] <= 179 { + goto tr1 + } + case 
data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4796: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4900 + st4900: + if p++; p == pe { + goto _test_eof4900 + } + st_case_4900: +//line segment_words_prod.go:85010 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 160: + goto st14 + case 161: + goto st15 + case 163: + goto st16 + case 164: + goto st17 + case 165: + goto st18 + case 167: + goto st20 + case 169: + goto st21 + case 171: + goto st22 + case 173: + goto st24 + case 174: + goto st25 + case 175: + goto st26 + case 176: + goto st27 + case 177: + goto st28 + case 179: + goto st29 + case 180: + goto st30 + case 181: + goto st31 + case 182: + goto st32 + case 183: + goto st33 + case 184: + goto st34 + case 185: + goto st35 + case 186: + goto st36 + case 187: + goto st37 + case 188: + goto st38 + case 189: + goto st39 + case 190: + goto st40 + case 191: + goto st41 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 166: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + 
goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] < 196: + if 172 <= data[p] && data[p] <= 178 { + goto st23 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto st19 + } + goto tr4562 +tr4797: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4901 + st4901: + if p++; p == pe { + goto _test_eof4901 + } + st_case_4901: +//line segment_words_prod.go:85180 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st43 + case 129: + goto st44 + case 130: + goto st45 + case 141: + goto st46 + case 156: + goto st47 + case 157: + goto st48 + case 158: + goto st49 + case 159: + goto st50 + case 160: + goto st51 + case 162: + goto st52 + case 164: + goto st53 + case 168: + goto st54 + case 169: + goto st55 + case 170: + goto st56 + case 172: + goto st57 + case 173: + goto st58 + case 174: + goto st59 + case 175: + goto st60 + case 176: + goto st61 + case 179: + goto st62 + case 183: + goto st63 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + 
goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4798: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4902 + st4902: + if p++; p == pe { + goto _test_eof4902 + } + st_case_4902: +//line segment_words_prod.go:85332 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st65 + case 129: + goto st66 + case 131: + goto st67 + case 179: + goto st68 + case 181: + goto st69 + case 183: + goto st70 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 
+tr4799: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4903 + st4903: + if p++; p == pe { + goto _test_eof4903 + } + st_case_4903: +//line segment_words_prod.go:85454 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st72 + case 130: + goto st73 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4800: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4904 + st4904: + if p++; p == pe { + goto _test_eof4904 + } + st_case_4904: +//line segment_words_prod.go:85568 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 153: + goto st75 + case 154: + goto st76 + case 155: + goto st77 + case 160: + goto st78 + case 162: + goto 
st79 + case 163: + goto st80 + case 164: + goto st81 + case 165: + goto st82 + case 166: + goto st83 + case 167: + goto st84 + case 168: + goto st85 + case 169: + goto st86 + case 170: + goto st87 + case 171: + goto st88 + case 175: + goto st89 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4801: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4905 + st4905: + if p++; p == pe { + goto _test_eof4905 + } + st_case_4905: +//line segment_words_prod.go:85708 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 172: + goto st91 + case 184: + goto st92 + case 187: + goto st69 + case 190: + goto st76 + case 191: + goto st93 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + 
case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4802: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4906 + st4906: + if p++; p == pe { + goto _test_eof4906 + } + st_case_4906: +//line segment_words_prod.go:85828 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 144: + goto st95 + case 145: + goto st101 + case 150: + goto st120 + case 155: + goto st125 + case 157: + goto st127 + case 158: + goto st134 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: 
+ goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4803: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4907 + st4907: + if p++; p == pe { + goto _test_eof4907 + } + st_case_4907: +//line segment_words_prod.go:85950 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 160: + goto st137 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && 
data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4809: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4908 + st4908: + if p++; p == pe { + goto _test_eof4908 + } + st_case_4908: +//line segment_words_prod.go:86062 + switch data[p] { + case 151: + goto st142 + case 173: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 146: + if 130 <= data[p] && data[p] <= 133 { + goto tr4499 + } + case data[p] > 159: + switch { + case data[p] > 171: + if 175 <= data[p] { + goto tr4499 + } + case data[p] >= 165: + goto tr4499 + } + default: + goto tr4499 + } + goto tr148 +tr4810: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4909 + st4909: + if p++; p == pe { + goto _test_eof4909 + } + st_case_4909: +//line segment_words_prod.go:86145 + switch data[p] { + case 181: + goto tr4499 + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto 
st136 + } + switch { + case data[p] > 185: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 184: + goto tr4499 + } + goto tr148 +tr4811: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4910 + st4910: + if p++; p == pe { + goto _test_eof4910 + } + st_case_4910: +//line segment_words_prod.go:86219 + switch data[p] { + case 135: + goto st142 + case 140: + goto tr148 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] < 142: + if 134 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 161: + if 163 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr4499 +tr4812: +//line segment_words.rl:72 + + endPos = p + + goto st4911 +tr4473: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4911 + st4911: + if p++; p == pe { + goto _test_eof4911 + } + st_case_4911: +//line segment_words_prod.go:86302 + switch data[p] { + case 182: + goto tr4499 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + 
case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + goto tr148 +tr4813: +//line segment_words.rl:72 + + endPos = p + + goto st4912 + st4912: + if p++; p == pe { + goto _test_eof4912 + } + st_case_4912: +//line segment_words_prod.go:86361 + switch data[p] { + case 130: + goto tr4499 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + goto tr148 +tr4814: +//line segment_words.rl:72 + + endPos = p + + goto st4913 +tr4475: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4913 + st4913: + if p++; p == pe { + goto _test_eof4913 + } + st_case_4913: +//line segment_words_prod.go:86430 + switch data[p] { + case 176: + goto tr4499 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + goto tr148 +tr4815: +//line segment_words.rl:72 + + endPos = p + + goto st4914 +tr4476: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st4914 + st4914: + if p++; p == pe { + goto 
_test_eof4914 + } + st_case_4914: +//line segment_words_prod.go:86499 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] > 152: + if 154 <= data[p] && data[p] <= 160 { + goto tr4499 + } + case data[p] >= 151: + goto tr4499 + } + goto tr148 +tr4816: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4915 + st4915: + if p++; p == pe { + goto _test_eof4915 + } + st_case_4915: +//line segment_words_prod.go:86569 + switch data[p] { + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 144: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 136: + goto tr4499 + } + goto tr148 +tr4817: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4916 + st4916: + if p++; p == pe { + goto _test_eof4916 + } + st_case_4916: +//line segment_words_prod.go:86641 + switch data[p] { + case 135: + goto tr148 + case 179: + 
goto tr148 + case 180: + goto st142 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr148 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr148 + } + goto tr4499 +tr4818: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4917 + st4917: + if p++; p == pe { + goto _test_eof4917 + } + st_case_4917: +//line segment_words_prod.go:86726 + switch data[p] { + case 156: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr148 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr148 + } + goto tr4499 +tr4819: +//line segment_words.rl:72 + + endPos = p + + goto st4918 + st4918: + if p++; p == pe { + goto _test_eof4918 + } + st_case_4918: +//line segment_words_prod.go:86797 + switch 
data[p] { + case 171: + goto tr421 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr4499 +tr4820: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4919 + st4919: + if p++; p == pe { + goto _test_eof4919 + } + st_case_4919: +//line segment_words_prod.go:86873 + switch data[p] { + case 148: + goto tr4499 + case 158: + goto tr4499 + case 169: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 189: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] > 190: + if 192 <= data[p] { + goto tr4499 + } + default: + goto tr4499 + } + goto tr148 +tr4821: +//line segment_words.rl:72 + + endPos = p + + goto st4920 + st4920: + if p++; p == pe { + goto _test_eof4920 + } + st_case_4920: +//line segment_words_prod.go:86948 + switch 
data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + if 143 <= data[p] { + goto tr148 + } + goto tr4499 +tr4822: +//line segment_words.rl:72 + + endPos = p + + goto st4921 + st4921: + if p++; p == pe { + goto _test_eof4921 + } + st_case_4921: +//line segment_words_prod.go:87008 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + if 139 <= data[p] && data[p] <= 140 { + goto tr4499 + } + goto tr148 +tr4823: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4922 + st4922: + if p++; p == pe { + goto _test_eof4922 + } + st_case_4922: +//line segment_words_prod.go:87073 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 
224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 178 <= data[p] { + goto tr4499 + } + goto tr148 +tr4824: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4923 + st4923: + if p++; p == pe { + goto _test_eof4923 + } + st_case_4923: +//line segment_words_prod.go:87138 + switch data[p] { + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 137: + if 138 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 128: + goto tr421 + } + goto tr4499 +tr4825: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4924 + st4924: + if p++; p == pe { + goto _test_eof4924 + } + st_case_4924: +//line segment_words_prod.go:87210 + switch data[p] { + case 160: + goto st1473 + case 161: + goto st1474 + case 162: + goto st168 + case 163: + goto st1475 + case 164: + goto st145 + case 165: + goto st1476 + case 166: + goto st1477 + case 167: + goto st1478 + case 168: + goto st1479 + case 169: + goto st1480 + case 170: + goto st1481 + case 171: + goto st1482 + case 172: + goto st1483 + case 173: + goto st1484 + case 174: + goto st1485 + case 175: + goto st1486 + case 176: + goto st1487 + case 177: + goto st1488 + case 178: + goto st1489 + case 179: + goto st1490 + case 180: + goto st1491 + case 181: + goto 
st1492 + case 182: + goto st1493 + case 183: + goto st1494 + case 184: + goto st1495 + case 185: + goto st1496 + case 186: + goto st1497 + case 187: + goto st1498 + case 188: + goto st1499 + case 189: + goto st1500 + case 190: + goto st1501 + case 191: + goto st1502 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4826: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4925 + st4925: + if p++; p == pe { + goto _test_eof4925 + } + st_case_4925: +//line segment_words_prod.go:87336 + switch data[p] { + case 128: + goto st1504 + case 129: + goto st1505 + case 130: + goto st1506 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st1507 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st1508 + case 157: + goto st1509 + case 158: + goto st1510 + case 159: + goto st1511 + case 160: + goto st1512 + case 161: + goto st219 + case 162: + goto st1513 + case 163: + goto st221 + case 164: + goto st1514 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st1474 + case 169: + goto st1515 + case 170: + goto st1516 + case 172: + goto st147 + case 173: + goto st1517 + case 174: + goto st1518 + case 175: + goto st1519 + case 176: + goto st1520 + case 177: + goto st640 + case 179: + goto st1521 + case 181: + goto st145 + case 182: + 
goto st146 + case 183: + goto st1522 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr4499 +tr4827: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4926 + st4926: + if p++; p == pe { + goto _test_eof4926 + } + st_case_4926: +//line segment_words_prod.go:87496 + switch data[p] { + case 128: + goto st1524 + case 129: + goto st1525 + case 130: + goto st241 + case 131: + goto st1526 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st1527 + case 180: + goto st251 + case 181: + goto st1528 + case 182: + goto st253 + case 183: + goto st1529 + case 184: + goto st255 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + 
goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4828: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4927 + st4927: + if p++; p == pe { + goto _test_eof4927 + } + st_case_4927: +//line segment_words_prod.go:87594 + switch data[p] { + case 128: + goto st1531 + case 130: + goto st1532 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4829: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4928 + st4928: + if p++; p == pe { + goto _test_eof4928 + } + st_case_4928: +//line segment_words_prod.go:87666 + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st1534 + case 154: + goto st147 + case 155: + goto st293 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st1535 + case 161: + goto st272 + case 162: + goto st147 + case 163: + goto st1536 + case 164: + goto st1537 + case 165: + goto st1538 + case 166: + goto st147 + case 167: + goto st1539 + case 168: + goto st1540 + case 169: + goto st1541 + case 170: + goto st1542 + case 171: + goto st1543 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st1544 + case 176: + goto st147 + case 194: + goto st3218 + case 204: + goto 
st3219 + case 205: + goto st3220 + case 210: + goto st3221 + case 214: + goto st3222 + case 215: + goto st3223 + case 216: + goto st3224 + case 217: + goto st3225 + case 219: + goto st3226 + case 220: + goto st3227 + case 221: + goto st3228 + case 222: + goto st3229 + case 223: + goto st3230 + case 224: + goto st3231 + case 225: + goto st3232 + case 226: + goto st3233 + case 227: + goto st3234 + case 234: + goto st3235 + case 239: + goto st3236 + case 240: + goto st3237 + case 243: + goto st3238 + } + if 129 <= data[p] { + goto st145 + } + goto tr4499 + st3218: + if p++; p == pe { + goto _test_eof3218 + } + st_case_3218: + if data[p] == 173 { + goto tr2008 + } + goto tr148 + st3219: + if p++; p == pe { + goto _test_eof3219 + } + st_case_3219: + if 128 <= data[p] { + goto tr2008 + } + goto tr148 + st3220: + if p++; p == pe { + goto _test_eof3220 + } + st_case_3220: + if 176 <= data[p] { + goto tr148 + } + goto tr2008 + st3221: + if p++; p == pe { + goto _test_eof3221 + } + st_case_3221: + if 131 <= data[p] && data[p] <= 137 { + goto tr2008 + } + goto tr148 + st3222: + if p++; p == pe { + goto _test_eof3222 + } + st_case_3222: + if data[p] == 191 { + goto tr2008 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr2008 + } + goto tr148 + st3223: + if p++; p == pe { + goto _test_eof3223 + } + st_case_3223: + if data[p] == 135 { + goto tr2008 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr2008 + } + case data[p] >= 129: + goto tr2008 + } + goto tr148 + st3224: + if p++; p == pe { + goto _test_eof3224 + } + st_case_3224: + if data[p] == 156 { + goto tr2008 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr2008 + } + case data[p] >= 128: + goto tr2008 + } + goto tr148 + st3225: + if p++; p == pe { + goto _test_eof3225 + } + st_case_3225: + if data[p] == 176 { + goto tr2008 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr2008 + } + goto tr148 + st3226: + if p++; p == pe { + goto _test_eof3226 
+ } + st_case_3226: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr2008 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr2008 + } + case data[p] >= 167: + goto tr2008 + } + default: + goto tr2008 + } + goto tr148 + st3227: + if p++; p == pe { + goto _test_eof3227 + } + st_case_3227: + switch data[p] { + case 143: + goto tr2008 + case 145: + goto tr2008 + } + if 176 <= data[p] { + goto tr2008 + } + goto tr148 + st3228: + if p++; p == pe { + goto _test_eof3228 + } + st_case_3228: + if 139 <= data[p] { + goto tr148 + } + goto tr2008 + st3229: + if p++; p == pe { + goto _test_eof3229 + } + st_case_3229: + if 166 <= data[p] && data[p] <= 176 { + goto tr2008 + } + goto tr148 + st3230: + if p++; p == pe { + goto _test_eof3230 + } + st_case_3230: + if 171 <= data[p] && data[p] <= 179 { + goto tr2008 + } + goto tr148 + st3231: + if p++; p == pe { + goto _test_eof3231 + } + st_case_3231: + switch data[p] { + case 160: + goto tr2902 + case 161: + goto tr2903 + case 163: + goto tr2904 + case 164: + goto tr2905 + case 165: + goto tr2906 + case 167: + goto tr2908 + case 169: + goto tr2909 + case 171: + goto tr2910 + case 173: + goto tr2912 + case 174: + goto tr2913 + case 175: + goto tr2914 + case 176: + goto tr2915 + case 177: + goto tr2916 + case 179: + goto tr2917 + case 180: + goto tr2918 + case 181: + goto tr2919 + case 182: + goto tr2920 + case 183: + goto tr2921 + case 184: + goto tr2922 + case 185: + goto tr2923 + case 186: + goto tr2924 + case 187: + goto tr2925 + case 188: + goto tr2926 + case 189: + goto tr2927 + case 190: + goto tr2928 + case 191: + goto tr2929 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto tr2911 + } + case data[p] >= 166: + goto tr2907 + } + goto tr148 +tr2902: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4929 + st4929: + if p++; p == pe { + goto 
_test_eof4929 + } + st_case_4929: +//line segment_words_prod.go:88014 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 155: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 163: + switch { + case data[p] < 169: + if 165 <= data[p] && data[p] <= 167 { + goto tr1 + } + case data[p] > 173: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2903: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4930 + st4930: + if p++; p == pe { + goto _test_eof4930 + } + st_case_4930: +//line segment_words_prod.go:88142 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 
198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 153 <= data[p] && data[p] <= 155 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2904: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4931 + st4931: + if p++; p == pe { + goto _test_eof4931 + } + st_case_4931: +//line segment_words_prod.go:88256 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + 
goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 163: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr4830: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4932 + st4932: + if p++; p == pe { + goto _test_eof4932 + } + st_case_4932: +//line segment_words_prod.go:88379 + switch data[p] { + case 194: + goto st3218 + case 204: + goto st3219 + case 205: + goto st3220 + case 210: + goto st3221 + case 214: + goto st3222 + case 215: + goto st3223 + case 216: + goto st3224 + case 217: + goto st3225 + case 219: + goto st3226 + case 220: + goto st3227 + case 221: + goto st3228 + case 222: + goto st3229 + case 223: + goto st3230 + case 224: + goto st3231 + case 225: + goto st3232 + case 226: + goto st3233 + case 227: + goto st3234 + case 234: + goto st3235 + case 239: + goto st3236 + case 240: + goto st3237 + case 243: + goto st3238 + } + goto st145 + st3232: + if p++; p == pe { + goto _test_eof3232 + } + st_case_3232: + switch data[p] { + case 128: + goto tr2930 + case 129: + goto tr2931 + case 130: + goto tr2932 + case 141: + goto tr2933 + case 156: + goto tr2934 + case 157: + goto tr2935 + case 158: + goto tr2936 + case 159: + goto tr2937 + case 160: + goto tr2938 + case 162: + goto tr2939 + case 164: 
+ goto tr2940 + case 168: + goto tr2941 + case 169: + goto tr2942 + case 170: + goto tr2943 + case 172: + goto tr2944 + case 173: + goto tr2945 + case 174: + goto tr2946 + case 175: + goto tr2947 + case 176: + goto tr2948 + case 179: + goto tr2949 + case 183: + goto tr2950 + } + goto tr148 +tr2930: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4933 + st4933: + if p++; p == pe { + goto _test_eof4933 + } + st_case_4933: +//line segment_words_prod.go:88491 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 171 <= data[p] && data[p] <= 190 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2931: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4934 + st4934: + if p++; p == pe 
{ + goto _test_eof4934 + } + st_case_4934: +//line segment_words_prod.go:88605 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 158: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 160: + switch { + case data[p] < 177: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 162: + goto tr1 + } + case data[p] > 180: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2932: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4935 + st4935: + if p++; p == pe { + goto _test_eof4935 + } + st_case_4935: +//line segment_words_prod.go:88738 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + 
case 95: + goto tr571 + case 143: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 130: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 154 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2933: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4936 + st4936: + if p++; p == pe { + goto _test_eof4936 + } + st_case_4936: +//line segment_words_prod.go:88858 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + 
goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 157 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2934: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4937 + st4937: + if p++; p == pe { + goto _test_eof4937 + } + st_case_4937: +//line segment_words_prod.go:88972 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case 
data[p] < 146: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] < 196: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2935: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4938 + st4938: + if p++; p == pe { + goto _test_eof4938 + } + st_case_4938: +//line segment_words_prod.go:89090 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 146: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 147: + switch { + case data[p] < 196: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto 
st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2936: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4939 + st4939: + if p++; p == pe { + goto _test_eof4939 + } + st_case_4939: +//line segment_words_prod.go:89208 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr4831: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4940 + st4940: + if p++; p == pe { + goto _test_eof4940 + } + st_case_4940: +//line segment_words_prod.go:89331 + 
switch data[p] { + case 158: + goto st288 + case 159: + goto st289 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 160 <= data[p] { + goto tr4499 + } + goto st145 +tr4832: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4941 + st4941: + if p++; p == pe { + goto _test_eof4941 + } + st_case_4941: +//line segment_words_prod.go:89400 + switch data[p] { + case 172: + goto st1546 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st1547 + case 185: + goto st967 + case 187: + goto st1548 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st1549 + case 191: + goto st1550 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr4499 +tr4833: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4942 + st4942: + if p++; p == pe { + 
goto _test_eof4942 + } + st_case_4942: +//line segment_words_prod.go:89495 + switch data[p] { + case 144: + goto st1552 + case 145: + goto st1558 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st1573 + case 155: + goto st1577 + case 157: + goto st1579 + case 158: + goto st1586 + case 159: + goto st403 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4834: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st4943 + st4943: + if p++; p == pe { + goto _test_eof4943 + } + st_case_4943: +//line segment_words_prod.go:89577 + switch data[p] { + case 160: + goto st1589 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr2937: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4944 + st4944: + if p++; p == pe { + goto _test_eof4944 + } + st_case_4944: +//line segment_words_prod.go:89641 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + 
case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 158: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 148 <= data[p] && data[p] <= 156 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2938: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4945 + st4945: + if p++; p == pe { + goto _test_eof4945 + } + st_case_4945: +//line segment_words_prod.go:89769 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + 
case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 139 <= data[p] && data[p] <= 142 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2939: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4946 + st4946: + if p++; p == pe { + goto _test_eof4946 + } + st_case_4946: +//line segment_words_prod.go:89883 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 169: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto 
st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2940: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4947 + st4947: + if p++; p == pe { + goto _test_eof4947 + } + st_case_4947: +//line segment_words_prod.go:89995 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 160: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 171: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto 
st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2941: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4948 + st4948: + if p++; p == pe { + goto _test_eof4948 + } + st_case_4948: +//line segment_words_prod.go:90113 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 151 <= data[p] && data[p] <= 155 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2942: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4949 + st4949: + if p++; p == pe { + goto _test_eof4949 + } + st_case_4949: +//line segment_words_prod.go:90227 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 191: + goto 
tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 149: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 158: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2943: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4950 + st4950: + if p++; p == pe { + goto _test_eof4950 + } + st_case_4950: +//line segment_words_prod.go:90347 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + 
case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 190 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2944: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4951 + st4951: + if p++; p == pe { + goto _test_eof4951 + } + st_case_4951: +//line segment_words_prod.go:90461 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 180: + switch { + case 
data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2945: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4952 + st4952: + if p++; p == pe { + goto _test_eof4952 + } + st_case_4952: +//line segment_words_prod.go:90589 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 133 <= data[p] && data[p] <= 170 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case 
data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2946: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4953 + st4953: + if p++; p == pe { + goto _test_eof4953 + } + st_case_4953: +//line segment_words_prod.go:90717 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 196: + if 161 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2947: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p 
+ +//line segment_words.rl:119 +act = 4; + goto st4954 + st4954: + if p++; p == pe { + goto _test_eof4954 + } + st_case_4954: +//line segment_words_prod.go:90835 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2948: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4955 + st4955: + if p++; p == pe { + goto _test_eof4955 + } + st_case_4955: +//line segment_words_prod.go:90949 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 
+ case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 164 <= data[p] && data[p] <= 183 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2949: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4956 + st4956: + if p++; p == pe { + goto _test_eof4956 + } + st_case_4956: +//line segment_words_prod.go:91063 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 173: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto 
st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 148: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 144 <= data[p] && data[p] <= 146 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 184: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 185: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2950: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4957 + st4957: + if p++; p == pe { + goto _test_eof4957 + } + st_case_4957: +//line segment_words_prod.go:91193 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= 
data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 181: + switch { + case data[p] < 196: + if 188 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 + st3233: + if p++; p == pe { + goto _test_eof3233 + } + st_case_3233: + switch data[p] { + case 128: + goto tr2951 + case 129: + goto tr2952 + case 131: + goto tr2953 + case 179: + goto tr2954 + case 181: + goto tr2955 + case 183: + goto tr2956 + } + goto tr148 +tr2951: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4958 + st4958: + if p++; p == pe { + goto _test_eof4958 + } + st_case_4958: +//line segment_words_prod.go:91331 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 140: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + 
default: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 196: + if 170 <= data[p] && data[p] <= 174 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2952: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4959 + st4959: + if p++; p == pe { + goto _test_eof4959 + } + st_case_4959: +//line segment_words_prod.go:91449 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 160: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 164: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2953: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line 
segment_words.rl:119 +act = 4; + goto st4960 + st4960: + if p++; p == pe { + goto _test_eof4960 + } + st_case_4960: +//line segment_words_prod.go:91567 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 144 <= data[p] && data[p] <= 176 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2954: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4961 + st4961: + if p++; p == pe { + goto _test_eof4961 + } + st_case_4961: +//line segment_words_prod.go:91681 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 
206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 175 <= data[p] && data[p] <= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2955: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4962 + st4962: + if p++; p == pe { + goto _test_eof4962 + } + st_case_4962: +//line segment_words_prod.go:91795 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 191: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 
+ case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2956: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4963 + st4963: + if p++; p == pe { + goto _test_eof4963 + } + st_case_4963: +//line segment_words_prod.go:91907 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + 
goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3234: + if p++; p == pe { + goto _test_eof3234 + } + st_case_3234: + switch data[p] { + case 128: + goto tr2957 + case 130: + goto tr2958 + } + goto tr148 +tr2957: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4964 + st4964: + if p++; p == pe { + goto _test_eof4964 + } + st_case_4964: +//line segment_words_prod.go:92033 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 170 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2958: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4965 + st4965: + if p++; p == pe { + goto _test_eof4965 + } + st_case_4965: +//line 
segment_words_prod.go:92147 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 153 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3235: + if p++; p == pe { + goto _test_eof3235 + } + st_case_3235: + switch data[p] { + case 153: + goto tr2959 + case 154: + goto tr2960 + case 155: + goto tr2961 + case 160: + goto tr2962 + case 162: + goto tr2963 + case 163: + goto tr2964 + case 164: + goto tr2965 + case 165: + goto tr2966 + case 166: + goto tr2967 + case 167: + goto tr2968 + case 168: + goto tr2969 + case 169: + goto tr2970 + case 170: + goto tr2971 + case 171: + goto tr2972 + case 175: + goto tr2973 + } + goto tr148 +tr2959: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4966 + st4966: + if p++; p == pe { + 
goto _test_eof4966 + } + st_case_4966: +//line segment_words_prod.go:92299 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 175: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 178: + switch { + case data[p] < 196: + if 180 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2960: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4967 + st4967: + if p++; p == pe { + goto _test_eof4967 + } + st_case_4967: +//line segment_words_prod.go:92417 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto 
st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 158 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2961: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4968 + st4968: + if p++; p == pe { + goto _test_eof4968 + } + st_case_4968: +//line segment_words_prod.go:92531 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 
234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2962: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4969 + st4969: + if p++; p == pe { + goto _test_eof4969 + } + st_case_4969: +//line segment_words_prod.go:92645 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 130: + goto tr1 + case 134: + goto tr1 + case 139: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 163 <= data[p] && data[p] <= 167 { + 
goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2963: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4970 + st4970: + if p++; p == pe { + goto _test_eof4970 + } + st_case_4970: +//line segment_words_prod.go:92765 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 129 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2964: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line 
segment_words.rl:119 +act = 4; + goto st4971 + st4971: + if p++; p == pe { + goto _test_eof4971 + } + st_case_4971: +//line segment_words_prod.go:92893 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 178: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 133 <= data[p] && data[p] <= 159 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2965: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4972 + st4972: + if p++; p == pe { + goto _test_eof4972 + } + st_case_4972: +//line segment_words_prod.go:93021 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + 
case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2966: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4973 + st4973: + if p++; p == pe { + goto _test_eof4973 + } + st_case_4973: +//line segment_words_prod.go:93135 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto 
st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 135 <= data[p] && data[p] <= 147 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2967: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4974 + st4974: + if p++; p == pe { + goto _test_eof4974 + } + st_case_4974: +//line segment_words_prod.go:93249 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 179: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] 
<= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2968: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4975 + st4975: + if p++; p == pe { + goto _test_eof4975 + } + st_case_4975: +//line segment_words_prod.go:93377 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 165: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr1 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 59: + goto tr1 + } + default: + goto tr2646 + } + case data[p] > 96: + switch { + case data[p] < 123: + 
if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + case data[p] > 128: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2969: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4976 + st4976: + if p++; p == pe { + goto _test_eof4976 + } + st_case_4976: +//line segment_words_prod.go:93507 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 169 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2970: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4977 + st4977: + if p++; p == pe { + goto 
_test_eof4977 + } + st_case_4977: +//line segment_words_prod.go:93621 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 131: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 140: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 187 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2971: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4978 + st4978: + if p++; p == pe { + goto _test_eof4978 + } + st_case_4978: +//line segment_words_prod.go:93741 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 176: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 
205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 178: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 180: + switch { + case data[p] < 190: + if 183 <= data[p] && data[p] <= 184 { + goto tr1 + } + case data[p] > 191: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2972: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4979 + st4979: + if p++; p == pe { + goto _test_eof4979 + } + st_case_4979: +//line segment_words_prod.go:93866 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 129: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto 
st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 171: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 175: + switch { + case data[p] < 196: + if 181 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2973: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4980 + st4980: + if p++; p == pe { + goto _test_eof4980 + } + st_case_4980: +//line segment_words_prod.go:93986 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 163: + switch { + case data[p] < 65: + 
if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] < 196: + if 172 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 + st3236: + if p++; p == pe { + goto _test_eof3236 + } + st_case_3236: + switch data[p] { + case 172: + goto tr2974 + case 184: + goto tr2975 + case 187: + goto tr2955 + case 190: + goto tr2960 + case 191: + goto tr2976 + } + goto tr148 +tr2974: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4981 + st4981: + if p++; p == pe { + goto _test_eof4981 + } + st_case_4981: +//line segment_words_prod.go:94122 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 158: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + 
case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2975: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4982 + st4982: + if p++; p == pe { + goto _test_eof4982 + } + st_case_4982: +//line segment_words_prod.go:94234 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2976: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4983 + st4983: + if p++; p == pe { + goto _test_eof4983 + } + st_case_4983: +//line 
segment_words_prod.go:94352 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 185 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3237: + if p++; p == pe { + goto _test_eof3237 + } + st_case_3237: + switch data[p] { + case 144: + goto tr2977 + case 145: + goto tr2978 + case 150: + goto tr2979 + case 155: + goto tr2980 + case 157: + goto tr2981 + case 158: + goto tr2982 + } + goto tr148 +tr2977: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4984 + st4984: + if p++; p == pe { + goto _test_eof4984 + } + st_case_4984: +//line segment_words_prod.go:94486 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 135: + goto st96 + case 139: + goto st97 
+ case 141: + goto st98 + case 168: + goto st99 + case 171: + goto st100 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2978: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4985 + st4985: + if p++; p == pe { + goto _test_eof4985 + } + st_case_4985: +//line segment_words_prod.go:94606 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st102 + case 129: + goto st103 + case 130: + goto st104 + case 132: + goto st105 + case 133: + goto st106 + case 134: + goto st107 + case 135: + goto st108 + case 136: + goto st109 + case 139: + goto st110 + case 140: + goto st111 + case 141: + goto st112 + case 146: + goto st113 + case 147: + goto st114 + case 150: + goto st115 + case 151: + goto st116 + case 152: + goto st113 + case 153: + goto st117 + case 154: + goto 
st118 + case 156: + goto st119 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2979: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4986 + st4986: + if p++; p == pe { + goto _test_eof4986 + } + st_case_4986: +//line segment_words_prod.go:94754 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 171: + goto st121 + case 172: + goto st122 + case 189: + goto st123 + case 190: + goto st124 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 
217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2980: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4987 + st4987: + if p++; p == pe { + goto _test_eof4987 + } + st_case_4987: +//line segment_words_prod.go:94872 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 178: + goto st126 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto 
tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2981: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4988 + st4988: + if p++; p == pe { + goto _test_eof4988 + } + st_case_4988: +//line segment_words_prod.go:94984 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 133: + goto st128 + case 134: + goto st129 + case 137: + goto st130 + case 168: + goto st131 + case 169: + goto st132 + case 170: + goto st133 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2982: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + 
goto st4989 + st4989: + if p++; p == pe { + goto _test_eof4989 + } + st_case_4989: +//line segment_words_prod.go:95106 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 163: + goto st135 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3238: + if p++; p == pe { + goto _test_eof3238 + } + st_case_3238: + if data[p] == 160 { + goto tr2983 + } + goto tr148 +tr2983: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4990 + st4990: + if p++; p == pe { + goto _test_eof4990 + } + st_case_4990: +//line segment_words_prod.go:95227 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st138 + case 129: + goto st139 + case 132: + goto st1 + case 135: + goto st2 + case 194: + goto st1461 + case 195: + 
goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 133 <= data[p] && data[p] <= 134 { + goto st140 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2905: +//line segment_words.rl:72 + + endPos = p + + goto st4991 + st4991: + if p++; p == pe { + goto _test_eof4991 + } + st_case_4991: +//line segment_words_prod.go:95344 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 189: + goto tr4562 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 
223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 48: + goto tr2646 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 132 <= data[p] && data[p] <= 185 { + goto tr4562 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto tr4830 + } + default: + goto tr4806 + } + default: + goto tr2008 + } + goto tr1 +tr2906: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4992 + st4992: + if p++; p == pe { + goto _test_eof4992 + } + st_case_4992: +//line segment_words_prod.go:95460 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 144: + goto tr4562 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 164: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case 
data[p] > 122: + if 152 <= data[p] && data[p] <= 161 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2907: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4993 + st4993: + if p++; p == pe { + goto _test_eof4993 + } + st_case_4993: +//line segment_words_prod.go:95590 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 188: + goto tr1 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 129 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && 
data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2908: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4994 + st4994: + if p++; p == pe { + goto _test_eof4994 + } + st_case_4994: +//line segment_words_prod.go:95720 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 142: + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 48: + goto tr2646 + } + case data[p] > 122: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr4562 + } + case data[p] >= 133: + goto tr4562 + } + default: + goto tr2008 + } + case data[p] > 150: + switch { + case data[p] < 196: + switch { + case data[p] > 161: + if 164 <= data[p] && data[p] <= 193 { + goto tr4562 + } + case data[p] >= 152: + goto tr4562 + } + case data[p] > 218: + switch { + case data[p] < 235: + if 228 <= 
data[p] && data[p] <= 233 { + goto tr4562 + } + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + default: + goto st286 + } + default: + goto st145 + } + default: + goto tr4562 + } + goto tr1 +tr2909: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4995 + st4995: + if p++; p == pe { + goto _test_eof4995 + } + st_case_4995: +//line segment_words_prod.go:95862 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 145: + goto tr1 + case 181: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] < 59: + switch { + case data[p] > 47: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + default: + goto tr1 + } + case data[p] > 64: + switch { + case data[p] > 90: + if 91 <= data[p] && data[p] <= 96 { + goto tr1 + } + case data[p] >= 65: + goto tr2008 + } + default: + goto tr1 + } + case data[p] > 122: + switch { + case data[p] < 139: + switch { + case data[p] > 130: + if 135 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 123: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] 
<= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + default: + goto tr2008 + } + goto tr4562 +tr2910: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4996 + st4996: + if p++; p == pe { + goto _test_eof4996 + } + st_case_4996: +//line segment_words_prod.go:96008 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 134: + goto tr4562 + case 138: + goto tr4562 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 164: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 142 <= data[p] && data[p] <= 161 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto 
tr1 +tr2911: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4997 + st4997: + if p++; p == pe { + goto _test_eof4997 + } + st_case_4997: +//line segment_words_prod.go:96140 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 188: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 129: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 131: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2912: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4998 + st4998: + if p++; p == pe { + goto _test_eof4998 + } + st_case_4998: +//line segment_words_prod.go:96260 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + 
case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 135: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 136: + switch { + case data[p] < 162: + switch { + case data[p] > 141: + if 150 <= data[p] && data[p] <= 151 { + goto tr1 + } + case data[p] >= 139: + goto tr1 + } + case data[p] > 163: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2913: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st4999 + st4999: + if p++; p == pe { + goto _test_eof4999 + } + st_case_4999: +//line segment_words_prod.go:96393 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 130: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + 
case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2914: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5000 + st5000: + if p++; p == pe { + goto _test_eof5000 + } + st_case_5000: +//line segment_words_prod.go:96509 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 151: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto 
st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 138: + if 134 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2915: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5001 + st5001: + if p++; p == pe { + goto _test_eof5001 + } + st_case_5001: +//line segment_words_prod.go:96634 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto 
tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2916: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5002 + st5002: + if p++; p == pe { + goto _test_eof5002 + } + st_case_5002: +//line segment_words_prod.go:96762 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 133: + goto tr4562 + case 137: + goto tr4562 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 151: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 142 <= data[p] && data[p] <= 148 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] 
> 161: + switch { + case data[p] < 228: + switch { + case data[p] > 193: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] >= 164: + goto tr4562 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2917: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5003 + st5003: + if p++; p == pe { + goto _test_eof5003 + } + st_case_5003: +//line segment_words_prod.go:96899 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 134: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 136: + switch { + case data[p] < 162: + switch { + case data[p] > 141: + if 149 <= data[p] && data[p] <= 150 { + goto tr1 + } + case data[p] >= 138: + goto tr1 + } + case data[p] > 
163: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2918: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5004 + st5004: + if p++; p == pe { + goto _test_eof5004 + } + st_case_5004: +//line segment_words_prod.go:97032 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr4804 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr4808 + case 205: + goto tr4810 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr4813 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr4816 + case 215: + goto tr4817 + case 216: + goto tr4818 + case 217: + goto tr4819 + case 219: + goto tr4820 + case 220: + goto tr4821 + case 221: + goto tr4822 + case 222: + goto tr4823 + case 223: + goto tr4824 + case 224: + goto tr4825 + case 225: + goto tr4826 + case 226: + goto tr4827 + case 227: + goto tr4828 + case 234: + goto tr4829 + case 237: + goto tr4831 + case 239: + goto tr4832 + case 240: + goto tr4833 + case 243: + goto tr4834 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 129 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr4830 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2919: +//line NONE:1 +te = p+1 + +//line 
segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5005 + st5005: + if p++; p == pe { + goto _test_eof5005 + } + st_case_5005: +//line segment_words_prod.go:97160 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 133: + goto tr4562 + case 137: + goto tr4562 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 152: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 142 <= data[p] && data[p] <= 150 { + goto tr4562 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 161: + switch { + case data[p] < 228: + switch { + case data[p] > 193: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] >= 164: + goto tr4562 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr4562 + } + case data[p] >= 235: + goto st286 + } + default: + goto tr4562 + } + default: + goto tr4562 + } + goto tr1 +tr2920: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5006 + st5006: + if p++; p == pe { + 
goto _test_eof5006 + } + st_case_5006: +//line segment_words_prod.go:97297 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 130 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2921: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5007 + st5007: + if p++; p == pe { + goto _test_eof5007 + } + st_case_5007: +//line segment_words_prod.go:97411 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 138: + goto tr1 + case 150: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + 
goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 143: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] < 178: + if 152 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 179: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2922: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5008 + st5008: + if p++; p == pe { + goto _test_eof5008 + } + st_case_5008: +//line segment_words_prod.go:97538 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 177: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 
+ case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 180 <= data[p] && data[p] <= 186 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2923: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5009 + st5009: + if p++; p == pe { + goto _test_eof5009 + } + st_case_5009: +//line segment_words_prod.go:97654 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { 
+ case data[p] < 196: + if 135 <= data[p] && data[p] <= 142 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2924: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5010 + st5010: + if p++; p == pe { + goto _test_eof5010 + } + st_case_5010: +//line segment_words_prod.go:97768 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 177: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 185: + switch { + case data[p] < 196: + if 187 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2925: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5011 + 
st5011: + if p++; p == pe { + goto _test_eof5011 + } + st_case_5011: +//line segment_words_prod.go:97888 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 136 <= data[p] && data[p] <= 141 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2926: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5012 + st5012: + if p++; p == pe { + goto _test_eof5012 + } + st_case_5012: +//line segment_words_prod.go:98002 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 181: + goto tr1 + case 183: + goto tr1 + case 185: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + 
goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 152: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr4562 +tr2927: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5013 + st5013: + if p++; p == pe { + goto _test_eof5013 + } + st_case_5013: +//line segment_words_prod.go:98126 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + 
goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 177 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr2928: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5014 + st5014: + if p++; p == pe { + goto _test_eof5014 + } + st_case_5014: +//line segment_words_prod.go:98240 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 134: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + 
case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 135: + switch { + case data[p] < 153: + if 141 <= data[p] && data[p] <= 151 { + goto tr1 + } + case data[p] > 188: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr4562 +tr2929: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5015 + st5015: + if p++; p == pe { + goto _test_eof5015 + } + st_case_5015: +//line segment_words_prod.go:98368 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 134: + goto tr1 + case 194: + goto st1461 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st147 + case 205: + goto st1462 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st1463 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st1464 + case 215: + goto st1465 + case 216: + goto st1466 + case 217: + goto st1467 + case 219: + goto st1468 + case 220: + goto st1469 + case 221: + goto st1470 + case 222: + goto st293 + case 223: + goto st1471 + case 224: + goto st1472 + case 225: + goto st1503 + case 226: + goto st1523 + case 227: + goto st1530 + case 234: + goto st1533 + case 237: + goto st287 + case 239: + goto st1545 + case 240: + goto st1551 + case 243: + goto st1588 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 +tr4469: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line 
segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5016 + st5016: + if p++; p == pe { + goto _test_eof5016 + } + st_case_5016: +//line segment_words_prod.go:98484 + switch data[p] { + case 173: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 146: + if 130 <= data[p] && data[p] <= 133 { + goto tr4499 + } + case data[p] > 159: + switch { + case data[p] > 171: + if 175 <= data[p] { + goto tr4499 + } + case data[p] >= 165: + goto tr4499 + } + default: + goto tr4499 + } + goto tr148 +tr4938: +//line segment_words.rl:72 + + endPos = p + + goto st5017 +tr4470: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5017 + st5017: + if p++; p == pe { + goto _test_eof5017 + } + st_case_5017: +//line segment_words_prod.go:98570 + switch data[p] { + case 194: + goto tr4916 + case 204: + goto tr4917 + case 205: + goto tr4918 + case 210: + goto tr4919 + case 214: + goto tr4920 + case 215: + goto tr4921 + case 216: + goto tr4922 + case 217: + goto tr4923 + case 219: + goto tr4924 + case 220: + goto tr4925 + case 221: + goto tr4926 + case 222: + goto tr4927 + case 223: + goto tr4928 + case 224: + goto tr4929 + case 225: + goto tr4930 + case 226: + goto tr4931 + case 227: + goto tr4932 + case 234: + goto tr4933 + case 239: + goto tr4934 + case 240: + goto tr4935 + case 243: + goto tr4936 + } + if 128 <= data[p] { + goto tr2395 + } + goto tr4499 +tr4916: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos 
= p + +//line segment_words.rl:161 +act = 12; + goto st5018 + st5018: + if p++; p == pe { + goto _test_eof5018 + } + st_case_5018: +//line segment_words_prod.go:98635 + switch data[p] { + case 173: + goto tr1 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4917: +//line segment_words.rl:72 + + endPos = p + + goto st5019 + st5019: + if p++; p == pe { + goto _test_eof5019 + } + st_case_5019: +//line segment_words_prod.go:98694 + switch data[p] { + case 194: + goto tr4937 + case 204: + goto tr4938 + case 205: + goto tr4939 + case 210: + goto tr4940 + case 214: + goto tr4941 + case 215: + goto tr4942 + case 216: + goto tr4943 + case 217: + goto tr4944 + case 219: + goto tr4945 + case 220: + goto tr4946 + case 221: + goto tr4947 + case 222: + goto tr4948 + case 223: + goto tr4949 + case 224: + goto tr4950 + case 225: + goto tr4951 + case 226: + goto tr4952 + case 227: + goto tr4953 + case 234: + goto tr4954 + case 239: + goto tr4955 + case 240: + goto tr4956 + case 243: + goto tr4957 + } + if 128 <= data[p] { + goto tr1 + } + goto tr4763 +tr4937: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5020 + st5020: + if p++; p == pe { + goto _test_eof5020 + } + st_case_5020: +//line segment_words_prod.go:98759 + switch data[p] { + case 173: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 
216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4939: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5021 + st5021: + if p++; p == pe { + goto _test_eof5021 + } + st_case_5021: +//line segment_words_prod.go:98823 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 176 <= data[p] { + goto tr4499 + } + goto tr2395 +tr4940: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5022 + st5022: + if p++; p == pe { + goto _test_eof5022 + } + st_case_5022: +//line segment_words_prod.go:98888 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2395 + } 
+ goto tr4499 +tr4941: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5023 + st5023: + if p++; p == pe { + goto _test_eof5023 + } + st_case_5023: +//line segment_words_prod.go:98953 + switch data[p] { + case 191: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr2395 + } + goto tr4499 +tr4942: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5024 + st5024: + if p++; p == pe { + goto _test_eof5024 + } + st_case_5024: +//line segment_words_prod.go:99020 + switch data[p] { + case 135: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr2395 + } + case data[p] >= 129: + goto tr2395 + } + goto tr4499 +tr4943: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5025 + st5025: + if p++; p == pe { + goto _test_eof5025 + } + 
st_case_5025: +//line segment_words_prod.go:99092 + switch data[p] { + case 156: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + goto tr4499 +tr4944: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5026 + st5026: + if p++; p == pe { + goto _test_eof5026 + } + st_case_5026: +//line segment_words_prod.go:99164 + switch data[p] { + case 176: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr2395 + } + goto tr4499 +tr4945: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5027 + st5027: + if p++; p == pe { + goto _test_eof5027 + } + st_case_5027: +//line segment_words_prod.go:99231 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + 
case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr2395 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 167: + goto tr2395 + } + default: + goto tr2395 + } + goto tr4499 +tr4946: +//line segment_words.rl:72 + + endPos = p + + goto st5028 + st5028: + if p++; p == pe { + goto _test_eof5028 + } + st_case_5028: +//line segment_words_prod.go:99305 + switch data[p] { + case 143: + goto tr2395 + case 145: + goto tr2395 + case 194: + goto tr4916 + case 204: + goto tr4917 + case 205: + goto tr4918 + case 210: + goto tr4919 + case 214: + goto tr4920 + case 215: + goto tr4921 + case 216: + goto tr4922 + case 217: + goto tr4923 + case 219: + goto tr4924 + case 220: + goto tr4925 + case 221: + goto tr4926 + case 222: + goto tr4927 + case 223: + goto tr4928 + case 224: + goto tr4929 + case 225: + goto tr4930 + case 226: + goto tr4931 + case 227: + goto tr4932 + case 234: + goto tr4933 + case 239: + goto tr4934 + case 240: + goto tr4935 + case 243: + goto tr4936 + } + if 176 <= data[p] { + goto tr2395 + } + goto tr4499 +tr4918: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5029 + st5029: + if p++; p == pe { + goto _test_eof5029 + } + st_case_5029: +//line segment_words_prod.go:99374 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 
220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 176 <= data[p] { + goto tr4763 + } + goto tr1 +tr4919: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5030 + st5030: + if p++; p == pe { + goto _test_eof5030 + } + st_case_5030: +//line segment_words_prod.go:99439 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr1 + } + goto tr4763 +tr4920: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5031 + st5031: + if p++; p == pe { + goto _test_eof5031 + } + st_case_5031: +//line segment_words_prod.go:99504 + switch data[p] { + case 191: + goto tr1 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 
239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr1 + } + goto tr4763 +tr4921: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5032 + st5032: + if p++; p == pe { + goto _test_eof5032 + } + st_case_5032: +//line segment_words_prod.go:99571 + switch data[p] { + case 135: + goto tr1 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr4763 +tr4922: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5033 + st5033: + if p++; p == pe { + goto _test_eof5033 + } + st_case_5033: +//line segment_words_prod.go:99643 + switch data[p] { + case 156: + goto tr1 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + switch { + case data[p] > 133: + if 144 <= 
data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr4763 +tr4923: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5034 + st5034: + if p++; p == pe { + goto _test_eof5034 + } + st_case_5034: +//line segment_words_prod.go:99715 + switch data[p] { + case 176: + goto tr1 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr4763 +tr4924: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5035 + st5035: + if p++; p == pe { + goto _test_eof5035 + } + st_case_5035: +//line segment_words_prod.go:99782 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto 
tr1 + } + case data[p] >= 167: + goto tr1 + } + default: + goto tr1 + } + goto tr4763 +tr4925: +//line segment_words.rl:72 + + endPos = p + + goto st5036 + st5036: + if p++; p == pe { + goto _test_eof5036 + } + st_case_5036: +//line segment_words_prod.go:99856 + switch data[p] { + case 143: + goto tr1 + case 145: + goto tr1 + case 194: + goto tr4937 + case 204: + goto tr4938 + case 205: + goto tr4939 + case 210: + goto tr4940 + case 214: + goto tr4941 + case 215: + goto tr4942 + case 216: + goto tr4943 + case 217: + goto tr4944 + case 219: + goto tr4945 + case 220: + goto tr4946 + case 221: + goto tr4947 + case 222: + goto tr4948 + case 223: + goto tr4949 + case 224: + goto tr4950 + case 225: + goto tr4951 + case 226: + goto tr4952 + case 227: + goto tr4953 + case 234: + goto tr4954 + case 239: + goto tr4955 + case 240: + goto tr4956 + case 243: + goto tr4957 + } + if 176 <= data[p] { + goto tr1 + } + goto tr4763 +tr4947: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5037 + st5037: + if p++; p == pe { + goto _test_eof5037 + } + st_case_5037: +//line segment_words_prod.go:99925 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 139 <= data[p] { + goto tr4499 + } + goto tr2395 +tr4948: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5038 + st5038: + if p++; p == pe { + goto _test_eof5038 + } + st_case_5038: +//line segment_words_prod.go:99990 + switch data[p] 
{ + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 166 <= data[p] && data[p] <= 176 { + goto tr2395 + } + goto tr4499 +tr4949: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5039 + st5039: + if p++; p == pe { + goto _test_eof5039 + } + st_case_5039: +//line segment_words_prod.go:100055 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 171 <= data[p] && data[p] <= 179 { + goto tr2395 + } + goto tr4499 +tr4950: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5040 + st5040: + if p++; p == pe { + goto _test_eof5040 + } + st_case_5040: +//line segment_words_prod.go:100120 + switch data[p] { + case 160: + goto st2650 + case 161: + goto st2651 + case 163: + goto st2652 + case 164: + goto st2653 + case 165: + goto st2654 + case 167: + goto st2656 + case 169: + goto st2657 + case 171: + goto st2658 + case 173: + goto st2660 + case 174: + goto st2661 + case 175: + goto st2662 + case 176: + goto st2663 + case 177: + goto 
st2664 + case 179: + goto st2665 + case 180: + goto st2666 + case 181: + goto st2667 + case 182: + goto st2668 + case 183: + goto st2669 + case 184: + goto st2670 + case 185: + goto st2671 + case 186: + goto st2672 + case 187: + goto st2673 + case 188: + goto st2674 + case 189: + goto st2675 + case 190: + goto st2676 + case 191: + goto st2677 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st2659 + } + case data[p] >= 166: + goto st2655 + } + goto tr4499 +tr4951: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5041 + st5041: + if p++; p == pe { + goto _test_eof5041 + } + st_case_5041: +//line segment_words_prod.go:100242 + switch data[p] { + case 128: + goto st2679 + case 129: + goto st2680 + case 130: + goto st2681 + case 141: + goto st2682 + case 156: + goto st2683 + case 157: + goto st2684 + case 158: + goto st2685 + case 159: + goto st2686 + case 160: + goto st2687 + case 162: + goto st2688 + case 164: + goto st2689 + case 168: + goto st2690 + case 169: + goto st2691 + case 170: + goto st2692 + case 172: + goto st2693 + case 173: + goto st2694 + case 174: + goto st2695 + case 175: + goto st2696 + case 176: + goto st2697 + case 179: + goto st2698 + case 183: + goto st2699 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + 
goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4952: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5042 + st5042: + if p++; p == pe { + goto _test_eof5042 + } + st_case_5042: +//line segment_words_prod.go:100346 + switch data[p] { + case 128: + goto st2701 + case 129: + goto st2702 + case 131: + goto st2703 + case 179: + goto st2704 + case 181: + goto st2705 + case 183: + goto st2706 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4953: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5043 + st5043: + if p++; p == pe { + goto _test_eof5043 + } + st_case_5043: +//line segment_words_prod.go:100420 + switch data[p] { + case 128: + goto st2708 + case 130: + goto st2709 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + 
goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4954: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5044 + st5044: + if p++; p == pe { + goto _test_eof5044 + } + st_case_5044: +//line segment_words_prod.go:100486 + switch data[p] { + case 153: + goto st2711 + case 154: + goto st2712 + case 155: + goto st2713 + case 160: + goto st2714 + case 162: + goto st2715 + case 163: + goto st2716 + case 164: + goto st2717 + case 165: + goto st2718 + case 166: + goto st2719 + case 167: + goto st2720 + case 168: + goto st2721 + case 169: + goto st2722 + case 170: + goto st2723 + case 171: + goto st2724 + case 175: + goto st2725 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4955: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5045 + st5045: + if p++; p == pe { + goto _test_eof5045 + } + st_case_5045: +//line segment_words_prod.go:100578 + switch data[p] { + case 172: + goto st2727 + case 184: + goto st2728 + case 187: + goto st2705 + case 190: + goto st2712 + case 191: + goto st2729 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + 
goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4956: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5046 + st5046: + if p++; p == pe { + goto _test_eof5046 + } + st_case_5046: +//line segment_words_prod.go:100650 + switch data[p] { + case 144: + goto st2731 + case 145: + goto st2737 + case 150: + goto st2756 + case 155: + goto st2761 + case 157: + goto st2763 + case 158: + goto st2770 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4957: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5047 +tr4498: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5047 + st5047: + if p++; p == pe { + goto _test_eof5047 + } + st_case_5047: +//line segment_words_prod.go:100739 + switch data[p] { + case 160: + goto st2773 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + 
case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4926: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5048 + st5048: + if p++; p == pe { + goto _test_eof5048 + } + st_case_5048: +//line segment_words_prod.go:100803 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 139 <= data[p] { + goto tr4763 + } + goto tr1 +tr4927: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5049 + st5049: + if p++; p == pe { + goto _test_eof5049 + } + st_case_5049: +//line segment_words_prod.go:100868 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 166 <= data[p] && data[p] <= 176 { + goto tr1 + } + goto tr4763 +tr4928: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5050 + st5050: + if p++; p == pe { + goto _test_eof5050 + } 
+ st_case_5050: +//line segment_words_prod.go:100933 + switch data[p] { + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + if 171 <= data[p] && data[p] <= 179 { + goto tr1 + } + goto tr4763 +tr4929: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5051 + st5051: + if p++; p == pe { + goto _test_eof5051 + } + st_case_5051: +//line segment_words_prod.go:100998 + switch data[p] { + case 160: + goto st14 + case 161: + goto st15 + case 163: + goto st16 + case 164: + goto st17 + case 165: + goto st18 + case 167: + goto st20 + case 169: + goto st21 + case 171: + goto st22 + case 173: + goto st24 + case 174: + goto st25 + case 175: + goto st26 + case 176: + goto st27 + case 177: + goto st28 + case 179: + goto st29 + case 180: + goto st30 + case 181: + goto st31 + case 182: + goto st32 + case 183: + goto st33 + case 184: + goto st34 + case 185: + goto st35 + case 186: + goto st36 + case 187: + goto st37 + case 188: + goto st38 + case 189: + goto st39 + case 190: + goto st40 + case 191: + goto st41 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 
227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st23 + } + case data[p] >= 166: + goto st19 + } + goto tr4763 +tr4930: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5052 + st5052: + if p++; p == pe { + goto _test_eof5052 + } + st_case_5052: +//line segment_words_prod.go:101120 + switch data[p] { + case 128: + goto st43 + case 129: + goto st44 + case 130: + goto st45 + case 141: + goto st46 + case 156: + goto st47 + case 157: + goto st48 + case 158: + goto st49 + case 159: + goto st50 + case 160: + goto st51 + case 162: + goto st52 + case 164: + goto st53 + case 168: + goto st54 + case 169: + goto st55 + case 170: + goto st56 + case 172: + goto st57 + case 173: + goto st58 + case 174: + goto st59 + case 175: + goto st60 + case 176: + goto st61 + case 179: + goto st62 + case 183: + goto st63 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4931: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5053 + st5053: + if p++; p == pe { + goto _test_eof5053 + } + st_case_5053: +//line segment_words_prod.go:101224 + switch data[p] { + case 128: + goto st65 + case 129: + goto st66 + case 131: + goto st67 + case 179: + goto st68 + case 181: + goto st69 + case 183: + goto 
st70 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4932: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5054 + st5054: + if p++; p == pe { + goto _test_eof5054 + } + st_case_5054: +//line segment_words_prod.go:101298 + switch data[p] { + case 128: + goto st72 + case 130: + goto st73 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4933: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5055 + st5055: + if p++; p == pe { + goto _test_eof5055 + } + st_case_5055: +//line segment_words_prod.go:101364 + switch data[p] { + case 153: + goto st75 + case 154: + goto st76 + case 155: + goto st77 + case 160: + goto st78 + case 162: + goto st79 + case 163: + goto st80 + case 164: + goto st81 + case 165: + goto st82 + case 166: + goto st83 + case 167: + goto st84 + case 168: + goto st85 + case 169: + goto st86 + case 
170: + goto st87 + case 171: + goto st88 + case 175: + goto st89 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4934: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5056 + st5056: + if p++; p == pe { + goto _test_eof5056 + } + st_case_5056: +//line segment_words_prod.go:101456 + switch data[p] { + case 172: + goto st91 + case 184: + goto st92 + case 187: + goto st69 + case 190: + goto st76 + case 191: + goto st93 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4935: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5057 + st5057: + if p++; p == pe { + goto _test_eof5057 + } + st_case_5057: +//line segment_words_prod.go:101528 + switch data[p] { + case 144: + goto st95 + case 145: + goto st101 + case 150: + goto st120 + case 155: + goto st125 + case 157: + goto st127 + case 158: + goto st134 + case 194: + 
goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4936: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 12; + goto st5058 + st5058: + if p++; p == pe { + goto _test_eof5058 + } + st_case_5058: +//line segment_words_prod.go:101602 + switch data[p] { + case 160: + goto st137 + case 194: + goto st2636 + case 204: + goto st2637 + case 205: + goto st2638 + case 210: + goto st2639 + case 214: + goto st2640 + case 215: + goto st2641 + case 216: + goto st2642 + case 217: + goto st2643 + case 219: + goto st2644 + case 220: + goto st2645 + case 221: + goto st2646 + case 222: + goto st2647 + case 223: + goto st2648 + case 224: + goto st2649 + case 225: + goto st2678 + case 226: + goto st2700 + case 227: + goto st2707 + case 234: + goto st2710 + case 239: + goto st2726 + case 240: + goto st2730 + case 243: + goto st2772 + } + goto tr4763 +tr4471: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5059 + st5059: + if p++; p == pe { + goto _test_eof5059 + } + st_case_5059: +//line segment_words_prod.go:101670 + switch data[p] { + case 181: + goto tr4499 + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 
221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr4499 + } + goto tr2395 +tr4472: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5060 + st5060: + if p++; p == pe { + goto _test_eof5060 + } + st_case_5060: +//line segment_words_prod.go:101752 + switch data[p] { + case 134: + goto tr148 + case 140: + goto tr148 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] < 142: + if 136 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 161: + if 163 <= data[p] { + goto tr148 + } + default: + goto tr148 + } + goto tr4499 +tr4474: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5061 + st5061: + if p++; p == pe { + goto _test_eof5061 + } + st_case_5061: +//line segment_words_prod.go:101829 + switch data[p] { + case 130: + goto tr4499 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 
216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2395 + } + goto tr148 +tr4477: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5062 + st5062: + if p++; p == pe { + goto _test_eof5062 + } + st_case_5062: +//line segment_words_prod.go:101900 + switch data[p] { + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr4499 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr4499 + } + default: + goto tr2395 + } + goto tr148 +tr4478: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5063 + st5063: + if p++; p == pe { + goto _test_eof5063 + } + st_case_5063: +//line segment_words_prod.go:101980 + switch data[p] { + case 135: + goto tr2395 + case 179: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + 
goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2395 + } + goto tr4499 +tr4479: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5064 + st5064: + if p++; p == pe { + goto _test_eof5064 + } + st_case_5064: +//line segment_words_prod.go:102067 + switch data[p] { + case 156: + goto tr2395 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2395 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2395 + } + goto tr4499 +tr4480: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5065 + st5065: + if p++; p == pe { + goto _test_eof5065 + } + st_case_5065: +//line segment_words_prod.go:102142 + switch data[p] { + case 171: + goto tr126 + case 176: + goto tr2395 + case 194: + goto tr4783 
+ case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr2395 + } + goto tr4499 +tr4481: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5066 + st5066: + if p++; p == pe { + goto _test_eof5066 + } + st_case_5066: +//line segment_words_prod.go:102229 + switch data[p] { + case 148: + goto tr4499 + case 158: + goto tr4499 + case 169: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 150: + goto tr2395 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 189: + goto tr4499 + } + default: + goto 
tr126 + } + goto tr148 +tr4482: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5067 + st5067: + if p++; p == pe { + goto _test_eof5067 + } + st_case_5067: +//line segment_words_prod.go:102318 + switch data[p] { + case 144: + goto tr148 + case 194: + goto tr4916 + case 204: + goto tr4917 + case 205: + goto tr4918 + case 210: + goto tr4919 + case 214: + goto tr4920 + case 215: + goto tr4921 + case 216: + goto tr4922 + case 217: + goto tr4923 + case 219: + goto tr4924 + case 220: + goto tr4925 + case 221: + goto tr4926 + case 222: + goto tr4927 + case 223: + goto tr4928 + case 224: + goto tr4929 + case 225: + goto tr4930 + case 226: + goto tr4931 + case 227: + goto tr4932 + case 234: + goto tr4933 + case 239: + goto tr4934 + case 240: + goto tr4935 + case 243: + goto tr4936 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2395 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr4499 +tr4483: +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + + goto st5068 + st5068: + if p++; p == pe { + goto _test_eof5068 + } + st_case_5068: +//line segment_words_prod.go:102393 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr4499 + } + goto tr2395 +tr4484: +//line NONE:1 +te = p+1 + +//line 
segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5069 + st5069: + if p++; p == pe { + goto _test_eof5069 + } + st_case_5069: +//line segment_words_prod.go:102467 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr4499 + } + case data[p] >= 166: + goto tr2395 + } + goto tr148 +tr4485: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5070 + st5070: + if p++; p == pe { + goto _test_eof5070 + } + st_case_5070: +//line segment_words_prod.go:102541 + switch data[p] { + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2395 + } + default: + goto tr148 + } + 
goto tr4499 +tr4486: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5071 + st5071: + if p++; p == pe { + goto _test_eof5071 + } + st_case_5071: +//line segment_words_prod.go:102626 + switch data[p] { + case 160: + goto st3239 + case 161: + goto st3240 + case 162: + goto st168 + case 163: + goto st2652 + case 164: + goto st3241 + case 165: + goto st3242 + case 166: + goto st3243 + case 167: + goto st3244 + case 168: + goto st3245 + case 169: + goto st3246 + case 170: + goto st3247 + case 171: + goto st3248 + case 172: + goto st3249 + case 173: + goto st3250 + case 174: + goto st3251 + case 175: + goto st3252 + case 176: + goto st3253 + case 177: + goto st3254 + case 178: + goto st3255 + case 179: + goto st3256 + case 180: + goto st3257 + case 181: + goto st3258 + case 182: + goto st3259 + case 183: + goto st3260 + case 184: + goto st2670 + case 185: + goto st3261 + case 186: + goto st2672 + case 187: + goto st3262 + case 188: + goto st3263 + case 189: + goto st3264 + case 190: + goto st3265 + case 191: + goto st2677 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 + st3239: + if p++; p == pe { + goto _test_eof3239 + } + st_case_3239: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st3240: + if p++; p 
== pe { + goto _test_eof3240 + } + st_case_3240: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st3241: + if p++; p == pe { + goto _test_eof3241 + } + st_case_3241: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2395 + st3242: + if p++; p == pe { + goto _test_eof3242 + } + st_case_3242: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr0 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr0 + } + goto tr2395 + st3243: + if p++; p == pe { + goto _test_eof3243 + } + st_case_3243: + switch data[p] { + case 132: + goto tr0 + case 169: + goto tr0 + case 177: + goto tr0 + case 188: + goto tr2395 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr0 + } + case data[p] >= 129: + goto tr2395 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr0 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2395 + } + default: + goto tr0 + } + default: + goto tr0 + } + goto tr148 + st3244: + if p++; p == pe { + goto _test_eof3244 + } + st_case_3244: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr0 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr0 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr0 + } + case data[p] >= 143: + goto tr0 + } + default: + goto tr0 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr0 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto 
tr0 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2395 + st3245: + if p++; p == pe { + goto _test_eof3245 + } + st_case_3245: + if data[p] == 188 { + goto tr2395 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2395 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3246: + if p++; p == pe { + goto _test_eof3246 + } + st_case_3246: + if data[p] == 157 { + goto tr0 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr0 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr0 + } + case data[p] >= 142: + goto tr0 + } + default: + goto tr0 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr0 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr0 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2395 + st3247: + if p++; p == pe { + goto _test_eof3247 + } + st_case_3247: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + 
goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2395 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3248: + if p++; p == pe { + goto _test_eof3248 + } + st_case_3248: + switch data[p] { + case 134: + goto tr0 + case 138: + goto tr0 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr0 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr0 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr0 + } + goto tr2395 + st3249: + if p++; p == pe { + goto _test_eof3249 + } + st_case_3249: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3250: + if p++; p == pe { + goto _test_eof3250 + } + st_case_3250: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2395 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2395 + } + default: + goto tr2395 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 
162: + goto tr2395 + } + default: + goto tr148 + } + default: + goto tr2395 + } + goto tr0 + st3251: + if p++; p == pe { + goto _test_eof3251 + } + st_case_3251: + switch data[p] { + case 130: + goto tr2395 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3252: + if p++; p == pe { + goto _test_eof3252 + } + st_case_3252: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2395 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] >= 138: + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st3253: + if p++; p == pe { + goto _test_eof3253 + } + st_case_3253: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3254: + if p++; p == pe { + goto _test_eof3254 + } + st_case_3254: + switch data[p] { + case 133: + goto tr0 + case 137: + goto tr0 + case 151: + goto tr0 
+ } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr0 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr0 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr0 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr0 + } + default: + goto tr126 + } + default: + goto tr148 + } + goto tr2395 + st3255: + if p++; p == pe { + goto _test_eof3255 + } + st_case_3255: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2395 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3256: + if p++; p == pe { + goto _test_eof3256 + } + st_case_3256: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2395 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2395 + } + default: + goto tr2395 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st3257: + if p++; p == pe { + goto _test_eof3257 + } + st_case_3257: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 
129: + goto tr2395 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2395 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3258: + if p++; p == pe { + goto _test_eof3258 + } + st_case_3258: + switch data[p] { + case 133: + goto tr0 + case 137: + goto tr0 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr0 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr0 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr126 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr0 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr0 + } + default: + goto tr0 + } + goto tr2395 + st3259: + if p++; p == pe { + goto _test_eof3259 + } + st_case_3259: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3260: + if p++; p == pe { + goto _test_eof3260 + } + st_case_3260: + switch data[p] { + case 138: + goto tr2395 + case 150: + goto tr2395 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2395 + } + case data[p] >= 166: + goto tr126 + } + default: + goto tr2395 + } + goto tr0 + st3261: + if p++; p == pe { + goto _test_eof3261 + } + st_case_3261: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 135: + goto tr2395 + } + 
goto tr0 + st3262: + if p++; p == pe { + goto _test_eof3262 + } + st_case_3262: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 136: + goto tr2395 + } + goto tr0 + st3263: + if p++; p == pe { + goto _test_eof3263 + } + st_case_3263: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2395 + case 183: + goto tr2395 + case 185: + goto tr2395 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2395 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2395 + } + default: + goto tr126 + } + goto tr0 + st3264: + if p++; p == pe { + goto _test_eof3264 + } + st_case_3264: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st3265: + if p++; p == pe { + goto _test_eof3265 + } + st_case_3265: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2395 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2395 + } + case data[p] >= 141: + goto tr2395 + } + default: + goto tr148 + } + goto tr0 +tr4487: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5072 + st5072: + if p++; p == pe { + goto _test_eof5072 + } + st_case_5072: +//line segment_words_prod.go:103682 + switch data[p] { + case 128: + goto st2679 + case 129: + goto st3266 + case 130: + goto st3267 + case 131: + goto st202 + case 132: + goto st3268 + case 135: + goto st3319 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st3573 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto 
st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st3574 + case 157: + goto st3575 + case 158: + goto st2685 + case 159: + goto st3576 + case 160: + goto st3577 + case 161: + goto st219 + case 162: + goto st3578 + case 163: + goto st221 + case 164: + goto st3579 + case 165: + goto st1649 + case 167: + goto st1650 + case 168: + goto st3580 + case 169: + goto st2691 + case 170: + goto st3581 + case 172: + goto st3582 + case 173: + goto st3583 + case 174: + goto st3584 + case 175: + goto st3585 + case 176: + goto st3586 + case 177: + goto st1659 + case 179: + goto st3587 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st2699 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 136: + if 133 <= data[p] && data[p] <= 134 { + goto st3318 + } + case data[p] > 152: + switch { + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + case data[p] >= 180: + goto st147 + } + default: + goto st145 + } + goto tr4499 + st3266: + if p++; p == pe { + goto _test_eof3266 + } + st_case_3266: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2395 + } + case data[p] >= 128: + goto tr126 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2395 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + 
goto tr2395 + } + default: + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st3267: + if p++; p == pe { + goto _test_eof3267 + } + st_case_3267: + if data[p] == 143 { + goto tr2395 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2395 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2395 + } + default: + goto tr126 + } + goto tr0 + st3268: + if p++; p == pe { + goto _test_eof3268 + } + st_case_3268: + if 128 <= data[p] { + goto tr2984 + } + goto tr148 +tr2984: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5073 + st5073: + if p++; p == pe { + goto _test_eof5073 + } + st_case_5073: +//line segment_words_prod.go:103918 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case 
data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st3269: + if p++; p == pe { + goto _test_eof3269 + } + st_case_3269: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2984 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr2985 + st3270: + if p++; p == pe { + goto _test_eof3270 + } + st_case_3270: + if 128 <= data[p] { + goto tr2984 + } + goto tr2 + st3271: + if p++; p == pe { + goto _test_eof3271 + } + st_case_3271: + switch data[p] { + case 181: + goto tr2985 + case 190: + goto tr2985 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr2985 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr2985 + } + goto tr2984 + st3272: + if p++; p == pe { + goto _test_eof3272 + } + st_case_3272: + if data[p] == 130 { + goto tr2985 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2984 + } + goto tr148 + st3273: + if p++; p == pe { + goto _test_eof3273 + } + st_case_3273: + if data[p] == 190 { + goto tr2985 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr2985 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2985 + } + default: + goto tr2984 + } + goto tr148 + st3274: + if p++; p == pe { + goto _test_eof3274 + } + st_case_3274: + switch data[p] { + case 135: + goto tr2984 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2984 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2984 + } + goto tr2985 + st3275: + if p++; p == pe { + goto _test_eof3275 + } + st_case_3275: + if data[p] == 156 { + goto tr2984 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2984 + } + case 
data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2984 + } + goto tr2985 + st3276: + if p++; p == pe { + goto _test_eof3276 + } + st_case_3276: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr2984 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2984 + } + goto tr2985 + st3277: + if p++; p == pe { + goto _test_eof3277 + } + st_case_3277: + switch data[p] { + case 148: + goto tr2985 + case 158: + goto tr2985 + case 169: + goto tr2985 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2984 + } + case data[p] >= 150: + goto tr2984 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr2985 + } + case data[p] >= 189: + goto tr2985 + } + default: + goto tr421 + } + goto tr148 + st3278: + if p++; p == pe { + goto _test_eof3278 + } + st_case_3278: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2984 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2985 + st3279: + if p++; p == pe { + goto _test_eof3279 + } + st_case_3279: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr2985 + } + goto tr2984 + st3280: + if p++; p == pe { + goto _test_eof3280 + } + st_case_3280: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr2985 + } + case data[p] >= 166: + goto tr2984 + } + goto tr148 + st3281: + if p++; p == pe { + goto _test_eof3281 + } + st_case_3281: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + 
if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2984 + } + default: + goto tr148 + } + goto tr2985 + st3282: + if p++; p == pe { + goto _test_eof3282 + } + st_case_3282: + switch data[p] { + case 160: + goto st3283 + case 161: + goto st3284 + case 162: + goto st168 + case 163: + goto st3285 + case 164: + goto st3286 + case 165: + goto st3287 + case 166: + goto st3288 + case 167: + goto st3289 + case 168: + goto st3290 + case 169: + goto st3291 + case 170: + goto st3292 + case 171: + goto st3293 + case 172: + goto st3294 + case 173: + goto st3295 + case 174: + goto st3296 + case 175: + goto st3297 + case 176: + goto st3298 + case 177: + goto st3299 + case 178: + goto st3300 + case 179: + goto st3301 + case 180: + goto st3302 + case 181: + goto st3303 + case 182: + goto st3304 + case 183: + goto st3305 + case 184: + goto st3306 + case 185: + goto st3307 + case 186: + goto st3308 + case 187: + goto st3309 + case 188: + goto st3310 + case 189: + goto st3311 + case 190: + goto st3312 + case 191: + goto st3313 + } + goto tr2985 + st3283: + if p++; p == pe { + goto _test_eof3283 + } + st_case_3283: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3284: + if p++; p == pe { + goto _test_eof3284 + } + st_case_3284: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3285: + if p++; p == pe { + goto _test_eof3285 + } + st_case_3285: + if 163 <= data[p] { + goto tr2984 + } + goto tr2 + st3286: + if p++; p == pe { + goto _test_eof3286 + } + st_case_3286: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr2984 + st3287: + if p++; p == pe { + goto _test_eof3287 + } + st_case_3287: + switch data[p] { + 
case 144: + goto tr148 + case 176: + goto tr2 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2 + } + goto tr2984 + st3288: + if p++; p == pe { + goto _test_eof3288 + } + st_case_3288: + switch data[p] { + case 132: + goto tr2 + case 169: + goto tr2 + case 177: + goto tr2 + case 188: + goto tr2984 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr2 + } + case data[p] >= 129: + goto tr2984 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr2 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr2984 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr148 + st3289: + if p++; p == pe { + goto _test_eof3289 + } + st_case_3289: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr2 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr2 + } + case data[p] >= 143: + goto tr2 + } + default: + goto tr2 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2984 + st3290: + if p++; p == pe { + goto _test_eof3290 + } + st_case_3290: + if data[p] == 188 { + goto tr2984 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + 
case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr2984 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3291: + if p++; p == pe { + goto _test_eof3291 + } + st_case_3291: + if data[p] == 157 { + goto tr2 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr2 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + default: + goto tr2 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr2984 + st3292: + if p++; p == pe { + goto _test_eof3292 + } + st_case_3292: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr2984 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3293: + if p++; p == pe { + goto _test_eof3293 + } + st_case_3293: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] 
> 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr2 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2 + } + goto tr2984 + st3294: + if p++; p == pe { + goto _test_eof3294 + } + st_case_3294: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2984 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3295: + if p++; p == pe { + goto _test_eof3295 + } + st_case_3295: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2984 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2984 + } + default: + goto tr2984 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr2984 + } + default: + goto tr148 + } + default: + goto tr2984 + } + goto tr2 + st3296: + if p++; p == pe { + goto _test_eof3296 + } + st_case_3296: + switch data[p] { + case 130: + goto tr2984 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && 
data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr2984 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3297: + if p++; p == pe { + goto _test_eof3297 + } + st_case_3297: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2984 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr2984 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3298: + if p++; p == pe { + goto _test_eof3298 + } + st_case_3298: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr2984 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3299: + if p++; p == pe { + goto _test_eof3299 + } + st_case_3299: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + case 151: + goto tr2 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr2 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr2 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto 
tr2984 + st3300: + if p++; p == pe { + goto _test_eof3300 + } + st_case_3300: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2984 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3301: + if p++; p == pe { + goto _test_eof3301 + } + st_case_3301: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr2984 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr2984 + } + default: + goto tr2984 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3302: + if p++; p == pe { + goto _test_eof3302 + } + st_case_3302: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr2984 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr2984 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3303: + if p++; p == pe { + goto _test_eof3303 + } + st_case_3303: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 
{ + goto tr2 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr2 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr2 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr2984 + st3304: + if p++; p == pe { + goto _test_eof3304 + } + st_case_3304: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3305: + if p++; p == pe { + goto _test_eof3305 + } + st_case_3305: + switch data[p] { + case 138: + goto tr2984 + case 150: + goto tr2984 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr2984 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2984 + } + goto tr2 + st3306: + if p++; p == pe { + goto _test_eof3306 + } + st_case_3306: + if data[p] == 177 { + goto tr2984 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr2984 + } + goto tr2 + st3307: + if p++; p == pe { + goto _test_eof3307 + } + st_case_3307: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr2984 + } + goto tr2 + st3308: + if p++; p == pe { + goto _test_eof3308 + } + st_case_3308: + if data[p] == 177 { + goto tr2984 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr2984 + } + case data[p] >= 180: + goto tr2984 + } + goto tr2 + st3309: + if p++; p == 
pe { + goto _test_eof3309 + } + st_case_3309: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr2984 + } + goto tr2 + st3310: + if p++; p == pe { + goto _test_eof3310 + } + st_case_3310: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr2984 + case 183: + goto tr2984 + case 185: + goto tr2984 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr2984 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr2984 + } + default: + goto tr421 + } + goto tr2 + st3311: + if p++; p == pe { + goto _test_eof3311 + } + st_case_3311: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3312: + if p++; p == pe { + goto _test_eof3312 + } + st_case_3312: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr2984 + } + case data[p] >= 128: + goto tr2984 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr2984 + } + case data[p] >= 141: + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3313: + if p++; p == pe { + goto _test_eof3313 + } + st_case_3313: + if data[p] == 134 { + goto tr2984 + } + goto tr2 + st3314: + if p++; p == pe { + goto _test_eof3314 + } + st_case_3314: + switch data[p] { + case 128: + goto st3315 + case 129: + goto st3316 + case 130: + goto st3317 + case 131: + goto st202 + case 132: + goto st3268 + case 135: + goto st3319 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st3320 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st3321 + case 157: + goto st3322 
+ case 158: + goto st3323 + case 159: + goto st3324 + case 160: + goto st3325 + case 161: + goto st219 + case 162: + goto st3326 + case 163: + goto st221 + case 164: + goto st3327 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st3328 + case 169: + goto st3329 + case 170: + goto st3330 + case 172: + goto st3331 + case 173: + goto st3332 + case 174: + goto st3333 + case 175: + goto st3334 + case 176: + goto st3335 + case 177: + goto st640 + case 179: + goto st3336 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st3337 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 136: + if 133 <= data[p] && data[p] <= 134 { + goto st3318 + } + case data[p] > 152: + switch { + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + case data[p] >= 180: + goto st147 + } + default: + goto st145 + } + goto tr2985 + st3315: + if p++; p == pe { + goto _test_eof3315 + } + st_case_3315: + if 171 <= data[p] && data[p] <= 190 { + goto tr2984 + } + goto tr2 + st3316: + if p++; p == pe { + goto _test_eof3316 + } + st_case_3316: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr2984 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr2984 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr2984 + } + default: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3317: + if p++; p == pe { + goto _test_eof3317 + } + st_case_3317: + if data[p] == 143 { + goto tr2984 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr2984 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr2984 + } + default: + goto tr421 + } + goto tr2 + st3318: + if p++; p == pe { + 
goto _test_eof3318 + } + st_case_3318: + goto tr2984 + st3319: + if p++; p == pe { + goto _test_eof3319 + } + st_case_3319: + if 192 <= data[p] { + goto tr148 + } + goto tr2984 + st3320: + if p++; p == pe { + goto _test_eof3320 + } + st_case_3320: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr2 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr2 + } + default: + goto tr2984 + } + goto tr148 + st3321: + if p++; p == pe { + goto _test_eof3321 + } + st_case_3321: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2984 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2984 + } + goto tr2 + st3322: + if p++; p == pe { + goto _test_eof3322 + } + st_case_3322: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2984 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3323: + if p++; p == pe { + goto _test_eof3323 + } + st_case_3323: + if 180 <= data[p] { + goto tr2984 + } + goto tr2 + st3324: + if p++; p == pe { + goto _test_eof3324 + } + st_case_3324: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2 + } + goto tr2984 + st3325: + if p++; p == pe { + goto _test_eof3325 + } + st_case_3325: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2984 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + 
goto tr2 + st3326: + if p++; p == pe { + goto _test_eof3326 + } + st_case_3326: + if data[p] == 169 { + goto tr2984 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3327: + if p++; p == pe { + goto _test_eof3327 + } + st_case_3327: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3328: + if p++; p == pe { + goto _test_eof3328 + } + st_case_3328: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3329: + if p++; p == pe { + goto _test_eof3329 + } + st_case_3329: + if data[p] == 191 { + goto tr2984 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr2984 + } + case data[p] >= 149: + goto tr2984 + } + goto tr2 + st3330: + if p++; p == pe { + goto _test_eof3330 + } + st_case_3330: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2984 + } + default: + goto tr421 + } + goto tr2 + st3331: + if p++; p == pe { + goto _test_eof3331 + } + st_case_3331: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2984 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3332: + if p++; p == pe { + goto _test_eof3332 + } + st_case_3332: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 154: + goto tr2 + } + default: + goto tr421 + } + goto tr2984 + st3333: + if p++; p == pe { + goto _test_eof3333 + } + 
st_case_3333: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2984 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr2984 + } + goto tr2 + st3334: + if p++; p == pe { + goto _test_eof3334 + } + st_case_3334: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 166: + goto tr2984 + } + goto tr148 + st3335: + if p++; p == pe { + goto _test_eof3335 + } + st_case_3335: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3336: + if p++; p == pe { + goto _test_eof3336 + } + st_case_3336: + if data[p] == 173 { + goto tr2984 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2984 + } + case data[p] >= 144: + goto tr2984 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2984 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2984 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3337: + if p++; p == pe { + goto _test_eof3337 + } + st_case_3337: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr2984 + } + case data[p] >= 128: + goto tr2984 + } + goto tr2 + st3338: + if p++; p == pe { + goto _test_eof3338 + } + st_case_3338: + switch data[p] { + case 128: + goto st3339 + case 129: + goto st3340 + case 130: + goto st241 + case 131: + goto st3341 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st3342 + 
case 180: + goto st251 + case 181: + goto st3343 + case 182: + goto st253 + case 183: + goto st3344 + case 184: + goto st255 + } + goto tr2985 + st3339: + if p++; p == pe { + goto _test_eof3339 + } + st_case_3339: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr2984 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr2984 + } + default: + goto st142 + } + goto tr2 + st3340: + if p++; p == pe { + goto _test_eof3340 + } + st_case_3340: + switch data[p] { + case 165: + goto tr2 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr2984 + } + default: + goto tr2 + } + goto tr571 + st3341: + if p++; p == pe { + goto _test_eof3341 + } + st_case_3341: + if 144 <= data[p] && data[p] <= 176 { + goto tr2984 + } + goto tr2 + st3342: + if p++; p == pe { + goto _test_eof3342 + } + st_case_3342: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr2 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr2 + } + default: + goto tr2984 + } + goto tr148 + st3343: + if p++; p == pe { + goto _test_eof3343 + } + st_case_3343: + if data[p] == 191 { + goto tr2984 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr2 + } + case data[p] >= 168: + goto tr2 + } + goto tr148 + st3344: + if p++; p == pe { + goto _test_eof3344 + } + st_case_3344: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2984 + } + case data[p] >= 152: + goto tr148 + } + 
default: + goto tr148 + } + goto tr2 + st3345: + if p++; p == pe { + goto _test_eof3345 + } + st_case_3345: + switch data[p] { + case 128: + goto st3346 + case 130: + goto st3347 + case 132: + goto st3348 + case 133: + goto st3318 + case 134: + goto st3349 + case 136: + goto st3350 + case 137: + goto st3429 + } + goto tr2985 + st3346: + if p++; p == pe { + goto _test_eof3346 + } + st_case_3346: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr2984 + } + goto tr2 + st3347: + if p++; p == pe { + goto _test_eof3347 + } + st_case_3347: + if 153 <= data[p] && data[p] <= 154 { + goto tr2984 + } + goto tr2 + st3348: + if p++; p == pe { + goto _test_eof3348 + } + st_case_3348: + switch { + case data[p] > 173: + if 177 <= data[p] { + goto tr2984 + } + case data[p] >= 133: + goto tr148 + } + goto tr2 + st3349: + if p++; p == pe { + goto _test_eof3349 + } + st_case_3349: + switch { + case data[p] < 160: + if 143 <= data[p] && data[p] <= 159 { + goto tr2 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + goto tr2984 + st3350: + if p++; p == pe { + goto _test_eof3350 + } + st_case_3350: + if 128 <= data[p] && data[p] <= 158 { + goto tr3053 + } + goto tr2 +tr3053: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5074 + st5074: + if p++; p == pe { + goto _test_eof5074 + } + st_case_5074: +//line segment_words_prod.go:106179 + switch data[p] { + case 194: + goto st3351 + case 204: + goto st3352 + case 205: + goto st3353 + case 210: + goto st3354 + case 214: + goto st3355 + case 215: + goto st3356 + case 216: + goto st3357 + case 217: + goto st3358 + case 219: + goto st3359 + case 220: + goto st3360 + case 221: + goto st3361 + case 222: + goto st3362 + case 223: + goto st3363 + case 224: + goto st3364 + case 225: + goto st3393 + case 226: + goto st3417 + case 
227: + goto st3424 + case 234: + goto st3430 + case 237: + goto st3447 + case 239: + goto st3450 + case 240: + goto st3455 + case 243: + goto st3497 + } + if 235 <= data[p] && data[p] <= 236 { + goto st3446 + } + goto tr5002 + st3351: + if p++; p == pe { + goto _test_eof3351 + } + st_case_3351: + if data[p] == 173 { + goto tr3053 + } + goto tr2985 + st3352: + if p++; p == pe { + goto _test_eof3352 + } + st_case_3352: + if 128 <= data[p] { + goto tr3053 + } + goto tr2985 + st3353: + if p++; p == pe { + goto _test_eof3353 + } + st_case_3353: + if 176 <= data[p] { + goto tr2985 + } + goto tr3053 + st3354: + if p++; p == pe { + goto _test_eof3354 + } + st_case_3354: + if 131 <= data[p] && data[p] <= 137 { + goto tr3053 + } + goto tr2985 + st3355: + if p++; p == pe { + goto _test_eof3355 + } + st_case_3355: + if data[p] == 191 { + goto tr3053 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3053 + } + goto tr2985 + st3356: + if p++; p == pe { + goto _test_eof3356 + } + st_case_3356: + if data[p] == 135 { + goto tr3053 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3053 + } + case data[p] >= 129: + goto tr3053 + } + goto tr2985 + st3357: + if p++; p == pe { + goto _test_eof3357 + } + st_case_3357: + if data[p] == 156 { + goto tr3053 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3358: + if p++; p == pe { + goto _test_eof3358 + } + st_case_3358: + if data[p] == 176 { + goto tr3053 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3053 + } + goto tr2985 + st3359: + if p++; p == pe { + goto _test_eof3359 + } + st_case_3359: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3053 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3053 + } + case data[p] >= 167: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3360: + if 
p++; p == pe { + goto _test_eof3360 + } + st_case_3360: + switch data[p] { + case 143: + goto tr3053 + case 145: + goto tr3053 + } + if 176 <= data[p] { + goto tr3053 + } + goto tr2985 + st3361: + if p++; p == pe { + goto _test_eof3361 + } + st_case_3361: + if 139 <= data[p] { + goto tr2985 + } + goto tr3053 + st3362: + if p++; p == pe { + goto _test_eof3362 + } + st_case_3362: + if 166 <= data[p] && data[p] <= 176 { + goto tr3053 + } + goto tr2985 + st3363: + if p++; p == pe { + goto _test_eof3363 + } + st_case_3363: + if 171 <= data[p] && data[p] <= 179 { + goto tr3053 + } + goto tr2985 + st3364: + if p++; p == pe { + goto _test_eof3364 + } + st_case_3364: + switch data[p] { + case 160: + goto st3365 + case 161: + goto st3366 + case 163: + goto st3367 + case 164: + goto st3368 + case 165: + goto st3369 + case 167: + goto st3371 + case 169: + goto st3372 + case 171: + goto st3373 + case 173: + goto st3375 + case 174: + goto st3376 + case 175: + goto st3377 + case 176: + goto st3378 + case 177: + goto st3379 + case 179: + goto st3380 + case 180: + goto st3381 + case 181: + goto st3382 + case 182: + goto st3383 + case 183: + goto st3384 + case 184: + goto st3385 + case 185: + goto st3386 + case 186: + goto st3387 + case 187: + goto st3388 + case 188: + goto st3389 + case 189: + goto st3390 + case 190: + goto st3391 + case 191: + goto st3392 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st3374 + } + case data[p] >= 166: + goto st3370 + } + goto tr2985 + st3365: + if p++; p == pe { + goto _test_eof3365 + } + st_case_3365: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3053 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3053 + } + case data[p] >= 165: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3366: + if p++; p == pe { + goto _test_eof3366 + } + st_case_3366: + if 153 <= data[p] && data[p] <= 155 { + goto tr3053 + } 
+ goto tr2985 + st3367: + if p++; p == pe { + goto _test_eof3367 + } + st_case_3367: + if 163 <= data[p] { + goto tr3053 + } + goto tr2985 + st3368: + if p++; p == pe { + goto _test_eof3368 + } + st_case_3368: + if data[p] == 189 { + goto tr2985 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr2985 + } + goto tr3053 + st3369: + if p++; p == pe { + goto _test_eof3369 + } + st_case_3369: + if data[p] == 144 { + goto tr2985 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 152: + goto tr2985 + } + goto tr3053 + st3370: + if p++; p == pe { + goto _test_eof3370 + } + st_case_3370: + if data[p] == 188 { + goto tr3053 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3053 + } + case data[p] >= 129: + goto tr3053 + } + goto tr2985 + st3371: + if p++; p == pe { + goto _test_eof3371 + } + st_case_3371: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2985 + } + case data[p] >= 133: + goto tr2985 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 152: + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3372: + if p++; p == pe { + goto _test_eof3372 + } + st_case_3372: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2985 + } + case data[p] >= 131: + goto tr2985 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr2985 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3373: + if p++; p == pe { + goto _test_eof3373 + } + st_case_3373: + switch data[p] { + case 134: + goto tr2985 + case 138: + goto tr2985 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 142: + goto tr2985 + } + goto tr3053 + st3374: + if p++; p == pe 
{ + goto _test_eof3374 + } + st_case_3374: + if data[p] == 188 { + goto tr3053 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3053 + } + case data[p] >= 129: + goto tr3053 + } + goto tr2985 + st3375: + if p++; p == pe { + goto _test_eof3375 + } + st_case_3375: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3053 + } + case data[p] >= 150: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3376: + if p++; p == pe { + goto _test_eof3376 + } + st_case_3376: + if data[p] == 130 { + goto tr3053 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3053 + } + goto tr2985 + st3377: + if p++; p == pe { + goto _test_eof3377 + } + st_case_3377: + if data[p] == 151 { + goto tr3053 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3053 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3378: + if p++; p == pe { + goto _test_eof3378 + } + st_case_3378: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3379: + if p++; p == pe { + goto _test_eof3379 + } + st_case_3379: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr2985 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3380: + if p++; p == pe { + goto _test_eof3380 + } + st_case_3380: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + case data[p] > 141: + switch { + case data[p] > 
150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3053 + } + case data[p] >= 149: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3381: + if p++; p == pe { + goto _test_eof3381 + } + st_case_3381: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3053 + } + case data[p] >= 129: + goto tr3053 + } + goto tr2985 + st3382: + if p++; p == pe { + goto _test_eof3382 + } + st_case_3382: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr2985 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3383: + if p++; p == pe { + goto _test_eof3383 + } + st_case_3383: + if 130 <= data[p] && data[p] <= 131 { + goto tr3053 + } + goto tr2985 + st3384: + if p++; p == pe { + goto _test_eof3384 + } + st_case_3384: + switch data[p] { + case 138: + goto tr3053 + case 150: + goto tr3053 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3053 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3385: + if p++; p == pe { + goto _test_eof3385 + } + st_case_3385: + if data[p] == 177 { + goto tr3053 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3053 + } + goto tr2985 + st3386: + if p++; p == pe { + goto _test_eof3386 + } + st_case_3386: + if 135 <= data[p] && data[p] <= 142 { + goto tr3053 + } + goto tr2985 + st3387: + if p++; p == pe { + goto _test_eof3387 + } + st_case_3387: + if data[p] == 177 { + goto tr3053 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3053 + } + case data[p] >= 180: + goto tr3053 + } + goto tr2985 + st3388: + if p++; p == pe { + goto _test_eof3388 + } + st_case_3388: + if 136 <= data[p] && data[p] <= 141 { + goto tr3053 + } + goto tr2985 + st3389: + if p++; p == pe { + goto _test_eof3389 + } + st_case_3389: + switch 
data[p] { + case 181: + goto tr3053 + case 183: + goto tr3053 + case 185: + goto tr3053 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3053 + } + case data[p] >= 152: + goto tr3053 + } + goto tr2985 + st3390: + if p++; p == pe { + goto _test_eof3390 + } + st_case_3390: + if 177 <= data[p] && data[p] <= 191 { + goto tr3053 + } + goto tr2985 + st3391: + if p++; p == pe { + goto _test_eof3391 + } + st_case_3391: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3053 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3053 + } + case data[p] >= 141: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3392: + if p++; p == pe { + goto _test_eof3392 + } + st_case_3392: + if data[p] == 134 { + goto tr3053 + } + goto tr2985 + st3393: + if p++; p == pe { + goto _test_eof3393 + } + st_case_3393: + switch data[p] { + case 128: + goto st3394 + case 129: + goto st3395 + case 130: + goto st3396 + case 132: + goto st3352 + case 135: + goto st3398 + case 141: + goto st3399 + case 156: + goto st3400 + case 157: + goto st3401 + case 158: + goto st3402 + case 159: + goto st3403 + case 160: + goto st3404 + case 162: + goto st3405 + case 164: + goto st3406 + case 168: + goto st3407 + case 169: + goto st3408 + case 170: + goto st3409 + case 172: + goto st3410 + case 173: + goto st3411 + case 174: + goto st3412 + case 175: + goto st3413 + case 176: + goto st3414 + case 179: + goto st3415 + case 183: + goto st3416 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3397 + } + goto tr2985 + st3394: + if p++; p == pe { + goto _test_eof3394 + } + st_case_3394: + if 171 <= data[p] && data[p] <= 190 { + goto tr3053 + } + goto tr2985 + st3395: + if p++; p == pe { + goto _test_eof3395 + } + st_case_3395: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3053 + } + case data[p] >= 150: + goto 
tr3053 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3053 + } + case data[p] >= 167: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3396: + if p++; p == pe { + goto _test_eof3396 + } + st_case_3396: + if data[p] == 143 { + goto tr3053 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3053 + } + case data[p] >= 130: + goto tr3053 + } + goto tr2985 + st3397: + if p++; p == pe { + goto _test_eof3397 + } + st_case_3397: + goto tr3053 + st3398: + if p++; p == pe { + goto _test_eof3398 + } + st_case_3398: + if 192 <= data[p] { + goto tr2985 + } + goto tr3053 + st3399: + if p++; p == pe { + goto _test_eof3399 + } + st_case_3399: + if 157 <= data[p] && data[p] <= 159 { + goto tr3053 + } + goto tr2985 + st3400: + if p++; p == pe { + goto _test_eof3400 + } + st_case_3400: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3053 + } + case data[p] >= 146: + goto tr3053 + } + goto tr2985 + st3401: + if p++; p == pe { + goto _test_eof3401 + } + st_case_3401: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3053 + } + case data[p] >= 146: + goto tr3053 + } + goto tr2985 + st3402: + if p++; p == pe { + goto _test_eof3402 + } + st_case_3402: + if 180 <= data[p] { + goto tr3053 + } + goto tr2985 + st3403: + if p++; p == pe { + goto _test_eof3403 + } + st_case_3403: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2985 + } + case data[p] >= 148: + goto tr2985 + } + goto tr3053 + st3404: + if p++; p == pe { + goto _test_eof3404 + } + st_case_3404: + if 139 <= data[p] && data[p] <= 142 { + goto tr3053 + } + goto tr2985 + st3405: + if p++; p == pe { + goto _test_eof3405 + } + st_case_3405: + if data[p] == 169 { + goto tr3053 + } + goto tr2985 + st3406: + if p++; p == pe { + goto _test_eof3406 + } + st_case_3406: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto 
tr3053 + } + case data[p] >= 160: + goto tr3053 + } + goto tr2985 + st3407: + if p++; p == pe { + goto _test_eof3407 + } + st_case_3407: + if 151 <= data[p] && data[p] <= 155 { + goto tr3053 + } + goto tr2985 + st3408: + if p++; p == pe { + goto _test_eof3408 + } + st_case_3408: + if data[p] == 191 { + goto tr3053 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3053 + } + case data[p] >= 149: + goto tr3053 + } + goto tr2985 + st3409: + if p++; p == pe { + goto _test_eof3409 + } + st_case_3409: + if 176 <= data[p] && data[p] <= 190 { + goto tr3053 + } + goto tr2985 + st3410: + if p++; p == pe { + goto _test_eof3410 + } + st_case_3410: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3411: + if p++; p == pe { + goto _test_eof3411 + } + st_case_3411: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2985 + } + case data[p] >= 133: + goto tr2985 + } + goto tr3053 + st3412: + if p++; p == pe { + goto _test_eof3412 + } + st_case_3412: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3413: + if p++; p == pe { + goto _test_eof3413 + } + st_case_3413: + if 166 <= data[p] && data[p] <= 179 { + goto tr3053 + } + goto tr2985 + st3414: + if p++; p == pe { + goto _test_eof3414 + } + st_case_3414: + if 164 <= data[p] && data[p] <= 183 { + goto tr3053 + } + goto tr2985 + st3415: + if p++; p == pe { + goto _test_eof3415 + } + st_case_3415: + if data[p] == 173 { + goto tr3053 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3053 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3053 + } + case data[p] >= 178: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3416: + if p++; p == pe { + goto _test_eof3416 + } + st_case_3416: + switch { + case 
data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3417: + if p++; p == pe { + goto _test_eof3417 + } + st_case_3417: + switch data[p] { + case 128: + goto st3418 + case 129: + goto st3419 + case 131: + goto st3420 + case 179: + goto st3421 + case 181: + goto st3422 + case 183: + goto st3423 + } + goto tr2985 + st3418: + if p++; p == pe { + goto _test_eof3418 + } + st_case_3418: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr3053 + } + case data[p] >= 140: + goto tr3053 + } + goto tr2985 + st3419: + if p++; p == pe { + goto _test_eof3419 + } + st_case_3419: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr3053 + } + case data[p] >= 160: + goto tr3053 + } + goto tr2985 + st3420: + if p++; p == pe { + goto _test_eof3420 + } + st_case_3420: + if 144 <= data[p] && data[p] <= 176 { + goto tr3053 + } + goto tr2985 + st3421: + if p++; p == pe { + goto _test_eof3421 + } + st_case_3421: + if 175 <= data[p] && data[p] <= 177 { + goto tr3053 + } + goto tr2985 + st3422: + if p++; p == pe { + goto _test_eof3422 + } + st_case_3422: + if data[p] == 191 { + goto tr3053 + } + goto tr2985 + st3423: + if p++; p == pe { + goto _test_eof3423 + } + st_case_3423: + if 160 <= data[p] && data[p] <= 191 { + goto tr3053 + } + goto tr2985 + st3424: + if p++; p == pe { + goto _test_eof3424 + } + st_case_3424: + switch data[p] { + case 128: + goto st3425 + case 130: + goto st3426 + case 132: + goto st3427 + case 133: + goto st3397 + case 134: + goto st3428 + case 136: + goto st3350 + case 137: + goto st3429 + } + goto tr2985 + st3425: + if p++; p == pe { + goto _test_eof3425 + } + st_case_3425: + if 170 <= data[p] && data[p] <= 175 { + goto tr3053 + } + goto tr2985 + st3426: + if p++; p == pe { + goto _test_eof3426 + } + st_case_3426: + if 153 <= data[p] && data[p] <= 154 { + goto tr3053 + } + goto tr2985 + st3427: + if p++; p == pe { + goto 
_test_eof3427 + } + st_case_3427: + if 177 <= data[p] { + goto tr3053 + } + goto tr2985 + st3428: + if p++; p == pe { + goto _test_eof3428 + } + st_case_3428: + if 143 <= data[p] { + goto tr2985 + } + goto tr3053 + st3429: + if p++; p == pe { + goto _test_eof3429 + } + st_case_3429: + if 160 <= data[p] && data[p] <= 190 { + goto tr3053 + } + goto tr2 + st3430: + if p++; p == pe { + goto _test_eof3430 + } + st_case_3430: + switch data[p] { + case 153: + goto st3431 + case 154: + goto st3432 + case 155: + goto st3433 + case 160: + goto st3434 + case 162: + goto st3435 + case 163: + goto st3436 + case 164: + goto st3437 + case 165: + goto st3438 + case 166: + goto st3439 + case 167: + goto st3440 + case 168: + goto st3441 + case 169: + goto st3442 + case 170: + goto st3443 + case 171: + goto st3444 + case 175: + goto st3445 + case 176: + goto st3352 + } + if 177 <= data[p] { + goto st3397 + } + goto tr2985 + st3431: + if p++; p == pe { + goto _test_eof3431 + } + st_case_3431: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3053 + } + case data[p] >= 175: + goto tr3053 + } + goto tr2985 + st3432: + if p++; p == pe { + goto _test_eof3432 + } + st_case_3432: + if 158 <= data[p] && data[p] <= 159 { + goto tr3053 + } + goto tr2985 + st3433: + if p++; p == pe { + goto _test_eof3433 + } + st_case_3433: + if 176 <= data[p] && data[p] <= 177 { + goto tr3053 + } + goto tr2985 + st3434: + if p++; p == pe { + goto _test_eof3434 + } + st_case_3434: + switch data[p] { + case 130: + goto tr3053 + case 134: + goto tr3053 + case 139: + goto tr3053 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3053 + } + goto tr2985 + st3435: + if p++; p == pe { + goto _test_eof3435 + } + st_case_3435: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3436: + if p++; p == pe { + goto _test_eof3436 + } + st_case_3436: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto 
tr2985 + } + case data[p] >= 133: + goto tr2985 + } + goto tr3053 + st3437: + if p++; p == pe { + goto _test_eof3437 + } + st_case_3437: + if 166 <= data[p] && data[p] <= 173 { + goto tr3053 + } + goto tr2985 + st3438: + if p++; p == pe { + goto _test_eof3438 + } + st_case_3438: + switch { + case data[p] > 147: + if 160 <= data[p] && data[p] <= 188 { + goto tr3053 + } + case data[p] >= 135: + goto tr3053 + } + goto tr2985 + st3439: + if p++; p == pe { + goto _test_eof3439 + } + st_case_3439: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3440: + if p++; p == pe { + goto _test_eof3440 + } + st_case_3440: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3053 + st3441: + if p++; p == pe { + goto _test_eof3441 + } + st_case_3441: + if 169 <= data[p] && data[p] <= 182 { + goto tr3053 + } + goto tr2985 + st3442: + if p++; p == pe { + goto _test_eof3442 + } + st_case_3442: + if data[p] == 131 { + goto tr3053 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3053 + } + case data[p] >= 140: + goto tr3053 + } + goto tr2985 + st3443: + if p++; p == pe { + goto _test_eof3443 + } + st_case_3443: + if data[p] == 176 { + goto tr3053 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3053 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3444: + if p++; p == pe { + goto _test_eof3444 + } + st_case_3444: + if data[p] == 129 { + goto tr3053 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3053 + } + case data[p] >= 171: + goto tr3053 + } + goto tr2985 + st3445: + if p++; p == pe { + goto _test_eof3445 + } + st_case_3445: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3053 + } + case data[p] >= 163: + goto 
tr3053 + } + goto tr2985 + st3446: + if p++; p == pe { + goto _test_eof3446 + } + st_case_3446: + goto st3397 + st3447: + if p++; p == pe { + goto _test_eof3447 + } + st_case_3447: + switch data[p] { + case 158: + goto st3448 + case 159: + goto st3449 + } + if 160 <= data[p] { + goto tr2985 + } + goto st3397 + st3448: + if p++; p == pe { + goto _test_eof3448 + } + st_case_3448: + if 164 <= data[p] && data[p] <= 175 { + goto tr2985 + } + goto tr3053 + st3449: + if p++; p == pe { + goto _test_eof3449 + } + st_case_3449: + switch { + case data[p] > 138: + if 188 <= data[p] { + goto tr2985 + } + case data[p] >= 135: + goto tr2985 + } + goto tr3053 + st3450: + if p++; p == pe { + goto _test_eof3450 + } + st_case_3450: + switch data[p] { + case 172: + goto st3451 + case 184: + goto st3452 + case 187: + goto st3422 + case 190: + goto st3453 + case 191: + goto st3454 + } + goto tr2985 + st3451: + if p++; p == pe { + goto _test_eof3451 + } + st_case_3451: + if data[p] == 158 { + goto tr3053 + } + goto tr2985 + st3452: + if p++; p == pe { + goto _test_eof3452 + } + st_case_3452: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3453: + if p++; p == pe { + goto _test_eof3453 + } + st_case_3453: + if 158 <= data[p] && data[p] <= 190 { + goto tr3053 + } + goto tr2985 + st3454: + if p++; p == pe { + goto _test_eof3454 + } + st_case_3454: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr3053 + } + case data[p] >= 130: + goto tr3053 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr3053 + } + case data[p] >= 154: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3455: + if p++; p == pe { + goto _test_eof3455 + } + st_case_3455: + switch data[p] { + case 144: + goto st3456 + case 145: + goto st3462 + case 150: + goto st3481 + case 155: + goto 
st3486 + case 157: + goto st3488 + case 158: + goto st3495 + } + goto tr2985 + st3456: + if p++; p == pe { + goto _test_eof3456 + } + st_case_3456: + switch data[p] { + case 135: + goto st3457 + case 139: + goto st3458 + case 141: + goto st3459 + case 168: + goto st3460 + case 171: + goto st3461 + } + goto tr2985 + st3457: + if p++; p == pe { + goto _test_eof3457 + } + st_case_3457: + if data[p] == 189 { + goto tr3053 + } + goto tr2985 + st3458: + if p++; p == pe { + goto _test_eof3458 + } + st_case_3458: + if data[p] == 160 { + goto tr3053 + } + goto tr2985 + st3459: + if p++; p == pe { + goto _test_eof3459 + } + st_case_3459: + if 182 <= data[p] && data[p] <= 186 { + goto tr3053 + } + goto tr2985 + st3460: + if p++; p == pe { + goto _test_eof3460 + } + st_case_3460: + if data[p] == 191 { + goto tr3053 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3053 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3053 + } + case data[p] >= 140: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3461: + if p++; p == pe { + goto _test_eof3461 + } + st_case_3461: + if 165 <= data[p] && data[p] <= 166 { + goto tr3053 + } + goto tr2985 + st3462: + if p++; p == pe { + goto _test_eof3462 + } + st_case_3462: + switch data[p] { + case 128: + goto st3463 + case 129: + goto st3464 + case 130: + goto st3465 + case 132: + goto st3466 + case 133: + goto st3467 + case 134: + goto st3468 + case 135: + goto st3469 + case 136: + goto st3470 + case 139: + goto st3471 + case 140: + goto st3472 + case 141: + goto st3473 + case 146: + goto st3474 + case 147: + goto st3475 + case 150: + goto st3476 + case 151: + goto st3477 + case 152: + goto st3474 + case 153: + goto st3478 + case 154: + goto st3479 + case 156: + goto st3480 + } + goto tr2985 + st3463: + if p++; p == pe { + goto _test_eof3463 + } + st_case_3463: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3053 + } + 
case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3464: + if p++; p == pe { + goto _test_eof3464 + } + st_case_3464: + if 135 <= data[p] && data[p] <= 190 { + goto tr2985 + } + goto tr3053 + st3465: + if p++; p == pe { + goto _test_eof3465 + } + st_case_3465: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr2985 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3466: + if p++; p == pe { + goto _test_eof3466 + } + st_case_3466: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3467: + if p++; p == pe { + goto _test_eof3467 + } + st_case_3467: + if data[p] == 179 { + goto tr3053 + } + goto tr2985 + st3468: + if p++; p == pe { + goto _test_eof3468 + } + st_case_3468: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3469: + if p++; p == pe { + goto _test_eof3469 + } + st_case_3469: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3053 + st3470: + if p++; p == pe { + goto _test_eof3470 + } + st_case_3470: + if 172 <= data[p] && data[p] <= 183 { + goto tr3053 + } + goto tr2985 + st3471: + if p++; p == pe { + goto _test_eof3471 + } + st_case_3471: + if 159 <= data[p] && data[p] <= 170 { + goto tr3053 + } + goto tr2985 + st3472: + if p++; p == pe { + goto _test_eof3472 + } + st_case_3472: + if data[p] == 188 { + goto tr3053 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3473: + if p++; p == pe { + goto _test_eof3473 + } + st_case_3473: + if data[p] == 151 { + goto tr3053 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3053 + } + case data[p] >= 
128: + goto tr3053 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3053 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3053 + } + default: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3474: + if p++; p == pe { + goto _test_eof3474 + } + st_case_3474: + if 176 <= data[p] { + goto tr3053 + } + goto tr2985 + st3475: + if p++; p == pe { + goto _test_eof3475 + } + st_case_3475: + if 132 <= data[p] { + goto tr2985 + } + goto tr3053 + st3476: + if p++; p == pe { + goto _test_eof3476 + } + st_case_3476: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3053 + } + case data[p] >= 175: + goto tr3053 + } + goto tr2985 + st3477: + if p++; p == pe { + goto _test_eof3477 + } + st_case_3477: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3053 + st3478: + if p++; p == pe { + goto _test_eof3478 + } + st_case_3478: + if 129 <= data[p] { + goto tr2985 + } + goto tr3053 + st3479: + if p++; p == pe { + goto _test_eof3479 + } + st_case_3479: + if 171 <= data[p] && data[p] <= 183 { + goto tr3053 + } + goto tr2985 + st3480: + if p++; p == pe { + goto _test_eof3480 + } + st_case_3480: + if 157 <= data[p] && data[p] <= 171 { + goto tr3053 + } + goto tr2985 + st3481: + if p++; p == pe { + goto _test_eof3481 + } + st_case_3481: + switch data[p] { + case 171: + goto st3482 + case 172: + goto st3483 + case 189: + goto st3484 + case 190: + goto st3485 + } + goto tr2985 + st3482: + if p++; p == pe { + goto _test_eof3482 + } + st_case_3482: + if 176 <= data[p] && data[p] <= 180 { + goto tr3053 + } + goto tr2985 + st3483: + if p++; p == pe { + goto _test_eof3483 + } + st_case_3483: + if 176 <= data[p] && data[p] <= 182 { + goto tr3053 + } + goto tr2985 + st3484: + if p++; p == pe { + goto _test_eof3484 + } + st_case_3484: + if 145 <= data[p] && data[p] <= 190 { + goto tr3053 + } + goto tr2985 + st3485: + 
if p++; p == pe { + goto _test_eof3485 + } + st_case_3485: + if 143 <= data[p] && data[p] <= 146 { + goto tr3053 + } + goto tr2985 + st3486: + if p++; p == pe { + goto _test_eof3486 + } + st_case_3486: + if data[p] == 178 { + goto st3487 + } + goto tr2985 + st3487: + if p++; p == pe { + goto _test_eof3487 + } + st_case_3487: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3053 + } + case data[p] >= 157: + goto tr3053 + } + goto tr2985 + st3488: + if p++; p == pe { + goto _test_eof3488 + } + st_case_3488: + switch data[p] { + case 133: + goto st3489 + case 134: + goto st3490 + case 137: + goto st3491 + case 168: + goto st3492 + case 169: + goto st3493 + case 170: + goto st3494 + } + goto tr2985 + st3489: + if p++; p == pe { + goto _test_eof3489 + } + st_case_3489: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3053 + } + case data[p] >= 165: + goto tr3053 + } + goto tr2985 + st3490: + if p++; p == pe { + goto _test_eof3490 + } + st_case_3490: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2985 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3053 + st3491: + if p++; p == pe { + goto _test_eof3491 + } + st_case_3491: + if 130 <= data[p] && data[p] <= 132 { + goto tr3053 + } + goto tr2985 + st3492: + if p++; p == pe { + goto _test_eof3492 + } + st_case_3492: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr3053 + } + case data[p] >= 128: + goto tr3053 + } + goto tr2985 + st3493: + if p++; p == pe { + goto _test_eof3493 + } + st_case_3493: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + case data[p] >= 173: + goto tr2985 + } + goto tr3053 + st3494: + if p++; p == pe { + goto _test_eof3494 + } + st_case_3494: + if data[p] == 132 { + goto tr3053 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3053 + } + case data[p] >= 155: + goto tr3053 + } + goto 
tr2985 + st3495: + if p++; p == pe { + goto _test_eof3495 + } + st_case_3495: + if data[p] == 163 { + goto st3496 + } + goto tr2985 + st3496: + if p++; p == pe { + goto _test_eof3496 + } + st_case_3496: + if 144 <= data[p] && data[p] <= 150 { + goto tr3053 + } + goto tr2985 + st3497: + if p++; p == pe { + goto _test_eof3497 + } + st_case_3497: + if data[p] == 160 { + goto st3498 + } + goto tr2985 + st3498: + if p++; p == pe { + goto _test_eof3498 + } + st_case_3498: + switch data[p] { + case 128: + goto st3499 + case 129: + goto st3398 + case 132: + goto st3352 + case 135: + goto st3353 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3397 + } + goto tr2985 + st3499: + if p++; p == pe { + goto _test_eof3499 + } + st_case_3499: + if data[p] == 129 { + goto tr3053 + } + if 160 <= data[p] { + goto tr3053 + } + goto tr2985 + st3500: + if p++; p == pe { + goto _test_eof3500 + } + st_case_3500: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st3501 + case 154: + goto st3502 + case 155: + goto st3503 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st3504 + case 161: + goto st272 + case 162: + goto st3505 + case 163: + goto st3506 + case 164: + goto st3507 + case 165: + goto st3508 + case 166: + goto st3509 + case 167: + goto st3510 + case 168: + goto st3511 + case 169: + goto st3512 + case 170: + goto st3513 + case 171: + goto st3514 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st3515 + case 176: + goto st3270 + } + switch { + case data[p] > 157: + if 177 <= data[p] { + goto st3318 + } + case data[p] >= 129: + goto st145 + } + goto tr2985 + st3501: + if p++; p == pe { + goto _test_eof3501 + } + st_case_3501: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 
180 <= data[p] && data[p] <= 189 { + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3502: + if p++; p == pe { + goto _test_eof3502 + } + st_case_3502: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2984 + } + goto tr2 + st3503: + if p++; p == pe { + goto _test_eof3503 + } + st_case_3503: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 176: + goto tr2984 + } + goto tr148 + st3504: + if p++; p == pe { + goto _test_eof3504 + } + st_case_3504: + switch data[p] { + case 130: + goto tr2984 + case 134: + goto tr2984 + case 139: + goto tr2984 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr2 + } + case data[p] >= 163: + goto tr2984 + } + goto tr148 + st3505: + if p++; p == pe { + goto _test_eof3505 + } + st_case_3505: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2984 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3506: + if p++; p == pe { + goto _test_eof3506 + } + st_case_3506: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr2 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr2 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2 + } + goto tr2984 + st3507: + if p++; p == pe { + goto _test_eof3507 + } + st_case_3507: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3508: + if p++; p == pe { + goto _test_eof3508 + } + st_case_3508: + 
switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2984 + } + case data[p] > 159: + switch { + case data[p] > 188: + if 189 <= data[p] { + goto tr2 + } + case data[p] >= 160: + goto tr2984 + } + default: + goto tr2 + } + goto tr148 + st3509: + if p++; p == pe { + goto _test_eof3509 + } + st_case_3509: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3510: + if p++; p == pe { + goto _test_eof3510 + } + st_case_3510: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr2 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + default: + goto tr2 + } + goto tr2984 + st3511: + if p++; p == pe { + goto _test_eof3511 + } + st_case_3511: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3512: + if p++; p == pe { + goto _test_eof3512 + } + st_case_3512: + if data[p] == 131 { + goto tr2984 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2984 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2984 + } + goto tr2 + st3513: + if p++; p == pe { + goto _test_eof3513 + } + st_case_3513: + if data[p] == 176 { + goto tr2984 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr2984 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3514: + if p++; p == pe { + goto 
_test_eof3514 + } + st_case_3514: + if data[p] == 129 { + goto tr2984 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2984 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2984 + } + goto tr2 + st3515: + if p++; p == pe { + goto _test_eof3515 + } + st_case_3515: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3516: + if p++; p == pe { + goto _test_eof3516 + } + st_case_3516: + goto st3318 + st3517: + if p++; p == pe { + goto _test_eof3517 + } + st_case_3517: + switch data[p] { + case 158: + goto st3518 + case 159: + goto st3519 + } + if 160 <= data[p] { + goto tr2985 + } + goto st3318 + st3518: + if p++; p == pe { + goto _test_eof3518 + } + st_case_3518: + if 164 <= data[p] && data[p] <= 175 { + goto tr2 + } + goto tr2984 + st3519: + if p++; p == pe { + goto _test_eof3519 + } + st_case_3519: + switch { + case data[p] > 138: + if 188 <= data[p] { + goto tr2 + } + case data[p] >= 135: + goto tr2 + } + goto tr2984 + st3520: + if p++; p == pe { + goto _test_eof3520 + } + st_case_3520: + switch data[p] { + case 172: + goto st3521 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st3522 + case 185: + goto st967 + case 187: + goto st3523 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st3524 + case 191: + goto st3525 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr2985 + st3521: + if p++; p == pe { + goto _test_eof3521 + } + st_case_3521: + switch data[p] { + case 158: + goto tr2984 + case 
190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr2 + st3522: + if p++; p == pe { + goto _test_eof3522 + } + st_case_3522: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2984 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr2984 + } + goto tr2 + st3523: + if p++; p == pe { + goto _test_eof3523 + } + st_case_3523: + if data[p] == 191 { + goto tr2984 + } + if 189 <= data[p] { + goto tr2 + } + goto tr148 + st3524: + if p++; p == pe { + goto _test_eof3524 + } + st_case_3524: + if 158 <= data[p] && data[p] <= 190 { + goto tr2984 + } + goto tr2 + st3525: + if p++; p == pe { + goto _test_eof3525 + } + st_case_3525: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr2984 + } + case data[p] >= 130: + goto tr2984 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2984 + } + case data[p] >= 154: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2 + st3526: + if p++; p == pe { + goto _test_eof3526 + } + st_case_3526: + switch data[p] { + case 144: + goto st3527 + case 145: + goto st3533 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st3552 + case 155: + goto st3557 + case 157: + goto st3559 + case 158: + goto st3566 + case 159: + goto st403 + } + goto tr2985 + st3527: + if p++; p == pe { + goto _test_eof3527 + } + st_case_3527: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto 
st311 + case 135: + goto st3528 + case 138: + goto st313 + case 139: + goto st3529 + case 140: + goto st315 + case 141: + goto st3530 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st3531 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st3532 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr2 + st3528: + if p++; p == pe { + goto _test_eof3528 + } + st_case_3528: + if data[p] == 189 { + goto tr2984 + } + goto tr2 + st3529: + if p++; p == pe { + goto _test_eof3529 + } + st_case_3529: + if data[p] == 160 { + goto tr2984 + } + if 145 <= data[p] { + goto tr2 + } + goto tr148 + st3530: + if p++; p == pe { + goto _test_eof3530 + } + st_case_3530: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr2 + } + default: + goto tr2984 + } + goto tr148 + st3531: + if p++; p == pe { + goto _test_eof3531 + } + st_case_3531: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2984 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2984 + } + default: + goto tr2984 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 
{ + goto tr2984 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3532: + if p++; p == pe { + goto _test_eof3532 + } + st_case_3532: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3533: + if p++; p == pe { + goto _test_eof3533 + } + st_case_3533: + switch data[p] { + case 128: + goto st3534 + case 129: + goto st3535 + case 130: + goto st3536 + case 131: + goto st691 + case 132: + goto st3537 + case 133: + goto st3538 + case 134: + goto st3539 + case 135: + goto st3540 + case 136: + goto st3541 + case 138: + goto st348 + case 139: + goto st3542 + case 140: + goto st3543 + case 141: + goto st3544 + case 146: + goto st3545 + case 147: + goto st3546 + case 150: + goto st3547 + case 151: + goto st3548 + case 152: + goto st3545 + case 153: + goto st3549 + case 154: + goto st3550 + case 155: + goto st538 + case 156: + goto st3551 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr2 + st3534: + if p++; p == pe { + goto _test_eof3534 + } + st_case_3534: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2984 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3535: + if p++; p == pe { + goto _test_eof3535 + } + st_case_3535: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr2 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr2 + } + default: + goto tr421 + } + goto tr2984 + st3536: + if p++; p == pe { + goto _test_eof3536 + } + st_case_3536: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2984 + st3537: + if p++; p == pe { + goto _test_eof3537 + } + st_case_3537: + switch { 
+ case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2984 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3538: + if p++; p == pe { + goto _test_eof3538 + } + st_case_3538: + switch data[p] { + case 179: + goto tr2984 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr2 + st3539: + if p++; p == pe { + goto _test_eof3539 + } + st_case_3539: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2984 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3540: + if p++; p == pe { + goto _test_eof3540 + } + st_case_3540: + if data[p] == 155 { + goto tr2 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr2 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + default: + goto tr2 + } + goto tr2984 + st3541: + if p++; p == pe { + goto _test_eof3541 + } + st_case_3541: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3542: + if p++; p == pe { + goto _test_eof3542 + } + st_case_3542: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2984 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr2 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr2 + } + goto tr148 + st3543: + if p++; p == pe { + goto _test_eof3543 + } + st_case_3543: + if data[p] == 189 { + goto tr148 + } + switch { + case 
data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2984 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2984 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2 + st3544: + if p++; p == pe { + goto _test_eof3544 + } + st_case_3544: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2984 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2984 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2984 + } + default: + goto tr2984 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2984 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2984 + } + default: + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3545: + if p++; p == pe { + goto _test_eof3545 + } + st_case_3545: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3546: + if p++; p == pe { + goto _test_eof3546 + } + st_case_3546: + if data[p] == 134 { + goto tr2 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr2 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2 + } + goto tr2984 + st3547: + if p++; p == pe { + goto _test_eof3547 + } + st_case_3547: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2984 + } + default: + goto tr2984 + 
} + goto tr2 + st3548: + if p++; p == pe { + goto _test_eof3548 + } + st_case_3548: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr2 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr2 + } + default: + goto tr148 + } + goto tr2984 + st3549: + if p++; p == pe { + goto _test_eof3549 + } + st_case_3549: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr2 + } + default: + goto tr421 + } + goto tr2984 + st3550: + if p++; p == pe { + goto _test_eof3550 + } + st_case_3550: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3551: + if p++; p == pe { + goto _test_eof3551 + } + st_case_3551: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr2984 + } + goto tr2 + st3552: + if p++; p == pe { + goto _test_eof3552 + } + st_case_3552: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st3553 + case 172: + goto st3554 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st3555 + case 190: + goto st3556 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr2 + st3553: + if p++; p == pe { + goto _test_eof3553 + } + st_case_3553: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2984 + } + case data[p] >= 144: + goto tr148 + } + goto tr2 + st3554: + if p++; p == pe { + goto _test_eof3554 + } + st_case_3554: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2984 + } + case data[p] >= 128: + goto tr148 + } + goto tr2 + st3555: + if p++; p == pe { + goto _test_eof3555 + } + st_case_3555: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr2 + } 
+ case data[p] > 190: + if 191 <= data[p] { + goto tr2 + } + default: + goto tr2984 + } + goto tr148 + st3556: + if p++; p == pe { + goto _test_eof3556 + } + st_case_3556: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2984 + } + goto tr2 + st3557: + if p++; p == pe { + goto _test_eof3557 + } + st_case_3557: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st3558 + } + goto tr2 + st3558: + if p++; p == pe { + goto _test_eof3558 + } + st_case_3558: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2984 + } + case data[p] >= 157: + goto tr2984 + } + default: + goto tr148 + } + goto tr2 + st3559: + if p++; p == pe { + goto _test_eof3559 + } + st_case_3559: + switch data[p] { + case 133: + goto st3560 + case 134: + goto st3561 + case 137: + goto st3562 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st3563 + case 169: + goto st3564 + case 170: + goto st3565 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr2 + st3560: + if p++; p == pe { + goto _test_eof3560 + } + st_case_3560: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr2984 + } + case data[p] >= 165: + goto tr2984 + } + goto tr2 + st3561: + if p++; p == pe { + goto _test_eof3561 + } + st_case_3561: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr2984 + st3562: + if p++; p == pe { + goto _test_eof3562 + } + st_case_3562: + if 
130 <= data[p] && data[p] <= 132 { + goto tr2984 + } + goto tr2 + st3563: + if p++; p == pe { + goto _test_eof3563 + } + st_case_3563: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr2984 + } + case data[p] >= 128: + goto tr2984 + } + goto tr2 + st3564: + if p++; p == pe { + goto _test_eof3564 + } + st_case_3564: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + case data[p] >= 173: + goto tr2 + } + goto tr2984 + st3565: + if p++; p == pe { + goto _test_eof3565 + } + st_case_3565: + if data[p] == 132 { + goto tr2984 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr2984 + } + case data[p] >= 155: + goto tr2984 + } + goto tr2 + st3566: + if p++; p == pe { + goto _test_eof3566 + } + st_case_3566: + switch data[p] { + case 160: + goto st147 + case 163: + goto st3567 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr2 + st3567: + if p++; p == pe { + goto _test_eof3567 + } + st_case_3567: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr2 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr2 + } + default: + goto tr2984 + } + goto tr148 + st3568: + if p++; p == pe { + goto _test_eof3568 + } + st_case_3568: + if data[p] == 160 { + goto st3569 + } + goto tr2985 + st3569: + if p++; p == pe { + goto _test_eof3569 + } + st_case_3569: + switch data[p] { + case 128: + goto st3570 + case 129: + goto st3571 + case 132: + goto st3270 + case 135: + goto st3572 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3318 + } + goto tr2 + st3570: + if p++; p == pe { + goto _test_eof3570 + } + st_case_3570: + if data[p] == 129 { + goto tr2984 + } + if 160 <= data[p] { + goto tr2984 + } + goto tr2 + st3571: + if p++; p == pe { + goto _test_eof3571 + } + st_case_3571: + if 192 <= data[p] { + goto tr2 + } + goto tr2984 + st3572: + if p++; p == pe { + goto _test_eof3572 + } + 
st_case_3572: + if 176 <= data[p] { + goto tr2 + } + goto tr2984 + st3573: + if p++; p == pe { + goto _test_eof3573 + } + st_case_3573: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr0 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr0 + } + default: + goto tr2395 + } + goto tr148 + st3574: + if p++; p == pe { + goto _test_eof3574 + } + st_case_3574: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr2395 + } + goto tr0 + st3575: + if p++; p == pe { + goto _test_eof3575 + } + st_case_3575: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr2395 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st3576: + if p++; p == pe { + goto _test_eof3576 + } + st_case_3576: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr0 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr0 + } + case data[p] >= 160: + goto tr126 + } + default: + goto tr0 + } + goto tr2395 + st3577: + if p++; p == pe { + goto _test_eof3577 + } + st_case_3577: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr2395 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + goto tr0 + st3578: + if p++; p == pe { + goto _test_eof3578 + } + st_case_3578: + if data[p] == 169 { + goto tr2395 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + 
goto tr0 + st3579: + if p++; p == pe { + goto _test_eof3579 + } + st_case_3579: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st3580: + if p++; p == pe { + goto _test_eof3580 + } + st_case_3580: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st3581: + if p++; p == pe { + goto _test_eof3581 + } + st_case_3581: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr2395 + } + default: + goto tr126 + } + goto tr0 + st3582: + if p++; p == pe { + goto _test_eof3582 + } + st_case_3582: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr2395 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st3583: + if p++; p == pe { + goto _test_eof3583 + } + st_case_3583: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr0 + } + case data[p] >= 154: + goto tr0 + } + default: + goto tr126 + } + goto tr2395 + st3584: + if p++; p == pe { + goto _test_eof3584 + } + st_case_3584: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr2395 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr126 + } + default: + goto tr2395 + } + goto tr0 + st3585: + if p++; p == pe { + goto _test_eof3585 + } + st_case_3585: + 
switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr0 + } + case data[p] >= 166: + goto tr2395 + } + goto tr148 + st3586: + if p++; p == pe { + goto _test_eof3586 + } + st_case_3586: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st3587: + if p++; p == pe { + goto _test_eof3587 + } + st_case_3587: + if data[p] == 173 { + goto tr2395 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr2395 + } + case data[p] >= 144: + goto tr2395 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr2395 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 +tr4488: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5075 + st5075: + if p++; p == pe { + goto _test_eof5075 + } + st_case_5075: +//line segment_words_prod.go:110410 + switch data[p] { + case 128: + goto st3588 + case 129: + goto st3589 + case 130: + goto st241 + case 131: + goto st2703 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st3590 + case 180: + goto st251 + case 181: + goto st3591 + case 182: + goto st253 + case 183: + goto st3592 + case 184: + goto st255 + case 186: + goto st3593 + case 187: + goto st3735 + case 188: + goto st3595 + case 191: + goto st3736 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + 
case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 189 <= data[p] && data[p] <= 190 { + goto st3734 + } + goto tr4499 + st3588: + if p++; p == pe { + goto _test_eof3588 + } + st_case_3588: + switch { + case data[p] < 168: + if 140 <= data[p] && data[p] <= 143 { + goto tr2395 + } + case data[p] > 169: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr2136 + } + case data[p] >= 170: + goto tr2395 + } + default: + goto tr3249 + } + goto tr0 + st3589: + if p++; p == pe { + goto _test_eof3589 + } + st_case_3589: + switch data[p] { + case 165: + goto tr0 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr0 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr0 + } + case data[p] >= 160: + goto tr2395 + } + default: + goto tr0 + } + goto tr2136 + st3590: + if p++; p == pe { + goto _test_eof3590 + } + st_case_3590: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr0 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr0 + } + default: + goto tr2395 + } + goto tr148 + st3591: + if p++; p == pe { + goto _test_eof3591 + } + st_case_3591: + if data[p] == 191 { + goto tr2395 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr0 + } + case data[p] >= 168: + goto tr0 + } + goto tr148 + st3592: + if p++; p == pe { + goto _test_eof3592 + } + st_case_3592: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr0 
+ st3593: + if p++; p == pe { + goto _test_eof3593 + } + st_case_3593: + switch { + case data[p] > 153: + if 155 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr0 +tr3250: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5076 + st5076: + if p++; p == pe { + goto _test_eof5076 + } + st_case_5076: +//line segment_words_prod.go:110650 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 + st3594: + if p++; p == pe { + goto _test_eof3594 + } + st_case_3594: + if data[p] == 173 { + goto tr3250 + } + goto tr3251 + st3595: + if p++; p == pe { + goto _test_eof3595 + } + st_case_3595: + if data[p] <= 127 { + goto tr2 + } + goto tr3250 + st3596: + if p++; p == pe { + goto _test_eof3596 + } + st_case_3596: + if 176 <= data[p] { + goto tr2 + } + goto tr3250 + st3597: + if p++; p == pe { + goto _test_eof3597 + } + st_case_3597: + if 131 <= data[p] && data[p] <= 137 { + goto tr3250 + } + goto tr3251 + st3598: + if p++; p == pe { + goto _test_eof3598 + } + st_case_3598: + if data[p] == 191 { + goto tr3250 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3250 + } + goto tr3251 + st3599: + if p++; p == pe { + goto _test_eof3599 + } + st_case_3599: + if data[p] == 135 { + goto tr3250 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3250 + } + case data[p] >= 129: + goto tr3250 + } + goto tr3251 + st3600: + if p++; p == 
pe { + goto _test_eof3600 + } + st_case_3600: + if data[p] == 156 { + goto tr3250 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr3251 + st3601: + if p++; p == pe { + goto _test_eof3601 + } + st_case_3601: + if data[p] == 176 { + goto tr3250 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3250 + } + goto tr3251 + st3602: + if p++; p == pe { + goto _test_eof3602 + } + st_case_3602: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3250 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3250 + } + case data[p] >= 167: + goto tr3250 + } + default: + goto tr3250 + } + goto tr3251 + st3603: + if p++; p == pe { + goto _test_eof3603 + } + st_case_3603: + switch data[p] { + case 143: + goto tr3250 + case 145: + goto tr3250 + } + if 176 <= data[p] { + goto tr3250 + } + goto tr3251 + st3604: + if p++; p == pe { + goto _test_eof3604 + } + st_case_3604: + if 139 <= data[p] { + goto tr3251 + } + goto tr3250 + st3605: + if p++; p == pe { + goto _test_eof3605 + } + st_case_3605: + if 166 <= data[p] && data[p] <= 176 { + goto tr3250 + } + goto tr3251 + st3606: + if p++; p == pe { + goto _test_eof3606 + } + st_case_3606: + if 171 <= data[p] && data[p] <= 179 { + goto tr3250 + } + goto tr3251 + st3607: + if p++; p == pe { + goto _test_eof3607 + } + st_case_3607: + switch data[p] { + case 160: + goto st3608 + case 161: + goto st3609 + case 163: + goto st3610 + case 164: + goto st3611 + case 165: + goto st3612 + case 167: + goto st3614 + case 169: + goto st3615 + case 171: + goto st3616 + case 173: + goto st3618 + case 174: + goto st3619 + case 175: + goto st3620 + case 176: + goto st3621 + case 177: + goto st3622 + case 179: + goto st3623 + case 180: + goto st3624 + case 181: + goto st3625 + case 182: + goto st3626 + case 183: + goto st3627 + case 184: + goto st3628 + case 185: + goto st3629 + 
case 186: + goto st3630 + case 187: + goto st3631 + case 188: + goto st3632 + case 189: + goto st3633 + case 190: + goto st3634 + case 191: + goto st3635 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st3617 + } + case data[p] >= 166: + goto st3613 + } + goto tr3251 + st3608: + if p++; p == pe { + goto _test_eof3608 + } + st_case_3608: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3250 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3250 + } + case data[p] >= 165: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3609: + if p++; p == pe { + goto _test_eof3609 + } + st_case_3609: + if 153 <= data[p] && data[p] <= 155 { + goto tr3250 + } + goto tr2 + st3610: + if p++; p == pe { + goto _test_eof3610 + } + st_case_3610: + if 163 <= data[p] { + goto tr3250 + } + goto tr2 + st3611: + if p++; p == pe { + goto _test_eof3611 + } + st_case_3611: + if data[p] == 189 { + goto tr2 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr2 + } + goto tr3250 + st3612: + if p++; p == pe { + goto _test_eof3612 + } + st_case_3612: + if data[p] == 144 { + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + goto tr3250 + st3613: + if p++; p == pe { + goto _test_eof3613 + } + st_case_3613: + if data[p] == 188 { + goto tr3250 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3250 + } + case data[p] >= 129: + goto tr3250 + } + goto tr2 + st3614: + if p++; p == pe { + goto _test_eof3614 + } + st_case_3614: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 152: + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3615: + if p++; p == pe { + 
goto _test_eof3615 + } + st_case_3615: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2 + } + case data[p] >= 131: + goto tr2 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3616: + if p++; p == pe { + goto _test_eof3616 + } + st_case_3616: + switch data[p] { + case 134: + goto tr2 + case 138: + goto tr2 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + case data[p] >= 142: + goto tr2 + } + goto tr3250 + st3617: + if p++; p == pe { + goto _test_eof3617 + } + st_case_3617: + if data[p] == 188 { + goto tr3250 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3250 + } + case data[p] >= 129: + goto tr3250 + } + goto tr2 + st3618: + if p++; p == pe { + goto _test_eof3618 + } + st_case_3618: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3250 + } + case data[p] >= 150: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3619: + if p++; p == pe { + goto _test_eof3619 + } + st_case_3619: + if data[p] == 130 { + goto tr3250 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3250 + } + goto tr2 + st3620: + if p++; p == pe { + goto _test_eof3620 + } + st_case_3620: + if data[p] == 151 { + goto tr3250 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3250 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3621: + if p++; p == pe { + goto _test_eof3621 + } + st_case_3621: + switch { + case data[p] > 131: + if 190 <= 
data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3622: + if p++; p == pe { + goto _test_eof3622 + } + st_case_3622: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3623: + if p++; p == pe { + goto _test_eof3623 + } + st_case_3623: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3250 + } + case data[p] >= 149: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3624: + if p++; p == pe { + goto _test_eof3624 + } + st_case_3624: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3250 + } + case data[p] >= 129: + goto tr3250 + } + goto tr2 + st3625: + if p++; p == pe { + goto _test_eof3625 + } + st_case_3625: + switch data[p] { + case 133: + goto tr2 + case 137: + goto tr2 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr2 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3626: + if p++; p == pe { + goto _test_eof3626 + } + st_case_3626: + if 130 <= data[p] && data[p] <= 131 { + goto tr3250 + } + goto tr2 + st3627: + if p++; p == pe { + goto _test_eof3627 + } + st_case_3627: + switch data[p] { + case 138: + goto tr3250 + case 150: + goto tr3250 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3250 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3628: + if p++; p == pe { + goto _test_eof3628 + } + st_case_3628: + if data[p] == 177 { + goto tr3250 + } + if 180 <= data[p] && data[p] 
<= 186 { + goto tr3250 + } + goto tr2 + st3629: + if p++; p == pe { + goto _test_eof3629 + } + st_case_3629: + if 135 <= data[p] && data[p] <= 142 { + goto tr3250 + } + goto tr2 + st3630: + if p++; p == pe { + goto _test_eof3630 + } + st_case_3630: + if data[p] == 177 { + goto tr3250 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3250 + } + case data[p] >= 180: + goto tr3250 + } + goto tr2 + st3631: + if p++; p == pe { + goto _test_eof3631 + } + st_case_3631: + if 136 <= data[p] && data[p] <= 141 { + goto tr3250 + } + goto tr2 + st3632: + if p++; p == pe { + goto _test_eof3632 + } + st_case_3632: + switch data[p] { + case 181: + goto tr3250 + case 183: + goto tr3250 + case 185: + goto tr3250 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3250 + } + case data[p] >= 152: + goto tr3250 + } + goto tr2 + st3633: + if p++; p == pe { + goto _test_eof3633 + } + st_case_3633: + if 177 <= data[p] && data[p] <= 191 { + goto tr3250 + } + goto tr2 + st3634: + if p++; p == pe { + goto _test_eof3634 + } + st_case_3634: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3250 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3250 + } + case data[p] >= 141: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3635: + if p++; p == pe { + goto _test_eof3635 + } + st_case_3635: + if data[p] == 134 { + goto tr3250 + } + goto tr2 + st3636: + if p++; p == pe { + goto _test_eof3636 + } + st_case_3636: + switch data[p] { + case 128: + goto st3637 + case 129: + goto st3638 + case 130: + goto st3639 + case 141: + goto st3640 + case 156: + goto st3641 + case 157: + goto st3642 + case 158: + goto st3643 + case 159: + goto st3644 + case 160: + goto st3645 + case 162: + goto st3646 + case 164: + goto st3647 + case 168: + goto st3648 + case 169: + goto st3649 + case 170: + goto st3650 + case 172: + goto st3651 + case 173: + 
goto st3652 + case 174: + goto st3653 + case 175: + goto st3654 + case 176: + goto st3655 + case 179: + goto st3656 + case 183: + goto st3657 + } + goto tr3251 + st3637: + if p++; p == pe { + goto _test_eof3637 + } + st_case_3637: + if 171 <= data[p] && data[p] <= 190 { + goto tr3250 + } + goto tr2 + st3638: + if p++; p == pe { + goto _test_eof3638 + } + st_case_3638: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3250 + } + case data[p] >= 150: + goto tr3250 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3250 + } + case data[p] >= 167: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3639: + if p++; p == pe { + goto _test_eof3639 + } + st_case_3639: + if data[p] == 143 { + goto tr3250 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3250 + } + case data[p] >= 130: + goto tr3250 + } + goto tr2 + st3640: + if p++; p == pe { + goto _test_eof3640 + } + st_case_3640: + if 157 <= data[p] && data[p] <= 159 { + goto tr3250 + } + goto tr2 + st3641: + if p++; p == pe { + goto _test_eof3641 + } + st_case_3641: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3250 + } + case data[p] >= 146: + goto tr3250 + } + goto tr2 + st3642: + if p++; p == pe { + goto _test_eof3642 + } + st_case_3642: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3250 + } + case data[p] >= 146: + goto tr3250 + } + goto tr2 + st3643: + if p++; p == pe { + goto _test_eof3643 + } + st_case_3643: + if 180 <= data[p] { + goto tr3250 + } + goto tr2 + st3644: + if p++; p == pe { + goto _test_eof3644 + } + st_case_3644: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 148: + goto tr2 + } + goto tr3250 + st3645: + if p++; p == pe { + goto _test_eof3645 + } + st_case_3645: + if 139 <= data[p] && data[p] <= 142 { + goto tr3250 + } + 
goto tr2 + st3646: + if p++; p == pe { + goto _test_eof3646 + } + st_case_3646: + if data[p] == 169 { + goto tr3250 + } + goto tr2 + st3647: + if p++; p == pe { + goto _test_eof3647 + } + st_case_3647: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3250 + } + case data[p] >= 160: + goto tr3250 + } + goto tr2 + st3648: + if p++; p == pe { + goto _test_eof3648 + } + st_case_3648: + if 151 <= data[p] && data[p] <= 155 { + goto tr3250 + } + goto tr2 + st3649: + if p++; p == pe { + goto _test_eof3649 + } + st_case_3649: + if data[p] == 191 { + goto tr3250 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3250 + } + case data[p] >= 149: + goto tr3250 + } + goto tr2 + st3650: + if p++; p == pe { + goto _test_eof3650 + } + st_case_3650: + if 176 <= data[p] && data[p] <= 190 { + goto tr3250 + } + goto tr2 + st3651: + if p++; p == pe { + goto _test_eof3651 + } + st_case_3651: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3652: + if p++; p == pe { + goto _test_eof3652 + } + st_case_3652: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr3250 + st3653: + if p++; p == pe { + goto _test_eof3653 + } + st_case_3653: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3654: + if p++; p == pe { + goto _test_eof3654 + } + st_case_3654: + if 166 <= data[p] && data[p] <= 179 { + goto tr3250 + } + goto tr2 + st3655: + if p++; p == pe { + goto _test_eof3655 + } + st_case_3655: + if 164 <= data[p] && data[p] <= 183 { + goto tr3250 + } + goto tr2 + st3656: + if p++; p == pe { + goto _test_eof3656 + } + st_case_3656: + if data[p] == 173 { + goto tr3250 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3250 + } + case data[p] > 168: + switch { + 
case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3250 + } + case data[p] >= 178: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3657: + if p++; p == pe { + goto _test_eof3657 + } + st_case_3657: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3658: + if p++; p == pe { + goto _test_eof3658 + } + st_case_3658: + switch data[p] { + case 128: + goto st3659 + case 129: + goto st3660 + case 131: + goto st3661 + case 179: + goto st3662 + case 181: + goto st3663 + case 183: + goto st3664 + } + goto tr3251 + st3659: + if p++; p == pe { + goto _test_eof3659 + } + st_case_3659: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr3250 + } + case data[p] >= 140: + goto tr3250 + } + goto tr2 + st3660: + if p++; p == pe { + goto _test_eof3660 + } + st_case_3660: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr3250 + } + case data[p] >= 160: + goto tr3250 + } + goto tr2 + st3661: + if p++; p == pe { + goto _test_eof3661 + } + st_case_3661: + if 144 <= data[p] && data[p] <= 176 { + goto tr3250 + } + goto tr2 + st3662: + if p++; p == pe { + goto _test_eof3662 + } + st_case_3662: + if 175 <= data[p] && data[p] <= 177 { + goto tr3250 + } + goto tr2 + st3663: + if p++; p == pe { + goto _test_eof3663 + } + st_case_3663: + if data[p] == 191 { + goto tr3250 + } + goto tr2 + st3664: + if p++; p == pe { + goto _test_eof3664 + } + st_case_3664: + if 160 <= data[p] && data[p] <= 191 { + goto tr3250 + } + goto tr2 + st3665: + if p++; p == pe { + goto _test_eof3665 + } + st_case_3665: + switch data[p] { + case 128: + goto st3666 + case 130: + goto st3667 + } + goto tr3251 + st3666: + if p++; p == pe { + goto _test_eof3666 + } + st_case_3666: + if 170 <= data[p] && data[p] <= 175 { + goto tr3250 + } + goto tr2 + st3667: + if p++; p == pe { + goto _test_eof3667 + } + st_case_3667: + if 153 <= data[p] 
&& data[p] <= 154 { + goto tr3250 + } + goto tr2 + st3668: + if p++; p == pe { + goto _test_eof3668 + } + st_case_3668: + switch data[p] { + case 153: + goto st3669 + case 154: + goto st3670 + case 155: + goto st3671 + case 160: + goto st3672 + case 162: + goto st3673 + case 163: + goto st3674 + case 164: + goto st3675 + case 165: + goto st3676 + case 166: + goto st3677 + case 167: + goto st3678 + case 168: + goto st3679 + case 169: + goto st3680 + case 170: + goto st3681 + case 171: + goto st3682 + case 175: + goto st3683 + } + goto tr3251 + st3669: + if p++; p == pe { + goto _test_eof3669 + } + st_case_3669: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3250 + } + case data[p] >= 175: + goto tr3250 + } + goto tr2 + st3670: + if p++; p == pe { + goto _test_eof3670 + } + st_case_3670: + if 158 <= data[p] && data[p] <= 159 { + goto tr3250 + } + goto tr2 + st3671: + if p++; p == pe { + goto _test_eof3671 + } + st_case_3671: + if 176 <= data[p] && data[p] <= 177 { + goto tr3250 + } + goto tr2 + st3672: + if p++; p == pe { + goto _test_eof3672 + } + st_case_3672: + switch data[p] { + case 130: + goto tr3250 + case 134: + goto tr3250 + case 139: + goto tr3250 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3250 + } + goto tr2 + st3673: + if p++; p == pe { + goto _test_eof3673 + } + st_case_3673: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3674: + if p++; p == pe { + goto _test_eof3674 + } + st_case_3674: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr2 + } + case data[p] >= 133: + goto tr2 + } + goto tr3250 + st3675: + if p++; p == pe { + goto _test_eof3675 + } + st_case_3675: + if 166 <= data[p] && data[p] <= 173 { + goto tr3250 + } + goto tr2 + st3676: + if p++; p == pe { + goto _test_eof3676 + } + st_case_3676: + if 135 <= data[p] && data[p] <= 147 { + goto tr3250 + } + goto tr2 + st3677: + if p++; p == pe { + goto 
_test_eof3677 + } + st_case_3677: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3678: + if p++; p == pe { + goto _test_eof3678 + } + st_case_3678: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr3250 + st3679: + if p++; p == pe { + goto _test_eof3679 + } + st_case_3679: + if 169 <= data[p] && data[p] <= 182 { + goto tr3250 + } + goto tr2 + st3680: + if p++; p == pe { + goto _test_eof3680 + } + st_case_3680: + if data[p] == 131 { + goto tr3250 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3250 + } + case data[p] >= 140: + goto tr3250 + } + goto tr2 + st3681: + if p++; p == pe { + goto _test_eof3681 + } + st_case_3681: + if data[p] == 176 { + goto tr3250 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3250 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3682: + if p++; p == pe { + goto _test_eof3682 + } + st_case_3682: + if data[p] == 129 { + goto tr3250 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3250 + } + case data[p] >= 171: + goto tr3250 + } + goto tr2 + st3683: + if p++; p == pe { + goto _test_eof3683 + } + st_case_3683: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3250 + } + case data[p] >= 163: + goto tr3250 + } + goto tr2 + st3684: + if p++; p == pe { + goto _test_eof3684 + } + st_case_3684: + switch data[p] { + case 172: + goto st3685 + case 184: + goto st3686 + case 187: + goto st3663 + case 190: + goto st3670 + case 191: + goto st3687 + } + goto tr3251 + st3685: + if p++; p == pe { + goto _test_eof3685 + } + st_case_3685: + if data[p] == 158 { + goto tr3250 + } + goto tr2 + st3686: + if p++; p == pe { + goto _test_eof3686 + } + st_case_3686: + switch { + case data[p] > 143: 
+ if 160 <= data[p] && data[p] <= 175 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3687: + if p++; p == pe { + goto _test_eof3687 + } + st_case_3687: + if 185 <= data[p] && data[p] <= 187 { + goto tr3250 + } + goto tr2 + st3688: + if p++; p == pe { + goto _test_eof3688 + } + st_case_3688: + switch data[p] { + case 144: + goto st3689 + case 145: + goto st3695 + case 150: + goto st3714 + case 155: + goto st3719 + case 157: + goto st3721 + case 158: + goto st3728 + } + goto tr3251 + st3689: + if p++; p == pe { + goto _test_eof3689 + } + st_case_3689: + switch data[p] { + case 135: + goto st3690 + case 139: + goto st3691 + case 141: + goto st3692 + case 168: + goto st3693 + case 171: + goto st3694 + } + goto tr2 + st3690: + if p++; p == pe { + goto _test_eof3690 + } + st_case_3690: + if data[p] == 189 { + goto tr3250 + } + goto tr2 + st3691: + if p++; p == pe { + goto _test_eof3691 + } + st_case_3691: + if data[p] == 160 { + goto tr3250 + } + goto tr2 + st3692: + if p++; p == pe { + goto _test_eof3692 + } + st_case_3692: + if 182 <= data[p] && data[p] <= 186 { + goto tr3250 + } + goto tr2 + st3693: + if p++; p == pe { + goto _test_eof3693 + } + st_case_3693: + if data[p] == 191 { + goto tr3250 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3250 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3250 + } + case data[p] >= 140: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3694: + if p++; p == pe { + goto _test_eof3694 + } + st_case_3694: + if 165 <= data[p] && data[p] <= 166 { + goto tr3250 + } + goto tr2 + st3695: + if p++; p == pe { + goto _test_eof3695 + } + st_case_3695: + switch data[p] { + case 128: + goto st3696 + case 129: + goto st3697 + case 130: + goto st3698 + case 132: + goto st3699 + case 133: + goto st3700 + case 134: + goto st3701 + case 135: + goto st3702 + case 136: + goto st3703 + case 139: + goto st3704 + case 
140: + goto st3705 + case 141: + goto st3706 + case 146: + goto st3707 + case 147: + goto st3708 + case 150: + goto st3709 + case 151: + goto st3710 + case 152: + goto st3707 + case 153: + goto st3711 + case 154: + goto st3712 + case 156: + goto st3713 + } + goto tr2 + st3696: + if p++; p == pe { + goto _test_eof3696 + } + st_case_3696: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3697: + if p++; p == pe { + goto _test_eof3697 + } + st_case_3697: + if 135 <= data[p] && data[p] <= 190 { + goto tr2 + } + goto tr3250 + st3698: + if p++; p == pe { + goto _test_eof3698 + } + st_case_3698: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr2 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3699: + if p++; p == pe { + goto _test_eof3699 + } + st_case_3699: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3700: + if p++; p == pe { + goto _test_eof3700 + } + st_case_3700: + if data[p] == 179 { + goto tr3250 + } + goto tr2 + st3701: + if p++; p == pe { + goto _test_eof3701 + } + st_case_3701: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3702: + if p++; p == pe { + goto _test_eof3702 + } + st_case_3702: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr3250 + st3703: + if p++; p == pe { + goto _test_eof3703 + } + st_case_3703: + if 172 <= data[p] && data[p] <= 183 { + goto tr3250 + } + goto tr2 + st3704: + if p++; p == pe { + goto _test_eof3704 + } + st_case_3704: + if 159 <= data[p] && data[p] <= 170 { + goto tr3250 + } + goto tr2 + st3705: + if p++; p == pe { + goto _test_eof3705 + } + st_case_3705: + if data[p] == 188 { + goto tr3250 + } + switch { + case 
data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3706: + if p++; p == pe { + goto _test_eof3706 + } + st_case_3706: + if data[p] == 151 { + goto tr3250 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3250 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3250 + } + default: + goto tr3250 + } + default: + goto tr3250 + } + goto tr2 + st3707: + if p++; p == pe { + goto _test_eof3707 + } + st_case_3707: + if 176 <= data[p] { + goto tr3250 + } + goto tr2 + st3708: + if p++; p == pe { + goto _test_eof3708 + } + st_case_3708: + if 132 <= data[p] { + goto tr2 + } + goto tr3250 + st3709: + if p++; p == pe { + goto _test_eof3709 + } + st_case_3709: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3250 + } + case data[p] >= 175: + goto tr3250 + } + goto tr2 + st3710: + if p++; p == pe { + goto _test_eof3710 + } + st_case_3710: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr2 + } + case data[p] >= 129: + goto tr2 + } + goto tr3250 + st3711: + if p++; p == pe { + goto _test_eof3711 + } + st_case_3711: + if 129 <= data[p] { + goto tr2 + } + goto tr3250 + st3712: + if p++; p == pe { + goto _test_eof3712 + } + st_case_3712: + if 171 <= data[p] && data[p] <= 183 { + goto tr3250 + } + goto tr2 + st3713: + if p++; p == pe { + goto _test_eof3713 + } + st_case_3713: + if 157 <= data[p] && data[p] <= 171 { + goto tr3250 + } + goto tr2 + st3714: + if p++; p == pe { + goto _test_eof3714 + } + st_case_3714: + switch data[p] { + case 171: + goto st3715 + case 172: + goto st3716 + case 189: + goto st3717 + case 190: + goto st3718 + } + goto tr2 + st3715: + if p++; p == pe { + goto _test_eof3715 + } + st_case_3715: + if 176 <= data[p] 
&& data[p] <= 180 { + goto tr3250 + } + goto tr2 + st3716: + if p++; p == pe { + goto _test_eof3716 + } + st_case_3716: + if 176 <= data[p] && data[p] <= 182 { + goto tr3250 + } + goto tr2 + st3717: + if p++; p == pe { + goto _test_eof3717 + } + st_case_3717: + if 145 <= data[p] && data[p] <= 190 { + goto tr3250 + } + goto tr2 + st3718: + if p++; p == pe { + goto _test_eof3718 + } + st_case_3718: + if 143 <= data[p] && data[p] <= 146 { + goto tr3250 + } + goto tr2 + st3719: + if p++; p == pe { + goto _test_eof3719 + } + st_case_3719: + if data[p] == 178 { + goto st3720 + } + goto tr2 + st3720: + if p++; p == pe { + goto _test_eof3720 + } + st_case_3720: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3250 + } + case data[p] >= 157: + goto tr3250 + } + goto tr2 + st3721: + if p++; p == pe { + goto _test_eof3721 + } + st_case_3721: + switch data[p] { + case 133: + goto st3722 + case 134: + goto st3723 + case 137: + goto st3724 + case 168: + goto st3725 + case 169: + goto st3726 + case 170: + goto st3727 + } + goto tr2 + st3722: + if p++; p == pe { + goto _test_eof3722 + } + st_case_3722: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3250 + } + case data[p] >= 165: + goto tr3250 + } + goto tr2 + st3723: + if p++; p == pe { + goto _test_eof3723 + } + st_case_3723: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2 + } + default: + goto tr2 + } + goto tr3250 + st3724: + if p++; p == pe { + goto _test_eof3724 + } + st_case_3724: + if 130 <= data[p] && data[p] <= 132 { + goto tr3250 + } + goto tr2 + st3725: + if p++; p == pe { + goto _test_eof3725 + } + st_case_3725: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr2 + st3726: + if p++; p == pe { + goto _test_eof3726 + } + st_case_3726: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2 + } + 
case data[p] >= 173: + goto tr2 + } + goto tr3250 + st3727: + if p++; p == pe { + goto _test_eof3727 + } + st_case_3727: + if data[p] == 132 { + goto tr3250 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3250 + } + case data[p] >= 155: + goto tr3250 + } + goto tr2 + st3728: + if p++; p == pe { + goto _test_eof3728 + } + st_case_3728: + if data[p] == 163 { + goto st3729 + } + goto tr2 + st3729: + if p++; p == pe { + goto _test_eof3729 + } + st_case_3729: + if 144 <= data[p] && data[p] <= 150 { + goto tr3250 + } + goto tr2 + st3730: + if p++; p == pe { + goto _test_eof3730 + } + st_case_3730: + if data[p] == 160 { + goto st3731 + } + goto tr3251 + st3731: + if p++; p == pe { + goto _test_eof3731 + } + st_case_3731: + switch data[p] { + case 128: + goto st3732 + case 129: + goto st3733 + case 132: + goto st3595 + case 135: + goto st3596 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3734 + } + goto tr2 + st3732: + if p++; p == pe { + goto _test_eof3732 + } + st_case_3732: + if data[p] == 129 { + goto tr3250 + } + if 160 <= data[p] { + goto tr3250 + } + goto tr2 + st3733: + if p++; p == pe { + goto _test_eof3733 + } + st_case_3733: + if 192 <= data[p] { + goto tr2 + } + goto tr3250 + st3734: + if p++; p == pe { + goto _test_eof3734 + } + st_case_3734: + goto tr3250 + st3735: + if p++; p == pe { + goto _test_eof3735 + } + st_case_3735: + if 180 <= data[p] { + goto tr0 + } + goto tr3250 + st3736: + if p++; p == pe { + goto _test_eof3736 + } + st_case_3736: + if 150 <= data[p] { + goto tr0 + } + goto tr3250 +tr4489: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5077 + st5077: + if p++; p == pe { + goto _test_eof5077 + } + st_case_5077: +//line segment_words_prod.go:112822 + switch data[p] { + case 128: + goto st3737 + case 129: + goto st4175 + case 130: + goto st4317 + case 131: + goto st4101 + case 132: + 
goto st3348 + case 133: + goto st3318 + case 134: + goto st3349 + case 135: + goto st4102 + case 136: + goto st3350 + case 137: + goto st3429 + case 139: + goto st4103 + case 140: + goto st4028 + case 141: + goto st4104 + case 144: + goto st3595 + case 194: + goto st4318 + case 204: + goto st4460 + case 205: + goto st4461 + case 210: + goto st4462 + case 214: + goto st4463 + case 215: + goto st4464 + case 216: + goto st4465 + case 217: + goto st4466 + case 219: + goto st4467 + case 220: + goto st4468 + case 221: + goto st4469 + case 222: + goto st4470 + case 223: + goto st4471 + case 224: + goto st4472 + case 225: + goto st4473 + case 226: + goto st4474 + case 227: + goto st4475 + case 234: + goto st4476 + case 239: + goto st4477 + case 240: + goto st4478 + case 243: + goto st4479 + } + if 145 <= data[p] { + goto st3734 + } + goto tr4499 + st3737: + if p++; p == pe { + goto _test_eof3737 + } + st_case_3737: + switch data[p] { + case 133: + goto tr3374 + case 135: + goto tr3250 + case 187: + goto tr3374 + case 188: + goto tr148 + } + switch { + case data[p] < 174: + switch { + case data[p] > 169: + if 170 <= data[p] && data[p] <= 173 { + goto tr2395 + } + case data[p] >= 161: + goto tr3250 + } + case data[p] > 175: + switch { + case data[p] > 181: + if 184 <= data[p] && data[p] <= 186 { + goto tr3250 + } + case data[p] >= 177: + goto tr3376 + } + default: + goto tr3375 + } + goto tr0 +tr3374: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:119 +act = 4; + goto st5078 + st5078: + if p++; p == pe { + goto _test_eof5078 + } + st_case_5078: +//line segment_words_prod.go:112953 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3738 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3739 + case 205: + goto st3740 + case 206: + goto st873 + case 207: + goto st152 + case 
210: + goto st3741 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3742 + case 215: + goto st3743 + case 216: + goto st3744 + case 217: + goto st3745 + case 219: + goto st3746 + case 220: + goto st3747 + case 221: + goto st3748 + case 222: + goto st3749 + case 223: + goto st3750 + case 224: + goto st3751 + case 225: + goto st3783 + case 226: + goto st3805 + case 227: + goto st3812 + case 234: + goto st3815 + case 237: + goto st287 + case 239: + goto st3831 + case 240: + goto st3837 + case 243: + goto st3879 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st286 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr4562 + st3738: + if p++; p == pe { + goto _test_eof3738 + } + st_case_3738: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr3374 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr420 + st3739: + if p++; p == pe { + goto _test_eof3739 + } + st_case_3739: + if 128 <= data[p] { + goto tr3374 + } + goto tr420 + st3740: + if p++; p == pe { + goto _test_eof3740 + } + st_case_3740: + switch data[p] { + case 181: + goto tr420 + case 190: + goto tr420 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + goto tr3374 + st3741: + if p++; p == pe { + goto _test_eof3741 + } + st_case_3741: + if data[p] == 130 { + goto tr420 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr3374 + } + goto tr148 + st3742: + if p++; p == pe { + goto _test_eof3742 + } + st_case_3742: + if data[p] == 190 { + goto tr420 + } + switch { + case data[p] < 145: + if 136 <= 
data[p] && data[p] <= 144 { + goto tr420 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + default: + goto tr3374 + } + goto tr148 + st3743: + if p++; p == pe { + goto _test_eof3743 + } + st_case_3743: + switch data[p] { + case 135: + goto tr3374 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr3374 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr3374 + } + goto tr420 + st3744: + if p++; p == pe { + goto _test_eof3744 + } + st_case_3744: + if data[p] == 156 { + goto tr3374 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr3374 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr3374 + } + goto tr420 + st3745: + if p++; p == pe { + goto _test_eof3745 + } + st_case_3745: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr3374 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr3374 + } + goto tr420 + st3746: + if p++; p == pe { + goto _test_eof3746 + } + st_case_3746: + switch data[p] { + case 148: + goto tr420 + case 158: + goto tr420 + case 169: + goto tr420 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr3374 + } + case data[p] >= 150: + goto tr3374 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 189: + goto tr420 + } + default: + goto tr421 + } + goto tr148 + st3747: + if p++; p == pe { + goto _test_eof3747 + } + st_case_3747: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 
<= data[p] && data[p] <= 145 { + goto tr3374 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3748: + if p++; p == pe { + goto _test_eof3748 + } + st_case_3748: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr420 + } + goto tr3374 + st3749: + if p++; p == pe { + goto _test_eof3749 + } + st_case_3749: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr3374 + } + goto tr148 + st3750: + if p++; p == pe { + goto _test_eof3750 + } + st_case_3750: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3751: + if p++; p == pe { + goto _test_eof3751 + } + st_case_3751: + switch data[p] { + case 160: + goto st3752 + case 161: + goto st3753 + case 162: + goto st168 + case 163: + goto st3754 + case 164: + goto st3755 + case 165: + goto st3756 + case 166: + goto st3757 + case 167: + goto st3758 + case 168: + goto st3759 + case 169: + goto st3760 + case 170: + goto st3761 + case 171: + goto st3762 + case 172: + goto st3763 + case 173: + goto st3764 + case 174: + goto st3765 + case 175: + goto st3766 + case 176: + goto st3767 + case 177: + goto st3768 + case 178: + goto st3769 + case 179: + goto st3770 + case 180: + goto st3771 + case 181: + goto st3772 + case 182: + goto st3773 + case 183: + goto st3774 + case 184: + goto st3775 + case 185: + goto st3776 + case 186: + goto st3777 + case 187: + goto st3778 + case 188: + goto st3779 + case 189: + goto st3780 + case 190: + goto st3781 + case 191: + goto st3782 + } + goto tr420 + st3752: + if p++; p == pe { + goto _test_eof3752 + } + st_case_3752: + switch data[p] { + case 154: + goto tr148 + case 
164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3753: + if p++; p == pe { + goto _test_eof3753 + } + st_case_3753: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3754: + if p++; p == pe { + goto _test_eof3754 + } + st_case_3754: + if 163 <= data[p] { + goto tr3374 + } + goto tr420 + st3755: + if p++; p == pe { + goto _test_eof3755 + } + st_case_3755: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr3374 + st3756: + if p++; p == pe { + goto _test_eof3756 + } + st_case_3756: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr420 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr3374 + st3757: + if p++; p == pe { + goto _test_eof3757 + } + st_case_3757: + switch data[p] { + case 132: + goto tr420 + case 169: + goto tr420 + case 177: + goto tr420 + case 188: + goto tr3374 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr420 + } + case data[p] >= 129: + goto tr3374 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr420 + } + case data[p] > 187: + if 190 <= data[p] { + goto tr3374 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st3758: + if p++; p == pe { + goto _test_eof3758 + } + st_case_3758: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr420 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr420 + } + case 
data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr420 + } + case data[p] >= 143: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr3374 + st3759: + if p++; p == pe { + goto _test_eof3759 + } + st_case_3759: + if data[p] == 188 { + goto tr3374 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr3374 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3760: + if p++; p == pe { + goto _test_eof3760 + } + st_case_3760: + if data[p] == 157 { + goto tr420 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr420 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr420 + } + case data[p] >= 142: + goto tr420 + } + default: + goto tr420 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr3374 + st3761: + if p++; p == pe { + goto _test_eof3761 
+ } + st_case_3761: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr3374 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3762: + if p++; p == pe { + goto _test_eof3762 + } + st_case_3762: + switch data[p] { + case 134: + goto tr420 + case 138: + goto tr420 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr420 + } + goto tr3374 + st3763: + if p++; p == pe { + goto _test_eof3763 + } + st_case_3763: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr3374 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3764: + if p++; p == pe { + goto _test_eof3764 + } + st_case_3764: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 
<= data[p] && data[p] <= 132 { + goto tr3374 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr3374 + } + default: + goto tr3374 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr3374 + } + default: + goto tr148 + } + default: + goto tr3374 + } + goto tr420 + st3765: + if p++; p == pe { + goto _test_eof3765 + } + st_case_3765: + switch data[p] { + case 130: + goto tr3374 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr3374 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3766: + if p++; p == pe { + goto _test_eof3766 + } + st_case_3766: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr3374 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3374 + } + case data[p] > 136: + switch { + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3767: + if p++; p == pe { + goto _test_eof3767 + } + st_case_3767: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto 
tr3374 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3768: + if p++; p == pe { + goto _test_eof3768 + } + st_case_3768: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 151: + goto tr420 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr420 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr420 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr3374 + st3769: + if p++; p == pe { + goto _test_eof3769 + } + st_case_3769: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr3374 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3770: + if p++; p == pe { + goto _test_eof3770 + } + st_case_3770: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3374 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3374 + } + default: + goto tr3374 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case 
data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3771: + if p++; p == pe { + goto _test_eof3771 + } + st_case_3771: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr3374 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr3374 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3772: + if p++; p == pe { + goto _test_eof3772 + } + st_case_3772: + switch data[p] { + case 133: + goto tr420 + case 137: + goto tr420 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr420 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr420 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr420 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr420 + } + default: + goto tr420 + } + goto tr3374 + st3773: + if p++; p == pe { + goto _test_eof3773 + } + st_case_3773: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3774: + if p++; p == pe { + goto _test_eof3774 + } + st_case_3774: + switch data[p] { + case 138: + goto tr3374 + case 150: + goto tr3374 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + 
goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr3374 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr3374 + } + goto tr420 + st3775: + if p++; p == pe { + goto _test_eof3775 + } + st_case_3775: + if data[p] == 177 { + goto tr3374 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3374 + } + goto tr420 + st3776: + if p++; p == pe { + goto _test_eof3776 + } + st_case_3776: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr3374 + } + goto tr420 + st3777: + if p++; p == pe { + goto _test_eof3777 + } + st_case_3777: + if data[p] == 177 { + goto tr3374 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3374 + } + case data[p] >= 180: + goto tr3374 + } + goto tr420 + st3778: + if p++; p == pe { + goto _test_eof3778 + } + st_case_3778: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr3374 + } + goto tr420 + st3779: + if p++; p == pe { + goto _test_eof3779 + } + st_case_3779: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr3374 + case 183: + goto tr3374 + case 185: + goto tr3374 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr3374 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr3374 + } + default: + goto tr421 + } + goto tr420 + st3780: + if p++; p == pe { + goto _test_eof3780 + } + st_case_3780: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3781: + if p++; p == pe { + goto _test_eof3781 + } + st_case_3781: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto 
tr3374 + } + case data[p] >= 128: + goto tr3374 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3374 + } + case data[p] >= 141: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3782: + if p++; p == pe { + goto _test_eof3782 + } + st_case_3782: + if data[p] == 134 { + goto tr3374 + } + goto tr420 + st3783: + if p++; p == pe { + goto _test_eof3783 + } + st_case_3783: + switch data[p] { + case 128: + goto st3784 + case 129: + goto st3785 + case 130: + goto st3786 + case 131: + goto st202 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st3787 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st3788 + case 157: + goto st3789 + case 158: + goto st3790 + case 159: + goto st3791 + case 160: + goto st3792 + case 161: + goto st219 + case 162: + goto st3793 + case 163: + goto st221 + case 164: + goto st3794 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st3795 + case 169: + goto st3796 + case 170: + goto st3797 + case 172: + goto st3798 + case 173: + goto st3799 + case 174: + goto st3800 + case 175: + goto st3801 + case 176: + goto st3802 + case 177: + goto st640 + case 179: + goto st3803 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st3804 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 180: + if 132 <= data[p] && data[p] <= 152 { + goto st145 + } + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + default: + goto st147 + } + goto tr420 + st3784: + if p++; p == pe { + goto _test_eof3784 + } + st_case_3784: + if 171 <= data[p] && data[p] <= 190 { + goto tr3374 + } + goto tr420 + st3785: + if p++; p == pe { + goto _test_eof3785 + } + st_case_3785: + switch { + 
case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr3374 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr3374 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3374 + } + default: + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3786: + if p++; p == pe { + goto _test_eof3786 + } + st_case_3786: + if data[p] == 143 { + goto tr3374 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr3374 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr3374 + } + default: + goto tr421 + } + goto tr420 + st3787: + if p++; p == pe { + goto _test_eof3787 + } + st_case_3787: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr420 + } + default: + goto tr3374 + } + goto tr148 + st3788: + if p++; p == pe { + goto _test_eof3788 + } + st_case_3788: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr3374 + } + case data[p] >= 160: + goto tr148 + } + default: + goto tr3374 + } + goto tr420 + st3789: + if p++; p == pe { + goto _test_eof3789 + } + st_case_3789: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr3374 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3790: + if p++; p == pe { + goto _test_eof3790 + } + st_case_3790: + if 180 <= 
data[p] { + goto tr3374 + } + goto tr420 + st3791: + if p++; p == pe { + goto _test_eof3791 + } + st_case_3791: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr420 + } + goto tr3374 + st3792: + if p++; p == pe { + goto _test_eof3792 + } + st_case_3792: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr3374 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr420 + st3793: + if p++; p == pe { + goto _test_eof3793 + } + st_case_3793: + if data[p] == 169 { + goto tr3374 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3794: + if p++; p == pe { + goto _test_eof3794 + } + st_case_3794: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3795: + if p++; p == pe { + goto _test_eof3795 + } + st_case_3795: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3796: + if p++; p == pe { + goto _test_eof3796 + } + st_case_3796: + if data[p] == 191 { + goto tr3374 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3374 + } + case data[p] >= 149: + goto tr3374 + } + goto tr420 + st3797: + if p++; p == pe { + goto _test_eof3797 + } + st_case_3797: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr3374 + } + default: + goto tr421 + } + goto tr420 + st3798: + if p++; p == pe { + goto _test_eof3798 + } + st_case_3798: + switch { + 
case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr3374 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3799: + if p++; p == pe { + goto _test_eof3799 + } + st_case_3799: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 154: + goto tr420 + } + default: + goto tr421 + } + goto tr3374 + st3800: + if p++; p == pe { + goto _test_eof3800 + } + st_case_3800: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr3374 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr3374 + } + goto tr420 + st3801: + if p++; p == pe { + goto _test_eof3801 + } + st_case_3801: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr420 + } + case data[p] >= 166: + goto tr3374 + } + goto tr148 + st3802: + if p++; p == pe { + goto _test_eof3802 + } + st_case_3802: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] <= 183 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3803: + if p++; p == pe { + goto _test_eof3803 + } + st_case_3803: + if data[p] == 173 { + goto tr3374 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr3374 + } + case data[p] >= 144: + goto tr3374 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr3374 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr3374 + } + default: + goto tr148 + } + default: + goto 
tr148 + } + goto tr420 + st3804: + if p++; p == pe { + goto _test_eof3804 + } + st_case_3804: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3374 + } + case data[p] >= 128: + goto tr3374 + } + goto tr420 + st3805: + if p++; p == pe { + goto _test_eof3805 + } + st_case_3805: + switch data[p] { + case 128: + goto st3806 + case 129: + goto st3807 + case 130: + goto st241 + case 131: + goto st3808 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st3809 + case 180: + goto st251 + case 181: + goto st3810 + case 182: + goto st253 + case 183: + goto st3811 + case 184: + goto st255 + } + goto tr420 + st3806: + if p++; p == pe { + goto _test_eof3806 + } + st_case_3806: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr3374 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr3374 + } + default: + goto st142 + } + goto tr420 + st3807: + if p++; p == pe { + goto _test_eof3807 + } + st_case_3807: + switch data[p] { + case 165: + goto tr420 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 160: + goto tr3374 + } + default: + goto tr420 + } + goto tr571 + st3808: + if p++; p == pe { + goto _test_eof3808 + } + st_case_3808: + if 144 <= data[p] && data[p] <= 176 { + goto tr3374 + } + goto tr420 + st3809: + if p++; p == pe { + goto _test_eof3809 + } + st_case_3809: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr420 + } + case data[p] > 177: + if 180 <= data[p] { + 
goto tr420 + } + default: + goto tr3374 + } + goto tr148 + st3810: + if p++; p == pe { + goto _test_eof3810 + } + st_case_3810: + if data[p] == 191 { + goto tr3374 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr420 + } + case data[p] >= 168: + goto tr420 + } + goto tr148 + st3811: + if p++; p == pe { + goto _test_eof3811 + } + st_case_3811: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr3374 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3812: + if p++; p == pe { + goto _test_eof3812 + } + st_case_3812: + switch data[p] { + case 128: + goto st3813 + case 130: + goto st3814 + case 132: + goto st259 + case 133: + goto st145 + case 134: + goto st260 + } + goto tr420 + st3813: + if p++; p == pe { + goto _test_eof3813 + } + st_case_3813: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr3374 + } + goto tr420 + st3814: + if p++; p == pe { + goto _test_eof3814 + } + st_case_3814: + if 153 <= data[p] && data[p] <= 154 { + goto tr3374 + } + goto tr420 + st3815: + if p++; p == pe { + goto _test_eof3815 + } + st_case_3815: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st3816 + case 154: + goto st3817 + case 155: + goto st3818 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st3819 + case 161: + goto st272 + case 162: + goto st3820 + case 163: + goto st3821 + case 164: + goto st3822 + case 165: + goto st3823 + case 166: + goto st3824 + case 167: + goto st3825 + case 168: + goto st3826 + case 169: + goto st3827 + case 170: + goto 
st3828 + case 171: + goto st3829 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st3830 + case 176: + goto st147 + } + if 129 <= data[p] { + goto st145 + } + goto tr420 + st3816: + if p++; p == pe { + goto _test_eof3816 + } + st_case_3816: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3817: + if p++; p == pe { + goto _test_eof3817 + } + st_case_3817: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr3374 + } + goto tr420 + st3818: + if p++; p == pe { + goto _test_eof3818 + } + st_case_3818: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr3374 + } + goto tr148 + st3819: + if p++; p == pe { + goto _test_eof3819 + } + st_case_3819: + switch data[p] { + case 130: + goto tr3374 + case 134: + goto tr3374 + case 139: + goto tr3374 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr420 + } + case data[p] >= 163: + goto tr3374 + } + goto tr148 + st3820: + if p++; p == pe { + goto _test_eof3820 + } + st_case_3820: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr3374 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3821: + if p++; p == pe { + goto _test_eof3821 + } + st_case_3821: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr420 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr420 + } + case data[p] >= 178: + goto tr148 + 
} + default: + goto tr420 + } + goto tr3374 + st3822: + if p++; p == pe { + goto _test_eof3822 + } + st_case_3822: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3823: + if p++; p == pe { + goto _test_eof3823 + } + st_case_3823: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr3374 + } + case data[p] > 159: + if 189 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr148 + st3824: + if p++; p == pe { + goto _test_eof3824 + } + st_case_3824: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3825: + if p++; p == pe { + goto _test_eof3825 + } + st_case_3825: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: + goto tr420 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr420 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + default: + goto tr420 + } + goto tr3374 + st3826: + if p++; p == pe { + goto _test_eof3826 + } + st_case_3826: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3827: + if p++; p == pe { + goto _test_eof3827 + } + st_case_3827: + if data[p] == 131 { + goto tr3374 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr3374 + } + case data[p] >= 144: + goto tr421 + } + 
default: + goto tr3374 + } + goto tr420 + st3828: + if p++; p == pe { + goto _test_eof3828 + } + st_case_3828: + if data[p] == 176 { + goto tr3374 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3374 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3829: + if p++; p == pe { + goto _test_eof3829 + } + st_case_3829: + if data[p] == 129 { + goto tr3374 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr3374 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr3374 + } + goto tr420 + st3830: + if p++; p == pe { + goto _test_eof3830 + } + st_case_3830: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3831: + if p++; p == pe { + goto _test_eof3831 + } + st_case_3831: + switch data[p] { + case 172: + goto st3832 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st3833 + case 185: + goto st967 + case 187: + goto st3834 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st3835 + case 191: + goto st3836 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr420 + st3832: + if p++; p == pe { + goto _test_eof3832 + } + st_case_3832: + switch data[p] { + case 158: + goto tr3374 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch 
{ + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr420 + st3833: + if p++; p == pe { + goto _test_eof3833 + } + st_case_3833: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr3374 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr3374 + } + goto tr420 + st3834: + if p++; p == pe { + goto _test_eof3834 + } + st_case_3834: + if data[p] == 191 { + goto tr3374 + } + if 189 <= data[p] { + goto tr420 + } + goto tr148 + st3835: + if p++; p == pe { + goto _test_eof3835 + } + st_case_3835: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr148 + } + case data[p] >= 158: + goto tr3374 + } + goto tr420 + st3836: + if p++; p == pe { + goto _test_eof3836 + } + st_case_3836: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr148 + } + case data[p] >= 130: + goto tr148 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr3374 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3837: + if p++; p == pe { + goto _test_eof3837 + } + st_case_3837: + switch data[p] { + case 144: + goto st3838 + case 145: + goto st3844 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st3863 + case 155: + goto st3868 + case 157: + goto st3870 + case 158: + goto st3877 + case 159: + goto st403 + } + goto tr420 + st3838: + if p++; p == pe { + goto _test_eof3838 + } + st_case_3838: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st3839 + case 138: + goto st313 + case 139: + goto st3840 + case 140: + goto st315 + case 141: + goto st3841 + 
case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st3842 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st3843 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr420 + st3839: + if p++; p == pe { + goto _test_eof3839 + } + st_case_3839: + if data[p] == 189 { + goto tr3374 + } + goto tr420 + st3840: + if p++; p == pe { + goto _test_eof3840 + } + st_case_3840: + if data[p] == 160 { + goto tr3374 + } + if 145 <= data[p] { + goto tr420 + } + goto tr148 + st3841: + if p++; p == pe { + goto _test_eof3841 + } + st_case_3841: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr420 + } + default: + goto tr3374 + } + goto tr148 + st3842: + if p++; p == pe { + goto _test_eof3842 + } + st_case_3842: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr3374 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3374 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr3374 + } + default: + goto tr3374 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr3374 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3843: + if p++; p == pe { + goto 
_test_eof3843 + } + st_case_3843: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3844: + if p++; p == pe { + goto _test_eof3844 + } + st_case_3844: + switch data[p] { + case 128: + goto st3845 + case 129: + goto st3846 + case 130: + goto st3847 + case 131: + goto st691 + case 132: + goto st3848 + case 133: + goto st3849 + case 134: + goto st3850 + case 135: + goto st3851 + case 136: + goto st3852 + case 138: + goto st348 + case 139: + goto st3853 + case 140: + goto st3854 + case 141: + goto st3855 + case 146: + goto st3856 + case 147: + goto st3857 + case 150: + goto st3858 + case 151: + goto st3859 + case 152: + goto st3856 + case 153: + goto st3860 + case 154: + goto st3861 + case 155: + goto st538 + case 156: + goto st3862 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr420 + st3845: + if p++; p == pe { + goto _test_eof3845 + } + st_case_3845: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr3374 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3846: + if p++; p == pe { + goto _test_eof3846 + } + st_case_3846: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr420 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr420 + } + default: + goto tr421 + } + goto tr3374 + st3847: + if p++; p == pe { + goto _test_eof3847 + } + st_case_3847: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr3374 + st3848: + if p++; p == pe { + goto _test_eof3848 + } + st_case_3848: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr3374 + } + case data[p] > 166: + switch 
{ + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3849: + if p++; p == pe { + goto _test_eof3849 + } + st_case_3849: + switch data[p] { + case 179: + goto tr3374 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr420 + st3850: + if p++; p == pe { + goto _test_eof3850 + } + st_case_3850: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr3374 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3851: + if p++; p == pe { + goto _test_eof3851 + } + st_case_3851: + if data[p] == 155 { + goto tr420 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr420 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + default: + goto tr420 + } + goto tr3374 + st3852: + if p++; p == pe { + goto _test_eof3852 + } + st_case_3852: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3853: + if p++; p == pe { + goto _test_eof3853 + } + st_case_3853: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr3374 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr420 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr420 + } + goto tr148 + st3854: + if p++; p == pe { + goto _test_eof3854 + } + st_case_3854: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + 
goto tr3374 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr3374 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr420 + st3855: + if p++; p == pe { + goto _test_eof3855 + } + st_case_3855: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr3374 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr3374 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr3374 + } + default: + goto tr3374 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3374 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3374 + } + default: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3856: + if p++; p == pe { + goto _test_eof3856 + } + st_case_3856: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3857: + if p++; p == pe { + goto _test_eof3857 + } + st_case_3857: + if data[p] == 134 { + goto tr420 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr420 + } + goto tr3374 + st3858: + if p++; p == pe { + goto _test_eof3858 + } + st_case_3858: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr3374 + } + default: + goto tr3374 + } + goto tr420 + st3859: + if p++; p == pe { + goto _test_eof3859 + } + 
st_case_3859: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr420 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr420 + } + default: + goto tr148 + } + goto tr3374 + st3860: + if p++; p == pe { + goto _test_eof3860 + } + st_case_3860: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr420 + } + default: + goto tr421 + } + goto tr3374 + st3861: + if p++; p == pe { + goto _test_eof3861 + } + st_case_3861: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3862: + if p++; p == pe { + goto _test_eof3862 + } + st_case_3862: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr3374 + } + goto tr420 + st3863: + if p++; p == pe { + goto _test_eof3863 + } + st_case_3863: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + case 171: + goto st3864 + case 172: + goto st3865 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st3866 + case 190: + goto st3867 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr420 + st3864: + if p++; p == pe { + goto _test_eof3864 + } + st_case_3864: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr3374 + } + case data[p] >= 144: + goto tr148 + } + goto tr420 + st3865: + if p++; p == pe { + goto _test_eof3865 + } + st_case_3865: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr3374 + } + case data[p] >= 128: + goto tr148 + } + goto tr420 + st3866: + if p++; p == pe { + goto _test_eof3866 + } + st_case_3866: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 190: + if 191 <= data[p] { + goto 
tr420 + } + default: + goto tr3374 + } + goto tr148 + st3867: + if p++; p == pe { + goto _test_eof3867 + } + st_case_3867: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr3374 + } + goto tr420 + st3868: + if p++; p == pe { + goto _test_eof3868 + } + st_case_3868: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st3869 + } + goto tr420 + st3869: + if p++; p == pe { + goto _test_eof3869 + } + st_case_3869: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3374 + } + case data[p] >= 157: + goto tr3374 + } + default: + goto tr148 + } + goto tr420 + st3870: + if p++; p == pe { + goto _test_eof3870 + } + st_case_3870: + switch data[p] { + case 133: + goto st3871 + case 134: + goto st3872 + case 137: + goto st3873 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st3874 + case 169: + goto st3875 + case 170: + goto st3876 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr420 + st3871: + if p++; p == pe { + goto _test_eof3871 + } + st_case_3871: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3374 + } + case data[p] >= 165: + goto tr3374 + } + goto tr420 + st3872: + if p++; p == pe { + goto _test_eof3872 + } + st_case_3872: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr420 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr420 + } + default: + goto tr420 + } + goto tr3374 + st3873: + if p++; p == pe { + goto _test_eof3873 + } + st_case_3873: + if 130 <= data[p] && data[p] <= 132 { 
+ goto tr3374 + } + goto tr420 + st3874: + if p++; p == pe { + goto _test_eof3874 + } + st_case_3874: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr3374 + } + case data[p] >= 128: + goto tr3374 + } + goto tr420 + st3875: + if p++; p == pe { + goto _test_eof3875 + } + st_case_3875: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr420 + } + case data[p] >= 173: + goto tr420 + } + goto tr3374 + st3876: + if p++; p == pe { + goto _test_eof3876 + } + st_case_3876: + if data[p] == 132 { + goto tr3374 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3374 + } + case data[p] >= 155: + goto tr3374 + } + goto tr420 + st3877: + if p++; p == pe { + goto _test_eof3877 + } + st_case_3877: + switch data[p] { + case 160: + goto st147 + case 163: + goto st3878 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr420 + st3878: + if p++; p == pe { + goto _test_eof3878 + } + st_case_3878: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr420 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr420 + } + default: + goto tr3374 + } + goto tr148 + st3879: + if p++; p == pe { + goto _test_eof3879 + } + st_case_3879: + if data[p] == 160 { + goto st3880 + } + goto tr420 + st3880: + if p++; p == pe { + goto _test_eof3880 + } + st_case_3880: + switch data[p] { + case 128: + goto st3881 + case 129: + goto st3882 + case 132: + goto st3739 + case 135: + goto st3884 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3883 + } + goto tr420 + st3881: + if p++; p == pe { + goto _test_eof3881 + } + st_case_3881: + if data[p] == 129 { + goto tr3374 + } + if 160 <= data[p] { + goto tr3374 + } + goto tr420 + st3882: + if p++; p == pe { + goto _test_eof3882 + } + st_case_3882: + if 192 <= data[p] { + goto tr420 + } + goto tr3374 + st3883: + if p++; p == pe { + goto _test_eof3883 + } + st_case_3883: + goto 
tr3374 + st3884: + if p++; p == pe { + goto _test_eof3884 + } + st_case_3884: + if 176 <= data[p] { + goto tr420 + } + goto tr3374 +tr3375: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5079 + st5079: + if p++; p == pe { + goto _test_eof5079 + } + st_case_5079: +//line segment_words_prod.go:116725 + switch data[p] { + case 194: + goto st3885 + case 204: + goto st3886 + case 205: + goto st3887 + case 210: + goto st3888 + case 214: + goto st3889 + case 215: + goto st3890 + case 216: + goto st3891 + case 217: + goto st3892 + case 219: + goto st3893 + case 220: + goto st3894 + case 221: + goto st3895 + case 222: + goto st3896 + case 223: + goto st3897 + case 224: + goto st3898 + case 225: + goto st3927 + case 226: + goto st3949 + case 227: + goto st3956 + case 234: + goto st3959 + case 237: + goto st3447 + case 239: + goto st3975 + case 240: + goto st3980 + case 243: + goto st4022 + } + if 235 <= data[p] && data[p] <= 236 { + goto st3446 + } + goto tr5002 + st3885: + if p++; p == pe { + goto _test_eof3885 + } + st_case_3885: + if data[p] == 173 { + goto tr3375 + } + goto tr2985 + st3886: + if p++; p == pe { + goto _test_eof3886 + } + st_case_3886: + if 128 <= data[p] { + goto tr3375 + } + goto tr2985 + st3887: + if p++; p == pe { + goto _test_eof3887 + } + st_case_3887: + if 176 <= data[p] { + goto tr2985 + } + goto tr3375 + st3888: + if p++; p == pe { + goto _test_eof3888 + } + st_case_3888: + if 131 <= data[p] && data[p] <= 137 { + goto tr3375 + } + goto tr2985 + st3889: + if p++; p == pe { + goto _test_eof3889 + } + st_case_3889: + if data[p] == 191 { + goto tr3375 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3375 + } + goto tr2985 + st3890: + if p++; p == pe { + goto _test_eof3890 + } + st_case_3890: + if data[p] == 135 { + goto tr3375 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3375 + } + case data[p] >= 129: + goto tr3375 + } + goto tr2985 + 
st3891: + if p++; p == pe { + goto _test_eof3891 + } + st_case_3891: + if data[p] == 156 { + goto tr3375 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3892: + if p++; p == pe { + goto _test_eof3892 + } + st_case_3892: + if data[p] == 176 { + goto tr3375 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3375 + } + goto tr2985 + st3893: + if p++; p == pe { + goto _test_eof3893 + } + st_case_3893: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3375 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3375 + } + case data[p] >= 167: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3894: + if p++; p == pe { + goto _test_eof3894 + } + st_case_3894: + switch data[p] { + case 143: + goto tr3375 + case 145: + goto tr3375 + } + if 176 <= data[p] { + goto tr3375 + } + goto tr2985 + st3895: + if p++; p == pe { + goto _test_eof3895 + } + st_case_3895: + if 139 <= data[p] { + goto tr2985 + } + goto tr3375 + st3896: + if p++; p == pe { + goto _test_eof3896 + } + st_case_3896: + if 166 <= data[p] && data[p] <= 176 { + goto tr3375 + } + goto tr2985 + st3897: + if p++; p == pe { + goto _test_eof3897 + } + st_case_3897: + if 171 <= data[p] && data[p] <= 179 { + goto tr3375 + } + goto tr2985 + st3898: + if p++; p == pe { + goto _test_eof3898 + } + st_case_3898: + switch data[p] { + case 160: + goto st3899 + case 161: + goto st3900 + case 163: + goto st3901 + case 164: + goto st3902 + case 165: + goto st3903 + case 167: + goto st3905 + case 169: + goto st3906 + case 171: + goto st3907 + case 173: + goto st3909 + case 174: + goto st3910 + case 175: + goto st3911 + case 176: + goto st3912 + case 177: + goto st3913 + case 179: + goto st3914 + case 180: + goto st3915 + case 181: + goto st3916 + case 182: + goto st3917 + case 183: + goto st3918 + case 184: + goto st3919 + case 
185: + goto st3920 + case 186: + goto st3921 + case 187: + goto st3922 + case 188: + goto st3923 + case 189: + goto st3924 + case 190: + goto st3925 + case 191: + goto st3926 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st3908 + } + case data[p] >= 166: + goto st3904 + } + goto tr2985 + st3899: + if p++; p == pe { + goto _test_eof3899 + } + st_case_3899: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3375 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3375 + } + case data[p] >= 165: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3900: + if p++; p == pe { + goto _test_eof3900 + } + st_case_3900: + if 153 <= data[p] && data[p] <= 155 { + goto tr3375 + } + goto tr2985 + st3901: + if p++; p == pe { + goto _test_eof3901 + } + st_case_3901: + if 163 <= data[p] { + goto tr3375 + } + goto tr2985 + st3902: + if p++; p == pe { + goto _test_eof3902 + } + st_case_3902: + if data[p] == 189 { + goto tr2985 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr2985 + } + goto tr3375 + st3903: + if p++; p == pe { + goto _test_eof3903 + } + st_case_3903: + if data[p] == 144 { + goto tr2985 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 152: + goto tr2985 + } + goto tr3375 + st3904: + if p++; p == pe { + goto _test_eof3904 + } + st_case_3904: + if data[p] == 188 { + goto tr3375 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3375 + } + case data[p] >= 129: + goto tr3375 + } + goto tr2985 + st3905: + if p++; p == pe { + goto _test_eof3905 + } + st_case_3905: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2985 + } + case data[p] >= 133: + goto tr2985 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 152: + goto tr2985 + } + default: + 
goto tr2985 + } + goto tr3375 + st3906: + if p++; p == pe { + goto _test_eof3906 + } + st_case_3906: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr2985 + } + case data[p] >= 131: + goto tr2985 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr2985 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + default: + goto tr2985 + } + goto tr3375 + st3907: + if p++; p == pe { + goto _test_eof3907 + } + st_case_3907: + switch data[p] { + case 134: + goto tr2985 + case 138: + goto tr2985 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + case data[p] >= 142: + goto tr2985 + } + goto tr3375 + st3908: + if p++; p == pe { + goto _test_eof3908 + } + st_case_3908: + if data[p] == 188 { + goto tr3375 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3375 + } + case data[p] >= 129: + goto tr3375 + } + goto tr2985 + st3909: + if p++; p == pe { + goto _test_eof3909 + } + st_case_3909: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3375 + } + case data[p] >= 150: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3910: + if p++; p == pe { + goto _test_eof3910 + } + st_case_3910: + if data[p] == 130 { + goto tr3375 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3375 + } + goto tr2985 + st3911: + if p++; p == pe { + goto _test_eof3911 + } + st_case_3911: + if data[p] == 151 { + goto tr3375 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3375 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3912: + if 
p++; p == pe { + goto _test_eof3912 + } + st_case_3912: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3913: + if p++; p == pe { + goto _test_eof3913 + } + st_case_3913: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr2985 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3375 + st3914: + if p++; p == pe { + goto _test_eof3914 + } + st_case_3914: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3375 + } + case data[p] >= 149: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3915: + if p++; p == pe { + goto _test_eof3915 + } + st_case_3915: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3375 + } + case data[p] >= 129: + goto tr3375 + } + goto tr2985 + st3916: + if p++; p == pe { + goto _test_eof3916 + } + st_case_3916: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr2985 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3375 + st3917: + if p++; p == pe { + goto _test_eof3917 + } + st_case_3917: + if 130 <= data[p] && data[p] <= 131 { + goto tr3375 + } + goto tr2985 + st3918: + if p++; p == pe { + goto _test_eof3918 + } + st_case_3918: + switch data[p] { + case 138: + goto tr3375 + case 150: + goto tr3375 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3375 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3375 + } + default: + goto tr3375 + } + goto 
tr2985 + st3919: + if p++; p == pe { + goto _test_eof3919 + } + st_case_3919: + if data[p] == 177 { + goto tr3375 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3375 + } + goto tr2985 + st3920: + if p++; p == pe { + goto _test_eof3920 + } + st_case_3920: + if 135 <= data[p] && data[p] <= 142 { + goto tr3375 + } + goto tr2985 + st3921: + if p++; p == pe { + goto _test_eof3921 + } + st_case_3921: + if data[p] == 177 { + goto tr3375 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3375 + } + case data[p] >= 180: + goto tr3375 + } + goto tr2985 + st3922: + if p++; p == pe { + goto _test_eof3922 + } + st_case_3922: + if 136 <= data[p] && data[p] <= 141 { + goto tr3375 + } + goto tr2985 + st3923: + if p++; p == pe { + goto _test_eof3923 + } + st_case_3923: + switch data[p] { + case 181: + goto tr3375 + case 183: + goto tr3375 + case 185: + goto tr3375 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3375 + } + case data[p] >= 152: + goto tr3375 + } + goto tr2985 + st3924: + if p++; p == pe { + goto _test_eof3924 + } + st_case_3924: + if 177 <= data[p] && data[p] <= 191 { + goto tr3375 + } + goto tr2985 + st3925: + if p++; p == pe { + goto _test_eof3925 + } + st_case_3925: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3375 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3375 + } + case data[p] >= 141: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3926: + if p++; p == pe { + goto _test_eof3926 + } + st_case_3926: + if data[p] == 134 { + goto tr3375 + } + goto tr2985 + st3927: + if p++; p == pe { + goto _test_eof3927 + } + st_case_3927: + switch data[p] { + case 128: + goto st3928 + case 129: + goto st3929 + case 130: + goto st3930 + case 132: + goto st3352 + case 135: + goto st3398 + case 141: + goto st3931 + case 156: + goto st3932 + case 157: + goto st3933 + case 158: + goto 
st3934 + case 159: + goto st3935 + case 160: + goto st3936 + case 162: + goto st3937 + case 164: + goto st3938 + case 168: + goto st3939 + case 169: + goto st3940 + case 170: + goto st3941 + case 172: + goto st3942 + case 173: + goto st3943 + case 174: + goto st3944 + case 175: + goto st3945 + case 176: + goto st3946 + case 179: + goto st3947 + case 183: + goto st3948 + } + if 133 <= data[p] && data[p] <= 134 { + goto st3397 + } + goto tr2985 + st3928: + if p++; p == pe { + goto _test_eof3928 + } + st_case_3928: + if 171 <= data[p] && data[p] <= 190 { + goto tr3375 + } + goto tr2985 + st3929: + if p++; p == pe { + goto _test_eof3929 + } + st_case_3929: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3375 + } + case data[p] >= 150: + goto tr3375 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3375 + } + case data[p] >= 167: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3930: + if p++; p == pe { + goto _test_eof3930 + } + st_case_3930: + if data[p] == 143 { + goto tr3375 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3375 + } + case data[p] >= 130: + goto tr3375 + } + goto tr2985 + st3931: + if p++; p == pe { + goto _test_eof3931 + } + st_case_3931: + if 157 <= data[p] && data[p] <= 159 { + goto tr3375 + } + goto tr2985 + st3932: + if p++; p == pe { + goto _test_eof3932 + } + st_case_3932: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3375 + } + case data[p] >= 146: + goto tr3375 + } + goto tr2985 + st3933: + if p++; p == pe { + goto _test_eof3933 + } + st_case_3933: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3375 + } + case data[p] >= 146: + goto tr3375 + } + goto tr2985 + st3934: + if p++; p == pe { + goto _test_eof3934 + } + st_case_3934: + if 180 <= data[p] { + goto tr3375 + } + goto tr2985 + st3935: + if 
p++; p == pe { + goto _test_eof3935 + } + st_case_3935: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr2985 + } + case data[p] >= 148: + goto tr2985 + } + goto tr3375 + st3936: + if p++; p == pe { + goto _test_eof3936 + } + st_case_3936: + if 139 <= data[p] && data[p] <= 142 { + goto tr3375 + } + goto tr2985 + st3937: + if p++; p == pe { + goto _test_eof3937 + } + st_case_3937: + if data[p] == 169 { + goto tr3375 + } + goto tr2985 + st3938: + if p++; p == pe { + goto _test_eof3938 + } + st_case_3938: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3375 + } + case data[p] >= 160: + goto tr3375 + } + goto tr2985 + st3939: + if p++; p == pe { + goto _test_eof3939 + } + st_case_3939: + if 151 <= data[p] && data[p] <= 155 { + goto tr3375 + } + goto tr2985 + st3940: + if p++; p == pe { + goto _test_eof3940 + } + st_case_3940: + if data[p] == 191 { + goto tr3375 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3375 + } + case data[p] >= 149: + goto tr3375 + } + goto tr2985 + st3941: + if p++; p == pe { + goto _test_eof3941 + } + st_case_3941: + if 176 <= data[p] && data[p] <= 190 { + goto tr3375 + } + goto tr2985 + st3942: + if p++; p == pe { + goto _test_eof3942 + } + st_case_3942: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3943: + if p++; p == pe { + goto _test_eof3943 + } + st_case_3943: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2985 + } + case data[p] >= 133: + goto tr2985 + } + goto tr3375 + st3944: + if p++; p == pe { + goto _test_eof3944 + } + st_case_3944: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3945: + if p++; p == pe { + goto _test_eof3945 + } + st_case_3945: + if 166 <= data[p] && data[p] <= 179 { + goto tr3375 + } + goto tr2985 + st3946: + if p++; p == pe { 
+ goto _test_eof3946 + } + st_case_3946: + if 164 <= data[p] && data[p] <= 183 { + goto tr3375 + } + goto tr2985 + st3947: + if p++; p == pe { + goto _test_eof3947 + } + st_case_3947: + if data[p] == 173 { + goto tr3375 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3375 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3375 + } + case data[p] >= 178: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3948: + if p++; p == pe { + goto _test_eof3948 + } + st_case_3948: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3949: + if p++; p == pe { + goto _test_eof3949 + } + st_case_3949: + switch data[p] { + case 128: + goto st3950 + case 129: + goto st3951 + case 131: + goto st3952 + case 179: + goto st3953 + case 181: + goto st3954 + case 183: + goto st3955 + } + goto tr2985 + st3950: + if p++; p == pe { + goto _test_eof3950 + } + st_case_3950: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr3375 + } + case data[p] >= 140: + goto tr3375 + } + goto tr2985 + st3951: + if p++; p == pe { + goto _test_eof3951 + } + st_case_3951: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr3375 + } + case data[p] >= 160: + goto tr3375 + } + goto tr2985 + st3952: + if p++; p == pe { + goto _test_eof3952 + } + st_case_3952: + if 144 <= data[p] && data[p] <= 176 { + goto tr3375 + } + goto tr2985 + st3953: + if p++; p == pe { + goto _test_eof3953 + } + st_case_3953: + if 175 <= data[p] && data[p] <= 177 { + goto tr3375 + } + goto tr2985 + st3954: + if p++; p == pe { + goto _test_eof3954 + } + st_case_3954: + if data[p] == 191 { + goto tr3375 + } + goto tr2985 + st3955: + if p++; p == pe { + goto _test_eof3955 + } + st_case_3955: + if 160 <= data[p] && data[p] <= 191 { + goto tr3375 + } + goto tr2985 + st3956: + if 
p++; p == pe { + goto _test_eof3956 + } + st_case_3956: + switch data[p] { + case 128: + goto st3957 + case 130: + goto st3958 + case 132: + goto st3427 + case 133: + goto st3397 + case 134: + goto st3428 + case 136: + goto st3350 + case 137: + goto st3429 + } + goto tr2985 + st3957: + if p++; p == pe { + goto _test_eof3957 + } + st_case_3957: + if 170 <= data[p] && data[p] <= 175 { + goto tr3375 + } + goto tr2985 + st3958: + if p++; p == pe { + goto _test_eof3958 + } + st_case_3958: + if 153 <= data[p] && data[p] <= 154 { + goto tr3375 + } + goto tr2985 + st3959: + if p++; p == pe { + goto _test_eof3959 + } + st_case_3959: + switch data[p] { + case 153: + goto st3960 + case 154: + goto st3961 + case 155: + goto st3962 + case 160: + goto st3963 + case 162: + goto st3964 + case 163: + goto st3965 + case 164: + goto st3966 + case 165: + goto st3967 + case 166: + goto st3968 + case 167: + goto st3969 + case 168: + goto st3970 + case 169: + goto st3971 + case 170: + goto st3972 + case 171: + goto st3973 + case 175: + goto st3974 + case 176: + goto st3352 + } + if 177 <= data[p] { + goto st3397 + } + goto tr2985 + st3960: + if p++; p == pe { + goto _test_eof3960 + } + st_case_3960: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3375 + } + case data[p] >= 175: + goto tr3375 + } + goto tr2985 + st3961: + if p++; p == pe { + goto _test_eof3961 + } + st_case_3961: + if 158 <= data[p] && data[p] <= 159 { + goto tr3375 + } + goto tr2985 + st3962: + if p++; p == pe { + goto _test_eof3962 + } + st_case_3962: + if 176 <= data[p] && data[p] <= 177 { + goto tr3375 + } + goto tr2985 + st3963: + if p++; p == pe { + goto _test_eof3963 + } + st_case_3963: + switch data[p] { + case 130: + goto tr3375 + case 134: + goto tr3375 + case 139: + goto tr3375 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3375 + } + goto tr2985 + st3964: + if p++; p == pe { + goto _test_eof3964 + } + st_case_3964: + switch { + case data[p] > 129: + if 180 <= data[p] { + 
goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3965: + if p++; p == pe { + goto _test_eof3965 + } + st_case_3965: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr2985 + } + case data[p] >= 133: + goto tr2985 + } + goto tr3375 + st3966: + if p++; p == pe { + goto _test_eof3966 + } + st_case_3966: + if 166 <= data[p] && data[p] <= 173 { + goto tr3375 + } + goto tr2985 + st3967: + if p++; p == pe { + goto _test_eof3967 + } + st_case_3967: + switch { + case data[p] > 147: + if 160 <= data[p] && data[p] <= 188 { + goto tr3053 + } + case data[p] >= 135: + goto tr3375 + } + goto tr2985 + st3968: + if p++; p == pe { + goto _test_eof3968 + } + st_case_3968: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3969: + if p++; p == pe { + goto _test_eof3969 + } + st_case_3969: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3375 + st3970: + if p++; p == pe { + goto _test_eof3970 + } + st_case_3970: + if 169 <= data[p] && data[p] <= 182 { + goto tr3375 + } + goto tr2985 + st3971: + if p++; p == pe { + goto _test_eof3971 + } + st_case_3971: + if data[p] == 131 { + goto tr3375 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3375 + } + case data[p] >= 140: + goto tr3375 + } + goto tr2985 + st3972: + if p++; p == pe { + goto _test_eof3972 + } + st_case_3972: + if data[p] == 176 { + goto tr3375 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3375 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3973: + if p++; p == pe { + goto _test_eof3973 + } + st_case_3973: + if data[p] == 129 { + goto tr3375 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3375 + } + case data[p] >= 171: + goto tr3375 + } + 
goto tr2985 + st3974: + if p++; p == pe { + goto _test_eof3974 + } + st_case_3974: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3375 + } + case data[p] >= 163: + goto tr3375 + } + goto tr2985 + st3975: + if p++; p == pe { + goto _test_eof3975 + } + st_case_3975: + switch data[p] { + case 172: + goto st3976 + case 184: + goto st3977 + case 187: + goto st3954 + case 190: + goto st3978 + case 191: + goto st3979 + } + goto tr2985 + st3976: + if p++; p == pe { + goto _test_eof3976 + } + st_case_3976: + if data[p] == 158 { + goto tr3375 + } + goto tr2985 + st3977: + if p++; p == pe { + goto _test_eof3977 + } + st_case_3977: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3978: + if p++; p == pe { + goto _test_eof3978 + } + st_case_3978: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr3053 + } + case data[p] >= 158: + goto tr3375 + } + goto tr2985 + st3979: + if p++; p == pe { + goto _test_eof3979 + } + st_case_3979: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr3053 + } + case data[p] >= 130: + goto tr3053 + } + case data[p] > 151: + switch { + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr3375 + } + case data[p] >= 154: + goto tr3053 + } + default: + goto tr3053 + } + goto tr2985 + st3980: + if p++; p == pe { + goto _test_eof3980 + } + st_case_3980: + switch data[p] { + case 144: + goto st3981 + case 145: + goto st3987 + case 150: + goto st4006 + case 155: + goto st4011 + case 157: + goto st4013 + case 158: + goto st4020 + } + goto tr2985 + st3981: + if p++; p == pe { + goto _test_eof3981 + } + st_case_3981: + switch data[p] { + case 135: + goto st3982 + case 139: + goto st3983 + case 141: + goto st3984 + case 168: + goto st3985 + case 171: + goto st3986 + } + goto tr2985 + st3982: + if p++; p == pe { + goto 
_test_eof3982 + } + st_case_3982: + if data[p] == 189 { + goto tr3375 + } + goto tr2985 + st3983: + if p++; p == pe { + goto _test_eof3983 + } + st_case_3983: + if data[p] == 160 { + goto tr3375 + } + goto tr2985 + st3984: + if p++; p == pe { + goto _test_eof3984 + } + st_case_3984: + if 182 <= data[p] && data[p] <= 186 { + goto tr3375 + } + goto tr2985 + st3985: + if p++; p == pe { + goto _test_eof3985 + } + st_case_3985: + if data[p] == 191 { + goto tr3375 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3375 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3375 + } + case data[p] >= 140: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3986: + if p++; p == pe { + goto _test_eof3986 + } + st_case_3986: + if 165 <= data[p] && data[p] <= 166 { + goto tr3375 + } + goto tr2985 + st3987: + if p++; p == pe { + goto _test_eof3987 + } + st_case_3987: + switch data[p] { + case 128: + goto st3988 + case 129: + goto st3989 + case 130: + goto st3990 + case 132: + goto st3991 + case 133: + goto st3992 + case 134: + goto st3993 + case 135: + goto st3994 + case 136: + goto st3995 + case 139: + goto st3996 + case 140: + goto st3997 + case 141: + goto st3998 + case 146: + goto st3999 + case 147: + goto st4000 + case 150: + goto st4001 + case 151: + goto st4002 + case 152: + goto st3999 + case 153: + goto st4003 + case 154: + goto st4004 + case 156: + goto st4005 + } + goto tr2985 + st3988: + if p++; p == pe { + goto _test_eof3988 + } + st_case_3988: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3989: + if p++; p == pe { + goto _test_eof3989 + } + st_case_3989: + if 135 <= data[p] && data[p] <= 190 { + goto tr2985 + } + goto tr3375 + st3990: + if p++; p == pe { + goto _test_eof3990 + } + st_case_3990: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr2985 
+ } + case data[p] > 188: + if 190 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3375 + st3991: + if p++; p == pe { + goto _test_eof3991 + } + st_case_3991: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3992: + if p++; p == pe { + goto _test_eof3992 + } + st_case_3992: + if data[p] == 179 { + goto tr3375 + } + goto tr2985 + st3993: + if p++; p == pe { + goto _test_eof3993 + } + st_case_3993: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3994: + if p++; p == pe { + goto _test_eof3994 + } + st_case_3994: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3375 + st3995: + if p++; p == pe { + goto _test_eof3995 + } + st_case_3995: + if 172 <= data[p] && data[p] <= 183 { + goto tr3375 + } + goto tr2985 + st3996: + if p++; p == pe { + goto _test_eof3996 + } + st_case_3996: + if 159 <= data[p] && data[p] <= 170 { + goto tr3375 + } + goto tr2985 + st3997: + if p++; p == pe { + goto _test_eof3997 + } + st_case_3997: + if data[p] == 188 { + goto tr3375 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st3998: + if p++; p == pe { + goto _test_eof3998 + } + st_case_3998: + if data[p] == 151 { + goto tr3375 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3375 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3375 + } + default: + goto tr3375 + } + default: + goto tr3375 + } + goto tr2985 + st3999: + if p++; p == pe { + goto _test_eof3999 + } + 
st_case_3999: + if 176 <= data[p] { + goto tr3375 + } + goto tr2985 + st4000: + if p++; p == pe { + goto _test_eof4000 + } + st_case_4000: + if 132 <= data[p] { + goto tr2985 + } + goto tr3375 + st4001: + if p++; p == pe { + goto _test_eof4001 + } + st_case_4001: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3375 + } + case data[p] >= 175: + goto tr3375 + } + goto tr2985 + st4002: + if p++; p == pe { + goto _test_eof4002 + } + st_case_4002: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr2985 + } + case data[p] >= 129: + goto tr2985 + } + goto tr3375 + st4003: + if p++; p == pe { + goto _test_eof4003 + } + st_case_4003: + if 129 <= data[p] { + goto tr2985 + } + goto tr3375 + st4004: + if p++; p == pe { + goto _test_eof4004 + } + st_case_4004: + if 171 <= data[p] && data[p] <= 183 { + goto tr3375 + } + goto tr2985 + st4005: + if p++; p == pe { + goto _test_eof4005 + } + st_case_4005: + if 157 <= data[p] && data[p] <= 171 { + goto tr3375 + } + goto tr2985 + st4006: + if p++; p == pe { + goto _test_eof4006 + } + st_case_4006: + switch data[p] { + case 171: + goto st4007 + case 172: + goto st4008 + case 189: + goto st4009 + case 190: + goto st4010 + } + goto tr2985 + st4007: + if p++; p == pe { + goto _test_eof4007 + } + st_case_4007: + if 176 <= data[p] && data[p] <= 180 { + goto tr3375 + } + goto tr2985 + st4008: + if p++; p == pe { + goto _test_eof4008 + } + st_case_4008: + if 176 <= data[p] && data[p] <= 182 { + goto tr3375 + } + goto tr2985 + st4009: + if p++; p == pe { + goto _test_eof4009 + } + st_case_4009: + if 145 <= data[p] && data[p] <= 190 { + goto tr3375 + } + goto tr2985 + st4010: + if p++; p == pe { + goto _test_eof4010 + } + st_case_4010: + if 143 <= data[p] && data[p] <= 146 { + goto tr3375 + } + goto tr2985 + st4011: + if p++; p == pe { + goto _test_eof4011 + } + st_case_4011: + if data[p] == 178 { + goto st4012 + } + goto tr2985 + st4012: + if p++; p == pe { + goto _test_eof4012 + } + st_case_4012: + switch { + case 
data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3375 + } + case data[p] >= 157: + goto tr3375 + } + goto tr2985 + st4013: + if p++; p == pe { + goto _test_eof4013 + } + st_case_4013: + switch data[p] { + case 133: + goto st4014 + case 134: + goto st4015 + case 137: + goto st4016 + case 168: + goto st4017 + case 169: + goto st4018 + case 170: + goto st4019 + } + goto tr2985 + st4014: + if p++; p == pe { + goto _test_eof4014 + } + st_case_4014: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3375 + } + case data[p] >= 165: + goto tr3375 + } + goto tr2985 + st4015: + if p++; p == pe { + goto _test_eof4015 + } + st_case_4015: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2985 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr3375 + st4016: + if p++; p == pe { + goto _test_eof4016 + } + st_case_4016: + if 130 <= data[p] && data[p] <= 132 { + goto tr3375 + } + goto tr2985 + st4017: + if p++; p == pe { + goto _test_eof4017 + } + st_case_4017: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr3375 + } + case data[p] >= 128: + goto tr3375 + } + goto tr2985 + st4018: + if p++; p == pe { + goto _test_eof4018 + } + st_case_4018: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + case data[p] >= 173: + goto tr2985 + } + goto tr3375 + st4019: + if p++; p == pe { + goto _test_eof4019 + } + st_case_4019: + if data[p] == 132 { + goto tr3375 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3375 + } + case data[p] >= 155: + goto tr3375 + } + goto tr2985 + st4020: + if p++; p == pe { + goto _test_eof4020 + } + st_case_4020: + if data[p] == 163 { + goto st4021 + } + goto tr2985 + st4021: + if p++; p == pe { + goto _test_eof4021 + } + st_case_4021: + if 144 <= data[p] && data[p] <= 150 { + goto tr3375 + } + goto tr2985 + st4022: + if p++; p == pe { + goto _test_eof4022 + } + st_case_4022: + if 
data[p] == 160 { + goto st4023 + } + goto tr2985 + st4023: + if p++; p == pe { + goto _test_eof4023 + } + st_case_4023: + switch data[p] { + case 128: + goto st4024 + case 129: + goto st4025 + case 132: + goto st3886 + case 135: + goto st3887 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4026 + } + goto tr2985 + st4024: + if p++; p == pe { + goto _test_eof4024 + } + st_case_4024: + if data[p] == 129 { + goto tr3375 + } + if 160 <= data[p] { + goto tr3375 + } + goto tr2985 + st4025: + if p++; p == pe { + goto _test_eof4025 + } + st_case_4025: + if 192 <= data[p] { + goto tr2985 + } + goto tr3375 + st4026: + if p++; p == pe { + goto _test_eof4026 + } + st_case_4026: + goto tr3375 +tr3376: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:104 +act = 3; + goto st5080 + st5080: + if p++; p == pe { + goto _test_eof5080 + } + st_case_5080: +//line segment_words_prod.go:118940 + switch data[p] { + case 95: + goto tr571 + case 194: + goto st4027 + case 204: + goto st4028 + case 205: + goto st4029 + case 210: + goto st4030 + case 214: + goto st4031 + case 215: + goto st4032 + case 216: + goto st4033 + case 217: + goto st4034 + case 219: + goto st4035 + case 220: + goto st4036 + case 221: + goto st4037 + case 222: + goto st4038 + case 223: + goto st4039 + case 224: + goto st4040 + case 225: + goto st4069 + case 226: + goto st4091 + case 227: + goto st4098 + case 234: + goto st4105 + case 239: + goto st4121 + case 240: + goto st4127 + case 243: + goto st4170 + } + goto tr5137 + st4027: + if p++; p == pe { + goto _test_eof4027 + } + st_case_4027: + if data[p] == 173 { + goto tr3376 + } + goto tr3627 + st4028: + if p++; p == pe { + goto _test_eof4028 + } + st_case_4028: + if 128 <= data[p] { + goto tr3376 + } + goto tr2 + st4029: + if p++; p == pe { + goto _test_eof4029 + } + st_case_4029: + if 176 <= data[p] { + goto tr3627 + } + goto tr3376 + st4030: + if p++; p == pe { + goto _test_eof4030 + } + st_case_4030: + if 131 <= 
data[p] && data[p] <= 137 { + goto tr3376 + } + goto tr3627 + st4031: + if p++; p == pe { + goto _test_eof4031 + } + st_case_4031: + if data[p] == 191 { + goto tr3376 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3376 + } + goto tr3627 + st4032: + if p++; p == pe { + goto _test_eof4032 + } + st_case_4032: + if data[p] == 135 { + goto tr3376 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3376 + } + case data[p] >= 129: + goto tr3376 + } + goto tr3627 + st4033: + if p++; p == pe { + goto _test_eof4033 + } + st_case_4033: + if data[p] == 156 { + goto tr3376 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4034: + if p++; p == pe { + goto _test_eof4034 + } + st_case_4034: + if data[p] == 176 { + goto tr3376 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3376 + } + goto tr3627 + st4035: + if p++; p == pe { + goto _test_eof4035 + } + st_case_4035: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3376 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3376 + } + case data[p] >= 167: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4036: + if p++; p == pe { + goto _test_eof4036 + } + st_case_4036: + switch data[p] { + case 143: + goto tr3376 + case 145: + goto tr3376 + } + if 176 <= data[p] { + goto tr3376 + } + goto tr3627 + st4037: + if p++; p == pe { + goto _test_eof4037 + } + st_case_4037: + if 139 <= data[p] { + goto tr3627 + } + goto tr3376 + st4038: + if p++; p == pe { + goto _test_eof4038 + } + st_case_4038: + if 166 <= data[p] && data[p] <= 176 { + goto tr3376 + } + goto tr3627 + st4039: + if p++; p == pe { + goto _test_eof4039 + } + st_case_4039: + if 171 <= data[p] && data[p] <= 179 { + goto tr3376 + } + goto tr3627 + st4040: + if p++; p == pe { + goto _test_eof4040 + } + st_case_4040: + switch data[p] { 
+ case 160: + goto st4041 + case 161: + goto st4042 + case 163: + goto st4043 + case 164: + goto st4044 + case 165: + goto st4045 + case 167: + goto st4047 + case 169: + goto st4048 + case 171: + goto st4049 + case 173: + goto st4051 + case 174: + goto st4052 + case 175: + goto st4053 + case 176: + goto st4054 + case 177: + goto st4055 + case 179: + goto st4056 + case 180: + goto st4057 + case 181: + goto st4058 + case 182: + goto st4059 + case 183: + goto st4060 + case 184: + goto st4061 + case 185: + goto st4062 + case 186: + goto st4063 + case 187: + goto st4064 + case 188: + goto st4065 + case 189: + goto st4066 + case 190: + goto st4067 + case 191: + goto st4068 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st4050 + } + case data[p] >= 166: + goto st4046 + } + goto tr3627 + st4041: + if p++; p == pe { + goto _test_eof4041 + } + st_case_4041: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3376 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3376 + } + case data[p] >= 165: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4042: + if p++; p == pe { + goto _test_eof4042 + } + st_case_4042: + if 153 <= data[p] && data[p] <= 155 { + goto tr3376 + } + goto tr3627 + st4043: + if p++; p == pe { + goto _test_eof4043 + } + st_case_4043: + if 163 <= data[p] { + goto tr3376 + } + goto tr3627 + st4044: + if p++; p == pe { + goto _test_eof4044 + } + st_case_4044: + if data[p] == 189 { + goto tr3627 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr3627 + } + goto tr3376 + st4045: + if p++; p == pe { + goto _test_eof4045 + } + st_case_4045: + if data[p] == 144 { + goto tr3627 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3627 + } + case data[p] >= 152: + goto tr3627 + } + goto tr3376 + st4046: + if p++; p == pe { + goto _test_eof4046 + } + st_case_4046: + if data[p] == 188 { + goto tr3376 + } + switch { + 
case data[p] > 131: + if 190 <= data[p] { + goto tr3376 + } + case data[p] >= 129: + goto tr3376 + } + goto tr3627 + st4047: + if p++; p == pe { + goto _test_eof4047 + } + st_case_4047: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr3627 + } + case data[p] >= 133: + goto tr3627 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3627 + } + case data[p] >= 152: + goto tr3627 + } + default: + goto tr3627 + } + goto tr3376 + st4048: + if p++; p == pe { + goto _test_eof4048 + } + st_case_4048: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr3627 + } + case data[p] >= 131: + goto tr3627 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr3627 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr3627 + } + default: + goto tr3627 + } + default: + goto tr3627 + } + goto tr3376 + st4049: + if p++; p == pe { + goto _test_eof4049 + } + st_case_4049: + switch data[p] { + case 134: + goto tr3627 + case 138: + goto tr3627 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3627 + } + case data[p] >= 142: + goto tr3627 + } + goto tr3376 + st4050: + if p++; p == pe { + goto _test_eof4050 + } + st_case_4050: + if data[p] == 188 { + goto tr3376 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3376 + } + case data[p] >= 129: + goto tr3376 + } + goto tr3627 + st4051: + if p++; p == pe { + goto _test_eof4051 + } + st_case_4051: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3376 + } + case data[p] >= 150: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4052: + if p++; p 
== pe { + goto _test_eof4052 + } + st_case_4052: + if data[p] == 130 { + goto tr3376 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3376 + } + goto tr3627 + st4053: + if p++; p == pe { + goto _test_eof4053 + } + st_case_4053: + if data[p] == 151 { + goto tr3376 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3376 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4054: + if p++; p == pe { + goto _test_eof4054 + } + st_case_4054: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4055: + if p++; p == pe { + goto _test_eof4055 + } + st_case_4055: + switch data[p] { + case 133: + goto tr3627 + case 137: + goto tr3627 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr3627 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3627 + } + default: + goto tr3627 + } + goto tr3376 + st4056: + if p++; p == pe { + goto _test_eof4056 + } + st_case_4056: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3376 + } + case data[p] >= 149: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4057: + if p++; p == pe { + goto _test_eof4057 + } + st_case_4057: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3376 + } + case data[p] >= 129: + goto tr3376 + } + goto tr3627 + st4058: + if p++; p == pe { + goto _test_eof4058 + } + st_case_4058: + switch data[p] { + case 133: + goto tr3627 + case 137: + goto tr3627 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr3627 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3627 + } + default: + goto tr3627 + } + goto 
tr3376 + st4059: + if p++; p == pe { + goto _test_eof4059 + } + st_case_4059: + if 130 <= data[p] && data[p] <= 131 { + goto tr3376 + } + goto tr3627 + st4060: + if p++; p == pe { + goto _test_eof4060 + } + st_case_4060: + switch data[p] { + case 138: + goto tr3376 + case 150: + goto tr3376 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3376 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4061: + if p++; p == pe { + goto _test_eof4061 + } + st_case_4061: + if data[p] == 177 { + goto tr3376 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3376 + } + goto tr3627 + st4062: + if p++; p == pe { + goto _test_eof4062 + } + st_case_4062: + if 135 <= data[p] && data[p] <= 142 { + goto tr3376 + } + goto tr3627 + st4063: + if p++; p == pe { + goto _test_eof4063 + } + st_case_4063: + if data[p] == 177 { + goto tr3376 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3376 + } + case data[p] >= 180: + goto tr3376 + } + goto tr3627 + st4064: + if p++; p == pe { + goto _test_eof4064 + } + st_case_4064: + if 136 <= data[p] && data[p] <= 141 { + goto tr3376 + } + goto tr3627 + st4065: + if p++; p == pe { + goto _test_eof4065 + } + st_case_4065: + switch data[p] { + case 181: + goto tr3376 + case 183: + goto tr3376 + case 185: + goto tr3376 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3376 + } + case data[p] >= 152: + goto tr3376 + } + goto tr3627 + st4066: + if p++; p == pe { + goto _test_eof4066 + } + st_case_4066: + if 177 <= data[p] && data[p] <= 191 { + goto tr3376 + } + goto tr3627 + st4067: + if p++; p == pe { + goto _test_eof4067 + } + st_case_4067: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3376 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3376 + } + case data[p] >= 141: + 
goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4068: + if p++; p == pe { + goto _test_eof4068 + } + st_case_4068: + if data[p] == 134 { + goto tr3376 + } + goto tr3627 + st4069: + if p++; p == pe { + goto _test_eof4069 + } + st_case_4069: + switch data[p] { + case 128: + goto st4070 + case 129: + goto st4071 + case 130: + goto st4072 + case 141: + goto st4073 + case 156: + goto st4074 + case 157: + goto st4075 + case 158: + goto st4076 + case 159: + goto st4077 + case 160: + goto st4078 + case 162: + goto st4079 + case 164: + goto st4080 + case 168: + goto st4081 + case 169: + goto st4082 + case 170: + goto st4083 + case 172: + goto st4084 + case 173: + goto st4085 + case 174: + goto st4086 + case 175: + goto st4087 + case 176: + goto st4088 + case 179: + goto st4089 + case 183: + goto st4090 + } + goto tr3627 + st4070: + if p++; p == pe { + goto _test_eof4070 + } + st_case_4070: + if 171 <= data[p] && data[p] <= 190 { + goto tr3376 + } + goto tr3627 + st4071: + if p++; p == pe { + goto _test_eof4071 + } + st_case_4071: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3376 + } + case data[p] >= 150: + goto tr3376 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3376 + } + case data[p] >= 167: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4072: + if p++; p == pe { + goto _test_eof4072 + } + st_case_4072: + if data[p] == 143 { + goto tr3376 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3376 + } + case data[p] >= 130: + goto tr3376 + } + goto tr3627 + st4073: + if p++; p == pe { + goto _test_eof4073 + } + st_case_4073: + if 157 <= data[p] && data[p] <= 159 { + goto tr3376 + } + goto tr3627 + st4074: + if p++; p == pe { + goto _test_eof4074 + } + st_case_4074: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3376 + } + case data[p] >= 146: 
+ goto tr3376 + } + goto tr3627 + st4075: + if p++; p == pe { + goto _test_eof4075 + } + st_case_4075: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3376 + } + case data[p] >= 146: + goto tr3376 + } + goto tr3627 + st4076: + if p++; p == pe { + goto _test_eof4076 + } + st_case_4076: + if 180 <= data[p] { + goto tr3376 + } + goto tr3627 + st4077: + if p++; p == pe { + goto _test_eof4077 + } + st_case_4077: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr3627 + } + case data[p] >= 148: + goto tr3627 + } + goto tr3376 + st4078: + if p++; p == pe { + goto _test_eof4078 + } + st_case_4078: + if 139 <= data[p] && data[p] <= 142 { + goto tr3376 + } + goto tr3627 + st4079: + if p++; p == pe { + goto _test_eof4079 + } + st_case_4079: + if data[p] == 169 { + goto tr3376 + } + goto tr3627 + st4080: + if p++; p == pe { + goto _test_eof4080 + } + st_case_4080: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3376 + } + case data[p] >= 160: + goto tr3376 + } + goto tr3627 + st4081: + if p++; p == pe { + goto _test_eof4081 + } + st_case_4081: + if 151 <= data[p] && data[p] <= 155 { + goto tr3376 + } + goto tr3627 + st4082: + if p++; p == pe { + goto _test_eof4082 + } + st_case_4082: + if data[p] == 191 { + goto tr3376 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3376 + } + case data[p] >= 149: + goto tr3376 + } + goto tr3627 + st4083: + if p++; p == pe { + goto _test_eof4083 + } + st_case_4083: + if 176 <= data[p] && data[p] <= 190 { + goto tr3376 + } + goto tr3627 + st4084: + if p++; p == pe { + goto _test_eof4084 + } + st_case_4084: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4085: + if p++; p == pe { + goto _test_eof4085 + } + st_case_4085: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr3627 + } + case data[p] >= 133: + goto tr3627 + } + goto tr3376 
+ st4086: + if p++; p == pe { + goto _test_eof4086 + } + st_case_4086: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4087: + if p++; p == pe { + goto _test_eof4087 + } + st_case_4087: + if 166 <= data[p] && data[p] <= 179 { + goto tr3376 + } + goto tr3627 + st4088: + if p++; p == pe { + goto _test_eof4088 + } + st_case_4088: + if 164 <= data[p] && data[p] <= 183 { + goto tr3376 + } + goto tr3627 + st4089: + if p++; p == pe { + goto _test_eof4089 + } + st_case_4089: + if data[p] == 173 { + goto tr3376 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3376 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3376 + } + case data[p] >= 178: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4090: + if p++; p == pe { + goto _test_eof4090 + } + st_case_4090: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4091: + if p++; p == pe { + goto _test_eof4091 + } + st_case_4091: + switch data[p] { + case 128: + goto st4092 + case 129: + goto st4093 + case 131: + goto st4094 + case 179: + goto st4095 + case 181: + goto st4096 + case 183: + goto st4097 + } + goto tr3627 + st4092: + if p++; p == pe { + goto _test_eof4092 + } + st_case_4092: + switch { + case data[p] < 170: + if 140 <= data[p] && data[p] <= 143 { + goto tr3376 + } + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + default: + goto tr3376 + } + goto tr3627 + st4093: + if p++; p == pe { + goto _test_eof4093 + } + st_case_4093: + if data[p] == 165 { + goto tr3627 + } + switch { + case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr3627 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr3627 + } + case data[p] >= 160: + goto tr3376 + } + default: + 
goto tr3627 + } + goto tr571 + st4094: + if p++; p == pe { + goto _test_eof4094 + } + st_case_4094: + if 144 <= data[p] && data[p] <= 176 { + goto tr3376 + } + goto tr3627 + st4095: + if p++; p == pe { + goto _test_eof4095 + } + st_case_4095: + if 175 <= data[p] && data[p] <= 177 { + goto tr3376 + } + goto tr3627 + st4096: + if p++; p == pe { + goto _test_eof4096 + } + st_case_4096: + if data[p] == 191 { + goto tr3376 + } + goto tr3627 + st4097: + if p++; p == pe { + goto _test_eof4097 + } + st_case_4097: + if 160 <= data[p] && data[p] <= 191 { + goto tr3376 + } + goto tr3627 + st4098: + if p++; p == pe { + goto _test_eof4098 + } + st_case_4098: + switch data[p] { + case 128: + goto st4099 + case 130: + goto st4100 + case 131: + goto st4101 + case 135: + goto st4102 + case 139: + goto st4103 + case 140: + goto st4028 + case 141: + goto st4104 + } + goto tr3627 + st4099: + if p++; p == pe { + goto _test_eof4099 + } + st_case_4099: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 181 { + goto tr3376 + } + case data[p] >= 170: + goto tr3376 + } + goto tr3627 + st4100: + if p++; p == pe { + goto _test_eof4100 + } + st_case_4100: + switch { + case data[p] > 156: + if 160 <= data[p] { + goto tr3376 + } + case data[p] >= 153: + goto tr3376 + } + goto tr3627 + st4101: + if p++; p == pe { + goto _test_eof4101 + } + st_case_4101: + if data[p] == 187 { + goto tr2 + } + if 192 <= data[p] { + goto tr2 + } + goto tr3376 + st4102: + if p++; p == pe { + goto _test_eof4102 + } + st_case_4102: + if 176 <= data[p] && data[p] <= 191 { + goto tr3376 + } + goto tr2 + st4103: + if p++; p == pe { + goto _test_eof4103 + } + st_case_4103: + if 144 <= data[p] && data[p] <= 190 { + goto tr3376 + } + goto tr2 + st4104: + if p++; p == pe { + goto _test_eof4104 + } + st_case_4104: + if 152 <= data[p] { + goto tr2 + } + goto tr3376 + st4105: + if p++; p == pe { + goto _test_eof4105 + } + st_case_4105: + switch data[p] { + case 153: + goto st4106 + case 154: + goto st4107 + case 
155: + goto st4108 + case 160: + goto st4109 + case 162: + goto st4110 + case 163: + goto st4111 + case 164: + goto st4112 + case 165: + goto st4113 + case 166: + goto st4114 + case 167: + goto st4115 + case 168: + goto st4116 + case 169: + goto st4117 + case 170: + goto st4118 + case 171: + goto st4119 + case 175: + goto st4120 + } + goto tr3627 + st4106: + if p++; p == pe { + goto _test_eof4106 + } + st_case_4106: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3376 + } + case data[p] >= 175: + goto tr3376 + } + goto tr3627 + st4107: + if p++; p == pe { + goto _test_eof4107 + } + st_case_4107: + if 158 <= data[p] && data[p] <= 159 { + goto tr3376 + } + goto tr3627 + st4108: + if p++; p == pe { + goto _test_eof4108 + } + st_case_4108: + if 176 <= data[p] && data[p] <= 177 { + goto tr3376 + } + goto tr3627 + st4109: + if p++; p == pe { + goto _test_eof4109 + } + st_case_4109: + switch data[p] { + case 130: + goto tr3376 + case 134: + goto tr3376 + case 139: + goto tr3376 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3376 + } + goto tr3627 + st4110: + if p++; p == pe { + goto _test_eof4110 + } + st_case_4110: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4111: + if p++; p == pe { + goto _test_eof4111 + } + st_case_4111: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr3627 + } + case data[p] >= 133: + goto tr3627 + } + goto tr3376 + st4112: + if p++; p == pe { + goto _test_eof4112 + } + st_case_4112: + if 166 <= data[p] && data[p] <= 173 { + goto tr3376 + } + goto tr3627 + st4113: + if p++; p == pe { + goto _test_eof4113 + } + st_case_4113: + if 135 <= data[p] && data[p] <= 147 { + goto tr3376 + } + goto tr3627 + st4114: + if p++; p == pe { + goto _test_eof4114 + } + st_case_4114: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4115: + if p++; 
p == pe { + goto _test_eof4115 + } + st_case_4115: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr3627 + } + case data[p] >= 129: + goto tr3627 + } + goto tr3376 + st4116: + if p++; p == pe { + goto _test_eof4116 + } + st_case_4116: + if 169 <= data[p] && data[p] <= 182 { + goto tr3376 + } + goto tr3627 + st4117: + if p++; p == pe { + goto _test_eof4117 + } + st_case_4117: + if data[p] == 131 { + goto tr3376 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3376 + } + case data[p] >= 140: + goto tr3376 + } + goto tr3627 + st4118: + if p++; p == pe { + goto _test_eof4118 + } + st_case_4118: + if data[p] == 176 { + goto tr3376 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3376 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4119: + if p++; p == pe { + goto _test_eof4119 + } + st_case_4119: + if data[p] == 129 { + goto tr3376 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3376 + } + case data[p] >= 171: + goto tr3376 + } + goto tr3627 + st4120: + if p++; p == pe { + goto _test_eof4120 + } + st_case_4120: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3376 + } + case data[p] >= 163: + goto tr3376 + } + goto tr3627 + st4121: + if p++; p == pe { + goto _test_eof4121 + } + st_case_4121: + switch data[p] { + case 172: + goto st4122 + case 184: + goto st4123 + case 185: + goto st1187 + case 187: + goto st4096 + case 188: + goto st1188 + case 189: + goto st4124 + case 190: + goto st4125 + case 191: + goto st4126 + } + goto tr3627 + st4122: + if p++; p == pe { + goto _test_eof4122 + } + st_case_4122: + if data[p] == 158 { + goto tr3376 + } + goto tr3627 + st4123: + if p++; p == pe { + goto _test_eof4123 + } + st_case_4123: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr3376 + } + case data[p] > 
175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr3376 + } + goto tr3627 + st4124: + if p++; p == pe { + goto _test_eof4124 + } + st_case_4124: + if 166 <= data[p] { + goto tr3376 + } + goto tr3627 + st4125: + if p++; p == pe { + goto _test_eof4125 + } + st_case_4125: + if 160 <= data[p] { + goto tr3627 + } + goto tr3376 + st4126: + if p++; p == pe { + goto _test_eof4126 + } + st_case_4126: + if 185 <= data[p] && data[p] <= 187 { + goto tr3376 + } + goto tr3627 + st4127: + if p++; p == pe { + goto _test_eof4127 + } + st_case_4127: + switch data[p] { + case 144: + goto st4128 + case 145: + goto st4134 + case 150: + goto st4153 + case 155: + goto st4158 + case 157: + goto st4161 + case 158: + goto st4168 + } + goto tr3627 + st4128: + if p++; p == pe { + goto _test_eof4128 + } + st_case_4128: + switch data[p] { + case 135: + goto st4129 + case 139: + goto st4130 + case 141: + goto st4131 + case 168: + goto st4132 + case 171: + goto st4133 + } + goto tr3627 + st4129: + if p++; p == pe { + goto _test_eof4129 + } + st_case_4129: + if data[p] == 189 { + goto tr3376 + } + goto tr3627 + st4130: + if p++; p == pe { + goto _test_eof4130 + } + st_case_4130: + if data[p] == 160 { + goto tr3376 + } + goto tr3627 + st4131: + if p++; p == pe { + goto _test_eof4131 + } + st_case_4131: + if 182 <= data[p] && data[p] <= 186 { + goto tr3376 + } + goto tr3627 + st4132: + if p++; p == pe { + goto _test_eof4132 + } + st_case_4132: + if data[p] == 191 { + goto tr3376 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3376 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3376 + } + case data[p] >= 140: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4133: + if p++; p == pe { + goto _test_eof4133 + } + st_case_4133: + if 165 <= data[p] && data[p] <= 166 { + goto tr3376 + } + goto tr3627 + st4134: + if p++; p == pe { + goto _test_eof4134 + } + 
st_case_4134: + switch data[p] { + case 128: + goto st4135 + case 129: + goto st4136 + case 130: + goto st4137 + case 132: + goto st4138 + case 133: + goto st4139 + case 134: + goto st4140 + case 135: + goto st4141 + case 136: + goto st4142 + case 139: + goto st4143 + case 140: + goto st4144 + case 141: + goto st4145 + case 146: + goto st4146 + case 147: + goto st4147 + case 150: + goto st4148 + case 151: + goto st4149 + case 152: + goto st4146 + case 153: + goto st4150 + case 154: + goto st4151 + case 156: + goto st4152 + } + goto tr3627 + st4135: + if p++; p == pe { + goto _test_eof4135 + } + st_case_4135: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4136: + if p++; p == pe { + goto _test_eof4136 + } + st_case_4136: + if 135 <= data[p] && data[p] <= 190 { + goto tr3627 + } + goto tr3376 + st4137: + if p++; p == pe { + goto _test_eof4137 + } + st_case_4137: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr3627 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr3627 + } + default: + goto tr3627 + } + goto tr3376 + st4138: + if p++; p == pe { + goto _test_eof4138 + } + st_case_4138: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4139: + if p++; p == pe { + goto _test_eof4139 + } + st_case_4139: + if data[p] == 179 { + goto tr3376 + } + goto tr3627 + st4140: + if p++; p == pe { + goto _test_eof4140 + } + st_case_4140: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4141: + if p++; p == pe { + goto _test_eof4141 + } + st_case_4141: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr3627 + } + case data[p] >= 129: + goto tr3627 + } + goto tr3376 + st4142: + if p++; p == pe { + goto _test_eof4142 + } + st_case_4142: + if 172 <= data[p] && data[p] <= 
183 { + goto tr3376 + } + goto tr3627 + st4143: + if p++; p == pe { + goto _test_eof4143 + } + st_case_4143: + if 159 <= data[p] && data[p] <= 170 { + goto tr3376 + } + goto tr3627 + st4144: + if p++; p == pe { + goto _test_eof4144 + } + st_case_4144: + if data[p] == 188 { + goto tr3376 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4145: + if p++; p == pe { + goto _test_eof4145 + } + st_case_4145: + if data[p] == 151 { + goto tr3376 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3376 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3376 + } + default: + goto tr3376 + } + default: + goto tr3376 + } + goto tr3627 + st4146: + if p++; p == pe { + goto _test_eof4146 + } + st_case_4146: + if 176 <= data[p] { + goto tr3376 + } + goto tr3627 + st4147: + if p++; p == pe { + goto _test_eof4147 + } + st_case_4147: + if 132 <= data[p] { + goto tr3627 + } + goto tr3376 + st4148: + if p++; p == pe { + goto _test_eof4148 + } + st_case_4148: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3376 + } + case data[p] >= 175: + goto tr3376 + } + goto tr3627 + st4149: + if p++; p == pe { + goto _test_eof4149 + } + st_case_4149: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr3627 + } + case data[p] >= 129: + goto tr3627 + } + goto tr3376 + st4150: + if p++; p == pe { + goto _test_eof4150 + } + st_case_4150: + if 129 <= data[p] { + goto tr3627 + } + goto tr3376 + st4151: + if p++; p == pe { + goto _test_eof4151 + } + st_case_4151: + if 171 <= data[p] && data[p] <= 183 { + goto tr3376 + } + goto tr3627 + st4152: + if p++; p == pe { + goto _test_eof4152 + } + st_case_4152: + if 157 <= data[p] && data[p] <= 
171 { + goto tr3376 + } + goto tr3627 + st4153: + if p++; p == pe { + goto _test_eof4153 + } + st_case_4153: + switch data[p] { + case 171: + goto st4154 + case 172: + goto st4155 + case 189: + goto st4156 + case 190: + goto st4157 + } + goto tr3627 + st4154: + if p++; p == pe { + goto _test_eof4154 + } + st_case_4154: + if 176 <= data[p] && data[p] <= 180 { + goto tr3376 + } + goto tr3627 + st4155: + if p++; p == pe { + goto _test_eof4155 + } + st_case_4155: + if 176 <= data[p] && data[p] <= 182 { + goto tr3376 + } + goto tr3627 + st4156: + if p++; p == pe { + goto _test_eof4156 + } + st_case_4156: + if 145 <= data[p] && data[p] <= 190 { + goto tr3376 + } + goto tr3627 + st4157: + if p++; p == pe { + goto _test_eof4157 + } + st_case_4157: + if 143 <= data[p] && data[p] <= 146 { + goto tr3376 + } + goto tr3627 + st4158: + if p++; p == pe { + goto _test_eof4158 + } + st_case_4158: + switch data[p] { + case 128: + goto st4159 + case 178: + goto st4160 + } + goto tr3627 + st4159: + if p++; p == pe { + goto _test_eof4159 + } + st_case_4159: + if data[p] == 128 { + goto tr3376 + } + goto tr3627 + st4160: + if p++; p == pe { + goto _test_eof4160 + } + st_case_4160: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3376 + } + case data[p] >= 157: + goto tr3376 + } + goto tr3627 + st4161: + if p++; p == pe { + goto _test_eof4161 + } + st_case_4161: + switch data[p] { + case 133: + goto st4162 + case 134: + goto st4163 + case 137: + goto st4164 + case 168: + goto st4165 + case 169: + goto st4166 + case 170: + goto st4167 + } + goto tr3627 + st4162: + if p++; p == pe { + goto _test_eof4162 + } + st_case_4162: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3376 + } + case data[p] >= 165: + goto tr3376 + } + goto tr3627 + st4163: + if p++; p == pe { + goto _test_eof4163 + } + st_case_4163: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr3627 + } + case data[p] > 169: + if 174 <= data[p] { + goto 
tr3627 + } + default: + goto tr3627 + } + goto tr3376 + st4164: + if p++; p == pe { + goto _test_eof4164 + } + st_case_4164: + if 130 <= data[p] && data[p] <= 132 { + goto tr3376 + } + goto tr3627 + st4165: + if p++; p == pe { + goto _test_eof4165 + } + st_case_4165: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr3376 + } + case data[p] >= 128: + goto tr3376 + } + goto tr3627 + st4166: + if p++; p == pe { + goto _test_eof4166 + } + st_case_4166: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr3627 + } + case data[p] >= 173: + goto tr3627 + } + goto tr3376 + st4167: + if p++; p == pe { + goto _test_eof4167 + } + st_case_4167: + if data[p] == 132 { + goto tr3376 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3376 + } + case data[p] >= 155: + goto tr3376 + } + goto tr3627 + st4168: + if p++; p == pe { + goto _test_eof4168 + } + st_case_4168: + if data[p] == 163 { + goto st4169 + } + goto tr3627 + st4169: + if p++; p == pe { + goto _test_eof4169 + } + st_case_4169: + if 144 <= data[p] && data[p] <= 150 { + goto tr3376 + } + goto tr3627 + st4170: + if p++; p == pe { + goto _test_eof4170 + } + st_case_4170: + if data[p] == 160 { + goto st4171 + } + goto tr3627 + st4171: + if p++; p == pe { + goto _test_eof4171 + } + st_case_4171: + switch data[p] { + case 128: + goto st4172 + case 129: + goto st4173 + case 132: + goto st4028 + case 135: + goto st4029 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4174 + } + goto tr3627 + st4172: + if p++; p == pe { + goto _test_eof4172 + } + st_case_4172: + if data[p] == 129 { + goto tr3376 + } + if 160 <= data[p] { + goto tr3376 + } + goto tr3627 + st4173: + if p++; p == pe { + goto _test_eof4173 + } + st_case_4173: + if 192 <= data[p] { + goto tr3627 + } + goto tr3376 + st4174: + if p++; p == pe { + goto _test_eof4174 + } + st_case_4174: + goto tr3376 + st4175: + if p++; p == pe { + goto _test_eof4175 + } + st_case_4175: + if 129 <= data[p] { + goto tr3757 + 
} + goto tr0 +tr3757: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + + goto st5081 + st5081: + if p++; p == pe { + goto _test_eof5081 + } + st_case_5081: +//line segment_words_prod.go:121214 + switch data[p] { + case 194: + goto st4176 + case 204: + goto st4177 + case 205: + goto st4178 + case 210: + goto st4179 + case 214: + goto st4180 + case 215: + goto st4181 + case 216: + goto st4182 + case 217: + goto st4183 + case 219: + goto st4184 + case 220: + goto st4185 + case 221: + goto st4186 + case 222: + goto st4187 + case 223: + goto st4188 + case 224: + goto st4189 + case 225: + goto st4218 + case 226: + goto st4240 + case 227: + goto st4247 + case 234: + goto st4250 + case 239: + goto st4266 + case 240: + goto st4270 + case 243: + goto st4312 + } + goto tr5157 + st4176: + if p++; p == pe { + goto _test_eof4176 + } + st_case_4176: + if data[p] == 173 { + goto tr3757 + } + goto tr3758 + st4177: + if p++; p == pe { + goto _test_eof4177 + } + st_case_4177: + if 128 <= data[p] { + goto tr3757 + } + goto tr3758 + st4178: + if p++; p == pe { + goto _test_eof4178 + } + st_case_4178: + if 176 <= data[p] { + goto tr3758 + } + goto tr3757 + st4179: + if p++; p == pe { + goto _test_eof4179 + } + st_case_4179: + if 131 <= data[p] && data[p] <= 137 { + goto tr3757 + } + goto tr3758 + st4180: + if p++; p == pe { + goto _test_eof4180 + } + st_case_4180: + if data[p] == 191 { + goto tr3757 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3757 + } + goto tr3758 + st4181: + if p++; p == pe { + goto _test_eof4181 + } + st_case_4181: + if data[p] == 135 { + goto tr3757 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3757 + } + case data[p] >= 129: + goto tr3757 + } + goto tr3758 + st4182: + if p++; p == pe { + goto _test_eof4182 + } + st_case_4182: + if data[p] == 156 { + goto tr3757 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } 
+ goto tr3758 + st4183: + if p++; p == pe { + goto _test_eof4183 + } + st_case_4183: + if data[p] == 176 { + goto tr3757 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3757 + } + goto tr3758 + st4184: + if p++; p == pe { + goto _test_eof4184 + } + st_case_4184: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3757 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3757 + } + case data[p] >= 167: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4185: + if p++; p == pe { + goto _test_eof4185 + } + st_case_4185: + switch data[p] { + case 143: + goto tr3757 + case 145: + goto tr3757 + } + if 176 <= data[p] { + goto tr3757 + } + goto tr3758 + st4186: + if p++; p == pe { + goto _test_eof4186 + } + st_case_4186: + if 139 <= data[p] { + goto tr3758 + } + goto tr3757 + st4187: + if p++; p == pe { + goto _test_eof4187 + } + st_case_4187: + if 166 <= data[p] && data[p] <= 176 { + goto tr3757 + } + goto tr3758 + st4188: + if p++; p == pe { + goto _test_eof4188 + } + st_case_4188: + if 171 <= data[p] && data[p] <= 179 { + goto tr3757 + } + goto tr3758 + st4189: + if p++; p == pe { + goto _test_eof4189 + } + st_case_4189: + switch data[p] { + case 160: + goto st4190 + case 161: + goto st4191 + case 163: + goto st4192 + case 164: + goto st4193 + case 165: + goto st4194 + case 167: + goto st4196 + case 169: + goto st4197 + case 171: + goto st4198 + case 173: + goto st4200 + case 174: + goto st4201 + case 175: + goto st4202 + case 176: + goto st4203 + case 177: + goto st4204 + case 179: + goto st4205 + case 180: + goto st4206 + case 181: + goto st4207 + case 182: + goto st4208 + case 183: + goto st4209 + case 184: + goto st4210 + case 185: + goto st4211 + case 186: + goto st4212 + case 187: + goto st4213 + case 188: + goto st4214 + case 189: + goto st4215 + case 190: + goto st4216 + case 191: + goto st4217 + } + switch { + case data[p] > 170: + if 172 <= data[p] && 
data[p] <= 178 { + goto st4199 + } + case data[p] >= 166: + goto st4195 + } + goto tr3758 + st4190: + if p++; p == pe { + goto _test_eof4190 + } + st_case_4190: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3757 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3757 + } + case data[p] >= 165: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4191: + if p++; p == pe { + goto _test_eof4191 + } + st_case_4191: + if 153 <= data[p] && data[p] <= 155 { + goto tr3757 + } + goto tr3758 + st4192: + if p++; p == pe { + goto _test_eof4192 + } + st_case_4192: + if 163 <= data[p] { + goto tr3757 + } + goto tr3758 + st4193: + if p++; p == pe { + goto _test_eof4193 + } + st_case_4193: + if data[p] == 189 { + goto tr3758 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr3758 + } + goto tr3757 + st4194: + if p++; p == pe { + goto _test_eof4194 + } + st_case_4194: + if data[p] == 144 { + goto tr3758 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3758 + } + case data[p] >= 152: + goto tr3758 + } + goto tr3757 + st4195: + if p++; p == pe { + goto _test_eof4195 + } + st_case_4195: + if data[p] == 188 { + goto tr3757 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3757 + } + case data[p] >= 129: + goto tr3757 + } + goto tr3758 + st4196: + if p++; p == pe { + goto _test_eof4196 + } + st_case_4196: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr3758 + } + case data[p] >= 133: + goto tr3758 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3758 + } + case data[p] >= 152: + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4197: + if p++; p == pe { + goto _test_eof4197 + } + st_case_4197: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr3758 + } + case 
data[p] >= 131: + goto tr3758 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr3758 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr3758 + } + default: + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4198: + if p++; p == pe { + goto _test_eof4198 + } + st_case_4198: + switch data[p] { + case 134: + goto tr3758 + case 138: + goto tr3758 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3758 + } + case data[p] >= 142: + goto tr3758 + } + goto tr3757 + st4199: + if p++; p == pe { + goto _test_eof4199 + } + st_case_4199: + if data[p] == 188 { + goto tr3757 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3757 + } + case data[p] >= 129: + goto tr3757 + } + goto tr3758 + st4200: + if p++; p == pe { + goto _test_eof4200 + } + st_case_4200: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3757 + } + case data[p] >= 150: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4201: + if p++; p == pe { + goto _test_eof4201 + } + st_case_4201: + if data[p] == 130 { + goto tr3757 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3757 + } + goto tr3758 + st4202: + if p++; p == pe { + goto _test_eof4202 + } + st_case_4202: + if data[p] == 151 { + goto tr3757 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3757 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4203: + if p++; p == pe { + goto _test_eof4203 + } + st_case_4203: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4204: + if p++; p == pe { + goto 
_test_eof4204 + } + st_case_4204: + switch data[p] { + case 133: + goto tr3758 + case 137: + goto tr3758 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr3758 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4205: + if p++; p == pe { + goto _test_eof4205 + } + st_case_4205: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3757 + } + case data[p] >= 149: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4206: + if p++; p == pe { + goto _test_eof4206 + } + st_case_4206: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3757 + } + case data[p] >= 129: + goto tr3757 + } + goto tr3758 + st4207: + if p++; p == pe { + goto _test_eof4207 + } + st_case_4207: + switch data[p] { + case 133: + goto tr3758 + case 137: + goto tr3758 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr3758 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4208: + if p++; p == pe { + goto _test_eof4208 + } + st_case_4208: + if 130 <= data[p] && data[p] <= 131 { + goto tr3757 + } + goto tr3758 + st4209: + if p++; p == pe { + goto _test_eof4209 + } + st_case_4209: + switch data[p] { + case 138: + goto tr3757 + case 150: + goto tr3757 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3757 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4210: + if p++; p == pe { + goto _test_eof4210 + } + st_case_4210: + if data[p] == 177 { + goto tr3757 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3757 + } + goto tr3758 + st4211: + if p++; p == pe { + 
goto _test_eof4211 + } + st_case_4211: + if 135 <= data[p] && data[p] <= 142 { + goto tr3757 + } + goto tr3758 + st4212: + if p++; p == pe { + goto _test_eof4212 + } + st_case_4212: + if data[p] == 177 { + goto tr3757 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3757 + } + case data[p] >= 180: + goto tr3757 + } + goto tr3758 + st4213: + if p++; p == pe { + goto _test_eof4213 + } + st_case_4213: + if 136 <= data[p] && data[p] <= 141 { + goto tr3757 + } + goto tr3758 + st4214: + if p++; p == pe { + goto _test_eof4214 + } + st_case_4214: + switch data[p] { + case 181: + goto tr3757 + case 183: + goto tr3757 + case 185: + goto tr3757 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3757 + } + case data[p] >= 152: + goto tr3757 + } + goto tr3758 + st4215: + if p++; p == pe { + goto _test_eof4215 + } + st_case_4215: + if 177 <= data[p] && data[p] <= 191 { + goto tr3757 + } + goto tr3758 + st4216: + if p++; p == pe { + goto _test_eof4216 + } + st_case_4216: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3757 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3757 + } + case data[p] >= 141: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4217: + if p++; p == pe { + goto _test_eof4217 + } + st_case_4217: + if data[p] == 134 { + goto tr3757 + } + goto tr3758 + st4218: + if p++; p == pe { + goto _test_eof4218 + } + st_case_4218: + switch data[p] { + case 128: + goto st4219 + case 129: + goto st4220 + case 130: + goto st4221 + case 141: + goto st4222 + case 156: + goto st4223 + case 157: + goto st4224 + case 158: + goto st4225 + case 159: + goto st4226 + case 160: + goto st4227 + case 162: + goto st4228 + case 164: + goto st4229 + case 168: + goto st4230 + case 169: + goto st4231 + case 170: + goto st4232 + case 172: + goto st4233 + case 173: + goto st4234 + case 174: + goto st4235 + case 175: 
+ goto st4236 + case 176: + goto st4237 + case 179: + goto st4238 + case 183: + goto st4239 + } + goto tr3758 + st4219: + if p++; p == pe { + goto _test_eof4219 + } + st_case_4219: + if 171 <= data[p] && data[p] <= 190 { + goto tr3757 + } + goto tr3758 + st4220: + if p++; p == pe { + goto _test_eof4220 + } + st_case_4220: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3757 + } + case data[p] >= 150: + goto tr3757 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3757 + } + case data[p] >= 167: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4221: + if p++; p == pe { + goto _test_eof4221 + } + st_case_4221: + if data[p] == 143 { + goto tr3757 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3757 + } + case data[p] >= 130: + goto tr3757 + } + goto tr3758 + st4222: + if p++; p == pe { + goto _test_eof4222 + } + st_case_4222: + if 157 <= data[p] && data[p] <= 159 { + goto tr3757 + } + goto tr3758 + st4223: + if p++; p == pe { + goto _test_eof4223 + } + st_case_4223: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3757 + } + case data[p] >= 146: + goto tr3757 + } + goto tr3758 + st4224: + if p++; p == pe { + goto _test_eof4224 + } + st_case_4224: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3757 + } + case data[p] >= 146: + goto tr3757 + } + goto tr3758 + st4225: + if p++; p == pe { + goto _test_eof4225 + } + st_case_4225: + if 180 <= data[p] { + goto tr3757 + } + goto tr3758 + st4226: + if p++; p == pe { + goto _test_eof4226 + } + st_case_4226: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr3758 + } + case data[p] >= 148: + goto tr3758 + } + goto tr3757 + st4227: + if p++; p == pe { + goto _test_eof4227 + } + st_case_4227: + if 139 <= data[p] && data[p] <= 142 { + goto tr3757 + } + goto tr3758 + st4228: + 
if p++; p == pe { + goto _test_eof4228 + } + st_case_4228: + if data[p] == 169 { + goto tr3757 + } + goto tr3758 + st4229: + if p++; p == pe { + goto _test_eof4229 + } + st_case_4229: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3757 + } + case data[p] >= 160: + goto tr3757 + } + goto tr3758 + st4230: + if p++; p == pe { + goto _test_eof4230 + } + st_case_4230: + if 151 <= data[p] && data[p] <= 155 { + goto tr3757 + } + goto tr3758 + st4231: + if p++; p == pe { + goto _test_eof4231 + } + st_case_4231: + if data[p] == 191 { + goto tr3757 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3757 + } + case data[p] >= 149: + goto tr3757 + } + goto tr3758 + st4232: + if p++; p == pe { + goto _test_eof4232 + } + st_case_4232: + if 176 <= data[p] && data[p] <= 190 { + goto tr3757 + } + goto tr3758 + st4233: + if p++; p == pe { + goto _test_eof4233 + } + st_case_4233: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4234: + if p++; p == pe { + goto _test_eof4234 + } + st_case_4234: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr3758 + } + case data[p] >= 133: + goto tr3758 + } + goto tr3757 + st4235: + if p++; p == pe { + goto _test_eof4235 + } + st_case_4235: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4236: + if p++; p == pe { + goto _test_eof4236 + } + st_case_4236: + if 166 <= data[p] && data[p] <= 179 { + goto tr3757 + } + goto tr3758 + st4237: + if p++; p == pe { + goto _test_eof4237 + } + st_case_4237: + if 164 <= data[p] && data[p] <= 183 { + goto tr3757 + } + goto tr3758 + st4238: + if p++; p == pe { + goto _test_eof4238 + } + st_case_4238: + if data[p] == 173 { + goto tr3757 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3757 + } + case data[p] > 168: + 
switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3757 + } + case data[p] >= 178: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4239: + if p++; p == pe { + goto _test_eof4239 + } + st_case_4239: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4240: + if p++; p == pe { + goto _test_eof4240 + } + st_case_4240: + switch data[p] { + case 128: + goto st4241 + case 129: + goto st4242 + case 131: + goto st4243 + case 179: + goto st4244 + case 181: + goto st4245 + case 183: + goto st4246 + } + goto tr3758 + st4241: + if p++; p == pe { + goto _test_eof4241 + } + st_case_4241: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr3757 + } + case data[p] >= 140: + goto tr3757 + } + goto tr3758 + st4242: + if p++; p == pe { + goto _test_eof4242 + } + st_case_4242: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr3757 + } + case data[p] >= 160: + goto tr3757 + } + goto tr3758 + st4243: + if p++; p == pe { + goto _test_eof4243 + } + st_case_4243: + if 144 <= data[p] && data[p] <= 176 { + goto tr3757 + } + goto tr3758 + st4244: + if p++; p == pe { + goto _test_eof4244 + } + st_case_4244: + if 175 <= data[p] && data[p] <= 177 { + goto tr3757 + } + goto tr3758 + st4245: + if p++; p == pe { + goto _test_eof4245 + } + st_case_4245: + if data[p] == 191 { + goto tr3757 + } + goto tr3758 + st4246: + if p++; p == pe { + goto _test_eof4246 + } + st_case_4246: + if 160 <= data[p] && data[p] <= 191 { + goto tr3757 + } + goto tr3758 + st4247: + if p++; p == pe { + goto _test_eof4247 + } + st_case_4247: + switch data[p] { + case 128: + goto st4248 + case 130: + goto st4249 + } + goto tr3758 + st4248: + if p++; p == pe { + goto _test_eof4248 + } + st_case_4248: + if 170 <= data[p] && data[p] <= 175 { + goto tr3757 + } + goto tr3758 + st4249: + if p++; p == pe { + goto _test_eof4249 + } 
+ st_case_4249: + if 153 <= data[p] && data[p] <= 154 { + goto tr3757 + } + goto tr3758 + st4250: + if p++; p == pe { + goto _test_eof4250 + } + st_case_4250: + switch data[p] { + case 153: + goto st4251 + case 154: + goto st4252 + case 155: + goto st4253 + case 160: + goto st4254 + case 162: + goto st4255 + case 163: + goto st4256 + case 164: + goto st4257 + case 165: + goto st4258 + case 166: + goto st4259 + case 167: + goto st4260 + case 168: + goto st4261 + case 169: + goto st4262 + case 170: + goto st4263 + case 171: + goto st4264 + case 175: + goto st4265 + } + goto tr3758 + st4251: + if p++; p == pe { + goto _test_eof4251 + } + st_case_4251: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3757 + } + case data[p] >= 175: + goto tr3757 + } + goto tr3758 + st4252: + if p++; p == pe { + goto _test_eof4252 + } + st_case_4252: + if 158 <= data[p] && data[p] <= 159 { + goto tr3757 + } + goto tr3758 + st4253: + if p++; p == pe { + goto _test_eof4253 + } + st_case_4253: + if 176 <= data[p] && data[p] <= 177 { + goto tr3757 + } + goto tr3758 + st4254: + if p++; p == pe { + goto _test_eof4254 + } + st_case_4254: + switch data[p] { + case 130: + goto tr3757 + case 134: + goto tr3757 + case 139: + goto tr3757 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3757 + } + goto tr3758 + st4255: + if p++; p == pe { + goto _test_eof4255 + } + st_case_4255: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4256: + if p++; p == pe { + goto _test_eof4256 + } + st_case_4256: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr3758 + } + case data[p] >= 133: + goto tr3758 + } + goto tr3757 + st4257: + if p++; p == pe { + goto _test_eof4257 + } + st_case_4257: + if 166 <= data[p] && data[p] <= 173 { + goto tr3757 + } + goto tr3758 + st4258: + if p++; p == pe { + goto _test_eof4258 + } + st_case_4258: + if 135 <= data[p] && data[p] <= 147 { + goto tr3757 
+ } + goto tr3758 + st4259: + if p++; p == pe { + goto _test_eof4259 + } + st_case_4259: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4260: + if p++; p == pe { + goto _test_eof4260 + } + st_case_4260: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr3758 + } + case data[p] >= 129: + goto tr3758 + } + goto tr3757 + st4261: + if p++; p == pe { + goto _test_eof4261 + } + st_case_4261: + if 169 <= data[p] && data[p] <= 182 { + goto tr3757 + } + goto tr3758 + st4262: + if p++; p == pe { + goto _test_eof4262 + } + st_case_4262: + if data[p] == 131 { + goto tr3757 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3757 + } + case data[p] >= 140: + goto tr3757 + } + goto tr3758 + st4263: + if p++; p == pe { + goto _test_eof4263 + } + st_case_4263: + if data[p] == 176 { + goto tr3757 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3757 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4264: + if p++; p == pe { + goto _test_eof4264 + } + st_case_4264: + if data[p] == 129 { + goto tr3757 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3757 + } + case data[p] >= 171: + goto tr3757 + } + goto tr3758 + st4265: + if p++; p == pe { + goto _test_eof4265 + } + st_case_4265: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3757 + } + case data[p] >= 163: + goto tr3757 + } + goto tr3758 + st4266: + if p++; p == pe { + goto _test_eof4266 + } + st_case_4266: + switch data[p] { + case 172: + goto st4267 + case 184: + goto st4268 + case 187: + goto st4245 + case 190: + goto st4252 + case 191: + goto st4269 + } + goto tr3758 + st4267: + if p++; p == pe { + goto _test_eof4267 + } + st_case_4267: + if data[p] == 158 { + goto tr3757 + } + goto tr3758 + st4268: + if p++; p 
== pe { + goto _test_eof4268 + } + st_case_4268: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4269: + if p++; p == pe { + goto _test_eof4269 + } + st_case_4269: + if 185 <= data[p] && data[p] <= 187 { + goto tr3757 + } + goto tr3758 + st4270: + if p++; p == pe { + goto _test_eof4270 + } + st_case_4270: + switch data[p] { + case 144: + goto st4271 + case 145: + goto st4277 + case 150: + goto st4296 + case 155: + goto st4301 + case 157: + goto st4303 + case 158: + goto st4310 + } + goto tr3758 + st4271: + if p++; p == pe { + goto _test_eof4271 + } + st_case_4271: + switch data[p] { + case 135: + goto st4272 + case 139: + goto st4273 + case 141: + goto st4274 + case 168: + goto st4275 + case 171: + goto st4276 + } + goto tr3758 + st4272: + if p++; p == pe { + goto _test_eof4272 + } + st_case_4272: + if data[p] == 189 { + goto tr3757 + } + goto tr3758 + st4273: + if p++; p == pe { + goto _test_eof4273 + } + st_case_4273: + if data[p] == 160 { + goto tr3757 + } + goto tr3758 + st4274: + if p++; p == pe { + goto _test_eof4274 + } + st_case_4274: + if 182 <= data[p] && data[p] <= 186 { + goto tr3757 + } + goto tr3758 + st4275: + if p++; p == pe { + goto _test_eof4275 + } + st_case_4275: + if data[p] == 191 { + goto tr3757 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3757 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3757 + } + case data[p] >= 140: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4276: + if p++; p == pe { + goto _test_eof4276 + } + st_case_4276: + if 165 <= data[p] && data[p] <= 166 { + goto tr3757 + } + goto tr3758 + st4277: + if p++; p == pe { + goto _test_eof4277 + } + st_case_4277: + switch data[p] { + case 128: + goto st4278 + case 129: + goto st4279 + case 130: + goto st4280 + case 132: + goto st4281 + case 133: + goto st4282 + 
case 134: + goto st4283 + case 135: + goto st4284 + case 136: + goto st4285 + case 139: + goto st4286 + case 140: + goto st4287 + case 141: + goto st4288 + case 146: + goto st4289 + case 147: + goto st4290 + case 150: + goto st4291 + case 151: + goto st4292 + case 152: + goto st4289 + case 153: + goto st4293 + case 154: + goto st4294 + case 156: + goto st4295 + } + goto tr3758 + st4278: + if p++; p == pe { + goto _test_eof4278 + } + st_case_4278: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4279: + if p++; p == pe { + goto _test_eof4279 + } + st_case_4279: + if 135 <= data[p] && data[p] <= 190 { + goto tr3758 + } + goto tr3757 + st4280: + if p++; p == pe { + goto _test_eof4280 + } + st_case_4280: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr3758 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4281: + if p++; p == pe { + goto _test_eof4281 + } + st_case_4281: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4282: + if p++; p == pe { + goto _test_eof4282 + } + st_case_4282: + if data[p] == 179 { + goto tr3757 + } + goto tr3758 + st4283: + if p++; p == pe { + goto _test_eof4283 + } + st_case_4283: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4284: + if p++; p == pe { + goto _test_eof4284 + } + st_case_4284: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr3758 + } + case data[p] >= 129: + goto tr3758 + } + goto tr3757 + st4285: + if p++; p == pe { + goto _test_eof4285 + } + st_case_4285: + if 172 <= data[p] && data[p] <= 183 { + goto tr3757 + } + goto tr3758 + st4286: + if p++; p == pe { + goto _test_eof4286 + } + st_case_4286: + if 159 <= data[p] && data[p] <= 170 { + goto tr3757 + 
} + goto tr3758 + st4287: + if p++; p == pe { + goto _test_eof4287 + } + st_case_4287: + if data[p] == 188 { + goto tr3757 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4288: + if p++; p == pe { + goto _test_eof4288 + } + st_case_4288: + if data[p] == 151 { + goto tr3757 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3757 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3757 + } + default: + goto tr3757 + } + default: + goto tr3757 + } + goto tr3758 + st4289: + if p++; p == pe { + goto _test_eof4289 + } + st_case_4289: + if 176 <= data[p] { + goto tr3757 + } + goto tr3758 + st4290: + if p++; p == pe { + goto _test_eof4290 + } + st_case_4290: + if 132 <= data[p] { + goto tr3758 + } + goto tr3757 + st4291: + if p++; p == pe { + goto _test_eof4291 + } + st_case_4291: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3757 + } + case data[p] >= 175: + goto tr3757 + } + goto tr3758 + st4292: + if p++; p == pe { + goto _test_eof4292 + } + st_case_4292: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr3758 + } + case data[p] >= 129: + goto tr3758 + } + goto tr3757 + st4293: + if p++; p == pe { + goto _test_eof4293 + } + st_case_4293: + if 129 <= data[p] { + goto tr3758 + } + goto tr3757 + st4294: + if p++; p == pe { + goto _test_eof4294 + } + st_case_4294: + if 171 <= data[p] && data[p] <= 183 { + goto tr3757 + } + goto tr3758 + st4295: + if p++; p == pe { + goto _test_eof4295 + } + st_case_4295: + if 157 <= data[p] && data[p] <= 171 { + goto tr3757 + } + goto tr3758 + st4296: + if p++; p == pe { + goto _test_eof4296 + } + st_case_4296: + switch data[p] { + case 171: + goto st4297 + case 
172: + goto st4298 + case 189: + goto st4299 + case 190: + goto st4300 + } + goto tr3758 + st4297: + if p++; p == pe { + goto _test_eof4297 + } + st_case_4297: + if 176 <= data[p] && data[p] <= 180 { + goto tr3757 + } + goto tr3758 + st4298: + if p++; p == pe { + goto _test_eof4298 + } + st_case_4298: + if 176 <= data[p] && data[p] <= 182 { + goto tr3757 + } + goto tr3758 + st4299: + if p++; p == pe { + goto _test_eof4299 + } + st_case_4299: + if 145 <= data[p] && data[p] <= 190 { + goto tr3757 + } + goto tr3758 + st4300: + if p++; p == pe { + goto _test_eof4300 + } + st_case_4300: + if 143 <= data[p] && data[p] <= 146 { + goto tr3757 + } + goto tr3758 + st4301: + if p++; p == pe { + goto _test_eof4301 + } + st_case_4301: + if data[p] == 178 { + goto st4302 + } + goto tr3758 + st4302: + if p++; p == pe { + goto _test_eof4302 + } + st_case_4302: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3757 + } + case data[p] >= 157: + goto tr3757 + } + goto tr3758 + st4303: + if p++; p == pe { + goto _test_eof4303 + } + st_case_4303: + switch data[p] { + case 133: + goto st4304 + case 134: + goto st4305 + case 137: + goto st4306 + case 168: + goto st4307 + case 169: + goto st4308 + case 170: + goto st4309 + } + goto tr3758 + st4304: + if p++; p == pe { + goto _test_eof4304 + } + st_case_4304: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3757 + } + case data[p] >= 165: + goto tr3757 + } + goto tr3758 + st4305: + if p++; p == pe { + goto _test_eof4305 + } + st_case_4305: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr3758 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr3758 + } + default: + goto tr3758 + } + goto tr3757 + st4306: + if p++; p == pe { + goto _test_eof4306 + } + st_case_4306: + if 130 <= data[p] && data[p] <= 132 { + goto tr3757 + } + goto tr3758 + st4307: + if p++; p == pe { + goto _test_eof4307 + } + st_case_4307: + switch { + case data[p] > 182: + if 187 <= data[p] 
{ + goto tr3757 + } + case data[p] >= 128: + goto tr3757 + } + goto tr3758 + st4308: + if p++; p == pe { + goto _test_eof4308 + } + st_case_4308: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr3758 + } + case data[p] >= 173: + goto tr3758 + } + goto tr3757 + st4309: + if p++; p == pe { + goto _test_eof4309 + } + st_case_4309: + if data[p] == 132 { + goto tr3757 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3757 + } + case data[p] >= 155: + goto tr3757 + } + goto tr3758 + st4310: + if p++; p == pe { + goto _test_eof4310 + } + st_case_4310: + if data[p] == 163 { + goto st4311 + } + goto tr3758 + st4311: + if p++; p == pe { + goto _test_eof4311 + } + st_case_4311: + if 144 <= data[p] && data[p] <= 150 { + goto tr3757 + } + goto tr3758 + st4312: + if p++; p == pe { + goto _test_eof4312 + } + st_case_4312: + if data[p] == 160 { + goto st4313 + } + goto tr3758 + st4313: + if p++; p == pe { + goto _test_eof4313 + } + st_case_4313: + switch data[p] { + case 128: + goto st4314 + case 129: + goto st4315 + case 132: + goto st4177 + case 135: + goto st4178 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4316 + } + goto tr3758 + st4314: + if p++; p == pe { + goto _test_eof4314 + } + st_case_4314: + if data[p] == 129 { + goto tr3757 + } + if 160 <= data[p] { + goto tr3757 + } + goto tr3758 + st4315: + if p++; p == pe { + goto _test_eof4315 + } + st_case_4315: + if 192 <= data[p] { + goto tr3758 + } + goto tr3757 + st4316: + if p++; p == pe { + goto _test_eof4316 + } + st_case_4316: + goto tr3757 + st4317: + if p++; p == pe { + goto _test_eof4317 + } + st_case_4317: + switch { + case data[p] < 153: + if 151 <= data[p] && data[p] <= 152 { + goto tr0 + } + case data[p] > 154: + switch { + case data[p] > 156: + if 160 <= data[p] { + goto tr3376 + } + case data[p] >= 155: + goto tr3376 + } + default: + goto tr2395 + } + goto tr3757 + st4318: + if p++; p == pe { + goto _test_eof4318 + } + st_case_4318: + if data[p] == 173 { 
+ goto tr3881 + } + goto tr3250 +tr3881: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + + goto st5082 + st5082: + if p++; p == pe { + goto _test_eof5082 + } + st_case_5082: +//line segment_words_prod.go:123394 + switch data[p] { + case 194: + goto st4319 + case 204: + goto st4320 + case 205: + goto st4321 + case 210: + goto st4322 + case 214: + goto st4323 + case 215: + goto st4324 + case 216: + goto st4325 + case 217: + goto st4326 + case 219: + goto st4327 + case 220: + goto st4328 + case 221: + goto st4329 + case 222: + goto st4330 + case 223: + goto st4331 + case 224: + goto st4332 + case 225: + goto st4361 + case 226: + goto st4383 + case 227: + goto st4390 + case 234: + goto st4393 + case 239: + goto st4409 + case 240: + goto st4413 + case 243: + goto st4455 + } + goto tr5054 + st4319: + if p++; p == pe { + goto _test_eof4319 + } + st_case_4319: + if data[p] == 173 { + goto tr3881 + } + goto tr3251 + st4320: + if p++; p == pe { + goto _test_eof4320 + } + st_case_4320: + if 128 <= data[p] { + goto tr3881 + } + goto tr3251 + st4321: + if p++; p == pe { + goto _test_eof4321 + } + st_case_4321: + if 176 <= data[p] { + goto tr3251 + } + goto tr3881 + st4322: + if p++; p == pe { + goto _test_eof4322 + } + st_case_4322: + if 131 <= data[p] && data[p] <= 137 { + goto tr3881 + } + goto tr3251 + st4323: + if p++; p == pe { + goto _test_eof4323 + } + st_case_4323: + if data[p] == 191 { + goto tr3881 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3881 + } + goto tr3251 + st4324: + if p++; p == pe { + goto _test_eof4324 + } + st_case_4324: + if data[p] == 135 { + goto tr3881 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3881 + } + case data[p] >= 129: + goto tr3881 + } + goto tr3251 + st4325: + if p++; p == pe { + goto _test_eof4325 + } + st_case_4325: + if data[p] == 156 { + goto tr3881 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3881 + } + case data[p] >= 
128: + goto tr3881 + } + goto tr3251 + st4326: + if p++; p == pe { + goto _test_eof4326 + } + st_case_4326: + if data[p] == 176 { + goto tr3881 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3881 + } + goto tr3251 + st4327: + if p++; p == pe { + goto _test_eof4327 + } + st_case_4327: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3881 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3881 + } + case data[p] >= 167: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4328: + if p++; p == pe { + goto _test_eof4328 + } + st_case_4328: + switch data[p] { + case 143: + goto tr3881 + case 145: + goto tr3881 + } + if 176 <= data[p] { + goto tr3881 + } + goto tr3251 + st4329: + if p++; p == pe { + goto _test_eof4329 + } + st_case_4329: + if 139 <= data[p] { + goto tr3251 + } + goto tr3881 + st4330: + if p++; p == pe { + goto _test_eof4330 + } + st_case_4330: + if 166 <= data[p] && data[p] <= 176 { + goto tr3881 + } + goto tr3251 + st4331: + if p++; p == pe { + goto _test_eof4331 + } + st_case_4331: + if 171 <= data[p] && data[p] <= 179 { + goto tr3881 + } + goto tr3251 + st4332: + if p++; p == pe { + goto _test_eof4332 + } + st_case_4332: + switch data[p] { + case 160: + goto st4333 + case 161: + goto st4334 + case 163: + goto st4335 + case 164: + goto st4336 + case 165: + goto st4337 + case 167: + goto st4339 + case 169: + goto st4340 + case 171: + goto st4341 + case 173: + goto st4343 + case 174: + goto st4344 + case 175: + goto st4345 + case 176: + goto st4346 + case 177: + goto st4347 + case 179: + goto st4348 + case 180: + goto st4349 + case 181: + goto st4350 + case 182: + goto st4351 + case 183: + goto st4352 + case 184: + goto st4353 + case 185: + goto st4354 + case 186: + goto st4355 + case 187: + goto st4356 + case 188: + goto st4357 + case 189: + goto st4358 + case 190: + goto st4359 + case 191: + goto st4360 + } + switch { + case data[p] > 170: + if 
172 <= data[p] && data[p] <= 178 { + goto st4342 + } + case data[p] >= 166: + goto st4338 + } + goto tr3251 + st4333: + if p++; p == pe { + goto _test_eof4333 + } + st_case_4333: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr3881 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr3881 + } + case data[p] >= 165: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4334: + if p++; p == pe { + goto _test_eof4334 + } + st_case_4334: + if 153 <= data[p] && data[p] <= 155 { + goto tr3881 + } + goto tr3251 + st4335: + if p++; p == pe { + goto _test_eof4335 + } + st_case_4335: + if 163 <= data[p] { + goto tr3881 + } + goto tr3251 + st4336: + if p++; p == pe { + goto _test_eof4336 + } + st_case_4336: + if data[p] == 189 { + goto tr3251 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr3251 + } + goto tr3881 + st4337: + if p++; p == pe { + goto _test_eof4337 + } + st_case_4337: + if data[p] == 144 { + goto tr3251 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3251 + } + case data[p] >= 152: + goto tr3251 + } + goto tr3881 + st4338: + if p++; p == pe { + goto _test_eof4338 + } + st_case_4338: + if data[p] == 188 { + goto tr3881 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3881 + } + case data[p] >= 129: + goto tr3881 + } + goto tr3251 + st4339: + if p++; p == pe { + goto _test_eof4339 + } + st_case_4339: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr3251 + } + case data[p] >= 133: + goto tr3251 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3251 + } + case data[p] >= 152: + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4340: + if p++; p == pe { + goto _test_eof4340 + } + st_case_4340: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto 
tr3251 + } + case data[p] >= 131: + goto tr3251 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr3251 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr3251 + } + default: + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4341: + if p++; p == pe { + goto _test_eof4341 + } + st_case_4341: + switch data[p] { + case 134: + goto tr3251 + case 138: + goto tr3251 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr3251 + } + case data[p] >= 142: + goto tr3251 + } + goto tr3881 + st4342: + if p++; p == pe { + goto _test_eof4342 + } + st_case_4342: + if data[p] == 188 { + goto tr3881 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3881 + } + case data[p] >= 129: + goto tr3881 + } + goto tr3251 + st4343: + if p++; p == pe { + goto _test_eof4343 + } + st_case_4343: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr3881 + } + case data[p] >= 150: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4344: + if p++; p == pe { + goto _test_eof4344 + } + st_case_4344: + if data[p] == 130 { + goto tr3881 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr3881 + } + goto tr3251 + st4345: + if p++; p == pe { + goto _test_eof4345 + } + st_case_4345: + if data[p] == 151 { + goto tr3881 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr3881 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4346: + if p++; p == pe { + goto _test_eof4346 + } + st_case_4346: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4347: + if p++; p == pe { 
+ goto _test_eof4347 + } + st_case_4347: + switch data[p] { + case 133: + goto tr3251 + case 137: + goto tr3251 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr3251 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4348: + if p++; p == pe { + goto _test_eof4348 + } + st_case_4348: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr3881 + } + case data[p] >= 149: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4349: + if p++; p == pe { + goto _test_eof4349 + } + st_case_4349: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr3881 + } + case data[p] >= 129: + goto tr3881 + } + goto tr3251 + st4350: + if p++; p == pe { + goto _test_eof4350 + } + st_case_4350: + switch data[p] { + case 133: + goto tr3251 + case 137: + goto tr3251 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr3251 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4351: + if p++; p == pe { + goto _test_eof4351 + } + st_case_4351: + if 130 <= data[p] && data[p] <= 131 { + goto tr3881 + } + goto tr3251 + st4352: + if p++; p == pe { + goto _test_eof4352 + } + st_case_4352: + switch data[p] { + case 138: + goto tr3881 + case 150: + goto tr3881 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr3881 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4353: + if p++; p == pe { + goto _test_eof4353 + } + st_case_4353: + if data[p] == 177 { + goto tr3881 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr3881 + } + goto tr3251 + st4354: + if p++; p == pe 
{ + goto _test_eof4354 + } + st_case_4354: + if 135 <= data[p] && data[p] <= 142 { + goto tr3881 + } + goto tr3251 + st4355: + if p++; p == pe { + goto _test_eof4355 + } + st_case_4355: + if data[p] == 177 { + goto tr3881 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr3881 + } + case data[p] >= 180: + goto tr3881 + } + goto tr3251 + st4356: + if p++; p == pe { + goto _test_eof4356 + } + st_case_4356: + if 136 <= data[p] && data[p] <= 141 { + goto tr3881 + } + goto tr3251 + st4357: + if p++; p == pe { + goto _test_eof4357 + } + st_case_4357: + switch data[p] { + case 181: + goto tr3881 + case 183: + goto tr3881 + case 185: + goto tr3881 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr3881 + } + case data[p] >= 152: + goto tr3881 + } + goto tr3251 + st4358: + if p++; p == pe { + goto _test_eof4358 + } + st_case_4358: + if 177 <= data[p] && data[p] <= 191 { + goto tr3881 + } + goto tr3251 + st4359: + if p++; p == pe { + goto _test_eof4359 + } + st_case_4359: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr3881 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr3881 + } + case data[p] >= 141: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4360: + if p++; p == pe { + goto _test_eof4360 + } + st_case_4360: + if data[p] == 134 { + goto tr3881 + } + goto tr3251 + st4361: + if p++; p == pe { + goto _test_eof4361 + } + st_case_4361: + switch data[p] { + case 128: + goto st4362 + case 129: + goto st4363 + case 130: + goto st4364 + case 141: + goto st4365 + case 156: + goto st4366 + case 157: + goto st4367 + case 158: + goto st4368 + case 159: + goto st4369 + case 160: + goto st4370 + case 162: + goto st4371 + case 164: + goto st4372 + case 168: + goto st4373 + case 169: + goto st4374 + case 170: + goto st4375 + case 172: + goto st4376 + case 173: + goto st4377 + case 174: + goto st4378 + case 
175: + goto st4379 + case 176: + goto st4380 + case 179: + goto st4381 + case 183: + goto st4382 + } + goto tr3251 + st4362: + if p++; p == pe { + goto _test_eof4362 + } + st_case_4362: + if 171 <= data[p] && data[p] <= 190 { + goto tr3881 + } + goto tr3251 + st4363: + if p++; p == pe { + goto _test_eof4363 + } + st_case_4363: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr3881 + } + case data[p] >= 150: + goto tr3881 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr3881 + } + case data[p] >= 167: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4364: + if p++; p == pe { + goto _test_eof4364 + } + st_case_4364: + if data[p] == 143 { + goto tr3881 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr3881 + } + case data[p] >= 130: + goto tr3881 + } + goto tr3251 + st4365: + if p++; p == pe { + goto _test_eof4365 + } + st_case_4365: + if 157 <= data[p] && data[p] <= 159 { + goto tr3881 + } + goto tr3251 + st4366: + if p++; p == pe { + goto _test_eof4366 + } + st_case_4366: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr3881 + } + case data[p] >= 146: + goto tr3881 + } + goto tr3251 + st4367: + if p++; p == pe { + goto _test_eof4367 + } + st_case_4367: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr3881 + } + case data[p] >= 146: + goto tr3881 + } + goto tr3251 + st4368: + if p++; p == pe { + goto _test_eof4368 + } + st_case_4368: + if 180 <= data[p] { + goto tr3881 + } + goto tr3251 + st4369: + if p++; p == pe { + goto _test_eof4369 + } + st_case_4369: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr3251 + } + case data[p] >= 148: + goto tr3251 + } + goto tr3881 + st4370: + if p++; p == pe { + goto _test_eof4370 + } + st_case_4370: + if 139 <= data[p] && data[p] <= 142 { + goto tr3881 + } + goto tr3251 + 
st4371: + if p++; p == pe { + goto _test_eof4371 + } + st_case_4371: + if data[p] == 169 { + goto tr3881 + } + goto tr3251 + st4372: + if p++; p == pe { + goto _test_eof4372 + } + st_case_4372: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr3881 + } + case data[p] >= 160: + goto tr3881 + } + goto tr3251 + st4373: + if p++; p == pe { + goto _test_eof4373 + } + st_case_4373: + if 151 <= data[p] && data[p] <= 155 { + goto tr3881 + } + goto tr3251 + st4374: + if p++; p == pe { + goto _test_eof4374 + } + st_case_4374: + if data[p] == 191 { + goto tr3881 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr3881 + } + case data[p] >= 149: + goto tr3881 + } + goto tr3251 + st4375: + if p++; p == pe { + goto _test_eof4375 + } + st_case_4375: + if 176 <= data[p] && data[p] <= 190 { + goto tr3881 + } + goto tr3251 + st4376: + if p++; p == pe { + goto _test_eof4376 + } + st_case_4376: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4377: + if p++; p == pe { + goto _test_eof4377 + } + st_case_4377: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr3251 + } + case data[p] >= 133: + goto tr3251 + } + goto tr3881 + st4378: + if p++; p == pe { + goto _test_eof4378 + } + st_case_4378: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4379: + if p++; p == pe { + goto _test_eof4379 + } + st_case_4379: + if 166 <= data[p] && data[p] <= 179 { + goto tr3881 + } + goto tr3251 + st4380: + if p++; p == pe { + goto _test_eof4380 + } + st_case_4380: + if 164 <= data[p] && data[p] <= 183 { + goto tr3881 + } + goto tr3251 + st4381: + if p++; p == pe { + goto _test_eof4381 + } + st_case_4381: + if data[p] == 173 { + goto tr3881 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr3881 + } + case 
data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr3881 + } + case data[p] >= 178: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4382: + if p++; p == pe { + goto _test_eof4382 + } + st_case_4382: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4383: + if p++; p == pe { + goto _test_eof4383 + } + st_case_4383: + switch data[p] { + case 128: + goto st4384 + case 129: + goto st4385 + case 131: + goto st4386 + case 179: + goto st4387 + case 181: + goto st4388 + case 183: + goto st4389 + } + goto tr3251 + st4384: + if p++; p == pe { + goto _test_eof4384 + } + st_case_4384: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr3881 + } + case data[p] >= 140: + goto tr3881 + } + goto tr3251 + st4385: + if p++; p == pe { + goto _test_eof4385 + } + st_case_4385: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr3881 + } + case data[p] >= 160: + goto tr3881 + } + goto tr3251 + st4386: + if p++; p == pe { + goto _test_eof4386 + } + st_case_4386: + if 144 <= data[p] && data[p] <= 176 { + goto tr3881 + } + goto tr3251 + st4387: + if p++; p == pe { + goto _test_eof4387 + } + st_case_4387: + if 175 <= data[p] && data[p] <= 177 { + goto tr3881 + } + goto tr3251 + st4388: + if p++; p == pe { + goto _test_eof4388 + } + st_case_4388: + if data[p] == 191 { + goto tr3881 + } + goto tr3251 + st4389: + if p++; p == pe { + goto _test_eof4389 + } + st_case_4389: + if 160 <= data[p] && data[p] <= 191 { + goto tr3881 + } + goto tr3251 + st4390: + if p++; p == pe { + goto _test_eof4390 + } + st_case_4390: + switch data[p] { + case 128: + goto st4391 + case 130: + goto st4392 + } + goto tr3251 + st4391: + if p++; p == pe { + goto _test_eof4391 + } + st_case_4391: + if 170 <= data[p] && data[p] <= 175 { + goto tr3881 + } + goto tr3251 + st4392: + if p++; p == pe { + goto 
_test_eof4392 + } + st_case_4392: + if 153 <= data[p] && data[p] <= 154 { + goto tr3881 + } + goto tr3251 + st4393: + if p++; p == pe { + goto _test_eof4393 + } + st_case_4393: + switch data[p] { + case 153: + goto st4394 + case 154: + goto st4395 + case 155: + goto st4396 + case 160: + goto st4397 + case 162: + goto st4398 + case 163: + goto st4399 + case 164: + goto st4400 + case 165: + goto st4401 + case 166: + goto st4402 + case 167: + goto st4403 + case 168: + goto st4404 + case 169: + goto st4405 + case 170: + goto st4406 + case 171: + goto st4407 + case 175: + goto st4408 + } + goto tr3251 + st4394: + if p++; p == pe { + goto _test_eof4394 + } + st_case_4394: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr3881 + } + case data[p] >= 175: + goto tr3881 + } + goto tr3251 + st4395: + if p++; p == pe { + goto _test_eof4395 + } + st_case_4395: + if 158 <= data[p] && data[p] <= 159 { + goto tr3881 + } + goto tr3251 + st4396: + if p++; p == pe { + goto _test_eof4396 + } + st_case_4396: + if 176 <= data[p] && data[p] <= 177 { + goto tr3881 + } + goto tr3251 + st4397: + if p++; p == pe { + goto _test_eof4397 + } + st_case_4397: + switch data[p] { + case 130: + goto tr3881 + case 134: + goto tr3881 + case 139: + goto tr3881 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr3881 + } + goto tr3251 + st4398: + if p++; p == pe { + goto _test_eof4398 + } + st_case_4398: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4399: + if p++; p == pe { + goto _test_eof4399 + } + st_case_4399: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr3251 + } + case data[p] >= 133: + goto tr3251 + } + goto tr3881 + st4400: + if p++; p == pe { + goto _test_eof4400 + } + st_case_4400: + if 166 <= data[p] && data[p] <= 173 { + goto tr3881 + } + goto tr3251 + st4401: + if p++; p == pe { + goto _test_eof4401 + } + st_case_4401: + if 135 <= data[p] && data[p] <= 
147 { + goto tr3881 + } + goto tr3251 + st4402: + if p++; p == pe { + goto _test_eof4402 + } + st_case_4402: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4403: + if p++; p == pe { + goto _test_eof4403 + } + st_case_4403: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr3251 + } + case data[p] >= 129: + goto tr3251 + } + goto tr3881 + st4404: + if p++; p == pe { + goto _test_eof4404 + } + st_case_4404: + if 169 <= data[p] && data[p] <= 182 { + goto tr3881 + } + goto tr3251 + st4405: + if p++; p == pe { + goto _test_eof4405 + } + st_case_4405: + if data[p] == 131 { + goto tr3881 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr3881 + } + case data[p] >= 140: + goto tr3881 + } + goto tr3251 + st4406: + if p++; p == pe { + goto _test_eof4406 + } + st_case_4406: + if data[p] == 176 { + goto tr3881 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr3881 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4407: + if p++; p == pe { + goto _test_eof4407 + } + st_case_4407: + if data[p] == 129 { + goto tr3881 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr3881 + } + case data[p] >= 171: + goto tr3881 + } + goto tr3251 + st4408: + if p++; p == pe { + goto _test_eof4408 + } + st_case_4408: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr3881 + } + case data[p] >= 163: + goto tr3881 + } + goto tr3251 + st4409: + if p++; p == pe { + goto _test_eof4409 + } + st_case_4409: + switch data[p] { + case 172: + goto st4410 + case 184: + goto st4411 + case 187: + goto st4388 + case 190: + goto st4395 + case 191: + goto st4412 + } + goto tr3251 + st4410: + if p++; p == pe { + goto _test_eof4410 + } + st_case_4410: + if data[p] == 158 { + goto tr3881 + } + goto tr3251 + 
st4411: + if p++; p == pe { + goto _test_eof4411 + } + st_case_4411: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4412: + if p++; p == pe { + goto _test_eof4412 + } + st_case_4412: + if 185 <= data[p] && data[p] <= 187 { + goto tr3881 + } + goto tr3251 + st4413: + if p++; p == pe { + goto _test_eof4413 + } + st_case_4413: + switch data[p] { + case 144: + goto st4414 + case 145: + goto st4420 + case 150: + goto st4439 + case 155: + goto st4444 + case 157: + goto st4446 + case 158: + goto st4453 + } + goto tr3251 + st4414: + if p++; p == pe { + goto _test_eof4414 + } + st_case_4414: + switch data[p] { + case 135: + goto st4415 + case 139: + goto st4416 + case 141: + goto st4417 + case 168: + goto st4418 + case 171: + goto st4419 + } + goto tr3251 + st4415: + if p++; p == pe { + goto _test_eof4415 + } + st_case_4415: + if data[p] == 189 { + goto tr3881 + } + goto tr3251 + st4416: + if p++; p == pe { + goto _test_eof4416 + } + st_case_4416: + if data[p] == 160 { + goto tr3881 + } + goto tr3251 + st4417: + if p++; p == pe { + goto _test_eof4417 + } + st_case_4417: + if 182 <= data[p] && data[p] <= 186 { + goto tr3881 + } + goto tr3251 + st4418: + if p++; p == pe { + goto _test_eof4418 + } + st_case_4418: + if data[p] == 191 { + goto tr3881 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr3881 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr3881 + } + case data[p] >= 140: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4419: + if p++; p == pe { + goto _test_eof4419 + } + st_case_4419: + if 165 <= data[p] && data[p] <= 166 { + goto tr3881 + } + goto tr3251 + st4420: + if p++; p == pe { + goto _test_eof4420 + } + st_case_4420: + switch data[p] { + case 128: + goto st4421 + case 129: + goto st4422 + case 130: + goto st4423 + case 132: + goto st4424 + case 
133: + goto st4425 + case 134: + goto st4426 + case 135: + goto st4427 + case 136: + goto st4428 + case 139: + goto st4429 + case 140: + goto st4430 + case 141: + goto st4431 + case 146: + goto st4432 + case 147: + goto st4433 + case 150: + goto st4434 + case 151: + goto st4435 + case 152: + goto st4432 + case 153: + goto st4436 + case 154: + goto st4437 + case 156: + goto st4438 + } + goto tr3251 + st4421: + if p++; p == pe { + goto _test_eof4421 + } + st_case_4421: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4422: + if p++; p == pe { + goto _test_eof4422 + } + st_case_4422: + if 135 <= data[p] && data[p] <= 190 { + goto tr3251 + } + goto tr3881 + st4423: + if p++; p == pe { + goto _test_eof4423 + } + st_case_4423: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr3251 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4424: + if p++; p == pe { + goto _test_eof4424 + } + st_case_4424: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4425: + if p++; p == pe { + goto _test_eof4425 + } + st_case_4425: + if data[p] == 179 { + goto tr3881 + } + goto tr3251 + st4426: + if p++; p == pe { + goto _test_eof4426 + } + st_case_4426: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4427: + if p++; p == pe { + goto _test_eof4427 + } + st_case_4427: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr3251 + } + case data[p] >= 129: + goto tr3251 + } + goto tr3881 + st4428: + if p++; p == pe { + goto _test_eof4428 + } + st_case_4428: + if 172 <= data[p] && data[p] <= 183 { + goto tr3881 + } + goto tr3251 + st4429: + if p++; p == pe { + goto _test_eof4429 + } + st_case_4429: + if 159 <= data[p] && data[p] <= 
170 { + goto tr3881 + } + goto tr3251 + st4430: + if p++; p == pe { + goto _test_eof4430 + } + st_case_4430: + if data[p] == 188 { + goto tr3881 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4431: + if p++; p == pe { + goto _test_eof4431 + } + st_case_4431: + if data[p] == 151 { + goto tr3881 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr3881 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr3881 + } + default: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3251 + st4432: + if p++; p == pe { + goto _test_eof4432 + } + st_case_4432: + if 176 <= data[p] { + goto tr3881 + } + goto tr3251 + st4433: + if p++; p == pe { + goto _test_eof4433 + } + st_case_4433: + if 132 <= data[p] { + goto tr3251 + } + goto tr3881 + st4434: + if p++; p == pe { + goto _test_eof4434 + } + st_case_4434: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr3881 + } + case data[p] >= 175: + goto tr3881 + } + goto tr3251 + st4435: + if p++; p == pe { + goto _test_eof4435 + } + st_case_4435: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr3251 + } + case data[p] >= 129: + goto tr3251 + } + goto tr3881 + st4436: + if p++; p == pe { + goto _test_eof4436 + } + st_case_4436: + if 129 <= data[p] { + goto tr3251 + } + goto tr3881 + st4437: + if p++; p == pe { + goto _test_eof4437 + } + st_case_4437: + if 171 <= data[p] && data[p] <= 183 { + goto tr3881 + } + goto tr3251 + st4438: + if p++; p == pe { + goto _test_eof4438 + } + st_case_4438: + if 157 <= data[p] && data[p] <= 171 { + goto tr3881 + } + goto tr3251 + st4439: + if p++; p == pe { + goto _test_eof4439 + } + st_case_4439: + switch data[p] { + case 171: + 
goto st4440 + case 172: + goto st4441 + case 189: + goto st4442 + case 190: + goto st4443 + } + goto tr3251 + st4440: + if p++; p == pe { + goto _test_eof4440 + } + st_case_4440: + if 176 <= data[p] && data[p] <= 180 { + goto tr3881 + } + goto tr3251 + st4441: + if p++; p == pe { + goto _test_eof4441 + } + st_case_4441: + if 176 <= data[p] && data[p] <= 182 { + goto tr3881 + } + goto tr3251 + st4442: + if p++; p == pe { + goto _test_eof4442 + } + st_case_4442: + if 145 <= data[p] && data[p] <= 190 { + goto tr3881 + } + goto tr3251 + st4443: + if p++; p == pe { + goto _test_eof4443 + } + st_case_4443: + if 143 <= data[p] && data[p] <= 146 { + goto tr3881 + } + goto tr3251 + st4444: + if p++; p == pe { + goto _test_eof4444 + } + st_case_4444: + if data[p] == 178 { + goto st4445 + } + goto tr3251 + st4445: + if p++; p == pe { + goto _test_eof4445 + } + st_case_4445: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr3881 + } + case data[p] >= 157: + goto tr3881 + } + goto tr3251 + st4446: + if p++; p == pe { + goto _test_eof4446 + } + st_case_4446: + switch data[p] { + case 133: + goto st4447 + case 134: + goto st4448 + case 137: + goto st4449 + case 168: + goto st4450 + case 169: + goto st4451 + case 170: + goto st4452 + } + goto tr3251 + st4447: + if p++; p == pe { + goto _test_eof4447 + } + st_case_4447: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr3881 + } + case data[p] >= 165: + goto tr3881 + } + goto tr3251 + st4448: + if p++; p == pe { + goto _test_eof4448 + } + st_case_4448: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr3251 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr3251 + } + default: + goto tr3251 + } + goto tr3881 + st4449: + if p++; p == pe { + goto _test_eof4449 + } + st_case_4449: + if 130 <= data[p] && data[p] <= 132 { + goto tr3881 + } + goto tr3251 + st4450: + if p++; p == pe { + goto _test_eof4450 + } + st_case_4450: + switch { + case data[p] > 182: 
+ if 187 <= data[p] { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3251 + st4451: + if p++; p == pe { + goto _test_eof4451 + } + st_case_4451: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr3251 + } + case data[p] >= 173: + goto tr3251 + } + goto tr3881 + st4452: + if p++; p == pe { + goto _test_eof4452 + } + st_case_4452: + if data[p] == 132 { + goto tr3881 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr3881 + } + case data[p] >= 155: + goto tr3881 + } + goto tr3251 + st4453: + if p++; p == pe { + goto _test_eof4453 + } + st_case_4453: + if data[p] == 163 { + goto st4454 + } + goto tr3251 + st4454: + if p++; p == pe { + goto _test_eof4454 + } + st_case_4454: + if 144 <= data[p] && data[p] <= 150 { + goto tr3881 + } + goto tr3251 + st4455: + if p++; p == pe { + goto _test_eof4455 + } + st_case_4455: + if data[p] == 160 { + goto st4456 + } + goto tr3251 + st4456: + if p++; p == pe { + goto _test_eof4456 + } + st_case_4456: + switch data[p] { + case 128: + goto st4457 + case 129: + goto st4458 + case 132: + goto st4320 + case 135: + goto st4321 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4459 + } + goto tr3251 + st4457: + if p++; p == pe { + goto _test_eof4457 + } + st_case_4457: + if data[p] == 129 { + goto tr3881 + } + if 160 <= data[p] { + goto tr3881 + } + goto tr3251 + st4458: + if p++; p == pe { + goto _test_eof4458 + } + st_case_4458: + if 192 <= data[p] { + goto tr3251 + } + goto tr3881 + st4459: + if p++; p == pe { + goto _test_eof4459 + } + st_case_4459: + goto tr3881 + st4460: + if p++; p == pe { + goto _test_eof4460 + } + st_case_4460: + if 128 <= data[p] { + goto tr3881 + } + goto tr3250 + st4461: + if p++; p == pe { + goto _test_eof4461 + } + st_case_4461: + if 176 <= data[p] { + goto tr3250 + } + goto tr3881 + st4462: + if p++; p == pe { + goto _test_eof4462 + } + st_case_4462: + if 131 <= data[p] && data[p] <= 137 { + goto tr3881 + } + goto tr3250 + st4463: + if 
p++; p == pe { + goto _test_eof4463 + } + st_case_4463: + if data[p] == 191 { + goto tr3881 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3881 + } + goto tr3250 + st4464: + if p++; p == pe { + goto _test_eof4464 + } + st_case_4464: + if data[p] == 135 { + goto tr3881 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3881 + } + case data[p] >= 129: + goto tr3881 + } + goto tr3250 + st4465: + if p++; p == pe { + goto _test_eof4465 + } + st_case_4465: + if data[p] == 156 { + goto tr3881 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3881 + } + case data[p] >= 128: + goto tr3881 + } + goto tr3250 + st4466: + if p++; p == pe { + goto _test_eof4466 + } + st_case_4466: + if data[p] == 176 { + goto tr3881 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3881 + } + goto tr3250 + st4467: + if p++; p == pe { + goto _test_eof4467 + } + st_case_4467: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3881 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3881 + } + case data[p] >= 167: + goto tr3881 + } + default: + goto tr3881 + } + goto tr3250 + st4468: + if p++; p == pe { + goto _test_eof4468 + } + st_case_4468: + switch data[p] { + case 143: + goto tr3881 + case 145: + goto tr3881 + } + if 176 <= data[p] { + goto tr3881 + } + goto tr3250 + st4469: + if p++; p == pe { + goto _test_eof4469 + } + st_case_4469: + if 139 <= data[p] { + goto tr3250 + } + goto tr3881 + st4470: + if p++; p == pe { + goto _test_eof4470 + } + st_case_4470: + if 166 <= data[p] && data[p] <= 176 { + goto tr3881 + } + goto tr3250 + st4471: + if p++; p == pe { + goto _test_eof4471 + } + st_case_4471: + if 171 <= data[p] && data[p] <= 179 { + goto tr3881 + } + goto tr3250 + st4472: + if p++; p == pe { + goto _test_eof4472 + } + st_case_4472: + switch data[p] { + case 160: + goto tr4004 + case 161: + goto tr4005 + case 163: + goto 
tr4006 + case 164: + goto tr4007 + case 165: + goto tr4008 + case 167: + goto tr4010 + case 169: + goto tr4011 + case 171: + goto tr4012 + case 173: + goto tr4014 + case 174: + goto tr4015 + case 175: + goto tr4016 + case 176: + goto tr4017 + case 177: + goto tr4018 + case 179: + goto tr4019 + case 180: + goto tr4020 + case 181: + goto tr4021 + case 182: + goto tr4022 + case 183: + goto tr4023 + case 184: + goto tr4024 + case 185: + goto tr4025 + case 186: + goto tr4026 + case 187: + goto tr4027 + case 188: + goto tr4028 + case 189: + goto tr4029 + case 190: + goto tr4030 + case 191: + goto tr4031 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto tr4013 + } + case data[p] >= 166: + goto tr4009 + } + goto tr3250 +tr4004: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5083 + st5083: + if p++; p == pe { + goto _test_eof5083 + } + st_case_5083: +//line segment_words_prod.go:125762 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 165: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4005: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5084 + st5084: + if p++; p 
== pe { + goto _test_eof5084 + } + st_case_5084: +//line segment_words_prod.go:125841 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 153 <= data[p] && data[p] <= 155 { + goto tr1 + } + goto tr5054 +tr4006: +//line segment_words.rl:72 + + endPos = p + + goto st5085 + st5085: + if p++; p == pe { + goto _test_eof5085 + } + st_case_5085: +//line segment_words_prod.go:125901 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + if 163 <= data[p] { + goto tr1 + } + goto tr5054 +tr5196: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5086 + st5086: + if p++; p == pe { + goto _test_eof5086 + } + st_case_5086: +//line segment_words_prod.go:125966 + switch data[p] { + case 173: + goto tr3250 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 
219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5197: +//line segment_words.rl:72 + + endPos = p + + goto st5087 + st5087: + if p++; p == pe { + goto _test_eof5087 + } + st_case_5087: +//line segment_words_prod.go:126025 + switch data[p] { + case 194: + goto tr5217 + case 204: + goto tr5218 + case 205: + goto tr5219 + case 210: + goto tr5220 + case 214: + goto tr5221 + case 215: + goto tr5222 + case 216: + goto tr5223 + case 217: + goto tr5224 + case 219: + goto tr5225 + case 220: + goto tr5226 + case 221: + goto tr5227 + case 222: + goto tr5228 + case 223: + goto tr5229 + case 224: + goto tr5230 + case 225: + goto tr5231 + case 226: + goto tr5232 + case 227: + goto tr5233 + case 234: + goto tr5234 + case 239: + goto tr5235 + case 240: + goto tr5236 + case 243: + goto tr5237 + } + if 128 <= data[p] { + goto tr3250 + } + goto tr4499 +tr5217: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5088 + st5088: + if p++; p == pe { + goto _test_eof5088 + } + st_case_5088: +//line segment_words_prod.go:126090 + switch data[p] { + case 173: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5218: +//line 
segment_words.rl:72 + + endPos = p + + goto st5089 + st5089: + if p++; p == pe { + goto _test_eof5089 + } + st_case_5089: +//line segment_words_prod.go:126149 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + if 128 <= data[p] { + goto tr1 + } + goto tr5054 +tr5198: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5090 + st5090: + if p++; p == pe { + goto _test_eof5090 + } + st_case_5090: +//line segment_words_prod.go:126214 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 176 <= data[p] { + goto tr4499 + } + goto tr3250 +tr5199: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5091 + st5091: + if p++; p == pe { + goto _test_eof5091 + } + st_case_5091: +//line segment_words_prod.go:126279 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + 
case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr3250 + } + goto tr4499 +tr5200: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5092 + st5092: + if p++; p == pe { + goto _test_eof5092 + } + st_case_5092: +//line segment_words_prod.go:126344 + switch data[p] { + case 191: + goto tr3250 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr3250 + } + goto tr4499 +tr5201: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5093 + st5093: + if p++; p == pe { + goto _test_eof5093 + } + st_case_5093: +//line segment_words_prod.go:126411 + switch data[p] { + case 135: + goto tr3250 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 
+ case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr3250 + } + case data[p] >= 129: + goto tr3250 + } + goto tr4499 +tr5202: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5094 + st5094: + if p++; p == pe { + goto _test_eof5094 + } + st_case_5094: +//line segment_words_prod.go:126483 + switch data[p] { + case 156: + goto tr3250 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr3250 + } + case data[p] >= 128: + goto tr3250 + } + goto tr4499 +tr5203: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5095 + st5095: + if p++; p == pe { + goto _test_eof5095 + } + st_case_5095: +//line segment_words_prod.go:126555 + switch data[p] { + case 176: + goto tr3250 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr3250 + 
} + goto tr4499 +tr5204: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5096 + st5096: + if p++; p == pe { + goto _test_eof5096 + } + st_case_5096: +//line segment_words_prod.go:126622 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr3250 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr3250 + } + case data[p] >= 167: + goto tr3250 + } + default: + goto tr3250 + } + goto tr4499 +tr5205: +//line segment_words.rl:72 + + endPos = p + + goto st5097 + st5097: + if p++; p == pe { + goto _test_eof5097 + } + st_case_5097: +//line segment_words_prod.go:126696 + switch data[p] { + case 143: + goto tr3250 + case 145: + goto tr3250 + case 194: + goto tr5217 + case 204: + goto tr5218 + case 205: + goto tr5219 + case 210: + goto tr5220 + case 214: + goto tr5221 + case 215: + goto tr5222 + case 216: + goto tr5223 + case 217: + goto tr5224 + case 219: + goto tr5225 + case 220: + goto tr5226 + case 221: + goto tr5227 + case 222: + goto tr5228 + case 223: + goto tr5229 + case 224: + goto tr5230 + case 225: + goto tr5231 + case 226: + goto tr5232 + case 227: + goto tr5233 + case 234: + goto tr5234 + case 239: + goto tr5235 + case 240: + goto tr5236 + case 243: + goto tr5237 + } + if 176 <= data[p] { + goto tr3250 + } + goto tr4499 +tr5219: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + 
+//line segment_words.rl:131 +act = 5; + goto st5098 + st5098: + if p++; p == pe { + goto _test_eof5098 + } + st_case_5098: +//line segment_words_prod.go:126765 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 176 <= data[p] { + goto tr5054 + } + goto tr1 +tr5220: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5099 + st5099: + if p++; p == pe { + goto _test_eof5099 + } + st_case_5099: +//line segment_words_prod.go:126830 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr1 + } + goto tr5054 +tr5221: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5100 + st5100: + if p++; p == pe { + goto _test_eof5100 + } + st_case_5100: +//line segment_words_prod.go:126895 + switch data[p] { + case 191: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + 
case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr1 + } + goto tr5054 +tr5222: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5101 + st5101: + if p++; p == pe { + goto _test_eof5101 + } + st_case_5101: +//line segment_words_prod.go:126962 + switch data[p] { + case 135: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr5054 +tr5223: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5102 + st5102: + if p++; p == pe { + goto _test_eof5102 + } + st_case_5102: +//line segment_words_prod.go:127034 + switch data[p] { + case 156: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto 
st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr5224: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5103 + st5103: + if p++; p == pe { + goto _test_eof5103 + } + st_case_5103: +//line segment_words_prod.go:127106 + switch data[p] { + case 176: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr5054 +tr5225: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5104 + st5104: + if p++; p == pe { + goto _test_eof5104 + } + st_case_5104: +//line segment_words_prod.go:127173 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 
223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 167: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr5226: +//line segment_words.rl:72 + + endPos = p + + goto st5105 + st5105: + if p++; p == pe { + goto _test_eof5105 + } + st_case_5105: +//line segment_words_prod.go:127247 + switch data[p] { + case 143: + goto tr1 + case 145: + goto tr1 + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + if 176 <= data[p] { + goto tr1 + } + goto tr5054 +tr5206: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5106 + st5106: + if p++; p == pe { + goto _test_eof5106 + } + st_case_5106: +//line segment_words_prod.go:127316 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + 
goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 139 <= data[p] { + goto tr4499 + } + goto tr3250 +tr5207: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5107 + st5107: + if p++; p == pe { + goto _test_eof5107 + } + st_case_5107: +//line segment_words_prod.go:127381 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 166 <= data[p] && data[p] <= 176 { + goto tr3250 + } + goto tr4499 +tr5208: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5108 + st5108: + if p++; p == pe { + goto _test_eof5108 + } + st_case_5108: +//line segment_words_prod.go:127446 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 171 <= data[p] && data[p] <= 179 { + goto tr3250 + } + goto tr4499 +tr5209: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5109 + st5109: + if p++; p == pe { + 
goto _test_eof5109 + } + st_case_5109: +//line segment_words_prod.go:127511 + switch data[p] { + case 160: + goto st3608 + case 161: + goto st3609 + case 163: + goto st3610 + case 164: + goto st3611 + case 165: + goto st3612 + case 167: + goto st3614 + case 169: + goto st3615 + case 171: + goto st3616 + case 173: + goto st3618 + case 174: + goto st3619 + case 175: + goto st3620 + case 176: + goto st3621 + case 177: + goto st3622 + case 179: + goto st3623 + case 180: + goto st3624 + case 181: + goto st3625 + case 182: + goto st3626 + case 183: + goto st3627 + case 184: + goto st3628 + case 185: + goto st3629 + case 186: + goto st3630 + case 187: + goto st3631 + case 188: + goto st3632 + case 189: + goto st3633 + case 190: + goto st3634 + case 191: + goto st3635 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st3617 + } + case data[p] >= 166: + goto st3613 + } + goto tr4499 +tr5210: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5110 + st5110: + if p++; p == pe { + goto _test_eof5110 + } + st_case_5110: +//line segment_words_prod.go:127633 + switch data[p] { + case 128: + goto st3637 + case 129: + goto st3638 + case 130: + goto st3639 + case 141: + goto st3640 + case 156: + goto st3641 + case 157: + goto st3642 + case 158: + goto st3643 + case 159: + goto st3644 + case 160: + goto st3645 + case 162: + goto st3646 + case 164: + goto st3647 + case 168: + goto st3648 + case 
169: + goto st3649 + case 170: + goto st3650 + case 172: + goto st3651 + case 173: + goto st3652 + case 174: + goto st3653 + case 175: + goto st3654 + case 176: + goto st3655 + case 179: + goto st3656 + case 183: + goto st3657 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5211: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5111 + st5111: + if p++; p == pe { + goto _test_eof5111 + } + st_case_5111: +//line segment_words_prod.go:127737 + switch data[p] { + case 128: + goto st3659 + case 129: + goto st3660 + case 131: + goto st3661 + case 179: + goto st3662 + case 181: + goto st3663 + case 183: + goto st3664 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5212: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5112 + st5112: + if p++; p == pe { + goto _test_eof5112 + } + st_case_5112: +//line segment_words_prod.go:127811 + switch data[p] { + case 128: + goto st3666 + case 130: + goto st3667 + case 194: + 
goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5213: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5113 + st5113: + if p++; p == pe { + goto _test_eof5113 + } + st_case_5113: +//line segment_words_prod.go:127877 + switch data[p] { + case 153: + goto st3669 + case 154: + goto st3670 + case 155: + goto st3671 + case 160: + goto st3672 + case 162: + goto st3673 + case 163: + goto st3674 + case 164: + goto st3675 + case 165: + goto st3676 + case 166: + goto st3677 + case 167: + goto st3678 + case 168: + goto st3679 + case 169: + goto st3680 + case 170: + goto st3681 + case 171: + goto st3682 + case 175: + goto st3683 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5214: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5114 + st5114: + if p++; p == pe { + goto _test_eof5114 + } + st_case_5114: +//line segment_words_prod.go:127969 + switch data[p] { + case 172: + goto st3685 + case 184: + goto st3686 + case 187: + goto 
st3663 + case 190: + goto st3670 + case 191: + goto st3687 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5215: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5115 + st5115: + if p++; p == pe { + goto _test_eof5115 + } + st_case_5115: +//line segment_words_prod.go:128041 + switch data[p] { + case 144: + goto st3689 + case 145: + goto st3695 + case 150: + goto st3714 + case 155: + goto st3719 + case 157: + goto st3721 + case 158: + goto st3728 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5216: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5116 + st5116: + if p++; p == pe { + goto _test_eof5116 + } + st_case_5116: +//line segment_words_prod.go:128115 + switch data[p] { + case 160: + goto st3731 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + 
case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5227: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5117 + st5117: + if p++; p == pe { + goto _test_eof5117 + } + st_case_5117: +//line segment_words_prod.go:128179 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 139 <= data[p] { + goto tr5054 + } + goto tr1 +tr5228: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5118 + st5118: + if p++; p == pe { + goto _test_eof5118 + } + st_case_5118: +//line segment_words_prod.go:128244 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 166 <= data[p] && 
data[p] <= 176 { + goto tr1 + } + goto tr5054 +tr5229: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5119 + st5119: + if p++; p == pe { + goto _test_eof5119 + } + st_case_5119: +//line segment_words_prod.go:128309 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 171 <= data[p] && data[p] <= 179 { + goto tr1 + } + goto tr5054 +tr5230: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5120 + st5120: + if p++; p == pe { + goto _test_eof5120 + } + st_case_5120: +//line segment_words_prod.go:128374 + switch data[p] { + case 160: + goto st14 + case 161: + goto st15 + case 163: + goto st16 + case 164: + goto st17 + case 165: + goto st18 + case 167: + goto st20 + case 169: + goto st21 + case 171: + goto st22 + case 173: + goto st24 + case 174: + goto st25 + case 175: + goto st26 + case 176: + goto st27 + case 177: + goto st28 + case 179: + goto st29 + case 180: + goto st30 + case 181: + goto st31 + case 182: + goto st32 + case 183: + goto st33 + case 184: + goto st34 + case 185: + goto st35 + case 186: + goto st36 + case 187: + goto st37 + case 188: + goto st38 + case 189: + goto st39 + case 190: + goto st40 + case 191: + goto st41 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 
217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st23 + } + case data[p] >= 166: + goto st19 + } + goto tr5054 +tr5231: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5121 + st5121: + if p++; p == pe { + goto _test_eof5121 + } + st_case_5121: +//line segment_words_prod.go:128496 + switch data[p] { + case 128: + goto st43 + case 129: + goto st44 + case 130: + goto st45 + case 141: + goto st46 + case 156: + goto st47 + case 157: + goto st48 + case 158: + goto st49 + case 159: + goto st50 + case 160: + goto st51 + case 162: + goto st52 + case 164: + goto st53 + case 168: + goto st54 + case 169: + goto st55 + case 170: + goto st56 + case 172: + goto st57 + case 173: + goto st58 + case 174: + goto st59 + case 175: + goto st60 + case 176: + goto st61 + case 179: + goto st62 + case 183: + goto st63 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5232: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5122 + st5122: + if p++; p == pe { + 
goto _test_eof5122 + } + st_case_5122: +//line segment_words_prod.go:128600 + switch data[p] { + case 128: + goto st65 + case 129: + goto st66 + case 131: + goto st67 + case 179: + goto st68 + case 181: + goto st69 + case 183: + goto st70 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5233: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5123 + st5123: + if p++; p == pe { + goto _test_eof5123 + } + st_case_5123: +//line segment_words_prod.go:128674 + switch data[p] { + case 128: + goto st72 + case 130: + goto st73 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5234: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5124 + st5124: + if p++; p == pe { + goto _test_eof5124 + } + st_case_5124: +//line segment_words_prod.go:128740 + switch data[p] { + case 153: + goto st75 + case 154: + goto st76 + case 155: + 
goto st77 + case 160: + goto st78 + case 162: + goto st79 + case 163: + goto st80 + case 164: + goto st81 + case 165: + goto st82 + case 166: + goto st83 + case 167: + goto st84 + case 168: + goto st85 + case 169: + goto st86 + case 170: + goto st87 + case 171: + goto st88 + case 175: + goto st89 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5235: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5125 + st5125: + if p++; p == pe { + goto _test_eof5125 + } + st_case_5125: +//line segment_words_prod.go:128832 + switch data[p] { + case 172: + goto st91 + case 184: + goto st92 + case 187: + goto st69 + case 190: + goto st76 + case 191: + goto st93 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5236: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5126 + st5126: + if p++; p == pe { + goto _test_eof5126 + } + 
st_case_5126: +//line segment_words_prod.go:128904 + switch data[p] { + case 144: + goto st95 + case 145: + goto st101 + case 150: + goto st120 + case 155: + goto st125 + case 157: + goto st127 + case 158: + goto st134 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr5237: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5127 + st5127: + if p++; p == pe { + goto _test_eof5127 + } + st_case_5127: +//line segment_words_prod.go:128978 + switch data[p] { + case 160: + goto st137 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4007: +//line segment_words.rl:72 + + endPos = p + + goto st5128 + st5128: + if p++; p == pe { + goto _test_eof5128 + } + st_case_5128: +//line segment_words_prod.go:129037 + switch data[p] { + case 189: + goto tr5054 + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + 
goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr5054 + } + goto tr1 +tr4008: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5129 + st5129: + if p++; p == pe { + goto _test_eof5129 + } + st_case_5129: +//line segment_words_prod.go:129104 + switch data[p] { + case 144: + goto tr5054 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr5054 + } + case data[p] >= 152: + goto tr5054 + } + goto tr1 +tr4009: +//line segment_words.rl:72 + + endPos = p + + goto st5130 + st5130: + if p++; p == pe { + goto _test_eof5130 + } + st_case_5130: +//line segment_words_prod.go:129171 + switch data[p] { + case 188: + goto tr1 + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 
224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr5054 +tr4010: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5131 + st5131: + if p++; p == pe { + goto _test_eof5131 + } + st_case_5131: +//line segment_words_prod.go:129243 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr5054 + } + case data[p] >= 133: + goto tr5054 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr5054 + } + case data[p] >= 152: + goto tr5054 + } + default: + goto tr5054 + } + goto tr1 +tr4011: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5132 + st5132: + if p++; p == pe { + goto _test_eof5132 + } + st_case_5132: +//line segment_words_prod.go:129327 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 
+ case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr5054 + } + case data[p] >= 131: + goto tr5054 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr5054 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr5054 + } + default: + goto tr5054 + } + default: + goto tr5054 + } + goto tr1 +tr4012: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5133 + st5133: + if p++; p == pe { + goto _test_eof5133 + } + st_case_5133: +//line segment_words_prod.go:129415 + switch data[p] { + case 134: + goto tr5054 + case 138: + goto tr5054 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr5054 + } + case data[p] >= 142: + goto tr5054 + } + goto tr1 +tr4013: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5134 + st5134: + if p++; p == pe { + goto _test_eof5134 + } + st_case_5134: +//line segment_words_prod.go:129489 + switch data[p] { + case 188: + goto tr1 + case 194: + goto st3594 + 
case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr5054 +tr4014: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5135 + st5135: + if p++; p == pe { + goto _test_eof5135 + } + st_case_5135: +//line segment_words_prod.go:129561 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] >= 150: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4015: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5136 + st5136: + if p++; p == pe { + goto 
_test_eof5136 + } + st_case_5136: +//line segment_words_prod.go:129645 + switch data[p] { + case 130: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr5054 +tr4016: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5137 + st5137: + if p++; p == pe { + goto _test_eof5137 + } + st_case_5137: +//line segment_words_prod.go:129712 + switch data[p] { + case 151: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr1 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4017: +//line segment_words.rl:72 + + endPos = p + + goto st5138 + st5138: + if p++; p == pe { + goto _test_eof5138 + } + st_case_5138: +//line segment_words_prod.go:129783 + switch data[p] { + case 194: + goto tr5196 + case 
204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4018: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5139 + st5139: + if p++; p == pe { + goto _test_eof5139 + } + st_case_5139: +//line segment_words_prod.go:129853 + switch data[p] { + case 133: + goto tr5054 + case 137: + goto tr5054 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr5054 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr5054 + } + default: + goto tr5054 + } + goto tr1 +tr4019: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5140 + st5140: + if p++; p == pe { + goto _test_eof5140 + } + st_case_5140: +//line segment_words_prod.go:129931 + switch data[p] { + case 194: + goto st3594 + case 204: + 
goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr1 + } + case data[p] >= 149: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4020: +//line segment_words.rl:72 + + endPos = p + + goto st5141 + st5141: + if p++; p == pe { + goto _test_eof5141 + } + st_case_5141: +//line segment_words_prod.go:130010 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr1 + } + case data[p] >= 129: + goto tr1 + } + goto tr5054 +tr4021: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5142 + st5142: + if p++; p == pe { + goto _test_eof5142 + } + st_case_5142: +//line segment_words_prod.go:130080 + switch data[p] { + case 133: + 
goto tr5054 + case 137: + goto tr5054 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr5054 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr5054 + } + default: + goto tr5054 + } + goto tr1 +tr4022: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5143 + st5143: + if p++; p == pe { + goto _test_eof5143 + } + st_case_5143: +//line segment_words_prod.go:130158 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 130 <= data[p] && data[p] <= 131 { + goto tr1 + } + goto tr5054 +tr4023: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5144 + st5144: + if p++; p == pe { + goto _test_eof5144 + } + st_case_5144: +//line segment_words_prod.go:130223 + switch data[p] { + case 138: + goto tr1 + case 150: + goto tr1 + case 194: + goto st3594 + 
case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr1 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4024: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5145 + st5145: + if p++; p == pe { + goto _test_eof5145 + } + st_case_5145: +//line segment_words_prod.go:130301 + switch data[p] { + case 177: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr1 + } + goto tr5054 +tr4025: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5146 + st5146: + if p++; p == pe { + goto _test_eof5146 + } + st_case_5146: +//line segment_words_prod.go:130368 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + 
case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 135 <= data[p] && data[p] <= 142 { + goto tr1 + } + goto tr5054 +tr4026: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5147 + st5147: + if p++; p == pe { + goto _test_eof5147 + } + st_case_5147: +//line segment_words_prod.go:130433 + switch data[p] { + case 177: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 180: + goto tr1 + } + goto tr5054 +tr4027: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5148 + st5148: + if p++; p == pe { + goto _test_eof5148 + } + st_case_5148: +//line segment_words_prod.go:130505 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + 
goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 136 <= data[p] && data[p] <= 141 { + goto tr1 + } + goto tr5054 +tr4028: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5149 + st5149: + if p++; p == pe { + goto _test_eof5149 + } + st_case_5149: +//line segment_words_prod.go:130570 + switch data[p] { + case 181: + goto tr1 + case 183: + goto tr1 + case 185: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 152: + goto tr1 + } + goto tr5054 +tr4029: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5150 + st5150: + if p++; p == pe { + goto _test_eof5150 + } + st_case_5150: +//line segment_words_prod.go:130646 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 
224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 177 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr5054 +tr4030: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5151 + st5151: + if p++; p == pe { + goto _test_eof5151 + } + st_case_5151: +//line segment_words_prod.go:130711 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 141: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4031: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5152 + st5152: + if p++; p == pe { + goto _test_eof5152 + } + st_case_5152: +//line segment_words_prod.go:130790 + switch data[p] { + case 134: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + 
goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 + st4473: + if p++; p == pe { + goto _test_eof4473 + } + st_case_4473: + switch data[p] { + case 128: + goto tr4032 + case 129: + goto tr4033 + case 130: + goto tr4034 + case 141: + goto tr4035 + case 156: + goto tr4036 + case 157: + goto tr4037 + case 158: + goto tr4038 + case 159: + goto tr4039 + case 160: + goto tr4040 + case 162: + goto tr4041 + case 164: + goto tr4042 + case 168: + goto tr4043 + case 169: + goto tr4044 + case 170: + goto tr4045 + case 172: + goto tr4046 + case 173: + goto tr4047 + case 174: + goto tr4048 + case 175: + goto tr4049 + case 176: + goto tr4050 + case 179: + goto tr4051 + case 183: + goto tr4052 + } + goto tr3250 +tr4032: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5153 + st5153: + if p++; p == pe { + goto _test_eof5153 + } + st_case_5153: +//line segment_words_prod.go:130904 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 171 <= data[p] && data[p] <= 190 { + goto tr1 + } + goto tr5054 +tr4033: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5154 + st5154: + if p++; p == pe { + goto _test_eof5154 + } + st_case_5154: +//line segment_words_prod.go:130969 + switch 
data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr1 + } + case data[p] >= 150: + goto tr1 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] >= 167: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4034: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5155 + st5155: + if p++; p == pe { + goto _test_eof5155 + } + st_case_5155: +//line segment_words_prod.go:131053 + switch data[p] { + case 143: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] >= 130: + goto tr1 + } + goto tr5054 +tr4035: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 
5; + goto st5156 + st5156: + if p++; p == pe { + goto _test_eof5156 + } + st_case_5156: +//line segment_words_prod.go:131125 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 157 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr5054 +tr4036: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5157 + st5157: + if p++; p == pe { + goto _test_eof5157 + } + st_case_5157: +//line segment_words_prod.go:131190 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] >= 146: + goto tr1 + } + goto tr5054 +tr4037: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5158 + st5158: + if p++; p == pe { + goto _test_eof5158 + } + st_case_5158: +//line segment_words_prod.go:131260 + switch data[p] { + case 194: + goto 
st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] >= 146: + goto tr1 + } + goto tr5054 +tr4038: +//line segment_words.rl:72 + + endPos = p + + goto st5159 + st5159: + if p++; p == pe { + goto _test_eof5159 + } + st_case_5159: +//line segment_words_prod.go:131325 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + if 180 <= data[p] { + goto tr1 + } + goto tr5054 +tr4039: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5160 + st5160: + if p++; p == pe { + goto _test_eof5160 + } + st_case_5160: +//line segment_words_prod.go:131390 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: 
+ goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr5054 + } + case data[p] >= 148: + goto tr5054 + } + goto tr1 +tr4040: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5161 + st5161: + if p++; p == pe { + goto _test_eof5161 + } + st_case_5161: +//line segment_words_prod.go:131460 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 139 <= data[p] && data[p] <= 142 { + goto tr1 + } + goto tr5054 +tr4041: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5162 + st5162: + if p++; p == pe { + goto _test_eof5162 + } + st_case_5162: +//line segment_words_prod.go:131525 + switch data[p] { + case 169: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + 
goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4042: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5163 + st5163: + if p++; p == pe { + goto _test_eof5163 + } + st_case_5163: +//line segment_words_prod.go:131589 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] >= 160: + goto tr1 + } + goto tr5054 +tr4043: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5164 + st5164: + if p++; p == pe { + goto _test_eof5164 + } + st_case_5164: +//line segment_words_prod.go:131659 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 151 <= data[p] && data[p] <= 155 { + goto tr1 + } + goto tr5054 +tr4044: +//line NONE:1 +te = p+1 
+ +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5165 + st5165: + if p++; p == pe { + goto _test_eof5165 + } + st_case_5165: +//line segment_words_prod.go:131724 + switch data[p] { + case 191: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] >= 149: + goto tr1 + } + goto tr5054 +tr4045: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5166 + st5166: + if p++; p == pe { + goto _test_eof5166 + } + st_case_5166: +//line segment_words_prod.go:131796 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 176 <= data[p] && data[p] <= 190 { + goto tr1 + } + goto tr5054 +tr4046: +//line segment_words.rl:72 + + endPos = p + + goto st5167 + st5167: + if p++; p == pe { + goto _test_eof5167 + } + st_case_5167: +//line segment_words_prod.go:131856 + 
switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4047: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5168 + st5168: + if p++; p == pe { + goto _test_eof5168 + } + st_case_5168: +//line segment_words_prod.go:131926 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr5054 + } + case data[p] >= 133: + goto tr5054 + } + goto tr1 +tr4048: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5169 + st5169: + if p++; p == pe { + goto _test_eof5169 + } + st_case_5169: +//line segment_words_prod.go:131996 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 
214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4049: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5170 + st5170: + if p++; p == pe { + goto _test_eof5170 + } + st_case_5170: +//line segment_words_prod.go:132066 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 166 <= data[p] && data[p] <= 179 { + goto tr1 + } + goto tr5054 +tr4050: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5171 + st5171: + if p++; p == pe { + goto _test_eof5171 + } + st_case_5171: +//line segment_words_prod.go:132131 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto 
st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 164 <= data[p] && data[p] <= 183 { + goto tr1 + } + goto tr5054 +tr4051: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5172 + st5172: + if p++; p == pe { + goto _test_eof5172 + } + st_case_5172: +//line segment_words_prod.go:132196 + switch data[p] { + case 173: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr1 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr1 + } + case data[p] >= 178: + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4052: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5173 + st5173: + if p++; p == pe { + goto _test_eof5173 + } + st_case_5173: +//line segment_words_prod.go:132277 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + 
case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 + st4474: + if p++; p == pe { + goto _test_eof4474 + } + st_case_4474: + switch data[p] { + case 128: + goto tr4053 + case 129: + goto tr4054 + case 131: + goto tr4055 + case 179: + goto tr4056 + case 181: + goto tr4057 + case 183: + goto tr4058 + } + goto tr3250 +tr4053: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5174 + st5174: + if p++; p == pe { + goto _test_eof5174 + } + st_case_5174: +//line segment_words_prod.go:132367 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr1 + } + case data[p] >= 140: + goto tr1 + } + goto tr5054 +tr4054: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5175 + st5175: + if p++; p == pe { + goto _test_eof5175 + } + st_case_5175: +//line segment_words_prod.go:132437 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + 
goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] >= 160: + goto tr1 + } + goto tr5054 +tr4055: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5176 + st5176: + if p++; p == pe { + goto _test_eof5176 + } + st_case_5176: +//line segment_words_prod.go:132507 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 144 <= data[p] && data[p] <= 176 { + goto tr1 + } + goto tr5054 +tr4056: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5177 + st5177: + if p++; p == pe { + goto _test_eof5177 + } + st_case_5177: +//line segment_words_prod.go:132572 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 
+ case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 175 <= data[p] && data[p] <= 177 { + goto tr1 + } + goto tr5054 +tr4057: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5178 + st5178: + if p++; p == pe { + goto _test_eof5178 + } + st_case_5178: +//line segment_words_prod.go:132637 + switch data[p] { + case 191: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4058: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5179 + st5179: + if p++; p == pe { + goto _test_eof5179 + } + st_case_5179: +//line segment_words_prod.go:132701 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 
160 <= data[p] && data[p] <= 191 { + goto tr1 + } + goto tr5054 + st4475: + if p++; p == pe { + goto _test_eof4475 + } + st_case_4475: + switch data[p] { + case 128: + goto tr4059 + case 130: + goto tr4060 + } + goto tr3250 +tr4059: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5180 + st5180: + if p++; p == pe { + goto _test_eof5180 + } + st_case_5180: +//line segment_words_prod.go:132778 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 170 <= data[p] && data[p] <= 175 { + goto tr1 + } + goto tr5054 +tr4060: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5181 + st5181: + if p++; p == pe { + goto _test_eof5181 + } + st_case_5181: +//line segment_words_prod.go:132843 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 153 <= data[p] && data[p] <= 154 { + goto tr1 + } + goto tr5054 + st4476: + if 
p++; p == pe { + goto _test_eof4476 + } + st_case_4476: + switch data[p] { + case 153: + goto tr4061 + case 154: + goto tr4062 + case 155: + goto tr4063 + case 160: + goto tr4064 + case 162: + goto tr4065 + case 163: + goto tr4066 + case 164: + goto tr4067 + case 165: + goto tr4068 + case 166: + goto tr4069 + case 167: + goto tr4070 + case 168: + goto tr4071 + case 169: + goto tr4072 + case 170: + goto tr4073 + case 171: + goto tr4074 + case 175: + goto tr4075 + } + goto tr3250 +tr4061: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5182 + st5182: + if p++; p == pe { + goto _test_eof5182 + } + st_case_5182: +//line segment_words_prod.go:132946 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] >= 175: + goto tr1 + } + goto tr5054 +tr4062: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5183 + st5183: + if p++; p == pe { + goto _test_eof5183 + } + st_case_5183: +//line segment_words_prod.go:133016 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto 
st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 158 <= data[p] && data[p] <= 159 { + goto tr1 + } + goto tr5054 +tr4063: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5184 + st5184: + if p++; p == pe { + goto _test_eof5184 + } + st_case_5184: +//line segment_words_prod.go:133081 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 176 <= data[p] && data[p] <= 177 { + goto tr1 + } + goto tr5054 +tr4064: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5185 + st5185: + if p++; p == pe { + goto _test_eof5185 + } + st_case_5185: +//line segment_words_prod.go:133146 + switch data[p] { + case 130: + goto tr1 + case 134: + goto tr1 + case 139: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 
239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr1 + } + goto tr5054 +tr4065: +//line segment_words.rl:72 + + endPos = p + + goto st5186 + st5186: + if p++; p == pe { + goto _test_eof5186 + } + st_case_5186: +//line segment_words_prod.go:133212 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4066: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5187 + st5187: + if p++; p == pe { + goto _test_eof5187 + } + st_case_5187: +//line segment_words_prod.go:133282 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr5054 + } + case data[p] >= 133: + goto tr5054 + } + goto tr1 +tr4067: +//line NONE:1 +te = p+1 + +//line 
segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5188 + st5188: + if p++; p == pe { + goto _test_eof5188 + } + st_case_5188: +//line segment_words_prod.go:133352 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 166 <= data[p] && data[p] <= 173 { + goto tr1 + } + goto tr5054 +tr4068: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5189 + st5189: + if p++; p == pe { + goto _test_eof5189 + } + st_case_5189: +//line segment_words_prod.go:133417 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 135 <= data[p] && data[p] <= 147 { + goto tr1 + } + goto tr5054 +tr4069: +//line segment_words.rl:72 + + endPos = p + + goto st5190 + st5190: + if p++; p == pe { + goto _test_eof5190 + } + st_case_5190: +//line segment_words_prod.go:133477 + switch data[p] { + case 194: + goto tr5196 + case 204: + goto tr5197 + case 205: + goto tr5198 + case 
210: + goto tr5199 + case 214: + goto tr5200 + case 215: + goto tr5201 + case 216: + goto tr5202 + case 217: + goto tr5203 + case 219: + goto tr5204 + case 220: + goto tr5205 + case 221: + goto tr5206 + case 222: + goto tr5207 + case 223: + goto tr5208 + case 224: + goto tr5209 + case 225: + goto tr5210 + case 226: + goto tr5211 + case 227: + goto tr5212 + case 234: + goto tr5213 + case 239: + goto tr5214 + case 240: + goto tr5215 + case 243: + goto tr5216 + } + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4070: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5191 + st5191: + if p++; p == pe { + goto _test_eof5191 + } + st_case_5191: +//line segment_words_prod.go:133547 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr5054 + } + case data[p] >= 129: + goto tr5054 + } + goto tr1 +tr4071: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5192 + st5192: + if p++; p == pe { + goto _test_eof5192 + } + st_case_5192: +//line segment_words_prod.go:133617 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 
219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 169 <= data[p] && data[p] <= 182 { + goto tr1 + } + goto tr5054 +tr4072: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5193 + st5193: + if p++; p == pe { + goto _test_eof5193 + } + st_case_5193: +//line segment_words_prod.go:133682 + switch data[p] { + case 131: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] >= 140: + goto tr1 + } + goto tr5054 +tr4073: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5194 + st5194: + if p++; p == pe { + goto _test_eof5194 + } + st_case_5194: +//line segment_words_prod.go:133754 + switch data[p] { + case 176: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 
+ case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + default: + goto tr1 + } + goto tr5054 +tr4074: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5195 + st5195: + if p++; p == pe { + goto _test_eof5195 + } + st_case_5195: +//line segment_words_prod.go:133830 + switch data[p] { + case 129: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] >= 171: + goto tr1 + } + goto tr5054 +tr4075: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5196 + st5196: + if p++; p == pe { + goto _test_eof5196 + } + st_case_5196: +//line segment_words_prod.go:133902 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 
224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 163: + goto tr1 + } + goto tr5054 + st4477: + if p++; p == pe { + goto _test_eof4477 + } + st_case_4477: + switch data[p] { + case 172: + goto tr4076 + case 184: + goto tr4077 + case 187: + goto tr4057 + case 190: + goto tr4062 + case 191: + goto tr4078 + } + goto tr3250 +tr4076: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5197 + st5197: + if p++; p == pe { + goto _test_eof5197 + } + st_case_5197: +//line segment_words_prod.go:133990 + switch data[p] { + case 158: + goto tr1 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4077: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5198 + st5198: + if p++; p == pe { + goto _test_eof5198 + } + st_case_5198: +//line segment_words_prod.go:134054 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto 
st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] >= 128: + goto tr1 + } + goto tr5054 +tr4078: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5199 + st5199: + if p++; p == pe { + goto _test_eof5199 + } + st_case_5199: +//line segment_words_prod.go:134124 + switch data[p] { + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 185 <= data[p] && data[p] <= 187 { + goto tr1 + } + goto tr5054 + st4478: + if p++; p == pe { + goto _test_eof4478 + } + st_case_4478: + switch data[p] { + case 144: + goto tr4079 + case 145: + goto tr4080 + case 150: + goto tr4081 + case 155: + goto tr4082 + case 157: + goto tr4083 + case 158: + goto tr4084 + } + goto tr3250 +tr4079: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5200 + st5200: + if p++; p == pe { + goto _test_eof5200 + } + st_case_5200: +//line segment_words_prod.go:134209 + switch data[p] { + case 135: + goto st96 + case 139: + goto st97 + case 141: + goto st98 + case 168: + goto st99 + case 171: + goto st100 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 
210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4080: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5201 + st5201: + if p++; p == pe { + goto _test_eof5201 + } + st_case_5201: +//line segment_words_prod.go:134281 + switch data[p] { + case 128: + goto st102 + case 129: + goto st103 + case 130: + goto st104 + case 132: + goto st105 + case 133: + goto st106 + case 134: + goto st107 + case 135: + goto st108 + case 136: + goto st109 + case 139: + goto st110 + case 140: + goto st111 + case 141: + goto st112 + case 146: + goto st113 + case 147: + goto st114 + case 150: + goto st115 + case 151: + goto st116 + case 152: + goto st113 + case 153: + goto st117 + case 154: + goto st118 + case 156: + goto st119 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4081: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5202 + st5202: + if p++; p == pe { + goto _test_eof5202 + } + st_case_5202: +//line 
segment_words_prod.go:134381 + switch data[p] { + case 171: + goto st121 + case 172: + goto st122 + case 189: + goto st123 + case 190: + goto st124 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4082: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5203 + st5203: + if p++; p == pe { + goto _test_eof5203 + } + st_case_5203: +//line segment_words_prod.go:134451 + switch data[p] { + case 178: + goto st126 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4083: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5204 + st5204: + if p++; p == pe { + goto _test_eof5204 + } + st_case_5204: +//line segment_words_prod.go:134515 + switch data[p] { + case 133: + goto st128 + case 134: + goto st129 + case 137: + goto st130 + case 168: + goto st131 + case 169: + goto st132 + case 170: + goto st133 + case 194: + goto st3594 
+ case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 +tr4084: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5205 + st5205: + if p++; p == pe { + goto _test_eof5205 + } + st_case_5205: +//line segment_words_prod.go:134589 + switch data[p] { + case 163: + goto st135 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 + case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + goto tr5054 + st4479: + if p++; p == pe { + goto _test_eof4479 + } + st_case_4479: + if data[p] == 160 { + goto tr4085 + } + goto tr3250 +tr4085: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:131 +act = 5; + goto st5206 + st5206: + if p++; p == pe { + goto _test_eof5206 + } + st_case_5206: +//line segment_words_prod.go:134662 + switch data[p] { + case 128: + goto st138 + case 129: + goto st139 + case 132: + goto st1 + case 135: + goto st2 + case 194: + goto st3594 + case 204: + goto st3595 + case 205: + goto st3596 + case 210: + goto st3597 + case 214: + goto st3598 
+ case 215: + goto st3599 + case 216: + goto st3600 + case 217: + goto st3601 + case 219: + goto st3602 + case 220: + goto st3603 + case 221: + goto st3604 + case 222: + goto st3605 + case 223: + goto st3606 + case 224: + goto st3607 + case 225: + goto st3636 + case 226: + goto st3658 + case 227: + goto st3665 + case 234: + goto st3668 + case 239: + goto st3684 + case 240: + goto st3688 + case 243: + goto st3730 + } + if 133 <= data[p] && data[p] <= 134 { + goto st140 + } + goto tr5054 +tr4490: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5207 + st5207: + if p++; p == pe { + goto _test_eof5207 + } + st_case_5207: +//line segment_words_prod.go:134739 + switch data[p] { + case 182: + goto st4480 + case 183: + goto tr4499 + case 184: + goto st3595 + case 194: + goto st4318 + case 204: + goto st4460 + case 205: + goto st4461 + case 210: + goto st4462 + case 214: + goto st4463 + case 215: + goto st4464 + case 216: + goto st4465 + case 217: + goto st4466 + case 219: + goto st4467 + case 220: + goto st4468 + case 221: + goto st4469 + case 222: + goto st4470 + case 223: + goto st4471 + case 224: + goto st4472 + case 225: + goto st4473 + case 226: + goto st4474 + case 227: + goto st4475 + case 234: + goto st4476 + case 239: + goto st4477 + case 240: + goto st4478 + case 243: + goto st4479 + } + goto st3734 + st4480: + if p++; p == pe { + goto _test_eof4480 + } + st_case_4480: + if 182 <= data[p] { + goto tr0 + } + goto tr3250 +tr4491: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5208 + st5208: + if p++; p == pe { + goto _test_eof5208 + } + st_case_5208: +//line segment_words_prod.go:134820 + switch data[p] { + case 194: + goto st4318 + case 204: + goto st4460 + case 205: + goto st4461 + case 210: + goto st4462 + case 214: + goto 
st4463 + case 215: + goto st4464 + case 216: + goto st4465 + case 217: + goto st4466 + case 219: + goto st4467 + case 220: + goto st4468 + case 221: + goto st4469 + case 222: + goto st4470 + case 223: + goto st4471 + case 224: + goto st4472 + case 225: + goto st4473 + case 226: + goto st4474 + case 227: + goto st4475 + case 234: + goto st4476 + case 239: + goto st4477 + case 240: + goto st4478 + case 243: + goto st4479 + } + goto st3734 +tr4492: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5209 + st5209: + if p++; p == pe { + goto _test_eof5209 + } + st_case_5209: +//line segment_words_prod.go:134886 + switch data[p] { + case 191: + goto st3736 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 192 <= data[p] { + goto tr4499 + } + goto st3734 +tr4493: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5210 + st5210: + if p++; p == pe { + goto _test_eof5210 + } + st_case_5210: +//line segment_words_prod.go:134957 + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st1673 + case 153: + goto st4481 + case 154: + goto st4482 + case 155: + goto st4483 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st4484 + case 161: + goto st272 + case 162: + goto st4485 + 
case 163: + goto st4486 + case 164: + goto st4487 + case 165: + goto st4488 + case 166: + goto st4489 + case 167: + goto st4490 + case 168: + goto st4491 + case 169: + goto st4492 + case 170: + goto st2723 + case 171: + goto st4493 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st4494 + case 176: + goto st3270 + case 194: + goto st4495 + case 204: + goto st4643 + case 205: + goto st4644 + case 210: + goto st4645 + case 214: + goto st4646 + case 215: + goto st4647 + case 216: + goto st4648 + case 217: + goto st4649 + case 219: + goto st4650 + case 220: + goto st4651 + case 221: + goto st4652 + case 222: + goto st4653 + case 223: + goto st4654 + case 224: + goto st4655 + case 225: + goto st4656 + case 226: + goto st4657 + case 227: + goto st4658 + case 234: + goto st4659 + case 239: + goto st4660 + case 240: + goto st4661 + case 243: + goto st4662 + } + switch { + case data[p] > 157: + if 177 <= data[p] { + goto st3318 + } + case data[p] >= 129: + goto st145 + } + goto tr4499 + st4481: + if p++; p == pe { + goto _test_eof4481 + } + st_case_4481: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st4482: + if p++; p == pe { + goto _test_eof4482 + } + st_case_4482: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr2395 + } + goto tr0 + st4483: + if p++; p == pe { + goto _test_eof4483 + } + st_case_4483: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr0 + } + case data[p] >= 176: + goto tr2395 + } + goto tr148 + st4484: + if p++; p == pe { + goto _test_eof4484 + } + st_case_4484: + switch data[p] { + case 130: + goto tr2395 + case 134: + goto tr2395 + case 139: + goto tr2395 + } + switch { + 
case data[p] > 167: + if 168 <= data[p] { + goto tr0 + } + case data[p] >= 163: + goto tr2395 + } + goto tr148 + st4485: + if p++; p == pe { + goto _test_eof4485 + } + st_case_4485: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr2395 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4486: + if p++; p == pe { + goto _test_eof4486 + } + st_case_4486: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] >= 133: + goto tr0 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr0 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr0 + } + goto tr2395 + st4487: + if p++; p == pe { + goto _test_eof4487 + } + st_case_4487: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr126 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4488: + if p++; p == pe { + goto _test_eof4488 + } + st_case_4488: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr2395 + } + case data[p] > 159: + switch { + case data[p] > 188: + if 189 <= data[p] { + goto tr0 + } + case data[p] >= 160: + goto tr2984 + } + default: + goto tr0 + } + goto tr148 + st4489: + if p++; p == pe { + goto _test_eof4489 + } + st_case_4489: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4490: + if p++; p == pe { + goto _test_eof4490 + } + st_case_4490: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { 
+ goto tr126 + } + case data[p] >= 129: + goto tr0 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr0 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr0 + } + default: + goto tr126 + } + default: + goto tr0 + } + goto tr2395 + st4491: + if p++; p == pe { + goto _test_eof4491 + } + st_case_4491: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st4492: + if p++; p == pe { + goto _test_eof4492 + } + st_case_4492: + if data[p] == 131 { + goto tr2395 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr2395 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr2395 + } + goto tr0 + st4493: + if p++; p == pe { + goto _test_eof4493 + } + st_case_4493: + if data[p] == 129 { + goto tr2395 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr2395 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2395 + } + goto tr0 + st4494: + if p++; p == pe { + goto _test_eof4494 + } + st_case_4494: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 172: + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st4495: + if p++; p == pe { + goto _test_eof4495 + } + st_case_4495: + if data[p] == 173 { + goto tr4086 + } + goto tr2984 +tr4086: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5211 + st5211: + if p++; p == pe { + goto _test_eof5211 + } + st_case_5211: +//line 
segment_words_prod.go:135407 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st4496 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st4497 + case 205: + goto st4498 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st4499 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st4500 + case 215: + goto st4501 + case 216: + goto st4502 + case 217: + goto st4503 + case 219: + goto st4504 + case 220: + goto st4505 + case 221: + goto st4506 + case 222: + goto st4507 + case 223: + goto st4508 + case 224: + goto st4509 + case 225: + goto st4541 + case 226: + goto st4563 + case 227: + goto st4570 + case 234: + goto st4573 + case 237: + goto st3517 + case 239: + goto st4589 + case 240: + goto st4595 + case 243: + goto st4637 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st4496: + if p++; p == pe { + goto _test_eof4496 + } + st_case_4496: + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr4086 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + } + goto tr2985 + st4497: + if p++; p == pe { + goto _test_eof4497 + } + st_case_4497: + if data[p] <= 127 { + goto tr2985 + } + goto tr4086 + st4498: + if p++; p == pe { + goto _test_eof4498 + } + st_case_4498: + switch data[p] { + case 181: + goto tr2985 + case 190: + goto tr2985 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr2985 + } + case data[p] >= 186: + goto tr148 
+ } + default: + goto tr2985 + } + goto tr4086 + st4499: + if p++; p == pe { + goto _test_eof4499 + } + st_case_4499: + if data[p] == 130 { + goto tr2985 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr4086 + } + goto tr148 + st4500: + if p++; p == pe { + goto _test_eof4500 + } + st_case_4500: + if data[p] == 190 { + goto tr2985 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr2985 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4501: + if p++; p == pe { + goto _test_eof4501 + } + st_case_4501: + switch data[p] { + case 135: + goto tr4086 + case 179: + goto tr148 + case 180: + goto st142 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr4086 + } + case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr4086 + } + goto tr2985 + st4502: + if p++; p == pe { + goto _test_eof4502 + } + st_case_4502: + if data[p] == 156 { + goto tr4086 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr4086 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr4086 + } + goto tr2985 + st4503: + if p++; p == pe { + goto _test_eof4503 + } + st_case_4503: + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr4086 + } + switch { + case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr4086 + } + goto tr2985 + st4504: + if p++; p == pe { + goto _test_eof4504 + } + st_case_4504: + switch data[p] { + case 148: + goto tr2985 + case 158: + goto tr2985 + case 169: + goto tr2985 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 
173 { + goto tr4086 + } + case data[p] >= 150: + goto tr4086 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr2985 + } + case data[p] >= 189: + goto tr2985 + } + default: + goto tr421 + } + goto tr148 + st4505: + if p++; p == pe { + goto _test_eof4505 + } + st_case_4505: + if data[p] == 144 { + goto tr148 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr4086 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4506: + if p++; p == pe { + goto _test_eof4506 + } + st_case_4506: + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr2985 + } + goto tr4086 + st4507: + if p++; p == pe { + goto _test_eof4507 + } + st_case_4507: + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr2985 + } + case data[p] >= 166: + goto tr4086 + } + goto tr148 + st4508: + if p++; p == pe { + goto _test_eof4508 + } + st_case_4508: + if data[p] == 186 { + goto tr148 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4509: + if p++; p == pe { + goto _test_eof4509 + } + st_case_4509: + switch data[p] { + case 160: + goto st4510 + case 161: + goto st4511 + case 162: + goto st168 + case 163: + goto st4512 + case 164: + goto st4513 + case 165: + goto st4514 + case 166: + goto st4515 + case 167: + goto st4516 + case 168: + goto st4517 + case 169: + goto st4518 + case 170: + goto st4519 + case 171: + goto st4520 + case 172: + goto st4521 + case 173: + goto st4522 + case 174: + goto st4523 + case 175: + goto st4524 + case 176: + goto st4525 + case 177: + goto st4526 + case 178: + goto st4527 + case 179: + goto st4528 + case 180: + goto st4529 + case 181: + goto st4530 
+ case 182: + goto st4531 + case 183: + goto st4532 + case 184: + goto st4533 + case 185: + goto st4534 + case 186: + goto st4535 + case 187: + goto st4536 + case 188: + goto st4537 + case 189: + goto st4538 + case 190: + goto st4539 + case 191: + goto st4540 + } + goto tr2985 + st4510: + if p++; p == pe { + goto _test_eof4510 + } + st_case_4510: + switch data[p] { + case 154: + goto tr148 + case 164: + goto tr148 + case 168: + goto tr148 + } + switch { + case data[p] > 149: + if 150 <= data[p] && data[p] <= 173 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4511: + if p++; p == pe { + goto _test_eof4511 + } + st_case_4511: + switch { + case data[p] > 152: + if 153 <= data[p] && data[p] <= 155 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4512: + if p++; p == pe { + goto _test_eof4512 + } + st_case_4512: + if 163 <= data[p] { + goto tr4086 + } + goto tr2985 + st4513: + if p++; p == pe { + goto _test_eof4513 + } + st_case_4513: + if data[p] == 189 { + goto tr148 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr148 + } + goto tr4086 + st4514: + if p++; p == pe { + goto _test_eof4514 + } + st_case_4514: + switch data[p] { + case 144: + goto tr148 + case 176: + goto tr2985 + } + switch { + case data[p] < 164: + if 152 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 177 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2985 + } + goto tr4086 + st4515: + if p++; p == pe { + goto _test_eof4515 + } + st_case_4515: + switch data[p] { + case 132: + goto tr2985 + case 169: + goto tr2985 + case 177: + goto tr2985 + case 188: + goto tr4086 + } + switch { + case data[p] < 145: + switch { + case data[p] > 131: + if 141 <= data[p] && data[p] <= 142 { + goto tr2985 + } + case data[p] >= 129: + goto tr4086 + } + case data[p] > 146: + switch { + case data[p] < 186: + if 179 <= data[p] && data[p] <= 181 { + goto tr2985 + 
} + case data[p] > 187: + if 190 <= data[p] { + goto tr4086 + } + default: + goto tr2985 + } + default: + goto tr2985 + } + goto tr148 + st4516: + if p++; p == pe { + goto _test_eof4516 + } + st_case_4516: + switch data[p] { + case 142: + goto tr148 + case 158: + goto tr2985 + } + switch { + case data[p] < 156: + switch { + case data[p] < 137: + if 133 <= data[p] && data[p] <= 134 { + goto tr2985 + } + case data[p] > 138: + switch { + case data[p] > 150: + if 152 <= data[p] && data[p] <= 155 { + goto tr2985 + } + case data[p] >= 143: + goto tr2985 + } + default: + goto tr2985 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr2985 + } + case data[p] > 175: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2985 + } + case data[p] >= 176: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr4086 + st4517: + if p++; p == pe { + goto _test_eof4517 + } + st_case_4517: + if data[p] == 188 { + goto tr4086 + } + switch { + case data[p] < 170: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 147 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] >= 143: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 176: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 182: + switch { + case data[p] > 185: + if 190 <= data[p] { + goto tr4086 + } + case data[p] >= 184: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4518: + if p++; p == pe { + goto _test_eof4518 + } + st_case_4518: + if data[p] == 157 { + goto tr2985 + } + switch { + case data[p] < 153: + switch { + case data[p] < 137: + if 131 <= data[p] && data[p] <= 134 { + goto tr2985 + } + case data[p] > 138: + switch { + case data[p] > 144: + if 146 <= data[p] && data[p] <= 152 { + goto tr2985 + } + case data[p] 
>= 142: + goto tr2985 + } + default: + goto tr2985 + } + case data[p] > 158: + switch { + case data[p] < 166: + if 159 <= data[p] && data[p] <= 165 { + goto tr2985 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr4086 + st4519: + if p++; p == pe { + goto _test_eof4519 + } + st_case_4519: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 141: + if 143 <= data[p] && data[p] <= 145 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] { + goto tr4086 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4520: + if p++; p == pe { + goto _test_eof4520 + } + st_case_4520: + switch data[p] { + case 134: + goto tr2985 + case 138: + goto tr2985 + case 144: + goto tr148 + case 185: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] >= 142: + goto tr2985 + } + case data[p] > 165: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2985 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr2985 + } + goto tr4086 + st4521: + if p++; p == pe { + goto _test_eof4521 + } + st_case_4521: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 
176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr4086 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4522: + if p++; p == pe { + goto _test_eof4522 + } + st_case_4522: + if data[p] == 177 { + goto tr148 + } + switch { + case data[p] < 150: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr4086 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr4086 + } + default: + goto tr4086 + } + case data[p] > 151: + switch { + case data[p] < 159: + if 156 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 161: + switch { + case data[p] > 163: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 162: + goto tr4086 + } + default: + goto tr148 + } + default: + goto tr4086 + } + goto tr2985 + st4523: + if p++; p == pe { + goto _test_eof4523 + } + st_case_4523: + switch data[p] { + case 130: + goto tr4086 + case 131: + goto tr148 + case 156: + goto tr148 + } + switch { + case data[p] < 158: + switch { + case data[p] < 142: + if 133 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 144: + switch { + case data[p] > 149: + if 153 <= data[p] && data[p] <= 154 { + goto tr148 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] < 168: + if 163 <= data[p] && data[p] <= 164 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 185: + if 190 <= data[p] && data[p] <= 191 { + goto tr4086 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4524: + if p++; p == pe { + goto _test_eof4524 + } + st_case_4524: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr4086 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr4086 + } + case data[p] > 136: + switch 
{ + case data[p] > 141: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] >= 138: + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4525: + if p++; p == pe { + goto _test_eof4525 + } + st_case_4525: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 128: + goto tr4086 + } + case data[p] > 144: + switch { + case data[p] < 170: + if 146 <= data[p] && data[p] <= 168 { + goto tr148 + } + case data[p] > 185: + if 190 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4526: + if p++; p == pe { + goto _test_eof4526 + } + st_case_4526: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + case 151: + goto tr2985 + } + switch { + case data[p] < 160: + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 148 { + goto tr2985 + } + case data[p] > 154: + if 155 <= data[p] && data[p] <= 159 { + goto tr2985 + } + default: + goto tr148 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 164 <= data[p] && data[p] <= 165 { + goto tr2985 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2985 + } + default: + goto tr421 + } + default: + goto tr148 + } + goto tr4086 + st4527: + if p++; p == pe { + goto _test_eof4527 + } + st_case_4527: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 146: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 140: + if 142 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 181: + if 170 <= data[p] && data[p] <= 179 { + goto tr148 + } + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr4086 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4528: + if p++; p == pe { + goto _test_eof4528 + } + 
st_case_4528: + if data[p] == 158 { + goto tr148 + } + switch { + case data[p] < 149: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr4086 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr4086 + } + default: + goto tr4086 + } + case data[p] > 150: + switch { + case data[p] < 162: + if 160 <= data[p] && data[p] <= 161 { + goto tr148 + } + case data[p] > 163: + switch { + case data[p] > 175: + if 177 <= data[p] && data[p] <= 178 { + goto tr148 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4529: + if p++; p == pe { + goto _test_eof4529 + } + st_case_4529: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 142: + switch { + case data[p] > 131: + if 133 <= data[p] && data[p] <= 140 { + goto tr148 + } + case data[p] >= 129: + goto tr4086 + } + case data[p] > 144: + switch { + case data[p] > 186: + if 190 <= data[p] { + goto tr4086 + } + case data[p] >= 146: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4530: + if p++; p == pe { + goto _test_eof4530 + } + st_case_4530: + switch data[p] { + case 133: + goto tr2985 + case 137: + goto tr2985 + case 142: + goto tr148 + } + switch { + case data[p] < 164: + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 150 { + goto tr2985 + } + case data[p] > 158: + if 159 <= data[p] && data[p] <= 161 { + goto tr148 + } + default: + goto tr2985 + } + case data[p] > 165: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr421 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr2985 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr2985 + } + default: + goto tr2985 + } + goto tr4086 + st4531: + if p++; p == pe { + goto _test_eof4531 + } + st_case_4531: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 133: + if 130 <= data[p] && data[p] <= 131 { + goto tr4086 + } + 
case data[p] > 150: + switch { + case data[p] > 177: + if 179 <= data[p] && data[p] <= 187 { + goto tr148 + } + case data[p] >= 154: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4532: + if p++; p == pe { + goto _test_eof4532 + } + st_case_4532: + switch data[p] { + case 138: + goto tr4086 + case 150: + goto tr4086 + } + switch { + case data[p] < 152: + switch { + case data[p] > 134: + if 143 <= data[p] && data[p] <= 148 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 178 <= data[p] && data[p] <= 179 { + goto tr4086 + } + case data[p] >= 166: + goto tr421 + } + default: + goto tr4086 + } + goto tr2985 + st4533: + if p++; p == pe { + goto _test_eof4533 + } + st_case_4533: + if data[p] == 177 { + goto tr4086 + } + if 180 <= data[p] && data[p] <= 186 { + goto tr4086 + } + goto tr2985 + st4534: + if p++; p == pe { + goto _test_eof4534 + } + st_case_4534: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 135: + goto tr4086 + } + goto tr2985 + st4535: + if p++; p == pe { + goto _test_eof4535 + } + st_case_4535: + if data[p] == 177 { + goto tr4086 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr4086 + } + case data[p] >= 180: + goto tr4086 + } + goto tr2985 + st4536: + if p++; p == pe { + goto _test_eof4536 + } + st_case_4536: + switch { + case data[p] > 141: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 136: + goto tr4086 + } + goto tr2985 + st4537: + if p++; p == pe { + goto _test_eof4537 + } + st_case_4537: + switch data[p] { + case 128: + goto tr148 + case 181: + goto tr4086 + case 183: + goto tr4086 + case 185: + goto tr4086 + } + switch { + case data[p] < 160: + if 152 <= data[p] && data[p] <= 153 { + goto tr4086 + } + case data[p] > 169: + if 190 <= data[p] && data[p] <= 191 { + goto tr4086 + } + default: + goto tr421 + } + goto tr2985 + st4538: + if p++; 
p == pe { + goto _test_eof4538 + } + st_case_4538: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 172: + if 177 <= data[p] && data[p] <= 191 { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4539: + if p++; p == pe { + goto _test_eof4539 + } + st_case_4539: + switch { + case data[p] < 136: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 135 { + goto tr4086 + } + case data[p] >= 128: + goto tr4086 + } + case data[p] > 140: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr4086 + } + case data[p] >= 141: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4540: + if p++; p == pe { + goto _test_eof4540 + } + st_case_4540: + if data[p] == 134 { + goto tr4086 + } + goto tr2985 + st4541: + if p++; p == pe { + goto _test_eof4541 + } + st_case_4541: + switch data[p] { + case 128: + goto st4542 + case 129: + goto st4543 + case 130: + goto st4544 + case 131: + goto st202 + case 132: + goto st3268 + case 135: + goto st3319 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st4545 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st4546 + case 157: + goto st4547 + case 158: + goto st4548 + case 159: + goto st4549 + case 160: + goto st4550 + case 161: + goto st219 + case 162: + goto st4551 + case 163: + goto st221 + case 164: + goto st4552 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st4553 + case 169: + goto st4554 + case 170: + goto st4555 + case 172: + goto st4556 + case 173: + goto st4557 + case 174: + goto st4558 + case 175: + goto st4559 + case 176: + goto st4560 + case 177: + goto st640 + case 179: + goto st4561 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st4562 + case 188: + goto st234 + case 189: + 
goto st235 + case 190: + goto st236 + case 191: + goto st237 + } + switch { + case data[p] < 136: + if 133 <= data[p] && data[p] <= 134 { + goto st3318 + } + case data[p] > 152: + switch { + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + case data[p] >= 180: + goto st147 + } + default: + goto st145 + } + goto tr2985 + st4542: + if p++; p == pe { + goto _test_eof4542 + } + st_case_4542: + if 171 <= data[p] && data[p] <= 190 { + goto tr4086 + } + goto tr2985 + st4543: + if p++; p == pe { + goto _test_eof4543 + } + st_case_4543: + switch { + case data[p] < 158: + switch { + case data[p] > 137: + if 150 <= data[p] && data[p] <= 153 { + goto tr4086 + } + case data[p] >= 128: + goto tr421 + } + case data[p] > 160: + switch { + case data[p] < 167: + if 162 <= data[p] && data[p] <= 164 { + goto tr4086 + } + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr4086 + } + default: + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4544: + if p++; p == pe { + goto _test_eof4544 + } + st_case_4544: + if data[p] == 143 { + goto tr4086 + } + switch { + case data[p] < 144: + if 130 <= data[p] && data[p] <= 141 { + goto tr4086 + } + case data[p] > 153: + switch { + case data[p] > 157: + if 160 <= data[p] { + goto tr148 + } + case data[p] >= 154: + goto tr4086 + } + default: + goto tr421 + } + goto tr2985 + st4545: + if p++; p == pe { + goto _test_eof4545 + } + st_case_4545: + switch { + case data[p] < 157: + if 155 <= data[p] && data[p] <= 156 { + goto tr2985 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4546: + if p++; p == pe { + goto _test_eof4546 + } + st_case_4546: + switch { + case data[p] < 146: + switch { + case data[p] > 140: + if 142 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] > 177: + if 178 <= data[p] && data[p] <= 180 { + goto tr4086 + } + case data[p] 
>= 160: + goto tr148 + } + default: + goto tr4086 + } + goto tr2985 + st4547: + if p++; p == pe { + goto _test_eof4547 + } + st_case_4547: + switch { + case data[p] < 160: + switch { + case data[p] > 145: + if 146 <= data[p] && data[p] <= 147 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 172: + switch { + case data[p] > 176: + if 178 <= data[p] && data[p] <= 179 { + goto tr4086 + } + case data[p] >= 174: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4548: + if p++; p == pe { + goto _test_eof4548 + } + st_case_4548: + if 180 <= data[p] { + goto tr4086 + } + goto tr2985 + st4549: + if p++; p == pe { + goto _test_eof4549 + } + st_case_4549: + switch { + case data[p] < 158: + if 148 <= data[p] && data[p] <= 156 { + goto tr2985 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 170 <= data[p] { + goto tr2985 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2985 + } + goto tr4086 + st4550: + if p++; p == pe { + goto _test_eof4550 + } + st_case_4550: + switch { + case data[p] < 144: + if 139 <= data[p] && data[p] <= 142 { + goto tr4086 + } + case data[p] > 153: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + goto tr2985 + st4551: + if p++; p == pe { + goto _test_eof4551 + } + st_case_4551: + if data[p] == 169 { + goto tr4086 + } + switch { + case data[p] > 170: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4552: + if p++; p == pe { + goto _test_eof4552 + } + st_case_4552: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 158 { + goto tr148 + } + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4553: + if p++; p == pe { + goto _test_eof4553 + } + st_case_4553: + switch { + case data[p] > 150: + if 151 <= data[p] && data[p] <= 155 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4554: + if p++; p == pe { + 
goto _test_eof4554 + } + st_case_4554: + if data[p] == 191 { + goto tr4086 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr4086 + } + case data[p] >= 149: + goto tr4086 + } + goto tr2985 + st4555: + if p++; p == pe { + goto _test_eof4555 + } + st_case_4555: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 153: + if 176 <= data[p] && data[p] <= 190 { + goto tr4086 + } + default: + goto tr421 + } + goto tr2985 + st4556: + if p++; p == pe { + goto _test_eof4556 + } + st_case_4556: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 132 { + goto tr4086 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4557: + if p++; p == pe { + goto _test_eof4557 + } + st_case_4557: + switch { + case data[p] < 144: + switch { + case data[p] > 139: + if 140 <= data[p] && data[p] <= 143 { + goto tr2985 + } + case data[p] >= 133: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr2985 + } + case data[p] >= 154: + goto tr2985 + } + default: + goto tr421 + } + goto tr4086 + st4558: + if p++; p == pe { + goto _test_eof4558 + } + st_case_4558: + switch { + case data[p] < 161: + switch { + case data[p] > 130: + if 131 <= data[p] && data[p] <= 160 { + goto tr148 + } + case data[p] >= 128: + goto tr4086 + } + case data[p] > 173: + switch { + case data[p] < 176: + if 174 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr148 + } + default: + goto tr421 + } + default: + goto tr4086 + } + goto tr2985 + st4559: + if p++; p == pe { + goto _test_eof4559 + } + st_case_4559: + switch { + case data[p] > 179: + if 180 <= data[p] { + goto tr2985 + } + case data[p] >= 166: + goto tr4086 + } + goto tr148 + st4560: + if p++; p == pe { + goto _test_eof4560 + } + st_case_4560: + switch { + case data[p] > 163: + if 164 <= data[p] && data[p] 
<= 183 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4561: + if p++; p == pe { + goto _test_eof4561 + } + st_case_4561: + if data[p] == 173 { + goto tr4086 + } + switch { + case data[p] < 169: + switch { + case data[p] > 146: + if 148 <= data[p] && data[p] <= 168 { + goto tr4086 + } + case data[p] >= 144: + goto tr4086 + } + case data[p] > 177: + switch { + case data[p] < 181: + if 178 <= data[p] && data[p] <= 180 { + goto tr4086 + } + case data[p] > 182: + if 184 <= data[p] && data[p] <= 185 { + goto tr4086 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4562: + if p++; p == pe { + goto _test_eof4562 + } + st_case_4562: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr4086 + } + case data[p] >= 128: + goto tr4086 + } + goto tr2985 + st4563: + if p++; p == pe { + goto _test_eof4563 + } + st_case_4563: + switch data[p] { + case 128: + goto st4564 + case 129: + goto st4565 + case 130: + goto st241 + case 131: + goto st4566 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st4567 + case 180: + goto st251 + case 181: + goto st4568 + case 182: + goto st253 + case 183: + goto st4569 + case 184: + goto st255 + } + goto tr2985 + st4564: + if p++; p == pe { + goto _test_eof4564 + } + st_case_4564: + switch data[p] { + case 164: + goto st142 + case 167: + goto st142 + } + switch { + case data[p] < 152: + if 140 <= data[p] && data[p] <= 143 { + goto tr4086 + } + case data[p] > 153: + switch { + case data[p] > 174: + if 191 <= data[p] { + goto tr571 + } + case data[p] >= 170: + goto tr4086 + } + default: + goto st142 + } + goto tr2985 + st4565: + if p++; p == pe { + goto _test_eof4565 + } + st_case_4565: + switch data[p] { + case 165: + goto tr2985 + case 177: + goto tr148 + case 191: + goto tr148 + } + switch { + 
case data[p] < 149: + if 129 <= data[p] && data[p] <= 147 { + goto tr2985 + } + case data[p] > 159: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2985 + } + case data[p] >= 160: + goto tr4086 + } + default: + goto tr2985 + } + goto tr571 + st4566: + if p++; p == pe { + goto _test_eof4566 + } + st_case_4566: + if 144 <= data[p] && data[p] <= 176 { + goto tr4086 + } + goto tr2985 + st4567: + if p++; p == pe { + goto _test_eof4567 + } + st_case_4567: + switch { + case data[p] < 175: + if 165 <= data[p] && data[p] <= 170 { + goto tr2985 + } + case data[p] > 177: + if 180 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4568: + if p++; p == pe { + goto _test_eof4568 + } + st_case_4568: + if data[p] == 191 { + goto tr4086 + } + switch { + case data[p] > 174: + if 176 <= data[p] { + goto tr2985 + } + case data[p] >= 168: + goto tr2985 + } + goto tr148 + st4569: + if p++; p == pe { + goto _test_eof4569 + } + st_case_4569: + switch { + case data[p] < 144: + switch { + case data[p] > 134: + if 136 <= data[p] && data[p] <= 142 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 150: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 191 { + goto tr4086 + } + case data[p] >= 152: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4570: + if p++; p == pe { + goto _test_eof4570 + } + st_case_4570: + switch data[p] { + case 128: + goto st4571 + case 130: + goto st4572 + case 132: + goto st3348 + case 133: + goto st3318 + case 134: + goto st3349 + case 136: + goto st3350 + case 137: + goto st3429 + } + goto tr2985 + st4571: + if p++; p == pe { + goto _test_eof4571 + } + st_case_4571: + if data[p] == 133 { + goto tr148 + } + switch { + case data[p] > 175: + if 187 <= data[p] && data[p] <= 188 { + goto tr148 + } + case data[p] >= 170: + goto tr4086 + } + goto tr2985 + st4572: + if p++; p == pe { + goto _test_eof4572 + } + st_case_4572: + if 153 <= data[p] && data[p] <= 154 { + goto 
tr4086 + } + goto tr2985 + st4573: + if p++; p == pe { + goto _test_eof4573 + } + st_case_4573: + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st4574 + case 154: + goto st4575 + case 155: + goto st4576 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st4577 + case 161: + goto st272 + case 162: + goto st4578 + case 163: + goto st4579 + case 164: + goto st4580 + case 165: + goto st4581 + case 166: + goto st4582 + case 167: + goto st4583 + case 168: + goto st4584 + case 169: + goto st4585 + case 170: + goto st4586 + case 171: + goto st4587 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st4588 + case 176: + goto st3270 + } + switch { + case data[p] > 157: + if 177 <= data[p] { + goto st3318 + } + case data[p] >= 129: + goto st145 + } + goto tr2985 + st4574: + if p++; p == pe { + goto _test_eof4574 + } + st_case_4574: + if data[p] == 191 { + goto tr148 + } + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4575: + if p++; p == pe { + goto _test_eof4575 + } + st_case_4575: + switch { + case data[p] < 158: + if 128 <= data[p] && data[p] <= 157 { + goto tr148 + } + case data[p] > 159: + if 160 <= data[p] { + goto tr148 + } + default: + goto tr4086 + } + goto tr2985 + st4576: + if p++; p == pe { + goto _test_eof4576 + } + st_case_4576: + switch { + case data[p] > 177: + if 178 <= data[p] { + goto tr2985 + } + case data[p] >= 176: + goto tr4086 + } + goto tr148 + st4577: + if p++; p == pe { + goto _test_eof4577 + } + st_case_4577: + switch data[p] { + case 130: + goto tr4086 + case 134: + goto tr4086 + case 139: + goto tr4086 + } + switch { + case data[p] > 167: + if 168 <= data[p] { + goto tr2985 + } + case 
data[p] >= 163: + goto tr4086 + } + goto tr148 + st4578: + if p++; p == pe { + goto _test_eof4578 + } + st_case_4578: + switch { + case data[p] < 130: + if 128 <= data[p] && data[p] <= 129 { + goto tr4086 + } + case data[p] > 179: + if 180 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4579: + if p++; p == pe { + goto _test_eof4579 + } + st_case_4579: + switch data[p] { + case 187: + goto tr148 + case 189: + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 143: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 133: + goto tr2985 + } + case data[p] > 159: + switch { + case data[p] > 183: + if 184 <= data[p] { + goto tr2985 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr2985 + } + goto tr4086 + st4580: + if p++; p == pe { + goto _test_eof4580 + } + st_case_4580: + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 165: + switch { + case data[p] > 173: + if 176 <= data[p] { + goto tr148 + } + case data[p] >= 166: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4581: + if p++; p == pe { + goto _test_eof4581 + } + st_case_4581: + switch { + case data[p] < 148: + if 135 <= data[p] && data[p] <= 147 { + goto tr4086 + } + case data[p] > 159: + switch { + case data[p] > 188: + if 189 <= data[p] { + goto tr2985 + } + case data[p] >= 160: + goto tr2984 + } + default: + goto tr2985 + } + goto tr148 + st4582: + if p++; p == pe { + goto _test_eof4582 + } + st_case_4582: + switch { + case data[p] < 132: + if 128 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4583: + if p++; p == pe { + goto _test_eof4583 + } + st_case_4583: + if data[p] == 143 { + goto tr148 + } + switch { + case data[p] < 154: + switch { + case data[p] > 142: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] >= 129: 
+ goto tr2985 + } + case data[p] > 164: + switch { + case data[p] < 176: + if 166 <= data[p] && data[p] <= 175 { + goto tr2985 + } + case data[p] > 185: + if 186 <= data[p] { + goto tr2985 + } + default: + goto tr421 + } + default: + goto tr2985 + } + goto tr4086 + st4584: + if p++; p == pe { + goto _test_eof4584 + } + st_case_4584: + switch { + case data[p] > 168: + if 169 <= data[p] && data[p] <= 182 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4585: + if p++; p == pe { + goto _test_eof4585 + } + st_case_4585: + if data[p] == 131 { + goto tr4086 + } + switch { + case data[p] < 140: + if 128 <= data[p] && data[p] <= 139 { + goto tr148 + } + case data[p] > 141: + switch { + case data[p] > 153: + if 187 <= data[p] && data[p] <= 189 { + goto tr4086 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr4086 + } + goto tr2985 + st4586: + if p++; p == pe { + goto _test_eof4586 + } + st_case_4586: + if data[p] == 176 { + goto tr4086 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr4086 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4587: + if p++; p == pe { + goto _test_eof4587 + } + st_case_4587: + if data[p] == 129 { + goto tr4086 + } + switch { + case data[p] < 171: + if 160 <= data[p] && data[p] <= 170 { + goto tr148 + } + case data[p] > 175: + switch { + case data[p] > 180: + if 181 <= data[p] && data[p] <= 182 { + goto tr4086 + } + case data[p] >= 178: + goto tr148 + } + default: + goto tr4086 + } + goto tr2985 + st4588: + if p++; p == pe { + goto _test_eof4588 + } + st_case_4588: + switch { + case data[p] < 163: + if 128 <= data[p] && data[p] <= 162 { + goto tr148 + } + case data[p] > 170: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 172: + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4589: + if p++; p == pe { + goto 
_test_eof4589 + } + st_case_4589: + switch data[p] { + case 172: + goto st4590 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st4591 + case 185: + goto st967 + case 187: + goto st4592 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st4593 + case 191: + goto st4594 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr2985 + st4590: + if p++; p == pe { + goto _test_eof4590 + } + st_case_4590: + switch data[p] { + case 158: + goto tr4086 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr2985 + st4591: + if p++; p == pe { + goto _test_eof4591 + } + st_case_4591: + if data[p] == 147 { + goto st142 + } + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr4086 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr571 + } + default: + goto tr4086 + } + goto tr2985 + st4592: + if p++; p == pe { + goto _test_eof4592 + } + st_case_4592: + if data[p] == 191 { + goto tr4086 + } + if 189 <= data[p] { + goto tr2985 + } + goto tr148 + st4593: + if p++; p == pe { + goto _test_eof4593 + } + st_case_4593: + switch { + case data[p] > 159: + if 160 <= data[p] && data[p] <= 190 { + goto tr2984 + } + case data[p] >= 158: + goto tr4086 + } + goto tr2985 + st4594: + if p++; p == pe { + goto _test_eof4594 + } + st_case_4594: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr2984 + } + case data[p] >= 130: + goto tr2984 + } + case data[p] > 151: + switch { + case data[p] > 
156: + if 185 <= data[p] && data[p] <= 187 { + goto tr4086 + } + case data[p] >= 154: + goto tr2984 + } + default: + goto tr2984 + } + goto tr2985 + st4595: + if p++; p == pe { + goto _test_eof4595 + } + st_case_4595: + switch data[p] { + case 144: + goto st4596 + case 145: + goto st4602 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st4621 + case 155: + goto st4626 + case 157: + goto st4628 + case 158: + goto st4635 + case 159: + goto st403 + } + goto tr2985 + st4596: + if p++; p == pe { + goto _test_eof4596 + } + st_case_4596: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st4597 + case 138: + goto st313 + case 139: + goto st4598 + case 140: + goto st315 + case 141: + goto st4599 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st684 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 + case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st4600 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st4601 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr2985 + st4597: + if p++; p == pe { + goto _test_eof4597 + } + st_case_4597: + if data[p] == 189 { + goto tr4086 + } + goto tr2985 + st4598: + if p++; p == pe { + goto _test_eof4598 + } + st_case_4598: + if data[p] == 160 { + goto tr4086 + } + if 145 <= data[p] { + goto tr2985 + } + goto tr148 + st4599: + if p++; p == pe { + goto _test_eof4599 + } + st_case_4599: + switch { 
+ case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr2985 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4600: + if p++; p == pe { + goto _test_eof4600 + } + st_case_4600: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr4086 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr4086 + } + default: + goto tr4086 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr4086 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4601: + if p++; p == pe { + goto _test_eof4601 + } + st_case_4601: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4602: + if p++; p == pe { + goto _test_eof4602 + } + st_case_4602: + switch data[p] { + case 128: + goto st4603 + case 129: + goto st4604 + case 130: + goto st4605 + case 131: + goto st691 + case 132: + goto st4606 + case 133: + goto st4607 + case 134: + goto st4608 + case 135: + goto st4609 + case 136: + goto st4610 + case 138: + goto st348 + case 139: + goto st4611 + case 140: + goto st4612 + case 141: + goto st4613 + case 146: + goto st4614 + case 147: + goto st4615 + case 150: + goto st4616 + case 151: + goto st4617 + case 152: + goto st4614 + case 153: + goto st4618 + case 154: + goto st4619 + case 155: + goto st538 + case 156: + goto st4620 + case 162: + goto st359 + case 163: + goto st707 + case 171: + goto st361 + } + goto tr2985 + st4603: + if p++; p == pe { + goto _test_eof4603 + } + st_case_4603: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 
130 { + goto tr4086 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4604: + if p++; p == pe { + goto _test_eof4604 + } + st_case_4604: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr2985 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr2985 + } + default: + goto tr421 + } + goto tr4086 + st4605: + if p++; p == pe { + goto _test_eof4605 + } + st_case_4605: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr4086 + st4606: + if p++; p == pe { + goto _test_eof4606 + } + st_case_4606: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr4086 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr421 + } + case data[p] >= 167: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4607: + if p++; p == pe { + goto _test_eof4607 + } + st_case_4607: + switch data[p] { + case 179: + goto tr4086 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } + goto tr2985 + st4608: + if p++; p == pe { + goto _test_eof4608 + } + st_case_4608: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr4086 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4609: + if p++; p == pe { + goto _test_eof4609 + } + st_case_4609: + if data[p] == 155 { + goto tr2985 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr2985 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr421 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr2985 + } + default: + goto tr148 + } + 
default: + goto tr2985 + } + goto tr4086 + st4610: + if p++; p == pe { + goto _test_eof4610 + } + st_case_4610: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4611: + if p++; p == pe { + goto _test_eof4611 + } + st_case_4611: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr4086 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr2985 + } + case data[p] >= 176: + goto tr421 + } + default: + goto tr2985 + } + goto tr148 + st4612: + if p++; p == pe { + goto _test_eof4612 + } + st_case_4612: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr4086 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr4086 + } + case data[p] >= 181: + goto tr148 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr2985 + st4613: + if p++; p == pe { + goto _test_eof4613 + } + st_case_4613: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr4086 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr4086 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr4086 + } + default: + goto tr4086 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr4086 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr4086 + } + default: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4614: + if 
p++; p == pe { + goto _test_eof4614 + } + st_case_4614: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4615: + if p++; p == pe { + goto _test_eof4615 + } + st_case_4615: + if data[p] == 134 { + goto tr2985 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr2985 + } + case data[p] >= 144: + goto tr421 + } + default: + goto tr2985 + } + goto tr4086 + st4616: + if p++; p == pe { + goto _test_eof4616 + } + st_case_4616: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr4086 + } + default: + goto tr4086 + } + goto tr2985 + st4617: + if p++; p == pe { + goto _test_eof4617 + } + st_case_4617: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr2985 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr2985 + } + default: + goto tr148 + } + goto tr4086 + st4618: + if p++; p == pe { + goto _test_eof4618 + } + st_case_4618: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr2985 + } + case data[p] > 153: + if 154 <= data[p] { + goto tr2985 + } + default: + goto tr421 + } + goto tr4086 + st4619: + if p++; p == pe { + goto _test_eof4619 + } + st_case_4619: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4620: + if p++; p == pe { + goto _test_eof4620 + } + st_case_4620: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr421 + } + case data[p] >= 157: + goto tr4086 + } + goto tr2985 + st4621: + if p++; p == pe { + goto _test_eof4621 + } + st_case_4621: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st709 + 
case 171: + goto st4622 + case 172: + goto st4623 + case 173: + goto st712 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st4624 + case 190: + goto st4625 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr2985 + st4622: + if p++; p == pe { + goto _test_eof4622 + } + st_case_4622: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr4086 + } + case data[p] >= 144: + goto tr148 + } + goto tr2985 + st4623: + if p++; p == pe { + goto _test_eof4623 + } + st_case_4623: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr4086 + } + case data[p] >= 128: + goto tr148 + } + goto tr2985 + st4624: + if p++; p == pe { + goto _test_eof4624 + } + st_case_4624: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr2985 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4625: + if p++; p == pe { + goto _test_eof4625 + } + st_case_4625: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr4086 + } + goto tr2985 + st4626: + if p++; p == pe { + goto _test_eof4626 + } + st_case_4626: + switch data[p] { + case 176: + goto st147 + case 177: + goto st378 + case 178: + goto st4627 + } + goto tr2985 + st4627: + if p++; p == pe { + goto _test_eof4627 + } + st_case_4627: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr4086 + } + case data[p] >= 157: + goto tr4086 + } + default: + goto tr148 + } + goto tr2985 + st4628: + if p++; p == pe { + goto _test_eof4628 + } + st_case_4628: + switch data[p] { + case 133: + goto st4629 + case 134: + goto st4630 + case 137: + goto st4631 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 
+ case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st721 + case 168: + goto st4632 + case 169: + goto st4633 + case 170: + goto st4634 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr2985 + st4629: + if p++; p == pe { + goto _test_eof4629 + } + st_case_4629: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr4086 + } + case data[p] >= 165: + goto tr4086 + } + goto tr2985 + st4630: + if p++; p == pe { + goto _test_eof4630 + } + st_case_4630: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr2985 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr2985 + } + default: + goto tr2985 + } + goto tr4086 + st4631: + if p++; p == pe { + goto _test_eof4631 + } + st_case_4631: + if 130 <= data[p] && data[p] <= 132 { + goto tr4086 + } + goto tr2985 + st4632: + if p++; p == pe { + goto _test_eof4632 + } + st_case_4632: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr4086 + } + case data[p] >= 128: + goto tr4086 + } + goto tr2985 + st4633: + if p++; p == pe { + goto _test_eof4633 + } + st_case_4633: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr2985 + } + case data[p] >= 173: + goto tr2985 + } + goto tr4086 + st4634: + if p++; p == pe { + goto _test_eof4634 + } + st_case_4634: + if data[p] == 132 { + goto tr4086 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr4086 + } + case data[p] >= 155: + goto tr4086 + } + goto tr2985 + st4635: + if p++; p == pe { + goto _test_eof4635 + } + st_case_4635: + switch data[p] { + case 160: + goto st147 + case 163: + goto st4636 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr2985 + st4636: + if p++; p == pe { + goto _test_eof4636 + } + st_case_4636: + switch { + case data[p] < 144: + if 133 <= 
data[p] && data[p] <= 143 { + goto tr2985 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr2985 + } + default: + goto tr4086 + } + goto tr148 + st4637: + if p++; p == pe { + goto _test_eof4637 + } + st_case_4637: + if data[p] == 160 { + goto st4638 + } + goto tr2985 + st4638: + if p++; p == pe { + goto _test_eof4638 + } + st_case_4638: + switch data[p] { + case 128: + goto st4639 + case 129: + goto st4640 + case 132: + goto st4497 + case 135: + goto st4642 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4641 + } + goto tr2985 + st4639: + if p++; p == pe { + goto _test_eof4639 + } + st_case_4639: + if data[p] == 129 { + goto tr4086 + } + if 160 <= data[p] { + goto tr4086 + } + goto tr2985 + st4640: + if p++; p == pe { + goto _test_eof4640 + } + st_case_4640: + if 192 <= data[p] { + goto tr2985 + } + goto tr4086 + st4641: + if p++; p == pe { + goto _test_eof4641 + } + st_case_4641: + goto tr4086 + st4642: + if p++; p == pe { + goto _test_eof4642 + } + st_case_4642: + if 176 <= data[p] { + goto tr2985 + } + goto tr4086 + st4643: + if p++; p == pe { + goto _test_eof4643 + } + st_case_4643: + if data[p] <= 127 { + goto tr2984 + } + goto tr4086 + st4644: + if p++; p == pe { + goto _test_eof4644 + } + st_case_4644: + if 176 <= data[p] { + goto tr2984 + } + goto tr4086 + st4645: + if p++; p == pe { + goto _test_eof4645 + } + st_case_4645: + if 131 <= data[p] && data[p] <= 137 { + goto tr4086 + } + goto tr2984 + st4646: + if p++; p == pe { + goto _test_eof4646 + } + st_case_4646: + if data[p] == 191 { + goto tr4086 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr4086 + } + goto tr2984 + st4647: + if p++; p == pe { + goto _test_eof4647 + } + st_case_4647: + if data[p] == 135 { + goto tr4086 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr4086 + } + case data[p] >= 129: + goto tr4086 + } + goto tr2984 + st4648: + if p++; p == pe { + goto _test_eof4648 + } + st_case_4648: + if data[p] == 156 { + goto tr4086 + } + switch { 
+ case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr4086 + } + case data[p] >= 128: + goto tr4086 + } + goto tr2984 + st4649: + if p++; p == pe { + goto _test_eof4649 + } + st_case_4649: + if data[p] == 176 { + goto tr4086 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr4086 + } + goto tr2984 + st4650: + if p++; p == pe { + goto _test_eof4650 + } + st_case_4650: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr4086 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr4086 + } + case data[p] >= 167: + goto tr4086 + } + default: + goto tr4086 + } + goto tr2984 + st4651: + if p++; p == pe { + goto _test_eof4651 + } + st_case_4651: + switch data[p] { + case 143: + goto tr4086 + case 145: + goto tr4086 + } + if 176 <= data[p] { + goto tr4086 + } + goto tr2984 + st4652: + if p++; p == pe { + goto _test_eof4652 + } + st_case_4652: + if 139 <= data[p] { + goto tr2984 + } + goto tr4086 + st4653: + if p++; p == pe { + goto _test_eof4653 + } + st_case_4653: + if 166 <= data[p] && data[p] <= 176 { + goto tr4086 + } + goto tr2984 + st4654: + if p++; p == pe { + goto _test_eof4654 + } + st_case_4654: + if 171 <= data[p] && data[p] <= 179 { + goto tr4086 + } + goto tr2984 + st4655: + if p++; p == pe { + goto _test_eof4655 + } + st_case_4655: + switch data[p] { + case 160: + goto tr4214 + case 161: + goto tr4215 + case 163: + goto tr4216 + case 164: + goto tr4217 + case 165: + goto tr4218 + case 167: + goto tr4220 + case 169: + goto tr4221 + case 171: + goto tr4222 + case 173: + goto tr4224 + case 174: + goto tr4225 + case 175: + goto tr4226 + case 176: + goto tr4227 + case 177: + goto tr4228 + case 179: + goto tr4229 + case 180: + goto tr4230 + case 181: + goto tr4231 + case 182: + goto tr4232 + case 183: + goto tr4233 + case 184: + goto tr4234 + case 185: + goto tr4235 + case 186: + goto tr4236 + case 187: + goto tr4237 + case 188: + goto tr4238 + case 189: + goto 
tr4239 + case 190: + goto tr4240 + case 191: + goto tr4241 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto tr4223 + } + case data[p] >= 166: + goto tr4219 + } + goto tr2984 +tr4214: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5212 + st5212: + if p++; p == pe { + goto _test_eof5212 + } + st_case_5212: +//line segment_words_prod.go:139420 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 155: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 163: + switch { + case data[p] < 169: + if 165 <= data[p] && data[p] <= 167 { + goto tr1 + } + case data[p] > 173: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4215: +//line NONE:1 +te = p+1 
+ +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5213 + st5213: + if p++; p == pe { + goto _test_eof5213 + } + st_case_5213: +//line segment_words_prod.go:139548 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 153 <= data[p] && data[p] <= 155 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4216: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5214 + st5214: + if p++; p == pe { + goto _test_eof5214 + } + st_case_5214: +//line segment_words_prod.go:139662 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto 
tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 163: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5294: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5215 + st5215: + if p++; p == pe { + goto _test_eof5215 + } + st_case_5215: +//line segment_words_prod.go:139785 + switch data[p] { + case 170: + goto tr148 + case 173: + goto tr2984 + case 181: + goto tr148 + case 183: + goto st142 + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + 
goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5295: +//line segment_words.rl:72 + + endPos = p + + goto st5216 + st5216: + if p++; p == pe { + goto _test_eof5216 + } + st_case_5216: +//line segment_words_prod.go:139852 + switch data[p] { + case 194: + goto tr5317 + case 204: + goto tr5318 + case 205: + goto tr5319 + case 210: + goto tr5320 + case 214: + goto tr5321 + case 215: + goto tr5322 + case 216: + goto tr5323 + case 217: + goto tr5324 + case 219: + goto tr5325 + case 220: + goto tr5326 + case 221: + goto tr5327 + case 222: + goto tr5328 + case 223: + goto tr5329 + case 224: + goto tr5330 + case 225: + goto tr5331 + case 226: + goto tr5332 + case 227: + goto tr5333 + case 234: + goto tr5334 + case 239: + goto tr5335 + case 240: + goto tr5336 + case 243: + goto tr5337 + } + if 128 <= data[p] { + goto tr2984 + } + goto tr4499 +tr5317: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5217 + st5217: + if p++; p == pe { + goto _test_eof5217 + } + st_case_5217: +//line segment_words_prod.go:139917 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 173: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 
+ case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5318: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5218 + st5218: + if p++; p == pe { + goto _test_eof5218 + } + st_case_5218: +//line segment_words_prod.go:140029 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr5002 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 59: + goto tr5002 + } + default: + goto tr421 + } + case data[p] > 96: + switch { + case data[p] < 123: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + case data[p] > 127: + switch { + case data[p] > 218: + if 
235 <= data[p] && data[p] <= 236 { + goto tr5312 + } + case data[p] >= 196: + goto tr4806 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr5296: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5219 + st5219: + if p++; p == pe { + goto _test_eof5219 + } + st_case_5219: +//line segment_words_prod.go:140157 + switch data[p] { + case 181: + goto tr4499 + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 184: + if 176 <= data[p] && data[p] <= 183 { + goto tr148 + } + case data[p] > 185: + switch { + case data[p] > 191: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 186: + goto tr148 + } + default: + goto tr4499 + } + goto tr2984 +tr5297: +//line segment_words.rl:72 + + endPos = p + + goto st5220 + st5220: + if p++; p == pe { + goto _test_eof5220 + } + st_case_5220: +//line segment_words_prod.go:140235 + switch data[p] { + case 130: + goto tr4499 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + 
goto tr4803 + } + if 131 <= data[p] && data[p] <= 137 { + goto tr2984 + } + goto tr148 +tr5298: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5221 + st5221: + if p++; p == pe { + goto _test_eof5221 + } + st_case_5221: +//line segment_words_prod.go:140302 + switch data[p] { + case 190: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 145: + if 136 <= data[p] && data[p] <= 144 { + goto tr4499 + } + case data[p] > 191: + if 192 <= data[p] { + goto tr4499 + } + default: + goto tr2984 + } + goto tr148 +tr5299: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5222 + st5222: + if p++; p == pe { + goto _test_eof5222 + } + st_case_5222: +//line segment_words_prod.go:140378 + switch data[p] { + case 135: + goto tr2984 + case 179: + goto tr148 + case 180: + goto st142 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 132: + if 129 <= data[p] && data[p] <= 130 { + goto tr2984 + } + 
case data[p] > 133: + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 178 { + goto tr572 + } + case data[p] >= 144: + goto tr572 + } + default: + goto tr2984 + } + goto tr4499 +tr5300: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5223 + st5223: + if p++; p == pe { + goto _test_eof5223 + } + st_case_5223: +//line segment_words_prod.go:140463 + switch data[p] { + case 156: + goto tr2984 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 133 { + goto tr2984 + } + case data[p] > 154: + if 160 <= data[p] && data[p] <= 191 { + goto tr148 + } + default: + goto tr2984 + } + goto tr4499 +tr5301: +//line segment_words.rl:72 + + endPos = p + + goto st5224 + st5224: + if p++; p == pe { + goto _test_eof5224 + } + st_case_5224: +//line segment_words_prod.go:140534 + switch data[p] { + case 171: + goto tr421 + case 176: + goto tr2984 + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { 
+ case data[p] < 139: + if 128 <= data[p] && data[p] <= 138 { + goto tr148 + } + case data[p] > 159: + switch { + case data[p] > 169: + if 174 <= data[p] { + goto tr148 + } + case data[p] >= 160: + goto tr421 + } + default: + goto tr2984 + } + goto tr4499 +tr5302: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5225 + st5225: + if p++; p == pe { + goto _test_eof5225 + } + st_case_5225: +//line segment_words_prod.go:140617 + switch data[p] { + case 148: + goto tr4499 + case 158: + goto tr4499 + case 169: + goto tr4499 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 176: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr2984 + } + case data[p] >= 150: + goto tr2984 + } + case data[p] > 185: + switch { + case data[p] > 190: + if 192 <= data[p] { + goto tr4499 + } + case data[p] >= 189: + goto tr4499 + } + default: + goto tr421 + } + goto tr148 +tr5303: +//line segment_words.rl:72 + + endPos = p + + goto st5226 + st5226: + if p++; p == pe { + goto _test_eof5226 + } + st_case_5226: +//line segment_words_prod.go:140702 + switch data[p] { + case 144: + goto tr148 + case 194: + goto tr5317 + case 204: + goto tr5318 + case 205: + goto tr5319 + case 210: + goto tr5320 + case 214: + goto tr5321 + case 215: + goto tr5322 + case 216: + goto tr5323 + case 217: + goto tr5324 + case 219: + goto tr5325 + case 220: + goto tr5326 + case 221: + goto tr5327 + case 222: + goto tr5328 + case 223: + goto tr5329 + 
case 224: + goto tr5330 + case 225: + goto tr5331 + case 226: + goto tr5332 + case 227: + goto tr5333 + case 234: + goto tr5334 + case 239: + goto tr5335 + case 240: + goto tr5336 + case 243: + goto tr5337 + } + switch { + case data[p] < 146: + if 143 <= data[p] && data[p] <= 145 { + goto tr2984 + } + case data[p] > 175: + if 176 <= data[p] { + goto tr2984 + } + default: + goto tr148 + } + goto tr4499 +tr5319: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5227 + st5227: + if p++; p == pe { + goto _test_eof5227 + } + st_case_5227: +//line segment_words_prod.go:140778 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 176: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: 
+ goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr5320: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5228 + st5228: + if p++; p == pe { + goto _test_eof5228 + } + st_case_5228: +//line segment_words_prod.go:140901 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 131 <= data[p] && data[p] <= 137 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5321: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5229 + st5229: + if p++; p == pe { + goto _test_eof5229 + } + st_case_5229: +//line segment_words_prod.go:141015 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 
191: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 145 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5322: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5230 + st5230: + if p++; p == pe { + goto _test_eof5230 + } + st_case_5230: +//line segment_words_prod.go:141131 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 135: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + 
case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 129: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 196: + if 132 <= data[p] && data[p] <= 133 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr5323: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5231 + st5231: + if p++; p == pe { + goto _test_eof5231 + } + st_case_5231: +//line segment_words_prod.go:141251 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 156: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 
+ } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 133: + switch { + case data[p] < 196: + if 144 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr5324: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5232 + st5232: + if p++; p == pe { + goto _test_eof5232 + } + st_case_5232: +//line segment_words_prod.go:141371 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 176: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 139 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + 
default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5325: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5233 + st5233: + if p++; p == pe { + goto _test_eof5233 + } + st_case_5233: +//line segment_words_prod.go:141487 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 159: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 164: + switch { + case data[p] < 170: + if 167 <= data[p] && data[p] <= 168 { + goto tr1 + } + case data[p] > 173: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5326: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5234 + st5234: + if p++; p == pe { + goto 
_test_eof5234 + } + st_case_5234: +//line segment_words_prod.go:141615 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 143: + goto tr1 + case 145: + goto tr1 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 176: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5304: +//line segment_words.rl:72 + + endPos = p + + goto st5235 + st5235: + if p++; p == pe { + goto _test_eof5235 + } + st_case_5235: +//line segment_words_prod.go:141737 + switch data[p] { + case 194: + goto tr4783 + case 204: + goto tr4784 + case 205: + goto tr4785 + case 210: + goto tr4786 + case 214: + goto tr4787 + case 215: + goto tr4788 + case 216: + goto tr4789 + case 217: + goto tr4790 + case 219: + goto tr4791 + case 220: + 
goto tr4792 + case 221: + goto tr4793 + case 222: + goto tr4794 + case 223: + goto tr4795 + case 224: + goto tr4796 + case 225: + goto tr4797 + case 226: + goto tr4798 + case 227: + goto tr4799 + case 234: + goto tr4800 + case 239: + goto tr4801 + case 240: + goto tr4802 + case 243: + goto tr4803 + } + switch { + case data[p] > 140: + if 141 <= data[p] { + goto tr148 + } + case data[p] >= 139: + goto tr4499 + } + goto tr2984 +tr5305: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5236 + st5236: + if p++; p == pe { + goto _test_eof5236 + } + st_case_5236: +//line segment_words_prod.go:141807 + switch data[p] { + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 176: + if 178 <= data[p] { + goto tr4499 + } + case data[p] >= 166: + goto tr2984 + } + goto tr148 +tr5306: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5237 + st5237: + if p++; p == pe { + goto _test_eof5237 + } + st_case_5237: +//line segment_words_prod.go:141877 + switch data[p] { + case 186: + goto tr148 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto 
st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 138: + if 128 <= data[p] && data[p] <= 137 { + goto tr421 + } + case data[p] > 170: + switch { + case data[p] > 179: + if 180 <= data[p] && data[p] <= 181 { + goto tr148 + } + case data[p] >= 171: + goto tr2984 + } + default: + goto tr148 + } + goto tr4499 +tr5307: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5238 + st5238: + if p++; p == pe { + goto _test_eof5238 + } + st_case_5238: +//line segment_words_prod.go:141958 + switch data[p] { + case 160: + goto st3283 + case 161: + goto st3284 + case 162: + goto st168 + case 163: + goto st3285 + case 164: + goto st3286 + case 165: + goto st3287 + case 166: + goto st3288 + case 167: + goto st3289 + case 168: + goto st3290 + case 169: + goto st3291 + case 170: + goto st3292 + case 171: + goto st3293 + case 172: + goto st3294 + case 173: + goto st3295 + case 174: + goto st3296 + case 175: + goto st3297 + case 176: + goto st3298 + case 177: + goto st3299 + case 178: + goto st3300 + case 179: + goto st3301 + case 180: + goto st3302 + case 181: + goto st3303 + case 182: + goto st3304 + case 183: + goto st3305 + case 184: + goto st3306 + case 185: + goto st3307 + case 186: + goto st3308 + case 187: + goto st3309 + case 188: + goto st3310 + case 189: + goto st3311 + case 190: + goto st3312 + case 191: + goto st3313 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 
+tr5308: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5239 + st5239: + if p++; p == pe { + goto _test_eof5239 + } + st_case_5239: +//line segment_words_prod.go:142084 + switch data[p] { + case 128: + goto st3315 + case 129: + goto st3316 + case 130: + goto st3317 + case 131: + goto st202 + case 132: + goto st3268 + case 135: + goto st3319 + case 137: + goto st203 + case 138: + goto st204 + case 139: + goto st205 + case 140: + goto st206 + case 141: + goto st3320 + case 142: + goto st208 + case 143: + goto st209 + case 144: + goto st210 + case 153: + goto st211 + case 154: + goto st212 + case 155: + goto st213 + case 156: + goto st3321 + case 157: + goto st3322 + case 158: + goto st3323 + case 159: + goto st3324 + case 160: + goto st3325 + case 161: + goto st219 + case 162: + goto st3326 + case 163: + goto st221 + case 164: + goto st3327 + case 165: + goto st468 + case 167: + goto st469 + case 168: + goto st3328 + case 169: + goto st3329 + case 170: + goto st3330 + case 172: + goto st3331 + case 173: + goto st3332 + case 174: + goto st3333 + case 175: + goto st3334 + case 176: + goto st3335 + case 177: + goto st640 + case 179: + goto st3336 + case 181: + goto st145 + case 182: + goto st146 + case 183: + goto st3337 + case 188: + goto st234 + case 189: + goto st235 + case 190: + goto st236 + case 191: + goto st237 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] < 136: + if 133 <= data[p] && data[p] <= 134 { + goto st3318 + } 
+ case data[p] > 152: + switch { + case data[p] > 184: + if 185 <= data[p] && data[p] <= 187 { + goto st145 + } + case data[p] >= 180: + goto st147 + } + default: + goto st145 + } + goto tr4499 +tr5309: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5240 + st5240: + if p++; p == pe { + goto _test_eof5240 + } + st_case_5240: +//line segment_words_prod.go:142253 + switch data[p] { + case 128: + goto st3339 + case 129: + goto st3340 + case 130: + goto st241 + case 131: + goto st3341 + case 132: + goto st243 + case 133: + goto st244 + case 134: + goto st245 + case 146: + goto st246 + case 147: + goto st247 + case 176: + goto st248 + case 177: + goto st249 + case 178: + goto st145 + case 179: + goto st3342 + case 180: + goto st251 + case 181: + goto st3343 + case 182: + goto st253 + case 183: + goto st3344 + case 184: + goto st255 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5310: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5241 + st5241: + if p++; p == pe { + goto _test_eof5241 + } + st_case_5241: +//line segment_words_prod.go:142351 + switch data[p] { + case 128: + goto st3346 + case 130: + goto st3347 + case 132: + goto st3348 + case 133: + goto st3318 + case 134: + goto st3349 + case 136: + goto st3350 + case 137: + goto st3429 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + 
goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr5311: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5242 + st5242: + if p++; p == pe { + goto _test_eof5242 + } + st_case_5242: +//line segment_words_prod.go:142427 + switch data[p] { + case 128: + goto st147 + case 146: + goto st262 + case 147: + goto st263 + case 148: + goto st147 + case 152: + goto st654 + case 153: + goto st3501 + case 154: + goto st3502 + case 155: + goto st3503 + case 156: + goto st268 + case 158: + goto st269 + case 159: + goto st270 + case 160: + goto st3504 + case 161: + goto st272 + case 162: + goto st3505 + case 163: + goto st3506 + case 164: + goto st3507 + case 165: + goto st3508 + case 166: + goto st3509 + case 167: + goto st3510 + case 168: + goto st3511 + case 169: + goto st3512 + case 170: + goto st3513 + case 171: + goto st3514 + case 172: + goto st283 + case 173: + goto st284 + case 174: + goto st146 + case 175: + goto st3515 + case 176: + goto st3270 + case 194: + goto st4495 + case 204: + goto st4643 + case 205: + goto st4644 + case 210: + goto st4645 + case 214: + goto st4646 + case 215: + goto st4647 + case 216: + goto st4648 + case 217: + goto st4649 + case 219: + goto st4650 + case 220: + goto st4651 + case 221: + goto st4652 + case 222: + goto st4653 + case 223: + goto st4654 + case 224: + goto st4655 + case 225: + goto st4656 + case 226: + goto st4657 + case 227: + goto st4658 + case 234: + goto st4659 + case 239: + goto st4660 + case 240: + goto st4661 + case 243: + goto st4662 + } + switch { + case data[p] > 157: + if 177 <= data[p] { + goto st3318 + } + case data[p] 
>= 129: + goto st145 + } + goto tr4499 + st4656: + if p++; p == pe { + goto _test_eof4656 + } + st_case_4656: + switch data[p] { + case 128: + goto tr4242 + case 129: + goto tr4243 + case 130: + goto tr4244 + case 141: + goto tr4245 + case 156: + goto tr4246 + case 157: + goto tr4247 + case 158: + goto tr4248 + case 159: + goto tr4249 + case 160: + goto tr4250 + case 162: + goto tr4251 + case 164: + goto tr4252 + case 168: + goto tr4253 + case 169: + goto tr4254 + case 170: + goto tr4255 + case 172: + goto tr4256 + case 173: + goto tr4257 + case 174: + goto tr4258 + case 175: + goto tr4259 + case 176: + goto tr4260 + case 179: + goto tr4261 + case 183: + goto tr4262 + } + goto tr2984 +tr4242: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5243 + st5243: + if p++; p == pe { + goto _test_eof5243 + } + st_case_5243: +//line segment_words_prod.go:142603 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 
122: + switch { + case data[p] < 196: + if 171 <= data[p] && data[p] <= 190 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4243: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5244 + st5244: + if p++; p == pe { + goto _test_eof5244 + } + st_case_5244: +//line segment_words_prod.go:142717 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 158: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 150 <= data[p] && data[p] <= 153 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 160: + switch { + case data[p] < 177: + switch { + case data[p] > 164: + if 167 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] >= 162: + goto tr1 + } + case data[p] > 180: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto 
st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4244: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5245 + st5245: + if p++; p == pe { + goto _test_eof5245 + } + st_case_5245: +//line segment_words_prod.go:142850 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 143: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 130: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 154 <= data[p] && data[p] <= 157 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4245: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5246 + st5246: + if p++; p == pe { + goto _test_eof5246 + } + st_case_5246: +//line segment_words_prod.go:142970 + switch data[p] { + case 39: + goto st142 + 
case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 157 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4246: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5247 + st5247: + if p++; p == pe { + goto _test_eof5247 + } + st_case_5247: +//line segment_words_prod.go:143084 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto 
st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 146: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] < 196: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4247: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5248 + st5248: + if p++; p == pe { + goto _test_eof5248 + } + st_case_5248: +//line segment_words_prod.go:143202 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + 
goto st3568 + } + switch { + case data[p] < 146: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 147: + switch { + case data[p] < 196: + if 178 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4248: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5249 + st5249: + if p++; p == pe { + goto _test_eof5249 + } + st_case_5249: +//line segment_words_prod.go:143320 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: 
+ switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5312: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5250 +tr4494: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5250 + st5250: + if p++; p == pe { + goto _test_eof5250 + } + st_case_5250: +//line segment_words_prod.go:143458 + switch data[p] { + case 194: + goto st4495 + case 204: + goto st4643 + case 205: + goto st4644 + case 210: + goto st4645 + case 214: + goto st4646 + case 215: + goto st4647 + case 216: + goto st4648 + case 217: + goto st4649 + case 219: + goto st4650 + case 220: + goto st4651 + case 221: + goto st4652 + case 222: + goto st4653 + case 223: + goto st4654 + case 224: + goto st4655 + case 225: + goto st4656 + case 226: + goto st4657 + case 227: + goto st4658 + case 234: + goto st4659 + case 239: + goto st4660 + case 240: + goto st4661 + case 243: + goto st4662 + } + goto st3318 + st4657: + if p++; p == pe { + goto _test_eof4657 + } + st_case_4657: + switch data[p] { + case 128: + goto tr4263 + case 129: + goto tr4264 + case 131: + goto tr4265 + case 179: + goto tr4266 + case 181: + goto tr4267 + case 183: + goto tr4268 + } + goto tr2984 +tr4263: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5251 + st5251: + if p++; p == pe { + goto _test_eof5251 + } + st_case_5251: +//line segment_words_prod.go:143540 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + 
goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 140: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 196: + if 170 <= data[p] && data[p] <= 174 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4264: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5252 + st5252: + if p++; p == pe { + goto _test_eof5252 + } + st_case_5252: +//line segment_words_prod.go:143658 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: 
+ goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 160: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 164: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4265: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5253 + st5253: + if p++; p == pe { + goto _test_eof5253 + } + st_case_5253: +//line segment_words_prod.go:143776 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + 
switch { + case data[p] < 196: + if 144 <= data[p] && data[p] <= 176 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4266: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5254 + st5254: + if p++; p == pe { + goto _test_eof5254 + } + st_case_5254: +//line segment_words_prod.go:143890 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 175 <= data[p] && data[p] <= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4267: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5255 + st5255: + if p++; p == pe { + goto _test_eof5255 + } + st_case_5255: +//line 
segment_words_prod.go:144004 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 191: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4268: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5256 + st5256: + if p++; p == pe { + goto _test_eof5256 + } + st_case_5256: +//line segment_words_prod.go:144116 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + 
goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st4658: + if p++; p == pe { + goto _test_eof4658 + } + st_case_4658: + switch data[p] { + case 128: + goto tr4269 + case 130: + goto tr4270 + } + goto tr2984 +tr4269: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5257 + st5257: + if p++; p == pe { + goto _test_eof5257 + } + st_case_5257: +//line segment_words_prod.go:144242 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + 
goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 170 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4270: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5258 + st5258: + if p++; p == pe { + goto _test_eof5258 + } + st_case_5258: +//line segment_words_prod.go:144356 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 153 <= data[p] && data[p] <= 154 { + goto tr1 + } + case data[p] > 218: + if 235 <= 
data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st4659: + if p++; p == pe { + goto _test_eof4659 + } + st_case_4659: + switch data[p] { + case 153: + goto tr4271 + case 154: + goto tr4272 + case 155: + goto tr4273 + case 160: + goto tr4274 + case 162: + goto tr4275 + case 163: + goto tr4276 + case 164: + goto tr4277 + case 165: + goto tr4278 + case 166: + goto tr4279 + case 167: + goto tr4280 + case 168: + goto tr4281 + case 169: + goto tr4282 + case 170: + goto tr4283 + case 171: + goto tr4284 + case 175: + goto tr4285 + } + goto tr2984 +tr4271: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5259 + st5259: + if p++; p == pe { + goto _test_eof5259 + } + st_case_5259: +//line segment_words_prod.go:144508 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 175: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 178: + 
switch { + case data[p] < 196: + if 180 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4272: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5260 + st5260: + if p++; p == pe { + goto _test_eof5260 + } + st_case_5260: +//line segment_words_prod.go:144626 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 158 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4273: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5261 + st5261: + if p++; p == pe { + goto _test_eof5261 + } + st_case_5261: +//line 
segment_words_prod.go:144740 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4274: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5262 + st5262: + if p++; p == pe { + goto _test_eof5262 + } + st_case_5262: +//line segment_words_prod.go:144854 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 130: + goto tr1 + case 134: + goto tr1 + case 139: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 
210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 163 <= data[p] && data[p] <= 167 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4275: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5263 + st5263: + if p++; p == pe { + goto _test_eof5263 + } + st_case_5263: +//line segment_words_prod.go:144974 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 
237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 129 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5313: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5264 +tr4495: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5264 + st5264: + if p++; p == pe { + goto _test_eof5264 + } + st_case_5264: +//line segment_words_prod.go:145117 + switch data[p] { + case 158: + goto st3518 + case 159: + goto st3519 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 160 <= data[p] { + goto tr4499 + } + goto st3318 +tr5314: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5265 + st5265: + if p++; p == pe { + goto _test_eof5265 + } + st_case_5265: +//line 
segment_words_prod.go:145186 + switch data[p] { + case 172: + goto st3521 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st3522 + case 185: + goto st967 + case 187: + goto st3523 + case 188: + goto st969 + case 189: + goto st303 + case 190: + goto st3524 + case 191: + goto st3525 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 176 <= data[p] && data[p] <= 186 { + goto st145 + } + goto tr4499 +tr5315: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5266 + st5266: + if p++; p == pe { + goto _test_eof5266 + } + st_case_5266: +//line segment_words_prod.go:145281 + switch data[p] { + case 144: + goto st3527 + case 145: + goto st3533 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st3552 + case 155: + goto st3557 + case 157: + goto st3559 + case 158: + goto st3566 + case 159: + goto st403 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + 
goto st136 + } + goto tr4499 +tr5316: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5267 + st5267: + if p++; p == pe { + goto _test_eof5267 + } + st_case_5267: +//line segment_words_prod.go:145363 + switch data[p] { + case 160: + goto st3569 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + goto tr4499 +tr4276: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5268 + st5268: + if p++; p == pe { + goto _test_eof5268 + } + st_case_5268: +//line segment_words_prod.go:145427 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 178: 
+ switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 133 <= data[p] && data[p] <= 159 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4277: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5269 + st5269: + if p++; p == pe { + goto _test_eof5269 + } + st_case_5269: +//line segment_words_prod.go:145555 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 173 { + goto tr1 + } + case 
data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4278: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5270 + st5270: + if p++; p == pe { + goto _test_eof5270 + } + st_case_5270: +//line segment_words_prod.go:145669 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 135 <= data[p] && data[p] <= 147 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4279: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5271 + st5271: + if p++; p == pe { + goto _test_eof5271 + } + st_case_5271: +//line segment_words_prod.go:145783 + switch data[p] { + case 39: + goto st142 + case 46: + goto 
st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 179: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4280: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5272 + st5272: + if p++; p == pe { + goto _test_eof5272 + } + st_case_5272: +//line segment_words_prod.go:145911 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 165: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto 
st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr1 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 59: + goto tr1 + } + default: + goto tr2646 + } + case data[p] > 96: + switch { + case data[p] < 123: + if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + case data[p] > 128: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4281: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5273 + st5273: + if p++; p == pe { + goto _test_eof5273 + } + st_case_5273: +//line segment_words_prod.go:146041 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 
+ case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 169 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4282: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5274 + st5274: + if p++; p == pe { + goto _test_eof5274 + } + st_case_5274: +//line segment_words_prod.go:146155 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 131: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 140: + switch { + case data[p] < 65: + if 48 <= data[p] 
&& data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 187 <= data[p] && data[p] <= 189 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4283: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5275 + st5275: + if p++; p == pe { + goto _test_eof5275 + } + st_case_5275: +//line segment_words_prod.go:146275 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 176: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 178: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 180: + switch { + case data[p] < 190: + if 183 <= data[p] && data[p] <= 184 { + goto tr1 + } + case data[p] > 191: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + 
} + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4284: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5276 + st5276: + if p++; p == pe { + goto _test_eof5276 + } + st_case_5276: +//line segment_words_prod.go:146400 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 129: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 171: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 175: + switch { + case data[p] < 196: + if 181 <= data[p] && data[p] <= 182 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4285: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5277 + st5277: + if p++; p == pe { + goto _test_eof5277 + } + st_case_5277: +//line segment_words_prod.go:146520 + switch 
data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 163: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] < 196: + if 172 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 + st4660: + if p++; p == pe { + goto _test_eof4660 + } + st_case_4660: + switch data[p] { + case 172: + goto tr4286 + case 184: + goto tr4287 + case 187: + goto tr4267 + case 190: + goto tr4272 + case 191: + goto tr4288 + } + goto tr2984 +tr4286: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5278 + st5278: + if p++; p == pe { + goto _test_eof5278 + } + st_case_5278: +//line segment_words_prod.go:146656 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 158: + goto tr1 + case 194: + 
goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4287: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5279 + st5279: + if p++; p == pe { + goto _test_eof5279 + } + st_case_5279: +//line segment_words_prod.go:146768 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + 
goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 175 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4288: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5280 + st5280: + if p++; p == pe { + goto _test_eof5280 + } + st_case_5280: +//line segment_words_prod.go:146886 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] 
>= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 185 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st4661: + if p++; p == pe { + goto _test_eof4661 + } + st_case_4661: + switch data[p] { + case 144: + goto tr4289 + case 145: + goto tr4290 + case 150: + goto tr4291 + case 155: + goto tr4292 + case 157: + goto tr4293 + case 158: + goto tr4294 + } + goto tr2984 +tr4289: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5281 + st5281: + if p++; p == pe { + goto _test_eof5281 + } + st_case_5281: +//line segment_words_prod.go:147020 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 135: + goto st96 + case 139: + goto st97 + case 141: + goto st98 + case 168: + goto st99 + case 171: + goto st100 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case 
data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4290: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5282 + st5282: + if p++; p == pe { + goto _test_eof5282 + } + st_case_5282: +//line segment_words_prod.go:147140 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st102 + case 129: + goto st103 + case 130: + goto st104 + case 132: + goto st105 + case 133: + goto st106 + case 134: + goto st107 + case 135: + goto st108 + case 136: + goto st109 + case 139: + goto st110 + case 140: + goto st111 + case 141: + goto st112 + case 146: + goto st113 + case 147: + goto st114 + case 150: + goto st115 + case 151: + goto st116 + case 152: + goto st113 + case 153: + goto st117 + case 154: + goto st118 + case 156: + goto st119 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && 
data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4291: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5283 + st5283: + if p++; p == pe { + goto _test_eof5283 + } + st_case_5283: +//line segment_words_prod.go:147288 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 171: + goto st121 + case 172: + goto st122 + case 189: + goto st123 + case 190: + goto st124 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4292: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5284 + st5284: + if p++; p == pe { + goto _test_eof5284 + } + st_case_5284: +//line segment_words_prod.go:147406 + switch data[p] { + case 39: + goto st142 + case 46: + 
goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 178: + goto st126 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4293: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5285 + st5285: + if p++; p == pe { + goto _test_eof5285 + } + st_case_5285: +//line segment_words_prod.go:147518 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 133: + goto st128 + case 134: + goto st129 + case 137: + goto st130 + case 168: + goto st131 + case 169: + goto st132 + case 170: + goto st133 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + 
case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4294: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5286 + st5286: + if p++; p == pe { + goto _test_eof5286 + } + st_case_5286: +//line segment_words_prod.go:147640 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 163: + goto st135 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto 
st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 + st4662: + if p++; p == pe { + goto _test_eof4662 + } + st_case_4662: + if data[p] == 160 { + goto tr4295 + } + goto tr2984 +tr4295: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5287 + st5287: + if p++; p == pe { + goto _test_eof5287 + } + st_case_5287: +//line segment_words_prod.go:147761 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st138 + case 129: + goto st139 + case 132: + goto st1 + case 135: + goto st2 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 133 <= data[p] && data[p] <= 134 { + goto st140 + } + case 
data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4249: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5288 + st5288: + if p++; p == pe { + goto _test_eof5288 + } + st_case_5288: +//line segment_words_prod.go:147883 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 158: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 148 <= data[p] && data[p] <= 156 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4250: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line 
segment_words.rl:89 +act = 2; + goto st5289 + st5289: + if p++; p == pe { + goto _test_eof5289 + } + st_case_5289: +//line segment_words_prod.go:148011 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 139 <= data[p] && data[p] <= 142 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4251: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5290 + st5290: + if p++; p == pe { + goto _test_eof5290 + } + st_case_5290: +//line segment_words_prod.go:148125 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 169: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 
205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4252: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5291 + st5291: + if p++; p == pe { + goto _test_eof5291 + } + st_case_5291: +//line segment_words_prod.go:148237 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + 
case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 160: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 171: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 187 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4253: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5292 + st5292: + if p++; p == pe { + goto _test_eof5292 + } + st_case_5292: +//line segment_words_prod.go:148355 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 151 <= data[p] && data[p] <= 155 { + goto tr1 + } + case 
data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4254: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5293 + st5293: + if p++; p == pe { + goto _test_eof5293 + } + st_case_5293: +//line segment_words_prod.go:148469 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 191: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 149: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 158: + switch { + case data[p] < 196: + if 160 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4255: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5294 + st5294: + if p++; p == pe { + goto _test_eof5294 + } + st_case_5294: +//line 
segment_words_prod.go:148589 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 190 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4256: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5295 + st5295: + if p++; p == pe { + goto _test_eof5295 + } + st_case_5295: +//line segment_words_prod.go:148703 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto 
tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4257: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5296 + st5296: + if p++; p == pe { + goto _test_eof5296 + } + st_case_5296: +//line segment_words_prod.go:148831 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + 
goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 133 <= data[p] && data[p] <= 170 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4258: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5297 + st5297: + if p++; p == pe { + goto _test_eof5297 + } + st_case_5297: +//line segment_words_prod.go:148959 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case 
data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 196: + if 161 <= data[p] && data[p] <= 173 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4259: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5298 + st5298: + if p++; p == pe { + goto _test_eof5298 + } + st_case_5298: +//line segment_words_prod.go:149077 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + 
} + goto tr5002 +tr4260: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5299 + st5299: + if p++; p == pe { + goto _test_eof5299 + } + st_case_5299: +//line segment_words_prod.go:149191 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 164 <= data[p] && data[p] <= 183 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4261: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5300 + st5300: + if p++; p == pe { + goto _test_eof5300 + } + st_case_5300: +//line segment_words_prod.go:149305 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 173: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + 
case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 148: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 144 <= data[p] && data[p] <= 146 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 184: + if 178 <= data[p] && data[p] <= 180 { + goto tr1 + } + case data[p] > 185: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4262: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5301 + st5301: + if p++; p == pe { + goto _test_eof5301 + } + st_case_5301: +//line segment_words_prod.go:149435 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + 
goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 181: + switch { + case data[p] < 196: + if 188 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr5327: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5302 + st5302: + if p++; p == pe { + goto _test_eof5302 + } + st_case_5302: +//line segment_words_prod.go:149553 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + 
case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 91: + switch { + case data[p] < 48: + if data[p] <= 47 { + goto tr1 + } + case data[p] > 57: + switch { + case data[p] > 64: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 59: + goto tr1 + } + default: + goto tr2646 + } + case data[p] > 96: + switch { + case data[p] < 123: + if 97 <= data[p] && data[p] <= 122 { + goto tr2008 + } + case data[p] > 138: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr5328: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5303 + st5303: + if p++; p == pe { + goto _test_eof5303 + } + st_case_5303: +//line segment_words_prod.go:149681 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + 
} + case data[p] > 122: + switch { + case data[p] < 196: + if 166 <= data[p] && data[p] <= 176 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5329: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5304 + st5304: + if p++; p == pe { + goto _test_eof5304 + } + st_case_5304: +//line segment_words_prod.go:149795 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 171 <= data[p] && data[p] <= 179 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5330: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5305 + st5305: + if p++; p == pe { + goto _test_eof5305 + } 
+ st_case_5305: +//line segment_words_prod.go:149909 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 160: + goto st14 + case 161: + goto st15 + case 163: + goto st16 + case 164: + goto st17 + case 165: + goto st18 + case 167: + goto st20 + case 169: + goto st21 + case 171: + goto st22 + case 173: + goto st24 + case 174: + goto st25 + case 175: + goto st26 + case 176: + goto st27 + case 177: + goto st28 + case 179: + goto st29 + case 180: + goto st30 + case 181: + goto st31 + case 182: + goto st32 + case 183: + goto st33 + case 184: + goto st34 + case 185: + goto st35 + case 186: + goto st36 + case 187: + goto st37 + case 188: + goto st38 + case 189: + goto st39 + case 190: + goto st40 + case 191: + goto st41 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 166: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 170: + switch { + case data[p] < 196: + if 172 <= data[p] && data[p] <= 178 { + goto st23 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + 
goto st145 + } + default: + goto st19 + } + goto tr5002 +tr5331: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5306 + st5306: + if p++; p == pe { + goto _test_eof5306 + } + st_case_5306: +//line segment_words_prod.go:150079 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st43 + case 129: + goto st44 + case 130: + goto st45 + case 141: + goto st46 + case 156: + goto st47 + case 157: + goto st48 + case 158: + goto st49 + case 159: + goto st50 + case 160: + goto st51 + case 162: + goto st52 + case 164: + goto st53 + case 168: + goto st54 + case 169: + goto st55 + case 170: + goto st56 + case 172: + goto st57 + case 173: + goto st58 + case 174: + goto st59 + case 175: + goto st60 + case 176: + goto st61 + case 179: + goto st62 + case 183: + goto st63 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 
+ } + default: + goto tr148 + } + goto tr5002 +tr5332: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5307 + st5307: + if p++; p == pe { + goto _test_eof5307 + } + st_case_5307: +//line segment_words_prod.go:150231 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 128: + goto st65 + case 129: + goto st66 + case 131: + goto st67 + case 179: + goto st68 + case 181: + goto st69 + case 183: + goto st70 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5333: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5308 + st5308: + if p++; p == pe { + goto _test_eof5308 + } + st_case_5308: +//line segment_words_prod.go:150353 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + 
goto st142 + case 95: + goto tr571 + case 128: + goto st72 + case 130: + goto st73 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5334: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5309 + st5309: + if p++; p == pe { + goto _test_eof5309 + } + st_case_5309: +//line segment_words_prod.go:150467 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 153: + goto st75 + case 154: + goto st76 + case 155: + goto st77 + case 160: + goto st78 + case 162: + goto st79 + case 163: + goto st80 + case 164: + goto st81 + case 165: + goto st82 + case 166: + goto st83 + case 167: + goto st84 + case 168: + goto st85 + case 169: + goto st86 + case 170: + goto st87 + case 171: + goto st88 + case 175: + goto st89 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto 
st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5335: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5310 + st5310: + if p++; p == pe { + goto _test_eof5310 + } + st_case_5310: +//line segment_words_prod.go:150607 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 172: + goto st91 + case 184: + goto st92 + case 187: + goto st69 + case 190: + goto st76 + case 191: + goto st93 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + 
case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5336: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5311 + st5311: + if p++; p == pe { + goto _test_eof5311 + } + st_case_5311: +//line segment_words_prod.go:150727 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 144: + goto st95 + case 145: + goto st101 + case 150: + goto st120 + case 155: + goto st125 + case 157: + goto st127 + case 158: + goto st134 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case 
data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr5337: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5312 + st5312: + if p++; p == pe { + goto _test_eof5312 + } + st_case_5312: +//line segment_words_prod.go:150849 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 160: + goto st137 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4217: +//line segment_words.rl:72 + + endPos = p + + goto st5313 + st5313: + if p++; p == pe { + goto _test_eof5313 + } + st_case_5313: +//line segment_words_prod.go:150956 + switch 
data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 189: + goto tr5002 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 48: + goto tr2646 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 132 <= data[p] && data[p] <= 185 { + goto tr5002 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto tr5312 + } + default: + goto tr4806 + } + default: + goto tr2008 + } + goto tr1 +tr4218: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5314 + st5314: + if p++; p == pe { + goto _test_eof5314 + } + st_case_5314: +//line segment_words_prod.go:151072 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 144: + goto tr5002 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + 
goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 164: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 152 <= data[p] && data[p] <= 161 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4219: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5315 + st5315: + if p++; p == pe { + goto _test_eof5315 + } + st_case_5315: +//line segment_words_prod.go:151202 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 188: + goto tr1 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 
+ case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 129 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4220: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5316 + st5316: + if p++; p == pe { + goto _test_eof5316 + } + st_case_5316: +//line segment_words_prod.go:151332 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto 
st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 142: + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr2008 + } + case data[p] >= 48: + goto tr2646 + } + case data[p] > 122: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr5002 + } + case data[p] >= 133: + goto tr5002 + } + default: + goto tr2008 + } + case data[p] > 150: + switch { + case data[p] < 196: + switch { + case data[p] > 161: + if 164 <= data[p] && data[p] <= 193 { + goto tr5002 + } + case data[p] >= 152: + goto tr5002 + } + case data[p] > 218: + switch { + case data[p] < 235: + if 228 <= data[p] && data[p] <= 233 { + goto tr5002 + } + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + default: + goto st3516 + } + default: + goto st145 + } + default: + goto tr5002 + } + goto tr1 +tr4221: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5317 + st5317: + if p++; p == pe { + goto _test_eof5317 + } + st_case_5317: +//line segment_words_prod.go:151474 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 145: + goto tr1 + case 181: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + 
case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] < 59: + switch { + case data[p] > 47: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + default: + goto tr1 + } + case data[p] > 64: + switch { + case data[p] > 90: + if 91 <= data[p] && data[p] <= 96 { + goto tr1 + } + case data[p] >= 65: + goto tr2008 + } + default: + goto tr1 + } + case data[p] > 122: + switch { + case data[p] < 139: + switch { + case data[p] > 130: + if 135 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] >= 123: + goto tr1 + } + case data[p] > 141: + switch { + case data[p] < 196: + if 176 <= data[p] && data[p] <= 177 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + default: + goto tr2008 + } + goto tr5002 +tr4222: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5318 + st5318: + if p++; p == pe { + goto _test_eof5318 + } + st_case_5318: +//line segment_words_prod.go:151620 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 134: + goto tr5002 + case 138: + goto tr5002 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto 
st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 164: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 142 <= data[p] && data[p] <= 161 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4223: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5319 + st5319: + if p++; p == pe { + goto _test_eof5319 + } + st_case_5319: +//line segment_words_prod.go:151752 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 188: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 129: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && 
data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 131: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4224: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5320 + st5320: + if p++; p == pe { + goto _test_eof5320 + } + st_case_5320: +//line segment_words_prod.go:151872 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 135: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 136: + switch { + case data[p] < 162: + switch { + case data[p] > 141: + if 150 <= data[p] && data[p] <= 151 { + goto tr1 + } + case data[p] >= 139: + goto tr1 + } + case data[p] > 163: + switch { + case data[p] > 218: + if 235 <= 
data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4225: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5321 + st5321: + if p++; p == pe { + goto _test_eof5321 + } + st_case_5321: +//line segment_words_prod.go:152005 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 130: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4226: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5322 + st5322: + if p++; p == pe { + goto _test_eof5322 + } + st_case_5322: +//line segment_words_prod.go:152121 + switch data[p] { + case 39: + 
goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 151: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 128: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 130: + switch { + case data[p] < 138: + if 134 <= data[p] && data[p] <= 136 { + goto tr1 + } + case data[p] > 141: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4227: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5323 + st5323: + if p++; p == pe { + goto _test_eof5323 + } + st_case_5323: +//line segment_words_prod.go:152246 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 
+ case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4228: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5324 + st5324: + if p++; p == pe { + goto _test_eof5324 + } + st_case_5324: +//line segment_words_prod.go:152374 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 133: + goto tr5002 + case 137: + goto tr5002 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto 
st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 151: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 142 <= data[p] && data[p] <= 148 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 161: + switch { + case data[p] < 228: + switch { + case data[p] > 193: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] >= 164: + goto tr5002 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4229: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5325 + st5325: + if p++; p == pe { + goto _test_eof5325 + } + st_case_5325: +//line segment_words_prod.go:152511 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: 
+ goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 134: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 136: + switch { + case data[p] < 162: + switch { + case data[p] > 141: + if 149 <= data[p] && data[p] <= 150 { + goto tr1 + } + case data[p] >= 138: + goto tr1 + } + case data[p] > 163: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4230: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5326 + st5326: + if p++; p == pe { + goto _test_eof5326 + } + st_case_5326: +//line segment_words_prod.go:152644 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto tr5294 + case 195: + goto tr4805 + case 198: + goto tr4807 + case 199: + goto tr4808 + case 203: + goto tr4809 + case 204: + goto tr5295 + case 205: + goto tr5296 + case 206: + goto tr4811 + case 207: + goto tr4812 + case 210: + goto tr5297 + case 212: + goto tr4814 + case 213: + goto tr4815 + case 214: + goto tr5298 + case 215: + goto tr5299 + case 216: + goto tr5300 + case 217: + goto tr5301 + case 219: + goto tr5302 + case 220: + goto tr5303 + case 221: + goto tr5304 + case 222: + goto tr5305 + case 223: + goto tr5306 + case 224: + goto tr5307 + case 225: + goto tr5308 + case 226: + goto tr5309 + case 227: + goto tr5310 + case 234: + goto tr5311 + case 237: + goto tr5313 + case 239: + goto tr5314 + case 240: + goto 
tr5315 + case 243: + goto tr5316 + } + switch { + case data[p] < 190: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 129 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 193: + switch { + case data[p] < 228: + if 196 <= data[p] && data[p] <= 218 { + goto tr4806 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr1 + } + case data[p] >= 235: + goto tr5312 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4231: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5327 + st5327: + if p++; p == pe { + goto _test_eof5327 + } + st_case_5327: +//line segment_words_prod.go:152772 + switch data[p] { + case 39: + goto tr2518 + case 46: + goto tr2518 + case 58: + goto tr2518 + case 95: + goto tr2774 + case 133: + goto tr5002 + case 137: + goto tr5002 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 152: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr2646 + } + case data[p] > 90: + switch { + case 
data[p] > 122: + if 142 <= data[p] && data[p] <= 150 { + goto tr5002 + } + case data[p] >= 97: + goto tr2008 + } + default: + goto tr2008 + } + case data[p] > 161: + switch { + case data[p] < 228: + switch { + case data[p] > 193: + if 196 <= data[p] && data[p] <= 218 { + goto st145 + } + case data[p] >= 164: + goto tr5002 + } + case data[p] > 233: + switch { + case data[p] > 236: + if 238 <= data[p] { + goto tr5002 + } + case data[p] >= 235: + goto st3516 + } + default: + goto tr5002 + } + default: + goto tr5002 + } + goto tr1 +tr4232: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5328 + st5328: + if p++; p == pe { + goto _test_eof5328 + } + st_case_5328: +//line segment_words_prod.go:152909 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 130 <= data[p] && data[p] <= 131 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + 
goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4233: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5329 + st5329: + if p++; p == pe { + goto _test_eof5329 + } + st_case_5329: +//line segment_words_prod.go:153023 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 138: + goto tr1 + case 150: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 143: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 148: + switch { + case data[p] < 178: + if 152 <= data[p] && data[p] <= 159 { + goto tr1 + } + case data[p] > 179: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4234: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5330 + st5330: + if p++; p == pe { + goto 
_test_eof5330 + } + st_case_5330: +//line segment_words_prod.go:153150 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 177: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 180 <= data[p] && data[p] <= 186 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4235: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5331 + st5331: + if p++; p == pe { + goto _test_eof5331 + } + st_case_5331: +//line segment_words_prod.go:153266 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: 
+ goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 135 <= data[p] && data[p] <= 142 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4236: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5332 + st5332: + if p++; p == pe { + goto _test_eof5332 + } + st_case_5332: +//line segment_words_prod.go:153380 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 177: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto 
st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 180: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 185: + switch { + case data[p] < 196: + if 187 <= data[p] && data[p] <= 188 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4237: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5333 + st5333: + if p++; p == pe { + goto _test_eof5333 + } + st_case_5333: +//line segment_words_prod.go:153500 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 136 <= data[p] && data[p] <= 141 { + goto tr1 + } + 
case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4238: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5334 + st5334: + if p++; p == pe { + goto _test_eof5334 + } + st_case_5334: +//line segment_words_prod.go:153614 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 181: + goto tr1 + case 183: + goto tr1 + case 185: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 152: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + if 97 <= data[p] && data[p] <= 122 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 153: + switch { + case data[p] < 196: + if 190 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr1 + } + goto tr5002 +tr4239: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5335 + st5335: + if p++; p == pe { + 
goto _test_eof5335 + } + st_case_5335: +//line segment_words_prod.go:153738 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] < 196: + if 177 <= data[p] && data[p] <= 191 { + goto tr1 + } + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + default: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4240: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5336 + st5336: + if p++; p == pe { + goto _test_eof5336 + } + st_case_5336: +//line segment_words_prod.go:153852 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + 
case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 134: + switch { + case data[p] < 65: + if 48 <= data[p] && data[p] <= 57 { + goto tr421 + } + case data[p] > 90: + switch { + case data[p] > 122: + if 128 <= data[p] && data[p] <= 132 { + goto tr1 + } + case data[p] >= 97: + goto tr148 + } + default: + goto tr148 + } + case data[p] > 135: + switch { + case data[p] < 153: + if 141 <= data[p] && data[p] <= 151 { + goto tr1 + } + case data[p] > 188: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr1 + } + default: + goto tr1 + } + goto tr5002 +tr4241: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:89 +act = 2; + goto st5337 + st5337: + if p++; p == pe { + goto _test_eof5337 + } + st_case_5337: +//line segment_words_prod.go:153980 + switch data[p] { + case 39: + goto st142 + case 46: + goto st142 + case 58: + goto st142 + case 95: + goto tr571 + case 134: + goto tr1 + case 194: + goto st3269 + case 195: + goto st144 + case 198: + goto st146 + case 199: + goto st147 + case 203: + goto st870 + case 204: + goto st3270 + case 205: + goto st3271 + case 206: + goto st873 + case 207: + goto st152 + case 210: + goto st3272 + case 212: + goto st154 + case 213: + goto st155 + case 214: + goto st3273 + case 215: + goto st3274 + case 216: + goto st3275 + case 217: + goto st3276 + case 219: + goto st3277 + case 220: + goto st3278 + case 221: + 
goto st3279 + case 222: + goto st3280 + case 223: + goto st3281 + case 224: + goto st3282 + case 225: + goto st3314 + case 226: + goto st3338 + case 227: + goto st3345 + case 234: + goto st3500 + case 237: + goto st3517 + case 239: + goto st3520 + case 240: + goto st3526 + case 243: + goto st3568 + } + switch { + case data[p] < 97: + switch { + case data[p] > 57: + if 65 <= data[p] && data[p] <= 90 { + goto tr148 + } + case data[p] >= 48: + goto tr421 + } + case data[p] > 122: + switch { + case data[p] > 218: + if 235 <= data[p] && data[p] <= 236 { + goto st3516 + } + case data[p] >= 196: + goto st145 + } + default: + goto tr148 + } + goto tr5002 +tr4496: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5338 + st5338: + if p++; p == pe { + goto _test_eof5338 + } + st_case_5338: +//line segment_words_prod.go:154096 + switch data[p] { + case 164: + goto st3595 + case 169: + goto st4663 + case 171: + goto st4664 + case 172: + goto st4665 + case 173: + goto st672 + case 174: + goto st293 + case 175: + goto st294 + case 180: + goto st295 + case 181: + goto st296 + case 182: + goto st297 + case 183: + goto st298 + case 184: + goto st4666 + case 185: + goto st2436 + case 187: + goto st4667 + case 188: + goto st2438 + case 189: + goto st4668 + case 190: + goto st4669 + case 191: + goto st4670 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + switch { + case data[p] > 170: + if 176 <= data[p] && data[p] <= 186 { + 
goto st145 + } + case data[p] >= 165: + goto st3734 + } + goto tr4499 + st4663: + if p++; p == pe { + goto _test_eof4663 + } + st_case_4663: + if 174 <= data[p] && data[p] <= 175 { + goto tr0 + } + goto tr3250 + st4664: + if p++; p == pe { + goto _test_eof4664 + } + st_case_4664: + if 154 <= data[p] { + goto tr0 + } + goto tr3250 + st4665: + if p++; p == pe { + goto _test_eof4665 + } + st_case_4665: + switch data[p] { + case 158: + goto tr2395 + case 190: + goto tr572 + } + switch { + case data[p] < 157: + switch { + case data[p] > 134: + if 147 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] >= 128: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] > 182: + if 184 <= data[p] && data[p] <= 188 { + goto tr572 + } + case data[p] >= 170: + goto tr572 + } + default: + goto tr572 + } + goto tr0 + st4666: + if p++; p == pe { + goto _test_eof4666 + } + st_case_4666: + switch { + case data[p] < 160: + if 128 <= data[p] && data[p] <= 143 { + goto tr2395 + } + case data[p] > 175: + if 179 <= data[p] && data[p] <= 180 { + goto tr2136 + } + default: + goto tr2395 + } + goto tr0 + st4667: + if p++; p == pe { + goto _test_eof4667 + } + st_case_4667: + if data[p] == 191 { + goto tr2395 + } + if 189 <= data[p] { + goto tr0 + } + goto tr148 + st4668: + if p++; p == pe { + goto _test_eof4668 + } + st_case_4668: + switch { + case data[p] > 154: + if 166 <= data[p] { + goto tr3376 + } + case data[p] >= 129: + goto tr148 + } + goto tr0 + st4669: + if p++; p == pe { + goto _test_eof4669 + } + st_case_4669: + switch { + case data[p] < 160: + if 158 <= data[p] && data[p] <= 159 { + goto tr2395 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr0 + } + default: + goto tr2984 + } + goto tr3376 + st4670: + if p++; p == pe { + goto _test_eof4670 + } + st_case_4670: + switch { + case data[p] < 146: + switch { + case data[p] > 135: + if 138 <= data[p] && data[p] <= 143 { + goto tr2984 + } + case data[p] >= 130: + goto tr2984 + } + case data[p] > 151: + switch 
{ + case data[p] > 156: + if 185 <= data[p] && data[p] <= 187 { + goto tr2395 + } + case data[p] >= 154: + goto tr2984 + } + default: + goto tr2984 + } + goto tr0 +tr4497: +//line NONE:1 +te = p+1 + +//line segment_words.rl:68 + + startPos = p + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 13; + goto st5339 + st5339: + if p++; p == pe { + goto _test_eof5339 + } + st_case_5339: +//line segment_words_prod.go:154348 + switch data[p] { + case 144: + goto st4671 + case 145: + goto st4676 + case 146: + goto st362 + case 147: + goto st366 + case 148: + goto st367 + case 150: + goto st4695 + case 155: + goto st4700 + case 157: + goto st4703 + case 158: + goto st4704 + case 159: + goto st4706 + case 160: + goto st4851 + case 170: + goto st4853 + case 171: + goto st4855 + case 172: + goto st4858 + case 175: + goto st4860 + case 194: + goto st0 + case 204: + goto st1 + case 205: + goto st2 + case 210: + goto st3 + case 214: + goto st4 + case 215: + goto st5 + case 216: + goto st6 + case 217: + goto st7 + case 219: + goto st8 + case 220: + goto st9 + case 221: + goto st10 + case 222: + goto st11 + case 223: + goto st12 + case 224: + goto st13 + case 225: + goto st42 + case 226: + goto st64 + case 227: + goto st71 + case 234: + goto st74 + case 239: + goto st90 + case 240: + goto st94 + case 243: + goto st136 + } + if 161 <= data[p] && data[p] <= 169 { + goto st4852 + } + goto tr4499 + st4671: + if p++; p == pe { + goto _test_eof4671 + } + st_case_4671: + switch data[p] { + case 128: + goto st308 + case 129: + goto st309 + case 130: + goto st147 + case 131: + goto st310 + case 133: + goto st311 + case 135: + goto st2732 + case 138: + goto st313 + case 139: + goto st4672 + case 140: + goto st315 + case 141: + goto st4673 + case 142: + goto st317 + case 143: + goto st318 + case 144: + goto st147 + case 145: + goto st145 + case 146: + goto st1702 + case 148: + goto st320 + case 149: + goto st321 + case 152: + goto st147 + case 156: + goto st322 
+ case 157: + goto st323 + case 160: + goto st324 + case 161: + goto st325 + case 162: + goto st326 + case 163: + goto st327 + case 164: + goto st328 + case 166: + goto st329 + case 168: + goto st4674 + case 169: + goto st331 + case 170: + goto st332 + case 171: + goto st4675 + case 172: + goto st334 + case 173: + goto st335 + case 174: + goto st336 + case 176: + goto st147 + case 177: + goto st245 + } + switch { + case data[p] > 155: + if 178 <= data[p] && data[p] <= 179 { + goto st337 + } + case data[p] >= 153: + goto st145 + } + goto tr0 + st4672: + if p++; p == pe { + goto _test_eof4672 + } + st_case_4672: + if data[p] == 160 { + goto tr2395 + } + if 145 <= data[p] { + goto tr0 + } + goto tr148 + st4673: + if p++; p == pe { + goto _test_eof4673 + } + st_case_4673: + switch { + case data[p] < 182: + if 139 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 186: + if 187 <= data[p] { + goto tr0 + } + default: + goto tr2395 + } + goto tr148 + st4674: + if p++; p == pe { + goto _test_eof4674 + } + st_case_4674: + switch data[p] { + case 128: + goto tr148 + case 191: + goto tr2395 + } + switch { + case data[p] < 144: + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 134: + if 140 <= data[p] && data[p] <= 143 { + goto tr2395 + } + default: + goto tr2395 + } + case data[p] > 147: + switch { + case data[p] < 153: + if 149 <= data[p] && data[p] <= 151 { + goto tr148 + } + case data[p] > 179: + if 184 <= data[p] && data[p] <= 186 { + goto tr2395 + } + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st4675: + if p++; p == pe { + goto _test_eof4675 + } + st_case_4675: + switch { + case data[p] < 137: + if 128 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 164: + if 165 <= data[p] && data[p] <= 166 { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4676: + if p++; p == pe { + goto _test_eof4676 + } + st_case_4676: + switch data[p] { + case 128: + goto 
st4677 + case 129: + goto st4678 + case 130: + goto st4679 + case 131: + goto st1709 + case 132: + goto st4680 + case 133: + goto st4681 + case 134: + goto st4682 + case 135: + goto st4683 + case 136: + goto st4684 + case 138: + goto st348 + case 139: + goto st4685 + case 140: + goto st4686 + case 141: + goto st4687 + case 146: + goto st4688 + case 147: + goto st4689 + case 150: + goto st4690 + case 151: + goto st4691 + case 152: + goto st4688 + case 153: + goto st4692 + case 154: + goto st4693 + case 155: + goto st1724 + case 156: + goto st4694 + case 162: + goto st359 + case 163: + goto st1726 + case 171: + goto st361 + } + goto tr0 + st4677: + if p++; p == pe { + goto _test_eof4677 + } + st_case_4677: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 183: + if 184 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4678: + if p++; p == pe { + goto _test_eof4678 + } + st_case_4678: + switch { + case data[p] < 166: + if 135 <= data[p] && data[p] <= 165 { + goto tr0 + } + case data[p] > 175: + if 176 <= data[p] && data[p] <= 190 { + goto tr0 + } + default: + goto tr126 + } + goto tr2395 + st4679: + if p++; p == pe { + goto _test_eof4679 + } + st_case_4679: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr148 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr0 + } + default: + goto tr0 + } + goto tr2395 + st4680: + if p++; p == pe { + goto _test_eof4680 + } + st_case_4680: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 166: + switch { + case data[p] > 180: + if 182 <= data[p] && data[p] <= 191 { + goto tr126 + } + case data[p] >= 167: + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4681: + if p++; p == pe { + goto _test_eof4681 + } + st_case_4681: + switch data[p] { + case 179: + goto tr2395 + case 182: + goto tr148 + } + if 144 <= data[p] && data[p] <= 178 { + goto tr148 + } 
+ goto tr0 + st4682: + if p++; p == pe { + goto _test_eof4682 + } + st_case_4682: + switch { + case data[p] < 131: + if 128 <= data[p] && data[p] <= 130 { + goto tr2395 + } + case data[p] > 178: + if 179 <= data[p] { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4683: + if p++; p == pe { + goto _test_eof4683 + } + st_case_4683: + if data[p] == 155 { + goto tr0 + } + switch { + case data[p] < 141: + switch { + case data[p] > 132: + if 133 <= data[p] && data[p] <= 137 { + goto tr0 + } + case data[p] >= 129: + goto tr148 + } + case data[p] > 143: + switch { + case data[p] < 154: + if 144 <= data[p] && data[p] <= 153 { + goto tr126 + } + case data[p] > 156: + if 157 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + default: + goto tr0 + } + goto tr2395 + st4684: + if p++; p == pe { + goto _test_eof4684 + } + st_case_4684: + switch { + case data[p] < 147: + if 128 <= data[p] && data[p] <= 145 { + goto tr148 + } + case data[p] > 171: + if 172 <= data[p] && data[p] <= 183 { + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4685: + if p++; p == pe { + goto _test_eof4685 + } + st_case_4685: + switch { + case data[p] < 171: + if 159 <= data[p] && data[p] <= 170 { + goto tr2395 + } + case data[p] > 175: + switch { + case data[p] > 185: + if 186 <= data[p] { + goto tr0 + } + case data[p] >= 176: + goto tr126 + } + default: + goto tr0 + } + goto tr148 + st4686: + if p++; p == pe { + goto _test_eof4686 + } + st_case_4686: + if data[p] == 189 { + goto tr148 + } + switch { + case data[p] < 147: + switch { + case data[p] < 133: + if 128 <= data[p] && data[p] <= 131 { + goto tr2395 + } + case data[p] > 140: + if 143 <= data[p] && data[p] <= 144 { + goto tr148 + } + default: + goto tr148 + } + case data[p] > 168: + switch { + case data[p] < 178: + if 170 <= data[p] && data[p] <= 176 { + goto tr148 + } + case data[p] > 179: + switch { + case data[p] > 185: + if 188 <= data[p] && data[p] <= 191 { + goto tr2395 + } + case data[p] >= 181: + goto tr148 + 
} + default: + goto tr148 + } + default: + goto tr148 + } + goto tr0 + st4687: + if p++; p == pe { + goto _test_eof4687 + } + st_case_4687: + switch data[p] { + case 144: + goto tr148 + case 151: + goto tr2395 + } + switch { + case data[p] < 157: + switch { + case data[p] < 135: + if 128 <= data[p] && data[p] <= 132 { + goto tr2395 + } + case data[p] > 136: + if 139 <= data[p] && data[p] <= 141 { + goto tr2395 + } + default: + goto tr2395 + } + case data[p] > 161: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr2395 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr2395 + } + default: + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4688: + if p++; p == pe { + goto _test_eof4688 + } + st_case_4688: + switch { + case data[p] > 175: + if 176 <= data[p] { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st4689: + if p++; p == pe { + goto _test_eof4689 + } + st_case_4689: + if data[p] == 134 { + goto tr0 + } + switch { + case data[p] < 136: + if 132 <= data[p] && data[p] <= 135 { + goto tr148 + } + case data[p] > 143: + switch { + case data[p] > 153: + if 154 <= data[p] { + goto tr0 + } + case data[p] >= 144: + goto tr126 + } + default: + goto tr0 + } + goto tr2395 + st4690: + if p++; p == pe { + goto _test_eof4690 + } + st_case_4690: + switch { + case data[p] < 175: + if 128 <= data[p] && data[p] <= 174 { + goto tr148 + } + case data[p] > 181: + if 184 <= data[p] { + goto tr2395 + } + default: + goto tr2395 + } + goto tr0 + st4691: + if p++; p == pe { + goto _test_eof4691 + } + st_case_4691: + switch { + case data[p] < 152: + if 129 <= data[p] && data[p] <= 151 { + goto tr0 + } + case data[p] > 155: + if 158 <= data[p] { + goto tr0 + } + default: + goto tr148 + } + goto tr2395 + st4692: + if p++; p == pe { + goto _test_eof4692 + } + st_case_4692: + if data[p] == 132 { + goto tr148 + } + switch { + case data[p] < 144: + if 129 <= data[p] && data[p] <= 143 { + goto tr0 + } + 
case data[p] > 153: + if 154 <= data[p] { + goto tr0 + } + default: + goto tr126 + } + goto tr2395 + st4693: + if p++; p == pe { + goto _test_eof4693 + } + st_case_4693: + switch { + case data[p] > 170: + if 171 <= data[p] && data[p] <= 183 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st4694: + if p++; p == pe { + goto _test_eof4694 + } + st_case_4694: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 185 { + goto tr126 + } + case data[p] >= 157: + goto tr2395 + } + goto tr0 + st4695: + if p++; p == pe { + goto _test_eof4695 + } + st_case_4695: + switch data[p] { + case 160: + goto st147 + case 168: + goto st370 + case 169: + goto st1728 + case 171: + goto st4696 + case 172: + goto st4697 + case 173: + goto st1731 + case 174: + goto st374 + case 188: + goto st147 + case 189: + goto st4698 + case 190: + goto st4699 + } + if 161 <= data[p] && data[p] <= 167 { + goto st145 + } + goto tr0 + st4696: + if p++; p == pe { + goto _test_eof4696 + } + st_case_4696: + switch { + case data[p] > 173: + if 176 <= data[p] && data[p] <= 180 { + goto tr2395 + } + case data[p] >= 144: + goto tr148 + } + goto tr0 + st4697: + if p++; p == pe { + goto _test_eof4697 + } + st_case_4697: + switch { + case data[p] > 175: + if 176 <= data[p] && data[p] <= 182 { + goto tr2395 + } + case data[p] >= 128: + goto tr148 + } + goto tr0 + st4698: + if p++; p == pe { + goto _test_eof4698 + } + st_case_4698: + switch { + case data[p] < 145: + if 133 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 190: + if 191 <= data[p] { + goto tr0 + } + default: + goto tr2395 + } + goto tr148 + st4699: + if p++; p == pe { + goto _test_eof4699 + } + st_case_4699: + switch { + case data[p] > 146: + if 147 <= data[p] && data[p] <= 159 { + goto tr148 + } + case data[p] >= 143: + goto tr2395 + } + goto tr0 + st4700: + if p++; p == pe { + goto _test_eof4700 + } + st_case_4700: + switch data[p] { + case 128: + goto st4701 + case 176: + goto st147 + case 177: + 
goto st378 + case 178: + goto st4702 + } + goto tr0 + st4701: + if p++; p == pe { + goto _test_eof4701 + } + st_case_4701: + switch data[p] { + case 128: + goto tr3376 + case 129: + goto tr3757 + } + goto tr0 + st4702: + if p++; p == pe { + goto _test_eof4702 + } + st_case_4702: + switch { + case data[p] < 144: + if 128 <= data[p] && data[p] <= 136 { + goto tr148 + } + case data[p] > 153: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr2395 + } + case data[p] >= 157: + goto tr2395 + } + default: + goto tr148 + } + goto tr0 + st4703: + if p++; p == pe { + goto _test_eof4703 + } + st_case_4703: + switch data[p] { + case 133: + goto st2764 + case 134: + goto st2765 + case 137: + goto st2766 + case 144: + goto st147 + case 145: + goto st384 + case 146: + goto st385 + case 147: + goto st386 + case 148: + goto st387 + case 149: + goto st388 + case 154: + goto st389 + case 155: + goto st390 + case 156: + goto st391 + case 157: + goto st392 + case 158: + goto st393 + case 159: + goto st1740 + case 168: + goto st2767 + case 169: + goto st2768 + case 170: + goto st2769 + } + if 150 <= data[p] && data[p] <= 153 { + goto st145 + } + goto tr0 + st4704: + if p++; p == pe { + goto _test_eof4704 + } + st_case_4704: + switch data[p] { + case 160: + goto st147 + case 163: + goto st4705 + case 184: + goto st400 + case 185: + goto st401 + case 186: + goto st402 + } + if 161 <= data[p] && data[p] <= 162 { + goto st145 + } + goto tr0 + st4705: + if p++; p == pe { + goto _test_eof4705 + } + st_case_4705: + switch { + case data[p] < 144: + if 133 <= data[p] && data[p] <= 143 { + goto tr0 + } + case data[p] > 150: + if 151 <= data[p] { + goto tr0 + } + default: + goto tr2395 + } + goto tr148 + st4706: + if p++; p == pe { + goto _test_eof4706 + } + st_case_4706: + switch data[p] { + case 132: + goto st404 + case 133: + goto st405 + case 134: + goto st406 + case 135: + goto st4707 + case 136: + goto st4850 + } + goto tr0 + st4707: + if p++; p == pe { + goto 
_test_eof4707 + } + st_case_4707: + if 166 <= data[p] && data[p] <= 191 { + goto tr4327 + } + goto tr2 +tr4327: +//line NONE:1 +te = p+1 + +//line segment_words.rl:72 + + endPos = p + +//line segment_words.rl:161 +act = 7; + goto st5340 + st5340: + if p++; p == pe { + goto _test_eof5340 + } + st_case_5340: +//line segment_words_prod.go:155329 + switch data[p] { + case 194: + goto st4708 + case 204: + goto st4709 + case 205: + goto st4710 + case 210: + goto st4711 + case 214: + goto st4712 + case 215: + goto st4713 + case 216: + goto st4714 + case 217: + goto st4715 + case 219: + goto st4716 + case 220: + goto st4717 + case 221: + goto st4718 + case 222: + goto st4719 + case 223: + goto st4720 + case 224: + goto st4721 + case 225: + goto st4750 + case 226: + goto st4772 + case 227: + goto st4779 + case 234: + goto st4782 + case 239: + goto st4798 + case 240: + goto st4802 + case 243: + goto st4845 + } + goto tr5359 + st4708: + if p++; p == pe { + goto _test_eof4708 + } + st_case_4708: + if data[p] == 173 { + goto tr4327 + } + goto tr4328 + st4709: + if p++; p == pe { + goto _test_eof4709 + } + st_case_4709: + if data[p] <= 127 { + goto tr4328 + } + goto tr4327 + st4710: + if p++; p == pe { + goto _test_eof4710 + } + st_case_4710: + if 176 <= data[p] { + goto tr4328 + } + goto tr4327 + st4711: + if p++; p == pe { + goto _test_eof4711 + } + st_case_4711: + if 131 <= data[p] && data[p] <= 137 { + goto tr4327 + } + goto tr4328 + st4712: + if p++; p == pe { + goto _test_eof4712 + } + st_case_4712: + if data[p] == 191 { + goto tr4327 + } + if 145 <= data[p] && data[p] <= 189 { + goto tr4327 + } + goto tr4328 + st4713: + if p++; p == pe { + goto _test_eof4713 + } + st_case_4713: + if data[p] == 135 { + goto tr4327 + } + switch { + case data[p] > 130: + if 132 <= data[p] && data[p] <= 133 { + goto tr4327 + } + case data[p] >= 129: + goto tr4327 + } + goto tr4328 + st4714: + if p++; p == pe { + goto _test_eof4714 + } + st_case_4714: + if data[p] == 156 { + goto tr4327 + } + 
switch { + case data[p] > 133: + if 144 <= data[p] && data[p] <= 154 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4715: + if p++; p == pe { + goto _test_eof4715 + } + st_case_4715: + if data[p] == 176 { + goto tr4327 + } + if 139 <= data[p] && data[p] <= 159 { + goto tr4327 + } + goto tr4328 + st4716: + if p++; p == pe { + goto _test_eof4716 + } + st_case_4716: + switch { + case data[p] < 159: + if 150 <= data[p] && data[p] <= 157 { + goto tr4327 + } + case data[p] > 164: + switch { + case data[p] > 168: + if 170 <= data[p] && data[p] <= 173 { + goto tr4327 + } + case data[p] >= 167: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4717: + if p++; p == pe { + goto _test_eof4717 + } + st_case_4717: + switch data[p] { + case 143: + goto tr4327 + case 145: + goto tr4327 + } + if 176 <= data[p] { + goto tr4327 + } + goto tr4328 + st4718: + if p++; p == pe { + goto _test_eof4718 + } + st_case_4718: + if 139 <= data[p] { + goto tr4328 + } + goto tr4327 + st4719: + if p++; p == pe { + goto _test_eof4719 + } + st_case_4719: + if 166 <= data[p] && data[p] <= 176 { + goto tr4327 + } + goto tr4328 + st4720: + if p++; p == pe { + goto _test_eof4720 + } + st_case_4720: + if 171 <= data[p] && data[p] <= 179 { + goto tr4327 + } + goto tr4328 + st4721: + if p++; p == pe { + goto _test_eof4721 + } + st_case_4721: + switch data[p] { + case 160: + goto st4722 + case 161: + goto st4723 + case 163: + goto st4724 + case 164: + goto st4725 + case 165: + goto st4726 + case 167: + goto st4728 + case 169: + goto st4729 + case 171: + goto st4730 + case 173: + goto st4732 + case 174: + goto st4733 + case 175: + goto st4734 + case 176: + goto st4735 + case 177: + goto st4736 + case 179: + goto st4737 + case 180: + goto st4738 + case 181: + goto st4739 + case 182: + goto st4740 + case 183: + goto st4741 + case 184: + goto st4742 + case 185: + goto st4743 + case 186: + goto st4744 + case 187: + goto st4745 + case 188: + goto st4746 + case 189: + 
goto st4747 + case 190: + goto st4748 + case 191: + goto st4749 + } + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 178 { + goto st4731 + } + case data[p] >= 166: + goto st4727 + } + goto tr4328 + st4722: + if p++; p == pe { + goto _test_eof4722 + } + st_case_4722: + switch { + case data[p] < 155: + if 150 <= data[p] && data[p] <= 153 { + goto tr4327 + } + case data[p] > 163: + switch { + case data[p] > 167: + if 169 <= data[p] && data[p] <= 173 { + goto tr4327 + } + case data[p] >= 165: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4723: + if p++; p == pe { + goto _test_eof4723 + } + st_case_4723: + if 153 <= data[p] && data[p] <= 155 { + goto tr4327 + } + goto tr4328 + st4724: + if p++; p == pe { + goto _test_eof4724 + } + st_case_4724: + if 163 <= data[p] { + goto tr4327 + } + goto tr4328 + st4725: + if p++; p == pe { + goto _test_eof4725 + } + st_case_4725: + if data[p] == 189 { + goto tr4328 + } + if 132 <= data[p] && data[p] <= 185 { + goto tr4328 + } + goto tr4327 + st4726: + if p++; p == pe { + goto _test_eof4726 + } + st_case_4726: + if data[p] == 144 { + goto tr4328 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr4328 + } + case data[p] >= 152: + goto tr4328 + } + goto tr4327 + st4727: + if p++; p == pe { + goto _test_eof4727 + } + st_case_4727: + if data[p] == 188 { + goto tr4327 + } + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr4327 + } + case data[p] >= 129: + goto tr4327 + } + goto tr4328 + st4728: + if p++; p == pe { + goto _test_eof4728 + } + st_case_4728: + switch { + case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr4328 + } + case data[p] >= 133: + goto tr4328 + } + case data[p] > 150: + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr4328 + } + case data[p] >= 152: + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4729: + if p++; p == pe { + goto _test_eof4729 + } + st_case_4729: + switch { 
+ case data[p] < 142: + switch { + case data[p] > 134: + if 137 <= data[p] && data[p] <= 138 { + goto tr4328 + } + case data[p] >= 131: + goto tr4328 + } + case data[p] > 144: + switch { + case data[p] < 178: + if 146 <= data[p] && data[p] <= 175 { + goto tr4328 + } + case data[p] > 180: + if 182 <= data[p] { + goto tr4328 + } + default: + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4730: + if p++; p == pe { + goto _test_eof4730 + } + st_case_4730: + switch data[p] { + case 134: + goto tr4328 + case 138: + goto tr4328 + } + switch { + case data[p] > 161: + if 164 <= data[p] { + goto tr4328 + } + case data[p] >= 142: + goto tr4328 + } + goto tr4327 + st4731: + if p++; p == pe { + goto _test_eof4731 + } + st_case_4731: + if data[p] == 188 { + goto tr4327 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr4327 + } + case data[p] >= 129: + goto tr4327 + } + goto tr4328 + st4732: + if p++; p == pe { + goto _test_eof4732 + } + st_case_4732: + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + case data[p] > 141: + switch { + case data[p] > 151: + if 162 <= data[p] && data[p] <= 163 { + goto tr4327 + } + case data[p] >= 150: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4733: + if p++; p == pe { + goto _test_eof4733 + } + st_case_4733: + if data[p] == 130 { + goto tr4327 + } + if 190 <= data[p] && data[p] <= 191 { + goto tr4327 + } + goto tr4328 + st4734: + if p++; p == pe { + goto _test_eof4734 + } + st_case_4734: + if data[p] == 151 { + goto tr4327 + } + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 130 { + goto tr4327 + } + case data[p] > 136: + if 138 <= data[p] && data[p] <= 141 { + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4735: + if p++; p == pe { + goto _test_eof4735 + } + st_case_4735: + switch { + case data[p] > 131: + if 190 <= data[p] { 
+ goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4736: + if p++; p == pe { + goto _test_eof4736 + } + st_case_4736: + switch data[p] { + case 133: + goto tr4328 + case 137: + goto tr4328 + } + switch { + case data[p] < 151: + if 142 <= data[p] && data[p] <= 148 { + goto tr4328 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4737: + if p++; p == pe { + goto _test_eof4737 + } + st_case_4737: + switch { + case data[p] < 138: + switch { + case data[p] > 132: + if 134 <= data[p] && data[p] <= 136 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + case data[p] > 141: + switch { + case data[p] > 150: + if 162 <= data[p] && data[p] <= 163 { + goto tr4327 + } + case data[p] >= 149: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4738: + if p++; p == pe { + goto _test_eof4738 + } + st_case_4738: + switch { + case data[p] > 131: + if 190 <= data[p] { + goto tr4327 + } + case data[p] >= 129: + goto tr4327 + } + goto tr4328 + st4739: + if p++; p == pe { + goto _test_eof4739 + } + st_case_4739: + switch data[p] { + case 133: + goto tr4328 + case 137: + goto tr4328 + } + switch { + case data[p] < 152: + if 142 <= data[p] && data[p] <= 150 { + goto tr4328 + } + case data[p] > 161: + if 164 <= data[p] { + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4740: + if p++; p == pe { + goto _test_eof4740 + } + st_case_4740: + if 130 <= data[p] && data[p] <= 131 { + goto tr4327 + } + goto tr4328 + st4741: + if p++; p == pe { + goto _test_eof4741 + } + st_case_4741: + switch data[p] { + case 138: + goto tr4327 + case 150: + goto tr4327 + } + switch { + case data[p] < 152: + if 143 <= data[p] && data[p] <= 148 { + goto tr4327 + } + case data[p] > 159: + if 178 <= data[p] && data[p] <= 179 { + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4742: + if p++; p == pe { + goto _test_eof4742 + } + st_case_4742: + if data[p] == 177 { + goto tr4327 
+ } + if 180 <= data[p] && data[p] <= 186 { + goto tr4327 + } + goto tr4328 + st4743: + if p++; p == pe { + goto _test_eof4743 + } + st_case_4743: + if 135 <= data[p] && data[p] <= 142 { + goto tr4327 + } + goto tr4328 + st4744: + if p++; p == pe { + goto _test_eof4744 + } + st_case_4744: + if data[p] == 177 { + goto tr4327 + } + switch { + case data[p] > 185: + if 187 <= data[p] && data[p] <= 188 { + goto tr4327 + } + case data[p] >= 180: + goto tr4327 + } + goto tr4328 + st4745: + if p++; p == pe { + goto _test_eof4745 + } + st_case_4745: + if 136 <= data[p] && data[p] <= 141 { + goto tr4327 + } + goto tr4328 + st4746: + if p++; p == pe { + goto _test_eof4746 + } + st_case_4746: + switch data[p] { + case 181: + goto tr4327 + case 183: + goto tr4327 + case 185: + goto tr4327 + } + switch { + case data[p] > 153: + if 190 <= data[p] && data[p] <= 191 { + goto tr4327 + } + case data[p] >= 152: + goto tr4327 + } + goto tr4328 + st4747: + if p++; p == pe { + goto _test_eof4747 + } + st_case_4747: + if 177 <= data[p] && data[p] <= 191 { + goto tr4327 + } + goto tr4328 + st4748: + if p++; p == pe { + goto _test_eof4748 + } + st_case_4748: + switch { + case data[p] < 134: + if 128 <= data[p] && data[p] <= 132 { + goto tr4327 + } + case data[p] > 135: + switch { + case data[p] > 151: + if 153 <= data[p] && data[p] <= 188 { + goto tr4327 + } + case data[p] >= 141: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4749: + if p++; p == pe { + goto _test_eof4749 + } + st_case_4749: + if data[p] == 134 { + goto tr4327 + } + goto tr4328 + st4750: + if p++; p == pe { + goto _test_eof4750 + } + st_case_4750: + switch data[p] { + case 128: + goto st4751 + case 129: + goto st4752 + case 130: + goto st4753 + case 141: + goto st4754 + case 156: + goto st4755 + case 157: + goto st4756 + case 158: + goto st4757 + case 159: + goto st4758 + case 160: + goto st4759 + case 162: + goto st4760 + case 164: + goto st4761 + case 168: + goto st4762 + case 169: + goto st4763 + case 
170: + goto st4764 + case 172: + goto st4765 + case 173: + goto st4766 + case 174: + goto st4767 + case 175: + goto st4768 + case 176: + goto st4769 + case 179: + goto st4770 + case 183: + goto st4771 + } + goto tr4328 + st4751: + if p++; p == pe { + goto _test_eof4751 + } + st_case_4751: + if 171 <= data[p] && data[p] <= 190 { + goto tr4327 + } + goto tr4328 + st4752: + if p++; p == pe { + goto _test_eof4752 + } + st_case_4752: + switch { + case data[p] < 162: + switch { + case data[p] > 153: + if 158 <= data[p] && data[p] <= 160 { + goto tr4327 + } + case data[p] >= 150: + goto tr4327 + } + case data[p] > 164: + switch { + case data[p] > 173: + if 177 <= data[p] && data[p] <= 180 { + goto tr4327 + } + case data[p] >= 167: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4753: + if p++; p == pe { + goto _test_eof4753 + } + st_case_4753: + if data[p] == 143 { + goto tr4327 + } + switch { + case data[p] > 141: + if 154 <= data[p] && data[p] <= 157 { + goto tr4327 + } + case data[p] >= 130: + goto tr4327 + } + goto tr4328 + st4754: + if p++; p == pe { + goto _test_eof4754 + } + st_case_4754: + if 157 <= data[p] && data[p] <= 159 { + goto tr4327 + } + goto tr4328 + st4755: + if p++; p == pe { + goto _test_eof4755 + } + st_case_4755: + switch { + case data[p] > 148: + if 178 <= data[p] && data[p] <= 180 { + goto tr4327 + } + case data[p] >= 146: + goto tr4327 + } + goto tr4328 + st4756: + if p++; p == pe { + goto _test_eof4756 + } + st_case_4756: + switch { + case data[p] > 147: + if 178 <= data[p] && data[p] <= 179 { + goto tr4327 + } + case data[p] >= 146: + goto tr4327 + } + goto tr4328 + st4757: + if p++; p == pe { + goto _test_eof4757 + } + st_case_4757: + if 180 <= data[p] { + goto tr4327 + } + goto tr4328 + st4758: + if p++; p == pe { + goto _test_eof4758 + } + st_case_4758: + switch { + case data[p] > 156: + if 158 <= data[p] { + goto tr4328 + } + case data[p] >= 148: + goto tr4328 + } + goto tr4327 + st4759: + if p++; p == pe { + goto 
_test_eof4759 + } + st_case_4759: + if 139 <= data[p] && data[p] <= 142 { + goto tr4327 + } + goto tr4328 + st4760: + if p++; p == pe { + goto _test_eof4760 + } + st_case_4760: + if data[p] == 169 { + goto tr4327 + } + goto tr4328 + st4761: + if p++; p == pe { + goto _test_eof4761 + } + st_case_4761: + switch { + case data[p] > 171: + if 176 <= data[p] && data[p] <= 187 { + goto tr4327 + } + case data[p] >= 160: + goto tr4327 + } + goto tr4328 + st4762: + if p++; p == pe { + goto _test_eof4762 + } + st_case_4762: + if 151 <= data[p] && data[p] <= 155 { + goto tr4327 + } + goto tr4328 + st4763: + if p++; p == pe { + goto _test_eof4763 + } + st_case_4763: + if data[p] == 191 { + goto tr4327 + } + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 188 { + goto tr4327 + } + case data[p] >= 149: + goto tr4327 + } + goto tr4328 + st4764: + if p++; p == pe { + goto _test_eof4764 + } + st_case_4764: + if 176 <= data[p] && data[p] <= 190 { + goto tr4327 + } + goto tr4328 + st4765: + if p++; p == pe { + goto _test_eof4765 + } + st_case_4765: + switch { + case data[p] > 132: + if 180 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4766: + if p++; p == pe { + goto _test_eof4766 + } + st_case_4766: + switch { + case data[p] > 170: + if 180 <= data[p] { + goto tr4328 + } + case data[p] >= 133: + goto tr4328 + } + goto tr4327 + st4767: + if p++; p == pe { + goto _test_eof4767 + } + st_case_4767: + switch { + case data[p] > 130: + if 161 <= data[p] && data[p] <= 173 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4768: + if p++; p == pe { + goto _test_eof4768 + } + st_case_4768: + if 166 <= data[p] && data[p] <= 179 { + goto tr4327 + } + goto tr4328 + st4769: + if p++; p == pe { + goto _test_eof4769 + } + st_case_4769: + if 164 <= data[p] && data[p] <= 183 { + goto tr4327 + } + goto tr4328 + st4770: + if p++; p == pe { + goto _test_eof4770 + } + st_case_4770: + if data[p] == 173 { + goto tr4327 
+ } + switch { + case data[p] < 148: + if 144 <= data[p] && data[p] <= 146 { + goto tr4327 + } + case data[p] > 168: + switch { + case data[p] > 180: + if 184 <= data[p] && data[p] <= 185 { + goto tr4327 + } + case data[p] >= 178: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4771: + if p++; p == pe { + goto _test_eof4771 + } + st_case_4771: + switch { + case data[p] > 181: + if 188 <= data[p] && data[p] <= 191 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4772: + if p++; p == pe { + goto _test_eof4772 + } + st_case_4772: + switch data[p] { + case 128: + goto st4773 + case 129: + goto st4774 + case 131: + goto st4775 + case 179: + goto st4776 + case 181: + goto st4777 + case 183: + goto st4778 + } + goto tr4328 + st4773: + if p++; p == pe { + goto _test_eof4773 + } + st_case_4773: + switch { + case data[p] > 143: + if 170 <= data[p] && data[p] <= 174 { + goto tr4327 + } + case data[p] >= 140: + goto tr4327 + } + goto tr4328 + st4774: + if p++; p == pe { + goto _test_eof4774 + } + st_case_4774: + switch { + case data[p] > 164: + if 166 <= data[p] && data[p] <= 175 { + goto tr4327 + } + case data[p] >= 160: + goto tr4327 + } + goto tr4328 + st4775: + if p++; p == pe { + goto _test_eof4775 + } + st_case_4775: + if 144 <= data[p] && data[p] <= 176 { + goto tr4327 + } + goto tr4328 + st4776: + if p++; p == pe { + goto _test_eof4776 + } + st_case_4776: + if 175 <= data[p] && data[p] <= 177 { + goto tr4327 + } + goto tr4328 + st4777: + if p++; p == pe { + goto _test_eof4777 + } + st_case_4777: + if data[p] == 191 { + goto tr4327 + } + goto tr4328 + st4778: + if p++; p == pe { + goto _test_eof4778 + } + st_case_4778: + if 160 <= data[p] && data[p] <= 191 { + goto tr4327 + } + goto tr4328 + st4779: + if p++; p == pe { + goto _test_eof4779 + } + st_case_4779: + switch data[p] { + case 128: + goto st4780 + case 130: + goto st4781 + } + goto tr4328 + st4780: + if p++; p == pe { + goto _test_eof4780 + } + st_case_4780: + if 
170 <= data[p] && data[p] <= 175 { + goto tr4327 + } + goto tr4328 + st4781: + if p++; p == pe { + goto _test_eof4781 + } + st_case_4781: + if 153 <= data[p] && data[p] <= 154 { + goto tr4327 + } + goto tr4328 + st4782: + if p++; p == pe { + goto _test_eof4782 + } + st_case_4782: + switch data[p] { + case 153: + goto st4783 + case 154: + goto st4784 + case 155: + goto st4785 + case 160: + goto st4786 + case 162: + goto st4787 + case 163: + goto st4788 + case 164: + goto st4789 + case 165: + goto st4790 + case 166: + goto st4791 + case 167: + goto st4792 + case 168: + goto st4793 + case 169: + goto st4794 + case 170: + goto st4795 + case 171: + goto st4796 + case 175: + goto st4797 + } + goto tr4328 + st4783: + if p++; p == pe { + goto _test_eof4783 + } + st_case_4783: + switch { + case data[p] > 178: + if 180 <= data[p] && data[p] <= 189 { + goto tr4327 + } + case data[p] >= 175: + goto tr4327 + } + goto tr4328 + st4784: + if p++; p == pe { + goto _test_eof4784 + } + st_case_4784: + if 158 <= data[p] && data[p] <= 159 { + goto tr4327 + } + goto tr4328 + st4785: + if p++; p == pe { + goto _test_eof4785 + } + st_case_4785: + if 176 <= data[p] && data[p] <= 177 { + goto tr4327 + } + goto tr4328 + st4786: + if p++; p == pe { + goto _test_eof4786 + } + st_case_4786: + switch data[p] { + case 130: + goto tr4327 + case 134: + goto tr4327 + case 139: + goto tr4327 + } + if 163 <= data[p] && data[p] <= 167 { + goto tr4327 + } + goto tr4328 + st4787: + if p++; p == pe { + goto _test_eof4787 + } + st_case_4787: + switch { + case data[p] > 129: + if 180 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4788: + if p++; p == pe { + goto _test_eof4788 + } + st_case_4788: + switch { + case data[p] > 159: + if 178 <= data[p] { + goto tr4328 + } + case data[p] >= 133: + goto tr4328 + } + goto tr4327 + st4789: + if p++; p == pe { + goto _test_eof4789 + } + st_case_4789: + if 166 <= data[p] && data[p] <= 173 { + goto tr4327 + } + goto tr4328 + 
st4790: + if p++; p == pe { + goto _test_eof4790 + } + st_case_4790: + if 135 <= data[p] && data[p] <= 147 { + goto tr4327 + } + goto tr4328 + st4791: + if p++; p == pe { + goto _test_eof4791 + } + st_case_4791: + switch { + case data[p] > 131: + if 179 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4792: + if p++; p == pe { + goto _test_eof4792 + } + st_case_4792: + switch { + case data[p] > 164: + if 166 <= data[p] { + goto tr4328 + } + case data[p] >= 129: + goto tr4328 + } + goto tr4327 + st4793: + if p++; p == pe { + goto _test_eof4793 + } + st_case_4793: + if 169 <= data[p] && data[p] <= 182 { + goto tr4327 + } + goto tr4328 + st4794: + if p++; p == pe { + goto _test_eof4794 + } + st_case_4794: + if data[p] == 131 { + goto tr4327 + } + switch { + case data[p] > 141: + if 187 <= data[p] && data[p] <= 189 { + goto tr4327 + } + case data[p] >= 140: + goto tr4327 + } + goto tr4328 + st4795: + if p++; p == pe { + goto _test_eof4795 + } + st_case_4795: + if data[p] == 176 { + goto tr4327 + } + switch { + case data[p] < 183: + if 178 <= data[p] && data[p] <= 180 { + goto tr4327 + } + case data[p] > 184: + if 190 <= data[p] && data[p] <= 191 { + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4796: + if p++; p == pe { + goto _test_eof4796 + } + st_case_4796: + if data[p] == 129 { + goto tr4327 + } + switch { + case data[p] > 175: + if 181 <= data[p] && data[p] <= 182 { + goto tr4327 + } + case data[p] >= 171: + goto tr4327 + } + goto tr4328 + st4797: + if p++; p == pe { + goto _test_eof4797 + } + st_case_4797: + switch { + case data[p] > 170: + if 172 <= data[p] && data[p] <= 173 { + goto tr4327 + } + case data[p] >= 163: + goto tr4327 + } + goto tr4328 + st4798: + if p++; p == pe { + goto _test_eof4798 + } + st_case_4798: + switch data[p] { + case 172: + goto st4799 + case 184: + goto st4800 + case 187: + goto st4777 + case 190: + goto st4784 + case 191: + goto st4801 + } + goto tr4328 + st4799: + if p++; p 
== pe { + goto _test_eof4799 + } + st_case_4799: + if data[p] == 158 { + goto tr4327 + } + goto tr4328 + st4800: + if p++; p == pe { + goto _test_eof4800 + } + st_case_4800: + switch { + case data[p] > 143: + if 160 <= data[p] && data[p] <= 175 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4801: + if p++; p == pe { + goto _test_eof4801 + } + st_case_4801: + if 185 <= data[p] && data[p] <= 187 { + goto tr4327 + } + goto tr4328 + st4802: + if p++; p == pe { + goto _test_eof4802 + } + st_case_4802: + switch data[p] { + case 144: + goto st4803 + case 145: + goto st4809 + case 150: + goto st4828 + case 155: + goto st4833 + case 157: + goto st4835 + case 158: + goto st4842 + case 159: + goto st4844 + } + goto tr4328 + st4803: + if p++; p == pe { + goto _test_eof4803 + } + st_case_4803: + switch data[p] { + case 135: + goto st4804 + case 139: + goto st4805 + case 141: + goto st4806 + case 168: + goto st4807 + case 171: + goto st4808 + } + goto tr4328 + st4804: + if p++; p == pe { + goto _test_eof4804 + } + st_case_4804: + if data[p] == 189 { + goto tr4327 + } + goto tr4328 + st4805: + if p++; p == pe { + goto _test_eof4805 + } + st_case_4805: + if data[p] == 160 { + goto tr4327 + } + goto tr4328 + st4806: + if p++; p == pe { + goto _test_eof4806 + } + st_case_4806: + if 182 <= data[p] && data[p] <= 186 { + goto tr4327 + } + goto tr4328 + st4807: + if p++; p == pe { + goto _test_eof4807 + } + st_case_4807: + if data[p] == 191 { + goto tr4327 + } + switch { + case data[p] < 133: + if 129 <= data[p] && data[p] <= 131 { + goto tr4327 + } + case data[p] > 134: + switch { + case data[p] > 143: + if 184 <= data[p] && data[p] <= 186 { + goto tr4327 + } + case data[p] >= 140: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4808: + if p++; p == pe { + goto _test_eof4808 + } + st_case_4808: + if 165 <= data[p] && data[p] <= 166 { + goto tr4327 + } + goto tr4328 + st4809: + if p++; p == pe { + goto _test_eof4809 + } + st_case_4809: + 
switch data[p] { + case 128: + goto st4810 + case 129: + goto st4811 + case 130: + goto st4812 + case 132: + goto st4813 + case 133: + goto st4814 + case 134: + goto st4815 + case 135: + goto st4816 + case 136: + goto st4817 + case 139: + goto st4818 + case 140: + goto st4819 + case 141: + goto st4820 + case 146: + goto st4821 + case 147: + goto st4822 + case 150: + goto st4823 + case 151: + goto st4824 + case 152: + goto st4821 + case 153: + goto st4825 + case 154: + goto st4826 + case 156: + goto st4827 + } + goto tr4328 + st4810: + if p++; p == pe { + goto _test_eof4810 + } + st_case_4810: + switch { + case data[p] > 130: + if 184 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4811: + if p++; p == pe { + goto _test_eof4811 + } + st_case_4811: + if 135 <= data[p] && data[p] <= 190 { + goto tr4328 + } + goto tr4327 + st4812: + if p++; p == pe { + goto _test_eof4812 + } + st_case_4812: + switch { + case data[p] < 187: + if 131 <= data[p] && data[p] <= 175 { + goto tr4328 + } + case data[p] > 188: + if 190 <= data[p] { + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4813: + if p++; p == pe { + goto _test_eof4813 + } + st_case_4813: + switch { + case data[p] > 130: + if 167 <= data[p] && data[p] <= 180 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4814: + if p++; p == pe { + goto _test_eof4814 + } + st_case_4814: + if data[p] == 179 { + goto tr4327 + } + goto tr4328 + st4815: + if p++; p == pe { + goto _test_eof4815 + } + st_case_4815: + switch { + case data[p] > 130: + if 179 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4816: + if p++; p == pe { + goto _test_eof4816 + } + st_case_4816: + switch { + case data[p] > 137: + if 141 <= data[p] { + goto tr4328 + } + case data[p] >= 129: + goto tr4328 + } + goto tr4327 + st4817: + if p++; p == pe { + goto _test_eof4817 + } + st_case_4817: + if 172 <= data[p] && data[p] <= 183 { + goto 
tr4327 + } + goto tr4328 + st4818: + if p++; p == pe { + goto _test_eof4818 + } + st_case_4818: + if 159 <= data[p] && data[p] <= 170 { + goto tr4327 + } + goto tr4328 + st4819: + if p++; p == pe { + goto _test_eof4819 + } + st_case_4819: + if data[p] == 188 { + goto tr4327 + } + switch { + case data[p] > 131: + if 190 <= data[p] && data[p] <= 191 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4820: + if p++; p == pe { + goto _test_eof4820 + } + st_case_4820: + if data[p] == 151 { + goto tr4327 + } + switch { + case data[p] < 139: + switch { + case data[p] > 132: + if 135 <= data[p] && data[p] <= 136 { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + case data[p] > 141: + switch { + case data[p] < 166: + if 162 <= data[p] && data[p] <= 163 { + goto tr4327 + } + case data[p] > 172: + if 176 <= data[p] && data[p] <= 180 { + goto tr4327 + } + default: + goto tr4327 + } + default: + goto tr4327 + } + goto tr4328 + st4821: + if p++; p == pe { + goto _test_eof4821 + } + st_case_4821: + if 176 <= data[p] { + goto tr4327 + } + goto tr4328 + st4822: + if p++; p == pe { + goto _test_eof4822 + } + st_case_4822: + if 132 <= data[p] { + goto tr4328 + } + goto tr4327 + st4823: + if p++; p == pe { + goto _test_eof4823 + } + st_case_4823: + switch { + case data[p] > 181: + if 184 <= data[p] { + goto tr4327 + } + case data[p] >= 175: + goto tr4327 + } + goto tr4328 + st4824: + if p++; p == pe { + goto _test_eof4824 + } + st_case_4824: + switch { + case data[p] > 155: + if 158 <= data[p] { + goto tr4328 + } + case data[p] >= 129: + goto tr4328 + } + goto tr4327 + st4825: + if p++; p == pe { + goto _test_eof4825 + } + st_case_4825: + if 129 <= data[p] { + goto tr4328 + } + goto tr4327 + st4826: + if p++; p == pe { + goto _test_eof4826 + } + st_case_4826: + if 171 <= data[p] && data[p] <= 183 { + goto tr4327 + } + goto tr4328 + st4827: + if p++; p == pe { + goto _test_eof4827 + } + st_case_4827: + if 157 <= data[p] && data[p] <= 171 { + goto 
tr4327 + } + goto tr4328 + st4828: + if p++; p == pe { + goto _test_eof4828 + } + st_case_4828: + switch data[p] { + case 171: + goto st4829 + case 172: + goto st4830 + case 189: + goto st4831 + case 190: + goto st4832 + } + goto tr4328 + st4829: + if p++; p == pe { + goto _test_eof4829 + } + st_case_4829: + if 176 <= data[p] && data[p] <= 180 { + goto tr4327 + } + goto tr4328 + st4830: + if p++; p == pe { + goto _test_eof4830 + } + st_case_4830: + if 176 <= data[p] && data[p] <= 182 { + goto tr4327 + } + goto tr4328 + st4831: + if p++; p == pe { + goto _test_eof4831 + } + st_case_4831: + if 145 <= data[p] && data[p] <= 190 { + goto tr4327 + } + goto tr4328 + st4832: + if p++; p == pe { + goto _test_eof4832 + } + st_case_4832: + if 143 <= data[p] && data[p] <= 146 { + goto tr4327 + } + goto tr4328 + st4833: + if p++; p == pe { + goto _test_eof4833 + } + st_case_4833: + if data[p] == 178 { + goto st4834 + } + goto tr4328 + st4834: + if p++; p == pe { + goto _test_eof4834 + } + st_case_4834: + switch { + case data[p] > 158: + if 160 <= data[p] && data[p] <= 163 { + goto tr4327 + } + case data[p] >= 157: + goto tr4327 + } + goto tr4328 + st4835: + if p++; p == pe { + goto _test_eof4835 + } + st_case_4835: + switch data[p] { + case 133: + goto st4836 + case 134: + goto st4837 + case 137: + goto st4838 + case 168: + goto st4839 + case 169: + goto st4840 + case 170: + goto st4841 + } + goto tr4328 + st4836: + if p++; p == pe { + goto _test_eof4836 + } + st_case_4836: + switch { + case data[p] > 169: + if 173 <= data[p] { + goto tr4327 + } + case data[p] >= 165: + goto tr4327 + } + goto tr4328 + st4837: + if p++; p == pe { + goto _test_eof4837 + } + st_case_4837: + switch { + case data[p] < 140: + if 131 <= data[p] && data[p] <= 132 { + goto tr4328 + } + case data[p] > 169: + if 174 <= data[p] { + goto tr4328 + } + default: + goto tr4328 + } + goto tr4327 + st4838: + if p++; p == pe { + goto _test_eof4838 + } + st_case_4838: + if 130 <= data[p] && data[p] <= 132 { + goto 
tr4327 + } + goto tr4328 + st4839: + if p++; p == pe { + goto _test_eof4839 + } + st_case_4839: + switch { + case data[p] > 182: + if 187 <= data[p] { + goto tr4327 + } + case data[p] >= 128: + goto tr4327 + } + goto tr4328 + st4840: + if p++; p == pe { + goto _test_eof4840 + } + st_case_4840: + switch { + case data[p] > 180: + if 182 <= data[p] { + goto tr4328 + } + case data[p] >= 173: + goto tr4328 + } + goto tr4327 + st4841: + if p++; p == pe { + goto _test_eof4841 + } + st_case_4841: + if data[p] == 132 { + goto tr4327 + } + switch { + case data[p] > 159: + if 161 <= data[p] && data[p] <= 175 { + goto tr4327 + } + case data[p] >= 155: + goto tr4327 + } + goto tr4328 + st4842: + if p++; p == pe { + goto _test_eof4842 + } + st_case_4842: + if data[p] == 163 { + goto st4843 + } + goto tr4328 + st4843: + if p++; p == pe { + goto _test_eof4843 + } + st_case_4843: + if 144 <= data[p] && data[p] <= 150 { + goto tr4327 + } + goto tr4328 + st4844: + if p++; p == pe { + goto _test_eof4844 + } + st_case_4844: + if data[p] == 135 { + goto st4707 + } + goto tr4328 + st4845: + if p++; p == pe { + goto _test_eof4845 + } + st_case_4845: + if data[p] == 160 { + goto st4846 + } + goto tr4328 + st4846: + if p++; p == pe { + goto _test_eof4846 + } + st_case_4846: + switch data[p] { + case 128: + goto st4847 + case 129: + goto st4848 + case 132: + goto st4709 + case 135: + goto st4710 + } + if 133 <= data[p] && data[p] <= 134 { + goto st4849 + } + goto tr4328 + st4847: + if p++; p == pe { + goto _test_eof4847 + } + st_case_4847: + if data[p] == 129 { + goto tr4327 + } + if 160 <= data[p] { + goto tr4327 + } + goto tr4328 + st4848: + if p++; p == pe { + goto _test_eof4848 + } + st_case_4848: + if 192 <= data[p] { + goto tr4328 + } + goto tr4327 + st4849: + if p++; p == pe { + goto _test_eof4849 + } + st_case_4849: + goto tr4327 + st4850: + if p++; p == pe { + goto _test_eof4850 + } + st_case_4850: + if data[p] == 128 { + goto tr3757 + } + goto tr0 + st4851: + if p++; p == pe { + 
goto _test_eof4851 + } + st_case_4851: + if data[p] == 128 { + goto st3595 + } + if 129 <= data[p] { + goto st3734 + } + goto tr0 + st4852: + if p++; p == pe { + goto _test_eof4852 + } + st_case_4852: + goto st3734 + st4853: + if p++; p == pe { + goto _test_eof4853 + } + st_case_4853: + switch data[p] { + case 155: + goto st4854 + case 156: + goto st3595 + } + goto st3734 + st4854: + if p++; p == pe { + goto _test_eof4854 + } + st_case_4854: + if 151 <= data[p] { + goto tr0 + } + goto tr3250 + st4855: + if p++; p == pe { + goto _test_eof4855 + } + st_case_4855: + switch data[p] { + case 156: + goto st4856 + case 157: + goto st3595 + case 160: + goto st4857 + } + goto st3734 + st4856: + if p++; p == pe { + goto _test_eof4856 + } + st_case_4856: + if 181 <= data[p] { + goto tr0 + } + goto tr3250 + st4857: + if p++; p == pe { + goto _test_eof4857 + } + st_case_4857: + if 158 <= data[p] && data[p] <= 159 { + goto tr0 + } + goto tr3250 + st4858: + if p++; p == pe { + goto _test_eof4858 + } + st_case_4858: + if data[p] == 186 { + goto st4859 + } + if 187 <= data[p] { + goto tr0 + } + goto st3734 + st4859: + if p++; p == pe { + goto _test_eof4859 + } + st_case_4859: + if 162 <= data[p] { + goto tr0 + } + goto tr3250 + st4860: + if p++; p == pe { + goto _test_eof4860 + } + st_case_4860: + switch data[p] { + case 160: + goto st3595 + case 168: + goto st4861 + } + if 161 <= data[p] && data[p] <= 167 { + goto st3734 + } + goto tr0 + st4861: + if p++; p == pe { + goto _test_eof4861 + } + st_case_4861: + if 158 <= data[p] { + goto tr0 + } + goto tr3250 + st_out: + _test_eof4862: cs = 4862; goto _test_eof + _test_eof4863: cs = 4863; goto _test_eof + _test_eof0: cs = 0; goto _test_eof + _test_eof1: cs = 1; goto _test_eof + _test_eof2: cs = 2; goto _test_eof + _test_eof3: cs = 3; goto _test_eof + _test_eof4: cs = 4; goto _test_eof + _test_eof5: cs = 5; goto _test_eof + _test_eof6: cs = 6; goto _test_eof + _test_eof7: cs = 7; goto _test_eof + _test_eof8: cs = 8; goto _test_eof + 
_test_eof9: cs = 9; goto _test_eof + _test_eof10: cs = 10; goto _test_eof + _test_eof11: cs = 11; goto _test_eof + _test_eof12: cs = 12; goto _test_eof + _test_eof13: cs = 13; goto _test_eof + _test_eof14: cs = 14; goto _test_eof + _test_eof15: cs = 15; goto _test_eof + _test_eof16: cs = 16; goto _test_eof + _test_eof17: cs = 17; goto _test_eof + _test_eof18: cs = 18; goto _test_eof + _test_eof19: cs = 19; goto _test_eof + _test_eof20: cs = 20; goto _test_eof + _test_eof21: cs = 21; goto _test_eof + _test_eof22: cs = 22; goto _test_eof + _test_eof23: cs = 23; goto _test_eof + _test_eof24: cs = 24; goto _test_eof + _test_eof25: cs = 25; goto _test_eof + _test_eof26: cs = 26; goto _test_eof + _test_eof27: cs = 27; goto _test_eof + _test_eof28: cs = 28; goto _test_eof + _test_eof29: cs = 29; goto _test_eof + _test_eof30: cs = 30; goto _test_eof + _test_eof31: cs = 31; goto _test_eof + _test_eof32: cs = 32; goto _test_eof + _test_eof33: cs = 33; goto _test_eof + _test_eof34: cs = 34; goto _test_eof + _test_eof35: cs = 35; goto _test_eof + _test_eof36: cs = 36; goto _test_eof + _test_eof37: cs = 37; goto _test_eof + _test_eof38: cs = 38; goto _test_eof + _test_eof39: cs = 39; goto _test_eof + _test_eof40: cs = 40; goto _test_eof + _test_eof41: cs = 41; goto _test_eof + _test_eof42: cs = 42; goto _test_eof + _test_eof43: cs = 43; goto _test_eof + _test_eof44: cs = 44; goto _test_eof + _test_eof45: cs = 45; goto _test_eof + _test_eof46: cs = 46; goto _test_eof + _test_eof47: cs = 47; goto _test_eof + _test_eof48: cs = 48; goto _test_eof + _test_eof49: cs = 49; goto _test_eof + _test_eof50: cs = 50; goto _test_eof + _test_eof51: cs = 51; goto _test_eof + _test_eof52: cs = 52; goto _test_eof + _test_eof53: cs = 53; goto _test_eof + _test_eof54: cs = 54; goto _test_eof + _test_eof55: cs = 55; goto _test_eof + _test_eof56: cs = 56; goto _test_eof + _test_eof57: cs = 57; goto _test_eof + _test_eof58: cs = 58; goto _test_eof + _test_eof59: cs = 59; goto _test_eof + _test_eof60: 
cs = 60; goto _test_eof + _test_eof61: cs = 61; goto _test_eof + _test_eof62: cs = 62; goto _test_eof + _test_eof63: cs = 63; goto _test_eof + _test_eof64: cs = 64; goto _test_eof + _test_eof65: cs = 65; goto _test_eof + _test_eof66: cs = 66; goto _test_eof + _test_eof67: cs = 67; goto _test_eof + _test_eof68: cs = 68; goto _test_eof + _test_eof69: cs = 69; goto _test_eof + _test_eof70: cs = 70; goto _test_eof + _test_eof71: cs = 71; goto _test_eof + _test_eof72: cs = 72; goto _test_eof + _test_eof73: cs = 73; goto _test_eof + _test_eof74: cs = 74; goto _test_eof + _test_eof75: cs = 75; goto _test_eof + _test_eof76: cs = 76; goto _test_eof + _test_eof77: cs = 77; goto _test_eof + _test_eof78: cs = 78; goto _test_eof + _test_eof79: cs = 79; goto _test_eof + _test_eof80: cs = 80; goto _test_eof + _test_eof81: cs = 81; goto _test_eof + _test_eof82: cs = 82; goto _test_eof + _test_eof83: cs = 83; goto _test_eof + _test_eof84: cs = 84; goto _test_eof + _test_eof85: cs = 85; goto _test_eof + _test_eof86: cs = 86; goto _test_eof + _test_eof87: cs = 87; goto _test_eof + _test_eof88: cs = 88; goto _test_eof + _test_eof89: cs = 89; goto _test_eof + _test_eof90: cs = 90; goto _test_eof + _test_eof91: cs = 91; goto _test_eof + _test_eof92: cs = 92; goto _test_eof + _test_eof93: cs = 93; goto _test_eof + _test_eof94: cs = 94; goto _test_eof + _test_eof95: cs = 95; goto _test_eof + _test_eof96: cs = 96; goto _test_eof + _test_eof97: cs = 97; goto _test_eof + _test_eof98: cs = 98; goto _test_eof + _test_eof99: cs = 99; goto _test_eof + _test_eof100: cs = 100; goto _test_eof + _test_eof101: cs = 101; goto _test_eof + _test_eof102: cs = 102; goto _test_eof + _test_eof103: cs = 103; goto _test_eof + _test_eof104: cs = 104; goto _test_eof + _test_eof105: cs = 105; goto _test_eof + _test_eof106: cs = 106; goto _test_eof + _test_eof107: cs = 107; goto _test_eof + _test_eof108: cs = 108; goto _test_eof + _test_eof109: cs = 109; goto _test_eof + _test_eof110: cs = 110; goto _test_eof + 
_test_eof111: cs = 111; goto _test_eof + _test_eof112: cs = 112; goto _test_eof + _test_eof113: cs = 113; goto _test_eof + _test_eof114: cs = 114; goto _test_eof + _test_eof115: cs = 115; goto _test_eof + _test_eof116: cs = 116; goto _test_eof + _test_eof117: cs = 117; goto _test_eof + _test_eof118: cs = 118; goto _test_eof + _test_eof119: cs = 119; goto _test_eof + _test_eof120: cs = 120; goto _test_eof + _test_eof121: cs = 121; goto _test_eof + _test_eof122: cs = 122; goto _test_eof + _test_eof123: cs = 123; goto _test_eof + _test_eof124: cs = 124; goto _test_eof + _test_eof125: cs = 125; goto _test_eof + _test_eof126: cs = 126; goto _test_eof + _test_eof127: cs = 127; goto _test_eof + _test_eof128: cs = 128; goto _test_eof + _test_eof129: cs = 129; goto _test_eof + _test_eof130: cs = 130; goto _test_eof + _test_eof131: cs = 131; goto _test_eof + _test_eof132: cs = 132; goto _test_eof + _test_eof133: cs = 133; goto _test_eof + _test_eof134: cs = 134; goto _test_eof + _test_eof135: cs = 135; goto _test_eof + _test_eof136: cs = 136; goto _test_eof + _test_eof137: cs = 137; goto _test_eof + _test_eof138: cs = 138; goto _test_eof + _test_eof139: cs = 139; goto _test_eof + _test_eof140: cs = 140; goto _test_eof + _test_eof4864: cs = 4864; goto _test_eof + _test_eof4865: cs = 4865; goto _test_eof + _test_eof141: cs = 141; goto _test_eof + _test_eof4866: cs = 4866; goto _test_eof + _test_eof4867: cs = 4867; goto _test_eof + _test_eof142: cs = 142; goto _test_eof + _test_eof143: cs = 143; goto _test_eof + _test_eof144: cs = 144; goto _test_eof + _test_eof145: cs = 145; goto _test_eof + _test_eof146: cs = 146; goto _test_eof + _test_eof147: cs = 147; goto _test_eof + _test_eof148: cs = 148; goto _test_eof + _test_eof149: cs = 149; goto _test_eof + _test_eof150: cs = 150; goto _test_eof + _test_eof151: cs = 151; goto _test_eof + _test_eof152: cs = 152; goto _test_eof + _test_eof153: cs = 153; goto _test_eof + _test_eof154: cs = 154; goto _test_eof + _test_eof155: cs = 155; 
goto _test_eof + _test_eof156: cs = 156; goto _test_eof + _test_eof157: cs = 157; goto _test_eof + _test_eof158: cs = 158; goto _test_eof + _test_eof159: cs = 159; goto _test_eof + _test_eof160: cs = 160; goto _test_eof + _test_eof161: cs = 161; goto _test_eof + _test_eof162: cs = 162; goto _test_eof + _test_eof163: cs = 163; goto _test_eof + _test_eof164: cs = 164; goto _test_eof + _test_eof165: cs = 165; goto _test_eof + _test_eof166: cs = 166; goto _test_eof + _test_eof167: cs = 167; goto _test_eof + _test_eof168: cs = 168; goto _test_eof + _test_eof169: cs = 169; goto _test_eof + _test_eof170: cs = 170; goto _test_eof + _test_eof171: cs = 171; goto _test_eof + _test_eof172: cs = 172; goto _test_eof + _test_eof173: cs = 173; goto _test_eof + _test_eof174: cs = 174; goto _test_eof + _test_eof175: cs = 175; goto _test_eof + _test_eof176: cs = 176; goto _test_eof + _test_eof177: cs = 177; goto _test_eof + _test_eof178: cs = 178; goto _test_eof + _test_eof179: cs = 179; goto _test_eof + _test_eof180: cs = 180; goto _test_eof + _test_eof181: cs = 181; goto _test_eof + _test_eof182: cs = 182; goto _test_eof + _test_eof183: cs = 183; goto _test_eof + _test_eof184: cs = 184; goto _test_eof + _test_eof185: cs = 185; goto _test_eof + _test_eof186: cs = 186; goto _test_eof + _test_eof187: cs = 187; goto _test_eof + _test_eof188: cs = 188; goto _test_eof + _test_eof189: cs = 189; goto _test_eof + _test_eof190: cs = 190; goto _test_eof + _test_eof191: cs = 191; goto _test_eof + _test_eof192: cs = 192; goto _test_eof + _test_eof193: cs = 193; goto _test_eof + _test_eof194: cs = 194; goto _test_eof + _test_eof195: cs = 195; goto _test_eof + _test_eof196: cs = 196; goto _test_eof + _test_eof197: cs = 197; goto _test_eof + _test_eof198: cs = 198; goto _test_eof + _test_eof199: cs = 199; goto _test_eof + _test_eof200: cs = 200; goto _test_eof + _test_eof201: cs = 201; goto _test_eof + _test_eof202: cs = 202; goto _test_eof + _test_eof203: cs = 203; goto _test_eof + _test_eof204: 
cs = 204; goto _test_eof + _test_eof205: cs = 205; goto _test_eof + _test_eof206: cs = 206; goto _test_eof + _test_eof207: cs = 207; goto _test_eof + _test_eof208: cs = 208; goto _test_eof + _test_eof209: cs = 209; goto _test_eof + _test_eof210: cs = 210; goto _test_eof + _test_eof211: cs = 211; goto _test_eof + _test_eof212: cs = 212; goto _test_eof + _test_eof213: cs = 213; goto _test_eof + _test_eof214: cs = 214; goto _test_eof + _test_eof215: cs = 215; goto _test_eof + _test_eof216: cs = 216; goto _test_eof + _test_eof217: cs = 217; goto _test_eof + _test_eof218: cs = 218; goto _test_eof + _test_eof219: cs = 219; goto _test_eof + _test_eof220: cs = 220; goto _test_eof + _test_eof221: cs = 221; goto _test_eof + _test_eof222: cs = 222; goto _test_eof + _test_eof223: cs = 223; goto _test_eof + _test_eof224: cs = 224; goto _test_eof + _test_eof225: cs = 225; goto _test_eof + _test_eof226: cs = 226; goto _test_eof + _test_eof227: cs = 227; goto _test_eof + _test_eof228: cs = 228; goto _test_eof + _test_eof229: cs = 229; goto _test_eof + _test_eof230: cs = 230; goto _test_eof + _test_eof231: cs = 231; goto _test_eof + _test_eof232: cs = 232; goto _test_eof + _test_eof233: cs = 233; goto _test_eof + _test_eof234: cs = 234; goto _test_eof + _test_eof235: cs = 235; goto _test_eof + _test_eof236: cs = 236; goto _test_eof + _test_eof237: cs = 237; goto _test_eof + _test_eof238: cs = 238; goto _test_eof + _test_eof239: cs = 239; goto _test_eof + _test_eof240: cs = 240; goto _test_eof + _test_eof241: cs = 241; goto _test_eof + _test_eof242: cs = 242; goto _test_eof + _test_eof243: cs = 243; goto _test_eof + _test_eof244: cs = 244; goto _test_eof + _test_eof245: cs = 245; goto _test_eof + _test_eof246: cs = 246; goto _test_eof + _test_eof247: cs = 247; goto _test_eof + _test_eof248: cs = 248; goto _test_eof + _test_eof249: cs = 249; goto _test_eof + _test_eof250: cs = 250; goto _test_eof + _test_eof251: cs = 251; goto _test_eof + _test_eof252: cs = 252; goto _test_eof + 
_test_eof253: cs = 253; goto _test_eof + _test_eof254: cs = 254; goto _test_eof + _test_eof255: cs = 255; goto _test_eof + _test_eof256: cs = 256; goto _test_eof + _test_eof257: cs = 257; goto _test_eof + _test_eof258: cs = 258; goto _test_eof + _test_eof259: cs = 259; goto _test_eof + _test_eof260: cs = 260; goto _test_eof + _test_eof261: cs = 261; goto _test_eof + _test_eof262: cs = 262; goto _test_eof + _test_eof263: cs = 263; goto _test_eof + _test_eof264: cs = 264; goto _test_eof + _test_eof265: cs = 265; goto _test_eof + _test_eof266: cs = 266; goto _test_eof + _test_eof267: cs = 267; goto _test_eof + _test_eof268: cs = 268; goto _test_eof + _test_eof269: cs = 269; goto _test_eof + _test_eof270: cs = 270; goto _test_eof + _test_eof271: cs = 271; goto _test_eof + _test_eof272: cs = 272; goto _test_eof + _test_eof273: cs = 273; goto _test_eof + _test_eof274: cs = 274; goto _test_eof + _test_eof275: cs = 275; goto _test_eof + _test_eof276: cs = 276; goto _test_eof + _test_eof277: cs = 277; goto _test_eof + _test_eof278: cs = 278; goto _test_eof + _test_eof279: cs = 279; goto _test_eof + _test_eof280: cs = 280; goto _test_eof + _test_eof281: cs = 281; goto _test_eof + _test_eof282: cs = 282; goto _test_eof + _test_eof283: cs = 283; goto _test_eof + _test_eof284: cs = 284; goto _test_eof + _test_eof285: cs = 285; goto _test_eof + _test_eof286: cs = 286; goto _test_eof + _test_eof287: cs = 287; goto _test_eof + _test_eof288: cs = 288; goto _test_eof + _test_eof289: cs = 289; goto _test_eof + _test_eof290: cs = 290; goto _test_eof + _test_eof291: cs = 291; goto _test_eof + _test_eof292: cs = 292; goto _test_eof + _test_eof293: cs = 293; goto _test_eof + _test_eof294: cs = 294; goto _test_eof + _test_eof295: cs = 295; goto _test_eof + _test_eof296: cs = 296; goto _test_eof + _test_eof297: cs = 297; goto _test_eof + _test_eof298: cs = 298; goto _test_eof + _test_eof299: cs = 299; goto _test_eof + _test_eof300: cs = 300; goto _test_eof + _test_eof301: cs = 301; goto 
_test_eof + _test_eof302: cs = 302; goto _test_eof + _test_eof303: cs = 303; goto _test_eof + _test_eof304: cs = 304; goto _test_eof + _test_eof305: cs = 305; goto _test_eof + _test_eof306: cs = 306; goto _test_eof + _test_eof307: cs = 307; goto _test_eof + _test_eof308: cs = 308; goto _test_eof + _test_eof309: cs = 309; goto _test_eof + _test_eof310: cs = 310; goto _test_eof + _test_eof311: cs = 311; goto _test_eof + _test_eof312: cs = 312; goto _test_eof + _test_eof313: cs = 313; goto _test_eof + _test_eof314: cs = 314; goto _test_eof + _test_eof315: cs = 315; goto _test_eof + _test_eof316: cs = 316; goto _test_eof + _test_eof317: cs = 317; goto _test_eof + _test_eof318: cs = 318; goto _test_eof + _test_eof319: cs = 319; goto _test_eof + _test_eof320: cs = 320; goto _test_eof + _test_eof321: cs = 321; goto _test_eof + _test_eof322: cs = 322; goto _test_eof + _test_eof323: cs = 323; goto _test_eof + _test_eof324: cs = 324; goto _test_eof + _test_eof325: cs = 325; goto _test_eof + _test_eof326: cs = 326; goto _test_eof + _test_eof327: cs = 327; goto _test_eof + _test_eof328: cs = 328; goto _test_eof + _test_eof329: cs = 329; goto _test_eof + _test_eof330: cs = 330; goto _test_eof + _test_eof331: cs = 331; goto _test_eof + _test_eof332: cs = 332; goto _test_eof + _test_eof333: cs = 333; goto _test_eof + _test_eof334: cs = 334; goto _test_eof + _test_eof335: cs = 335; goto _test_eof + _test_eof336: cs = 336; goto _test_eof + _test_eof337: cs = 337; goto _test_eof + _test_eof338: cs = 338; goto _test_eof + _test_eof339: cs = 339; goto _test_eof + _test_eof340: cs = 340; goto _test_eof + _test_eof341: cs = 341; goto _test_eof + _test_eof342: cs = 342; goto _test_eof + _test_eof343: cs = 343; goto _test_eof + _test_eof344: cs = 344; goto _test_eof + _test_eof345: cs = 345; goto _test_eof + _test_eof346: cs = 346; goto _test_eof + _test_eof347: cs = 347; goto _test_eof + _test_eof348: cs = 348; goto _test_eof + _test_eof349: cs = 349; goto _test_eof + _test_eof350: cs = 
350; goto _test_eof + _test_eof351: cs = 351; goto _test_eof + _test_eof352: cs = 352; goto _test_eof + _test_eof353: cs = 353; goto _test_eof + _test_eof354: cs = 354; goto _test_eof + _test_eof355: cs = 355; goto _test_eof + _test_eof356: cs = 356; goto _test_eof + _test_eof357: cs = 357; goto _test_eof + _test_eof358: cs = 358; goto _test_eof + _test_eof359: cs = 359; goto _test_eof + _test_eof360: cs = 360; goto _test_eof + _test_eof361: cs = 361; goto _test_eof + _test_eof362: cs = 362; goto _test_eof + _test_eof363: cs = 363; goto _test_eof + _test_eof364: cs = 364; goto _test_eof + _test_eof365: cs = 365; goto _test_eof + _test_eof366: cs = 366; goto _test_eof + _test_eof367: cs = 367; goto _test_eof + _test_eof368: cs = 368; goto _test_eof + _test_eof369: cs = 369; goto _test_eof + _test_eof370: cs = 370; goto _test_eof + _test_eof371: cs = 371; goto _test_eof + _test_eof372: cs = 372; goto _test_eof + _test_eof373: cs = 373; goto _test_eof + _test_eof374: cs = 374; goto _test_eof + _test_eof375: cs = 375; goto _test_eof + _test_eof376: cs = 376; goto _test_eof + _test_eof377: cs = 377; goto _test_eof + _test_eof378: cs = 378; goto _test_eof + _test_eof379: cs = 379; goto _test_eof + _test_eof380: cs = 380; goto _test_eof + _test_eof381: cs = 381; goto _test_eof + _test_eof382: cs = 382; goto _test_eof + _test_eof383: cs = 383; goto _test_eof + _test_eof384: cs = 384; goto _test_eof + _test_eof385: cs = 385; goto _test_eof + _test_eof386: cs = 386; goto _test_eof + _test_eof387: cs = 387; goto _test_eof + _test_eof388: cs = 388; goto _test_eof + _test_eof389: cs = 389; goto _test_eof + _test_eof390: cs = 390; goto _test_eof + _test_eof391: cs = 391; goto _test_eof + _test_eof392: cs = 392; goto _test_eof + _test_eof393: cs = 393; goto _test_eof + _test_eof394: cs = 394; goto _test_eof + _test_eof395: cs = 395; goto _test_eof + _test_eof396: cs = 396; goto _test_eof + _test_eof397: cs = 397; goto _test_eof + _test_eof398: cs = 398; goto _test_eof + 
_test_eof399: cs = 399; goto _test_eof + _test_eof400: cs = 400; goto _test_eof + _test_eof401: cs = 401; goto _test_eof + _test_eof402: cs = 402; goto _test_eof + _test_eof403: cs = 403; goto _test_eof + _test_eof404: cs = 404; goto _test_eof + _test_eof405: cs = 405; goto _test_eof + _test_eof406: cs = 406; goto _test_eof + _test_eof407: cs = 407; goto _test_eof + _test_eof408: cs = 408; goto _test_eof + _test_eof409: cs = 409; goto _test_eof + _test_eof410: cs = 410; goto _test_eof + _test_eof411: cs = 411; goto _test_eof + _test_eof412: cs = 412; goto _test_eof + _test_eof4868: cs = 4868; goto _test_eof + _test_eof413: cs = 413; goto _test_eof + _test_eof414: cs = 414; goto _test_eof + _test_eof415: cs = 415; goto _test_eof + _test_eof416: cs = 416; goto _test_eof + _test_eof417: cs = 417; goto _test_eof + _test_eof418: cs = 418; goto _test_eof + _test_eof419: cs = 419; goto _test_eof + _test_eof420: cs = 420; goto _test_eof + _test_eof421: cs = 421; goto _test_eof + _test_eof422: cs = 422; goto _test_eof + _test_eof423: cs = 423; goto _test_eof + _test_eof424: cs = 424; goto _test_eof + _test_eof425: cs = 425; goto _test_eof + _test_eof426: cs = 426; goto _test_eof + _test_eof427: cs = 427; goto _test_eof + _test_eof428: cs = 428; goto _test_eof + _test_eof429: cs = 429; goto _test_eof + _test_eof430: cs = 430; goto _test_eof + _test_eof431: cs = 431; goto _test_eof + _test_eof432: cs = 432; goto _test_eof + _test_eof433: cs = 433; goto _test_eof + _test_eof434: cs = 434; goto _test_eof + _test_eof435: cs = 435; goto _test_eof + _test_eof436: cs = 436; goto _test_eof + _test_eof437: cs = 437; goto _test_eof + _test_eof438: cs = 438; goto _test_eof + _test_eof439: cs = 439; goto _test_eof + _test_eof440: cs = 440; goto _test_eof + _test_eof441: cs = 441; goto _test_eof + _test_eof442: cs = 442; goto _test_eof + _test_eof443: cs = 443; goto _test_eof + _test_eof444: cs = 444; goto _test_eof + _test_eof445: cs = 445; goto _test_eof + _test_eof446: cs = 446; goto 
_test_eof + _test_eof447: cs = 447; goto _test_eof + _test_eof448: cs = 448; goto _test_eof + _test_eof449: cs = 449; goto _test_eof + _test_eof450: cs = 450; goto _test_eof + _test_eof451: cs = 451; goto _test_eof + _test_eof452: cs = 452; goto _test_eof + _test_eof453: cs = 453; goto _test_eof + _test_eof454: cs = 454; goto _test_eof + _test_eof455: cs = 455; goto _test_eof + _test_eof456: cs = 456; goto _test_eof + _test_eof457: cs = 457; goto _test_eof + _test_eof458: cs = 458; goto _test_eof + _test_eof459: cs = 459; goto _test_eof + _test_eof460: cs = 460; goto _test_eof + _test_eof461: cs = 461; goto _test_eof + _test_eof462: cs = 462; goto _test_eof + _test_eof463: cs = 463; goto _test_eof + _test_eof464: cs = 464; goto _test_eof + _test_eof465: cs = 465; goto _test_eof + _test_eof466: cs = 466; goto _test_eof + _test_eof467: cs = 467; goto _test_eof + _test_eof468: cs = 468; goto _test_eof + _test_eof469: cs = 469; goto _test_eof + _test_eof470: cs = 470; goto _test_eof + _test_eof471: cs = 471; goto _test_eof + _test_eof472: cs = 472; goto _test_eof + _test_eof473: cs = 473; goto _test_eof + _test_eof474: cs = 474; goto _test_eof + _test_eof475: cs = 475; goto _test_eof + _test_eof476: cs = 476; goto _test_eof + _test_eof477: cs = 477; goto _test_eof + _test_eof478: cs = 478; goto _test_eof + _test_eof479: cs = 479; goto _test_eof + _test_eof480: cs = 480; goto _test_eof + _test_eof481: cs = 481; goto _test_eof + _test_eof482: cs = 482; goto _test_eof + _test_eof483: cs = 483; goto _test_eof + _test_eof484: cs = 484; goto _test_eof + _test_eof485: cs = 485; goto _test_eof + _test_eof486: cs = 486; goto _test_eof + _test_eof487: cs = 487; goto _test_eof + _test_eof488: cs = 488; goto _test_eof + _test_eof489: cs = 489; goto _test_eof + _test_eof490: cs = 490; goto _test_eof + _test_eof491: cs = 491; goto _test_eof + _test_eof492: cs = 492; goto _test_eof + _test_eof493: cs = 493; goto _test_eof + _test_eof494: cs = 494; goto _test_eof + _test_eof495: cs = 
495; goto _test_eof + _test_eof496: cs = 496; goto _test_eof + _test_eof497: cs = 497; goto _test_eof + _test_eof498: cs = 498; goto _test_eof + _test_eof499: cs = 499; goto _test_eof + _test_eof500: cs = 500; goto _test_eof + _test_eof501: cs = 501; goto _test_eof + _test_eof502: cs = 502; goto _test_eof + _test_eof503: cs = 503; goto _test_eof + _test_eof504: cs = 504; goto _test_eof + _test_eof505: cs = 505; goto _test_eof + _test_eof506: cs = 506; goto _test_eof + _test_eof507: cs = 507; goto _test_eof + _test_eof508: cs = 508; goto _test_eof + _test_eof509: cs = 509; goto _test_eof + _test_eof510: cs = 510; goto _test_eof + _test_eof511: cs = 511; goto _test_eof + _test_eof512: cs = 512; goto _test_eof + _test_eof513: cs = 513; goto _test_eof + _test_eof514: cs = 514; goto _test_eof + _test_eof515: cs = 515; goto _test_eof + _test_eof516: cs = 516; goto _test_eof + _test_eof517: cs = 517; goto _test_eof + _test_eof518: cs = 518; goto _test_eof + _test_eof519: cs = 519; goto _test_eof + _test_eof520: cs = 520; goto _test_eof + _test_eof521: cs = 521; goto _test_eof + _test_eof522: cs = 522; goto _test_eof + _test_eof523: cs = 523; goto _test_eof + _test_eof524: cs = 524; goto _test_eof + _test_eof525: cs = 525; goto _test_eof + _test_eof526: cs = 526; goto _test_eof + _test_eof527: cs = 527; goto _test_eof + _test_eof528: cs = 528; goto _test_eof + _test_eof529: cs = 529; goto _test_eof + _test_eof530: cs = 530; goto _test_eof + _test_eof531: cs = 531; goto _test_eof + _test_eof532: cs = 532; goto _test_eof + _test_eof533: cs = 533; goto _test_eof + _test_eof534: cs = 534; goto _test_eof + _test_eof535: cs = 535; goto _test_eof + _test_eof536: cs = 536; goto _test_eof + _test_eof537: cs = 537; goto _test_eof + _test_eof538: cs = 538; goto _test_eof + _test_eof539: cs = 539; goto _test_eof + _test_eof540: cs = 540; goto _test_eof + _test_eof541: cs = 541; goto _test_eof + _test_eof542: cs = 542; goto _test_eof + _test_eof543: cs = 543; goto _test_eof + 
_test_eof544: cs = 544; goto _test_eof + _test_eof545: cs = 545; goto _test_eof + _test_eof546: cs = 546; goto _test_eof + _test_eof547: cs = 547; goto _test_eof + _test_eof548: cs = 548; goto _test_eof + _test_eof549: cs = 549; goto _test_eof + _test_eof550: cs = 550; goto _test_eof + _test_eof551: cs = 551; goto _test_eof + _test_eof552: cs = 552; goto _test_eof + _test_eof553: cs = 553; goto _test_eof + _test_eof554: cs = 554; goto _test_eof + _test_eof555: cs = 555; goto _test_eof + _test_eof556: cs = 556; goto _test_eof + _test_eof557: cs = 557; goto _test_eof + _test_eof558: cs = 558; goto _test_eof + _test_eof559: cs = 559; goto _test_eof + _test_eof560: cs = 560; goto _test_eof + _test_eof561: cs = 561; goto _test_eof + _test_eof4869: cs = 4869; goto _test_eof + _test_eof562: cs = 562; goto _test_eof + _test_eof563: cs = 563; goto _test_eof + _test_eof564: cs = 564; goto _test_eof + _test_eof565: cs = 565; goto _test_eof + _test_eof566: cs = 566; goto _test_eof + _test_eof567: cs = 567; goto _test_eof + _test_eof4870: cs = 4870; goto _test_eof + _test_eof568: cs = 568; goto _test_eof + _test_eof569: cs = 569; goto _test_eof + _test_eof570: cs = 570; goto _test_eof + _test_eof571: cs = 571; goto _test_eof + _test_eof572: cs = 572; goto _test_eof + _test_eof573: cs = 573; goto _test_eof + _test_eof574: cs = 574; goto _test_eof + _test_eof4871: cs = 4871; goto _test_eof + _test_eof575: cs = 575; goto _test_eof + _test_eof576: cs = 576; goto _test_eof + _test_eof577: cs = 577; goto _test_eof + _test_eof578: cs = 578; goto _test_eof + _test_eof579: cs = 579; goto _test_eof + _test_eof580: cs = 580; goto _test_eof + _test_eof581: cs = 581; goto _test_eof + _test_eof582: cs = 582; goto _test_eof + _test_eof583: cs = 583; goto _test_eof + _test_eof584: cs = 584; goto _test_eof + _test_eof585: cs = 585; goto _test_eof + _test_eof586: cs = 586; goto _test_eof + _test_eof587: cs = 587; goto _test_eof + _test_eof588: cs = 588; goto _test_eof + _test_eof589: cs = 589; 
goto _test_eof + _test_eof590: cs = 590; goto _test_eof + _test_eof591: cs = 591; goto _test_eof + _test_eof592: cs = 592; goto _test_eof + _test_eof593: cs = 593; goto _test_eof + _test_eof594: cs = 594; goto _test_eof + _test_eof595: cs = 595; goto _test_eof + _test_eof596: cs = 596; goto _test_eof + _test_eof597: cs = 597; goto _test_eof + _test_eof598: cs = 598; goto _test_eof + _test_eof599: cs = 599; goto _test_eof + _test_eof600: cs = 600; goto _test_eof + _test_eof601: cs = 601; goto _test_eof + _test_eof602: cs = 602; goto _test_eof + _test_eof603: cs = 603; goto _test_eof + _test_eof604: cs = 604; goto _test_eof + _test_eof605: cs = 605; goto _test_eof + _test_eof606: cs = 606; goto _test_eof + _test_eof607: cs = 607; goto _test_eof + _test_eof608: cs = 608; goto _test_eof + _test_eof609: cs = 609; goto _test_eof + _test_eof610: cs = 610; goto _test_eof + _test_eof611: cs = 611; goto _test_eof + _test_eof612: cs = 612; goto _test_eof + _test_eof613: cs = 613; goto _test_eof + _test_eof614: cs = 614; goto _test_eof + _test_eof615: cs = 615; goto _test_eof + _test_eof616: cs = 616; goto _test_eof + _test_eof617: cs = 617; goto _test_eof + _test_eof618: cs = 618; goto _test_eof + _test_eof619: cs = 619; goto _test_eof + _test_eof620: cs = 620; goto _test_eof + _test_eof621: cs = 621; goto _test_eof + _test_eof622: cs = 622; goto _test_eof + _test_eof623: cs = 623; goto _test_eof + _test_eof624: cs = 624; goto _test_eof + _test_eof625: cs = 625; goto _test_eof + _test_eof626: cs = 626; goto _test_eof + _test_eof627: cs = 627; goto _test_eof + _test_eof628: cs = 628; goto _test_eof + _test_eof629: cs = 629; goto _test_eof + _test_eof630: cs = 630; goto _test_eof + _test_eof631: cs = 631; goto _test_eof + _test_eof632: cs = 632; goto _test_eof + _test_eof633: cs = 633; goto _test_eof + _test_eof634: cs = 634; goto _test_eof + _test_eof635: cs = 635; goto _test_eof + _test_eof636: cs = 636; goto _test_eof + _test_eof637: cs = 637; goto _test_eof + _test_eof638: 
cs = 638; goto _test_eof + _test_eof639: cs = 639; goto _test_eof + _test_eof640: cs = 640; goto _test_eof + _test_eof641: cs = 641; goto _test_eof + _test_eof642: cs = 642; goto _test_eof + _test_eof643: cs = 643; goto _test_eof + _test_eof644: cs = 644; goto _test_eof + _test_eof645: cs = 645; goto _test_eof + _test_eof646: cs = 646; goto _test_eof + _test_eof647: cs = 647; goto _test_eof + _test_eof648: cs = 648; goto _test_eof + _test_eof649: cs = 649; goto _test_eof + _test_eof650: cs = 650; goto _test_eof + _test_eof651: cs = 651; goto _test_eof + _test_eof652: cs = 652; goto _test_eof + _test_eof653: cs = 653; goto _test_eof + _test_eof654: cs = 654; goto _test_eof + _test_eof655: cs = 655; goto _test_eof + _test_eof656: cs = 656; goto _test_eof + _test_eof657: cs = 657; goto _test_eof + _test_eof658: cs = 658; goto _test_eof + _test_eof659: cs = 659; goto _test_eof + _test_eof660: cs = 660; goto _test_eof + _test_eof661: cs = 661; goto _test_eof + _test_eof662: cs = 662; goto _test_eof + _test_eof663: cs = 663; goto _test_eof + _test_eof664: cs = 664; goto _test_eof + _test_eof665: cs = 665; goto _test_eof + _test_eof666: cs = 666; goto _test_eof + _test_eof667: cs = 667; goto _test_eof + _test_eof668: cs = 668; goto _test_eof + _test_eof669: cs = 669; goto _test_eof + _test_eof670: cs = 670; goto _test_eof + _test_eof671: cs = 671; goto _test_eof + _test_eof672: cs = 672; goto _test_eof + _test_eof673: cs = 673; goto _test_eof + _test_eof674: cs = 674; goto _test_eof + _test_eof675: cs = 675; goto _test_eof + _test_eof676: cs = 676; goto _test_eof + _test_eof677: cs = 677; goto _test_eof + _test_eof678: cs = 678; goto _test_eof + _test_eof679: cs = 679; goto _test_eof + _test_eof680: cs = 680; goto _test_eof + _test_eof681: cs = 681; goto _test_eof + _test_eof682: cs = 682; goto _test_eof + _test_eof683: cs = 683; goto _test_eof + _test_eof684: cs = 684; goto _test_eof + _test_eof685: cs = 685; goto _test_eof + _test_eof686: cs = 686; goto _test_eof + 
_test_eof687: cs = 687; goto _test_eof + _test_eof688: cs = 688; goto _test_eof + _test_eof689: cs = 689; goto _test_eof + _test_eof690: cs = 690; goto _test_eof + _test_eof691: cs = 691; goto _test_eof + _test_eof692: cs = 692; goto _test_eof + _test_eof693: cs = 693; goto _test_eof + _test_eof694: cs = 694; goto _test_eof + _test_eof695: cs = 695; goto _test_eof + _test_eof696: cs = 696; goto _test_eof + _test_eof697: cs = 697; goto _test_eof + _test_eof698: cs = 698; goto _test_eof + _test_eof699: cs = 699; goto _test_eof + _test_eof700: cs = 700; goto _test_eof + _test_eof701: cs = 701; goto _test_eof + _test_eof702: cs = 702; goto _test_eof + _test_eof703: cs = 703; goto _test_eof + _test_eof704: cs = 704; goto _test_eof + _test_eof705: cs = 705; goto _test_eof + _test_eof706: cs = 706; goto _test_eof + _test_eof707: cs = 707; goto _test_eof + _test_eof708: cs = 708; goto _test_eof + _test_eof709: cs = 709; goto _test_eof + _test_eof710: cs = 710; goto _test_eof + _test_eof711: cs = 711; goto _test_eof + _test_eof712: cs = 712; goto _test_eof + _test_eof713: cs = 713; goto _test_eof + _test_eof714: cs = 714; goto _test_eof + _test_eof715: cs = 715; goto _test_eof + _test_eof716: cs = 716; goto _test_eof + _test_eof717: cs = 717; goto _test_eof + _test_eof718: cs = 718; goto _test_eof + _test_eof719: cs = 719; goto _test_eof + _test_eof720: cs = 720; goto _test_eof + _test_eof721: cs = 721; goto _test_eof + _test_eof722: cs = 722; goto _test_eof + _test_eof723: cs = 723; goto _test_eof + _test_eof724: cs = 724; goto _test_eof + _test_eof725: cs = 725; goto _test_eof + _test_eof726: cs = 726; goto _test_eof + _test_eof727: cs = 727; goto _test_eof + _test_eof728: cs = 728; goto _test_eof + _test_eof729: cs = 729; goto _test_eof + _test_eof730: cs = 730; goto _test_eof + _test_eof731: cs = 731; goto _test_eof + _test_eof732: cs = 732; goto _test_eof + _test_eof733: cs = 733; goto _test_eof + _test_eof734: cs = 734; goto _test_eof + _test_eof735: cs = 735; goto 
_test_eof + _test_eof736: cs = 736; goto _test_eof + _test_eof737: cs = 737; goto _test_eof + _test_eof738: cs = 738; goto _test_eof + _test_eof739: cs = 739; goto _test_eof + _test_eof740: cs = 740; goto _test_eof + _test_eof741: cs = 741; goto _test_eof + _test_eof742: cs = 742; goto _test_eof + _test_eof743: cs = 743; goto _test_eof + _test_eof744: cs = 744; goto _test_eof + _test_eof745: cs = 745; goto _test_eof + _test_eof746: cs = 746; goto _test_eof + _test_eof747: cs = 747; goto _test_eof + _test_eof748: cs = 748; goto _test_eof + _test_eof749: cs = 749; goto _test_eof + _test_eof750: cs = 750; goto _test_eof + _test_eof751: cs = 751; goto _test_eof + _test_eof752: cs = 752; goto _test_eof + _test_eof753: cs = 753; goto _test_eof + _test_eof754: cs = 754; goto _test_eof + _test_eof755: cs = 755; goto _test_eof + _test_eof756: cs = 756; goto _test_eof + _test_eof757: cs = 757; goto _test_eof + _test_eof758: cs = 758; goto _test_eof + _test_eof759: cs = 759; goto _test_eof + _test_eof760: cs = 760; goto _test_eof + _test_eof761: cs = 761; goto _test_eof + _test_eof762: cs = 762; goto _test_eof + _test_eof763: cs = 763; goto _test_eof + _test_eof764: cs = 764; goto _test_eof + _test_eof765: cs = 765; goto _test_eof + _test_eof766: cs = 766; goto _test_eof + _test_eof767: cs = 767; goto _test_eof + _test_eof768: cs = 768; goto _test_eof + _test_eof769: cs = 769; goto _test_eof + _test_eof770: cs = 770; goto _test_eof + _test_eof771: cs = 771; goto _test_eof + _test_eof772: cs = 772; goto _test_eof + _test_eof773: cs = 773; goto _test_eof + _test_eof774: cs = 774; goto _test_eof + _test_eof775: cs = 775; goto _test_eof + _test_eof776: cs = 776; goto _test_eof + _test_eof777: cs = 777; goto _test_eof + _test_eof778: cs = 778; goto _test_eof + _test_eof779: cs = 779; goto _test_eof + _test_eof780: cs = 780; goto _test_eof + _test_eof781: cs = 781; goto _test_eof + _test_eof782: cs = 782; goto _test_eof + _test_eof783: cs = 783; goto _test_eof + _test_eof784: cs = 
784; goto _test_eof + _test_eof785: cs = 785; goto _test_eof + _test_eof786: cs = 786; goto _test_eof + _test_eof787: cs = 787; goto _test_eof + _test_eof788: cs = 788; goto _test_eof + _test_eof789: cs = 789; goto _test_eof + _test_eof790: cs = 790; goto _test_eof + _test_eof791: cs = 791; goto _test_eof + _test_eof792: cs = 792; goto _test_eof + _test_eof793: cs = 793; goto _test_eof + _test_eof794: cs = 794; goto _test_eof + _test_eof795: cs = 795; goto _test_eof + _test_eof796: cs = 796; goto _test_eof + _test_eof797: cs = 797; goto _test_eof + _test_eof798: cs = 798; goto _test_eof + _test_eof799: cs = 799; goto _test_eof + _test_eof800: cs = 800; goto _test_eof + _test_eof801: cs = 801; goto _test_eof + _test_eof802: cs = 802; goto _test_eof + _test_eof803: cs = 803; goto _test_eof + _test_eof804: cs = 804; goto _test_eof + _test_eof805: cs = 805; goto _test_eof + _test_eof806: cs = 806; goto _test_eof + _test_eof807: cs = 807; goto _test_eof + _test_eof808: cs = 808; goto _test_eof + _test_eof809: cs = 809; goto _test_eof + _test_eof810: cs = 810; goto _test_eof + _test_eof811: cs = 811; goto _test_eof + _test_eof812: cs = 812; goto _test_eof + _test_eof813: cs = 813; goto _test_eof + _test_eof814: cs = 814; goto _test_eof + _test_eof815: cs = 815; goto _test_eof + _test_eof816: cs = 816; goto _test_eof + _test_eof817: cs = 817; goto _test_eof + _test_eof818: cs = 818; goto _test_eof + _test_eof819: cs = 819; goto _test_eof + _test_eof820: cs = 820; goto _test_eof + _test_eof821: cs = 821; goto _test_eof + _test_eof822: cs = 822; goto _test_eof + _test_eof823: cs = 823; goto _test_eof + _test_eof824: cs = 824; goto _test_eof + _test_eof825: cs = 825; goto _test_eof + _test_eof826: cs = 826; goto _test_eof + _test_eof827: cs = 827; goto _test_eof + _test_eof828: cs = 828; goto _test_eof + _test_eof829: cs = 829; goto _test_eof + _test_eof830: cs = 830; goto _test_eof + _test_eof831: cs = 831; goto _test_eof + _test_eof832: cs = 832; goto _test_eof + 
_test_eof833: cs = 833; goto _test_eof + _test_eof834: cs = 834; goto _test_eof + _test_eof835: cs = 835; goto _test_eof + _test_eof836: cs = 836; goto _test_eof + _test_eof837: cs = 837; goto _test_eof + _test_eof838: cs = 838; goto _test_eof + _test_eof839: cs = 839; goto _test_eof + _test_eof840: cs = 840; goto _test_eof + _test_eof841: cs = 841; goto _test_eof + _test_eof842: cs = 842; goto _test_eof + _test_eof843: cs = 843; goto _test_eof + _test_eof844: cs = 844; goto _test_eof + _test_eof845: cs = 845; goto _test_eof + _test_eof846: cs = 846; goto _test_eof + _test_eof847: cs = 847; goto _test_eof + _test_eof848: cs = 848; goto _test_eof + _test_eof849: cs = 849; goto _test_eof + _test_eof850: cs = 850; goto _test_eof + _test_eof851: cs = 851; goto _test_eof + _test_eof852: cs = 852; goto _test_eof + _test_eof853: cs = 853; goto _test_eof + _test_eof854: cs = 854; goto _test_eof + _test_eof855: cs = 855; goto _test_eof + _test_eof856: cs = 856; goto _test_eof + _test_eof857: cs = 857; goto _test_eof + _test_eof858: cs = 858; goto _test_eof + _test_eof859: cs = 859; goto _test_eof + _test_eof860: cs = 860; goto _test_eof + _test_eof861: cs = 861; goto _test_eof + _test_eof862: cs = 862; goto _test_eof + _test_eof863: cs = 863; goto _test_eof + _test_eof864: cs = 864; goto _test_eof + _test_eof865: cs = 865; goto _test_eof + _test_eof866: cs = 866; goto _test_eof + _test_eof867: cs = 867; goto _test_eof + _test_eof868: cs = 868; goto _test_eof + _test_eof869: cs = 869; goto _test_eof + _test_eof870: cs = 870; goto _test_eof + _test_eof871: cs = 871; goto _test_eof + _test_eof872: cs = 872; goto _test_eof + _test_eof873: cs = 873; goto _test_eof + _test_eof874: cs = 874; goto _test_eof + _test_eof875: cs = 875; goto _test_eof + _test_eof876: cs = 876; goto _test_eof + _test_eof877: cs = 877; goto _test_eof + _test_eof878: cs = 878; goto _test_eof + _test_eof879: cs = 879; goto _test_eof + _test_eof880: cs = 880; goto _test_eof + _test_eof881: cs = 881; goto 
_test_eof + _test_eof882: cs = 882; goto _test_eof + _test_eof883: cs = 883; goto _test_eof + _test_eof884: cs = 884; goto _test_eof + _test_eof885: cs = 885; goto _test_eof + _test_eof886: cs = 886; goto _test_eof + _test_eof887: cs = 887; goto _test_eof + _test_eof888: cs = 888; goto _test_eof + _test_eof889: cs = 889; goto _test_eof + _test_eof890: cs = 890; goto _test_eof + _test_eof891: cs = 891; goto _test_eof + _test_eof892: cs = 892; goto _test_eof + _test_eof893: cs = 893; goto _test_eof + _test_eof894: cs = 894; goto _test_eof + _test_eof895: cs = 895; goto _test_eof + _test_eof896: cs = 896; goto _test_eof + _test_eof897: cs = 897; goto _test_eof + _test_eof898: cs = 898; goto _test_eof + _test_eof899: cs = 899; goto _test_eof + _test_eof900: cs = 900; goto _test_eof + _test_eof901: cs = 901; goto _test_eof + _test_eof902: cs = 902; goto _test_eof + _test_eof903: cs = 903; goto _test_eof + _test_eof904: cs = 904; goto _test_eof + _test_eof905: cs = 905; goto _test_eof + _test_eof906: cs = 906; goto _test_eof + _test_eof907: cs = 907; goto _test_eof + _test_eof908: cs = 908; goto _test_eof + _test_eof909: cs = 909; goto _test_eof + _test_eof910: cs = 910; goto _test_eof + _test_eof911: cs = 911; goto _test_eof + _test_eof912: cs = 912; goto _test_eof + _test_eof913: cs = 913; goto _test_eof + _test_eof914: cs = 914; goto _test_eof + _test_eof915: cs = 915; goto _test_eof + _test_eof916: cs = 916; goto _test_eof + _test_eof917: cs = 917; goto _test_eof + _test_eof918: cs = 918; goto _test_eof + _test_eof919: cs = 919; goto _test_eof + _test_eof920: cs = 920; goto _test_eof + _test_eof921: cs = 921; goto _test_eof + _test_eof922: cs = 922; goto _test_eof + _test_eof923: cs = 923; goto _test_eof + _test_eof924: cs = 924; goto _test_eof + _test_eof925: cs = 925; goto _test_eof + _test_eof926: cs = 926; goto _test_eof + _test_eof927: cs = 927; goto _test_eof + _test_eof928: cs = 928; goto _test_eof + _test_eof929: cs = 929; goto _test_eof + _test_eof930: cs = 
930; goto _test_eof + _test_eof931: cs = 931; goto _test_eof + _test_eof932: cs = 932; goto _test_eof + _test_eof933: cs = 933; goto _test_eof + _test_eof934: cs = 934; goto _test_eof + _test_eof935: cs = 935; goto _test_eof + _test_eof936: cs = 936; goto _test_eof + _test_eof937: cs = 937; goto _test_eof + _test_eof938: cs = 938; goto _test_eof + _test_eof939: cs = 939; goto _test_eof + _test_eof940: cs = 940; goto _test_eof + _test_eof941: cs = 941; goto _test_eof + _test_eof942: cs = 942; goto _test_eof + _test_eof943: cs = 943; goto _test_eof + _test_eof944: cs = 944; goto _test_eof + _test_eof945: cs = 945; goto _test_eof + _test_eof946: cs = 946; goto _test_eof + _test_eof947: cs = 947; goto _test_eof + _test_eof948: cs = 948; goto _test_eof + _test_eof949: cs = 949; goto _test_eof + _test_eof950: cs = 950; goto _test_eof + _test_eof951: cs = 951; goto _test_eof + _test_eof952: cs = 952; goto _test_eof + _test_eof953: cs = 953; goto _test_eof + _test_eof954: cs = 954; goto _test_eof + _test_eof955: cs = 955; goto _test_eof + _test_eof956: cs = 956; goto _test_eof + _test_eof957: cs = 957; goto _test_eof + _test_eof958: cs = 958; goto _test_eof + _test_eof959: cs = 959; goto _test_eof + _test_eof960: cs = 960; goto _test_eof + _test_eof961: cs = 961; goto _test_eof + _test_eof962: cs = 962; goto _test_eof + _test_eof963: cs = 963; goto _test_eof + _test_eof964: cs = 964; goto _test_eof + _test_eof965: cs = 965; goto _test_eof + _test_eof966: cs = 966; goto _test_eof + _test_eof967: cs = 967; goto _test_eof + _test_eof968: cs = 968; goto _test_eof + _test_eof969: cs = 969; goto _test_eof + _test_eof970: cs = 970; goto _test_eof + _test_eof971: cs = 971; goto _test_eof + _test_eof972: cs = 972; goto _test_eof + _test_eof973: cs = 973; goto _test_eof + _test_eof974: cs = 974; goto _test_eof + _test_eof975: cs = 975; goto _test_eof + _test_eof976: cs = 976; goto _test_eof + _test_eof977: cs = 977; goto _test_eof + _test_eof978: cs = 978; goto _test_eof + 
_test_eof979: cs = 979; goto _test_eof + _test_eof980: cs = 980; goto _test_eof + _test_eof981: cs = 981; goto _test_eof + _test_eof982: cs = 982; goto _test_eof + _test_eof983: cs = 983; goto _test_eof + _test_eof984: cs = 984; goto _test_eof + _test_eof985: cs = 985; goto _test_eof + _test_eof986: cs = 986; goto _test_eof + _test_eof987: cs = 987; goto _test_eof + _test_eof988: cs = 988; goto _test_eof + _test_eof989: cs = 989; goto _test_eof + _test_eof990: cs = 990; goto _test_eof + _test_eof991: cs = 991; goto _test_eof + _test_eof992: cs = 992; goto _test_eof + _test_eof993: cs = 993; goto _test_eof + _test_eof994: cs = 994; goto _test_eof + _test_eof995: cs = 995; goto _test_eof + _test_eof996: cs = 996; goto _test_eof + _test_eof997: cs = 997; goto _test_eof + _test_eof998: cs = 998; goto _test_eof + _test_eof999: cs = 999; goto _test_eof + _test_eof1000: cs = 1000; goto _test_eof + _test_eof1001: cs = 1001; goto _test_eof + _test_eof1002: cs = 1002; goto _test_eof + _test_eof1003: cs = 1003; goto _test_eof + _test_eof1004: cs = 1004; goto _test_eof + _test_eof1005: cs = 1005; goto _test_eof + _test_eof1006: cs = 1006; goto _test_eof + _test_eof1007: cs = 1007; goto _test_eof + _test_eof1008: cs = 1008; goto _test_eof + _test_eof1009: cs = 1009; goto _test_eof + _test_eof1010: cs = 1010; goto _test_eof + _test_eof1011: cs = 1011; goto _test_eof + _test_eof1012: cs = 1012; goto _test_eof + _test_eof1013: cs = 1013; goto _test_eof + _test_eof1014: cs = 1014; goto _test_eof + _test_eof1015: cs = 1015; goto _test_eof + _test_eof1016: cs = 1016; goto _test_eof + _test_eof1017: cs = 1017; goto _test_eof + _test_eof1018: cs = 1018; goto _test_eof + _test_eof1019: cs = 1019; goto _test_eof + _test_eof1020: cs = 1020; goto _test_eof + _test_eof1021: cs = 1021; goto _test_eof + _test_eof1022: cs = 1022; goto _test_eof + _test_eof1023: cs = 1023; goto _test_eof + _test_eof1024: cs = 1024; goto _test_eof + _test_eof1025: cs = 1025; goto _test_eof + _test_eof1026: cs = 
1026; goto _test_eof + _test_eof1027: cs = 1027; goto _test_eof + _test_eof1028: cs = 1028; goto _test_eof + _test_eof1029: cs = 1029; goto _test_eof + _test_eof1030: cs = 1030; goto _test_eof + _test_eof1031: cs = 1031; goto _test_eof + _test_eof1032: cs = 1032; goto _test_eof + _test_eof1033: cs = 1033; goto _test_eof + _test_eof1034: cs = 1034; goto _test_eof + _test_eof1035: cs = 1035; goto _test_eof + _test_eof1036: cs = 1036; goto _test_eof + _test_eof1037: cs = 1037; goto _test_eof + _test_eof1038: cs = 1038; goto _test_eof + _test_eof1039: cs = 1039; goto _test_eof + _test_eof1040: cs = 1040; goto _test_eof + _test_eof1041: cs = 1041; goto _test_eof + _test_eof1042: cs = 1042; goto _test_eof + _test_eof1043: cs = 1043; goto _test_eof + _test_eof1044: cs = 1044; goto _test_eof + _test_eof1045: cs = 1045; goto _test_eof + _test_eof1046: cs = 1046; goto _test_eof + _test_eof1047: cs = 1047; goto _test_eof + _test_eof1048: cs = 1048; goto _test_eof + _test_eof1049: cs = 1049; goto _test_eof + _test_eof1050: cs = 1050; goto _test_eof + _test_eof1051: cs = 1051; goto _test_eof + _test_eof1052: cs = 1052; goto _test_eof + _test_eof1053: cs = 1053; goto _test_eof + _test_eof1054: cs = 1054; goto _test_eof + _test_eof1055: cs = 1055; goto _test_eof + _test_eof1056: cs = 1056; goto _test_eof + _test_eof1057: cs = 1057; goto _test_eof + _test_eof1058: cs = 1058; goto _test_eof + _test_eof1059: cs = 1059; goto _test_eof + _test_eof1060: cs = 1060; goto _test_eof + _test_eof1061: cs = 1061; goto _test_eof + _test_eof1062: cs = 1062; goto _test_eof + _test_eof1063: cs = 1063; goto _test_eof + _test_eof1064: cs = 1064; goto _test_eof + _test_eof1065: cs = 1065; goto _test_eof + _test_eof1066: cs = 1066; goto _test_eof + _test_eof1067: cs = 1067; goto _test_eof + _test_eof1068: cs = 1068; goto _test_eof + _test_eof1069: cs = 1069; goto _test_eof + _test_eof1070: cs = 1070; goto _test_eof + _test_eof1071: cs = 1071; goto _test_eof + _test_eof1072: cs = 1072; goto _test_eof 
+ _test_eof1073: cs = 1073; goto _test_eof + _test_eof1074: cs = 1074; goto _test_eof + _test_eof1075: cs = 1075; goto _test_eof + _test_eof1076: cs = 1076; goto _test_eof + _test_eof1077: cs = 1077; goto _test_eof + _test_eof1078: cs = 1078; goto _test_eof + _test_eof1079: cs = 1079; goto _test_eof + _test_eof1080: cs = 1080; goto _test_eof + _test_eof1081: cs = 1081; goto _test_eof + _test_eof1082: cs = 1082; goto _test_eof + _test_eof1083: cs = 1083; goto _test_eof + _test_eof1084: cs = 1084; goto _test_eof + _test_eof1085: cs = 1085; goto _test_eof + _test_eof1086: cs = 1086; goto _test_eof + _test_eof1087: cs = 1087; goto _test_eof + _test_eof1088: cs = 1088; goto _test_eof + _test_eof1089: cs = 1089; goto _test_eof + _test_eof4872: cs = 4872; goto _test_eof + _test_eof1090: cs = 1090; goto _test_eof + _test_eof1091: cs = 1091; goto _test_eof + _test_eof1092: cs = 1092; goto _test_eof + _test_eof1093: cs = 1093; goto _test_eof + _test_eof1094: cs = 1094; goto _test_eof + _test_eof1095: cs = 1095; goto _test_eof + _test_eof1096: cs = 1096; goto _test_eof + _test_eof1097: cs = 1097; goto _test_eof + _test_eof1098: cs = 1098; goto _test_eof + _test_eof1099: cs = 1099; goto _test_eof + _test_eof1100: cs = 1100; goto _test_eof + _test_eof1101: cs = 1101; goto _test_eof + _test_eof1102: cs = 1102; goto _test_eof + _test_eof1103: cs = 1103; goto _test_eof + _test_eof1104: cs = 1104; goto _test_eof + _test_eof1105: cs = 1105; goto _test_eof + _test_eof1106: cs = 1106; goto _test_eof + _test_eof1107: cs = 1107; goto _test_eof + _test_eof1108: cs = 1108; goto _test_eof + _test_eof1109: cs = 1109; goto _test_eof + _test_eof1110: cs = 1110; goto _test_eof + _test_eof1111: cs = 1111; goto _test_eof + _test_eof1112: cs = 1112; goto _test_eof + _test_eof1113: cs = 1113; goto _test_eof + _test_eof1114: cs = 1114; goto _test_eof + _test_eof1115: cs = 1115; goto _test_eof + _test_eof1116: cs = 1116; goto _test_eof + _test_eof1117: cs = 1117; goto _test_eof + _test_eof1118: cs = 
1118; goto _test_eof + _test_eof1119: cs = 1119; goto _test_eof + _test_eof1120: cs = 1120; goto _test_eof + _test_eof1121: cs = 1121; goto _test_eof + _test_eof1122: cs = 1122; goto _test_eof + _test_eof1123: cs = 1123; goto _test_eof + _test_eof1124: cs = 1124; goto _test_eof + _test_eof1125: cs = 1125; goto _test_eof + _test_eof1126: cs = 1126; goto _test_eof + _test_eof1127: cs = 1127; goto _test_eof + _test_eof1128: cs = 1128; goto _test_eof + _test_eof1129: cs = 1129; goto _test_eof + _test_eof1130: cs = 1130; goto _test_eof + _test_eof1131: cs = 1131; goto _test_eof + _test_eof1132: cs = 1132; goto _test_eof + _test_eof1133: cs = 1133; goto _test_eof + _test_eof1134: cs = 1134; goto _test_eof + _test_eof1135: cs = 1135; goto _test_eof + _test_eof1136: cs = 1136; goto _test_eof + _test_eof1137: cs = 1137; goto _test_eof + _test_eof1138: cs = 1138; goto _test_eof + _test_eof1139: cs = 1139; goto _test_eof + _test_eof1140: cs = 1140; goto _test_eof + _test_eof1141: cs = 1141; goto _test_eof + _test_eof1142: cs = 1142; goto _test_eof + _test_eof1143: cs = 1143; goto _test_eof + _test_eof1144: cs = 1144; goto _test_eof + _test_eof1145: cs = 1145; goto _test_eof + _test_eof1146: cs = 1146; goto _test_eof + _test_eof1147: cs = 1147; goto _test_eof + _test_eof1148: cs = 1148; goto _test_eof + _test_eof1149: cs = 1149; goto _test_eof + _test_eof1150: cs = 1150; goto _test_eof + _test_eof1151: cs = 1151; goto _test_eof + _test_eof1152: cs = 1152; goto _test_eof + _test_eof1153: cs = 1153; goto _test_eof + _test_eof1154: cs = 1154; goto _test_eof + _test_eof1155: cs = 1155; goto _test_eof + _test_eof1156: cs = 1156; goto _test_eof + _test_eof1157: cs = 1157; goto _test_eof + _test_eof1158: cs = 1158; goto _test_eof + _test_eof1159: cs = 1159; goto _test_eof + _test_eof1160: cs = 1160; goto _test_eof + _test_eof1161: cs = 1161; goto _test_eof + _test_eof1162: cs = 1162; goto _test_eof + _test_eof1163: cs = 1163; goto _test_eof + _test_eof1164: cs = 1164; goto _test_eof 
+ _test_eof1165: cs = 1165; goto _test_eof + _test_eof1166: cs = 1166; goto _test_eof + _test_eof1167: cs = 1167; goto _test_eof + _test_eof1168: cs = 1168; goto _test_eof + _test_eof1169: cs = 1169; goto _test_eof + _test_eof1170: cs = 1170; goto _test_eof + _test_eof1171: cs = 1171; goto _test_eof + _test_eof1172: cs = 1172; goto _test_eof + _test_eof1173: cs = 1173; goto _test_eof + _test_eof1174: cs = 1174; goto _test_eof + _test_eof1175: cs = 1175; goto _test_eof + _test_eof1176: cs = 1176; goto _test_eof + _test_eof1177: cs = 1177; goto _test_eof + _test_eof1178: cs = 1178; goto _test_eof + _test_eof1179: cs = 1179; goto _test_eof + _test_eof1180: cs = 1180; goto _test_eof + _test_eof1181: cs = 1181; goto _test_eof + _test_eof1182: cs = 1182; goto _test_eof + _test_eof1183: cs = 1183; goto _test_eof + _test_eof1184: cs = 1184; goto _test_eof + _test_eof1185: cs = 1185; goto _test_eof + _test_eof1186: cs = 1186; goto _test_eof + _test_eof1187: cs = 1187; goto _test_eof + _test_eof1188: cs = 1188; goto _test_eof + _test_eof1189: cs = 1189; goto _test_eof + _test_eof1190: cs = 1190; goto _test_eof + _test_eof1191: cs = 1191; goto _test_eof + _test_eof1192: cs = 1192; goto _test_eof + _test_eof1193: cs = 1193; goto _test_eof + _test_eof1194: cs = 1194; goto _test_eof + _test_eof1195: cs = 1195; goto _test_eof + _test_eof1196: cs = 1196; goto _test_eof + _test_eof1197: cs = 1197; goto _test_eof + _test_eof1198: cs = 1198; goto _test_eof + _test_eof1199: cs = 1199; goto _test_eof + _test_eof1200: cs = 1200; goto _test_eof + _test_eof1201: cs = 1201; goto _test_eof + _test_eof1202: cs = 1202; goto _test_eof + _test_eof1203: cs = 1203; goto _test_eof + _test_eof1204: cs = 1204; goto _test_eof + _test_eof1205: cs = 1205; goto _test_eof + _test_eof1206: cs = 1206; goto _test_eof + _test_eof1207: cs = 1207; goto _test_eof + _test_eof1208: cs = 1208; goto _test_eof + _test_eof1209: cs = 1209; goto _test_eof + _test_eof1210: cs = 1210; goto _test_eof + _test_eof1211: cs = 
1211; goto _test_eof + _test_eof1212: cs = 1212; goto _test_eof + _test_eof1213: cs = 1213; goto _test_eof + _test_eof1214: cs = 1214; goto _test_eof + _test_eof1215: cs = 1215; goto _test_eof + _test_eof1216: cs = 1216; goto _test_eof + _test_eof1217: cs = 1217; goto _test_eof + _test_eof1218: cs = 1218; goto _test_eof + _test_eof1219: cs = 1219; goto _test_eof + _test_eof1220: cs = 1220; goto _test_eof + _test_eof1221: cs = 1221; goto _test_eof + _test_eof1222: cs = 1222; goto _test_eof + _test_eof1223: cs = 1223; goto _test_eof + _test_eof1224: cs = 1224; goto _test_eof + _test_eof1225: cs = 1225; goto _test_eof + _test_eof1226: cs = 1226; goto _test_eof + _test_eof1227: cs = 1227; goto _test_eof + _test_eof1228: cs = 1228; goto _test_eof + _test_eof1229: cs = 1229; goto _test_eof + _test_eof1230: cs = 1230; goto _test_eof + _test_eof1231: cs = 1231; goto _test_eof + _test_eof1232: cs = 1232; goto _test_eof + _test_eof1233: cs = 1233; goto _test_eof + _test_eof1234: cs = 1234; goto _test_eof + _test_eof1235: cs = 1235; goto _test_eof + _test_eof1236: cs = 1236; goto _test_eof + _test_eof1237: cs = 1237; goto _test_eof + _test_eof1238: cs = 1238; goto _test_eof + _test_eof1239: cs = 1239; goto _test_eof + _test_eof1240: cs = 1240; goto _test_eof + _test_eof1241: cs = 1241; goto _test_eof + _test_eof1242: cs = 1242; goto _test_eof + _test_eof1243: cs = 1243; goto _test_eof + _test_eof1244: cs = 1244; goto _test_eof + _test_eof1245: cs = 1245; goto _test_eof + _test_eof1246: cs = 1246; goto _test_eof + _test_eof1247: cs = 1247; goto _test_eof + _test_eof1248: cs = 1248; goto _test_eof + _test_eof1249: cs = 1249; goto _test_eof + _test_eof1250: cs = 1250; goto _test_eof + _test_eof1251: cs = 1251; goto _test_eof + _test_eof1252: cs = 1252; goto _test_eof + _test_eof1253: cs = 1253; goto _test_eof + _test_eof1254: cs = 1254; goto _test_eof + _test_eof1255: cs = 1255; goto _test_eof + _test_eof1256: cs = 1256; goto _test_eof + _test_eof1257: cs = 1257; goto _test_eof 
+ _test_eof1258: cs = 1258; goto _test_eof + _test_eof1259: cs = 1259; goto _test_eof + _test_eof1260: cs = 1260; goto _test_eof + _test_eof1261: cs = 1261; goto _test_eof + _test_eof1262: cs = 1262; goto _test_eof + _test_eof1263: cs = 1263; goto _test_eof + _test_eof1264: cs = 1264; goto _test_eof + _test_eof1265: cs = 1265; goto _test_eof + _test_eof1266: cs = 1266; goto _test_eof + _test_eof1267: cs = 1267; goto _test_eof + _test_eof1268: cs = 1268; goto _test_eof + _test_eof1269: cs = 1269; goto _test_eof + _test_eof1270: cs = 1270; goto _test_eof + _test_eof1271: cs = 1271; goto _test_eof + _test_eof1272: cs = 1272; goto _test_eof + _test_eof1273: cs = 1273; goto _test_eof + _test_eof1274: cs = 1274; goto _test_eof + _test_eof1275: cs = 1275; goto _test_eof + _test_eof1276: cs = 1276; goto _test_eof + _test_eof1277: cs = 1277; goto _test_eof + _test_eof1278: cs = 1278; goto _test_eof + _test_eof1279: cs = 1279; goto _test_eof + _test_eof1280: cs = 1280; goto _test_eof + _test_eof1281: cs = 1281; goto _test_eof + _test_eof1282: cs = 1282; goto _test_eof + _test_eof1283: cs = 1283; goto _test_eof + _test_eof1284: cs = 1284; goto _test_eof + _test_eof1285: cs = 1285; goto _test_eof + _test_eof1286: cs = 1286; goto _test_eof + _test_eof1287: cs = 1287; goto _test_eof + _test_eof1288: cs = 1288; goto _test_eof + _test_eof1289: cs = 1289; goto _test_eof + _test_eof1290: cs = 1290; goto _test_eof + _test_eof1291: cs = 1291; goto _test_eof + _test_eof1292: cs = 1292; goto _test_eof + _test_eof1293: cs = 1293; goto _test_eof + _test_eof1294: cs = 1294; goto _test_eof + _test_eof1295: cs = 1295; goto _test_eof + _test_eof1296: cs = 1296; goto _test_eof + _test_eof1297: cs = 1297; goto _test_eof + _test_eof1298: cs = 1298; goto _test_eof + _test_eof1299: cs = 1299; goto _test_eof + _test_eof1300: cs = 1300; goto _test_eof + _test_eof1301: cs = 1301; goto _test_eof + _test_eof1302: cs = 1302; goto _test_eof + _test_eof1303: cs = 1303; goto _test_eof + _test_eof1304: cs = 
1304; goto _test_eof + _test_eof1305: cs = 1305; goto _test_eof + _test_eof1306: cs = 1306; goto _test_eof + _test_eof1307: cs = 1307; goto _test_eof + _test_eof1308: cs = 1308; goto _test_eof + _test_eof1309: cs = 1309; goto _test_eof + _test_eof1310: cs = 1310; goto _test_eof + _test_eof1311: cs = 1311; goto _test_eof + _test_eof1312: cs = 1312; goto _test_eof + _test_eof1313: cs = 1313; goto _test_eof + _test_eof1314: cs = 1314; goto _test_eof + _test_eof1315: cs = 1315; goto _test_eof + _test_eof1316: cs = 1316; goto _test_eof + _test_eof1317: cs = 1317; goto _test_eof + _test_eof1318: cs = 1318; goto _test_eof + _test_eof1319: cs = 1319; goto _test_eof + _test_eof1320: cs = 1320; goto _test_eof + _test_eof1321: cs = 1321; goto _test_eof + _test_eof1322: cs = 1322; goto _test_eof + _test_eof1323: cs = 1323; goto _test_eof + _test_eof1324: cs = 1324; goto _test_eof + _test_eof1325: cs = 1325; goto _test_eof + _test_eof1326: cs = 1326; goto _test_eof + _test_eof1327: cs = 1327; goto _test_eof + _test_eof1328: cs = 1328; goto _test_eof + _test_eof1329: cs = 1329; goto _test_eof + _test_eof1330: cs = 1330; goto _test_eof + _test_eof1331: cs = 1331; goto _test_eof + _test_eof1332: cs = 1332; goto _test_eof + _test_eof1333: cs = 1333; goto _test_eof + _test_eof1334: cs = 1334; goto _test_eof + _test_eof1335: cs = 1335; goto _test_eof + _test_eof1336: cs = 1336; goto _test_eof + _test_eof1337: cs = 1337; goto _test_eof + _test_eof1338: cs = 1338; goto _test_eof + _test_eof1339: cs = 1339; goto _test_eof + _test_eof1340: cs = 1340; goto _test_eof + _test_eof1341: cs = 1341; goto _test_eof + _test_eof1342: cs = 1342; goto _test_eof + _test_eof1343: cs = 1343; goto _test_eof + _test_eof1344: cs = 1344; goto _test_eof + _test_eof1345: cs = 1345; goto _test_eof + _test_eof1346: cs = 1346; goto _test_eof + _test_eof1347: cs = 1347; goto _test_eof + _test_eof1348: cs = 1348; goto _test_eof + _test_eof1349: cs = 1349; goto _test_eof + _test_eof1350: cs = 1350; goto _test_eof 
+ _test_eof1351: cs = 1351; goto _test_eof + _test_eof1352: cs = 1352; goto _test_eof + _test_eof1353: cs = 1353; goto _test_eof + _test_eof1354: cs = 1354; goto _test_eof + _test_eof1355: cs = 1355; goto _test_eof + _test_eof1356: cs = 1356; goto _test_eof + _test_eof1357: cs = 1357; goto _test_eof + _test_eof1358: cs = 1358; goto _test_eof + _test_eof1359: cs = 1359; goto _test_eof + _test_eof1360: cs = 1360; goto _test_eof + _test_eof1361: cs = 1361; goto _test_eof + _test_eof1362: cs = 1362; goto _test_eof + _test_eof1363: cs = 1363; goto _test_eof + _test_eof1364: cs = 1364; goto _test_eof + _test_eof1365: cs = 1365; goto _test_eof + _test_eof1366: cs = 1366; goto _test_eof + _test_eof1367: cs = 1367; goto _test_eof + _test_eof1368: cs = 1368; goto _test_eof + _test_eof1369: cs = 1369; goto _test_eof + _test_eof1370: cs = 1370; goto _test_eof + _test_eof1371: cs = 1371; goto _test_eof + _test_eof1372: cs = 1372; goto _test_eof + _test_eof1373: cs = 1373; goto _test_eof + _test_eof1374: cs = 1374; goto _test_eof + _test_eof1375: cs = 1375; goto _test_eof + _test_eof1376: cs = 1376; goto _test_eof + _test_eof1377: cs = 1377; goto _test_eof + _test_eof1378: cs = 1378; goto _test_eof + _test_eof1379: cs = 1379; goto _test_eof + _test_eof1380: cs = 1380; goto _test_eof + _test_eof1381: cs = 1381; goto _test_eof + _test_eof1382: cs = 1382; goto _test_eof + _test_eof1383: cs = 1383; goto _test_eof + _test_eof1384: cs = 1384; goto _test_eof + _test_eof1385: cs = 1385; goto _test_eof + _test_eof1386: cs = 1386; goto _test_eof + _test_eof1387: cs = 1387; goto _test_eof + _test_eof1388: cs = 1388; goto _test_eof + _test_eof1389: cs = 1389; goto _test_eof + _test_eof1390: cs = 1390; goto _test_eof + _test_eof1391: cs = 1391; goto _test_eof + _test_eof1392: cs = 1392; goto _test_eof + _test_eof1393: cs = 1393; goto _test_eof + _test_eof1394: cs = 1394; goto _test_eof + _test_eof1395: cs = 1395; goto _test_eof + _test_eof1396: cs = 1396; goto _test_eof + _test_eof1397: cs = 
1397; goto _test_eof + _test_eof1398: cs = 1398; goto _test_eof + _test_eof1399: cs = 1399; goto _test_eof + _test_eof1400: cs = 1400; goto _test_eof + _test_eof1401: cs = 1401; goto _test_eof + _test_eof1402: cs = 1402; goto _test_eof + _test_eof1403: cs = 1403; goto _test_eof + _test_eof1404: cs = 1404; goto _test_eof + _test_eof1405: cs = 1405; goto _test_eof + _test_eof1406: cs = 1406; goto _test_eof + _test_eof1407: cs = 1407; goto _test_eof + _test_eof1408: cs = 1408; goto _test_eof + _test_eof1409: cs = 1409; goto _test_eof + _test_eof1410: cs = 1410; goto _test_eof + _test_eof1411: cs = 1411; goto _test_eof + _test_eof1412: cs = 1412; goto _test_eof + _test_eof1413: cs = 1413; goto _test_eof + _test_eof1414: cs = 1414; goto _test_eof + _test_eof1415: cs = 1415; goto _test_eof + _test_eof1416: cs = 1416; goto _test_eof + _test_eof1417: cs = 1417; goto _test_eof + _test_eof1418: cs = 1418; goto _test_eof + _test_eof1419: cs = 1419; goto _test_eof + _test_eof1420: cs = 1420; goto _test_eof + _test_eof1421: cs = 1421; goto _test_eof + _test_eof1422: cs = 1422; goto _test_eof + _test_eof1423: cs = 1423; goto _test_eof + _test_eof1424: cs = 1424; goto _test_eof + _test_eof1425: cs = 1425; goto _test_eof + _test_eof1426: cs = 1426; goto _test_eof + _test_eof1427: cs = 1427; goto _test_eof + _test_eof1428: cs = 1428; goto _test_eof + _test_eof1429: cs = 1429; goto _test_eof + _test_eof1430: cs = 1430; goto _test_eof + _test_eof1431: cs = 1431; goto _test_eof + _test_eof1432: cs = 1432; goto _test_eof + _test_eof1433: cs = 1433; goto _test_eof + _test_eof1434: cs = 1434; goto _test_eof + _test_eof1435: cs = 1435; goto _test_eof + _test_eof1436: cs = 1436; goto _test_eof + _test_eof1437: cs = 1437; goto _test_eof + _test_eof1438: cs = 1438; goto _test_eof + _test_eof1439: cs = 1439; goto _test_eof + _test_eof1440: cs = 1440; goto _test_eof + _test_eof1441: cs = 1441; goto _test_eof + _test_eof1442: cs = 1442; goto _test_eof + _test_eof1443: cs = 1443; goto _test_eof 
+ _test_eof1444: cs = 1444; goto _test_eof + _test_eof1445: cs = 1445; goto _test_eof + _test_eof1446: cs = 1446; goto _test_eof + _test_eof1447: cs = 1447; goto _test_eof + _test_eof1448: cs = 1448; goto _test_eof + _test_eof1449: cs = 1449; goto _test_eof + _test_eof1450: cs = 1450; goto _test_eof + _test_eof1451: cs = 1451; goto _test_eof + _test_eof1452: cs = 1452; goto _test_eof + _test_eof1453: cs = 1453; goto _test_eof + _test_eof1454: cs = 1454; goto _test_eof + _test_eof1455: cs = 1455; goto _test_eof + _test_eof1456: cs = 1456; goto _test_eof + _test_eof1457: cs = 1457; goto _test_eof + _test_eof1458: cs = 1458; goto _test_eof + _test_eof1459: cs = 1459; goto _test_eof + _test_eof1460: cs = 1460; goto _test_eof + _test_eof1461: cs = 1461; goto _test_eof + _test_eof1462: cs = 1462; goto _test_eof + _test_eof1463: cs = 1463; goto _test_eof + _test_eof1464: cs = 1464; goto _test_eof + _test_eof1465: cs = 1465; goto _test_eof + _test_eof1466: cs = 1466; goto _test_eof + _test_eof1467: cs = 1467; goto _test_eof + _test_eof1468: cs = 1468; goto _test_eof + _test_eof1469: cs = 1469; goto _test_eof + _test_eof1470: cs = 1470; goto _test_eof + _test_eof1471: cs = 1471; goto _test_eof + _test_eof1472: cs = 1472; goto _test_eof + _test_eof1473: cs = 1473; goto _test_eof + _test_eof1474: cs = 1474; goto _test_eof + _test_eof1475: cs = 1475; goto _test_eof + _test_eof1476: cs = 1476; goto _test_eof + _test_eof1477: cs = 1477; goto _test_eof + _test_eof1478: cs = 1478; goto _test_eof + _test_eof1479: cs = 1479; goto _test_eof + _test_eof1480: cs = 1480; goto _test_eof + _test_eof1481: cs = 1481; goto _test_eof + _test_eof1482: cs = 1482; goto _test_eof + _test_eof1483: cs = 1483; goto _test_eof + _test_eof1484: cs = 1484; goto _test_eof + _test_eof1485: cs = 1485; goto _test_eof + _test_eof1486: cs = 1486; goto _test_eof + _test_eof1487: cs = 1487; goto _test_eof + _test_eof1488: cs = 1488; goto _test_eof + _test_eof1489: cs = 1489; goto _test_eof + _test_eof1490: cs = 
1490; goto _test_eof + _test_eof1491: cs = 1491; goto _test_eof + _test_eof1492: cs = 1492; goto _test_eof + _test_eof1493: cs = 1493; goto _test_eof + _test_eof1494: cs = 1494; goto _test_eof + _test_eof1495: cs = 1495; goto _test_eof + _test_eof1496: cs = 1496; goto _test_eof + _test_eof1497: cs = 1497; goto _test_eof + _test_eof1498: cs = 1498; goto _test_eof + _test_eof1499: cs = 1499; goto _test_eof + _test_eof1500: cs = 1500; goto _test_eof + _test_eof1501: cs = 1501; goto _test_eof + _test_eof1502: cs = 1502; goto _test_eof + _test_eof1503: cs = 1503; goto _test_eof + _test_eof1504: cs = 1504; goto _test_eof + _test_eof1505: cs = 1505; goto _test_eof + _test_eof1506: cs = 1506; goto _test_eof + _test_eof1507: cs = 1507; goto _test_eof + _test_eof1508: cs = 1508; goto _test_eof + _test_eof1509: cs = 1509; goto _test_eof + _test_eof1510: cs = 1510; goto _test_eof + _test_eof1511: cs = 1511; goto _test_eof + _test_eof1512: cs = 1512; goto _test_eof + _test_eof1513: cs = 1513; goto _test_eof + _test_eof1514: cs = 1514; goto _test_eof + _test_eof1515: cs = 1515; goto _test_eof + _test_eof1516: cs = 1516; goto _test_eof + _test_eof1517: cs = 1517; goto _test_eof + _test_eof1518: cs = 1518; goto _test_eof + _test_eof1519: cs = 1519; goto _test_eof + _test_eof1520: cs = 1520; goto _test_eof + _test_eof1521: cs = 1521; goto _test_eof + _test_eof1522: cs = 1522; goto _test_eof + _test_eof1523: cs = 1523; goto _test_eof + _test_eof1524: cs = 1524; goto _test_eof + _test_eof1525: cs = 1525; goto _test_eof + _test_eof1526: cs = 1526; goto _test_eof + _test_eof1527: cs = 1527; goto _test_eof + _test_eof1528: cs = 1528; goto _test_eof + _test_eof1529: cs = 1529; goto _test_eof + _test_eof1530: cs = 1530; goto _test_eof + _test_eof1531: cs = 1531; goto _test_eof + _test_eof1532: cs = 1532; goto _test_eof + _test_eof1533: cs = 1533; goto _test_eof + _test_eof1534: cs = 1534; goto _test_eof + _test_eof1535: cs = 1535; goto _test_eof + _test_eof1536: cs = 1536; goto _test_eof 
+ _test_eof1537: cs = 1537; goto _test_eof + _test_eof1538: cs = 1538; goto _test_eof + _test_eof1539: cs = 1539; goto _test_eof + _test_eof1540: cs = 1540; goto _test_eof + _test_eof1541: cs = 1541; goto _test_eof + _test_eof1542: cs = 1542; goto _test_eof + _test_eof1543: cs = 1543; goto _test_eof + _test_eof1544: cs = 1544; goto _test_eof + _test_eof1545: cs = 1545; goto _test_eof + _test_eof1546: cs = 1546; goto _test_eof + _test_eof1547: cs = 1547; goto _test_eof + _test_eof1548: cs = 1548; goto _test_eof + _test_eof1549: cs = 1549; goto _test_eof + _test_eof1550: cs = 1550; goto _test_eof + _test_eof1551: cs = 1551; goto _test_eof + _test_eof1552: cs = 1552; goto _test_eof + _test_eof1553: cs = 1553; goto _test_eof + _test_eof1554: cs = 1554; goto _test_eof + _test_eof1555: cs = 1555; goto _test_eof + _test_eof1556: cs = 1556; goto _test_eof + _test_eof1557: cs = 1557; goto _test_eof + _test_eof1558: cs = 1558; goto _test_eof + _test_eof1559: cs = 1559; goto _test_eof + _test_eof1560: cs = 1560; goto _test_eof + _test_eof1561: cs = 1561; goto _test_eof + _test_eof1562: cs = 1562; goto _test_eof + _test_eof1563: cs = 1563; goto _test_eof + _test_eof1564: cs = 1564; goto _test_eof + _test_eof1565: cs = 1565; goto _test_eof + _test_eof1566: cs = 1566; goto _test_eof + _test_eof1567: cs = 1567; goto _test_eof + _test_eof1568: cs = 1568; goto _test_eof + _test_eof1569: cs = 1569; goto _test_eof + _test_eof1570: cs = 1570; goto _test_eof + _test_eof1571: cs = 1571; goto _test_eof + _test_eof1572: cs = 1572; goto _test_eof + _test_eof1573: cs = 1573; goto _test_eof + _test_eof1574: cs = 1574; goto _test_eof + _test_eof1575: cs = 1575; goto _test_eof + _test_eof1576: cs = 1576; goto _test_eof + _test_eof1577: cs = 1577; goto _test_eof + _test_eof1578: cs = 1578; goto _test_eof + _test_eof1579: cs = 1579; goto _test_eof + _test_eof1580: cs = 1580; goto _test_eof + _test_eof1581: cs = 1581; goto _test_eof + _test_eof1582: cs = 1582; goto _test_eof + _test_eof1583: cs = 
1583; goto _test_eof + _test_eof1584: cs = 1584; goto _test_eof + _test_eof1585: cs = 1585; goto _test_eof + _test_eof1586: cs = 1586; goto _test_eof + _test_eof1587: cs = 1587; goto _test_eof + _test_eof1588: cs = 1588; goto _test_eof + _test_eof1589: cs = 1589; goto _test_eof + _test_eof1590: cs = 1590; goto _test_eof + _test_eof1591: cs = 1591; goto _test_eof + _test_eof4873: cs = 4873; goto _test_eof + _test_eof1592: cs = 1592; goto _test_eof + _test_eof1593: cs = 1593; goto _test_eof + _test_eof1594: cs = 1594; goto _test_eof + _test_eof1595: cs = 1595; goto _test_eof + _test_eof1596: cs = 1596; goto _test_eof + _test_eof1597: cs = 1597; goto _test_eof + _test_eof1598: cs = 1598; goto _test_eof + _test_eof1599: cs = 1599; goto _test_eof + _test_eof1600: cs = 1600; goto _test_eof + _test_eof1601: cs = 1601; goto _test_eof + _test_eof1602: cs = 1602; goto _test_eof + _test_eof1603: cs = 1603; goto _test_eof + _test_eof1604: cs = 1604; goto _test_eof + _test_eof1605: cs = 1605; goto _test_eof + _test_eof1606: cs = 1606; goto _test_eof + _test_eof1607: cs = 1607; goto _test_eof + _test_eof1608: cs = 1608; goto _test_eof + _test_eof1609: cs = 1609; goto _test_eof + _test_eof1610: cs = 1610; goto _test_eof + _test_eof1611: cs = 1611; goto _test_eof + _test_eof1612: cs = 1612; goto _test_eof + _test_eof1613: cs = 1613; goto _test_eof + _test_eof1614: cs = 1614; goto _test_eof + _test_eof1615: cs = 1615; goto _test_eof + _test_eof1616: cs = 1616; goto _test_eof + _test_eof1617: cs = 1617; goto _test_eof + _test_eof1618: cs = 1618; goto _test_eof + _test_eof1619: cs = 1619; goto _test_eof + _test_eof1620: cs = 1620; goto _test_eof + _test_eof1621: cs = 1621; goto _test_eof + _test_eof1622: cs = 1622; goto _test_eof + _test_eof1623: cs = 1623; goto _test_eof + _test_eof1624: cs = 1624; goto _test_eof + _test_eof1625: cs = 1625; goto _test_eof + _test_eof1626: cs = 1626; goto _test_eof + _test_eof1627: cs = 1627; goto _test_eof + _test_eof1628: cs = 1628; goto _test_eof 
+ _test_eof1629: cs = 1629; goto _test_eof + _test_eof1630: cs = 1630; goto _test_eof + _test_eof1631: cs = 1631; goto _test_eof + _test_eof1632: cs = 1632; goto _test_eof + _test_eof1633: cs = 1633; goto _test_eof + _test_eof1634: cs = 1634; goto _test_eof + _test_eof1635: cs = 1635; goto _test_eof + _test_eof1636: cs = 1636; goto _test_eof + _test_eof1637: cs = 1637; goto _test_eof + _test_eof1638: cs = 1638; goto _test_eof + _test_eof1639: cs = 1639; goto _test_eof + _test_eof1640: cs = 1640; goto _test_eof + _test_eof1641: cs = 1641; goto _test_eof + _test_eof1642: cs = 1642; goto _test_eof + _test_eof1643: cs = 1643; goto _test_eof + _test_eof1644: cs = 1644; goto _test_eof + _test_eof1645: cs = 1645; goto _test_eof + _test_eof1646: cs = 1646; goto _test_eof + _test_eof1647: cs = 1647; goto _test_eof + _test_eof1648: cs = 1648; goto _test_eof + _test_eof1649: cs = 1649; goto _test_eof + _test_eof1650: cs = 1650; goto _test_eof + _test_eof1651: cs = 1651; goto _test_eof + _test_eof1652: cs = 1652; goto _test_eof + _test_eof1653: cs = 1653; goto _test_eof + _test_eof1654: cs = 1654; goto _test_eof + _test_eof1655: cs = 1655; goto _test_eof + _test_eof1656: cs = 1656; goto _test_eof + _test_eof1657: cs = 1657; goto _test_eof + _test_eof1658: cs = 1658; goto _test_eof + _test_eof1659: cs = 1659; goto _test_eof + _test_eof1660: cs = 1660; goto _test_eof + _test_eof1661: cs = 1661; goto _test_eof + _test_eof1662: cs = 1662; goto _test_eof + _test_eof1663: cs = 1663; goto _test_eof + _test_eof1664: cs = 1664; goto _test_eof + _test_eof1665: cs = 1665; goto _test_eof + _test_eof1666: cs = 1666; goto _test_eof + _test_eof1667: cs = 1667; goto _test_eof + _test_eof1668: cs = 1668; goto _test_eof + _test_eof1669: cs = 1669; goto _test_eof + _test_eof1670: cs = 1670; goto _test_eof + _test_eof1671: cs = 1671; goto _test_eof + _test_eof1672: cs = 1672; goto _test_eof + _test_eof1673: cs = 1673; goto _test_eof + _test_eof1674: cs = 1674; goto _test_eof + _test_eof1675: cs = 
1675; goto _test_eof + _test_eof1676: cs = 1676; goto _test_eof + _test_eof1677: cs = 1677; goto _test_eof + _test_eof1678: cs = 1678; goto _test_eof + _test_eof1679: cs = 1679; goto _test_eof + _test_eof1680: cs = 1680; goto _test_eof + _test_eof1681: cs = 1681; goto _test_eof + _test_eof1682: cs = 1682; goto _test_eof + _test_eof1683: cs = 1683; goto _test_eof + _test_eof1684: cs = 1684; goto _test_eof + _test_eof1685: cs = 1685; goto _test_eof + _test_eof1686: cs = 1686; goto _test_eof + _test_eof1687: cs = 1687; goto _test_eof + _test_eof1688: cs = 1688; goto _test_eof + _test_eof1689: cs = 1689; goto _test_eof + _test_eof1690: cs = 1690; goto _test_eof + _test_eof1691: cs = 1691; goto _test_eof + _test_eof1692: cs = 1692; goto _test_eof + _test_eof1693: cs = 1693; goto _test_eof + _test_eof1694: cs = 1694; goto _test_eof + _test_eof1695: cs = 1695; goto _test_eof + _test_eof1696: cs = 1696; goto _test_eof + _test_eof1697: cs = 1697; goto _test_eof + _test_eof1698: cs = 1698; goto _test_eof + _test_eof1699: cs = 1699; goto _test_eof + _test_eof1700: cs = 1700; goto _test_eof + _test_eof1701: cs = 1701; goto _test_eof + _test_eof1702: cs = 1702; goto _test_eof + _test_eof1703: cs = 1703; goto _test_eof + _test_eof1704: cs = 1704; goto _test_eof + _test_eof1705: cs = 1705; goto _test_eof + _test_eof1706: cs = 1706; goto _test_eof + _test_eof1707: cs = 1707; goto _test_eof + _test_eof1708: cs = 1708; goto _test_eof + _test_eof1709: cs = 1709; goto _test_eof + _test_eof1710: cs = 1710; goto _test_eof + _test_eof1711: cs = 1711; goto _test_eof + _test_eof1712: cs = 1712; goto _test_eof + _test_eof1713: cs = 1713; goto _test_eof + _test_eof1714: cs = 1714; goto _test_eof + _test_eof1715: cs = 1715; goto _test_eof + _test_eof1716: cs = 1716; goto _test_eof + _test_eof1717: cs = 1717; goto _test_eof + _test_eof1718: cs = 1718; goto _test_eof + _test_eof1719: cs = 1719; goto _test_eof + _test_eof1720: cs = 1720; goto _test_eof + _test_eof1721: cs = 1721; goto _test_eof 
+ _test_eof1722: cs = 1722; goto _test_eof + _test_eof1723: cs = 1723; goto _test_eof + _test_eof1724: cs = 1724; goto _test_eof + _test_eof1725: cs = 1725; goto _test_eof + _test_eof1726: cs = 1726; goto _test_eof + _test_eof1727: cs = 1727; goto _test_eof + _test_eof1728: cs = 1728; goto _test_eof + _test_eof1729: cs = 1729; goto _test_eof + _test_eof1730: cs = 1730; goto _test_eof + _test_eof1731: cs = 1731; goto _test_eof + _test_eof1732: cs = 1732; goto _test_eof + _test_eof1733: cs = 1733; goto _test_eof + _test_eof1734: cs = 1734; goto _test_eof + _test_eof1735: cs = 1735; goto _test_eof + _test_eof1736: cs = 1736; goto _test_eof + _test_eof1737: cs = 1737; goto _test_eof + _test_eof1738: cs = 1738; goto _test_eof + _test_eof1739: cs = 1739; goto _test_eof + _test_eof1740: cs = 1740; goto _test_eof + _test_eof1741: cs = 1741; goto _test_eof + _test_eof1742: cs = 1742; goto _test_eof + _test_eof1743: cs = 1743; goto _test_eof + _test_eof1744: cs = 1744; goto _test_eof + _test_eof1745: cs = 1745; goto _test_eof + _test_eof1746: cs = 1746; goto _test_eof + _test_eof1747: cs = 1747; goto _test_eof + _test_eof1748: cs = 1748; goto _test_eof + _test_eof1749: cs = 1749; goto _test_eof + _test_eof1750: cs = 1750; goto _test_eof + _test_eof1751: cs = 1751; goto _test_eof + _test_eof1752: cs = 1752; goto _test_eof + _test_eof1753: cs = 1753; goto _test_eof + _test_eof1754: cs = 1754; goto _test_eof + _test_eof1755: cs = 1755; goto _test_eof + _test_eof1756: cs = 1756; goto _test_eof + _test_eof1757: cs = 1757; goto _test_eof + _test_eof1758: cs = 1758; goto _test_eof + _test_eof1759: cs = 1759; goto _test_eof + _test_eof1760: cs = 1760; goto _test_eof + _test_eof1761: cs = 1761; goto _test_eof + _test_eof1762: cs = 1762; goto _test_eof + _test_eof1763: cs = 1763; goto _test_eof + _test_eof1764: cs = 1764; goto _test_eof + _test_eof1765: cs = 1765; goto _test_eof + _test_eof1766: cs = 1766; goto _test_eof + _test_eof1767: cs = 1767; goto _test_eof + _test_eof1768: cs = 
1768; goto _test_eof + _test_eof1769: cs = 1769; goto _test_eof + _test_eof1770: cs = 1770; goto _test_eof + _test_eof1771: cs = 1771; goto _test_eof + _test_eof1772: cs = 1772; goto _test_eof + _test_eof1773: cs = 1773; goto _test_eof + _test_eof1774: cs = 1774; goto _test_eof + _test_eof1775: cs = 1775; goto _test_eof + _test_eof1776: cs = 1776; goto _test_eof + _test_eof1777: cs = 1777; goto _test_eof + _test_eof1778: cs = 1778; goto _test_eof + _test_eof1779: cs = 1779; goto _test_eof + _test_eof1780: cs = 1780; goto _test_eof + _test_eof1781: cs = 1781; goto _test_eof + _test_eof1782: cs = 1782; goto _test_eof + _test_eof1783: cs = 1783; goto _test_eof + _test_eof1784: cs = 1784; goto _test_eof + _test_eof1785: cs = 1785; goto _test_eof + _test_eof1786: cs = 1786; goto _test_eof + _test_eof1787: cs = 1787; goto _test_eof + _test_eof1788: cs = 1788; goto _test_eof + _test_eof1789: cs = 1789; goto _test_eof + _test_eof1790: cs = 1790; goto _test_eof + _test_eof1791: cs = 1791; goto _test_eof + _test_eof1792: cs = 1792; goto _test_eof + _test_eof1793: cs = 1793; goto _test_eof + _test_eof1794: cs = 1794; goto _test_eof + _test_eof1795: cs = 1795; goto _test_eof + _test_eof1796: cs = 1796; goto _test_eof + _test_eof1797: cs = 1797; goto _test_eof + _test_eof1798: cs = 1798; goto _test_eof + _test_eof1799: cs = 1799; goto _test_eof + _test_eof1800: cs = 1800; goto _test_eof + _test_eof1801: cs = 1801; goto _test_eof + _test_eof1802: cs = 1802; goto _test_eof + _test_eof1803: cs = 1803; goto _test_eof + _test_eof1804: cs = 1804; goto _test_eof + _test_eof1805: cs = 1805; goto _test_eof + _test_eof1806: cs = 1806; goto _test_eof + _test_eof1807: cs = 1807; goto _test_eof + _test_eof1808: cs = 1808; goto _test_eof + _test_eof1809: cs = 1809; goto _test_eof + _test_eof1810: cs = 1810; goto _test_eof + _test_eof1811: cs = 1811; goto _test_eof + _test_eof1812: cs = 1812; goto _test_eof + _test_eof1813: cs = 1813; goto _test_eof + _test_eof1814: cs = 1814; goto _test_eof 
+ _test_eof1815: cs = 1815; goto _test_eof + _test_eof1816: cs = 1816; goto _test_eof + _test_eof1817: cs = 1817; goto _test_eof + _test_eof1818: cs = 1818; goto _test_eof + _test_eof1819: cs = 1819; goto _test_eof + _test_eof1820: cs = 1820; goto _test_eof + _test_eof1821: cs = 1821; goto _test_eof + _test_eof1822: cs = 1822; goto _test_eof + _test_eof1823: cs = 1823; goto _test_eof + _test_eof1824: cs = 1824; goto _test_eof + _test_eof1825: cs = 1825; goto _test_eof + _test_eof1826: cs = 1826; goto _test_eof + _test_eof1827: cs = 1827; goto _test_eof + _test_eof1828: cs = 1828; goto _test_eof + _test_eof1829: cs = 1829; goto _test_eof + _test_eof1830: cs = 1830; goto _test_eof + _test_eof1831: cs = 1831; goto _test_eof + _test_eof1832: cs = 1832; goto _test_eof + _test_eof1833: cs = 1833; goto _test_eof + _test_eof1834: cs = 1834; goto _test_eof + _test_eof1835: cs = 1835; goto _test_eof + _test_eof1836: cs = 1836; goto _test_eof + _test_eof1837: cs = 1837; goto _test_eof + _test_eof1838: cs = 1838; goto _test_eof + _test_eof1839: cs = 1839; goto _test_eof + _test_eof1840: cs = 1840; goto _test_eof + _test_eof1841: cs = 1841; goto _test_eof + _test_eof1842: cs = 1842; goto _test_eof + _test_eof1843: cs = 1843; goto _test_eof + _test_eof1844: cs = 1844; goto _test_eof + _test_eof1845: cs = 1845; goto _test_eof + _test_eof1846: cs = 1846; goto _test_eof + _test_eof1847: cs = 1847; goto _test_eof + _test_eof1848: cs = 1848; goto _test_eof + _test_eof1849: cs = 1849; goto _test_eof + _test_eof1850: cs = 1850; goto _test_eof + _test_eof1851: cs = 1851; goto _test_eof + _test_eof1852: cs = 1852; goto _test_eof + _test_eof1853: cs = 1853; goto _test_eof + _test_eof1854: cs = 1854; goto _test_eof + _test_eof1855: cs = 1855; goto _test_eof + _test_eof1856: cs = 1856; goto _test_eof + _test_eof1857: cs = 1857; goto _test_eof + _test_eof1858: cs = 1858; goto _test_eof + _test_eof1859: cs = 1859; goto _test_eof + _test_eof1860: cs = 1860; goto _test_eof + _test_eof1861: cs = 
1861; goto _test_eof + _test_eof1862: cs = 1862; goto _test_eof + _test_eof1863: cs = 1863; goto _test_eof + _test_eof1864: cs = 1864; goto _test_eof + _test_eof1865: cs = 1865; goto _test_eof + _test_eof1866: cs = 1866; goto _test_eof + _test_eof1867: cs = 1867; goto _test_eof + _test_eof1868: cs = 1868; goto _test_eof + _test_eof1869: cs = 1869; goto _test_eof + _test_eof1870: cs = 1870; goto _test_eof + _test_eof1871: cs = 1871; goto _test_eof + _test_eof1872: cs = 1872; goto _test_eof + _test_eof1873: cs = 1873; goto _test_eof + _test_eof1874: cs = 1874; goto _test_eof + _test_eof1875: cs = 1875; goto _test_eof + _test_eof1876: cs = 1876; goto _test_eof + _test_eof1877: cs = 1877; goto _test_eof + _test_eof1878: cs = 1878; goto _test_eof + _test_eof1879: cs = 1879; goto _test_eof + _test_eof1880: cs = 1880; goto _test_eof + _test_eof1881: cs = 1881; goto _test_eof + _test_eof1882: cs = 1882; goto _test_eof + _test_eof1883: cs = 1883; goto _test_eof + _test_eof1884: cs = 1884; goto _test_eof + _test_eof1885: cs = 1885; goto _test_eof + _test_eof1886: cs = 1886; goto _test_eof + _test_eof1887: cs = 1887; goto _test_eof + _test_eof1888: cs = 1888; goto _test_eof + _test_eof1889: cs = 1889; goto _test_eof + _test_eof1890: cs = 1890; goto _test_eof + _test_eof1891: cs = 1891; goto _test_eof + _test_eof1892: cs = 1892; goto _test_eof + _test_eof1893: cs = 1893; goto _test_eof + _test_eof1894: cs = 1894; goto _test_eof + _test_eof1895: cs = 1895; goto _test_eof + _test_eof1896: cs = 1896; goto _test_eof + _test_eof1897: cs = 1897; goto _test_eof + _test_eof1898: cs = 1898; goto _test_eof + _test_eof1899: cs = 1899; goto _test_eof + _test_eof1900: cs = 1900; goto _test_eof + _test_eof1901: cs = 1901; goto _test_eof + _test_eof1902: cs = 1902; goto _test_eof + _test_eof1903: cs = 1903; goto _test_eof + _test_eof1904: cs = 1904; goto _test_eof + _test_eof1905: cs = 1905; goto _test_eof + _test_eof1906: cs = 1906; goto _test_eof + _test_eof1907: cs = 1907; goto _test_eof 
+ _test_eof1908: cs = 1908; goto _test_eof + _test_eof1909: cs = 1909; goto _test_eof + _test_eof1910: cs = 1910; goto _test_eof + _test_eof1911: cs = 1911; goto _test_eof + _test_eof1912: cs = 1912; goto _test_eof + _test_eof1913: cs = 1913; goto _test_eof + _test_eof1914: cs = 1914; goto _test_eof + _test_eof1915: cs = 1915; goto _test_eof + _test_eof1916: cs = 1916; goto _test_eof + _test_eof1917: cs = 1917; goto _test_eof + _test_eof1918: cs = 1918; goto _test_eof + _test_eof1919: cs = 1919; goto _test_eof + _test_eof1920: cs = 1920; goto _test_eof + _test_eof1921: cs = 1921; goto _test_eof + _test_eof1922: cs = 1922; goto _test_eof + _test_eof1923: cs = 1923; goto _test_eof + _test_eof1924: cs = 1924; goto _test_eof + _test_eof1925: cs = 1925; goto _test_eof + _test_eof1926: cs = 1926; goto _test_eof + _test_eof1927: cs = 1927; goto _test_eof + _test_eof1928: cs = 1928; goto _test_eof + _test_eof1929: cs = 1929; goto _test_eof + _test_eof1930: cs = 1930; goto _test_eof + _test_eof1931: cs = 1931; goto _test_eof + _test_eof1932: cs = 1932; goto _test_eof + _test_eof1933: cs = 1933; goto _test_eof + _test_eof1934: cs = 1934; goto _test_eof + _test_eof1935: cs = 1935; goto _test_eof + _test_eof1936: cs = 1936; goto _test_eof + _test_eof1937: cs = 1937; goto _test_eof + _test_eof1938: cs = 1938; goto _test_eof + _test_eof1939: cs = 1939; goto _test_eof + _test_eof1940: cs = 1940; goto _test_eof + _test_eof1941: cs = 1941; goto _test_eof + _test_eof1942: cs = 1942; goto _test_eof + _test_eof1943: cs = 1943; goto _test_eof + _test_eof1944: cs = 1944; goto _test_eof + _test_eof1945: cs = 1945; goto _test_eof + _test_eof1946: cs = 1946; goto _test_eof + _test_eof1947: cs = 1947; goto _test_eof + _test_eof1948: cs = 1948; goto _test_eof + _test_eof1949: cs = 1949; goto _test_eof + _test_eof1950: cs = 1950; goto _test_eof + _test_eof1951: cs = 1951; goto _test_eof + _test_eof1952: cs = 1952; goto _test_eof + _test_eof1953: cs = 1953; goto _test_eof + _test_eof1954: cs = 
1954; goto _test_eof + _test_eof1955: cs = 1955; goto _test_eof + _test_eof1956: cs = 1956; goto _test_eof + _test_eof1957: cs = 1957; goto _test_eof + _test_eof1958: cs = 1958; goto _test_eof + _test_eof1959: cs = 1959; goto _test_eof + _test_eof1960: cs = 1960; goto _test_eof + _test_eof1961: cs = 1961; goto _test_eof + _test_eof1962: cs = 1962; goto _test_eof + _test_eof1963: cs = 1963; goto _test_eof + _test_eof1964: cs = 1964; goto _test_eof + _test_eof1965: cs = 1965; goto _test_eof + _test_eof1966: cs = 1966; goto _test_eof + _test_eof1967: cs = 1967; goto _test_eof + _test_eof1968: cs = 1968; goto _test_eof + _test_eof1969: cs = 1969; goto _test_eof + _test_eof1970: cs = 1970; goto _test_eof + _test_eof1971: cs = 1971; goto _test_eof + _test_eof1972: cs = 1972; goto _test_eof + _test_eof1973: cs = 1973; goto _test_eof + _test_eof1974: cs = 1974; goto _test_eof + _test_eof1975: cs = 1975; goto _test_eof + _test_eof1976: cs = 1976; goto _test_eof + _test_eof1977: cs = 1977; goto _test_eof + _test_eof1978: cs = 1978; goto _test_eof + _test_eof1979: cs = 1979; goto _test_eof + _test_eof1980: cs = 1980; goto _test_eof + _test_eof1981: cs = 1981; goto _test_eof + _test_eof1982: cs = 1982; goto _test_eof + _test_eof1983: cs = 1983; goto _test_eof + _test_eof1984: cs = 1984; goto _test_eof + _test_eof1985: cs = 1985; goto _test_eof + _test_eof1986: cs = 1986; goto _test_eof + _test_eof1987: cs = 1987; goto _test_eof + _test_eof1988: cs = 1988; goto _test_eof + _test_eof1989: cs = 1989; goto _test_eof + _test_eof1990: cs = 1990; goto _test_eof + _test_eof1991: cs = 1991; goto _test_eof + _test_eof1992: cs = 1992; goto _test_eof + _test_eof1993: cs = 1993; goto _test_eof + _test_eof1994: cs = 1994; goto _test_eof + _test_eof1995: cs = 1995; goto _test_eof + _test_eof1996: cs = 1996; goto _test_eof + _test_eof1997: cs = 1997; goto _test_eof + _test_eof1998: cs = 1998; goto _test_eof + _test_eof1999: cs = 1999; goto _test_eof + _test_eof2000: cs = 2000; goto _test_eof 
+ _test_eof2001: cs = 2001; goto _test_eof + _test_eof2002: cs = 2002; goto _test_eof + _test_eof2003: cs = 2003; goto _test_eof + _test_eof2004: cs = 2004; goto _test_eof + _test_eof2005: cs = 2005; goto _test_eof + _test_eof2006: cs = 2006; goto _test_eof + _test_eof2007: cs = 2007; goto _test_eof + _test_eof2008: cs = 2008; goto _test_eof + _test_eof2009: cs = 2009; goto _test_eof + _test_eof2010: cs = 2010; goto _test_eof + _test_eof2011: cs = 2011; goto _test_eof + _test_eof2012: cs = 2012; goto _test_eof + _test_eof2013: cs = 2013; goto _test_eof + _test_eof2014: cs = 2014; goto _test_eof + _test_eof2015: cs = 2015; goto _test_eof + _test_eof2016: cs = 2016; goto _test_eof + _test_eof2017: cs = 2017; goto _test_eof + _test_eof2018: cs = 2018; goto _test_eof + _test_eof2019: cs = 2019; goto _test_eof + _test_eof2020: cs = 2020; goto _test_eof + _test_eof2021: cs = 2021; goto _test_eof + _test_eof2022: cs = 2022; goto _test_eof + _test_eof2023: cs = 2023; goto _test_eof + _test_eof2024: cs = 2024; goto _test_eof + _test_eof2025: cs = 2025; goto _test_eof + _test_eof2026: cs = 2026; goto _test_eof + _test_eof2027: cs = 2027; goto _test_eof + _test_eof2028: cs = 2028; goto _test_eof + _test_eof2029: cs = 2029; goto _test_eof + _test_eof2030: cs = 2030; goto _test_eof + _test_eof2031: cs = 2031; goto _test_eof + _test_eof2032: cs = 2032; goto _test_eof + _test_eof2033: cs = 2033; goto _test_eof + _test_eof2034: cs = 2034; goto _test_eof + _test_eof2035: cs = 2035; goto _test_eof + _test_eof2036: cs = 2036; goto _test_eof + _test_eof2037: cs = 2037; goto _test_eof + _test_eof2038: cs = 2038; goto _test_eof + _test_eof2039: cs = 2039; goto _test_eof + _test_eof2040: cs = 2040; goto _test_eof + _test_eof2041: cs = 2041; goto _test_eof + _test_eof2042: cs = 2042; goto _test_eof + _test_eof2043: cs = 2043; goto _test_eof + _test_eof2044: cs = 2044; goto _test_eof + _test_eof2045: cs = 2045; goto _test_eof + _test_eof2046: cs = 2046; goto _test_eof + _test_eof2047: cs = 
2047; goto _test_eof + _test_eof2048: cs = 2048; goto _test_eof + _test_eof2049: cs = 2049; goto _test_eof + _test_eof2050: cs = 2050; goto _test_eof + _test_eof2051: cs = 2051; goto _test_eof + _test_eof2052: cs = 2052; goto _test_eof + _test_eof2053: cs = 2053; goto _test_eof + _test_eof2054: cs = 2054; goto _test_eof + _test_eof2055: cs = 2055; goto _test_eof + _test_eof2056: cs = 2056; goto _test_eof + _test_eof2057: cs = 2057; goto _test_eof + _test_eof2058: cs = 2058; goto _test_eof + _test_eof2059: cs = 2059; goto _test_eof + _test_eof2060: cs = 2060; goto _test_eof + _test_eof2061: cs = 2061; goto _test_eof + _test_eof2062: cs = 2062; goto _test_eof + _test_eof2063: cs = 2063; goto _test_eof + _test_eof2064: cs = 2064; goto _test_eof + _test_eof2065: cs = 2065; goto _test_eof + _test_eof2066: cs = 2066; goto _test_eof + _test_eof2067: cs = 2067; goto _test_eof + _test_eof2068: cs = 2068; goto _test_eof + _test_eof2069: cs = 2069; goto _test_eof + _test_eof2070: cs = 2070; goto _test_eof + _test_eof2071: cs = 2071; goto _test_eof + _test_eof2072: cs = 2072; goto _test_eof + _test_eof2073: cs = 2073; goto _test_eof + _test_eof2074: cs = 2074; goto _test_eof + _test_eof2075: cs = 2075; goto _test_eof + _test_eof2076: cs = 2076; goto _test_eof + _test_eof2077: cs = 2077; goto _test_eof + _test_eof2078: cs = 2078; goto _test_eof + _test_eof2079: cs = 2079; goto _test_eof + _test_eof2080: cs = 2080; goto _test_eof + _test_eof2081: cs = 2081; goto _test_eof + _test_eof2082: cs = 2082; goto _test_eof + _test_eof2083: cs = 2083; goto _test_eof + _test_eof2084: cs = 2084; goto _test_eof + _test_eof2085: cs = 2085; goto _test_eof + _test_eof2086: cs = 2086; goto _test_eof + _test_eof2087: cs = 2087; goto _test_eof + _test_eof2088: cs = 2088; goto _test_eof + _test_eof2089: cs = 2089; goto _test_eof + _test_eof2090: cs = 2090; goto _test_eof + _test_eof2091: cs = 2091; goto _test_eof + _test_eof2092: cs = 2092; goto _test_eof + _test_eof2093: cs = 2093; goto _test_eof 
+ _test_eof2094: cs = 2094; goto _test_eof + _test_eof2095: cs = 2095; goto _test_eof + _test_eof2096: cs = 2096; goto _test_eof + _test_eof2097: cs = 2097; goto _test_eof + _test_eof2098: cs = 2098; goto _test_eof + _test_eof2099: cs = 2099; goto _test_eof + _test_eof2100: cs = 2100; goto _test_eof + _test_eof2101: cs = 2101; goto _test_eof + _test_eof2102: cs = 2102; goto _test_eof + _test_eof2103: cs = 2103; goto _test_eof + _test_eof2104: cs = 2104; goto _test_eof + _test_eof2105: cs = 2105; goto _test_eof + _test_eof2106: cs = 2106; goto _test_eof + _test_eof2107: cs = 2107; goto _test_eof + _test_eof2108: cs = 2108; goto _test_eof + _test_eof2109: cs = 2109; goto _test_eof + _test_eof2110: cs = 2110; goto _test_eof + _test_eof2111: cs = 2111; goto _test_eof + _test_eof2112: cs = 2112; goto _test_eof + _test_eof2113: cs = 2113; goto _test_eof + _test_eof2114: cs = 2114; goto _test_eof + _test_eof2115: cs = 2115; goto _test_eof + _test_eof2116: cs = 2116; goto _test_eof + _test_eof2117: cs = 2117; goto _test_eof + _test_eof2118: cs = 2118; goto _test_eof + _test_eof2119: cs = 2119; goto _test_eof + _test_eof2120: cs = 2120; goto _test_eof + _test_eof2121: cs = 2121; goto _test_eof + _test_eof2122: cs = 2122; goto _test_eof + _test_eof2123: cs = 2123; goto _test_eof + _test_eof2124: cs = 2124; goto _test_eof + _test_eof2125: cs = 2125; goto _test_eof + _test_eof2126: cs = 2126; goto _test_eof + _test_eof2127: cs = 2127; goto _test_eof + _test_eof2128: cs = 2128; goto _test_eof + _test_eof2129: cs = 2129; goto _test_eof + _test_eof2130: cs = 2130; goto _test_eof + _test_eof2131: cs = 2131; goto _test_eof + _test_eof2132: cs = 2132; goto _test_eof + _test_eof2133: cs = 2133; goto _test_eof + _test_eof2134: cs = 2134; goto _test_eof + _test_eof2135: cs = 2135; goto _test_eof + _test_eof2136: cs = 2136; goto _test_eof + _test_eof2137: cs = 2137; goto _test_eof + _test_eof2138: cs = 2138; goto _test_eof + _test_eof2139: cs = 2139; goto _test_eof + _test_eof2140: cs = 
2140; goto _test_eof + _test_eof2141: cs = 2141; goto _test_eof + _test_eof2142: cs = 2142; goto _test_eof + _test_eof2143: cs = 2143; goto _test_eof + _test_eof2144: cs = 2144; goto _test_eof + _test_eof2145: cs = 2145; goto _test_eof + _test_eof2146: cs = 2146; goto _test_eof + _test_eof2147: cs = 2147; goto _test_eof + _test_eof2148: cs = 2148; goto _test_eof + _test_eof2149: cs = 2149; goto _test_eof + _test_eof2150: cs = 2150; goto _test_eof + _test_eof2151: cs = 2151; goto _test_eof + _test_eof2152: cs = 2152; goto _test_eof + _test_eof2153: cs = 2153; goto _test_eof + _test_eof2154: cs = 2154; goto _test_eof + _test_eof2155: cs = 2155; goto _test_eof + _test_eof2156: cs = 2156; goto _test_eof + _test_eof2157: cs = 2157; goto _test_eof + _test_eof2158: cs = 2158; goto _test_eof + _test_eof2159: cs = 2159; goto _test_eof + _test_eof2160: cs = 2160; goto _test_eof + _test_eof2161: cs = 2161; goto _test_eof + _test_eof2162: cs = 2162; goto _test_eof + _test_eof2163: cs = 2163; goto _test_eof + _test_eof2164: cs = 2164; goto _test_eof + _test_eof2165: cs = 2165; goto _test_eof + _test_eof2166: cs = 2166; goto _test_eof + _test_eof2167: cs = 2167; goto _test_eof + _test_eof2168: cs = 2168; goto _test_eof + _test_eof2169: cs = 2169; goto _test_eof + _test_eof2170: cs = 2170; goto _test_eof + _test_eof2171: cs = 2171; goto _test_eof + _test_eof2172: cs = 2172; goto _test_eof + _test_eof2173: cs = 2173; goto _test_eof + _test_eof2174: cs = 2174; goto _test_eof + _test_eof2175: cs = 2175; goto _test_eof + _test_eof2176: cs = 2176; goto _test_eof + _test_eof2177: cs = 2177; goto _test_eof + _test_eof2178: cs = 2178; goto _test_eof + _test_eof2179: cs = 2179; goto _test_eof + _test_eof2180: cs = 2180; goto _test_eof + _test_eof2181: cs = 2181; goto _test_eof + _test_eof2182: cs = 2182; goto _test_eof + _test_eof2183: cs = 2183; goto _test_eof + _test_eof2184: cs = 2184; goto _test_eof + _test_eof2185: cs = 2185; goto _test_eof + _test_eof2186: cs = 2186; goto _test_eof 
+ _test_eof2187: cs = 2187; goto _test_eof + _test_eof2188: cs = 2188; goto _test_eof + _test_eof2189: cs = 2189; goto _test_eof + _test_eof2190: cs = 2190; goto _test_eof + _test_eof2191: cs = 2191; goto _test_eof + _test_eof2192: cs = 2192; goto _test_eof + _test_eof4874: cs = 4874; goto _test_eof + _test_eof2193: cs = 2193; goto _test_eof + _test_eof2194: cs = 2194; goto _test_eof + _test_eof2195: cs = 2195; goto _test_eof + _test_eof2196: cs = 2196; goto _test_eof + _test_eof2197: cs = 2197; goto _test_eof + _test_eof2198: cs = 2198; goto _test_eof + _test_eof2199: cs = 2199; goto _test_eof + _test_eof2200: cs = 2200; goto _test_eof + _test_eof2201: cs = 2201; goto _test_eof + _test_eof2202: cs = 2202; goto _test_eof + _test_eof2203: cs = 2203; goto _test_eof + _test_eof2204: cs = 2204; goto _test_eof + _test_eof2205: cs = 2205; goto _test_eof + _test_eof2206: cs = 2206; goto _test_eof + _test_eof2207: cs = 2207; goto _test_eof + _test_eof2208: cs = 2208; goto _test_eof + _test_eof2209: cs = 2209; goto _test_eof + _test_eof2210: cs = 2210; goto _test_eof + _test_eof2211: cs = 2211; goto _test_eof + _test_eof2212: cs = 2212; goto _test_eof + _test_eof2213: cs = 2213; goto _test_eof + _test_eof2214: cs = 2214; goto _test_eof + _test_eof2215: cs = 2215; goto _test_eof + _test_eof2216: cs = 2216; goto _test_eof + _test_eof2217: cs = 2217; goto _test_eof + _test_eof2218: cs = 2218; goto _test_eof + _test_eof2219: cs = 2219; goto _test_eof + _test_eof2220: cs = 2220; goto _test_eof + _test_eof2221: cs = 2221; goto _test_eof + _test_eof2222: cs = 2222; goto _test_eof + _test_eof2223: cs = 2223; goto _test_eof + _test_eof2224: cs = 2224; goto _test_eof + _test_eof2225: cs = 2225; goto _test_eof + _test_eof2226: cs = 2226; goto _test_eof + _test_eof2227: cs = 2227; goto _test_eof + _test_eof2228: cs = 2228; goto _test_eof + _test_eof2229: cs = 2229; goto _test_eof + _test_eof2230: cs = 2230; goto _test_eof + _test_eof2231: cs = 2231; goto _test_eof + _test_eof2232: cs = 
2232; goto _test_eof + _test_eof2233: cs = 2233; goto _test_eof + _test_eof2234: cs = 2234; goto _test_eof + _test_eof2235: cs = 2235; goto _test_eof + _test_eof2236: cs = 2236; goto _test_eof + _test_eof2237: cs = 2237; goto _test_eof + _test_eof2238: cs = 2238; goto _test_eof + _test_eof2239: cs = 2239; goto _test_eof + _test_eof2240: cs = 2240; goto _test_eof + _test_eof2241: cs = 2241; goto _test_eof + _test_eof2242: cs = 2242; goto _test_eof + _test_eof2243: cs = 2243; goto _test_eof + _test_eof2244: cs = 2244; goto _test_eof + _test_eof2245: cs = 2245; goto _test_eof + _test_eof2246: cs = 2246; goto _test_eof + _test_eof2247: cs = 2247; goto _test_eof + _test_eof2248: cs = 2248; goto _test_eof + _test_eof2249: cs = 2249; goto _test_eof + _test_eof2250: cs = 2250; goto _test_eof + _test_eof2251: cs = 2251; goto _test_eof + _test_eof2252: cs = 2252; goto _test_eof + _test_eof2253: cs = 2253; goto _test_eof + _test_eof2254: cs = 2254; goto _test_eof + _test_eof2255: cs = 2255; goto _test_eof + _test_eof2256: cs = 2256; goto _test_eof + _test_eof2257: cs = 2257; goto _test_eof + _test_eof2258: cs = 2258; goto _test_eof + _test_eof2259: cs = 2259; goto _test_eof + _test_eof2260: cs = 2260; goto _test_eof + _test_eof2261: cs = 2261; goto _test_eof + _test_eof2262: cs = 2262; goto _test_eof + _test_eof2263: cs = 2263; goto _test_eof + _test_eof2264: cs = 2264; goto _test_eof + _test_eof2265: cs = 2265; goto _test_eof + _test_eof2266: cs = 2266; goto _test_eof + _test_eof2267: cs = 2267; goto _test_eof + _test_eof2268: cs = 2268; goto _test_eof + _test_eof2269: cs = 2269; goto _test_eof + _test_eof2270: cs = 2270; goto _test_eof + _test_eof2271: cs = 2271; goto _test_eof + _test_eof2272: cs = 2272; goto _test_eof + _test_eof2273: cs = 2273; goto _test_eof + _test_eof2274: cs = 2274; goto _test_eof + _test_eof2275: cs = 2275; goto _test_eof + _test_eof2276: cs = 2276; goto _test_eof + _test_eof2277: cs = 2277; goto _test_eof + _test_eof2278: cs = 2278; goto _test_eof 
+ _test_eof2279: cs = 2279; goto _test_eof + _test_eof2280: cs = 2280; goto _test_eof + _test_eof2281: cs = 2281; goto _test_eof + _test_eof2282: cs = 2282; goto _test_eof + _test_eof2283: cs = 2283; goto _test_eof + _test_eof2284: cs = 2284; goto _test_eof + _test_eof2285: cs = 2285; goto _test_eof + _test_eof2286: cs = 2286; goto _test_eof + _test_eof2287: cs = 2287; goto _test_eof + _test_eof2288: cs = 2288; goto _test_eof + _test_eof2289: cs = 2289; goto _test_eof + _test_eof2290: cs = 2290; goto _test_eof + _test_eof2291: cs = 2291; goto _test_eof + _test_eof2292: cs = 2292; goto _test_eof + _test_eof2293: cs = 2293; goto _test_eof + _test_eof2294: cs = 2294; goto _test_eof + _test_eof2295: cs = 2295; goto _test_eof + _test_eof2296: cs = 2296; goto _test_eof + _test_eof2297: cs = 2297; goto _test_eof + _test_eof2298: cs = 2298; goto _test_eof + _test_eof2299: cs = 2299; goto _test_eof + _test_eof2300: cs = 2300; goto _test_eof + _test_eof2301: cs = 2301; goto _test_eof + _test_eof2302: cs = 2302; goto _test_eof + _test_eof2303: cs = 2303; goto _test_eof + _test_eof2304: cs = 2304; goto _test_eof + _test_eof2305: cs = 2305; goto _test_eof + _test_eof2306: cs = 2306; goto _test_eof + _test_eof2307: cs = 2307; goto _test_eof + _test_eof2308: cs = 2308; goto _test_eof + _test_eof2309: cs = 2309; goto _test_eof + _test_eof2310: cs = 2310; goto _test_eof + _test_eof2311: cs = 2311; goto _test_eof + _test_eof2312: cs = 2312; goto _test_eof + _test_eof2313: cs = 2313; goto _test_eof + _test_eof2314: cs = 2314; goto _test_eof + _test_eof2315: cs = 2315; goto _test_eof + _test_eof2316: cs = 2316; goto _test_eof + _test_eof2317: cs = 2317; goto _test_eof + _test_eof2318: cs = 2318; goto _test_eof + _test_eof2319: cs = 2319; goto _test_eof + _test_eof2320: cs = 2320; goto _test_eof + _test_eof2321: cs = 2321; goto _test_eof + _test_eof2322: cs = 2322; goto _test_eof + _test_eof2323: cs = 2323; goto _test_eof + _test_eof2324: cs = 2324; goto _test_eof + _test_eof2325: cs = 
2325; goto _test_eof + _test_eof2326: cs = 2326; goto _test_eof + _test_eof2327: cs = 2327; goto _test_eof + _test_eof2328: cs = 2328; goto _test_eof + _test_eof2329: cs = 2329; goto _test_eof + _test_eof2330: cs = 2330; goto _test_eof + _test_eof2331: cs = 2331; goto _test_eof + _test_eof2332: cs = 2332; goto _test_eof + _test_eof2333: cs = 2333; goto _test_eof + _test_eof2334: cs = 2334; goto _test_eof + _test_eof2335: cs = 2335; goto _test_eof + _test_eof2336: cs = 2336; goto _test_eof + _test_eof2337: cs = 2337; goto _test_eof + _test_eof2338: cs = 2338; goto _test_eof + _test_eof2339: cs = 2339; goto _test_eof + _test_eof4875: cs = 4875; goto _test_eof + _test_eof4876: cs = 4876; goto _test_eof + _test_eof2340: cs = 2340; goto _test_eof + _test_eof2341: cs = 2341; goto _test_eof + _test_eof2342: cs = 2342; goto _test_eof + _test_eof2343: cs = 2343; goto _test_eof + _test_eof2344: cs = 2344; goto _test_eof + _test_eof2345: cs = 2345; goto _test_eof + _test_eof2346: cs = 2346; goto _test_eof + _test_eof2347: cs = 2347; goto _test_eof + _test_eof2348: cs = 2348; goto _test_eof + _test_eof2349: cs = 2349; goto _test_eof + _test_eof2350: cs = 2350; goto _test_eof + _test_eof2351: cs = 2351; goto _test_eof + _test_eof2352: cs = 2352; goto _test_eof + _test_eof2353: cs = 2353; goto _test_eof + _test_eof2354: cs = 2354; goto _test_eof + _test_eof2355: cs = 2355; goto _test_eof + _test_eof2356: cs = 2356; goto _test_eof + _test_eof2357: cs = 2357; goto _test_eof + _test_eof2358: cs = 2358; goto _test_eof + _test_eof2359: cs = 2359; goto _test_eof + _test_eof2360: cs = 2360; goto _test_eof + _test_eof2361: cs = 2361; goto _test_eof + _test_eof2362: cs = 2362; goto _test_eof + _test_eof2363: cs = 2363; goto _test_eof + _test_eof2364: cs = 2364; goto _test_eof + _test_eof2365: cs = 2365; goto _test_eof + _test_eof2366: cs = 2366; goto _test_eof + _test_eof2367: cs = 2367; goto _test_eof + _test_eof2368: cs = 2368; goto _test_eof + _test_eof2369: cs = 2369; goto _test_eof 
+ _test_eof2370: cs = 2370; goto _test_eof + _test_eof2371: cs = 2371; goto _test_eof + _test_eof2372: cs = 2372; goto _test_eof + _test_eof2373: cs = 2373; goto _test_eof + _test_eof2374: cs = 2374; goto _test_eof + _test_eof2375: cs = 2375; goto _test_eof + _test_eof2376: cs = 2376; goto _test_eof + _test_eof2377: cs = 2377; goto _test_eof + _test_eof2378: cs = 2378; goto _test_eof + _test_eof2379: cs = 2379; goto _test_eof + _test_eof2380: cs = 2380; goto _test_eof + _test_eof2381: cs = 2381; goto _test_eof + _test_eof2382: cs = 2382; goto _test_eof + _test_eof2383: cs = 2383; goto _test_eof + _test_eof2384: cs = 2384; goto _test_eof + _test_eof2385: cs = 2385; goto _test_eof + _test_eof2386: cs = 2386; goto _test_eof + _test_eof2387: cs = 2387; goto _test_eof + _test_eof2388: cs = 2388; goto _test_eof + _test_eof2389: cs = 2389; goto _test_eof + _test_eof2390: cs = 2390; goto _test_eof + _test_eof2391: cs = 2391; goto _test_eof + _test_eof2392: cs = 2392; goto _test_eof + _test_eof2393: cs = 2393; goto _test_eof + _test_eof2394: cs = 2394; goto _test_eof + _test_eof2395: cs = 2395; goto _test_eof + _test_eof2396: cs = 2396; goto _test_eof + _test_eof2397: cs = 2397; goto _test_eof + _test_eof2398: cs = 2398; goto _test_eof + _test_eof2399: cs = 2399; goto _test_eof + _test_eof2400: cs = 2400; goto _test_eof + _test_eof2401: cs = 2401; goto _test_eof + _test_eof2402: cs = 2402; goto _test_eof + _test_eof2403: cs = 2403; goto _test_eof + _test_eof2404: cs = 2404; goto _test_eof + _test_eof2405: cs = 2405; goto _test_eof + _test_eof2406: cs = 2406; goto _test_eof + _test_eof2407: cs = 2407; goto _test_eof + _test_eof2408: cs = 2408; goto _test_eof + _test_eof2409: cs = 2409; goto _test_eof + _test_eof2410: cs = 2410; goto _test_eof + _test_eof2411: cs = 2411; goto _test_eof + _test_eof2412: cs = 2412; goto _test_eof + _test_eof2413: cs = 2413; goto _test_eof + _test_eof2414: cs = 2414; goto _test_eof + _test_eof2415: cs = 2415; goto _test_eof + _test_eof2416: cs = 
2416; goto _test_eof + _test_eof2417: cs = 2417; goto _test_eof + _test_eof2418: cs = 2418; goto _test_eof + _test_eof2419: cs = 2419; goto _test_eof + _test_eof2420: cs = 2420; goto _test_eof + _test_eof2421: cs = 2421; goto _test_eof + _test_eof2422: cs = 2422; goto _test_eof + _test_eof2423: cs = 2423; goto _test_eof + _test_eof2424: cs = 2424; goto _test_eof + _test_eof2425: cs = 2425; goto _test_eof + _test_eof2426: cs = 2426; goto _test_eof + _test_eof2427: cs = 2427; goto _test_eof + _test_eof2428: cs = 2428; goto _test_eof + _test_eof2429: cs = 2429; goto _test_eof + _test_eof2430: cs = 2430; goto _test_eof + _test_eof2431: cs = 2431; goto _test_eof + _test_eof2432: cs = 2432; goto _test_eof + _test_eof2433: cs = 2433; goto _test_eof + _test_eof2434: cs = 2434; goto _test_eof + _test_eof2435: cs = 2435; goto _test_eof + _test_eof2436: cs = 2436; goto _test_eof + _test_eof2437: cs = 2437; goto _test_eof + _test_eof2438: cs = 2438; goto _test_eof + _test_eof2439: cs = 2439; goto _test_eof + _test_eof2440: cs = 2440; goto _test_eof + _test_eof2441: cs = 2441; goto _test_eof + _test_eof2442: cs = 2442; goto _test_eof + _test_eof2443: cs = 2443; goto _test_eof + _test_eof2444: cs = 2444; goto _test_eof + _test_eof2445: cs = 2445; goto _test_eof + _test_eof2446: cs = 2446; goto _test_eof + _test_eof2447: cs = 2447; goto _test_eof + _test_eof2448: cs = 2448; goto _test_eof + _test_eof2449: cs = 2449; goto _test_eof + _test_eof2450: cs = 2450; goto _test_eof + _test_eof2451: cs = 2451; goto _test_eof + _test_eof2452: cs = 2452; goto _test_eof + _test_eof2453: cs = 2453; goto _test_eof + _test_eof2454: cs = 2454; goto _test_eof + _test_eof2455: cs = 2455; goto _test_eof + _test_eof2456: cs = 2456; goto _test_eof + _test_eof2457: cs = 2457; goto _test_eof + _test_eof2458: cs = 2458; goto _test_eof + _test_eof2459: cs = 2459; goto _test_eof + _test_eof2460: cs = 2460; goto _test_eof + _test_eof2461: cs = 2461; goto _test_eof + _test_eof2462: cs = 2462; goto _test_eof 
+ _test_eof2463: cs = 2463; goto _test_eof + _test_eof2464: cs = 2464; goto _test_eof + _test_eof2465: cs = 2465; goto _test_eof + _test_eof2466: cs = 2466; goto _test_eof + _test_eof2467: cs = 2467; goto _test_eof + _test_eof2468: cs = 2468; goto _test_eof + _test_eof2469: cs = 2469; goto _test_eof + _test_eof2470: cs = 2470; goto _test_eof + _test_eof2471: cs = 2471; goto _test_eof + _test_eof2472: cs = 2472; goto _test_eof + _test_eof2473: cs = 2473; goto _test_eof + _test_eof2474: cs = 2474; goto _test_eof + _test_eof2475: cs = 2475; goto _test_eof + _test_eof2476: cs = 2476; goto _test_eof + _test_eof2477: cs = 2477; goto _test_eof + _test_eof2478: cs = 2478; goto _test_eof + _test_eof2479: cs = 2479; goto _test_eof + _test_eof2480: cs = 2480; goto _test_eof + _test_eof2481: cs = 2481; goto _test_eof + _test_eof2482: cs = 2482; goto _test_eof + _test_eof2483: cs = 2483; goto _test_eof + _test_eof2484: cs = 2484; goto _test_eof + _test_eof2485: cs = 2485; goto _test_eof + _test_eof2486: cs = 2486; goto _test_eof + _test_eof2487: cs = 2487; goto _test_eof + _test_eof2488: cs = 2488; goto _test_eof + _test_eof2489: cs = 2489; goto _test_eof + _test_eof2490: cs = 2490; goto _test_eof + _test_eof2491: cs = 2491; goto _test_eof + _test_eof2492: cs = 2492; goto _test_eof + _test_eof2493: cs = 2493; goto _test_eof + _test_eof2494: cs = 2494; goto _test_eof + _test_eof2495: cs = 2495; goto _test_eof + _test_eof2496: cs = 2496; goto _test_eof + _test_eof2497: cs = 2497; goto _test_eof + _test_eof2498: cs = 2498; goto _test_eof + _test_eof2499: cs = 2499; goto _test_eof + _test_eof2500: cs = 2500; goto _test_eof + _test_eof2501: cs = 2501; goto _test_eof + _test_eof2502: cs = 2502; goto _test_eof + _test_eof2503: cs = 2503; goto _test_eof + _test_eof2504: cs = 2504; goto _test_eof + _test_eof2505: cs = 2505; goto _test_eof + _test_eof2506: cs = 2506; goto _test_eof + _test_eof2507: cs = 2507; goto _test_eof + _test_eof2508: cs = 2508; goto _test_eof + _test_eof2509: cs = 
2509; goto _test_eof + _test_eof2510: cs = 2510; goto _test_eof + _test_eof2511: cs = 2511; goto _test_eof + _test_eof2512: cs = 2512; goto _test_eof + _test_eof2513: cs = 2513; goto _test_eof + _test_eof2514: cs = 2514; goto _test_eof + _test_eof2515: cs = 2515; goto _test_eof + _test_eof2516: cs = 2516; goto _test_eof + _test_eof2517: cs = 2517; goto _test_eof + _test_eof2518: cs = 2518; goto _test_eof + _test_eof2519: cs = 2519; goto _test_eof + _test_eof2520: cs = 2520; goto _test_eof + _test_eof2521: cs = 2521; goto _test_eof + _test_eof2522: cs = 2522; goto _test_eof + _test_eof2523: cs = 2523; goto _test_eof + _test_eof2524: cs = 2524; goto _test_eof + _test_eof2525: cs = 2525; goto _test_eof + _test_eof2526: cs = 2526; goto _test_eof + _test_eof2527: cs = 2527; goto _test_eof + _test_eof2528: cs = 2528; goto _test_eof + _test_eof2529: cs = 2529; goto _test_eof + _test_eof2530: cs = 2530; goto _test_eof + _test_eof2531: cs = 2531; goto _test_eof + _test_eof2532: cs = 2532; goto _test_eof + _test_eof2533: cs = 2533; goto _test_eof + _test_eof2534: cs = 2534; goto _test_eof + _test_eof2535: cs = 2535; goto _test_eof + _test_eof2536: cs = 2536; goto _test_eof + _test_eof2537: cs = 2537; goto _test_eof + _test_eof2538: cs = 2538; goto _test_eof + _test_eof2539: cs = 2539; goto _test_eof + _test_eof2540: cs = 2540; goto _test_eof + _test_eof2541: cs = 2541; goto _test_eof + _test_eof2542: cs = 2542; goto _test_eof + _test_eof2543: cs = 2543; goto _test_eof + _test_eof2544: cs = 2544; goto _test_eof + _test_eof2545: cs = 2545; goto _test_eof + _test_eof2546: cs = 2546; goto _test_eof + _test_eof2547: cs = 2547; goto _test_eof + _test_eof2548: cs = 2548; goto _test_eof + _test_eof2549: cs = 2549; goto _test_eof + _test_eof2550: cs = 2550; goto _test_eof + _test_eof2551: cs = 2551; goto _test_eof + _test_eof2552: cs = 2552; goto _test_eof + _test_eof2553: cs = 2553; goto _test_eof + _test_eof2554: cs = 2554; goto _test_eof + _test_eof2555: cs = 2555; goto _test_eof 
+ _test_eof2556: cs = 2556; goto _test_eof + _test_eof2557: cs = 2557; goto _test_eof + _test_eof2558: cs = 2558; goto _test_eof + _test_eof2559: cs = 2559; goto _test_eof + _test_eof2560: cs = 2560; goto _test_eof + _test_eof2561: cs = 2561; goto _test_eof + _test_eof2562: cs = 2562; goto _test_eof + _test_eof2563: cs = 2563; goto _test_eof + _test_eof2564: cs = 2564; goto _test_eof + _test_eof2565: cs = 2565; goto _test_eof + _test_eof2566: cs = 2566; goto _test_eof + _test_eof2567: cs = 2567; goto _test_eof + _test_eof2568: cs = 2568; goto _test_eof + _test_eof2569: cs = 2569; goto _test_eof + _test_eof2570: cs = 2570; goto _test_eof + _test_eof2571: cs = 2571; goto _test_eof + _test_eof2572: cs = 2572; goto _test_eof + _test_eof2573: cs = 2573; goto _test_eof + _test_eof2574: cs = 2574; goto _test_eof + _test_eof2575: cs = 2575; goto _test_eof + _test_eof2576: cs = 2576; goto _test_eof + _test_eof2577: cs = 2577; goto _test_eof + _test_eof2578: cs = 2578; goto _test_eof + _test_eof2579: cs = 2579; goto _test_eof + _test_eof2580: cs = 2580; goto _test_eof + _test_eof2581: cs = 2581; goto _test_eof + _test_eof2582: cs = 2582; goto _test_eof + _test_eof2583: cs = 2583; goto _test_eof + _test_eof2584: cs = 2584; goto _test_eof + _test_eof2585: cs = 2585; goto _test_eof + _test_eof2586: cs = 2586; goto _test_eof + _test_eof2587: cs = 2587; goto _test_eof + _test_eof2588: cs = 2588; goto _test_eof + _test_eof2589: cs = 2589; goto _test_eof + _test_eof2590: cs = 2590; goto _test_eof + _test_eof2591: cs = 2591; goto _test_eof + _test_eof2592: cs = 2592; goto _test_eof + _test_eof2593: cs = 2593; goto _test_eof + _test_eof2594: cs = 2594; goto _test_eof + _test_eof2595: cs = 2595; goto _test_eof + _test_eof2596: cs = 2596; goto _test_eof + _test_eof2597: cs = 2597; goto _test_eof + _test_eof2598: cs = 2598; goto _test_eof + _test_eof2599: cs = 2599; goto _test_eof + _test_eof2600: cs = 2600; goto _test_eof + _test_eof2601: cs = 2601; goto _test_eof + _test_eof2602: cs = 
2602; goto _test_eof + _test_eof2603: cs = 2603; goto _test_eof + _test_eof2604: cs = 2604; goto _test_eof + _test_eof2605: cs = 2605; goto _test_eof + _test_eof2606: cs = 2606; goto _test_eof + _test_eof2607: cs = 2607; goto _test_eof + _test_eof2608: cs = 2608; goto _test_eof + _test_eof2609: cs = 2609; goto _test_eof + _test_eof2610: cs = 2610; goto _test_eof + _test_eof2611: cs = 2611; goto _test_eof + _test_eof2612: cs = 2612; goto _test_eof + _test_eof2613: cs = 2613; goto _test_eof + _test_eof2614: cs = 2614; goto _test_eof + _test_eof2615: cs = 2615; goto _test_eof + _test_eof2616: cs = 2616; goto _test_eof + _test_eof2617: cs = 2617; goto _test_eof + _test_eof2618: cs = 2618; goto _test_eof + _test_eof2619: cs = 2619; goto _test_eof + _test_eof2620: cs = 2620; goto _test_eof + _test_eof2621: cs = 2621; goto _test_eof + _test_eof2622: cs = 2622; goto _test_eof + _test_eof2623: cs = 2623; goto _test_eof + _test_eof2624: cs = 2624; goto _test_eof + _test_eof2625: cs = 2625; goto _test_eof + _test_eof2626: cs = 2626; goto _test_eof + _test_eof2627: cs = 2627; goto _test_eof + _test_eof2628: cs = 2628; goto _test_eof + _test_eof2629: cs = 2629; goto _test_eof + _test_eof2630: cs = 2630; goto _test_eof + _test_eof2631: cs = 2631; goto _test_eof + _test_eof2632: cs = 2632; goto _test_eof + _test_eof2633: cs = 2633; goto _test_eof + _test_eof2634: cs = 2634; goto _test_eof + _test_eof2635: cs = 2635; goto _test_eof + _test_eof4877: cs = 4877; goto _test_eof + _test_eof4878: cs = 4878; goto _test_eof + _test_eof2636: cs = 2636; goto _test_eof + _test_eof2637: cs = 2637; goto _test_eof + _test_eof2638: cs = 2638; goto _test_eof + _test_eof2639: cs = 2639; goto _test_eof + _test_eof2640: cs = 2640; goto _test_eof + _test_eof2641: cs = 2641; goto _test_eof + _test_eof2642: cs = 2642; goto _test_eof + _test_eof2643: cs = 2643; goto _test_eof + _test_eof2644: cs = 2644; goto _test_eof + _test_eof2645: cs = 2645; goto _test_eof + _test_eof2646: cs = 2646; goto _test_eof 
+ _test_eof2647: cs = 2647; goto _test_eof + _test_eof2648: cs = 2648; goto _test_eof + _test_eof2649: cs = 2649; goto _test_eof + _test_eof2650: cs = 2650; goto _test_eof + _test_eof2651: cs = 2651; goto _test_eof + _test_eof2652: cs = 2652; goto _test_eof + _test_eof2653: cs = 2653; goto _test_eof + _test_eof2654: cs = 2654; goto _test_eof + _test_eof2655: cs = 2655; goto _test_eof + _test_eof2656: cs = 2656; goto _test_eof + _test_eof2657: cs = 2657; goto _test_eof + _test_eof2658: cs = 2658; goto _test_eof + _test_eof2659: cs = 2659; goto _test_eof + _test_eof2660: cs = 2660; goto _test_eof + _test_eof2661: cs = 2661; goto _test_eof + _test_eof2662: cs = 2662; goto _test_eof + _test_eof2663: cs = 2663; goto _test_eof + _test_eof2664: cs = 2664; goto _test_eof + _test_eof2665: cs = 2665; goto _test_eof + _test_eof2666: cs = 2666; goto _test_eof + _test_eof2667: cs = 2667; goto _test_eof + _test_eof2668: cs = 2668; goto _test_eof + _test_eof2669: cs = 2669; goto _test_eof + _test_eof2670: cs = 2670; goto _test_eof + _test_eof2671: cs = 2671; goto _test_eof + _test_eof2672: cs = 2672; goto _test_eof + _test_eof2673: cs = 2673; goto _test_eof + _test_eof2674: cs = 2674; goto _test_eof + _test_eof2675: cs = 2675; goto _test_eof + _test_eof2676: cs = 2676; goto _test_eof + _test_eof2677: cs = 2677; goto _test_eof + _test_eof2678: cs = 2678; goto _test_eof + _test_eof2679: cs = 2679; goto _test_eof + _test_eof2680: cs = 2680; goto _test_eof + _test_eof2681: cs = 2681; goto _test_eof + _test_eof2682: cs = 2682; goto _test_eof + _test_eof2683: cs = 2683; goto _test_eof + _test_eof2684: cs = 2684; goto _test_eof + _test_eof2685: cs = 2685; goto _test_eof + _test_eof2686: cs = 2686; goto _test_eof + _test_eof2687: cs = 2687; goto _test_eof + _test_eof2688: cs = 2688; goto _test_eof + _test_eof2689: cs = 2689; goto _test_eof + _test_eof2690: cs = 2690; goto _test_eof + _test_eof2691: cs = 2691; goto _test_eof + _test_eof2692: cs = 2692; goto _test_eof + _test_eof2693: cs = 
2693; goto _test_eof + _test_eof2694: cs = 2694; goto _test_eof + _test_eof2695: cs = 2695; goto _test_eof + _test_eof2696: cs = 2696; goto _test_eof + _test_eof2697: cs = 2697; goto _test_eof + _test_eof2698: cs = 2698; goto _test_eof + _test_eof2699: cs = 2699; goto _test_eof + _test_eof2700: cs = 2700; goto _test_eof + _test_eof2701: cs = 2701; goto _test_eof + _test_eof2702: cs = 2702; goto _test_eof + _test_eof2703: cs = 2703; goto _test_eof + _test_eof2704: cs = 2704; goto _test_eof + _test_eof2705: cs = 2705; goto _test_eof + _test_eof2706: cs = 2706; goto _test_eof + _test_eof2707: cs = 2707; goto _test_eof + _test_eof2708: cs = 2708; goto _test_eof + _test_eof2709: cs = 2709; goto _test_eof + _test_eof2710: cs = 2710; goto _test_eof + _test_eof2711: cs = 2711; goto _test_eof + _test_eof2712: cs = 2712; goto _test_eof + _test_eof2713: cs = 2713; goto _test_eof + _test_eof2714: cs = 2714; goto _test_eof + _test_eof2715: cs = 2715; goto _test_eof + _test_eof2716: cs = 2716; goto _test_eof + _test_eof2717: cs = 2717; goto _test_eof + _test_eof2718: cs = 2718; goto _test_eof + _test_eof2719: cs = 2719; goto _test_eof + _test_eof2720: cs = 2720; goto _test_eof + _test_eof2721: cs = 2721; goto _test_eof + _test_eof2722: cs = 2722; goto _test_eof + _test_eof2723: cs = 2723; goto _test_eof + _test_eof2724: cs = 2724; goto _test_eof + _test_eof2725: cs = 2725; goto _test_eof + _test_eof2726: cs = 2726; goto _test_eof + _test_eof2727: cs = 2727; goto _test_eof + _test_eof2728: cs = 2728; goto _test_eof + _test_eof2729: cs = 2729; goto _test_eof + _test_eof2730: cs = 2730; goto _test_eof + _test_eof2731: cs = 2731; goto _test_eof + _test_eof2732: cs = 2732; goto _test_eof + _test_eof2733: cs = 2733; goto _test_eof + _test_eof2734: cs = 2734; goto _test_eof + _test_eof2735: cs = 2735; goto _test_eof + _test_eof2736: cs = 2736; goto _test_eof + _test_eof2737: cs = 2737; goto _test_eof + _test_eof2738: cs = 2738; goto _test_eof + _test_eof2739: cs = 2739; goto _test_eof 
+ _test_eof2740: cs = 2740; goto _test_eof + _test_eof2741: cs = 2741; goto _test_eof + _test_eof2742: cs = 2742; goto _test_eof + _test_eof2743: cs = 2743; goto _test_eof + _test_eof2744: cs = 2744; goto _test_eof + _test_eof2745: cs = 2745; goto _test_eof + _test_eof2746: cs = 2746; goto _test_eof + _test_eof2747: cs = 2747; goto _test_eof + _test_eof2748: cs = 2748; goto _test_eof + _test_eof2749: cs = 2749; goto _test_eof + _test_eof2750: cs = 2750; goto _test_eof + _test_eof2751: cs = 2751; goto _test_eof + _test_eof2752: cs = 2752; goto _test_eof + _test_eof2753: cs = 2753; goto _test_eof + _test_eof2754: cs = 2754; goto _test_eof + _test_eof2755: cs = 2755; goto _test_eof + _test_eof2756: cs = 2756; goto _test_eof + _test_eof2757: cs = 2757; goto _test_eof + _test_eof2758: cs = 2758; goto _test_eof + _test_eof2759: cs = 2759; goto _test_eof + _test_eof2760: cs = 2760; goto _test_eof + _test_eof2761: cs = 2761; goto _test_eof + _test_eof2762: cs = 2762; goto _test_eof + _test_eof2763: cs = 2763; goto _test_eof + _test_eof2764: cs = 2764; goto _test_eof + _test_eof2765: cs = 2765; goto _test_eof + _test_eof2766: cs = 2766; goto _test_eof + _test_eof2767: cs = 2767; goto _test_eof + _test_eof2768: cs = 2768; goto _test_eof + _test_eof2769: cs = 2769; goto _test_eof + _test_eof2770: cs = 2770; goto _test_eof + _test_eof2771: cs = 2771; goto _test_eof + _test_eof2772: cs = 2772; goto _test_eof + _test_eof2773: cs = 2773; goto _test_eof + _test_eof2774: cs = 2774; goto _test_eof + _test_eof2775: cs = 2775; goto _test_eof + _test_eof2776: cs = 2776; goto _test_eof + _test_eof4879: cs = 4879; goto _test_eof + _test_eof4880: cs = 4880; goto _test_eof + _test_eof4881: cs = 4881; goto _test_eof + _test_eof4882: cs = 4882; goto _test_eof + _test_eof4883: cs = 4883; goto _test_eof + _test_eof4884: cs = 4884; goto _test_eof + _test_eof4885: cs = 4885; goto _test_eof + _test_eof2777: cs = 2777; goto _test_eof + _test_eof2778: cs = 2778; goto _test_eof + _test_eof2779: cs = 
2779; goto _test_eof + _test_eof2780: cs = 2780; goto _test_eof + _test_eof2781: cs = 2781; goto _test_eof + _test_eof2782: cs = 2782; goto _test_eof + _test_eof2783: cs = 2783; goto _test_eof + _test_eof2784: cs = 2784; goto _test_eof + _test_eof2785: cs = 2785; goto _test_eof + _test_eof2786: cs = 2786; goto _test_eof + _test_eof2787: cs = 2787; goto _test_eof + _test_eof2788: cs = 2788; goto _test_eof + _test_eof2789: cs = 2789; goto _test_eof + _test_eof2790: cs = 2790; goto _test_eof + _test_eof2791: cs = 2791; goto _test_eof + _test_eof2792: cs = 2792; goto _test_eof + _test_eof2793: cs = 2793; goto _test_eof + _test_eof2794: cs = 2794; goto _test_eof + _test_eof2795: cs = 2795; goto _test_eof + _test_eof2796: cs = 2796; goto _test_eof + _test_eof2797: cs = 2797; goto _test_eof + _test_eof2798: cs = 2798; goto _test_eof + _test_eof2799: cs = 2799; goto _test_eof + _test_eof2800: cs = 2800; goto _test_eof + _test_eof2801: cs = 2801; goto _test_eof + _test_eof2802: cs = 2802; goto _test_eof + _test_eof2803: cs = 2803; goto _test_eof + _test_eof2804: cs = 2804; goto _test_eof + _test_eof2805: cs = 2805; goto _test_eof + _test_eof2806: cs = 2806; goto _test_eof + _test_eof2807: cs = 2807; goto _test_eof + _test_eof2808: cs = 2808; goto _test_eof + _test_eof2809: cs = 2809; goto _test_eof + _test_eof2810: cs = 2810; goto _test_eof + _test_eof2811: cs = 2811; goto _test_eof + _test_eof2812: cs = 2812; goto _test_eof + _test_eof2813: cs = 2813; goto _test_eof + _test_eof2814: cs = 2814; goto _test_eof + _test_eof2815: cs = 2815; goto _test_eof + _test_eof2816: cs = 2816; goto _test_eof + _test_eof2817: cs = 2817; goto _test_eof + _test_eof2818: cs = 2818; goto _test_eof + _test_eof2819: cs = 2819; goto _test_eof + _test_eof2820: cs = 2820; goto _test_eof + _test_eof2821: cs = 2821; goto _test_eof + _test_eof2822: cs = 2822; goto _test_eof + _test_eof2823: cs = 2823; goto _test_eof + _test_eof2824: cs = 2824; goto _test_eof + _test_eof2825: cs = 2825; goto _test_eof 
+ _test_eof2826: cs = 2826; goto _test_eof + _test_eof2827: cs = 2827; goto _test_eof + _test_eof2828: cs = 2828; goto _test_eof + _test_eof2829: cs = 2829; goto _test_eof + _test_eof2830: cs = 2830; goto _test_eof + _test_eof2831: cs = 2831; goto _test_eof + _test_eof2832: cs = 2832; goto _test_eof + _test_eof2833: cs = 2833; goto _test_eof + _test_eof2834: cs = 2834; goto _test_eof + _test_eof2835: cs = 2835; goto _test_eof + _test_eof2836: cs = 2836; goto _test_eof + _test_eof2837: cs = 2837; goto _test_eof + _test_eof2838: cs = 2838; goto _test_eof + _test_eof2839: cs = 2839; goto _test_eof + _test_eof2840: cs = 2840; goto _test_eof + _test_eof2841: cs = 2841; goto _test_eof + _test_eof2842: cs = 2842; goto _test_eof + _test_eof2843: cs = 2843; goto _test_eof + _test_eof2844: cs = 2844; goto _test_eof + _test_eof2845: cs = 2845; goto _test_eof + _test_eof2846: cs = 2846; goto _test_eof + _test_eof2847: cs = 2847; goto _test_eof + _test_eof2848: cs = 2848; goto _test_eof + _test_eof2849: cs = 2849; goto _test_eof + _test_eof2850: cs = 2850; goto _test_eof + _test_eof2851: cs = 2851; goto _test_eof + _test_eof2852: cs = 2852; goto _test_eof + _test_eof2853: cs = 2853; goto _test_eof + _test_eof2854: cs = 2854; goto _test_eof + _test_eof2855: cs = 2855; goto _test_eof + _test_eof2856: cs = 2856; goto _test_eof + _test_eof2857: cs = 2857; goto _test_eof + _test_eof2858: cs = 2858; goto _test_eof + _test_eof2859: cs = 2859; goto _test_eof + _test_eof2860: cs = 2860; goto _test_eof + _test_eof2861: cs = 2861; goto _test_eof + _test_eof2862: cs = 2862; goto _test_eof + _test_eof2863: cs = 2863; goto _test_eof + _test_eof2864: cs = 2864; goto _test_eof + _test_eof2865: cs = 2865; goto _test_eof + _test_eof2866: cs = 2866; goto _test_eof + _test_eof2867: cs = 2867; goto _test_eof + _test_eof2868: cs = 2868; goto _test_eof + _test_eof2869: cs = 2869; goto _test_eof + _test_eof2870: cs = 2870; goto _test_eof + _test_eof2871: cs = 2871; goto _test_eof + _test_eof2872: cs = 
2872; goto _test_eof + _test_eof2873: cs = 2873; goto _test_eof + _test_eof2874: cs = 2874; goto _test_eof + _test_eof2875: cs = 2875; goto _test_eof + _test_eof2876: cs = 2876; goto _test_eof + _test_eof2877: cs = 2877; goto _test_eof + _test_eof2878: cs = 2878; goto _test_eof + _test_eof2879: cs = 2879; goto _test_eof + _test_eof2880: cs = 2880; goto _test_eof + _test_eof2881: cs = 2881; goto _test_eof + _test_eof2882: cs = 2882; goto _test_eof + _test_eof2883: cs = 2883; goto _test_eof + _test_eof2884: cs = 2884; goto _test_eof + _test_eof2885: cs = 2885; goto _test_eof + _test_eof2886: cs = 2886; goto _test_eof + _test_eof2887: cs = 2887; goto _test_eof + _test_eof2888: cs = 2888; goto _test_eof + _test_eof2889: cs = 2889; goto _test_eof + _test_eof2890: cs = 2890; goto _test_eof + _test_eof2891: cs = 2891; goto _test_eof + _test_eof2892: cs = 2892; goto _test_eof + _test_eof2893: cs = 2893; goto _test_eof + _test_eof2894: cs = 2894; goto _test_eof + _test_eof2895: cs = 2895; goto _test_eof + _test_eof2896: cs = 2896; goto _test_eof + _test_eof2897: cs = 2897; goto _test_eof + _test_eof2898: cs = 2898; goto _test_eof + _test_eof2899: cs = 2899; goto _test_eof + _test_eof2900: cs = 2900; goto _test_eof + _test_eof2901: cs = 2901; goto _test_eof + _test_eof2902: cs = 2902; goto _test_eof + _test_eof2903: cs = 2903; goto _test_eof + _test_eof2904: cs = 2904; goto _test_eof + _test_eof2905: cs = 2905; goto _test_eof + _test_eof2906: cs = 2906; goto _test_eof + _test_eof2907: cs = 2907; goto _test_eof + _test_eof2908: cs = 2908; goto _test_eof + _test_eof2909: cs = 2909; goto _test_eof + _test_eof2910: cs = 2910; goto _test_eof + _test_eof2911: cs = 2911; goto _test_eof + _test_eof2912: cs = 2912; goto _test_eof + _test_eof2913: cs = 2913; goto _test_eof + _test_eof2914: cs = 2914; goto _test_eof + _test_eof2915: cs = 2915; goto _test_eof + _test_eof2916: cs = 2916; goto _test_eof + _test_eof2917: cs = 2917; goto _test_eof + _test_eof2918: cs = 2918; goto _test_eof 
+ _test_eof2919: cs = 2919; goto _test_eof + _test_eof2920: cs = 2920; goto _test_eof + _test_eof2921: cs = 2921; goto _test_eof + _test_eof2922: cs = 2922; goto _test_eof + _test_eof2923: cs = 2923; goto _test_eof + _test_eof4886: cs = 4886; goto _test_eof + _test_eof2924: cs = 2924; goto _test_eof + _test_eof2925: cs = 2925; goto _test_eof + _test_eof2926: cs = 2926; goto _test_eof + _test_eof2927: cs = 2927; goto _test_eof + _test_eof2928: cs = 2928; goto _test_eof + _test_eof2929: cs = 2929; goto _test_eof + _test_eof2930: cs = 2930; goto _test_eof + _test_eof2931: cs = 2931; goto _test_eof + _test_eof2932: cs = 2932; goto _test_eof + _test_eof2933: cs = 2933; goto _test_eof + _test_eof2934: cs = 2934; goto _test_eof + _test_eof2935: cs = 2935; goto _test_eof + _test_eof2936: cs = 2936; goto _test_eof + _test_eof2937: cs = 2937; goto _test_eof + _test_eof2938: cs = 2938; goto _test_eof + _test_eof2939: cs = 2939; goto _test_eof + _test_eof2940: cs = 2940; goto _test_eof + _test_eof2941: cs = 2941; goto _test_eof + _test_eof2942: cs = 2942; goto _test_eof + _test_eof2943: cs = 2943; goto _test_eof + _test_eof2944: cs = 2944; goto _test_eof + _test_eof2945: cs = 2945; goto _test_eof + _test_eof2946: cs = 2946; goto _test_eof + _test_eof2947: cs = 2947; goto _test_eof + _test_eof2948: cs = 2948; goto _test_eof + _test_eof2949: cs = 2949; goto _test_eof + _test_eof2950: cs = 2950; goto _test_eof + _test_eof2951: cs = 2951; goto _test_eof + _test_eof2952: cs = 2952; goto _test_eof + _test_eof2953: cs = 2953; goto _test_eof + _test_eof2954: cs = 2954; goto _test_eof + _test_eof2955: cs = 2955; goto _test_eof + _test_eof2956: cs = 2956; goto _test_eof + _test_eof2957: cs = 2957; goto _test_eof + _test_eof2958: cs = 2958; goto _test_eof + _test_eof2959: cs = 2959; goto _test_eof + _test_eof2960: cs = 2960; goto _test_eof + _test_eof2961: cs = 2961; goto _test_eof + _test_eof2962: cs = 2962; goto _test_eof + _test_eof2963: cs = 2963; goto _test_eof + _test_eof2964: cs = 
2964; goto _test_eof + _test_eof2965: cs = 2965; goto _test_eof + _test_eof2966: cs = 2966; goto _test_eof + _test_eof2967: cs = 2967; goto _test_eof + _test_eof2968: cs = 2968; goto _test_eof + _test_eof2969: cs = 2969; goto _test_eof + _test_eof2970: cs = 2970; goto _test_eof + _test_eof2971: cs = 2971; goto _test_eof + _test_eof2972: cs = 2972; goto _test_eof + _test_eof2973: cs = 2973; goto _test_eof + _test_eof2974: cs = 2974; goto _test_eof + _test_eof2975: cs = 2975; goto _test_eof + _test_eof2976: cs = 2976; goto _test_eof + _test_eof2977: cs = 2977; goto _test_eof + _test_eof2978: cs = 2978; goto _test_eof + _test_eof2979: cs = 2979; goto _test_eof + _test_eof2980: cs = 2980; goto _test_eof + _test_eof2981: cs = 2981; goto _test_eof + _test_eof2982: cs = 2982; goto _test_eof + _test_eof2983: cs = 2983; goto _test_eof + _test_eof2984: cs = 2984; goto _test_eof + _test_eof2985: cs = 2985; goto _test_eof + _test_eof2986: cs = 2986; goto _test_eof + _test_eof2987: cs = 2987; goto _test_eof + _test_eof2988: cs = 2988; goto _test_eof + _test_eof2989: cs = 2989; goto _test_eof + _test_eof2990: cs = 2990; goto _test_eof + _test_eof2991: cs = 2991; goto _test_eof + _test_eof2992: cs = 2992; goto _test_eof + _test_eof2993: cs = 2993; goto _test_eof + _test_eof2994: cs = 2994; goto _test_eof + _test_eof2995: cs = 2995; goto _test_eof + _test_eof2996: cs = 2996; goto _test_eof + _test_eof2997: cs = 2997; goto _test_eof + _test_eof2998: cs = 2998; goto _test_eof + _test_eof2999: cs = 2999; goto _test_eof + _test_eof3000: cs = 3000; goto _test_eof + _test_eof3001: cs = 3001; goto _test_eof + _test_eof3002: cs = 3002; goto _test_eof + _test_eof3003: cs = 3003; goto _test_eof + _test_eof3004: cs = 3004; goto _test_eof + _test_eof3005: cs = 3005; goto _test_eof + _test_eof3006: cs = 3006; goto _test_eof + _test_eof3007: cs = 3007; goto _test_eof + _test_eof3008: cs = 3008; goto _test_eof + _test_eof3009: cs = 3009; goto _test_eof + _test_eof3010: cs = 3010; goto _test_eof 
+ _test_eof3011: cs = 3011; goto _test_eof + _test_eof3012: cs = 3012; goto _test_eof + _test_eof3013: cs = 3013; goto _test_eof + _test_eof3014: cs = 3014; goto _test_eof + _test_eof3015: cs = 3015; goto _test_eof + _test_eof3016: cs = 3016; goto _test_eof + _test_eof3017: cs = 3017; goto _test_eof + _test_eof3018: cs = 3018; goto _test_eof + _test_eof3019: cs = 3019; goto _test_eof + _test_eof3020: cs = 3020; goto _test_eof + _test_eof3021: cs = 3021; goto _test_eof + _test_eof3022: cs = 3022; goto _test_eof + _test_eof3023: cs = 3023; goto _test_eof + _test_eof3024: cs = 3024; goto _test_eof + _test_eof3025: cs = 3025; goto _test_eof + _test_eof3026: cs = 3026; goto _test_eof + _test_eof3027: cs = 3027; goto _test_eof + _test_eof3028: cs = 3028; goto _test_eof + _test_eof3029: cs = 3029; goto _test_eof + _test_eof3030: cs = 3030; goto _test_eof + _test_eof3031: cs = 3031; goto _test_eof + _test_eof3032: cs = 3032; goto _test_eof + _test_eof3033: cs = 3033; goto _test_eof + _test_eof3034: cs = 3034; goto _test_eof + _test_eof3035: cs = 3035; goto _test_eof + _test_eof3036: cs = 3036; goto _test_eof + _test_eof3037: cs = 3037; goto _test_eof + _test_eof3038: cs = 3038; goto _test_eof + _test_eof3039: cs = 3039; goto _test_eof + _test_eof3040: cs = 3040; goto _test_eof + _test_eof3041: cs = 3041; goto _test_eof + _test_eof3042: cs = 3042; goto _test_eof + _test_eof3043: cs = 3043; goto _test_eof + _test_eof3044: cs = 3044; goto _test_eof + _test_eof3045: cs = 3045; goto _test_eof + _test_eof3046: cs = 3046; goto _test_eof + _test_eof3047: cs = 3047; goto _test_eof + _test_eof3048: cs = 3048; goto _test_eof + _test_eof3049: cs = 3049; goto _test_eof + _test_eof3050: cs = 3050; goto _test_eof + _test_eof3051: cs = 3051; goto _test_eof + _test_eof3052: cs = 3052; goto _test_eof + _test_eof3053: cs = 3053; goto _test_eof + _test_eof3054: cs = 3054; goto _test_eof + _test_eof3055: cs = 3055; goto _test_eof + _test_eof3056: cs = 3056; goto _test_eof + _test_eof3057: cs = 
3057; goto _test_eof + _test_eof3058: cs = 3058; goto _test_eof + _test_eof3059: cs = 3059; goto _test_eof + _test_eof3060: cs = 3060; goto _test_eof + _test_eof3061: cs = 3061; goto _test_eof + _test_eof3062: cs = 3062; goto _test_eof + _test_eof3063: cs = 3063; goto _test_eof + _test_eof3064: cs = 3064; goto _test_eof + _test_eof3065: cs = 3065; goto _test_eof + _test_eof3066: cs = 3066; goto _test_eof + _test_eof3067: cs = 3067; goto _test_eof + _test_eof3068: cs = 3068; goto _test_eof + _test_eof3069: cs = 3069; goto _test_eof + _test_eof3070: cs = 3070; goto _test_eof + _test_eof4887: cs = 4887; goto _test_eof + _test_eof3071: cs = 3071; goto _test_eof + _test_eof3072: cs = 3072; goto _test_eof + _test_eof3073: cs = 3073; goto _test_eof + _test_eof3074: cs = 3074; goto _test_eof + _test_eof3075: cs = 3075; goto _test_eof + _test_eof3076: cs = 3076; goto _test_eof + _test_eof3077: cs = 3077; goto _test_eof + _test_eof3078: cs = 3078; goto _test_eof + _test_eof3079: cs = 3079; goto _test_eof + _test_eof3080: cs = 3080; goto _test_eof + _test_eof3081: cs = 3081; goto _test_eof + _test_eof3082: cs = 3082; goto _test_eof + _test_eof3083: cs = 3083; goto _test_eof + _test_eof3084: cs = 3084; goto _test_eof + _test_eof3085: cs = 3085; goto _test_eof + _test_eof3086: cs = 3086; goto _test_eof + _test_eof3087: cs = 3087; goto _test_eof + _test_eof3088: cs = 3088; goto _test_eof + _test_eof3089: cs = 3089; goto _test_eof + _test_eof3090: cs = 3090; goto _test_eof + _test_eof3091: cs = 3091; goto _test_eof + _test_eof3092: cs = 3092; goto _test_eof + _test_eof3093: cs = 3093; goto _test_eof + _test_eof3094: cs = 3094; goto _test_eof + _test_eof3095: cs = 3095; goto _test_eof + _test_eof3096: cs = 3096; goto _test_eof + _test_eof3097: cs = 3097; goto _test_eof + _test_eof3098: cs = 3098; goto _test_eof + _test_eof3099: cs = 3099; goto _test_eof + _test_eof3100: cs = 3100; goto _test_eof + _test_eof3101: cs = 3101; goto _test_eof + _test_eof3102: cs = 3102; goto _test_eof 
+ _test_eof3103: cs = 3103; goto _test_eof + _test_eof3104: cs = 3104; goto _test_eof + _test_eof3105: cs = 3105; goto _test_eof + _test_eof3106: cs = 3106; goto _test_eof + _test_eof3107: cs = 3107; goto _test_eof + _test_eof3108: cs = 3108; goto _test_eof + _test_eof3109: cs = 3109; goto _test_eof + _test_eof3110: cs = 3110; goto _test_eof + _test_eof3111: cs = 3111; goto _test_eof + _test_eof3112: cs = 3112; goto _test_eof + _test_eof3113: cs = 3113; goto _test_eof + _test_eof3114: cs = 3114; goto _test_eof + _test_eof3115: cs = 3115; goto _test_eof + _test_eof3116: cs = 3116; goto _test_eof + _test_eof3117: cs = 3117; goto _test_eof + _test_eof3118: cs = 3118; goto _test_eof + _test_eof3119: cs = 3119; goto _test_eof + _test_eof3120: cs = 3120; goto _test_eof + _test_eof3121: cs = 3121; goto _test_eof + _test_eof3122: cs = 3122; goto _test_eof + _test_eof3123: cs = 3123; goto _test_eof + _test_eof3124: cs = 3124; goto _test_eof + _test_eof3125: cs = 3125; goto _test_eof + _test_eof3126: cs = 3126; goto _test_eof + _test_eof3127: cs = 3127; goto _test_eof + _test_eof3128: cs = 3128; goto _test_eof + _test_eof3129: cs = 3129; goto _test_eof + _test_eof3130: cs = 3130; goto _test_eof + _test_eof3131: cs = 3131; goto _test_eof + _test_eof3132: cs = 3132; goto _test_eof + _test_eof3133: cs = 3133; goto _test_eof + _test_eof3134: cs = 3134; goto _test_eof + _test_eof3135: cs = 3135; goto _test_eof + _test_eof3136: cs = 3136; goto _test_eof + _test_eof3137: cs = 3137; goto _test_eof + _test_eof3138: cs = 3138; goto _test_eof + _test_eof3139: cs = 3139; goto _test_eof + _test_eof3140: cs = 3140; goto _test_eof + _test_eof3141: cs = 3141; goto _test_eof + _test_eof3142: cs = 3142; goto _test_eof + _test_eof3143: cs = 3143; goto _test_eof + _test_eof3144: cs = 3144; goto _test_eof + _test_eof3145: cs = 3145; goto _test_eof + _test_eof3146: cs = 3146; goto _test_eof + _test_eof3147: cs = 3147; goto _test_eof + _test_eof3148: cs = 3148; goto _test_eof + _test_eof3149: cs = 
3149; goto _test_eof + _test_eof3150: cs = 3150; goto _test_eof + _test_eof3151: cs = 3151; goto _test_eof + _test_eof3152: cs = 3152; goto _test_eof + _test_eof3153: cs = 3153; goto _test_eof + _test_eof3154: cs = 3154; goto _test_eof + _test_eof3155: cs = 3155; goto _test_eof + _test_eof3156: cs = 3156; goto _test_eof + _test_eof3157: cs = 3157; goto _test_eof + _test_eof3158: cs = 3158; goto _test_eof + _test_eof3159: cs = 3159; goto _test_eof + _test_eof3160: cs = 3160; goto _test_eof + _test_eof3161: cs = 3161; goto _test_eof + _test_eof3162: cs = 3162; goto _test_eof + _test_eof3163: cs = 3163; goto _test_eof + _test_eof3164: cs = 3164; goto _test_eof + _test_eof3165: cs = 3165; goto _test_eof + _test_eof3166: cs = 3166; goto _test_eof + _test_eof3167: cs = 3167; goto _test_eof + _test_eof3168: cs = 3168; goto _test_eof + _test_eof3169: cs = 3169; goto _test_eof + _test_eof3170: cs = 3170; goto _test_eof + _test_eof3171: cs = 3171; goto _test_eof + _test_eof3172: cs = 3172; goto _test_eof + _test_eof3173: cs = 3173; goto _test_eof + _test_eof3174: cs = 3174; goto _test_eof + _test_eof3175: cs = 3175; goto _test_eof + _test_eof3176: cs = 3176; goto _test_eof + _test_eof3177: cs = 3177; goto _test_eof + _test_eof3178: cs = 3178; goto _test_eof + _test_eof3179: cs = 3179; goto _test_eof + _test_eof3180: cs = 3180; goto _test_eof + _test_eof3181: cs = 3181; goto _test_eof + _test_eof3182: cs = 3182; goto _test_eof + _test_eof3183: cs = 3183; goto _test_eof + _test_eof3184: cs = 3184; goto _test_eof + _test_eof3185: cs = 3185; goto _test_eof + _test_eof3186: cs = 3186; goto _test_eof + _test_eof3187: cs = 3187; goto _test_eof + _test_eof3188: cs = 3188; goto _test_eof + _test_eof3189: cs = 3189; goto _test_eof + _test_eof3190: cs = 3190; goto _test_eof + _test_eof3191: cs = 3191; goto _test_eof + _test_eof3192: cs = 3192; goto _test_eof + _test_eof3193: cs = 3193; goto _test_eof + _test_eof3194: cs = 3194; goto _test_eof + _test_eof3195: cs = 3195; goto _test_eof 
+ _test_eof3196: cs = 3196; goto _test_eof + _test_eof3197: cs = 3197; goto _test_eof + _test_eof3198: cs = 3198; goto _test_eof + _test_eof3199: cs = 3199; goto _test_eof + _test_eof3200: cs = 3200; goto _test_eof + _test_eof3201: cs = 3201; goto _test_eof + _test_eof3202: cs = 3202; goto _test_eof + _test_eof3203: cs = 3203; goto _test_eof + _test_eof3204: cs = 3204; goto _test_eof + _test_eof3205: cs = 3205; goto _test_eof + _test_eof3206: cs = 3206; goto _test_eof + _test_eof3207: cs = 3207; goto _test_eof + _test_eof3208: cs = 3208; goto _test_eof + _test_eof3209: cs = 3209; goto _test_eof + _test_eof3210: cs = 3210; goto _test_eof + _test_eof3211: cs = 3211; goto _test_eof + _test_eof3212: cs = 3212; goto _test_eof + _test_eof3213: cs = 3213; goto _test_eof + _test_eof3214: cs = 3214; goto _test_eof + _test_eof3215: cs = 3215; goto _test_eof + _test_eof3216: cs = 3216; goto _test_eof + _test_eof3217: cs = 3217; goto _test_eof + _test_eof4888: cs = 4888; goto _test_eof + _test_eof4889: cs = 4889; goto _test_eof + _test_eof4890: cs = 4890; goto _test_eof + _test_eof4891: cs = 4891; goto _test_eof + _test_eof4892: cs = 4892; goto _test_eof + _test_eof4893: cs = 4893; goto _test_eof + _test_eof4894: cs = 4894; goto _test_eof + _test_eof4895: cs = 4895; goto _test_eof + _test_eof4896: cs = 4896; goto _test_eof + _test_eof4897: cs = 4897; goto _test_eof + _test_eof4898: cs = 4898; goto _test_eof + _test_eof4899: cs = 4899; goto _test_eof + _test_eof4900: cs = 4900; goto _test_eof + _test_eof4901: cs = 4901; goto _test_eof + _test_eof4902: cs = 4902; goto _test_eof + _test_eof4903: cs = 4903; goto _test_eof + _test_eof4904: cs = 4904; goto _test_eof + _test_eof4905: cs = 4905; goto _test_eof + _test_eof4906: cs = 4906; goto _test_eof + _test_eof4907: cs = 4907; goto _test_eof + _test_eof4908: cs = 4908; goto _test_eof + _test_eof4909: cs = 4909; goto _test_eof + _test_eof4910: cs = 4910; goto _test_eof + _test_eof4911: cs = 4911; goto _test_eof + _test_eof4912: cs = 
4912; goto _test_eof + _test_eof4913: cs = 4913; goto _test_eof + _test_eof4914: cs = 4914; goto _test_eof + _test_eof4915: cs = 4915; goto _test_eof + _test_eof4916: cs = 4916; goto _test_eof + _test_eof4917: cs = 4917; goto _test_eof + _test_eof4918: cs = 4918; goto _test_eof + _test_eof4919: cs = 4919; goto _test_eof + _test_eof4920: cs = 4920; goto _test_eof + _test_eof4921: cs = 4921; goto _test_eof + _test_eof4922: cs = 4922; goto _test_eof + _test_eof4923: cs = 4923; goto _test_eof + _test_eof4924: cs = 4924; goto _test_eof + _test_eof4925: cs = 4925; goto _test_eof + _test_eof4926: cs = 4926; goto _test_eof + _test_eof4927: cs = 4927; goto _test_eof + _test_eof4928: cs = 4928; goto _test_eof + _test_eof3218: cs = 3218; goto _test_eof + _test_eof3219: cs = 3219; goto _test_eof + _test_eof3220: cs = 3220; goto _test_eof + _test_eof3221: cs = 3221; goto _test_eof + _test_eof3222: cs = 3222; goto _test_eof + _test_eof3223: cs = 3223; goto _test_eof + _test_eof3224: cs = 3224; goto _test_eof + _test_eof3225: cs = 3225; goto _test_eof + _test_eof3226: cs = 3226; goto _test_eof + _test_eof3227: cs = 3227; goto _test_eof + _test_eof3228: cs = 3228; goto _test_eof + _test_eof3229: cs = 3229; goto _test_eof + _test_eof3230: cs = 3230; goto _test_eof + _test_eof3231: cs = 3231; goto _test_eof + _test_eof4929: cs = 4929; goto _test_eof + _test_eof4930: cs = 4930; goto _test_eof + _test_eof4931: cs = 4931; goto _test_eof + _test_eof4932: cs = 4932; goto _test_eof + _test_eof3232: cs = 3232; goto _test_eof + _test_eof4933: cs = 4933; goto _test_eof + _test_eof4934: cs = 4934; goto _test_eof + _test_eof4935: cs = 4935; goto _test_eof + _test_eof4936: cs = 4936; goto _test_eof + _test_eof4937: cs = 4937; goto _test_eof + _test_eof4938: cs = 4938; goto _test_eof + _test_eof4939: cs = 4939; goto _test_eof + _test_eof4940: cs = 4940; goto _test_eof + _test_eof4941: cs = 4941; goto _test_eof + _test_eof4942: cs = 4942; goto _test_eof + _test_eof4943: cs = 4943; goto _test_eof 
+ _test_eof4944: cs = 4944; goto _test_eof + _test_eof4945: cs = 4945; goto _test_eof + _test_eof4946: cs = 4946; goto _test_eof + _test_eof4947: cs = 4947; goto _test_eof + _test_eof4948: cs = 4948; goto _test_eof + _test_eof4949: cs = 4949; goto _test_eof + _test_eof4950: cs = 4950; goto _test_eof + _test_eof4951: cs = 4951; goto _test_eof + _test_eof4952: cs = 4952; goto _test_eof + _test_eof4953: cs = 4953; goto _test_eof + _test_eof4954: cs = 4954; goto _test_eof + _test_eof4955: cs = 4955; goto _test_eof + _test_eof4956: cs = 4956; goto _test_eof + _test_eof4957: cs = 4957; goto _test_eof + _test_eof3233: cs = 3233; goto _test_eof + _test_eof4958: cs = 4958; goto _test_eof + _test_eof4959: cs = 4959; goto _test_eof + _test_eof4960: cs = 4960; goto _test_eof + _test_eof4961: cs = 4961; goto _test_eof + _test_eof4962: cs = 4962; goto _test_eof + _test_eof4963: cs = 4963; goto _test_eof + _test_eof3234: cs = 3234; goto _test_eof + _test_eof4964: cs = 4964; goto _test_eof + _test_eof4965: cs = 4965; goto _test_eof + _test_eof3235: cs = 3235; goto _test_eof + _test_eof4966: cs = 4966; goto _test_eof + _test_eof4967: cs = 4967; goto _test_eof + _test_eof4968: cs = 4968; goto _test_eof + _test_eof4969: cs = 4969; goto _test_eof + _test_eof4970: cs = 4970; goto _test_eof + _test_eof4971: cs = 4971; goto _test_eof + _test_eof4972: cs = 4972; goto _test_eof + _test_eof4973: cs = 4973; goto _test_eof + _test_eof4974: cs = 4974; goto _test_eof + _test_eof4975: cs = 4975; goto _test_eof + _test_eof4976: cs = 4976; goto _test_eof + _test_eof4977: cs = 4977; goto _test_eof + _test_eof4978: cs = 4978; goto _test_eof + _test_eof4979: cs = 4979; goto _test_eof + _test_eof4980: cs = 4980; goto _test_eof + _test_eof3236: cs = 3236; goto _test_eof + _test_eof4981: cs = 4981; goto _test_eof + _test_eof4982: cs = 4982; goto _test_eof + _test_eof4983: cs = 4983; goto _test_eof + _test_eof3237: cs = 3237; goto _test_eof + _test_eof4984: cs = 4984; goto _test_eof + _test_eof4985: cs = 
4985; goto _test_eof + _test_eof4986: cs = 4986; goto _test_eof + _test_eof4987: cs = 4987; goto _test_eof + _test_eof4988: cs = 4988; goto _test_eof + _test_eof4989: cs = 4989; goto _test_eof + _test_eof3238: cs = 3238; goto _test_eof + _test_eof4990: cs = 4990; goto _test_eof + _test_eof4991: cs = 4991; goto _test_eof + _test_eof4992: cs = 4992; goto _test_eof + _test_eof4993: cs = 4993; goto _test_eof + _test_eof4994: cs = 4994; goto _test_eof + _test_eof4995: cs = 4995; goto _test_eof + _test_eof4996: cs = 4996; goto _test_eof + _test_eof4997: cs = 4997; goto _test_eof + _test_eof4998: cs = 4998; goto _test_eof + _test_eof4999: cs = 4999; goto _test_eof + _test_eof5000: cs = 5000; goto _test_eof + _test_eof5001: cs = 5001; goto _test_eof + _test_eof5002: cs = 5002; goto _test_eof + _test_eof5003: cs = 5003; goto _test_eof + _test_eof5004: cs = 5004; goto _test_eof + _test_eof5005: cs = 5005; goto _test_eof + _test_eof5006: cs = 5006; goto _test_eof + _test_eof5007: cs = 5007; goto _test_eof + _test_eof5008: cs = 5008; goto _test_eof + _test_eof5009: cs = 5009; goto _test_eof + _test_eof5010: cs = 5010; goto _test_eof + _test_eof5011: cs = 5011; goto _test_eof + _test_eof5012: cs = 5012; goto _test_eof + _test_eof5013: cs = 5013; goto _test_eof + _test_eof5014: cs = 5014; goto _test_eof + _test_eof5015: cs = 5015; goto _test_eof + _test_eof5016: cs = 5016; goto _test_eof + _test_eof5017: cs = 5017; goto _test_eof + _test_eof5018: cs = 5018; goto _test_eof + _test_eof5019: cs = 5019; goto _test_eof + _test_eof5020: cs = 5020; goto _test_eof + _test_eof5021: cs = 5021; goto _test_eof + _test_eof5022: cs = 5022; goto _test_eof + _test_eof5023: cs = 5023; goto _test_eof + _test_eof5024: cs = 5024; goto _test_eof + _test_eof5025: cs = 5025; goto _test_eof + _test_eof5026: cs = 5026; goto _test_eof + _test_eof5027: cs = 5027; goto _test_eof + _test_eof5028: cs = 5028; goto _test_eof + _test_eof5029: cs = 5029; goto _test_eof + _test_eof5030: cs = 5030; goto _test_eof 
+ _test_eof5031: cs = 5031; goto _test_eof + _test_eof5032: cs = 5032; goto _test_eof + _test_eof5033: cs = 5033; goto _test_eof + _test_eof5034: cs = 5034; goto _test_eof + _test_eof5035: cs = 5035; goto _test_eof + _test_eof5036: cs = 5036; goto _test_eof + _test_eof5037: cs = 5037; goto _test_eof + _test_eof5038: cs = 5038; goto _test_eof + _test_eof5039: cs = 5039; goto _test_eof + _test_eof5040: cs = 5040; goto _test_eof + _test_eof5041: cs = 5041; goto _test_eof + _test_eof5042: cs = 5042; goto _test_eof + _test_eof5043: cs = 5043; goto _test_eof + _test_eof5044: cs = 5044; goto _test_eof + _test_eof5045: cs = 5045; goto _test_eof + _test_eof5046: cs = 5046; goto _test_eof + _test_eof5047: cs = 5047; goto _test_eof + _test_eof5048: cs = 5048; goto _test_eof + _test_eof5049: cs = 5049; goto _test_eof + _test_eof5050: cs = 5050; goto _test_eof + _test_eof5051: cs = 5051; goto _test_eof + _test_eof5052: cs = 5052; goto _test_eof + _test_eof5053: cs = 5053; goto _test_eof + _test_eof5054: cs = 5054; goto _test_eof + _test_eof5055: cs = 5055; goto _test_eof + _test_eof5056: cs = 5056; goto _test_eof + _test_eof5057: cs = 5057; goto _test_eof + _test_eof5058: cs = 5058; goto _test_eof + _test_eof5059: cs = 5059; goto _test_eof + _test_eof5060: cs = 5060; goto _test_eof + _test_eof5061: cs = 5061; goto _test_eof + _test_eof5062: cs = 5062; goto _test_eof + _test_eof5063: cs = 5063; goto _test_eof + _test_eof5064: cs = 5064; goto _test_eof + _test_eof5065: cs = 5065; goto _test_eof + _test_eof5066: cs = 5066; goto _test_eof + _test_eof5067: cs = 5067; goto _test_eof + _test_eof5068: cs = 5068; goto _test_eof + _test_eof5069: cs = 5069; goto _test_eof + _test_eof5070: cs = 5070; goto _test_eof + _test_eof5071: cs = 5071; goto _test_eof + _test_eof3239: cs = 3239; goto _test_eof + _test_eof3240: cs = 3240; goto _test_eof + _test_eof3241: cs = 3241; goto _test_eof + _test_eof3242: cs = 3242; goto _test_eof + _test_eof3243: cs = 3243; goto _test_eof + _test_eof3244: cs = 
3244; goto _test_eof + _test_eof3245: cs = 3245; goto _test_eof + _test_eof3246: cs = 3246; goto _test_eof + _test_eof3247: cs = 3247; goto _test_eof + _test_eof3248: cs = 3248; goto _test_eof + _test_eof3249: cs = 3249; goto _test_eof + _test_eof3250: cs = 3250; goto _test_eof + _test_eof3251: cs = 3251; goto _test_eof + _test_eof3252: cs = 3252; goto _test_eof + _test_eof3253: cs = 3253; goto _test_eof + _test_eof3254: cs = 3254; goto _test_eof + _test_eof3255: cs = 3255; goto _test_eof + _test_eof3256: cs = 3256; goto _test_eof + _test_eof3257: cs = 3257; goto _test_eof + _test_eof3258: cs = 3258; goto _test_eof + _test_eof3259: cs = 3259; goto _test_eof + _test_eof3260: cs = 3260; goto _test_eof + _test_eof3261: cs = 3261; goto _test_eof + _test_eof3262: cs = 3262; goto _test_eof + _test_eof3263: cs = 3263; goto _test_eof + _test_eof3264: cs = 3264; goto _test_eof + _test_eof3265: cs = 3265; goto _test_eof + _test_eof5072: cs = 5072; goto _test_eof + _test_eof3266: cs = 3266; goto _test_eof + _test_eof3267: cs = 3267; goto _test_eof + _test_eof3268: cs = 3268; goto _test_eof + _test_eof5073: cs = 5073; goto _test_eof + _test_eof3269: cs = 3269; goto _test_eof + _test_eof3270: cs = 3270; goto _test_eof + _test_eof3271: cs = 3271; goto _test_eof + _test_eof3272: cs = 3272; goto _test_eof + _test_eof3273: cs = 3273; goto _test_eof + _test_eof3274: cs = 3274; goto _test_eof + _test_eof3275: cs = 3275; goto _test_eof + _test_eof3276: cs = 3276; goto _test_eof + _test_eof3277: cs = 3277; goto _test_eof + _test_eof3278: cs = 3278; goto _test_eof + _test_eof3279: cs = 3279; goto _test_eof + _test_eof3280: cs = 3280; goto _test_eof + _test_eof3281: cs = 3281; goto _test_eof + _test_eof3282: cs = 3282; goto _test_eof + _test_eof3283: cs = 3283; goto _test_eof + _test_eof3284: cs = 3284; goto _test_eof + _test_eof3285: cs = 3285; goto _test_eof + _test_eof3286: cs = 3286; goto _test_eof + _test_eof3287: cs = 3287; goto _test_eof + _test_eof3288: cs = 3288; goto _test_eof 
+ _test_eof3289: cs = 3289; goto _test_eof + _test_eof3290: cs = 3290; goto _test_eof + _test_eof3291: cs = 3291; goto _test_eof + _test_eof3292: cs = 3292; goto _test_eof + _test_eof3293: cs = 3293; goto _test_eof + _test_eof3294: cs = 3294; goto _test_eof + _test_eof3295: cs = 3295; goto _test_eof + _test_eof3296: cs = 3296; goto _test_eof + _test_eof3297: cs = 3297; goto _test_eof + _test_eof3298: cs = 3298; goto _test_eof + _test_eof3299: cs = 3299; goto _test_eof + _test_eof3300: cs = 3300; goto _test_eof + _test_eof3301: cs = 3301; goto _test_eof + _test_eof3302: cs = 3302; goto _test_eof + _test_eof3303: cs = 3303; goto _test_eof + _test_eof3304: cs = 3304; goto _test_eof + _test_eof3305: cs = 3305; goto _test_eof + _test_eof3306: cs = 3306; goto _test_eof + _test_eof3307: cs = 3307; goto _test_eof + _test_eof3308: cs = 3308; goto _test_eof + _test_eof3309: cs = 3309; goto _test_eof + _test_eof3310: cs = 3310; goto _test_eof + _test_eof3311: cs = 3311; goto _test_eof + _test_eof3312: cs = 3312; goto _test_eof + _test_eof3313: cs = 3313; goto _test_eof + _test_eof3314: cs = 3314; goto _test_eof + _test_eof3315: cs = 3315; goto _test_eof + _test_eof3316: cs = 3316; goto _test_eof + _test_eof3317: cs = 3317; goto _test_eof + _test_eof3318: cs = 3318; goto _test_eof + _test_eof3319: cs = 3319; goto _test_eof + _test_eof3320: cs = 3320; goto _test_eof + _test_eof3321: cs = 3321; goto _test_eof + _test_eof3322: cs = 3322; goto _test_eof + _test_eof3323: cs = 3323; goto _test_eof + _test_eof3324: cs = 3324; goto _test_eof + _test_eof3325: cs = 3325; goto _test_eof + _test_eof3326: cs = 3326; goto _test_eof + _test_eof3327: cs = 3327; goto _test_eof + _test_eof3328: cs = 3328; goto _test_eof + _test_eof3329: cs = 3329; goto _test_eof + _test_eof3330: cs = 3330; goto _test_eof + _test_eof3331: cs = 3331; goto _test_eof + _test_eof3332: cs = 3332; goto _test_eof + _test_eof3333: cs = 3333; goto _test_eof + _test_eof3334: cs = 3334; goto _test_eof + _test_eof3335: cs = 
3335; goto _test_eof + _test_eof3336: cs = 3336; goto _test_eof + _test_eof3337: cs = 3337; goto _test_eof + _test_eof3338: cs = 3338; goto _test_eof + _test_eof3339: cs = 3339; goto _test_eof + _test_eof3340: cs = 3340; goto _test_eof + _test_eof3341: cs = 3341; goto _test_eof + _test_eof3342: cs = 3342; goto _test_eof + _test_eof3343: cs = 3343; goto _test_eof + _test_eof3344: cs = 3344; goto _test_eof + _test_eof3345: cs = 3345; goto _test_eof + _test_eof3346: cs = 3346; goto _test_eof + _test_eof3347: cs = 3347; goto _test_eof + _test_eof3348: cs = 3348; goto _test_eof + _test_eof3349: cs = 3349; goto _test_eof + _test_eof3350: cs = 3350; goto _test_eof + _test_eof5074: cs = 5074; goto _test_eof + _test_eof3351: cs = 3351; goto _test_eof + _test_eof3352: cs = 3352; goto _test_eof + _test_eof3353: cs = 3353; goto _test_eof + _test_eof3354: cs = 3354; goto _test_eof + _test_eof3355: cs = 3355; goto _test_eof + _test_eof3356: cs = 3356; goto _test_eof + _test_eof3357: cs = 3357; goto _test_eof + _test_eof3358: cs = 3358; goto _test_eof + _test_eof3359: cs = 3359; goto _test_eof + _test_eof3360: cs = 3360; goto _test_eof + _test_eof3361: cs = 3361; goto _test_eof + _test_eof3362: cs = 3362; goto _test_eof + _test_eof3363: cs = 3363; goto _test_eof + _test_eof3364: cs = 3364; goto _test_eof + _test_eof3365: cs = 3365; goto _test_eof + _test_eof3366: cs = 3366; goto _test_eof + _test_eof3367: cs = 3367; goto _test_eof + _test_eof3368: cs = 3368; goto _test_eof + _test_eof3369: cs = 3369; goto _test_eof + _test_eof3370: cs = 3370; goto _test_eof + _test_eof3371: cs = 3371; goto _test_eof + _test_eof3372: cs = 3372; goto _test_eof + _test_eof3373: cs = 3373; goto _test_eof + _test_eof3374: cs = 3374; goto _test_eof + _test_eof3375: cs = 3375; goto _test_eof + _test_eof3376: cs = 3376; goto _test_eof + _test_eof3377: cs = 3377; goto _test_eof + _test_eof3378: cs = 3378; goto _test_eof + _test_eof3379: cs = 3379; goto _test_eof + _test_eof3380: cs = 3380; goto _test_eof 
+ _test_eof3381: cs = 3381; goto _test_eof + _test_eof3382: cs = 3382; goto _test_eof + _test_eof3383: cs = 3383; goto _test_eof + _test_eof3384: cs = 3384; goto _test_eof + _test_eof3385: cs = 3385; goto _test_eof + _test_eof3386: cs = 3386; goto _test_eof + _test_eof3387: cs = 3387; goto _test_eof + _test_eof3388: cs = 3388; goto _test_eof + _test_eof3389: cs = 3389; goto _test_eof + _test_eof3390: cs = 3390; goto _test_eof + _test_eof3391: cs = 3391; goto _test_eof + _test_eof3392: cs = 3392; goto _test_eof + _test_eof3393: cs = 3393; goto _test_eof + _test_eof3394: cs = 3394; goto _test_eof + _test_eof3395: cs = 3395; goto _test_eof + _test_eof3396: cs = 3396; goto _test_eof + _test_eof3397: cs = 3397; goto _test_eof + _test_eof3398: cs = 3398; goto _test_eof + _test_eof3399: cs = 3399; goto _test_eof + _test_eof3400: cs = 3400; goto _test_eof + _test_eof3401: cs = 3401; goto _test_eof + _test_eof3402: cs = 3402; goto _test_eof + _test_eof3403: cs = 3403; goto _test_eof + _test_eof3404: cs = 3404; goto _test_eof + _test_eof3405: cs = 3405; goto _test_eof + _test_eof3406: cs = 3406; goto _test_eof + _test_eof3407: cs = 3407; goto _test_eof + _test_eof3408: cs = 3408; goto _test_eof + _test_eof3409: cs = 3409; goto _test_eof + _test_eof3410: cs = 3410; goto _test_eof + _test_eof3411: cs = 3411; goto _test_eof + _test_eof3412: cs = 3412; goto _test_eof + _test_eof3413: cs = 3413; goto _test_eof + _test_eof3414: cs = 3414; goto _test_eof + _test_eof3415: cs = 3415; goto _test_eof + _test_eof3416: cs = 3416; goto _test_eof + _test_eof3417: cs = 3417; goto _test_eof + _test_eof3418: cs = 3418; goto _test_eof + _test_eof3419: cs = 3419; goto _test_eof + _test_eof3420: cs = 3420; goto _test_eof + _test_eof3421: cs = 3421; goto _test_eof + _test_eof3422: cs = 3422; goto _test_eof + _test_eof3423: cs = 3423; goto _test_eof + _test_eof3424: cs = 3424; goto _test_eof + _test_eof3425: cs = 3425; goto _test_eof + _test_eof3426: cs = 3426; goto _test_eof + _test_eof3427: cs = 
3427; goto _test_eof + _test_eof3428: cs = 3428; goto _test_eof + _test_eof3429: cs = 3429; goto _test_eof + _test_eof3430: cs = 3430; goto _test_eof + _test_eof3431: cs = 3431; goto _test_eof + _test_eof3432: cs = 3432; goto _test_eof + _test_eof3433: cs = 3433; goto _test_eof + _test_eof3434: cs = 3434; goto _test_eof + _test_eof3435: cs = 3435; goto _test_eof + _test_eof3436: cs = 3436; goto _test_eof + _test_eof3437: cs = 3437; goto _test_eof + _test_eof3438: cs = 3438; goto _test_eof + _test_eof3439: cs = 3439; goto _test_eof + _test_eof3440: cs = 3440; goto _test_eof + _test_eof3441: cs = 3441; goto _test_eof + _test_eof3442: cs = 3442; goto _test_eof + _test_eof3443: cs = 3443; goto _test_eof + _test_eof3444: cs = 3444; goto _test_eof + _test_eof3445: cs = 3445; goto _test_eof + _test_eof3446: cs = 3446; goto _test_eof + _test_eof3447: cs = 3447; goto _test_eof + _test_eof3448: cs = 3448; goto _test_eof + _test_eof3449: cs = 3449; goto _test_eof + _test_eof3450: cs = 3450; goto _test_eof + _test_eof3451: cs = 3451; goto _test_eof + _test_eof3452: cs = 3452; goto _test_eof + _test_eof3453: cs = 3453; goto _test_eof + _test_eof3454: cs = 3454; goto _test_eof + _test_eof3455: cs = 3455; goto _test_eof + _test_eof3456: cs = 3456; goto _test_eof + _test_eof3457: cs = 3457; goto _test_eof + _test_eof3458: cs = 3458; goto _test_eof + _test_eof3459: cs = 3459; goto _test_eof + _test_eof3460: cs = 3460; goto _test_eof + _test_eof3461: cs = 3461; goto _test_eof + _test_eof3462: cs = 3462; goto _test_eof + _test_eof3463: cs = 3463; goto _test_eof + _test_eof3464: cs = 3464; goto _test_eof + _test_eof3465: cs = 3465; goto _test_eof + _test_eof3466: cs = 3466; goto _test_eof + _test_eof3467: cs = 3467; goto _test_eof + _test_eof3468: cs = 3468; goto _test_eof + _test_eof3469: cs = 3469; goto _test_eof + _test_eof3470: cs = 3470; goto _test_eof + _test_eof3471: cs = 3471; goto _test_eof + _test_eof3472: cs = 3472; goto _test_eof + _test_eof3473: cs = 3473; goto _test_eof 
+ _test_eof3474: cs = 3474; goto _test_eof + _test_eof3475: cs = 3475; goto _test_eof + _test_eof3476: cs = 3476; goto _test_eof + _test_eof3477: cs = 3477; goto _test_eof + _test_eof3478: cs = 3478; goto _test_eof + _test_eof3479: cs = 3479; goto _test_eof + _test_eof3480: cs = 3480; goto _test_eof + _test_eof3481: cs = 3481; goto _test_eof + _test_eof3482: cs = 3482; goto _test_eof + _test_eof3483: cs = 3483; goto _test_eof + _test_eof3484: cs = 3484; goto _test_eof + _test_eof3485: cs = 3485; goto _test_eof + _test_eof3486: cs = 3486; goto _test_eof + _test_eof3487: cs = 3487; goto _test_eof + _test_eof3488: cs = 3488; goto _test_eof + _test_eof3489: cs = 3489; goto _test_eof + _test_eof3490: cs = 3490; goto _test_eof + _test_eof3491: cs = 3491; goto _test_eof + _test_eof3492: cs = 3492; goto _test_eof + _test_eof3493: cs = 3493; goto _test_eof + _test_eof3494: cs = 3494; goto _test_eof + _test_eof3495: cs = 3495; goto _test_eof + _test_eof3496: cs = 3496; goto _test_eof + _test_eof3497: cs = 3497; goto _test_eof + _test_eof3498: cs = 3498; goto _test_eof + _test_eof3499: cs = 3499; goto _test_eof + _test_eof3500: cs = 3500; goto _test_eof + _test_eof3501: cs = 3501; goto _test_eof + _test_eof3502: cs = 3502; goto _test_eof + _test_eof3503: cs = 3503; goto _test_eof + _test_eof3504: cs = 3504; goto _test_eof + _test_eof3505: cs = 3505; goto _test_eof + _test_eof3506: cs = 3506; goto _test_eof + _test_eof3507: cs = 3507; goto _test_eof + _test_eof3508: cs = 3508; goto _test_eof + _test_eof3509: cs = 3509; goto _test_eof + _test_eof3510: cs = 3510; goto _test_eof + _test_eof3511: cs = 3511; goto _test_eof + _test_eof3512: cs = 3512; goto _test_eof + _test_eof3513: cs = 3513; goto _test_eof + _test_eof3514: cs = 3514; goto _test_eof + _test_eof3515: cs = 3515; goto _test_eof + _test_eof3516: cs = 3516; goto _test_eof + _test_eof3517: cs = 3517; goto _test_eof + _test_eof3518: cs = 3518; goto _test_eof + _test_eof3519: cs = 3519; goto _test_eof + _test_eof3520: cs = 
3520; goto _test_eof + _test_eof3521: cs = 3521; goto _test_eof + _test_eof3522: cs = 3522; goto _test_eof + _test_eof3523: cs = 3523; goto _test_eof + _test_eof3524: cs = 3524; goto _test_eof + _test_eof3525: cs = 3525; goto _test_eof + _test_eof3526: cs = 3526; goto _test_eof + _test_eof3527: cs = 3527; goto _test_eof + _test_eof3528: cs = 3528; goto _test_eof + _test_eof3529: cs = 3529; goto _test_eof + _test_eof3530: cs = 3530; goto _test_eof + _test_eof3531: cs = 3531; goto _test_eof + _test_eof3532: cs = 3532; goto _test_eof + _test_eof3533: cs = 3533; goto _test_eof + _test_eof3534: cs = 3534; goto _test_eof + _test_eof3535: cs = 3535; goto _test_eof + _test_eof3536: cs = 3536; goto _test_eof + _test_eof3537: cs = 3537; goto _test_eof + _test_eof3538: cs = 3538; goto _test_eof + _test_eof3539: cs = 3539; goto _test_eof + _test_eof3540: cs = 3540; goto _test_eof + _test_eof3541: cs = 3541; goto _test_eof + _test_eof3542: cs = 3542; goto _test_eof + _test_eof3543: cs = 3543; goto _test_eof + _test_eof3544: cs = 3544; goto _test_eof + _test_eof3545: cs = 3545; goto _test_eof + _test_eof3546: cs = 3546; goto _test_eof + _test_eof3547: cs = 3547; goto _test_eof + _test_eof3548: cs = 3548; goto _test_eof + _test_eof3549: cs = 3549; goto _test_eof + _test_eof3550: cs = 3550; goto _test_eof + _test_eof3551: cs = 3551; goto _test_eof + _test_eof3552: cs = 3552; goto _test_eof + _test_eof3553: cs = 3553; goto _test_eof + _test_eof3554: cs = 3554; goto _test_eof + _test_eof3555: cs = 3555; goto _test_eof + _test_eof3556: cs = 3556; goto _test_eof + _test_eof3557: cs = 3557; goto _test_eof + _test_eof3558: cs = 3558; goto _test_eof + _test_eof3559: cs = 3559; goto _test_eof + _test_eof3560: cs = 3560; goto _test_eof + _test_eof3561: cs = 3561; goto _test_eof + _test_eof3562: cs = 3562; goto _test_eof + _test_eof3563: cs = 3563; goto _test_eof + _test_eof3564: cs = 3564; goto _test_eof + _test_eof3565: cs = 3565; goto _test_eof + _test_eof3566: cs = 3566; goto _test_eof 
+ _test_eof3567: cs = 3567; goto _test_eof + _test_eof3568: cs = 3568; goto _test_eof + _test_eof3569: cs = 3569; goto _test_eof + _test_eof3570: cs = 3570; goto _test_eof + _test_eof3571: cs = 3571; goto _test_eof + _test_eof3572: cs = 3572; goto _test_eof + _test_eof3573: cs = 3573; goto _test_eof + _test_eof3574: cs = 3574; goto _test_eof + _test_eof3575: cs = 3575; goto _test_eof + _test_eof3576: cs = 3576; goto _test_eof + _test_eof3577: cs = 3577; goto _test_eof + _test_eof3578: cs = 3578; goto _test_eof + _test_eof3579: cs = 3579; goto _test_eof + _test_eof3580: cs = 3580; goto _test_eof + _test_eof3581: cs = 3581; goto _test_eof + _test_eof3582: cs = 3582; goto _test_eof + _test_eof3583: cs = 3583; goto _test_eof + _test_eof3584: cs = 3584; goto _test_eof + _test_eof3585: cs = 3585; goto _test_eof + _test_eof3586: cs = 3586; goto _test_eof + _test_eof3587: cs = 3587; goto _test_eof + _test_eof5075: cs = 5075; goto _test_eof + _test_eof3588: cs = 3588; goto _test_eof + _test_eof3589: cs = 3589; goto _test_eof + _test_eof3590: cs = 3590; goto _test_eof + _test_eof3591: cs = 3591; goto _test_eof + _test_eof3592: cs = 3592; goto _test_eof + _test_eof3593: cs = 3593; goto _test_eof + _test_eof5076: cs = 5076; goto _test_eof + _test_eof3594: cs = 3594; goto _test_eof + _test_eof3595: cs = 3595; goto _test_eof + _test_eof3596: cs = 3596; goto _test_eof + _test_eof3597: cs = 3597; goto _test_eof + _test_eof3598: cs = 3598; goto _test_eof + _test_eof3599: cs = 3599; goto _test_eof + _test_eof3600: cs = 3600; goto _test_eof + _test_eof3601: cs = 3601; goto _test_eof + _test_eof3602: cs = 3602; goto _test_eof + _test_eof3603: cs = 3603; goto _test_eof + _test_eof3604: cs = 3604; goto _test_eof + _test_eof3605: cs = 3605; goto _test_eof + _test_eof3606: cs = 3606; goto _test_eof + _test_eof3607: cs = 3607; goto _test_eof + _test_eof3608: cs = 3608; goto _test_eof + _test_eof3609: cs = 3609; goto _test_eof + _test_eof3610: cs = 3610; goto _test_eof + _test_eof3611: cs = 
3611; goto _test_eof + _test_eof3612: cs = 3612; goto _test_eof + _test_eof3613: cs = 3613; goto _test_eof + _test_eof3614: cs = 3614; goto _test_eof + _test_eof3615: cs = 3615; goto _test_eof + _test_eof3616: cs = 3616; goto _test_eof + _test_eof3617: cs = 3617; goto _test_eof + _test_eof3618: cs = 3618; goto _test_eof + _test_eof3619: cs = 3619; goto _test_eof + _test_eof3620: cs = 3620; goto _test_eof + _test_eof3621: cs = 3621; goto _test_eof + _test_eof3622: cs = 3622; goto _test_eof + _test_eof3623: cs = 3623; goto _test_eof + _test_eof3624: cs = 3624; goto _test_eof + _test_eof3625: cs = 3625; goto _test_eof + _test_eof3626: cs = 3626; goto _test_eof + _test_eof3627: cs = 3627; goto _test_eof + _test_eof3628: cs = 3628; goto _test_eof + _test_eof3629: cs = 3629; goto _test_eof + _test_eof3630: cs = 3630; goto _test_eof + _test_eof3631: cs = 3631; goto _test_eof + _test_eof3632: cs = 3632; goto _test_eof + _test_eof3633: cs = 3633; goto _test_eof + _test_eof3634: cs = 3634; goto _test_eof + _test_eof3635: cs = 3635; goto _test_eof + _test_eof3636: cs = 3636; goto _test_eof + _test_eof3637: cs = 3637; goto _test_eof + _test_eof3638: cs = 3638; goto _test_eof + _test_eof3639: cs = 3639; goto _test_eof + _test_eof3640: cs = 3640; goto _test_eof + _test_eof3641: cs = 3641; goto _test_eof + _test_eof3642: cs = 3642; goto _test_eof + _test_eof3643: cs = 3643; goto _test_eof + _test_eof3644: cs = 3644; goto _test_eof + _test_eof3645: cs = 3645; goto _test_eof + _test_eof3646: cs = 3646; goto _test_eof + _test_eof3647: cs = 3647; goto _test_eof + _test_eof3648: cs = 3648; goto _test_eof + _test_eof3649: cs = 3649; goto _test_eof + _test_eof3650: cs = 3650; goto _test_eof + _test_eof3651: cs = 3651; goto _test_eof + _test_eof3652: cs = 3652; goto _test_eof + _test_eof3653: cs = 3653; goto _test_eof + _test_eof3654: cs = 3654; goto _test_eof + _test_eof3655: cs = 3655; goto _test_eof + _test_eof3656: cs = 3656; goto _test_eof + _test_eof3657: cs = 3657; goto _test_eof 
+ _test_eof3658: cs = 3658; goto _test_eof + _test_eof3659: cs = 3659; goto _test_eof + _test_eof3660: cs = 3660; goto _test_eof + _test_eof3661: cs = 3661; goto _test_eof + _test_eof3662: cs = 3662; goto _test_eof + _test_eof3663: cs = 3663; goto _test_eof + _test_eof3664: cs = 3664; goto _test_eof + _test_eof3665: cs = 3665; goto _test_eof + _test_eof3666: cs = 3666; goto _test_eof + _test_eof3667: cs = 3667; goto _test_eof + _test_eof3668: cs = 3668; goto _test_eof + _test_eof3669: cs = 3669; goto _test_eof + _test_eof3670: cs = 3670; goto _test_eof + _test_eof3671: cs = 3671; goto _test_eof + _test_eof3672: cs = 3672; goto _test_eof + _test_eof3673: cs = 3673; goto _test_eof + _test_eof3674: cs = 3674; goto _test_eof + _test_eof3675: cs = 3675; goto _test_eof + _test_eof3676: cs = 3676; goto _test_eof + _test_eof3677: cs = 3677; goto _test_eof + _test_eof3678: cs = 3678; goto _test_eof + _test_eof3679: cs = 3679; goto _test_eof + _test_eof3680: cs = 3680; goto _test_eof + _test_eof3681: cs = 3681; goto _test_eof + _test_eof3682: cs = 3682; goto _test_eof + _test_eof3683: cs = 3683; goto _test_eof + _test_eof3684: cs = 3684; goto _test_eof + _test_eof3685: cs = 3685; goto _test_eof + _test_eof3686: cs = 3686; goto _test_eof + _test_eof3687: cs = 3687; goto _test_eof + _test_eof3688: cs = 3688; goto _test_eof + _test_eof3689: cs = 3689; goto _test_eof + _test_eof3690: cs = 3690; goto _test_eof + _test_eof3691: cs = 3691; goto _test_eof + _test_eof3692: cs = 3692; goto _test_eof + _test_eof3693: cs = 3693; goto _test_eof + _test_eof3694: cs = 3694; goto _test_eof + _test_eof3695: cs = 3695; goto _test_eof + _test_eof3696: cs = 3696; goto _test_eof + _test_eof3697: cs = 3697; goto _test_eof + _test_eof3698: cs = 3698; goto _test_eof + _test_eof3699: cs = 3699; goto _test_eof + _test_eof3700: cs = 3700; goto _test_eof + _test_eof3701: cs = 3701; goto _test_eof + _test_eof3702: cs = 3702; goto _test_eof + _test_eof3703: cs = 3703; goto _test_eof + _test_eof3704: cs = 
3704; goto _test_eof + _test_eof3705: cs = 3705; goto _test_eof + _test_eof3706: cs = 3706; goto _test_eof + _test_eof3707: cs = 3707; goto _test_eof + _test_eof3708: cs = 3708; goto _test_eof + _test_eof3709: cs = 3709; goto _test_eof + _test_eof3710: cs = 3710; goto _test_eof + _test_eof3711: cs = 3711; goto _test_eof + _test_eof3712: cs = 3712; goto _test_eof + _test_eof3713: cs = 3713; goto _test_eof + _test_eof3714: cs = 3714; goto _test_eof + _test_eof3715: cs = 3715; goto _test_eof + _test_eof3716: cs = 3716; goto _test_eof + _test_eof3717: cs = 3717; goto _test_eof + _test_eof3718: cs = 3718; goto _test_eof + _test_eof3719: cs = 3719; goto _test_eof + _test_eof3720: cs = 3720; goto _test_eof + _test_eof3721: cs = 3721; goto _test_eof + _test_eof3722: cs = 3722; goto _test_eof + _test_eof3723: cs = 3723; goto _test_eof + _test_eof3724: cs = 3724; goto _test_eof + _test_eof3725: cs = 3725; goto _test_eof + _test_eof3726: cs = 3726; goto _test_eof + _test_eof3727: cs = 3727; goto _test_eof + _test_eof3728: cs = 3728; goto _test_eof + _test_eof3729: cs = 3729; goto _test_eof + _test_eof3730: cs = 3730; goto _test_eof + _test_eof3731: cs = 3731; goto _test_eof + _test_eof3732: cs = 3732; goto _test_eof + _test_eof3733: cs = 3733; goto _test_eof + _test_eof3734: cs = 3734; goto _test_eof + _test_eof3735: cs = 3735; goto _test_eof + _test_eof3736: cs = 3736; goto _test_eof + _test_eof5077: cs = 5077; goto _test_eof + _test_eof3737: cs = 3737; goto _test_eof + _test_eof5078: cs = 5078; goto _test_eof + _test_eof3738: cs = 3738; goto _test_eof + _test_eof3739: cs = 3739; goto _test_eof + _test_eof3740: cs = 3740; goto _test_eof + _test_eof3741: cs = 3741; goto _test_eof + _test_eof3742: cs = 3742; goto _test_eof + _test_eof3743: cs = 3743; goto _test_eof + _test_eof3744: cs = 3744; goto _test_eof + _test_eof3745: cs = 3745; goto _test_eof + _test_eof3746: cs = 3746; goto _test_eof + _test_eof3747: cs = 3747; goto _test_eof + _test_eof3748: cs = 3748; goto _test_eof 
+ _test_eof3749: cs = 3749; goto _test_eof + _test_eof3750: cs = 3750; goto _test_eof + _test_eof3751: cs = 3751; goto _test_eof + _test_eof3752: cs = 3752; goto _test_eof + _test_eof3753: cs = 3753; goto _test_eof + _test_eof3754: cs = 3754; goto _test_eof + _test_eof3755: cs = 3755; goto _test_eof + _test_eof3756: cs = 3756; goto _test_eof + _test_eof3757: cs = 3757; goto _test_eof + _test_eof3758: cs = 3758; goto _test_eof + _test_eof3759: cs = 3759; goto _test_eof + _test_eof3760: cs = 3760; goto _test_eof + _test_eof3761: cs = 3761; goto _test_eof + _test_eof3762: cs = 3762; goto _test_eof + _test_eof3763: cs = 3763; goto _test_eof + _test_eof3764: cs = 3764; goto _test_eof + _test_eof3765: cs = 3765; goto _test_eof + _test_eof3766: cs = 3766; goto _test_eof + _test_eof3767: cs = 3767; goto _test_eof + _test_eof3768: cs = 3768; goto _test_eof + _test_eof3769: cs = 3769; goto _test_eof + _test_eof3770: cs = 3770; goto _test_eof + _test_eof3771: cs = 3771; goto _test_eof + _test_eof3772: cs = 3772; goto _test_eof + _test_eof3773: cs = 3773; goto _test_eof + _test_eof3774: cs = 3774; goto _test_eof + _test_eof3775: cs = 3775; goto _test_eof + _test_eof3776: cs = 3776; goto _test_eof + _test_eof3777: cs = 3777; goto _test_eof + _test_eof3778: cs = 3778; goto _test_eof + _test_eof3779: cs = 3779; goto _test_eof + _test_eof3780: cs = 3780; goto _test_eof + _test_eof3781: cs = 3781; goto _test_eof + _test_eof3782: cs = 3782; goto _test_eof + _test_eof3783: cs = 3783; goto _test_eof + _test_eof3784: cs = 3784; goto _test_eof + _test_eof3785: cs = 3785; goto _test_eof + _test_eof3786: cs = 3786; goto _test_eof + _test_eof3787: cs = 3787; goto _test_eof + _test_eof3788: cs = 3788; goto _test_eof + _test_eof3789: cs = 3789; goto _test_eof + _test_eof3790: cs = 3790; goto _test_eof + _test_eof3791: cs = 3791; goto _test_eof + _test_eof3792: cs = 3792; goto _test_eof + _test_eof3793: cs = 3793; goto _test_eof + _test_eof3794: cs = 3794; goto _test_eof + _test_eof3795: cs = 
3795; goto _test_eof + _test_eof3796: cs = 3796; goto _test_eof + _test_eof3797: cs = 3797; goto _test_eof + _test_eof3798: cs = 3798; goto _test_eof + _test_eof3799: cs = 3799; goto _test_eof + _test_eof3800: cs = 3800; goto _test_eof + _test_eof3801: cs = 3801; goto _test_eof + _test_eof3802: cs = 3802; goto _test_eof + _test_eof3803: cs = 3803; goto _test_eof + _test_eof3804: cs = 3804; goto _test_eof + _test_eof3805: cs = 3805; goto _test_eof + _test_eof3806: cs = 3806; goto _test_eof + _test_eof3807: cs = 3807; goto _test_eof + _test_eof3808: cs = 3808; goto _test_eof + _test_eof3809: cs = 3809; goto _test_eof + _test_eof3810: cs = 3810; goto _test_eof + _test_eof3811: cs = 3811; goto _test_eof + _test_eof3812: cs = 3812; goto _test_eof + _test_eof3813: cs = 3813; goto _test_eof + _test_eof3814: cs = 3814; goto _test_eof + _test_eof3815: cs = 3815; goto _test_eof + _test_eof3816: cs = 3816; goto _test_eof + _test_eof3817: cs = 3817; goto _test_eof + _test_eof3818: cs = 3818; goto _test_eof + _test_eof3819: cs = 3819; goto _test_eof + _test_eof3820: cs = 3820; goto _test_eof + _test_eof3821: cs = 3821; goto _test_eof + _test_eof3822: cs = 3822; goto _test_eof + _test_eof3823: cs = 3823; goto _test_eof + _test_eof3824: cs = 3824; goto _test_eof + _test_eof3825: cs = 3825; goto _test_eof + _test_eof3826: cs = 3826; goto _test_eof + _test_eof3827: cs = 3827; goto _test_eof + _test_eof3828: cs = 3828; goto _test_eof + _test_eof3829: cs = 3829; goto _test_eof + _test_eof3830: cs = 3830; goto _test_eof + _test_eof3831: cs = 3831; goto _test_eof + _test_eof3832: cs = 3832; goto _test_eof + _test_eof3833: cs = 3833; goto _test_eof + _test_eof3834: cs = 3834; goto _test_eof + _test_eof3835: cs = 3835; goto _test_eof + _test_eof3836: cs = 3836; goto _test_eof + _test_eof3837: cs = 3837; goto _test_eof + _test_eof3838: cs = 3838; goto _test_eof + _test_eof3839: cs = 3839; goto _test_eof + _test_eof3840: cs = 3840; goto _test_eof + _test_eof3841: cs = 3841; goto _test_eof 
+ _test_eof3842: cs = 3842; goto _test_eof + _test_eof3843: cs = 3843; goto _test_eof + _test_eof3844: cs = 3844; goto _test_eof + _test_eof3845: cs = 3845; goto _test_eof + _test_eof3846: cs = 3846; goto _test_eof + _test_eof3847: cs = 3847; goto _test_eof + _test_eof3848: cs = 3848; goto _test_eof + _test_eof3849: cs = 3849; goto _test_eof + _test_eof3850: cs = 3850; goto _test_eof + _test_eof3851: cs = 3851; goto _test_eof + _test_eof3852: cs = 3852; goto _test_eof + _test_eof3853: cs = 3853; goto _test_eof + _test_eof3854: cs = 3854; goto _test_eof + _test_eof3855: cs = 3855; goto _test_eof + _test_eof3856: cs = 3856; goto _test_eof + _test_eof3857: cs = 3857; goto _test_eof + _test_eof3858: cs = 3858; goto _test_eof + _test_eof3859: cs = 3859; goto _test_eof + _test_eof3860: cs = 3860; goto _test_eof + _test_eof3861: cs = 3861; goto _test_eof + _test_eof3862: cs = 3862; goto _test_eof + _test_eof3863: cs = 3863; goto _test_eof + _test_eof3864: cs = 3864; goto _test_eof + _test_eof3865: cs = 3865; goto _test_eof + _test_eof3866: cs = 3866; goto _test_eof + _test_eof3867: cs = 3867; goto _test_eof + _test_eof3868: cs = 3868; goto _test_eof + _test_eof3869: cs = 3869; goto _test_eof + _test_eof3870: cs = 3870; goto _test_eof + _test_eof3871: cs = 3871; goto _test_eof + _test_eof3872: cs = 3872; goto _test_eof + _test_eof3873: cs = 3873; goto _test_eof + _test_eof3874: cs = 3874; goto _test_eof + _test_eof3875: cs = 3875; goto _test_eof + _test_eof3876: cs = 3876; goto _test_eof + _test_eof3877: cs = 3877; goto _test_eof + _test_eof3878: cs = 3878; goto _test_eof + _test_eof3879: cs = 3879; goto _test_eof + _test_eof3880: cs = 3880; goto _test_eof + _test_eof3881: cs = 3881; goto _test_eof + _test_eof3882: cs = 3882; goto _test_eof + _test_eof3883: cs = 3883; goto _test_eof + _test_eof3884: cs = 3884; goto _test_eof + _test_eof5079: cs = 5079; goto _test_eof + _test_eof3885: cs = 3885; goto _test_eof + _test_eof3886: cs = 3886; goto _test_eof + _test_eof3887: cs = 
3887; goto _test_eof + _test_eof3888: cs = 3888; goto _test_eof + _test_eof3889: cs = 3889; goto _test_eof + _test_eof3890: cs = 3890; goto _test_eof + _test_eof3891: cs = 3891; goto _test_eof + _test_eof3892: cs = 3892; goto _test_eof + _test_eof3893: cs = 3893; goto _test_eof + _test_eof3894: cs = 3894; goto _test_eof + _test_eof3895: cs = 3895; goto _test_eof + _test_eof3896: cs = 3896; goto _test_eof + _test_eof3897: cs = 3897; goto _test_eof + _test_eof3898: cs = 3898; goto _test_eof + _test_eof3899: cs = 3899; goto _test_eof + _test_eof3900: cs = 3900; goto _test_eof + _test_eof3901: cs = 3901; goto _test_eof + _test_eof3902: cs = 3902; goto _test_eof + _test_eof3903: cs = 3903; goto _test_eof + _test_eof3904: cs = 3904; goto _test_eof + _test_eof3905: cs = 3905; goto _test_eof + _test_eof3906: cs = 3906; goto _test_eof + _test_eof3907: cs = 3907; goto _test_eof + _test_eof3908: cs = 3908; goto _test_eof + _test_eof3909: cs = 3909; goto _test_eof + _test_eof3910: cs = 3910; goto _test_eof + _test_eof3911: cs = 3911; goto _test_eof + _test_eof3912: cs = 3912; goto _test_eof + _test_eof3913: cs = 3913; goto _test_eof + _test_eof3914: cs = 3914; goto _test_eof + _test_eof3915: cs = 3915; goto _test_eof + _test_eof3916: cs = 3916; goto _test_eof + _test_eof3917: cs = 3917; goto _test_eof + _test_eof3918: cs = 3918; goto _test_eof + _test_eof3919: cs = 3919; goto _test_eof + _test_eof3920: cs = 3920; goto _test_eof + _test_eof3921: cs = 3921; goto _test_eof + _test_eof3922: cs = 3922; goto _test_eof + _test_eof3923: cs = 3923; goto _test_eof + _test_eof3924: cs = 3924; goto _test_eof + _test_eof3925: cs = 3925; goto _test_eof + _test_eof3926: cs = 3926; goto _test_eof + _test_eof3927: cs = 3927; goto _test_eof + _test_eof3928: cs = 3928; goto _test_eof + _test_eof3929: cs = 3929; goto _test_eof + _test_eof3930: cs = 3930; goto _test_eof + _test_eof3931: cs = 3931; goto _test_eof + _test_eof3932: cs = 3932; goto _test_eof + _test_eof3933: cs = 3933; goto _test_eof 
+ _test_eof3934: cs = 3934; goto _test_eof + _test_eof3935: cs = 3935; goto _test_eof + _test_eof3936: cs = 3936; goto _test_eof + _test_eof3937: cs = 3937; goto _test_eof + _test_eof3938: cs = 3938; goto _test_eof + _test_eof3939: cs = 3939; goto _test_eof + _test_eof3940: cs = 3940; goto _test_eof + _test_eof3941: cs = 3941; goto _test_eof + _test_eof3942: cs = 3942; goto _test_eof + _test_eof3943: cs = 3943; goto _test_eof + _test_eof3944: cs = 3944; goto _test_eof + _test_eof3945: cs = 3945; goto _test_eof + _test_eof3946: cs = 3946; goto _test_eof + _test_eof3947: cs = 3947; goto _test_eof + _test_eof3948: cs = 3948; goto _test_eof + _test_eof3949: cs = 3949; goto _test_eof + _test_eof3950: cs = 3950; goto _test_eof + _test_eof3951: cs = 3951; goto _test_eof + _test_eof3952: cs = 3952; goto _test_eof + _test_eof3953: cs = 3953; goto _test_eof + _test_eof3954: cs = 3954; goto _test_eof + _test_eof3955: cs = 3955; goto _test_eof + _test_eof3956: cs = 3956; goto _test_eof + _test_eof3957: cs = 3957; goto _test_eof + _test_eof3958: cs = 3958; goto _test_eof + _test_eof3959: cs = 3959; goto _test_eof + _test_eof3960: cs = 3960; goto _test_eof + _test_eof3961: cs = 3961; goto _test_eof + _test_eof3962: cs = 3962; goto _test_eof + _test_eof3963: cs = 3963; goto _test_eof + _test_eof3964: cs = 3964; goto _test_eof + _test_eof3965: cs = 3965; goto _test_eof + _test_eof3966: cs = 3966; goto _test_eof + _test_eof3967: cs = 3967; goto _test_eof + _test_eof3968: cs = 3968; goto _test_eof + _test_eof3969: cs = 3969; goto _test_eof + _test_eof3970: cs = 3970; goto _test_eof + _test_eof3971: cs = 3971; goto _test_eof + _test_eof3972: cs = 3972; goto _test_eof + _test_eof3973: cs = 3973; goto _test_eof + _test_eof3974: cs = 3974; goto _test_eof + _test_eof3975: cs = 3975; goto _test_eof + _test_eof3976: cs = 3976; goto _test_eof + _test_eof3977: cs = 3977; goto _test_eof + _test_eof3978: cs = 3978; goto _test_eof + _test_eof3979: cs = 3979; goto _test_eof + _test_eof3980: cs = 
3980; goto _test_eof + _test_eof3981: cs = 3981; goto _test_eof + _test_eof3982: cs = 3982; goto _test_eof + _test_eof3983: cs = 3983; goto _test_eof + _test_eof3984: cs = 3984; goto _test_eof + _test_eof3985: cs = 3985; goto _test_eof + _test_eof3986: cs = 3986; goto _test_eof + _test_eof3987: cs = 3987; goto _test_eof + _test_eof3988: cs = 3988; goto _test_eof + _test_eof3989: cs = 3989; goto _test_eof + _test_eof3990: cs = 3990; goto _test_eof + _test_eof3991: cs = 3991; goto _test_eof + _test_eof3992: cs = 3992; goto _test_eof + _test_eof3993: cs = 3993; goto _test_eof + _test_eof3994: cs = 3994; goto _test_eof + _test_eof3995: cs = 3995; goto _test_eof + _test_eof3996: cs = 3996; goto _test_eof + _test_eof3997: cs = 3997; goto _test_eof + _test_eof3998: cs = 3998; goto _test_eof + _test_eof3999: cs = 3999; goto _test_eof + _test_eof4000: cs = 4000; goto _test_eof + _test_eof4001: cs = 4001; goto _test_eof + _test_eof4002: cs = 4002; goto _test_eof + _test_eof4003: cs = 4003; goto _test_eof + _test_eof4004: cs = 4004; goto _test_eof + _test_eof4005: cs = 4005; goto _test_eof + _test_eof4006: cs = 4006; goto _test_eof + _test_eof4007: cs = 4007; goto _test_eof + _test_eof4008: cs = 4008; goto _test_eof + _test_eof4009: cs = 4009; goto _test_eof + _test_eof4010: cs = 4010; goto _test_eof + _test_eof4011: cs = 4011; goto _test_eof + _test_eof4012: cs = 4012; goto _test_eof + _test_eof4013: cs = 4013; goto _test_eof + _test_eof4014: cs = 4014; goto _test_eof + _test_eof4015: cs = 4015; goto _test_eof + _test_eof4016: cs = 4016; goto _test_eof + _test_eof4017: cs = 4017; goto _test_eof + _test_eof4018: cs = 4018; goto _test_eof + _test_eof4019: cs = 4019; goto _test_eof + _test_eof4020: cs = 4020; goto _test_eof + _test_eof4021: cs = 4021; goto _test_eof + _test_eof4022: cs = 4022; goto _test_eof + _test_eof4023: cs = 4023; goto _test_eof + _test_eof4024: cs = 4024; goto _test_eof + _test_eof4025: cs = 4025; goto _test_eof + _test_eof4026: cs = 4026; goto _test_eof 
+ _test_eof5080: cs = 5080; goto _test_eof + _test_eof4027: cs = 4027; goto _test_eof + _test_eof4028: cs = 4028; goto _test_eof + _test_eof4029: cs = 4029; goto _test_eof + _test_eof4030: cs = 4030; goto _test_eof + _test_eof4031: cs = 4031; goto _test_eof + _test_eof4032: cs = 4032; goto _test_eof + _test_eof4033: cs = 4033; goto _test_eof + _test_eof4034: cs = 4034; goto _test_eof + _test_eof4035: cs = 4035; goto _test_eof + _test_eof4036: cs = 4036; goto _test_eof + _test_eof4037: cs = 4037; goto _test_eof + _test_eof4038: cs = 4038; goto _test_eof + _test_eof4039: cs = 4039; goto _test_eof + _test_eof4040: cs = 4040; goto _test_eof + _test_eof4041: cs = 4041; goto _test_eof + _test_eof4042: cs = 4042; goto _test_eof + _test_eof4043: cs = 4043; goto _test_eof + _test_eof4044: cs = 4044; goto _test_eof + _test_eof4045: cs = 4045; goto _test_eof + _test_eof4046: cs = 4046; goto _test_eof + _test_eof4047: cs = 4047; goto _test_eof + _test_eof4048: cs = 4048; goto _test_eof + _test_eof4049: cs = 4049; goto _test_eof + _test_eof4050: cs = 4050; goto _test_eof + _test_eof4051: cs = 4051; goto _test_eof + _test_eof4052: cs = 4052; goto _test_eof + _test_eof4053: cs = 4053; goto _test_eof + _test_eof4054: cs = 4054; goto _test_eof + _test_eof4055: cs = 4055; goto _test_eof + _test_eof4056: cs = 4056; goto _test_eof + _test_eof4057: cs = 4057; goto _test_eof + _test_eof4058: cs = 4058; goto _test_eof + _test_eof4059: cs = 4059; goto _test_eof + _test_eof4060: cs = 4060; goto _test_eof + _test_eof4061: cs = 4061; goto _test_eof + _test_eof4062: cs = 4062; goto _test_eof + _test_eof4063: cs = 4063; goto _test_eof + _test_eof4064: cs = 4064; goto _test_eof + _test_eof4065: cs = 4065; goto _test_eof + _test_eof4066: cs = 4066; goto _test_eof + _test_eof4067: cs = 4067; goto _test_eof + _test_eof4068: cs = 4068; goto _test_eof + _test_eof4069: cs = 4069; goto _test_eof + _test_eof4070: cs = 4070; goto _test_eof + _test_eof4071: cs = 4071; goto _test_eof + _test_eof4072: cs = 
4072; goto _test_eof + _test_eof4073: cs = 4073; goto _test_eof + _test_eof4074: cs = 4074; goto _test_eof + _test_eof4075: cs = 4075; goto _test_eof + _test_eof4076: cs = 4076; goto _test_eof + _test_eof4077: cs = 4077; goto _test_eof + _test_eof4078: cs = 4078; goto _test_eof + _test_eof4079: cs = 4079; goto _test_eof + _test_eof4080: cs = 4080; goto _test_eof + _test_eof4081: cs = 4081; goto _test_eof + _test_eof4082: cs = 4082; goto _test_eof + _test_eof4083: cs = 4083; goto _test_eof + _test_eof4084: cs = 4084; goto _test_eof + _test_eof4085: cs = 4085; goto _test_eof + _test_eof4086: cs = 4086; goto _test_eof + _test_eof4087: cs = 4087; goto _test_eof + _test_eof4088: cs = 4088; goto _test_eof + _test_eof4089: cs = 4089; goto _test_eof + _test_eof4090: cs = 4090; goto _test_eof + _test_eof4091: cs = 4091; goto _test_eof + _test_eof4092: cs = 4092; goto _test_eof + _test_eof4093: cs = 4093; goto _test_eof + _test_eof4094: cs = 4094; goto _test_eof + _test_eof4095: cs = 4095; goto _test_eof + _test_eof4096: cs = 4096; goto _test_eof + _test_eof4097: cs = 4097; goto _test_eof + _test_eof4098: cs = 4098; goto _test_eof + _test_eof4099: cs = 4099; goto _test_eof + _test_eof4100: cs = 4100; goto _test_eof + _test_eof4101: cs = 4101; goto _test_eof + _test_eof4102: cs = 4102; goto _test_eof + _test_eof4103: cs = 4103; goto _test_eof + _test_eof4104: cs = 4104; goto _test_eof + _test_eof4105: cs = 4105; goto _test_eof + _test_eof4106: cs = 4106; goto _test_eof + _test_eof4107: cs = 4107; goto _test_eof + _test_eof4108: cs = 4108; goto _test_eof + _test_eof4109: cs = 4109; goto _test_eof + _test_eof4110: cs = 4110; goto _test_eof + _test_eof4111: cs = 4111; goto _test_eof + _test_eof4112: cs = 4112; goto _test_eof + _test_eof4113: cs = 4113; goto _test_eof + _test_eof4114: cs = 4114; goto _test_eof + _test_eof4115: cs = 4115; goto _test_eof + _test_eof4116: cs = 4116; goto _test_eof + _test_eof4117: cs = 4117; goto _test_eof + _test_eof4118: cs = 4118; goto _test_eof 
+ _test_eof4119: cs = 4119; goto _test_eof + _test_eof4120: cs = 4120; goto _test_eof + _test_eof4121: cs = 4121; goto _test_eof + _test_eof4122: cs = 4122; goto _test_eof + _test_eof4123: cs = 4123; goto _test_eof + _test_eof4124: cs = 4124; goto _test_eof + _test_eof4125: cs = 4125; goto _test_eof + _test_eof4126: cs = 4126; goto _test_eof + _test_eof4127: cs = 4127; goto _test_eof + _test_eof4128: cs = 4128; goto _test_eof + _test_eof4129: cs = 4129; goto _test_eof + _test_eof4130: cs = 4130; goto _test_eof + _test_eof4131: cs = 4131; goto _test_eof + _test_eof4132: cs = 4132; goto _test_eof + _test_eof4133: cs = 4133; goto _test_eof + _test_eof4134: cs = 4134; goto _test_eof + _test_eof4135: cs = 4135; goto _test_eof + _test_eof4136: cs = 4136; goto _test_eof + _test_eof4137: cs = 4137; goto _test_eof + _test_eof4138: cs = 4138; goto _test_eof + _test_eof4139: cs = 4139; goto _test_eof + _test_eof4140: cs = 4140; goto _test_eof + _test_eof4141: cs = 4141; goto _test_eof + _test_eof4142: cs = 4142; goto _test_eof + _test_eof4143: cs = 4143; goto _test_eof + _test_eof4144: cs = 4144; goto _test_eof + _test_eof4145: cs = 4145; goto _test_eof + _test_eof4146: cs = 4146; goto _test_eof + _test_eof4147: cs = 4147; goto _test_eof + _test_eof4148: cs = 4148; goto _test_eof + _test_eof4149: cs = 4149; goto _test_eof + _test_eof4150: cs = 4150; goto _test_eof + _test_eof4151: cs = 4151; goto _test_eof + _test_eof4152: cs = 4152; goto _test_eof + _test_eof4153: cs = 4153; goto _test_eof + _test_eof4154: cs = 4154; goto _test_eof + _test_eof4155: cs = 4155; goto _test_eof + _test_eof4156: cs = 4156; goto _test_eof + _test_eof4157: cs = 4157; goto _test_eof + _test_eof4158: cs = 4158; goto _test_eof + _test_eof4159: cs = 4159; goto _test_eof + _test_eof4160: cs = 4160; goto _test_eof + _test_eof4161: cs = 4161; goto _test_eof + _test_eof4162: cs = 4162; goto _test_eof + _test_eof4163: cs = 4163; goto _test_eof + _test_eof4164: cs = 4164; goto _test_eof + _test_eof4165: cs = 
4165; goto _test_eof + _test_eof4166: cs = 4166; goto _test_eof + _test_eof4167: cs = 4167; goto _test_eof + _test_eof4168: cs = 4168; goto _test_eof + _test_eof4169: cs = 4169; goto _test_eof + _test_eof4170: cs = 4170; goto _test_eof + _test_eof4171: cs = 4171; goto _test_eof + _test_eof4172: cs = 4172; goto _test_eof + _test_eof4173: cs = 4173; goto _test_eof + _test_eof4174: cs = 4174; goto _test_eof + _test_eof4175: cs = 4175; goto _test_eof + _test_eof5081: cs = 5081; goto _test_eof + _test_eof4176: cs = 4176; goto _test_eof + _test_eof4177: cs = 4177; goto _test_eof + _test_eof4178: cs = 4178; goto _test_eof + _test_eof4179: cs = 4179; goto _test_eof + _test_eof4180: cs = 4180; goto _test_eof + _test_eof4181: cs = 4181; goto _test_eof + _test_eof4182: cs = 4182; goto _test_eof + _test_eof4183: cs = 4183; goto _test_eof + _test_eof4184: cs = 4184; goto _test_eof + _test_eof4185: cs = 4185; goto _test_eof + _test_eof4186: cs = 4186; goto _test_eof + _test_eof4187: cs = 4187; goto _test_eof + _test_eof4188: cs = 4188; goto _test_eof + _test_eof4189: cs = 4189; goto _test_eof + _test_eof4190: cs = 4190; goto _test_eof + _test_eof4191: cs = 4191; goto _test_eof + _test_eof4192: cs = 4192; goto _test_eof + _test_eof4193: cs = 4193; goto _test_eof + _test_eof4194: cs = 4194; goto _test_eof + _test_eof4195: cs = 4195; goto _test_eof + _test_eof4196: cs = 4196; goto _test_eof + _test_eof4197: cs = 4197; goto _test_eof + _test_eof4198: cs = 4198; goto _test_eof + _test_eof4199: cs = 4199; goto _test_eof + _test_eof4200: cs = 4200; goto _test_eof + _test_eof4201: cs = 4201; goto _test_eof + _test_eof4202: cs = 4202; goto _test_eof + _test_eof4203: cs = 4203; goto _test_eof + _test_eof4204: cs = 4204; goto _test_eof + _test_eof4205: cs = 4205; goto _test_eof + _test_eof4206: cs = 4206; goto _test_eof + _test_eof4207: cs = 4207; goto _test_eof + _test_eof4208: cs = 4208; goto _test_eof + _test_eof4209: cs = 4209; goto _test_eof + _test_eof4210: cs = 4210; goto _test_eof 
+ _test_eof4211: cs = 4211; goto _test_eof + _test_eof4212: cs = 4212; goto _test_eof + _test_eof4213: cs = 4213; goto _test_eof + _test_eof4214: cs = 4214; goto _test_eof + _test_eof4215: cs = 4215; goto _test_eof + _test_eof4216: cs = 4216; goto _test_eof + _test_eof4217: cs = 4217; goto _test_eof + _test_eof4218: cs = 4218; goto _test_eof + _test_eof4219: cs = 4219; goto _test_eof + _test_eof4220: cs = 4220; goto _test_eof + _test_eof4221: cs = 4221; goto _test_eof + _test_eof4222: cs = 4222; goto _test_eof + _test_eof4223: cs = 4223; goto _test_eof + _test_eof4224: cs = 4224; goto _test_eof + _test_eof4225: cs = 4225; goto _test_eof + _test_eof4226: cs = 4226; goto _test_eof + _test_eof4227: cs = 4227; goto _test_eof + _test_eof4228: cs = 4228; goto _test_eof + _test_eof4229: cs = 4229; goto _test_eof + _test_eof4230: cs = 4230; goto _test_eof + _test_eof4231: cs = 4231; goto _test_eof + _test_eof4232: cs = 4232; goto _test_eof + _test_eof4233: cs = 4233; goto _test_eof + _test_eof4234: cs = 4234; goto _test_eof + _test_eof4235: cs = 4235; goto _test_eof + _test_eof4236: cs = 4236; goto _test_eof + _test_eof4237: cs = 4237; goto _test_eof + _test_eof4238: cs = 4238; goto _test_eof + _test_eof4239: cs = 4239; goto _test_eof + _test_eof4240: cs = 4240; goto _test_eof + _test_eof4241: cs = 4241; goto _test_eof + _test_eof4242: cs = 4242; goto _test_eof + _test_eof4243: cs = 4243; goto _test_eof + _test_eof4244: cs = 4244; goto _test_eof + _test_eof4245: cs = 4245; goto _test_eof + _test_eof4246: cs = 4246; goto _test_eof + _test_eof4247: cs = 4247; goto _test_eof + _test_eof4248: cs = 4248; goto _test_eof + _test_eof4249: cs = 4249; goto _test_eof + _test_eof4250: cs = 4250; goto _test_eof + _test_eof4251: cs = 4251; goto _test_eof + _test_eof4252: cs = 4252; goto _test_eof + _test_eof4253: cs = 4253; goto _test_eof + _test_eof4254: cs = 4254; goto _test_eof + _test_eof4255: cs = 4255; goto _test_eof + _test_eof4256: cs = 4256; goto _test_eof + _test_eof4257: cs = 
4257; goto _test_eof + _test_eof4258: cs = 4258; goto _test_eof + _test_eof4259: cs = 4259; goto _test_eof + _test_eof4260: cs = 4260; goto _test_eof + _test_eof4261: cs = 4261; goto _test_eof + _test_eof4262: cs = 4262; goto _test_eof + _test_eof4263: cs = 4263; goto _test_eof + _test_eof4264: cs = 4264; goto _test_eof + _test_eof4265: cs = 4265; goto _test_eof + _test_eof4266: cs = 4266; goto _test_eof + _test_eof4267: cs = 4267; goto _test_eof + _test_eof4268: cs = 4268; goto _test_eof + _test_eof4269: cs = 4269; goto _test_eof + _test_eof4270: cs = 4270; goto _test_eof + _test_eof4271: cs = 4271; goto _test_eof + _test_eof4272: cs = 4272; goto _test_eof + _test_eof4273: cs = 4273; goto _test_eof + _test_eof4274: cs = 4274; goto _test_eof + _test_eof4275: cs = 4275; goto _test_eof + _test_eof4276: cs = 4276; goto _test_eof + _test_eof4277: cs = 4277; goto _test_eof + _test_eof4278: cs = 4278; goto _test_eof + _test_eof4279: cs = 4279; goto _test_eof + _test_eof4280: cs = 4280; goto _test_eof + _test_eof4281: cs = 4281; goto _test_eof + _test_eof4282: cs = 4282; goto _test_eof + _test_eof4283: cs = 4283; goto _test_eof + _test_eof4284: cs = 4284; goto _test_eof + _test_eof4285: cs = 4285; goto _test_eof + _test_eof4286: cs = 4286; goto _test_eof + _test_eof4287: cs = 4287; goto _test_eof + _test_eof4288: cs = 4288; goto _test_eof + _test_eof4289: cs = 4289; goto _test_eof + _test_eof4290: cs = 4290; goto _test_eof + _test_eof4291: cs = 4291; goto _test_eof + _test_eof4292: cs = 4292; goto _test_eof + _test_eof4293: cs = 4293; goto _test_eof + _test_eof4294: cs = 4294; goto _test_eof + _test_eof4295: cs = 4295; goto _test_eof + _test_eof4296: cs = 4296; goto _test_eof + _test_eof4297: cs = 4297; goto _test_eof + _test_eof4298: cs = 4298; goto _test_eof + _test_eof4299: cs = 4299; goto _test_eof + _test_eof4300: cs = 4300; goto _test_eof + _test_eof4301: cs = 4301; goto _test_eof + _test_eof4302: cs = 4302; goto _test_eof + _test_eof4303: cs = 4303; goto _test_eof 
+ _test_eof4304: cs = 4304; goto _test_eof + _test_eof4305: cs = 4305; goto _test_eof + _test_eof4306: cs = 4306; goto _test_eof + _test_eof4307: cs = 4307; goto _test_eof + _test_eof4308: cs = 4308; goto _test_eof + _test_eof4309: cs = 4309; goto _test_eof + _test_eof4310: cs = 4310; goto _test_eof + _test_eof4311: cs = 4311; goto _test_eof + _test_eof4312: cs = 4312; goto _test_eof + _test_eof4313: cs = 4313; goto _test_eof + _test_eof4314: cs = 4314; goto _test_eof + _test_eof4315: cs = 4315; goto _test_eof + _test_eof4316: cs = 4316; goto _test_eof + _test_eof4317: cs = 4317; goto _test_eof + _test_eof4318: cs = 4318; goto _test_eof + _test_eof5082: cs = 5082; goto _test_eof + _test_eof4319: cs = 4319; goto _test_eof + _test_eof4320: cs = 4320; goto _test_eof + _test_eof4321: cs = 4321; goto _test_eof + _test_eof4322: cs = 4322; goto _test_eof + _test_eof4323: cs = 4323; goto _test_eof + _test_eof4324: cs = 4324; goto _test_eof + _test_eof4325: cs = 4325; goto _test_eof + _test_eof4326: cs = 4326; goto _test_eof + _test_eof4327: cs = 4327; goto _test_eof + _test_eof4328: cs = 4328; goto _test_eof + _test_eof4329: cs = 4329; goto _test_eof + _test_eof4330: cs = 4330; goto _test_eof + _test_eof4331: cs = 4331; goto _test_eof + _test_eof4332: cs = 4332; goto _test_eof + _test_eof4333: cs = 4333; goto _test_eof + _test_eof4334: cs = 4334; goto _test_eof + _test_eof4335: cs = 4335; goto _test_eof + _test_eof4336: cs = 4336; goto _test_eof + _test_eof4337: cs = 4337; goto _test_eof + _test_eof4338: cs = 4338; goto _test_eof + _test_eof4339: cs = 4339; goto _test_eof + _test_eof4340: cs = 4340; goto _test_eof + _test_eof4341: cs = 4341; goto _test_eof + _test_eof4342: cs = 4342; goto _test_eof + _test_eof4343: cs = 4343; goto _test_eof + _test_eof4344: cs = 4344; goto _test_eof + _test_eof4345: cs = 4345; goto _test_eof + _test_eof4346: cs = 4346; goto _test_eof + _test_eof4347: cs = 4347; goto _test_eof + _test_eof4348: cs = 4348; goto _test_eof + _test_eof4349: cs = 
4349; goto _test_eof + _test_eof4350: cs = 4350; goto _test_eof + _test_eof4351: cs = 4351; goto _test_eof + _test_eof4352: cs = 4352; goto _test_eof + _test_eof4353: cs = 4353; goto _test_eof + _test_eof4354: cs = 4354; goto _test_eof + _test_eof4355: cs = 4355; goto _test_eof + _test_eof4356: cs = 4356; goto _test_eof + _test_eof4357: cs = 4357; goto _test_eof + _test_eof4358: cs = 4358; goto _test_eof + _test_eof4359: cs = 4359; goto _test_eof + _test_eof4360: cs = 4360; goto _test_eof + _test_eof4361: cs = 4361; goto _test_eof + _test_eof4362: cs = 4362; goto _test_eof + _test_eof4363: cs = 4363; goto _test_eof + _test_eof4364: cs = 4364; goto _test_eof + _test_eof4365: cs = 4365; goto _test_eof + _test_eof4366: cs = 4366; goto _test_eof + _test_eof4367: cs = 4367; goto _test_eof + _test_eof4368: cs = 4368; goto _test_eof + _test_eof4369: cs = 4369; goto _test_eof + _test_eof4370: cs = 4370; goto _test_eof + _test_eof4371: cs = 4371; goto _test_eof + _test_eof4372: cs = 4372; goto _test_eof + _test_eof4373: cs = 4373; goto _test_eof + _test_eof4374: cs = 4374; goto _test_eof + _test_eof4375: cs = 4375; goto _test_eof + _test_eof4376: cs = 4376; goto _test_eof + _test_eof4377: cs = 4377; goto _test_eof + _test_eof4378: cs = 4378; goto _test_eof + _test_eof4379: cs = 4379; goto _test_eof + _test_eof4380: cs = 4380; goto _test_eof + _test_eof4381: cs = 4381; goto _test_eof + _test_eof4382: cs = 4382; goto _test_eof + _test_eof4383: cs = 4383; goto _test_eof + _test_eof4384: cs = 4384; goto _test_eof + _test_eof4385: cs = 4385; goto _test_eof + _test_eof4386: cs = 4386; goto _test_eof + _test_eof4387: cs = 4387; goto _test_eof + _test_eof4388: cs = 4388; goto _test_eof + _test_eof4389: cs = 4389; goto _test_eof + _test_eof4390: cs = 4390; goto _test_eof + _test_eof4391: cs = 4391; goto _test_eof + _test_eof4392: cs = 4392; goto _test_eof + _test_eof4393: cs = 4393; goto _test_eof + _test_eof4394: cs = 4394; goto _test_eof + _test_eof4395: cs = 4395; goto _test_eof 
+ _test_eof4396: cs = 4396; goto _test_eof + _test_eof4397: cs = 4397; goto _test_eof + _test_eof4398: cs = 4398; goto _test_eof + _test_eof4399: cs = 4399; goto _test_eof + _test_eof4400: cs = 4400; goto _test_eof + _test_eof4401: cs = 4401; goto _test_eof + _test_eof4402: cs = 4402; goto _test_eof + _test_eof4403: cs = 4403; goto _test_eof + _test_eof4404: cs = 4404; goto _test_eof + _test_eof4405: cs = 4405; goto _test_eof + _test_eof4406: cs = 4406; goto _test_eof + _test_eof4407: cs = 4407; goto _test_eof + _test_eof4408: cs = 4408; goto _test_eof + _test_eof4409: cs = 4409; goto _test_eof + _test_eof4410: cs = 4410; goto _test_eof + _test_eof4411: cs = 4411; goto _test_eof + _test_eof4412: cs = 4412; goto _test_eof + _test_eof4413: cs = 4413; goto _test_eof + _test_eof4414: cs = 4414; goto _test_eof + _test_eof4415: cs = 4415; goto _test_eof + _test_eof4416: cs = 4416; goto _test_eof + _test_eof4417: cs = 4417; goto _test_eof + _test_eof4418: cs = 4418; goto _test_eof + _test_eof4419: cs = 4419; goto _test_eof + _test_eof4420: cs = 4420; goto _test_eof + _test_eof4421: cs = 4421; goto _test_eof + _test_eof4422: cs = 4422; goto _test_eof + _test_eof4423: cs = 4423; goto _test_eof + _test_eof4424: cs = 4424; goto _test_eof + _test_eof4425: cs = 4425; goto _test_eof + _test_eof4426: cs = 4426; goto _test_eof + _test_eof4427: cs = 4427; goto _test_eof + _test_eof4428: cs = 4428; goto _test_eof + _test_eof4429: cs = 4429; goto _test_eof + _test_eof4430: cs = 4430; goto _test_eof + _test_eof4431: cs = 4431; goto _test_eof + _test_eof4432: cs = 4432; goto _test_eof + _test_eof4433: cs = 4433; goto _test_eof + _test_eof4434: cs = 4434; goto _test_eof + _test_eof4435: cs = 4435; goto _test_eof + _test_eof4436: cs = 4436; goto _test_eof + _test_eof4437: cs = 4437; goto _test_eof + _test_eof4438: cs = 4438; goto _test_eof + _test_eof4439: cs = 4439; goto _test_eof + _test_eof4440: cs = 4440; goto _test_eof + _test_eof4441: cs = 4441; goto _test_eof + _test_eof4442: cs = 
4442; goto _test_eof + _test_eof4443: cs = 4443; goto _test_eof + _test_eof4444: cs = 4444; goto _test_eof + _test_eof4445: cs = 4445; goto _test_eof + _test_eof4446: cs = 4446; goto _test_eof + _test_eof4447: cs = 4447; goto _test_eof + _test_eof4448: cs = 4448; goto _test_eof + _test_eof4449: cs = 4449; goto _test_eof + _test_eof4450: cs = 4450; goto _test_eof + _test_eof4451: cs = 4451; goto _test_eof + _test_eof4452: cs = 4452; goto _test_eof + _test_eof4453: cs = 4453; goto _test_eof + _test_eof4454: cs = 4454; goto _test_eof + _test_eof4455: cs = 4455; goto _test_eof + _test_eof4456: cs = 4456; goto _test_eof + _test_eof4457: cs = 4457; goto _test_eof + _test_eof4458: cs = 4458; goto _test_eof + _test_eof4459: cs = 4459; goto _test_eof + _test_eof4460: cs = 4460; goto _test_eof + _test_eof4461: cs = 4461; goto _test_eof + _test_eof4462: cs = 4462; goto _test_eof + _test_eof4463: cs = 4463; goto _test_eof + _test_eof4464: cs = 4464; goto _test_eof + _test_eof4465: cs = 4465; goto _test_eof + _test_eof4466: cs = 4466; goto _test_eof + _test_eof4467: cs = 4467; goto _test_eof + _test_eof4468: cs = 4468; goto _test_eof + _test_eof4469: cs = 4469; goto _test_eof + _test_eof4470: cs = 4470; goto _test_eof + _test_eof4471: cs = 4471; goto _test_eof + _test_eof4472: cs = 4472; goto _test_eof + _test_eof5083: cs = 5083; goto _test_eof + _test_eof5084: cs = 5084; goto _test_eof + _test_eof5085: cs = 5085; goto _test_eof + _test_eof5086: cs = 5086; goto _test_eof + _test_eof5087: cs = 5087; goto _test_eof + _test_eof5088: cs = 5088; goto _test_eof + _test_eof5089: cs = 5089; goto _test_eof + _test_eof5090: cs = 5090; goto _test_eof + _test_eof5091: cs = 5091; goto _test_eof + _test_eof5092: cs = 5092; goto _test_eof + _test_eof5093: cs = 5093; goto _test_eof + _test_eof5094: cs = 5094; goto _test_eof + _test_eof5095: cs = 5095; goto _test_eof + _test_eof5096: cs = 5096; goto _test_eof + _test_eof5097: cs = 5097; goto _test_eof + _test_eof5098: cs = 5098; goto _test_eof 
+ _test_eof5099: cs = 5099; goto _test_eof + _test_eof5100: cs = 5100; goto _test_eof + _test_eof5101: cs = 5101; goto _test_eof + _test_eof5102: cs = 5102; goto _test_eof + _test_eof5103: cs = 5103; goto _test_eof + _test_eof5104: cs = 5104; goto _test_eof + _test_eof5105: cs = 5105; goto _test_eof + _test_eof5106: cs = 5106; goto _test_eof + _test_eof5107: cs = 5107; goto _test_eof + _test_eof5108: cs = 5108; goto _test_eof + _test_eof5109: cs = 5109; goto _test_eof + _test_eof5110: cs = 5110; goto _test_eof + _test_eof5111: cs = 5111; goto _test_eof + _test_eof5112: cs = 5112; goto _test_eof + _test_eof5113: cs = 5113; goto _test_eof + _test_eof5114: cs = 5114; goto _test_eof + _test_eof5115: cs = 5115; goto _test_eof + _test_eof5116: cs = 5116; goto _test_eof + _test_eof5117: cs = 5117; goto _test_eof + _test_eof5118: cs = 5118; goto _test_eof + _test_eof5119: cs = 5119; goto _test_eof + _test_eof5120: cs = 5120; goto _test_eof + _test_eof5121: cs = 5121; goto _test_eof + _test_eof5122: cs = 5122; goto _test_eof + _test_eof5123: cs = 5123; goto _test_eof + _test_eof5124: cs = 5124; goto _test_eof + _test_eof5125: cs = 5125; goto _test_eof + _test_eof5126: cs = 5126; goto _test_eof + _test_eof5127: cs = 5127; goto _test_eof + _test_eof5128: cs = 5128; goto _test_eof + _test_eof5129: cs = 5129; goto _test_eof + _test_eof5130: cs = 5130; goto _test_eof + _test_eof5131: cs = 5131; goto _test_eof + _test_eof5132: cs = 5132; goto _test_eof + _test_eof5133: cs = 5133; goto _test_eof + _test_eof5134: cs = 5134; goto _test_eof + _test_eof5135: cs = 5135; goto _test_eof + _test_eof5136: cs = 5136; goto _test_eof + _test_eof5137: cs = 5137; goto _test_eof + _test_eof5138: cs = 5138; goto _test_eof + _test_eof5139: cs = 5139; goto _test_eof + _test_eof5140: cs = 5140; goto _test_eof + _test_eof5141: cs = 5141; goto _test_eof + _test_eof5142: cs = 5142; goto _test_eof + _test_eof5143: cs = 5143; goto _test_eof + _test_eof5144: cs = 5144; goto _test_eof + _test_eof5145: cs = 
5145; goto _test_eof + _test_eof5146: cs = 5146; goto _test_eof + _test_eof5147: cs = 5147; goto _test_eof + _test_eof5148: cs = 5148; goto _test_eof + _test_eof5149: cs = 5149; goto _test_eof + _test_eof5150: cs = 5150; goto _test_eof + _test_eof5151: cs = 5151; goto _test_eof + _test_eof5152: cs = 5152; goto _test_eof + _test_eof4473: cs = 4473; goto _test_eof + _test_eof5153: cs = 5153; goto _test_eof + _test_eof5154: cs = 5154; goto _test_eof + _test_eof5155: cs = 5155; goto _test_eof + _test_eof5156: cs = 5156; goto _test_eof + _test_eof5157: cs = 5157; goto _test_eof + _test_eof5158: cs = 5158; goto _test_eof + _test_eof5159: cs = 5159; goto _test_eof + _test_eof5160: cs = 5160; goto _test_eof + _test_eof5161: cs = 5161; goto _test_eof + _test_eof5162: cs = 5162; goto _test_eof + _test_eof5163: cs = 5163; goto _test_eof + _test_eof5164: cs = 5164; goto _test_eof + _test_eof5165: cs = 5165; goto _test_eof + _test_eof5166: cs = 5166; goto _test_eof + _test_eof5167: cs = 5167; goto _test_eof + _test_eof5168: cs = 5168; goto _test_eof + _test_eof5169: cs = 5169; goto _test_eof + _test_eof5170: cs = 5170; goto _test_eof + _test_eof5171: cs = 5171; goto _test_eof + _test_eof5172: cs = 5172; goto _test_eof + _test_eof5173: cs = 5173; goto _test_eof + _test_eof4474: cs = 4474; goto _test_eof + _test_eof5174: cs = 5174; goto _test_eof + _test_eof5175: cs = 5175; goto _test_eof + _test_eof5176: cs = 5176; goto _test_eof + _test_eof5177: cs = 5177; goto _test_eof + _test_eof5178: cs = 5178; goto _test_eof + _test_eof5179: cs = 5179; goto _test_eof + _test_eof4475: cs = 4475; goto _test_eof + _test_eof5180: cs = 5180; goto _test_eof + _test_eof5181: cs = 5181; goto _test_eof + _test_eof4476: cs = 4476; goto _test_eof + _test_eof5182: cs = 5182; goto _test_eof + _test_eof5183: cs = 5183; goto _test_eof + _test_eof5184: cs = 5184; goto _test_eof + _test_eof5185: cs = 5185; goto _test_eof + _test_eof5186: cs = 5186; goto _test_eof + _test_eof5187: cs = 5187; goto _test_eof 
+ _test_eof5188: cs = 5188; goto _test_eof + _test_eof5189: cs = 5189; goto _test_eof + _test_eof5190: cs = 5190; goto _test_eof + _test_eof5191: cs = 5191; goto _test_eof + _test_eof5192: cs = 5192; goto _test_eof + _test_eof5193: cs = 5193; goto _test_eof + _test_eof5194: cs = 5194; goto _test_eof + _test_eof5195: cs = 5195; goto _test_eof + _test_eof5196: cs = 5196; goto _test_eof + _test_eof4477: cs = 4477; goto _test_eof + _test_eof5197: cs = 5197; goto _test_eof + _test_eof5198: cs = 5198; goto _test_eof + _test_eof5199: cs = 5199; goto _test_eof + _test_eof4478: cs = 4478; goto _test_eof + _test_eof5200: cs = 5200; goto _test_eof + _test_eof5201: cs = 5201; goto _test_eof + _test_eof5202: cs = 5202; goto _test_eof + _test_eof5203: cs = 5203; goto _test_eof + _test_eof5204: cs = 5204; goto _test_eof + _test_eof5205: cs = 5205; goto _test_eof + _test_eof4479: cs = 4479; goto _test_eof + _test_eof5206: cs = 5206; goto _test_eof + _test_eof5207: cs = 5207; goto _test_eof + _test_eof4480: cs = 4480; goto _test_eof + _test_eof5208: cs = 5208; goto _test_eof + _test_eof5209: cs = 5209; goto _test_eof + _test_eof5210: cs = 5210; goto _test_eof + _test_eof4481: cs = 4481; goto _test_eof + _test_eof4482: cs = 4482; goto _test_eof + _test_eof4483: cs = 4483; goto _test_eof + _test_eof4484: cs = 4484; goto _test_eof + _test_eof4485: cs = 4485; goto _test_eof + _test_eof4486: cs = 4486; goto _test_eof + _test_eof4487: cs = 4487; goto _test_eof + _test_eof4488: cs = 4488; goto _test_eof + _test_eof4489: cs = 4489; goto _test_eof + _test_eof4490: cs = 4490; goto _test_eof + _test_eof4491: cs = 4491; goto _test_eof + _test_eof4492: cs = 4492; goto _test_eof + _test_eof4493: cs = 4493; goto _test_eof + _test_eof4494: cs = 4494; goto _test_eof + _test_eof4495: cs = 4495; goto _test_eof + _test_eof5211: cs = 5211; goto _test_eof + _test_eof4496: cs = 4496; goto _test_eof + _test_eof4497: cs = 4497; goto _test_eof + _test_eof4498: cs = 4498; goto _test_eof + _test_eof4499: cs = 
4499; goto _test_eof + _test_eof4500: cs = 4500; goto _test_eof + _test_eof4501: cs = 4501; goto _test_eof + _test_eof4502: cs = 4502; goto _test_eof + _test_eof4503: cs = 4503; goto _test_eof + _test_eof4504: cs = 4504; goto _test_eof + _test_eof4505: cs = 4505; goto _test_eof + _test_eof4506: cs = 4506; goto _test_eof + _test_eof4507: cs = 4507; goto _test_eof + _test_eof4508: cs = 4508; goto _test_eof + _test_eof4509: cs = 4509; goto _test_eof + _test_eof4510: cs = 4510; goto _test_eof + _test_eof4511: cs = 4511; goto _test_eof + _test_eof4512: cs = 4512; goto _test_eof + _test_eof4513: cs = 4513; goto _test_eof + _test_eof4514: cs = 4514; goto _test_eof + _test_eof4515: cs = 4515; goto _test_eof + _test_eof4516: cs = 4516; goto _test_eof + _test_eof4517: cs = 4517; goto _test_eof + _test_eof4518: cs = 4518; goto _test_eof + _test_eof4519: cs = 4519; goto _test_eof + _test_eof4520: cs = 4520; goto _test_eof + _test_eof4521: cs = 4521; goto _test_eof + _test_eof4522: cs = 4522; goto _test_eof + _test_eof4523: cs = 4523; goto _test_eof + _test_eof4524: cs = 4524; goto _test_eof + _test_eof4525: cs = 4525; goto _test_eof + _test_eof4526: cs = 4526; goto _test_eof + _test_eof4527: cs = 4527; goto _test_eof + _test_eof4528: cs = 4528; goto _test_eof + _test_eof4529: cs = 4529; goto _test_eof + _test_eof4530: cs = 4530; goto _test_eof + _test_eof4531: cs = 4531; goto _test_eof + _test_eof4532: cs = 4532; goto _test_eof + _test_eof4533: cs = 4533; goto _test_eof + _test_eof4534: cs = 4534; goto _test_eof + _test_eof4535: cs = 4535; goto _test_eof + _test_eof4536: cs = 4536; goto _test_eof + _test_eof4537: cs = 4537; goto _test_eof + _test_eof4538: cs = 4538; goto _test_eof + _test_eof4539: cs = 4539; goto _test_eof + _test_eof4540: cs = 4540; goto _test_eof + _test_eof4541: cs = 4541; goto _test_eof + _test_eof4542: cs = 4542; goto _test_eof + _test_eof4543: cs = 4543; goto _test_eof + _test_eof4544: cs = 4544; goto _test_eof + _test_eof4545: cs = 4545; goto _test_eof 
+ _test_eof4546: cs = 4546; goto _test_eof + _test_eof4547: cs = 4547; goto _test_eof + _test_eof4548: cs = 4548; goto _test_eof + _test_eof4549: cs = 4549; goto _test_eof + _test_eof4550: cs = 4550; goto _test_eof + _test_eof4551: cs = 4551; goto _test_eof + _test_eof4552: cs = 4552; goto _test_eof + _test_eof4553: cs = 4553; goto _test_eof + _test_eof4554: cs = 4554; goto _test_eof + _test_eof4555: cs = 4555; goto _test_eof + _test_eof4556: cs = 4556; goto _test_eof + _test_eof4557: cs = 4557; goto _test_eof + _test_eof4558: cs = 4558; goto _test_eof + _test_eof4559: cs = 4559; goto _test_eof + _test_eof4560: cs = 4560; goto _test_eof + _test_eof4561: cs = 4561; goto _test_eof + _test_eof4562: cs = 4562; goto _test_eof + _test_eof4563: cs = 4563; goto _test_eof + _test_eof4564: cs = 4564; goto _test_eof + _test_eof4565: cs = 4565; goto _test_eof + _test_eof4566: cs = 4566; goto _test_eof + _test_eof4567: cs = 4567; goto _test_eof + _test_eof4568: cs = 4568; goto _test_eof + _test_eof4569: cs = 4569; goto _test_eof + _test_eof4570: cs = 4570; goto _test_eof + _test_eof4571: cs = 4571; goto _test_eof + _test_eof4572: cs = 4572; goto _test_eof + _test_eof4573: cs = 4573; goto _test_eof + _test_eof4574: cs = 4574; goto _test_eof + _test_eof4575: cs = 4575; goto _test_eof + _test_eof4576: cs = 4576; goto _test_eof + _test_eof4577: cs = 4577; goto _test_eof + _test_eof4578: cs = 4578; goto _test_eof + _test_eof4579: cs = 4579; goto _test_eof + _test_eof4580: cs = 4580; goto _test_eof + _test_eof4581: cs = 4581; goto _test_eof + _test_eof4582: cs = 4582; goto _test_eof + _test_eof4583: cs = 4583; goto _test_eof + _test_eof4584: cs = 4584; goto _test_eof + _test_eof4585: cs = 4585; goto _test_eof + _test_eof4586: cs = 4586; goto _test_eof + _test_eof4587: cs = 4587; goto _test_eof + _test_eof4588: cs = 4588; goto _test_eof + _test_eof4589: cs = 4589; goto _test_eof + _test_eof4590: cs = 4590; goto _test_eof + _test_eof4591: cs = 4591; goto _test_eof + _test_eof4592: cs = 
4592; goto _test_eof + _test_eof4593: cs = 4593; goto _test_eof + _test_eof4594: cs = 4594; goto _test_eof + _test_eof4595: cs = 4595; goto _test_eof + _test_eof4596: cs = 4596; goto _test_eof + _test_eof4597: cs = 4597; goto _test_eof + _test_eof4598: cs = 4598; goto _test_eof + _test_eof4599: cs = 4599; goto _test_eof + _test_eof4600: cs = 4600; goto _test_eof + _test_eof4601: cs = 4601; goto _test_eof + _test_eof4602: cs = 4602; goto _test_eof + _test_eof4603: cs = 4603; goto _test_eof + _test_eof4604: cs = 4604; goto _test_eof + _test_eof4605: cs = 4605; goto _test_eof + _test_eof4606: cs = 4606; goto _test_eof + _test_eof4607: cs = 4607; goto _test_eof + _test_eof4608: cs = 4608; goto _test_eof + _test_eof4609: cs = 4609; goto _test_eof + _test_eof4610: cs = 4610; goto _test_eof + _test_eof4611: cs = 4611; goto _test_eof + _test_eof4612: cs = 4612; goto _test_eof + _test_eof4613: cs = 4613; goto _test_eof + _test_eof4614: cs = 4614; goto _test_eof + _test_eof4615: cs = 4615; goto _test_eof + _test_eof4616: cs = 4616; goto _test_eof + _test_eof4617: cs = 4617; goto _test_eof + _test_eof4618: cs = 4618; goto _test_eof + _test_eof4619: cs = 4619; goto _test_eof + _test_eof4620: cs = 4620; goto _test_eof + _test_eof4621: cs = 4621; goto _test_eof + _test_eof4622: cs = 4622; goto _test_eof + _test_eof4623: cs = 4623; goto _test_eof + _test_eof4624: cs = 4624; goto _test_eof + _test_eof4625: cs = 4625; goto _test_eof + _test_eof4626: cs = 4626; goto _test_eof + _test_eof4627: cs = 4627; goto _test_eof + _test_eof4628: cs = 4628; goto _test_eof + _test_eof4629: cs = 4629; goto _test_eof + _test_eof4630: cs = 4630; goto _test_eof + _test_eof4631: cs = 4631; goto _test_eof + _test_eof4632: cs = 4632; goto _test_eof + _test_eof4633: cs = 4633; goto _test_eof + _test_eof4634: cs = 4634; goto _test_eof + _test_eof4635: cs = 4635; goto _test_eof + _test_eof4636: cs = 4636; goto _test_eof + _test_eof4637: cs = 4637; goto _test_eof + _test_eof4638: cs = 4638; goto _test_eof 
+ _test_eof4639: cs = 4639; goto _test_eof + _test_eof4640: cs = 4640; goto _test_eof + _test_eof4641: cs = 4641; goto _test_eof + _test_eof4642: cs = 4642; goto _test_eof + _test_eof4643: cs = 4643; goto _test_eof + _test_eof4644: cs = 4644; goto _test_eof + _test_eof4645: cs = 4645; goto _test_eof + _test_eof4646: cs = 4646; goto _test_eof + _test_eof4647: cs = 4647; goto _test_eof + _test_eof4648: cs = 4648; goto _test_eof + _test_eof4649: cs = 4649; goto _test_eof + _test_eof4650: cs = 4650; goto _test_eof + _test_eof4651: cs = 4651; goto _test_eof + _test_eof4652: cs = 4652; goto _test_eof + _test_eof4653: cs = 4653; goto _test_eof + _test_eof4654: cs = 4654; goto _test_eof + _test_eof4655: cs = 4655; goto _test_eof + _test_eof5212: cs = 5212; goto _test_eof + _test_eof5213: cs = 5213; goto _test_eof + _test_eof5214: cs = 5214; goto _test_eof + _test_eof5215: cs = 5215; goto _test_eof + _test_eof5216: cs = 5216; goto _test_eof + _test_eof5217: cs = 5217; goto _test_eof + _test_eof5218: cs = 5218; goto _test_eof + _test_eof5219: cs = 5219; goto _test_eof + _test_eof5220: cs = 5220; goto _test_eof + _test_eof5221: cs = 5221; goto _test_eof + _test_eof5222: cs = 5222; goto _test_eof + _test_eof5223: cs = 5223; goto _test_eof + _test_eof5224: cs = 5224; goto _test_eof + _test_eof5225: cs = 5225; goto _test_eof + _test_eof5226: cs = 5226; goto _test_eof + _test_eof5227: cs = 5227; goto _test_eof + _test_eof5228: cs = 5228; goto _test_eof + _test_eof5229: cs = 5229; goto _test_eof + _test_eof5230: cs = 5230; goto _test_eof + _test_eof5231: cs = 5231; goto _test_eof + _test_eof5232: cs = 5232; goto _test_eof + _test_eof5233: cs = 5233; goto _test_eof + _test_eof5234: cs = 5234; goto _test_eof + _test_eof5235: cs = 5235; goto _test_eof + _test_eof5236: cs = 5236; goto _test_eof + _test_eof5237: cs = 5237; goto _test_eof + _test_eof5238: cs = 5238; goto _test_eof + _test_eof5239: cs = 5239; goto _test_eof + _test_eof5240: cs = 5240; goto _test_eof + _test_eof5241: cs = 
5241; goto _test_eof + _test_eof5242: cs = 5242; goto _test_eof + _test_eof4656: cs = 4656; goto _test_eof + _test_eof5243: cs = 5243; goto _test_eof + _test_eof5244: cs = 5244; goto _test_eof + _test_eof5245: cs = 5245; goto _test_eof + _test_eof5246: cs = 5246; goto _test_eof + _test_eof5247: cs = 5247; goto _test_eof + _test_eof5248: cs = 5248; goto _test_eof + _test_eof5249: cs = 5249; goto _test_eof + _test_eof5250: cs = 5250; goto _test_eof + _test_eof4657: cs = 4657; goto _test_eof + _test_eof5251: cs = 5251; goto _test_eof + _test_eof5252: cs = 5252; goto _test_eof + _test_eof5253: cs = 5253; goto _test_eof + _test_eof5254: cs = 5254; goto _test_eof + _test_eof5255: cs = 5255; goto _test_eof + _test_eof5256: cs = 5256; goto _test_eof + _test_eof4658: cs = 4658; goto _test_eof + _test_eof5257: cs = 5257; goto _test_eof + _test_eof5258: cs = 5258; goto _test_eof + _test_eof4659: cs = 4659; goto _test_eof + _test_eof5259: cs = 5259; goto _test_eof + _test_eof5260: cs = 5260; goto _test_eof + _test_eof5261: cs = 5261; goto _test_eof + _test_eof5262: cs = 5262; goto _test_eof + _test_eof5263: cs = 5263; goto _test_eof + _test_eof5264: cs = 5264; goto _test_eof + _test_eof5265: cs = 5265; goto _test_eof + _test_eof5266: cs = 5266; goto _test_eof + _test_eof5267: cs = 5267; goto _test_eof + _test_eof5268: cs = 5268; goto _test_eof + _test_eof5269: cs = 5269; goto _test_eof + _test_eof5270: cs = 5270; goto _test_eof + _test_eof5271: cs = 5271; goto _test_eof + _test_eof5272: cs = 5272; goto _test_eof + _test_eof5273: cs = 5273; goto _test_eof + _test_eof5274: cs = 5274; goto _test_eof + _test_eof5275: cs = 5275; goto _test_eof + _test_eof5276: cs = 5276; goto _test_eof + _test_eof5277: cs = 5277; goto _test_eof + _test_eof4660: cs = 4660; goto _test_eof + _test_eof5278: cs = 5278; goto _test_eof + _test_eof5279: cs = 5279; goto _test_eof + _test_eof5280: cs = 5280; goto _test_eof + _test_eof4661: cs = 4661; goto _test_eof + _test_eof5281: cs = 5281; goto _test_eof 
+ _test_eof5282: cs = 5282; goto _test_eof + _test_eof5283: cs = 5283; goto _test_eof + _test_eof5284: cs = 5284; goto _test_eof + _test_eof5285: cs = 5285; goto _test_eof + _test_eof5286: cs = 5286; goto _test_eof + _test_eof4662: cs = 4662; goto _test_eof + _test_eof5287: cs = 5287; goto _test_eof + _test_eof5288: cs = 5288; goto _test_eof + _test_eof5289: cs = 5289; goto _test_eof + _test_eof5290: cs = 5290; goto _test_eof + _test_eof5291: cs = 5291; goto _test_eof + _test_eof5292: cs = 5292; goto _test_eof + _test_eof5293: cs = 5293; goto _test_eof + _test_eof5294: cs = 5294; goto _test_eof + _test_eof5295: cs = 5295; goto _test_eof + _test_eof5296: cs = 5296; goto _test_eof + _test_eof5297: cs = 5297; goto _test_eof + _test_eof5298: cs = 5298; goto _test_eof + _test_eof5299: cs = 5299; goto _test_eof + _test_eof5300: cs = 5300; goto _test_eof + _test_eof5301: cs = 5301; goto _test_eof + _test_eof5302: cs = 5302; goto _test_eof + _test_eof5303: cs = 5303; goto _test_eof + _test_eof5304: cs = 5304; goto _test_eof + _test_eof5305: cs = 5305; goto _test_eof + _test_eof5306: cs = 5306; goto _test_eof + _test_eof5307: cs = 5307; goto _test_eof + _test_eof5308: cs = 5308; goto _test_eof + _test_eof5309: cs = 5309; goto _test_eof + _test_eof5310: cs = 5310; goto _test_eof + _test_eof5311: cs = 5311; goto _test_eof + _test_eof5312: cs = 5312; goto _test_eof + _test_eof5313: cs = 5313; goto _test_eof + _test_eof5314: cs = 5314; goto _test_eof + _test_eof5315: cs = 5315; goto _test_eof + _test_eof5316: cs = 5316; goto _test_eof + _test_eof5317: cs = 5317; goto _test_eof + _test_eof5318: cs = 5318; goto _test_eof + _test_eof5319: cs = 5319; goto _test_eof + _test_eof5320: cs = 5320; goto _test_eof + _test_eof5321: cs = 5321; goto _test_eof + _test_eof5322: cs = 5322; goto _test_eof + _test_eof5323: cs = 5323; goto _test_eof + _test_eof5324: cs = 5324; goto _test_eof + _test_eof5325: cs = 5325; goto _test_eof + _test_eof5326: cs = 5326; goto _test_eof + _test_eof5327: cs = 
5327; goto _test_eof + _test_eof5328: cs = 5328; goto _test_eof + _test_eof5329: cs = 5329; goto _test_eof + _test_eof5330: cs = 5330; goto _test_eof + _test_eof5331: cs = 5331; goto _test_eof + _test_eof5332: cs = 5332; goto _test_eof + _test_eof5333: cs = 5333; goto _test_eof + _test_eof5334: cs = 5334; goto _test_eof + _test_eof5335: cs = 5335; goto _test_eof + _test_eof5336: cs = 5336; goto _test_eof + _test_eof5337: cs = 5337; goto _test_eof + _test_eof5338: cs = 5338; goto _test_eof + _test_eof4663: cs = 4663; goto _test_eof + _test_eof4664: cs = 4664; goto _test_eof + _test_eof4665: cs = 4665; goto _test_eof + _test_eof4666: cs = 4666; goto _test_eof + _test_eof4667: cs = 4667; goto _test_eof + _test_eof4668: cs = 4668; goto _test_eof + _test_eof4669: cs = 4669; goto _test_eof + _test_eof4670: cs = 4670; goto _test_eof + _test_eof5339: cs = 5339; goto _test_eof + _test_eof4671: cs = 4671; goto _test_eof + _test_eof4672: cs = 4672; goto _test_eof + _test_eof4673: cs = 4673; goto _test_eof + _test_eof4674: cs = 4674; goto _test_eof + _test_eof4675: cs = 4675; goto _test_eof + _test_eof4676: cs = 4676; goto _test_eof + _test_eof4677: cs = 4677; goto _test_eof + _test_eof4678: cs = 4678; goto _test_eof + _test_eof4679: cs = 4679; goto _test_eof + _test_eof4680: cs = 4680; goto _test_eof + _test_eof4681: cs = 4681; goto _test_eof + _test_eof4682: cs = 4682; goto _test_eof + _test_eof4683: cs = 4683; goto _test_eof + _test_eof4684: cs = 4684; goto _test_eof + _test_eof4685: cs = 4685; goto _test_eof + _test_eof4686: cs = 4686; goto _test_eof + _test_eof4687: cs = 4687; goto _test_eof + _test_eof4688: cs = 4688; goto _test_eof + _test_eof4689: cs = 4689; goto _test_eof + _test_eof4690: cs = 4690; goto _test_eof + _test_eof4691: cs = 4691; goto _test_eof + _test_eof4692: cs = 4692; goto _test_eof + _test_eof4693: cs = 4693; goto _test_eof + _test_eof4694: cs = 4694; goto _test_eof + _test_eof4695: cs = 4695; goto _test_eof + _test_eof4696: cs = 4696; goto _test_eof 
+ _test_eof4697: cs = 4697; goto _test_eof + _test_eof4698: cs = 4698; goto _test_eof + _test_eof4699: cs = 4699; goto _test_eof + _test_eof4700: cs = 4700; goto _test_eof + _test_eof4701: cs = 4701; goto _test_eof + _test_eof4702: cs = 4702; goto _test_eof + _test_eof4703: cs = 4703; goto _test_eof + _test_eof4704: cs = 4704; goto _test_eof + _test_eof4705: cs = 4705; goto _test_eof + _test_eof4706: cs = 4706; goto _test_eof + _test_eof4707: cs = 4707; goto _test_eof + _test_eof5340: cs = 5340; goto _test_eof + _test_eof4708: cs = 4708; goto _test_eof + _test_eof4709: cs = 4709; goto _test_eof + _test_eof4710: cs = 4710; goto _test_eof + _test_eof4711: cs = 4711; goto _test_eof + _test_eof4712: cs = 4712; goto _test_eof + _test_eof4713: cs = 4713; goto _test_eof + _test_eof4714: cs = 4714; goto _test_eof + _test_eof4715: cs = 4715; goto _test_eof + _test_eof4716: cs = 4716; goto _test_eof + _test_eof4717: cs = 4717; goto _test_eof + _test_eof4718: cs = 4718; goto _test_eof + _test_eof4719: cs = 4719; goto _test_eof + _test_eof4720: cs = 4720; goto _test_eof + _test_eof4721: cs = 4721; goto _test_eof + _test_eof4722: cs = 4722; goto _test_eof + _test_eof4723: cs = 4723; goto _test_eof + _test_eof4724: cs = 4724; goto _test_eof + _test_eof4725: cs = 4725; goto _test_eof + _test_eof4726: cs = 4726; goto _test_eof + _test_eof4727: cs = 4727; goto _test_eof + _test_eof4728: cs = 4728; goto _test_eof + _test_eof4729: cs = 4729; goto _test_eof + _test_eof4730: cs = 4730; goto _test_eof + _test_eof4731: cs = 4731; goto _test_eof + _test_eof4732: cs = 4732; goto _test_eof + _test_eof4733: cs = 4733; goto _test_eof + _test_eof4734: cs = 4734; goto _test_eof + _test_eof4735: cs = 4735; goto _test_eof + _test_eof4736: cs = 4736; goto _test_eof + _test_eof4737: cs = 4737; goto _test_eof + _test_eof4738: cs = 4738; goto _test_eof + _test_eof4739: cs = 4739; goto _test_eof + _test_eof4740: cs = 4740; goto _test_eof + _test_eof4741: cs = 4741; goto _test_eof + _test_eof4742: cs = 
4742; goto _test_eof + _test_eof4743: cs = 4743; goto _test_eof + _test_eof4744: cs = 4744; goto _test_eof + _test_eof4745: cs = 4745; goto _test_eof + _test_eof4746: cs = 4746; goto _test_eof + _test_eof4747: cs = 4747; goto _test_eof + _test_eof4748: cs = 4748; goto _test_eof + _test_eof4749: cs = 4749; goto _test_eof + _test_eof4750: cs = 4750; goto _test_eof + _test_eof4751: cs = 4751; goto _test_eof + _test_eof4752: cs = 4752; goto _test_eof + _test_eof4753: cs = 4753; goto _test_eof + _test_eof4754: cs = 4754; goto _test_eof + _test_eof4755: cs = 4755; goto _test_eof + _test_eof4756: cs = 4756; goto _test_eof + _test_eof4757: cs = 4757; goto _test_eof + _test_eof4758: cs = 4758; goto _test_eof + _test_eof4759: cs = 4759; goto _test_eof + _test_eof4760: cs = 4760; goto _test_eof + _test_eof4761: cs = 4761; goto _test_eof + _test_eof4762: cs = 4762; goto _test_eof + _test_eof4763: cs = 4763; goto _test_eof + _test_eof4764: cs = 4764; goto _test_eof + _test_eof4765: cs = 4765; goto _test_eof + _test_eof4766: cs = 4766; goto _test_eof + _test_eof4767: cs = 4767; goto _test_eof + _test_eof4768: cs = 4768; goto _test_eof + _test_eof4769: cs = 4769; goto _test_eof + _test_eof4770: cs = 4770; goto _test_eof + _test_eof4771: cs = 4771; goto _test_eof + _test_eof4772: cs = 4772; goto _test_eof + _test_eof4773: cs = 4773; goto _test_eof + _test_eof4774: cs = 4774; goto _test_eof + _test_eof4775: cs = 4775; goto _test_eof + _test_eof4776: cs = 4776; goto _test_eof + _test_eof4777: cs = 4777; goto _test_eof + _test_eof4778: cs = 4778; goto _test_eof + _test_eof4779: cs = 4779; goto _test_eof + _test_eof4780: cs = 4780; goto _test_eof + _test_eof4781: cs = 4781; goto _test_eof + _test_eof4782: cs = 4782; goto _test_eof + _test_eof4783: cs = 4783; goto _test_eof + _test_eof4784: cs = 4784; goto _test_eof + _test_eof4785: cs = 4785; goto _test_eof + _test_eof4786: cs = 4786; goto _test_eof + _test_eof4787: cs = 4787; goto _test_eof + _test_eof4788: cs = 4788; goto _test_eof 
+ _test_eof4789: cs = 4789; goto _test_eof + _test_eof4790: cs = 4790; goto _test_eof + _test_eof4791: cs = 4791; goto _test_eof + _test_eof4792: cs = 4792; goto _test_eof + _test_eof4793: cs = 4793; goto _test_eof + _test_eof4794: cs = 4794; goto _test_eof + _test_eof4795: cs = 4795; goto _test_eof + _test_eof4796: cs = 4796; goto _test_eof + _test_eof4797: cs = 4797; goto _test_eof + _test_eof4798: cs = 4798; goto _test_eof + _test_eof4799: cs = 4799; goto _test_eof + _test_eof4800: cs = 4800; goto _test_eof + _test_eof4801: cs = 4801; goto _test_eof + _test_eof4802: cs = 4802; goto _test_eof + _test_eof4803: cs = 4803; goto _test_eof + _test_eof4804: cs = 4804; goto _test_eof + _test_eof4805: cs = 4805; goto _test_eof + _test_eof4806: cs = 4806; goto _test_eof + _test_eof4807: cs = 4807; goto _test_eof + _test_eof4808: cs = 4808; goto _test_eof + _test_eof4809: cs = 4809; goto _test_eof + _test_eof4810: cs = 4810; goto _test_eof + _test_eof4811: cs = 4811; goto _test_eof + _test_eof4812: cs = 4812; goto _test_eof + _test_eof4813: cs = 4813; goto _test_eof + _test_eof4814: cs = 4814; goto _test_eof + _test_eof4815: cs = 4815; goto _test_eof + _test_eof4816: cs = 4816; goto _test_eof + _test_eof4817: cs = 4817; goto _test_eof + _test_eof4818: cs = 4818; goto _test_eof + _test_eof4819: cs = 4819; goto _test_eof + _test_eof4820: cs = 4820; goto _test_eof + _test_eof4821: cs = 4821; goto _test_eof + _test_eof4822: cs = 4822; goto _test_eof + _test_eof4823: cs = 4823; goto _test_eof + _test_eof4824: cs = 4824; goto _test_eof + _test_eof4825: cs = 4825; goto _test_eof + _test_eof4826: cs = 4826; goto _test_eof + _test_eof4827: cs = 4827; goto _test_eof + _test_eof4828: cs = 4828; goto _test_eof + _test_eof4829: cs = 4829; goto _test_eof + _test_eof4830: cs = 4830; goto _test_eof + _test_eof4831: cs = 4831; goto _test_eof + _test_eof4832: cs = 4832; goto _test_eof + _test_eof4833: cs = 4833; goto _test_eof + _test_eof4834: cs = 4834; goto _test_eof + _test_eof4835: cs = 
4835; goto _test_eof + _test_eof4836: cs = 4836; goto _test_eof + _test_eof4837: cs = 4837; goto _test_eof + _test_eof4838: cs = 4838; goto _test_eof + _test_eof4839: cs = 4839; goto _test_eof + _test_eof4840: cs = 4840; goto _test_eof + _test_eof4841: cs = 4841; goto _test_eof + _test_eof4842: cs = 4842; goto _test_eof + _test_eof4843: cs = 4843; goto _test_eof + _test_eof4844: cs = 4844; goto _test_eof + _test_eof4845: cs = 4845; goto _test_eof + _test_eof4846: cs = 4846; goto _test_eof + _test_eof4847: cs = 4847; goto _test_eof + _test_eof4848: cs = 4848; goto _test_eof + _test_eof4849: cs = 4849; goto _test_eof + _test_eof4850: cs = 4850; goto _test_eof + _test_eof4851: cs = 4851; goto _test_eof + _test_eof4852: cs = 4852; goto _test_eof + _test_eof4853: cs = 4853; goto _test_eof + _test_eof4854: cs = 4854; goto _test_eof + _test_eof4855: cs = 4855; goto _test_eof + _test_eof4856: cs = 4856; goto _test_eof + _test_eof4857: cs = 4857; goto _test_eof + _test_eof4858: cs = 4858; goto _test_eof + _test_eof4859: cs = 4859; goto _test_eof + _test_eof4860: cs = 4860; goto _test_eof + _test_eof4861: cs = 4861; goto _test_eof + + _test_eof: {} + if p == eof { + switch cs { + case 4863: + goto tr4499 + case 0: + goto tr0 + case 1: + goto tr2 + case 2: + goto tr2 + case 3: + goto tr0 + case 4: + goto tr0 + case 5: + goto tr0 + case 6: + goto tr0 + case 7: + goto tr0 + case 8: + goto tr0 + case 9: + goto tr0 + case 10: + goto tr0 + case 11: + goto tr0 + case 12: + goto tr0 + case 13: + goto tr0 + case 14: + goto tr2 + case 15: + goto tr2 + case 16: + goto tr2 + case 17: + goto tr2 + case 18: + goto tr2 + case 19: + goto tr2 + case 20: + goto tr2 + case 21: + goto tr2 + case 22: + goto tr2 + case 23: + goto tr2 + case 24: + goto tr2 + case 25: + goto tr2 + case 26: + goto tr2 + case 27: + goto tr2 + case 28: + goto tr2 + case 29: + goto tr2 + case 30: + goto tr2 + case 31: + goto tr2 + case 32: + goto tr2 + case 33: + goto tr2 + case 34: + goto tr2 + case 35: + goto tr2 + 
case 36: + goto tr2 + case 37: + goto tr2 + case 38: + goto tr2 + case 39: + goto tr2 + case 40: + goto tr2 + case 41: + goto tr2 + case 42: + goto tr0 + case 43: + goto tr2 + case 44: + goto tr2 + case 45: + goto tr2 + case 46: + goto tr2 + case 47: + goto tr2 + case 48: + goto tr2 + case 49: + goto tr2 + case 50: + goto tr2 + case 51: + goto tr2 + case 52: + goto tr2 + case 53: + goto tr2 + case 54: + goto tr2 + case 55: + goto tr2 + case 56: + goto tr2 + case 57: + goto tr2 + case 58: + goto tr2 + case 59: + goto tr2 + case 60: + goto tr2 + case 61: + goto tr2 + case 62: + goto tr2 + case 63: + goto tr2 + case 64: + goto tr0 + case 65: + goto tr2 + case 66: + goto tr2 + case 67: + goto tr2 + case 68: + goto tr2 + case 69: + goto tr2 + case 70: + goto tr2 + case 71: + goto tr0 + case 72: + goto tr2 + case 73: + goto tr2 + case 74: + goto tr0 + case 75: + goto tr2 + case 76: + goto tr2 + case 77: + goto tr2 + case 78: + goto tr2 + case 79: + goto tr2 + case 80: + goto tr2 + case 81: + goto tr2 + case 82: + goto tr2 + case 83: + goto tr2 + case 84: + goto tr2 + case 85: + goto tr2 + case 86: + goto tr2 + case 87: + goto tr2 + case 88: + goto tr2 + case 89: + goto tr2 + case 90: + goto tr0 + case 91: + goto tr2 + case 92: + goto tr2 + case 93: + goto tr2 + case 94: + goto tr0 + case 95: + goto tr2 + case 96: + goto tr2 + case 97: + goto tr2 + case 98: + goto tr2 + case 99: + goto tr2 + case 100: + goto tr2 + case 101: + goto tr2 + case 102: + goto tr2 + case 103: + goto tr2 + case 104: + goto tr2 + case 105: + goto tr2 + case 106: + goto tr2 + case 107: + goto tr2 + case 108: + goto tr2 + case 109: + goto tr2 + case 110: + goto tr2 + case 111: + goto tr2 + case 112: + goto tr2 + case 113: + goto tr2 + case 114: + goto tr2 + case 115: + goto tr2 + case 116: + goto tr2 + case 117: + goto tr2 + case 118: + goto tr2 + case 119: + goto tr2 + case 120: + goto tr2 + case 121: + goto tr2 + case 122: + goto tr2 + case 123: + goto tr2 + case 124: + goto tr2 + case 125: + goto 
tr2 + case 126: + goto tr2 + case 127: + goto tr2 + case 128: + goto tr2 + case 129: + goto tr2 + case 130: + goto tr2 + case 131: + goto tr2 + case 132: + goto tr2 + case 133: + goto tr2 + case 134: + goto tr2 + case 135: + goto tr2 + case 136: + goto tr0 + case 137: + goto tr2 + case 138: + goto tr2 + case 139: + goto tr2 + case 140: + goto tr2 + case 4864: + goto tr4519 + case 4865: + goto tr4521 + case 141: + goto tr125 + case 4866: + goto tr4521 + case 4867: + goto tr4562 + case 142: + goto tr2 + case 143: + goto tr2 + case 144: + goto tr2 + case 145: + goto tr2 + case 146: + goto tr2 + case 147: + goto tr2 + case 148: + goto tr2 + case 149: + goto tr2 + case 150: + goto tr2 + case 151: + goto tr2 + case 152: + goto tr2 + case 153: + goto tr2 + case 154: + goto tr2 + case 155: + goto tr2 + case 156: + goto tr2 + case 157: + goto tr2 + case 158: + goto tr2 + case 159: + goto tr2 + case 160: + goto tr2 + case 161: + goto tr2 + case 162: + goto tr2 + case 163: + goto tr2 + case 164: + goto tr2 + case 165: + goto tr2 + case 166: + goto tr2 + case 167: + goto tr2 + case 168: + goto tr2 + case 169: + goto tr2 + case 170: + goto tr2 + case 171: + goto tr2 + case 172: + goto tr2 + case 173: + goto tr2 + case 174: + goto tr2 + case 175: + goto tr2 + case 176: + goto tr2 + case 177: + goto tr2 + case 178: + goto tr2 + case 179: + goto tr2 + case 180: + goto tr2 + case 181: + goto tr2 + case 182: + goto tr2 + case 183: + goto tr2 + case 184: + goto tr2 + case 185: + goto tr2 + case 186: + goto tr2 + case 187: + goto tr2 + case 188: + goto tr2 + case 189: + goto tr2 + case 190: + goto tr2 + case 191: + goto tr2 + case 192: + goto tr2 + case 193: + goto tr2 + case 194: + goto tr2 + case 195: + goto tr2 + case 196: + goto tr2 + case 197: + goto tr2 + case 198: + goto tr2 + case 199: + goto tr2 + case 200: + goto tr2 + case 201: + goto tr2 + case 202: + goto tr2 + case 203: + goto tr2 + case 204: + goto tr2 + case 205: + goto tr2 + case 206: + goto tr2 + case 207: + goto tr2 
+ case 208: + goto tr2 + case 209: + goto tr2 + case 210: + goto tr2 + case 211: + goto tr2 + case 212: + goto tr2 + case 213: + goto tr2 + case 214: + goto tr2 + case 215: + goto tr2 + case 216: + goto tr2 + case 217: + goto tr2 + case 218: + goto tr2 + case 219: + goto tr2 + case 220: + goto tr2 + case 221: + goto tr2 + case 222: + goto tr2 + case 223: + goto tr2 + case 224: + goto tr2 + case 225: + goto tr2 + case 226: + goto tr2 + case 227: + goto tr2 + case 228: + goto tr2 + case 229: + goto tr2 + case 230: + goto tr2 + case 231: + goto tr2 + case 232: + goto tr2 + case 233: + goto tr2 + case 234: + goto tr2 + case 235: + goto tr2 + case 236: + goto tr2 + case 237: + goto tr2 + case 238: + goto tr2 + case 239: + goto tr2 + case 240: + goto tr2 + case 241: + goto tr2 + case 242: + goto tr2 + case 243: + goto tr2 + case 244: + goto tr2 + case 245: + goto tr2 + case 246: + goto tr2 + case 247: + goto tr2 + case 248: + goto tr2 + case 249: + goto tr2 + case 250: + goto tr2 + case 251: + goto tr2 + case 252: + goto tr2 + case 253: + goto tr2 + case 254: + goto tr2 + case 255: + goto tr2 + case 256: + goto tr2 + case 257: + goto tr2 + case 258: + goto tr2 + case 259: + goto tr2 + case 260: + goto tr2 + case 261: + goto tr2 + case 262: + goto tr2 + case 263: + goto tr2 + case 264: + goto tr2 + case 265: + goto tr2 + case 266: + goto tr2 + case 267: + goto tr2 + case 268: + goto tr2 + case 269: + goto tr2 + case 270: + goto tr2 + case 271: + goto tr2 + case 272: + goto tr2 + case 273: + goto tr2 + case 274: + goto tr2 + case 275: + goto tr2 + case 276: + goto tr2 + case 277: + goto tr2 + case 278: + goto tr2 + case 279: + goto tr2 + case 280: + goto tr2 + case 281: + goto tr2 + case 282: + goto tr2 + case 283: + goto tr2 + case 284: + goto tr2 + case 285: + goto tr2 + case 286: + goto tr2 + case 287: + goto tr2 + case 288: + goto tr2 + case 289: + goto tr2 + case 290: + goto tr2 + case 291: + goto tr2 + case 292: + goto tr2 + case 293: + goto tr2 + case 294: + goto 
tr2 + case 295: + goto tr2 + case 296: + goto tr2 + case 297: + goto tr2 + case 298: + goto tr2 + case 299: + goto tr2 + case 300: + goto tr2 + case 301: + goto tr2 + case 302: + goto tr2 + case 303: + goto tr2 + case 304: + goto tr2 + case 305: + goto tr2 + case 306: + goto tr2 + case 307: + goto tr2 + case 308: + goto tr2 + case 309: + goto tr2 + case 310: + goto tr2 + case 311: + goto tr2 + case 312: + goto tr2 + case 313: + goto tr2 + case 314: + goto tr2 + case 315: + goto tr2 + case 316: + goto tr2 + case 317: + goto tr2 + case 318: + goto tr2 + case 319: + goto tr2 + case 320: + goto tr2 + case 321: + goto tr2 + case 322: + goto tr2 + case 323: + goto tr2 + case 324: + goto tr2 + case 325: + goto tr2 + case 326: + goto tr2 + case 327: + goto tr2 + case 328: + goto tr2 + case 329: + goto tr2 + case 330: + goto tr2 + case 331: + goto tr2 + case 332: + goto tr2 + case 333: + goto tr2 + case 334: + goto tr2 + case 335: + goto tr2 + case 336: + goto tr2 + case 337: + goto tr2 + case 338: + goto tr2 + case 339: + goto tr2 + case 340: + goto tr2 + case 341: + goto tr2 + case 342: + goto tr2 + case 343: + goto tr2 + case 344: + goto tr2 + case 345: + goto tr2 + case 346: + goto tr2 + case 347: + goto tr2 + case 348: + goto tr2 + case 349: + goto tr2 + case 350: + goto tr2 + case 351: + goto tr2 + case 352: + goto tr2 + case 353: + goto tr2 + case 354: + goto tr2 + case 355: + goto tr2 + case 356: + goto tr2 + case 357: + goto tr2 + case 358: + goto tr2 + case 359: + goto tr2 + case 360: + goto tr2 + case 361: + goto tr2 + case 362: + goto tr2 + case 363: + goto tr2 + case 364: + goto tr2 + case 365: + goto tr2 + case 366: + goto tr2 + case 367: + goto tr2 + case 368: + goto tr2 + case 369: + goto tr2 + case 370: + goto tr2 + case 371: + goto tr2 + case 372: + goto tr2 + case 373: + goto tr2 + case 374: + goto tr2 + case 375: + goto tr2 + case 376: + goto tr2 + case 377: + goto tr2 + case 378: + goto tr2 + case 379: + goto tr2 + case 380: + goto tr2 + case 381: + 
goto tr2 + case 382: + goto tr2 + case 383: + goto tr2 + case 384: + goto tr2 + case 385: + goto tr2 + case 386: + goto tr2 + case 387: + goto tr2 + case 388: + goto tr2 + case 389: + goto tr2 + case 390: + goto tr2 + case 391: + goto tr2 + case 392: + goto tr2 + case 393: + goto tr2 + case 394: + goto tr2 + case 395: + goto tr2 + case 396: + goto tr2 + case 397: + goto tr2 + case 398: + goto tr2 + case 399: + goto tr2 + case 400: + goto tr2 + case 401: + goto tr2 + case 402: + goto tr2 + case 403: + goto tr2 + case 404: + goto tr2 + case 405: + goto tr2 + case 406: + goto tr2 + case 407: + goto tr2 + case 408: + goto tr2 + case 409: + goto tr2 + case 410: + goto tr2 + case 411: + goto tr2 + case 412: + goto tr2 + case 4868: + goto tr4562 + case 413: + goto tr420 + case 414: + goto tr420 + case 415: + goto tr420 + case 416: + goto tr420 + case 417: + goto tr420 + case 418: + goto tr420 + case 419: + goto tr420 + case 420: + goto tr420 + case 421: + goto tr420 + case 422: + goto tr420 + case 423: + goto tr420 + case 424: + goto tr420 + case 425: + goto tr420 + case 426: + goto tr420 + case 427: + goto tr420 + case 428: + goto tr420 + case 429: + goto tr420 + case 430: + goto tr420 + case 431: + goto tr420 + case 432: + goto tr420 + case 433: + goto tr420 + case 434: + goto tr420 + case 435: + goto tr420 + case 436: + goto tr420 + case 437: + goto tr420 + case 438: + goto tr420 + case 439: + goto tr420 + case 440: + goto tr420 + case 441: + goto tr420 + case 442: + goto tr420 + case 443: + goto tr420 + case 444: + goto tr420 + case 445: + goto tr420 + case 446: + goto tr420 + case 447: + goto tr420 + case 448: + goto tr420 + case 449: + goto tr420 + case 450: + goto tr420 + case 451: + goto tr420 + case 452: + goto tr420 + case 453: + goto tr420 + case 454: + goto tr420 + case 455: + goto tr420 + case 456: + goto tr420 + case 457: + goto tr420 + case 458: + goto tr420 + case 459: + goto tr420 + case 460: + goto tr420 + case 461: + goto tr420 + case 462: + goto tr420 
+ case 463: + goto tr420 + case 464: + goto tr420 + case 465: + goto tr420 + case 466: + goto tr420 + case 467: + goto tr420 + case 468: + goto tr2 + case 469: + goto tr2 + case 470: + goto tr420 + case 471: + goto tr420 + case 472: + goto tr420 + case 473: + goto tr420 + case 474: + goto tr420 + case 475: + goto tr420 + case 476: + goto tr420 + case 477: + goto tr420 + case 478: + goto tr420 + case 479: + goto tr420 + case 480: + goto tr420 + case 481: + goto tr420 + case 482: + goto tr420 + case 483: + goto tr420 + case 484: + goto tr420 + case 485: + goto tr420 + case 486: + goto tr420 + case 487: + goto tr420 + case 488: + goto tr420 + case 489: + goto tr420 + case 490: + goto tr420 + case 491: + goto tr420 + case 492: + goto tr420 + case 493: + goto tr420 + case 494: + goto tr420 + case 495: + goto tr420 + case 496: + goto tr420 + case 497: + goto tr420 + case 498: + goto tr420 + case 499: + goto tr420 + case 500: + goto tr420 + case 501: + goto tr420 + case 502: + goto tr420 + case 503: + goto tr420 + case 504: + goto tr420 + case 505: + goto tr420 + case 506: + goto tr420 + case 507: + goto tr420 + case 508: + goto tr420 + case 509: + goto tr420 + case 510: + goto tr420 + case 511: + goto tr420 + case 512: + goto tr420 + case 513: + goto tr420 + case 514: + goto tr420 + case 515: + goto tr420 + case 516: + goto tr420 + case 517: + goto tr420 + case 518: + goto tr420 + case 519: + goto tr420 + case 520: + goto tr420 + case 521: + goto tr420 + case 522: + goto tr420 + case 523: + goto tr420 + case 524: + goto tr420 + case 525: + goto tr420 + case 526: + goto tr420 + case 527: + goto tr420 + case 528: + goto tr420 + case 529: + goto tr420 + case 530: + goto tr420 + case 531: + goto tr420 + case 532: + goto tr420 + case 533: + goto tr420 + case 534: + goto tr420 + case 535: + goto tr420 + case 536: + goto tr420 + case 537: + goto tr420 + case 538: + goto tr2 + case 539: + goto tr420 + case 540: + goto tr420 + case 541: + goto tr420 + case 542: + goto tr420 + 
case 543: + goto tr420 + case 544: + goto tr420 + case 545: + goto tr420 + case 546: + goto tr420 + case 547: + goto tr420 + case 548: + goto tr420 + case 549: + goto tr420 + case 550: + goto tr420 + case 551: + goto tr420 + case 552: + goto tr420 + case 553: + goto tr420 + case 554: + goto tr420 + case 555: + goto tr420 + case 556: + goto tr420 + case 557: + goto tr420 + case 558: + goto tr420 + case 559: + goto tr420 + case 560: + goto tr420 + case 561: + goto tr420 + case 4869: + goto tr4562 + case 562: + goto tr420 + case 563: + goto tr420 + case 564: + goto tr420 + case 565: + goto tr420 + case 566: + goto tr420 + case 567: + goto tr420 + case 4870: + goto tr4562 + case 568: + goto tr420 + case 569: + goto tr420 + case 570: + goto tr420 + case 571: + goto tr420 + case 572: + goto tr420 + case 573: + goto tr420 + case 574: + goto tr420 + case 4871: + goto tr4562 + case 575: + goto tr420 + case 576: + goto tr420 + case 577: + goto tr420 + case 578: + goto tr420 + case 579: + goto tr420 + case 580: + goto tr420 + case 581: + goto tr420 + case 582: + goto tr420 + case 583: + goto tr420 + case 584: + goto tr420 + case 585: + goto tr420 + case 586: + goto tr420 + case 587: + goto tr420 + case 588: + goto tr420 + case 589: + goto tr420 + case 590: + goto tr420 + case 591: + goto tr420 + case 592: + goto tr420 + case 593: + goto tr420 + case 594: + goto tr420 + case 595: + goto tr420 + case 596: + goto tr420 + case 597: + goto tr420 + case 598: + goto tr420 + case 599: + goto tr420 + case 600: + goto tr420 + case 601: + goto tr420 + case 602: + goto tr420 + case 603: + goto tr420 + case 604: + goto tr420 + case 605: + goto tr420 + case 606: + goto tr420 + case 607: + goto tr420 + case 608: + goto tr420 + case 609: + goto tr420 + case 610: + goto tr420 + case 611: + goto tr420 + case 612: + goto tr420 + case 613: + goto tr420 + case 614: + goto tr420 + case 615: + goto tr420 + case 616: + goto tr420 + case 617: + goto tr420 + case 618: + goto tr420 + case 619: + goto 
tr420 + case 620: + goto tr420 + case 621: + goto tr420 + case 622: + goto tr420 + case 623: + goto tr420 + case 624: + goto tr420 + case 625: + goto tr420 + case 626: + goto tr420 + case 627: + goto tr420 + case 628: + goto tr420 + case 629: + goto tr420 + case 630: + goto tr420 + case 631: + goto tr420 + case 632: + goto tr420 + case 633: + goto tr420 + case 634: + goto tr420 + case 635: + goto tr420 + case 636: + goto tr420 + case 637: + goto tr420 + case 638: + goto tr420 + case 639: + goto tr420 + case 640: + goto tr2 + case 641: + goto tr420 + case 642: + goto tr420 + case 643: + goto tr420 + case 644: + goto tr420 + case 645: + goto tr420 + case 646: + goto tr420 + case 647: + goto tr420 + case 648: + goto tr420 + case 649: + goto tr420 + case 650: + goto tr420 + case 651: + goto tr420 + case 652: + goto tr420 + case 653: + goto tr420 + case 654: + goto tr2 + case 655: + goto tr420 + case 656: + goto tr420 + case 657: + goto tr420 + case 658: + goto tr420 + case 659: + goto tr420 + case 660: + goto tr420 + case 661: + goto tr420 + case 662: + goto tr420 + case 663: + goto tr420 + case 664: + goto tr420 + case 665: + goto tr420 + case 666: + goto tr420 + case 667: + goto tr420 + case 668: + goto tr420 + case 669: + goto tr420 + case 670: + goto tr420 + case 671: + goto tr420 + case 672: + goto tr2 + case 673: + goto tr420 + case 674: + goto tr420 + case 675: + goto tr420 + case 676: + goto tr420 + case 677: + goto tr420 + case 678: + goto tr420 + case 679: + goto tr420 + case 680: + goto tr420 + case 681: + goto tr420 + case 682: + goto tr420 + case 683: + goto tr420 + case 684: + goto tr2 + case 685: + goto tr420 + case 686: + goto tr420 + case 687: + goto tr420 + case 688: + goto tr420 + case 689: + goto tr420 + case 690: + goto tr420 + case 691: + goto tr2 + case 692: + goto tr420 + case 693: + goto tr420 + case 694: + goto tr420 + case 695: + goto tr420 + case 696: + goto tr420 + case 697: + goto tr420 + case 698: + goto tr420 + case 699: + goto tr420 + 
case 700: + goto tr420 + case 701: + goto tr420 + case 702: + goto tr420 + case 703: + goto tr420 + case 704: + goto tr420 + case 705: + goto tr420 + case 706: + goto tr420 + case 707: + goto tr2 + case 708: + goto tr420 + case 709: + goto tr2 + case 710: + goto tr420 + case 711: + goto tr420 + case 712: + goto tr2 + case 713: + goto tr420 + case 714: + goto tr420 + case 715: + goto tr420 + case 716: + goto tr420 + case 717: + goto tr420 + case 718: + goto tr420 + case 719: + goto tr420 + case 720: + goto tr420 + case 721: + goto tr2 + case 722: + goto tr420 + case 723: + goto tr420 + case 724: + goto tr420 + case 725: + goto tr420 + case 726: + goto tr420 + case 727: + goto tr420 + case 728: + goto tr420 + case 729: + goto tr420 + case 730: + goto tr420 + case 731: + goto tr420 + case 732: + goto tr420 + case 733: + goto tr420 + case 734: + goto tr420 + case 735: + goto tr420 + case 736: + goto tr420 + case 737: + goto tr420 + case 738: + goto tr420 + case 739: + goto tr420 + case 740: + goto tr420 + case 741: + goto tr420 + case 742: + goto tr420 + case 743: + goto tr420 + case 744: + goto tr420 + case 745: + goto tr420 + case 746: + goto tr420 + case 747: + goto tr420 + case 748: + goto tr420 + case 749: + goto tr420 + case 750: + goto tr420 + case 751: + goto tr420 + case 752: + goto tr420 + case 753: + goto tr420 + case 754: + goto tr420 + case 755: + goto tr420 + case 756: + goto tr420 + case 757: + goto tr420 + case 758: + goto tr420 + case 759: + goto tr420 + case 760: + goto tr420 + case 761: + goto tr420 + case 762: + goto tr420 + case 763: + goto tr420 + case 764: + goto tr420 + case 765: + goto tr420 + case 766: + goto tr420 + case 767: + goto tr420 + case 768: + goto tr420 + case 769: + goto tr420 + case 770: + goto tr420 + case 771: + goto tr420 + case 772: + goto tr420 + case 773: + goto tr420 + case 774: + goto tr420 + case 775: + goto tr420 + case 776: + goto tr420 + case 777: + goto tr420 + case 778: + goto tr420 + case 779: + goto tr420 + case 
780: + goto tr420 + case 781: + goto tr420 + case 782: + goto tr420 + case 783: + goto tr420 + case 784: + goto tr420 + case 785: + goto tr420 + case 786: + goto tr420 + case 787: + goto tr420 + case 788: + goto tr420 + case 789: + goto tr420 + case 790: + goto tr420 + case 791: + goto tr420 + case 792: + goto tr420 + case 793: + goto tr420 + case 794: + goto tr420 + case 795: + goto tr420 + case 796: + goto tr420 + case 797: + goto tr420 + case 798: + goto tr420 + case 799: + goto tr420 + case 800: + goto tr420 + case 801: + goto tr420 + case 802: + goto tr420 + case 803: + goto tr420 + case 804: + goto tr420 + case 805: + goto tr420 + case 806: + goto tr420 + case 807: + goto tr420 + case 808: + goto tr420 + case 809: + goto tr420 + case 810: + goto tr420 + case 811: + goto tr420 + case 812: + goto tr420 + case 813: + goto tr420 + case 814: + goto tr420 + case 815: + goto tr420 + case 816: + goto tr420 + case 817: + goto tr420 + case 818: + goto tr420 + case 819: + goto tr420 + case 820: + goto tr420 + case 821: + goto tr420 + case 822: + goto tr420 + case 823: + goto tr420 + case 824: + goto tr420 + case 825: + goto tr420 + case 826: + goto tr420 + case 827: + goto tr420 + case 828: + goto tr420 + case 829: + goto tr420 + case 830: + goto tr420 + case 831: + goto tr420 + case 832: + goto tr420 + case 833: + goto tr420 + case 834: + goto tr420 + case 835: + goto tr420 + case 836: + goto tr420 + case 837: + goto tr420 + case 838: + goto tr420 + case 839: + goto tr420 + case 840: + goto tr420 + case 841: + goto tr420 + case 842: + goto tr420 + case 843: + goto tr420 + case 844: + goto tr420 + case 845: + goto tr420 + case 846: + goto tr420 + case 847: + goto tr420 + case 848: + goto tr420 + case 849: + goto tr420 + case 850: + goto tr420 + case 851: + goto tr420 + case 852: + goto tr420 + case 853: + goto tr420 + case 854: + goto tr420 + case 855: + goto tr420 + case 856: + goto tr420 + case 857: + goto tr420 + case 858: + goto tr420 + case 859: + goto tr420 + case 
860: + goto tr420 + case 861: + goto tr420 + case 862: + goto tr420 + case 863: + goto tr420 + case 864: + goto tr420 + case 865: + goto tr420 + case 866: + goto tr420 + case 867: + goto tr420 + case 868: + goto tr420 + case 869: + goto tr420 + case 870: + goto tr2 + case 871: + goto tr420 + case 872: + goto tr420 + case 873: + goto tr2 + case 874: + goto tr420 + case 875: + goto tr420 + case 876: + goto tr420 + case 877: + goto tr420 + case 878: + goto tr420 + case 879: + goto tr420 + case 880: + goto tr420 + case 881: + goto tr420 + case 882: + goto tr420 + case 883: + goto tr420 + case 884: + goto tr420 + case 885: + goto tr420 + case 886: + goto tr420 + case 887: + goto tr420 + case 888: + goto tr420 + case 889: + goto tr420 + case 890: + goto tr420 + case 891: + goto tr420 + case 892: + goto tr420 + case 893: + goto tr420 + case 894: + goto tr420 + case 895: + goto tr420 + case 896: + goto tr420 + case 897: + goto tr420 + case 898: + goto tr420 + case 899: + goto tr420 + case 900: + goto tr420 + case 901: + goto tr420 + case 902: + goto tr420 + case 903: + goto tr420 + case 904: + goto tr420 + case 905: + goto tr420 + case 906: + goto tr420 + case 907: + goto tr420 + case 908: + goto tr420 + case 909: + goto tr420 + case 910: + goto tr420 + case 911: + goto tr420 + case 912: + goto tr420 + case 913: + goto tr420 + case 914: + goto tr420 + case 915: + goto tr420 + case 916: + goto tr420 + case 917: + goto tr420 + case 918: + goto tr420 + case 919: + goto tr420 + case 920: + goto tr420 + case 921: + goto tr420 + case 922: + goto tr420 + case 923: + goto tr420 + case 924: + goto tr420 + case 925: + goto tr420 + case 926: + goto tr420 + case 927: + goto tr420 + case 928: + goto tr420 + case 929: + goto tr420 + case 930: + goto tr420 + case 931: + goto tr420 + case 932: + goto tr420 + case 933: + goto tr420 + case 934: + goto tr420 + case 935: + goto tr420 + case 936: + goto tr420 + case 937: + goto tr420 + case 938: + goto tr420 + case 939: + goto tr420 + case 
940: + goto tr420 + case 941: + goto tr420 + case 942: + goto tr420 + case 943: + goto tr420 + case 944: + goto tr420 + case 945: + goto tr420 + case 946: + goto tr420 + case 947: + goto tr420 + case 948: + goto tr420 + case 949: + goto tr420 + case 950: + goto tr420 + case 951: + goto tr420 + case 952: + goto tr420 + case 953: + goto tr420 + case 954: + goto tr420 + case 955: + goto tr420 + case 956: + goto tr420 + case 957: + goto tr420 + case 958: + goto tr420 + case 959: + goto tr420 + case 960: + goto tr420 + case 961: + goto tr420 + case 962: + goto tr420 + case 963: + goto tr420 + case 964: + goto tr420 + case 965: + goto tr420 + case 966: + goto tr420 + case 967: + goto tr2 + case 968: + goto tr420 + case 969: + goto tr2 + case 970: + goto tr420 + case 971: + goto tr420 + case 972: + goto tr420 + case 973: + goto tr420 + case 974: + goto tr420 + case 975: + goto tr420 + case 976: + goto tr420 + case 977: + goto tr420 + case 978: + goto tr420 + case 979: + goto tr420 + case 980: + goto tr420 + case 981: + goto tr420 + case 982: + goto tr420 + case 983: + goto tr420 + case 984: + goto tr420 + case 985: + goto tr420 + case 986: + goto tr420 + case 987: + goto tr420 + case 988: + goto tr420 + case 989: + goto tr420 + case 990: + goto tr420 + case 991: + goto tr420 + case 992: + goto tr420 + case 993: + goto tr420 + case 994: + goto tr420 + case 995: + goto tr420 + case 996: + goto tr420 + case 997: + goto tr420 + case 998: + goto tr420 + case 999: + goto tr420 + case 1000: + goto tr420 + case 1001: + goto tr420 + case 1002: + goto tr420 + case 1003: + goto tr420 + case 1004: + goto tr420 + case 1005: + goto tr420 + case 1006: + goto tr420 + case 1007: + goto tr420 + case 1008: + goto tr420 + case 1009: + goto tr420 + case 1010: + goto tr420 + case 1011: + goto tr420 + case 1012: + goto tr420 + case 1013: + goto tr420 + case 1014: + goto tr420 + case 1015: + goto tr420 + case 1016: + goto tr420 + case 1017: + goto tr420 + case 1018: + goto tr420 + case 1019: + 
goto tr420 + case 1020: + goto tr420 + case 1021: + goto tr420 + case 1022: + goto tr420 + case 1023: + goto tr420 + case 1024: + goto tr420 + case 1025: + goto tr420 + case 1026: + goto tr420 + case 1027: + goto tr420 + case 1028: + goto tr420 + case 1029: + goto tr420 + case 1030: + goto tr420 + case 1031: + goto tr420 + case 1032: + goto tr420 + case 1033: + goto tr420 + case 1034: + goto tr420 + case 1035: + goto tr420 + case 1036: + goto tr420 + case 1037: + goto tr420 + case 1038: + goto tr420 + case 1039: + goto tr420 + case 1040: + goto tr420 + case 1041: + goto tr420 + case 1042: + goto tr420 + case 1043: + goto tr420 + case 1044: + goto tr420 + case 1045: + goto tr420 + case 1046: + goto tr420 + case 1047: + goto tr420 + case 1048: + goto tr420 + case 1049: + goto tr420 + case 1050: + goto tr420 + case 1051: + goto tr420 + case 1052: + goto tr420 + case 1053: + goto tr420 + case 1054: + goto tr420 + case 1055: + goto tr420 + case 1056: + goto tr420 + case 1057: + goto tr420 + case 1058: + goto tr420 + case 1059: + goto tr420 + case 1060: + goto tr420 + case 1061: + goto tr420 + case 1062: + goto tr420 + case 1063: + goto tr420 + case 1064: + goto tr420 + case 1065: + goto tr420 + case 1066: + goto tr420 + case 1067: + goto tr420 + case 1068: + goto tr420 + case 1069: + goto tr420 + case 1070: + goto tr420 + case 1071: + goto tr420 + case 1072: + goto tr420 + case 1073: + goto tr420 + case 1074: + goto tr420 + case 1075: + goto tr420 + case 1076: + goto tr420 + case 1077: + goto tr420 + case 1078: + goto tr420 + case 1079: + goto tr420 + case 1080: + goto tr420 + case 1081: + goto tr420 + case 1082: + goto tr420 + case 1083: + goto tr420 + case 1084: + goto tr420 + case 1085: + goto tr420 + case 1086: + goto tr420 + case 1087: + goto tr420 + case 1088: + goto tr420 + case 1089: + goto tr420 + case 4872: + goto tr4562 + case 1090: + goto tr420 + case 1091: + goto tr2 + case 1092: + goto tr420 + case 1093: + goto tr420 + case 1094: + goto tr420 + case 1095: 
+ goto tr420 + case 1096: + goto tr420 + case 1097: + goto tr420 + case 1098: + goto tr420 + case 1099: + goto tr420 + case 1100: + goto tr420 + case 1101: + goto tr420 + case 1102: + goto tr420 + case 1103: + goto tr420 + case 1104: + goto tr420 + case 1105: + goto tr420 + case 1106: + goto tr420 + case 1107: + goto tr420 + case 1108: + goto tr420 + case 1109: + goto tr420 + case 1110: + goto tr420 + case 1111: + goto tr420 + case 1112: + goto tr420 + case 1113: + goto tr420 + case 1114: + goto tr420 + case 1115: + goto tr420 + case 1116: + goto tr420 + case 1117: + goto tr420 + case 1118: + goto tr420 + case 1119: + goto tr420 + case 1120: + goto tr420 + case 1121: + goto tr420 + case 1122: + goto tr420 + case 1123: + goto tr420 + case 1124: + goto tr420 + case 1125: + goto tr420 + case 1126: + goto tr420 + case 1127: + goto tr420 + case 1128: + goto tr420 + case 1129: + goto tr420 + case 1130: + goto tr420 + case 1131: + goto tr420 + case 1132: + goto tr420 + case 1133: + goto tr420 + case 1134: + goto tr420 + case 1135: + goto tr420 + case 1136: + goto tr420 + case 1137: + goto tr420 + case 1138: + goto tr420 + case 1139: + goto tr420 + case 1140: + goto tr420 + case 1141: + goto tr420 + case 1142: + goto tr420 + case 1143: + goto tr420 + case 1144: + goto tr420 + case 1145: + goto tr420 + case 1146: + goto tr420 + case 1147: + goto tr420 + case 1148: + goto tr420 + case 1149: + goto tr420 + case 1150: + goto tr420 + case 1151: + goto tr420 + case 1152: + goto tr420 + case 1153: + goto tr420 + case 1154: + goto tr420 + case 1155: + goto tr420 + case 1156: + goto tr420 + case 1157: + goto tr420 + case 1158: + goto tr420 + case 1159: + goto tr420 + case 1160: + goto tr420 + case 1161: + goto tr420 + case 1162: + goto tr420 + case 1163: + goto tr420 + case 1164: + goto tr2 + case 1165: + goto tr2 + case 1166: + goto tr2 + case 1167: + goto tr2 + case 1168: + goto tr420 + case 1169: + goto tr420 + case 1170: + goto tr420 + case 1171: + goto tr420 + case 1172: + 
goto tr420 + case 1173: + goto tr420 + case 1174: + goto tr420 + case 1175: + goto tr420 + case 1176: + goto tr420 + case 1177: + goto tr420 + case 1178: + goto tr420 + case 1179: + goto tr420 + case 1180: + goto tr420 + case 1181: + goto tr420 + case 1182: + goto tr420 + case 1183: + goto tr420 + case 1184: + goto tr420 + case 1185: + goto tr420 + case 1186: + goto tr420 + case 1187: + goto tr2 + case 1188: + goto tr2 + case 1189: + goto tr420 + case 1190: + goto tr420 + case 1191: + goto tr420 + case 1192: + goto tr420 + case 1193: + goto tr420 + case 1194: + goto tr420 + case 1195: + goto tr420 + case 1196: + goto tr420 + case 1197: + goto tr420 + case 1198: + goto tr420 + case 1199: + goto tr420 + case 1200: + goto tr420 + case 1201: + goto tr420 + case 1202: + goto tr420 + case 1203: + goto tr420 + case 1204: + goto tr420 + case 1205: + goto tr420 + case 1206: + goto tr420 + case 1207: + goto tr420 + case 1208: + goto tr420 + case 1209: + goto tr420 + case 1210: + goto tr420 + case 1211: + goto tr420 + case 1212: + goto tr420 + case 1213: + goto tr420 + case 1214: + goto tr420 + case 1215: + goto tr420 + case 1216: + goto tr420 + case 1217: + goto tr420 + case 1218: + goto tr420 + case 1219: + goto tr420 + case 1220: + goto tr420 + case 1221: + goto tr420 + case 1222: + goto tr420 + case 1223: + goto tr420 + case 1224: + goto tr2 + case 1225: + goto tr420 + case 1226: + goto tr420 + case 1227: + goto tr420 + case 1228: + goto tr420 + case 1229: + goto tr420 + case 1230: + goto tr420 + case 1231: + goto tr420 + case 1232: + goto tr420 + case 1233: + goto tr420 + case 1234: + goto tr420 + case 1235: + goto tr420 + case 1236: + goto tr420 + case 1237: + goto tr420 + case 1238: + goto tr420 + case 1239: + goto tr420 + case 1240: + goto tr420 + case 1241: + goto tr420 + case 1242: + goto tr420 + case 1243: + goto tr420 + case 1244: + goto tr420 + case 1245: + goto tr420 + case 1246: + goto tr420 + case 1247: + goto tr420 + case 1248: + goto tr420 + case 1249: + 
goto tr420 + case 1250: + goto tr420 + case 1251: + goto tr420 + case 1252: + goto tr420 + case 1253: + goto tr420 + case 1254: + goto tr420 + case 1255: + goto tr420 + case 1256: + goto tr420 + case 1257: + goto tr420 + case 1258: + goto tr420 + case 1259: + goto tr420 + case 1260: + goto tr420 + case 1261: + goto tr2 + case 1262: + goto tr420 + case 1263: + goto tr420 + case 1264: + goto tr420 + case 1265: + goto tr420 + case 1266: + goto tr420 + case 1267: + goto tr420 + case 1268: + goto tr420 + case 1269: + goto tr420 + case 1270: + goto tr420 + case 1271: + goto tr420 + case 1272: + goto tr420 + case 1273: + goto tr420 + case 1274: + goto tr420 + case 1275: + goto tr420 + case 1276: + goto tr420 + case 1277: + goto tr420 + case 1278: + goto tr420 + case 1279: + goto tr420 + case 1280: + goto tr420 + case 1281: + goto tr420 + case 1282: + goto tr420 + case 1283: + goto tr420 + case 1284: + goto tr420 + case 1285: + goto tr420 + case 1286: + goto tr420 + case 1287: + goto tr420 + case 1288: + goto tr420 + case 1289: + goto tr420 + case 1290: + goto tr420 + case 1291: + goto tr420 + case 1292: + goto tr420 + case 1293: + goto tr420 + case 1294: + goto tr420 + case 1295: + goto tr420 + case 1296: + goto tr420 + case 1297: + goto tr420 + case 1298: + goto tr420 + case 1299: + goto tr420 + case 1300: + goto tr420 + case 1301: + goto tr420 + case 1302: + goto tr420 + case 1303: + goto tr420 + case 1304: + goto tr420 + case 1305: + goto tr420 + case 1306: + goto tr420 + case 1307: + goto tr420 + case 1308: + goto tr420 + case 1309: + goto tr420 + case 1310: + goto tr420 + case 1311: + goto tr420 + case 1312: + goto tr420 + case 1313: + goto tr420 + case 1314: + goto tr420 + case 1315: + goto tr420 + case 1316: + goto tr420 + case 1317: + goto tr420 + case 1318: + goto tr420 + case 1319: + goto tr420 + case 1320: + goto tr420 + case 1321: + goto tr420 + case 1322: + goto tr420 + case 1323: + goto tr420 + case 1324: + goto tr420 + case 1325: + goto tr420 + case 1326: + 
goto tr420 + case 1327: + goto tr420 + case 1328: + goto tr420 + case 1329: + goto tr420 + case 1330: + goto tr420 + case 1331: + goto tr420 + case 1332: + goto tr420 + case 1333: + goto tr420 + case 1334: + goto tr420 + case 1335: + goto tr420 + case 1336: + goto tr420 + case 1337: + goto tr420 + case 1338: + goto tr420 + case 1339: + goto tr420 + case 1340: + goto tr420 + case 1341: + goto tr420 + case 1342: + goto tr420 + case 1343: + goto tr420 + case 1344: + goto tr420 + case 1345: + goto tr420 + case 1346: + goto tr420 + case 1347: + goto tr420 + case 1348: + goto tr420 + case 1349: + goto tr420 + case 1350: + goto tr420 + case 1351: + goto tr420 + case 1352: + goto tr420 + case 1353: + goto tr420 + case 1354: + goto tr420 + case 1355: + goto tr420 + case 1356: + goto tr420 + case 1357: + goto tr420 + case 1358: + goto tr420 + case 1359: + goto tr420 + case 1360: + goto tr420 + case 1361: + goto tr420 + case 1362: + goto tr420 + case 1363: + goto tr420 + case 1364: + goto tr420 + case 1365: + goto tr420 + case 1366: + goto tr420 + case 1367: + goto tr420 + case 1368: + goto tr420 + case 1369: + goto tr420 + case 1370: + goto tr420 + case 1371: + goto tr420 + case 1372: + goto tr420 + case 1373: + goto tr420 + case 1374: + goto tr420 + case 1375: + goto tr420 + case 1376: + goto tr420 + case 1377: + goto tr420 + case 1378: + goto tr420 + case 1379: + goto tr420 + case 1380: + goto tr420 + case 1381: + goto tr420 + case 1382: + goto tr420 + case 1383: + goto tr420 + case 1384: + goto tr420 + case 1385: + goto tr420 + case 1386: + goto tr420 + case 1387: + goto tr420 + case 1388: + goto tr420 + case 1389: + goto tr420 + case 1390: + goto tr420 + case 1391: + goto tr420 + case 1392: + goto tr420 + case 1393: + goto tr420 + case 1394: + goto tr420 + case 1395: + goto tr420 + case 1396: + goto tr420 + case 1397: + goto tr420 + case 1398: + goto tr420 + case 1399: + goto tr420 + case 1400: + goto tr420 + case 1401: + goto tr420 + case 1402: + goto tr420 + case 1403: 
+ goto tr420 + case 1404: + goto tr420 + case 1405: + goto tr420 + case 1406: + goto tr420 + case 1407: + goto tr420 + case 1408: + goto tr420 + case 1409: + goto tr420 + case 1410: + goto tr420 + case 1411: + goto tr420 + case 1412: + goto tr420 + case 1413: + goto tr420 + case 1414: + goto tr420 + case 1415: + goto tr420 + case 1416: + goto tr420 + case 1417: + goto tr420 + case 1418: + goto tr420 + case 1419: + goto tr420 + case 1420: + goto tr420 + case 1421: + goto tr420 + case 1422: + goto tr420 + case 1423: + goto tr420 + case 1424: + goto tr420 + case 1425: + goto tr420 + case 1426: + goto tr420 + case 1427: + goto tr420 + case 1428: + goto tr420 + case 1429: + goto tr420 + case 1430: + goto tr420 + case 1431: + goto tr420 + case 1432: + goto tr420 + case 1433: + goto tr420 + case 1434: + goto tr420 + case 1435: + goto tr420 + case 1436: + goto tr420 + case 1437: + goto tr420 + case 1438: + goto tr420 + case 1439: + goto tr420 + case 1440: + goto tr420 + case 1441: + goto tr420 + case 1442: + goto tr420 + case 1443: + goto tr420 + case 1444: + goto tr420 + case 1445: + goto tr420 + case 1446: + goto tr420 + case 1447: + goto tr420 + case 1448: + goto tr420 + case 1449: + goto tr420 + case 1450: + goto tr420 + case 1451: + goto tr420 + case 1452: + goto tr420 + case 1453: + goto tr420 + case 1454: + goto tr420 + case 1455: + goto tr420 + case 1456: + goto tr420 + case 1457: + goto tr420 + case 1458: + goto tr420 + case 1459: + goto tr420 + case 1460: + goto tr420 + case 1461: + goto tr420 + case 1462: + goto tr420 + case 1463: + goto tr420 + case 1464: + goto tr420 + case 1465: + goto tr420 + case 1466: + goto tr420 + case 1467: + goto tr420 + case 1468: + goto tr420 + case 1469: + goto tr420 + case 1470: + goto tr420 + case 1471: + goto tr420 + case 1472: + goto tr420 + case 1473: + goto tr2 + case 1474: + goto tr2 + case 1475: + goto tr2 + case 1476: + goto tr2 + case 1477: + goto tr2 + case 1478: + goto tr2 + case 1479: + goto tr2 + case 1480: + goto tr2 
+ case 1481: + goto tr2 + case 1482: + goto tr2 + case 1483: + goto tr2 + case 1484: + goto tr2 + case 1485: + goto tr2 + case 1486: + goto tr2 + case 1487: + goto tr2 + case 1488: + goto tr2 + case 1489: + goto tr2 + case 1490: + goto tr2 + case 1491: + goto tr2 + case 1492: + goto tr2 + case 1493: + goto tr2 + case 1494: + goto tr2 + case 1495: + goto tr2 + case 1496: + goto tr2 + case 1497: + goto tr2 + case 1498: + goto tr2 + case 1499: + goto tr2 + case 1500: + goto tr2 + case 1501: + goto tr2 + case 1502: + goto tr2 + case 1503: + goto tr420 + case 1504: + goto tr2 + case 1505: + goto tr2 + case 1506: + goto tr2 + case 1507: + goto tr2 + case 1508: + goto tr2 + case 1509: + goto tr2 + case 1510: + goto tr2 + case 1511: + goto tr2 + case 1512: + goto tr2 + case 1513: + goto tr2 + case 1514: + goto tr2 + case 1515: + goto tr2 + case 1516: + goto tr2 + case 1517: + goto tr2 + case 1518: + goto tr2 + case 1519: + goto tr2 + case 1520: + goto tr2 + case 1521: + goto tr2 + case 1522: + goto tr2 + case 1523: + goto tr420 + case 1524: + goto tr2 + case 1525: + goto tr2 + case 1526: + goto tr2 + case 1527: + goto tr2 + case 1528: + goto tr2 + case 1529: + goto tr2 + case 1530: + goto tr420 + case 1531: + goto tr2 + case 1532: + goto tr2 + case 1533: + goto tr420 + case 1534: + goto tr2 + case 1535: + goto tr2 + case 1536: + goto tr2 + case 1537: + goto tr2 + case 1538: + goto tr2 + case 1539: + goto tr2 + case 1540: + goto tr2 + case 1541: + goto tr2 + case 1542: + goto tr2 + case 1543: + goto tr2 + case 1544: + goto tr2 + case 1545: + goto tr420 + case 1546: + goto tr2 + case 1547: + goto tr2 + case 1548: + goto tr2 + case 1549: + goto tr2 + case 1550: + goto tr2 + case 1551: + goto tr420 + case 1552: + goto tr2 + case 1553: + goto tr2 + case 1554: + goto tr2 + case 1555: + goto tr2 + case 1556: + goto tr2 + case 1557: + goto tr2 + case 1558: + goto tr2 + case 1559: + goto tr2 + case 1560: + goto tr2 + case 1561: + goto tr2 + case 1562: + goto tr2 + case 1563: + goto 
tr2 + case 1564: + goto tr2 + case 1565: + goto tr2 + case 1566: + goto tr2 + case 1567: + goto tr2 + case 1568: + goto tr2 + case 1569: + goto tr2 + case 1570: + goto tr2 + case 1571: + goto tr2 + case 1572: + goto tr2 + case 1573: + goto tr2 + case 1574: + goto tr2 + case 1575: + goto tr2 + case 1576: + goto tr2 + case 1577: + goto tr2 + case 1578: + goto tr2 + case 1579: + goto tr2 + case 1580: + goto tr2 + case 1581: + goto tr2 + case 1582: + goto tr2 + case 1583: + goto tr2 + case 1584: + goto tr2 + case 1585: + goto tr2 + case 1586: + goto tr2 + case 1587: + goto tr2 + case 1588: + goto tr420 + case 1589: + goto tr2 + case 1590: + goto tr2 + case 1591: + goto tr2 + case 4873: + goto tr4521 + case 1592: + goto tr125 + case 1593: + goto tr125 + case 1594: + goto tr125 + case 1595: + goto tr125 + case 1596: + goto tr125 + case 1597: + goto tr125 + case 1598: + goto tr125 + case 1599: + goto tr125 + case 1600: + goto tr125 + case 1601: + goto tr125 + case 1602: + goto tr125 + case 1603: + goto tr125 + case 1604: + goto tr125 + case 1605: + goto tr125 + case 1606: + goto tr125 + case 1607: + goto tr125 + case 1608: + goto tr125 + case 1609: + goto tr125 + case 1610: + goto tr125 + case 1611: + goto tr125 + case 1612: + goto tr125 + case 1613: + goto tr125 + case 1614: + goto tr125 + case 1615: + goto tr125 + case 1616: + goto tr125 + case 1617: + goto tr125 + case 1618: + goto tr125 + case 1619: + goto tr125 + case 1620: + goto tr125 + case 1621: + goto tr125 + case 1622: + goto tr125 + case 1623: + goto tr125 + case 1624: + goto tr125 + case 1625: + goto tr125 + case 1626: + goto tr125 + case 1627: + goto tr125 + case 1628: + goto tr125 + case 1629: + goto tr125 + case 1630: + goto tr125 + case 1631: + goto tr125 + case 1632: + goto tr125 + case 1633: + goto tr125 + case 1634: + goto tr125 + case 1635: + goto tr125 + case 1636: + goto tr125 + case 1637: + goto tr125 + case 1638: + goto tr125 + case 1639: + goto tr125 + case 1640: + goto tr125 + case 1641: + goto 
tr125 + case 1642: + goto tr125 + case 1643: + goto tr125 + case 1644: + goto tr125 + case 1645: + goto tr125 + case 1646: + goto tr125 + case 1647: + goto tr125 + case 1648: + goto tr125 + case 1649: + goto tr2 + case 1650: + goto tr2 + case 1651: + goto tr125 + case 1652: + goto tr125 + case 1653: + goto tr125 + case 1654: + goto tr125 + case 1655: + goto tr125 + case 1656: + goto tr125 + case 1657: + goto tr125 + case 1658: + goto tr125 + case 1659: + goto tr2 + case 1660: + goto tr125 + case 1661: + goto tr125 + case 1662: + goto tr125 + case 1663: + goto tr125 + case 1664: + goto tr125 + case 1665: + goto tr125 + case 1666: + goto tr125 + case 1667: + goto tr125 + case 1668: + goto tr125 + case 1669: + goto tr125 + case 1670: + goto tr125 + case 1671: + goto tr125 + case 1672: + goto tr125 + case 1673: + goto tr2 + case 1674: + goto tr125 + case 1675: + goto tr125 + case 1676: + goto tr125 + case 1677: + goto tr125 + case 1678: + goto tr125 + case 1679: + goto tr125 + case 1680: + goto tr125 + case 1681: + goto tr125 + case 1682: + goto tr125 + case 1683: + goto tr125 + case 1684: + goto tr125 + case 1685: + goto tr125 + case 1686: + goto tr125 + case 1687: + goto tr125 + case 1688: + goto tr125 + case 1689: + goto tr125 + case 1690: + goto tr125 + case 1691: + goto tr125 + case 1692: + goto tr125 + case 1693: + goto tr125 + case 1694: + goto tr125 + case 1695: + goto tr125 + case 1696: + goto tr125 + case 1697: + goto tr125 + case 1698: + goto tr125 + case 1699: + goto tr125 + case 1700: + goto tr125 + case 1701: + goto tr125 + case 1702: + goto tr2 + case 1703: + goto tr125 + case 1704: + goto tr125 + case 1705: + goto tr125 + case 1706: + goto tr125 + case 1707: + goto tr125 + case 1708: + goto tr125 + case 1709: + goto tr2 + case 1710: + goto tr125 + case 1711: + goto tr125 + case 1712: + goto tr125 + case 1713: + goto tr125 + case 1714: + goto tr125 + case 1715: + goto tr125 + case 1716: + goto tr125 + case 1717: + goto tr125 + case 1718: + goto tr125 + 
case 1719: + goto tr125 + case 1720: + goto tr125 + case 1721: + goto tr125 + case 1722: + goto tr125 + case 1723: + goto tr125 + case 1724: + goto tr2 + case 1725: + goto tr125 + case 1726: + goto tr2 + case 1727: + goto tr125 + case 1728: + goto tr2 + case 1729: + goto tr125 + case 1730: + goto tr125 + case 1731: + goto tr2 + case 1732: + goto tr125 + case 1733: + goto tr125 + case 1734: + goto tr125 + case 1735: + goto tr125 + case 1736: + goto tr125 + case 1737: + goto tr125 + case 1738: + goto tr125 + case 1739: + goto tr125 + case 1740: + goto tr2 + case 1741: + goto tr125 + case 1742: + goto tr125 + case 1743: + goto tr125 + case 1744: + goto tr125 + case 1745: + goto tr125 + case 1746: + goto tr125 + case 1747: + goto tr125 + case 1748: + goto tr125 + case 1749: + goto tr125 + case 1750: + goto tr125 + case 1751: + goto tr125 + case 1752: + goto tr125 + case 1753: + goto tr125 + case 1754: + goto tr125 + case 1755: + goto tr125 + case 1756: + goto tr125 + case 1757: + goto tr125 + case 1758: + goto tr125 + case 1759: + goto tr125 + case 1760: + goto tr125 + case 1761: + goto tr125 + case 1762: + goto tr125 + case 1763: + goto tr125 + case 1764: + goto tr125 + case 1765: + goto tr125 + case 1766: + goto tr125 + case 1767: + goto tr125 + case 1768: + goto tr125 + case 1769: + goto tr125 + case 1770: + goto tr125 + case 1771: + goto tr125 + case 1772: + goto tr125 + case 1773: + goto tr125 + case 1774: + goto tr125 + case 1775: + goto tr125 + case 1776: + goto tr125 + case 1777: + goto tr125 + case 1778: + goto tr125 + case 1779: + goto tr125 + case 1780: + goto tr125 + case 1781: + goto tr125 + case 1782: + goto tr125 + case 1783: + goto tr125 + case 1784: + goto tr125 + case 1785: + goto tr125 + case 1786: + goto tr125 + case 1787: + goto tr125 + case 1788: + goto tr125 + case 1789: + goto tr125 + case 1790: + goto tr125 + case 1791: + goto tr125 + case 1792: + goto tr125 + case 1793: + goto tr125 + case 1794: + goto tr125 + case 1795: + goto tr125 + case 
1796: + goto tr125 + case 1797: + goto tr125 + case 1798: + goto tr125 + case 1799: + goto tr125 + case 1800: + goto tr125 + case 1801: + goto tr125 + case 1802: + goto tr125 + case 1803: + goto tr125 + case 1804: + goto tr125 + case 1805: + goto tr125 + case 1806: + goto tr125 + case 1807: + goto tr125 + case 1808: + goto tr125 + case 1809: + goto tr125 + case 1810: + goto tr125 + case 1811: + goto tr125 + case 1812: + goto tr125 + case 1813: + goto tr125 + case 1814: + goto tr125 + case 1815: + goto tr125 + case 1816: + goto tr125 + case 1817: + goto tr125 + case 1818: + goto tr125 + case 1819: + goto tr125 + case 1820: + goto tr125 + case 1821: + goto tr125 + case 1822: + goto tr125 + case 1823: + goto tr125 + case 1824: + goto tr125 + case 1825: + goto tr125 + case 1826: + goto tr125 + case 1827: + goto tr125 + case 1828: + goto tr125 + case 1829: + goto tr125 + case 1830: + goto tr125 + case 1831: + goto tr125 + case 1832: + goto tr125 + case 1833: + goto tr125 + case 1834: + goto tr125 + case 1835: + goto tr125 + case 1836: + goto tr125 + case 1837: + goto tr125 + case 1838: + goto tr125 + case 1839: + goto tr125 + case 1840: + goto tr125 + case 1841: + goto tr125 + case 1842: + goto tr125 + case 1843: + goto tr125 + case 1844: + goto tr125 + case 1845: + goto tr125 + case 1846: + goto tr125 + case 1847: + goto tr125 + case 1848: + goto tr125 + case 1849: + goto tr125 + case 1850: + goto tr125 + case 1851: + goto tr125 + case 1852: + goto tr125 + case 1853: + goto tr125 + case 1854: + goto tr125 + case 1855: + goto tr125 + case 1856: + goto tr125 + case 1857: + goto tr125 + case 1858: + goto tr125 + case 1859: + goto tr125 + case 1860: + goto tr125 + case 1861: + goto tr125 + case 1862: + goto tr125 + case 1863: + goto tr125 + case 1864: + goto tr125 + case 1865: + goto tr125 + case 1866: + goto tr125 + case 1867: + goto tr125 + case 1868: + goto tr125 + case 1869: + goto tr125 + case 1870: + goto tr125 + case 1871: + goto tr125 + case 1872: + goto tr125 + 
case 1873: + goto tr125 + case 1874: + goto tr125 + case 1875: + goto tr125 + case 1876: + goto tr125 + case 1877: + goto tr125 + case 1878: + goto tr125 + case 1879: + goto tr125 + case 1880: + goto tr125 + case 1881: + goto tr125 + case 1882: + goto tr125 + case 1883: + goto tr125 + case 1884: + goto tr125 + case 1885: + goto tr125 + case 1886: + goto tr125 + case 1887: + goto tr125 + case 1888: + goto tr125 + case 1889: + goto tr125 + case 1890: + goto tr125 + case 1891: + goto tr125 + case 1892: + goto tr125 + case 1893: + goto tr125 + case 1894: + goto tr125 + case 1895: + goto tr125 + case 1896: + goto tr125 + case 1897: + goto tr125 + case 1898: + goto tr125 + case 1899: + goto tr125 + case 1900: + goto tr125 + case 1901: + goto tr125 + case 1902: + goto tr125 + case 1903: + goto tr125 + case 1904: + goto tr125 + case 1905: + goto tr125 + case 1906: + goto tr125 + case 1907: + goto tr125 + case 1908: + goto tr125 + case 1909: + goto tr125 + case 1910: + goto tr125 + case 1911: + goto tr125 + case 1912: + goto tr125 + case 1913: + goto tr125 + case 1914: + goto tr125 + case 1915: + goto tr125 + case 1916: + goto tr125 + case 1917: + goto tr125 + case 1918: + goto tr125 + case 1919: + goto tr125 + case 1920: + goto tr125 + case 1921: + goto tr125 + case 1922: + goto tr125 + case 1923: + goto tr125 + case 1924: + goto tr125 + case 1925: + goto tr125 + case 1926: + goto tr125 + case 1927: + goto tr125 + case 1928: + goto tr125 + case 1929: + goto tr125 + case 1930: + goto tr125 + case 1931: + goto tr125 + case 1932: + goto tr125 + case 1933: + goto tr125 + case 1934: + goto tr125 + case 1935: + goto tr125 + case 1936: + goto tr125 + case 1937: + goto tr125 + case 1938: + goto tr125 + case 1939: + goto tr125 + case 1940: + goto tr125 + case 1941: + goto tr125 + case 1942: + goto tr125 + case 1943: + goto tr125 + case 1944: + goto tr125 + case 1945: + goto tr125 + case 1946: + goto tr125 + case 1947: + goto tr125 + case 1948: + goto tr125 + case 1949: + goto tr125 
+ case 1950: + goto tr125 + case 1951: + goto tr125 + case 1952: + goto tr125 + case 1953: + goto tr125 + case 1954: + goto tr125 + case 1955: + goto tr125 + case 1956: + goto tr125 + case 1957: + goto tr125 + case 1958: + goto tr125 + case 1959: + goto tr125 + case 1960: + goto tr125 + case 1961: + goto tr125 + case 1962: + goto tr125 + case 1963: + goto tr125 + case 1964: + goto tr125 + case 1965: + goto tr125 + case 1966: + goto tr125 + case 1967: + goto tr125 + case 1968: + goto tr125 + case 1969: + goto tr125 + case 1970: + goto tr125 + case 1971: + goto tr125 + case 1972: + goto tr125 + case 1973: + goto tr125 + case 1974: + goto tr125 + case 1975: + goto tr125 + case 1976: + goto tr125 + case 1977: + goto tr125 + case 1978: + goto tr125 + case 1979: + goto tr125 + case 1980: + goto tr125 + case 1981: + goto tr125 + case 1982: + goto tr125 + case 1983: + goto tr125 + case 1984: + goto tr125 + case 1985: + goto tr125 + case 1986: + goto tr125 + case 1987: + goto tr125 + case 1988: + goto tr125 + case 1989: + goto tr125 + case 1990: + goto tr125 + case 1991: + goto tr125 + case 1992: + goto tr125 + case 1993: + goto tr125 + case 1994: + goto tr125 + case 1995: + goto tr125 + case 1996: + goto tr125 + case 1997: + goto tr125 + case 1998: + goto tr125 + case 1999: + goto tr125 + case 2000: + goto tr125 + case 2001: + goto tr125 + case 2002: + goto tr125 + case 2003: + goto tr125 + case 2004: + goto tr125 + case 2005: + goto tr125 + case 2006: + goto tr125 + case 2007: + goto tr125 + case 2008: + goto tr125 + case 2009: + goto tr125 + case 2010: + goto tr125 + case 2011: + goto tr125 + case 2012: + goto tr125 + case 2013: + goto tr125 + case 2014: + goto tr125 + case 2015: + goto tr125 + case 2016: + goto tr125 + case 2017: + goto tr125 + case 2018: + goto tr125 + case 2019: + goto tr125 + case 2020: + goto tr125 + case 2021: + goto tr125 + case 2022: + goto tr125 + case 2023: + goto tr125 + case 2024: + goto tr125 + case 2025: + goto tr125 + case 2026: + goto 
tr125 + case 2027: + goto tr125 + case 2028: + goto tr125 + case 2029: + goto tr125 + case 2030: + goto tr125 + case 2031: + goto tr125 + case 2032: + goto tr125 + case 2033: + goto tr125 + case 2034: + goto tr125 + case 2035: + goto tr125 + case 2036: + goto tr125 + case 2037: + goto tr125 + case 2038: + goto tr125 + case 2039: + goto tr125 + case 2040: + goto tr125 + case 2041: + goto tr125 + case 2042: + goto tr125 + case 2043: + goto tr125 + case 2044: + goto tr125 + case 2045: + goto tr125 + case 2046: + goto tr125 + case 2047: + goto tr125 + case 2048: + goto tr125 + case 2049: + goto tr125 + case 2050: + goto tr125 + case 2051: + goto tr125 + case 2052: + goto tr125 + case 2053: + goto tr125 + case 2054: + goto tr125 + case 2055: + goto tr125 + case 2056: + goto tr125 + case 2057: + goto tr125 + case 2058: + goto tr125 + case 2059: + goto tr125 + case 2060: + goto tr125 + case 2061: + goto tr125 + case 2062: + goto tr125 + case 2063: + goto tr125 + case 2064: + goto tr125 + case 2065: + goto tr125 + case 2066: + goto tr125 + case 2067: + goto tr125 + case 2068: + goto tr125 + case 2069: + goto tr125 + case 2070: + goto tr125 + case 2071: + goto tr125 + case 2072: + goto tr125 + case 2073: + goto tr125 + case 2074: + goto tr125 + case 2075: + goto tr125 + case 2076: + goto tr125 + case 2077: + goto tr125 + case 2078: + goto tr125 + case 2079: + goto tr125 + case 2080: + goto tr125 + case 2081: + goto tr125 + case 2082: + goto tr125 + case 2083: + goto tr125 + case 2084: + goto tr125 + case 2085: + goto tr125 + case 2086: + goto tr125 + case 2087: + goto tr125 + case 2088: + goto tr125 + case 2089: + goto tr125 + case 2090: + goto tr125 + case 2091: + goto tr125 + case 2092: + goto tr125 + case 2093: + goto tr125 + case 2094: + goto tr125 + case 2095: + goto tr125 + case 2096: + goto tr125 + case 2097: + goto tr125 + case 2098: + goto tr125 + case 2099: + goto tr125 + case 2100: + goto tr125 + case 2101: + goto tr125 + case 2102: + goto tr125 + case 2103: + 
goto tr125 + case 2104: + goto tr125 + case 2105: + goto tr125 + case 2106: + goto tr125 + case 2107: + goto tr125 + case 2108: + goto tr125 + case 2109: + goto tr125 + case 2110: + goto tr125 + case 2111: + goto tr125 + case 2112: + goto tr125 + case 2113: + goto tr125 + case 2114: + goto tr125 + case 2115: + goto tr125 + case 2116: + goto tr125 + case 2117: + goto tr125 + case 2118: + goto tr125 + case 2119: + goto tr125 + case 2120: + goto tr125 + case 2121: + goto tr125 + case 2122: + goto tr125 + case 2123: + goto tr125 + case 2124: + goto tr125 + case 2125: + goto tr125 + case 2126: + goto tr125 + case 2127: + goto tr125 + case 2128: + goto tr125 + case 2129: + goto tr125 + case 2130: + goto tr125 + case 2131: + goto tr125 + case 2132: + goto tr125 + case 2133: + goto tr125 + case 2134: + goto tr125 + case 2135: + goto tr125 + case 2136: + goto tr125 + case 2137: + goto tr125 + case 2138: + goto tr125 + case 2139: + goto tr125 + case 2140: + goto tr125 + case 2141: + goto tr125 + case 2142: + goto tr125 + case 2143: + goto tr125 + case 2144: + goto tr125 + case 2145: + goto tr125 + case 2146: + goto tr125 + case 2147: + goto tr125 + case 2148: + goto tr125 + case 2149: + goto tr125 + case 2150: + goto tr125 + case 2151: + goto tr125 + case 2152: + goto tr125 + case 2153: + goto tr125 + case 2154: + goto tr125 + case 2155: + goto tr125 + case 2156: + goto tr125 + case 2157: + goto tr125 + case 2158: + goto tr125 + case 2159: + goto tr125 + case 2160: + goto tr125 + case 2161: + goto tr125 + case 2162: + goto tr125 + case 2163: + goto tr125 + case 2164: + goto tr125 + case 2165: + goto tr125 + case 2166: + goto tr125 + case 2167: + goto tr125 + case 2168: + goto tr125 + case 2169: + goto tr125 + case 2170: + goto tr125 + case 2171: + goto tr125 + case 2172: + goto tr125 + case 2173: + goto tr125 + case 2174: + goto tr125 + case 2175: + goto tr125 + case 2176: + goto tr125 + case 2177: + goto tr125 + case 2178: + goto tr125 + case 2179: + goto tr125 + case 2180: 
+ goto tr125 + case 2181: + goto tr125 + case 2182: + goto tr125 + case 2183: + goto tr125 + case 2184: + goto tr125 + case 2185: + goto tr125 + case 2186: + goto tr125 + case 2187: + goto tr125 + case 2188: + goto tr125 + case 2189: + goto tr125 + case 2190: + goto tr125 + case 2191: + goto tr125 + case 2192: + goto tr125 + case 4874: + goto tr4562 + case 2193: + goto tr420 + case 2194: + goto tr420 + case 2195: + goto tr420 + case 2196: + goto tr420 + case 2197: + goto tr420 + case 2198: + goto tr420 + case 2199: + goto tr420 + case 2200: + goto tr420 + case 2201: + goto tr420 + case 2202: + goto tr420 + case 2203: + goto tr420 + case 2204: + goto tr420 + case 2205: + goto tr420 + case 2206: + goto tr420 + case 2207: + goto tr420 + case 2208: + goto tr420 + case 2209: + goto tr420 + case 2210: + goto tr420 + case 2211: + goto tr420 + case 2212: + goto tr420 + case 2213: + goto tr420 + case 2214: + goto tr420 + case 2215: + goto tr420 + case 2216: + goto tr420 + case 2217: + goto tr420 + case 2218: + goto tr420 + case 2219: + goto tr420 + case 2220: + goto tr420 + case 2221: + goto tr420 + case 2222: + goto tr420 + case 2223: + goto tr420 + case 2224: + goto tr420 + case 2225: + goto tr420 + case 2226: + goto tr420 + case 2227: + goto tr420 + case 2228: + goto tr420 + case 2229: + goto tr420 + case 2230: + goto tr420 + case 2231: + goto tr420 + case 2232: + goto tr420 + case 2233: + goto tr420 + case 2234: + goto tr420 + case 2235: + goto tr420 + case 2236: + goto tr420 + case 2237: + goto tr420 + case 2238: + goto tr420 + case 2239: + goto tr420 + case 2240: + goto tr420 + case 2241: + goto tr420 + case 2242: + goto tr420 + case 2243: + goto tr420 + case 2244: + goto tr420 + case 2245: + goto tr420 + case 2246: + goto tr420 + case 2247: + goto tr420 + case 2248: + goto tr420 + case 2249: + goto tr420 + case 2250: + goto tr420 + case 2251: + goto tr420 + case 2252: + goto tr420 + case 2253: + goto tr420 + case 2254: + goto tr420 + case 2255: + goto tr420 + case 
2256: + goto tr420 + case 2257: + goto tr420 + case 2258: + goto tr420 + case 2259: + goto tr420 + case 2260: + goto tr420 + case 2261: + goto tr420 + case 2262: + goto tr420 + case 2263: + goto tr420 + case 2264: + goto tr420 + case 2265: + goto tr420 + case 2266: + goto tr420 + case 2267: + goto tr420 + case 2268: + goto tr420 + case 2269: + goto tr420 + case 2270: + goto tr420 + case 2271: + goto tr420 + case 2272: + goto tr420 + case 2273: + goto tr420 + case 2274: + goto tr420 + case 2275: + goto tr420 + case 2276: + goto tr420 + case 2277: + goto tr420 + case 2278: + goto tr420 + case 2279: + goto tr420 + case 2280: + goto tr420 + case 2281: + goto tr420 + case 2282: + goto tr420 + case 2283: + goto tr420 + case 2284: + goto tr420 + case 2285: + goto tr420 + case 2286: + goto tr420 + case 2287: + goto tr420 + case 2288: + goto tr420 + case 2289: + goto tr420 + case 2290: + goto tr420 + case 2291: + goto tr420 + case 2292: + goto tr420 + case 2293: + goto tr420 + case 2294: + goto tr420 + case 2295: + goto tr420 + case 2296: + goto tr420 + case 2297: + goto tr420 + case 2298: + goto tr420 + case 2299: + goto tr420 + case 2300: + goto tr420 + case 2301: + goto tr420 + case 2302: + goto tr420 + case 2303: + goto tr420 + case 2304: + goto tr420 + case 2305: + goto tr420 + case 2306: + goto tr420 + case 2307: + goto tr420 + case 2308: + goto tr420 + case 2309: + goto tr420 + case 2310: + goto tr420 + case 2311: + goto tr420 + case 2312: + goto tr420 + case 2313: + goto tr420 + case 2314: + goto tr420 + case 2315: + goto tr420 + case 2316: + goto tr420 + case 2317: + goto tr420 + case 2318: + goto tr420 + case 2319: + goto tr420 + case 2320: + goto tr420 + case 2321: + goto tr420 + case 2322: + goto tr420 + case 2323: + goto tr420 + case 2324: + goto tr420 + case 2325: + goto tr420 + case 2326: + goto tr420 + case 2327: + goto tr420 + case 2328: + goto tr420 + case 2329: + goto tr420 + case 2330: + goto tr420 + case 2331: + goto tr420 + case 2332: + goto tr420 + 
case 2333: + goto tr420 + case 2334: + goto tr420 + case 2335: + goto tr420 + case 2336: + goto tr420 + case 2337: + goto tr420 + case 2338: + goto tr420 + case 2339: + goto tr420 + case 4875: + goto tr4562 + case 4876: + goto tr4562 + case 2340: + goto tr420 + case 2341: + goto tr420 + case 2342: + goto tr420 + case 2343: + goto tr420 + case 2344: + goto tr420 + case 2345: + goto tr420 + case 2346: + goto tr420 + case 2347: + goto tr420 + case 2348: + goto tr420 + case 2349: + goto tr420 + case 2350: + goto tr420 + case 2351: + goto tr420 + case 2352: + goto tr420 + case 2353: + goto tr420 + case 2354: + goto tr420 + case 2355: + goto tr420 + case 2356: + goto tr420 + case 2357: + goto tr420 + case 2358: + goto tr420 + case 2359: + goto tr420 + case 2360: + goto tr420 + case 2361: + goto tr420 + case 2362: + goto tr420 + case 2363: + goto tr420 + case 2364: + goto tr420 + case 2365: + goto tr420 + case 2366: + goto tr420 + case 2367: + goto tr420 + case 2368: + goto tr420 + case 2369: + goto tr420 + case 2370: + goto tr420 + case 2371: + goto tr420 + case 2372: + goto tr420 + case 2373: + goto tr420 + case 2374: + goto tr420 + case 2375: + goto tr420 + case 2376: + goto tr420 + case 2377: + goto tr420 + case 2378: + goto tr420 + case 2379: + goto tr420 + case 2380: + goto tr420 + case 2381: + goto tr420 + case 2382: + goto tr420 + case 2383: + goto tr420 + case 2384: + goto tr420 + case 2385: + goto tr420 + case 2386: + goto tr420 + case 2387: + goto tr420 + case 2388: + goto tr420 + case 2389: + goto tr420 + case 2390: + goto tr420 + case 2391: + goto tr420 + case 2392: + goto tr420 + case 2393: + goto tr420 + case 2394: + goto tr420 + case 2395: + goto tr420 + case 2396: + goto tr420 + case 2397: + goto tr420 + case 2398: + goto tr420 + case 2399: + goto tr420 + case 2400: + goto tr420 + case 2401: + goto tr420 + case 2402: + goto tr420 + case 2403: + goto tr420 + case 2404: + goto tr420 + case 2405: + goto tr420 + case 2406: + goto tr420 + case 2407: + goto 
tr420 + case 2408: + goto tr420 + case 2409: + goto tr420 + case 2410: + goto tr420 + case 2411: + goto tr420 + case 2412: + goto tr420 + case 2413: + goto tr420 + case 2414: + goto tr420 + case 2415: + goto tr420 + case 2416: + goto tr420 + case 2417: + goto tr420 + case 2418: + goto tr420 + case 2419: + goto tr420 + case 2420: + goto tr420 + case 2421: + goto tr420 + case 2422: + goto tr420 + case 2423: + goto tr420 + case 2424: + goto tr420 + case 2425: + goto tr420 + case 2426: + goto tr420 + case 2427: + goto tr420 + case 2428: + goto tr420 + case 2429: + goto tr420 + case 2430: + goto tr420 + case 2431: + goto tr420 + case 2432: + goto tr420 + case 2433: + goto tr420 + case 2434: + goto tr420 + case 2435: + goto tr420 + case 2436: + goto tr2 + case 2437: + goto tr420 + case 2438: + goto tr2 + case 2439: + goto tr420 + case 2440: + goto tr420 + case 2441: + goto tr420 + case 2442: + goto tr420 + case 2443: + goto tr420 + case 2444: + goto tr420 + case 2445: + goto tr420 + case 2446: + goto tr420 + case 2447: + goto tr420 + case 2448: + goto tr420 + case 2449: + goto tr420 + case 2450: + goto tr420 + case 2451: + goto tr420 + case 2452: + goto tr420 + case 2453: + goto tr420 + case 2454: + goto tr420 + case 2455: + goto tr420 + case 2456: + goto tr420 + case 2457: + goto tr420 + case 2458: + goto tr420 + case 2459: + goto tr420 + case 2460: + goto tr420 + case 2461: + goto tr420 + case 2462: + goto tr420 + case 2463: + goto tr420 + case 2464: + goto tr420 + case 2465: + goto tr420 + case 2466: + goto tr420 + case 2467: + goto tr420 + case 2468: + goto tr420 + case 2469: + goto tr420 + case 2470: + goto tr420 + case 2471: + goto tr420 + case 2472: + goto tr420 + case 2473: + goto tr420 + case 2474: + goto tr420 + case 2475: + goto tr420 + case 2476: + goto tr420 + case 2477: + goto tr420 + case 2478: + goto tr420 + case 2479: + goto tr420 + case 2480: + goto tr420 + case 2481: + goto tr420 + case 2482: + goto tr420 + case 2483: + goto tr420 + case 2484: + goto 
tr420 + case 2485: + goto tr420 + case 2486: + goto tr420 + case 2487: + goto tr420 + case 2488: + goto tr420 + case 2489: + goto tr420 + case 2490: + goto tr420 + case 2491: + goto tr420 + case 2492: + goto tr420 + case 2493: + goto tr420 + case 2494: + goto tr420 + case 2495: + goto tr420 + case 2496: + goto tr420 + case 2497: + goto tr420 + case 2498: + goto tr420 + case 2499: + goto tr420 + case 2500: + goto tr420 + case 2501: + goto tr420 + case 2502: + goto tr420 + case 2503: + goto tr420 + case 2504: + goto tr420 + case 2505: + goto tr420 + case 2506: + goto tr420 + case 2507: + goto tr420 + case 2508: + goto tr420 + case 2509: + goto tr420 + case 2510: + goto tr420 + case 2511: + goto tr420 + case 2512: + goto tr420 + case 2513: + goto tr420 + case 2514: + goto tr420 + case 2515: + goto tr420 + case 2516: + goto tr420 + case 2517: + goto tr420 + case 2518: + goto tr420 + case 2519: + goto tr420 + case 2520: + goto tr420 + case 2521: + goto tr420 + case 2522: + goto tr420 + case 2523: + goto tr420 + case 2524: + goto tr420 + case 2525: + goto tr420 + case 2526: + goto tr420 + case 2527: + goto tr420 + case 2528: + goto tr420 + case 2529: + goto tr420 + case 2530: + goto tr420 + case 2531: + goto tr420 + case 2532: + goto tr420 + case 2533: + goto tr420 + case 2534: + goto tr420 + case 2535: + goto tr420 + case 2536: + goto tr420 + case 2537: + goto tr420 + case 2538: + goto tr420 + case 2539: + goto tr420 + case 2540: + goto tr420 + case 2541: + goto tr420 + case 2542: + goto tr420 + case 2543: + goto tr420 + case 2544: + goto tr420 + case 2545: + goto tr420 + case 2546: + goto tr420 + case 2547: + goto tr420 + case 2548: + goto tr420 + case 2549: + goto tr420 + case 2550: + goto tr420 + case 2551: + goto tr420 + case 2552: + goto tr420 + case 2553: + goto tr420 + case 2554: + goto tr420 + case 2555: + goto tr420 + case 2556: + goto tr420 + case 2557: + goto tr420 + case 2558: + goto tr420 + case 2559: + goto tr420 + case 2560: + goto tr420 + case 2561: + 
goto tr420 + case 2562: + goto tr420 + case 2563: + goto tr420 + case 2564: + goto tr420 + case 2565: + goto tr420 + case 2566: + goto tr420 + case 2567: + goto tr420 + case 2568: + goto tr420 + case 2569: + goto tr420 + case 2570: + goto tr420 + case 2571: + goto tr420 + case 2572: + goto tr420 + case 2573: + goto tr420 + case 2574: + goto tr420 + case 2575: + goto tr420 + case 2576: + goto tr420 + case 2577: + goto tr420 + case 2578: + goto tr420 + case 2579: + goto tr420 + case 2580: + goto tr420 + case 2581: + goto tr420 + case 2582: + goto tr420 + case 2583: + goto tr420 + case 2584: + goto tr420 + case 2585: + goto tr420 + case 2586: + goto tr420 + case 2587: + goto tr420 + case 2588: + goto tr420 + case 2589: + goto tr420 + case 2590: + goto tr420 + case 2591: + goto tr420 + case 2592: + goto tr420 + case 2593: + goto tr420 + case 2594: + goto tr420 + case 2595: + goto tr420 + case 2596: + goto tr420 + case 2597: + goto tr420 + case 2598: + goto tr420 + case 2599: + goto tr420 + case 2600: + goto tr420 + case 2601: + goto tr420 + case 2602: + goto tr420 + case 2603: + goto tr420 + case 2604: + goto tr420 + case 2605: + goto tr420 + case 2606: + goto tr420 + case 2607: + goto tr420 + case 2608: + goto tr420 + case 2609: + goto tr420 + case 2610: + goto tr420 + case 2611: + goto tr420 + case 2612: + goto tr420 + case 2613: + goto tr420 + case 2614: + goto tr420 + case 2615: + goto tr420 + case 2616: + goto tr420 + case 2617: + goto tr420 + case 2618: + goto tr420 + case 2619: + goto tr420 + case 2620: + goto tr420 + case 2621: + goto tr420 + case 2622: + goto tr420 + case 2623: + goto tr420 + case 2624: + goto tr420 + case 2625: + goto tr420 + case 2626: + goto tr420 + case 2627: + goto tr420 + case 2628: + goto tr420 + case 2629: + goto tr420 + case 2630: + goto tr420 + case 2631: + goto tr420 + case 2632: + goto tr420 + case 2633: + goto tr420 + case 2634: + goto tr420 + case 2635: + goto tr420 + case 4877: + goto tr4499 + case 4878: + goto tr4763 + case 
2636: + goto tr2394 + case 2637: + goto tr2 + case 2638: + goto tr2 + case 2639: + goto tr2394 + case 2640: + goto tr2394 + case 2641: + goto tr2394 + case 2642: + goto tr2394 + case 2643: + goto tr2394 + case 2644: + goto tr2394 + case 2645: + goto tr2394 + case 2646: + goto tr2394 + case 2647: + goto tr2394 + case 2648: + goto tr2394 + case 2649: + goto tr2394 + case 2650: + goto tr2 + case 2651: + goto tr2 + case 2652: + goto tr2 + case 2653: + goto tr2 + case 2654: + goto tr2 + case 2655: + goto tr2 + case 2656: + goto tr2 + case 2657: + goto tr2 + case 2658: + goto tr2 + case 2659: + goto tr2 + case 2660: + goto tr2 + case 2661: + goto tr2 + case 2662: + goto tr2 + case 2663: + goto tr2 + case 2664: + goto tr2 + case 2665: + goto tr2 + case 2666: + goto tr2 + case 2667: + goto tr2 + case 2668: + goto tr2 + case 2669: + goto tr2 + case 2670: + goto tr2 + case 2671: + goto tr2 + case 2672: + goto tr2 + case 2673: + goto tr2 + case 2674: + goto tr2 + case 2675: + goto tr2 + case 2676: + goto tr2 + case 2677: + goto tr2 + case 2678: + goto tr2394 + case 2679: + goto tr2 + case 2680: + goto tr2 + case 2681: + goto tr2 + case 2682: + goto tr2 + case 2683: + goto tr2 + case 2684: + goto tr2 + case 2685: + goto tr2 + case 2686: + goto tr2 + case 2687: + goto tr2 + case 2688: + goto tr2 + case 2689: + goto tr2 + case 2690: + goto tr2 + case 2691: + goto tr2 + case 2692: + goto tr2 + case 2693: + goto tr2 + case 2694: + goto tr2 + case 2695: + goto tr2 + case 2696: + goto tr2 + case 2697: + goto tr2 + case 2698: + goto tr2 + case 2699: + goto tr2 + case 2700: + goto tr2394 + case 2701: + goto tr2 + case 2702: + goto tr2 + case 2703: + goto tr2 + case 2704: + goto tr2 + case 2705: + goto tr2 + case 2706: + goto tr2 + case 2707: + goto tr2394 + case 2708: + goto tr2 + case 2709: + goto tr2 + case 2710: + goto tr2394 + case 2711: + goto tr2 + case 2712: + goto tr2 + case 2713: + goto tr2 + case 2714: + goto tr2 + case 2715: + goto tr2 + case 2716: + goto tr2 + case 2717: + 
goto tr2 + case 2718: + goto tr2 + case 2719: + goto tr2 + case 2720: + goto tr2 + case 2721: + goto tr2 + case 2722: + goto tr2 + case 2723: + goto tr2 + case 2724: + goto tr2 + case 2725: + goto tr2 + case 2726: + goto tr2394 + case 2727: + goto tr2 + case 2728: + goto tr2 + case 2729: + goto tr2 + case 2730: + goto tr2394 + case 2731: + goto tr2 + case 2732: + goto tr2 + case 2733: + goto tr2 + case 2734: + goto tr2 + case 2735: + goto tr2 + case 2736: + goto tr2 + case 2737: + goto tr2 + case 2738: + goto tr2 + case 2739: + goto tr2 + case 2740: + goto tr2 + case 2741: + goto tr2 + case 2742: + goto tr2 + case 2743: + goto tr2 + case 2744: + goto tr2 + case 2745: + goto tr2 + case 2746: + goto tr2 + case 2747: + goto tr2 + case 2748: + goto tr2 + case 2749: + goto tr2 + case 2750: + goto tr2 + case 2751: + goto tr2 + case 2752: + goto tr2 + case 2753: + goto tr2 + case 2754: + goto tr2 + case 2755: + goto tr2 + case 2756: + goto tr2 + case 2757: + goto tr2 + case 2758: + goto tr2 + case 2759: + goto tr2 + case 2760: + goto tr2 + case 2761: + goto tr2 + case 2762: + goto tr2 + case 2763: + goto tr2 + case 2764: + goto tr2 + case 2765: + goto tr2 + case 2766: + goto tr2 + case 2767: + goto tr2 + case 2768: + goto tr2 + case 2769: + goto tr2 + case 2770: + goto tr2 + case 2771: + goto tr2 + case 2772: + goto tr2394 + case 2773: + goto tr2 + case 2774: + goto tr2 + case 2775: + goto tr2 + case 2776: + goto tr2 + case 4879: + goto tr4499 + case 4880: + goto tr4562 + case 4881: + goto tr4562 + case 4882: + goto tr4499 + case 4883: + goto tr4499 + case 4884: + goto tr4562 + case 4885: + goto tr4499 + case 2777: + goto tr0 + case 2778: + goto tr0 + case 2779: + goto tr0 + case 2780: + goto tr0 + case 2781: + goto tr0 + case 2782: + goto tr0 + case 2783: + goto tr0 + case 2784: + goto tr0 + case 2785: + goto tr0 + case 2786: + goto tr0 + case 2787: + goto tr0 + case 2788: + goto tr0 + case 2789: + goto tr0 + case 2790: + goto tr0 + case 2791: + goto tr0 + case 2792: + 
goto tr0 + case 2793: + goto tr0 + case 2794: + goto tr0 + case 2795: + goto tr0 + case 2796: + goto tr0 + case 2797: + goto tr0 + case 2798: + goto tr0 + case 2799: + goto tr0 + case 2800: + goto tr0 + case 2801: + goto tr0 + case 2802: + goto tr0 + case 2803: + goto tr0 + case 2804: + goto tr0 + case 2805: + goto tr0 + case 2806: + goto tr0 + case 2807: + goto tr0 + case 2808: + goto tr0 + case 2809: + goto tr0 + case 2810: + goto tr0 + case 2811: + goto tr0 + case 2812: + goto tr0 + case 2813: + goto tr0 + case 2814: + goto tr0 + case 2815: + goto tr0 + case 2816: + goto tr0 + case 2817: + goto tr0 + case 2818: + goto tr0 + case 2819: + goto tr0 + case 2820: + goto tr0 + case 2821: + goto tr0 + case 2822: + goto tr0 + case 2823: + goto tr0 + case 2824: + goto tr0 + case 2825: + goto tr0 + case 2826: + goto tr0 + case 2827: + goto tr0 + case 2828: + goto tr0 + case 2829: + goto tr0 + case 2830: + goto tr0 + case 2831: + goto tr0 + case 2832: + goto tr0 + case 2833: + goto tr0 + case 2834: + goto tr0 + case 2835: + goto tr0 + case 2836: + goto tr0 + case 2837: + goto tr0 + case 2838: + goto tr0 + case 2839: + goto tr0 + case 2840: + goto tr0 + case 2841: + goto tr0 + case 2842: + goto tr0 + case 2843: + goto tr0 + case 2844: + goto tr0 + case 2845: + goto tr0 + case 2846: + goto tr0 + case 2847: + goto tr0 + case 2848: + goto tr0 + case 2849: + goto tr0 + case 2850: + goto tr0 + case 2851: + goto tr0 + case 2852: + goto tr0 + case 2853: + goto tr0 + case 2854: + goto tr0 + case 2855: + goto tr0 + case 2856: + goto tr0 + case 2857: + goto tr0 + case 2858: + goto tr0 + case 2859: + goto tr0 + case 2860: + goto tr0 + case 2861: + goto tr0 + case 2862: + goto tr0 + case 2863: + goto tr0 + case 2864: + goto tr0 + case 2865: + goto tr0 + case 2866: + goto tr0 + case 2867: + goto tr0 + case 2868: + goto tr0 + case 2869: + goto tr0 + case 2870: + goto tr0 + case 2871: + goto tr0 + case 2872: + goto tr0 + case 2873: + goto tr0 + case 2874: + goto tr0 + case 2875: + goto 
tr0 + case 2876: + goto tr0 + case 2877: + goto tr0 + case 2878: + goto tr0 + case 2879: + goto tr0 + case 2880: + goto tr0 + case 2881: + goto tr0 + case 2882: + goto tr0 + case 2883: + goto tr0 + case 2884: + goto tr0 + case 2885: + goto tr0 + case 2886: + goto tr0 + case 2887: + goto tr0 + case 2888: + goto tr0 + case 2889: + goto tr0 + case 2890: + goto tr0 + case 2891: + goto tr0 + case 2892: + goto tr0 + case 2893: + goto tr0 + case 2894: + goto tr0 + case 2895: + goto tr0 + case 2896: + goto tr0 + case 2897: + goto tr0 + case 2898: + goto tr0 + case 2899: + goto tr0 + case 2900: + goto tr0 + case 2901: + goto tr0 + case 2902: + goto tr0 + case 2903: + goto tr0 + case 2904: + goto tr0 + case 2905: + goto tr0 + case 2906: + goto tr0 + case 2907: + goto tr0 + case 2908: + goto tr0 + case 2909: + goto tr0 + case 2910: + goto tr0 + case 2911: + goto tr0 + case 2912: + goto tr0 + case 2913: + goto tr0 + case 2914: + goto tr0 + case 2915: + goto tr0 + case 2916: + goto tr0 + case 2917: + goto tr0 + case 2918: + goto tr0 + case 2919: + goto tr0 + case 2920: + goto tr0 + case 2921: + goto tr0 + case 2922: + goto tr0 + case 2923: + goto tr0 + case 4886: + goto tr4562 + case 2924: + goto tr420 + case 2925: + goto tr420 + case 2926: + goto tr420 + case 2927: + goto tr420 + case 2928: + goto tr420 + case 2929: + goto tr420 + case 2930: + goto tr420 + case 2931: + goto tr420 + case 2932: + goto tr420 + case 2933: + goto tr420 + case 2934: + goto tr420 + case 2935: + goto tr420 + case 2936: + goto tr420 + case 2937: + goto tr420 + case 2938: + goto tr420 + case 2939: + goto tr420 + case 2940: + goto tr420 + case 2941: + goto tr420 + case 2942: + goto tr420 + case 2943: + goto tr420 + case 2944: + goto tr420 + case 2945: + goto tr420 + case 2946: + goto tr420 + case 2947: + goto tr420 + case 2948: + goto tr420 + case 2949: + goto tr420 + case 2950: + goto tr420 + case 2951: + goto tr420 + case 2952: + goto tr420 + case 2953: + goto tr420 + case 2954: + goto tr420 + case 
2955: + goto tr420 + case 2956: + goto tr420 + case 2957: + goto tr420 + case 2958: + goto tr420 + case 2959: + goto tr420 + case 2960: + goto tr420 + case 2961: + goto tr420 + case 2962: + goto tr420 + case 2963: + goto tr420 + case 2964: + goto tr420 + case 2965: + goto tr420 + case 2966: + goto tr420 + case 2967: + goto tr420 + case 2968: + goto tr420 + case 2969: + goto tr420 + case 2970: + goto tr420 + case 2971: + goto tr420 + case 2972: + goto tr420 + case 2973: + goto tr420 + case 2974: + goto tr420 + case 2975: + goto tr420 + case 2976: + goto tr420 + case 2977: + goto tr420 + case 2978: + goto tr420 + case 2979: + goto tr420 + case 2980: + goto tr420 + case 2981: + goto tr420 + case 2982: + goto tr420 + case 2983: + goto tr420 + case 2984: + goto tr420 + case 2985: + goto tr420 + case 2986: + goto tr420 + case 2987: + goto tr420 + case 2988: + goto tr420 + case 2989: + goto tr420 + case 2990: + goto tr420 + case 2991: + goto tr420 + case 2992: + goto tr420 + case 2993: + goto tr420 + case 2994: + goto tr420 + case 2995: + goto tr420 + case 2996: + goto tr420 + case 2997: + goto tr420 + case 2998: + goto tr420 + case 2999: + goto tr420 + case 3000: + goto tr420 + case 3001: + goto tr420 + case 3002: + goto tr420 + case 3003: + goto tr420 + case 3004: + goto tr420 + case 3005: + goto tr420 + case 3006: + goto tr420 + case 3007: + goto tr420 + case 3008: + goto tr420 + case 3009: + goto tr420 + case 3010: + goto tr420 + case 3011: + goto tr420 + case 3012: + goto tr420 + case 3013: + goto tr420 + case 3014: + goto tr420 + case 3015: + goto tr420 + case 3016: + goto tr420 + case 3017: + goto tr420 + case 3018: + goto tr420 + case 3019: + goto tr420 + case 3020: + goto tr420 + case 3021: + goto tr420 + case 3022: + goto tr420 + case 3023: + goto tr420 + case 3024: + goto tr420 + case 3025: + goto tr420 + case 3026: + goto tr420 + case 3027: + goto tr420 + case 3028: + goto tr420 + case 3029: + goto tr420 + case 3030: + goto tr420 + case 3031: + goto tr420 + 
case 3032: + goto tr420 + case 3033: + goto tr420 + case 3034: + goto tr420 + case 3035: + goto tr420 + case 3036: + goto tr420 + case 3037: + goto tr420 + case 3038: + goto tr420 + case 3039: + goto tr420 + case 3040: + goto tr420 + case 3041: + goto tr420 + case 3042: + goto tr420 + case 3043: + goto tr420 + case 3044: + goto tr420 + case 3045: + goto tr420 + case 3046: + goto tr420 + case 3047: + goto tr420 + case 3048: + goto tr420 + case 3049: + goto tr420 + case 3050: + goto tr420 + case 3051: + goto tr420 + case 3052: + goto tr420 + case 3053: + goto tr420 + case 3054: + goto tr420 + case 3055: + goto tr420 + case 3056: + goto tr420 + case 3057: + goto tr420 + case 3058: + goto tr420 + case 3059: + goto tr420 + case 3060: + goto tr420 + case 3061: + goto tr420 + case 3062: + goto tr420 + case 3063: + goto tr420 + case 3064: + goto tr420 + case 3065: + goto tr420 + case 3066: + goto tr420 + case 3067: + goto tr420 + case 3068: + goto tr420 + case 3069: + goto tr420 + case 3070: + goto tr420 + case 4887: + goto tr4562 + case 3071: + goto tr420 + case 3072: + goto tr420 + case 3073: + goto tr420 + case 3074: + goto tr420 + case 3075: + goto tr420 + case 3076: + goto tr420 + case 3077: + goto tr420 + case 3078: + goto tr420 + case 3079: + goto tr420 + case 3080: + goto tr420 + case 3081: + goto tr420 + case 3082: + goto tr420 + case 3083: + goto tr420 + case 3084: + goto tr420 + case 3085: + goto tr420 + case 3086: + goto tr420 + case 3087: + goto tr420 + case 3088: + goto tr420 + case 3089: + goto tr420 + case 3090: + goto tr420 + case 3091: + goto tr420 + case 3092: + goto tr420 + case 3093: + goto tr420 + case 3094: + goto tr420 + case 3095: + goto tr420 + case 3096: + goto tr420 + case 3097: + goto tr420 + case 3098: + goto tr420 + case 3099: + goto tr420 + case 3100: + goto tr420 + case 3101: + goto tr420 + case 3102: + goto tr420 + case 3103: + goto tr420 + case 3104: + goto tr420 + case 3105: + goto tr420 + case 3106: + goto tr420 + case 3107: + goto 
tr420 + case 3108: + goto tr420 + case 3109: + goto tr420 + case 3110: + goto tr420 + case 3111: + goto tr420 + case 3112: + goto tr420 + case 3113: + goto tr420 + case 3114: + goto tr420 + case 3115: + goto tr420 + case 3116: + goto tr420 + case 3117: + goto tr420 + case 3118: + goto tr420 + case 3119: + goto tr420 + case 3120: + goto tr420 + case 3121: + goto tr420 + case 3122: + goto tr420 + case 3123: + goto tr420 + case 3124: + goto tr420 + case 3125: + goto tr420 + case 3126: + goto tr420 + case 3127: + goto tr420 + case 3128: + goto tr420 + case 3129: + goto tr420 + case 3130: + goto tr420 + case 3131: + goto tr420 + case 3132: + goto tr420 + case 3133: + goto tr420 + case 3134: + goto tr420 + case 3135: + goto tr420 + case 3136: + goto tr420 + case 3137: + goto tr420 + case 3138: + goto tr420 + case 3139: + goto tr420 + case 3140: + goto tr420 + case 3141: + goto tr420 + case 3142: + goto tr420 + case 3143: + goto tr420 + case 3144: + goto tr420 + case 3145: + goto tr420 + case 3146: + goto tr420 + case 3147: + goto tr420 + case 3148: + goto tr420 + case 3149: + goto tr420 + case 3150: + goto tr420 + case 3151: + goto tr420 + case 3152: + goto tr420 + case 3153: + goto tr420 + case 3154: + goto tr420 + case 3155: + goto tr420 + case 3156: + goto tr420 + case 3157: + goto tr420 + case 3158: + goto tr420 + case 3159: + goto tr420 + case 3160: + goto tr420 + case 3161: + goto tr420 + case 3162: + goto tr420 + case 3163: + goto tr420 + case 3164: + goto tr420 + case 3165: + goto tr420 + case 3166: + goto tr420 + case 3167: + goto tr420 + case 3168: + goto tr420 + case 3169: + goto tr420 + case 3170: + goto tr420 + case 3171: + goto tr420 + case 3172: + goto tr420 + case 3173: + goto tr420 + case 3174: + goto tr420 + case 3175: + goto tr420 + case 3176: + goto tr420 + case 3177: + goto tr420 + case 3178: + goto tr420 + case 3179: + goto tr420 + case 3180: + goto tr420 + case 3181: + goto tr420 + case 3182: + goto tr420 + case 3183: + goto tr420 + case 3184: + 
goto tr420 + case 3185: + goto tr420 + case 3186: + goto tr420 + case 3187: + goto tr420 + case 3188: + goto tr420 + case 3189: + goto tr420 + case 3190: + goto tr420 + case 3191: + goto tr420 + case 3192: + goto tr420 + case 3193: + goto tr420 + case 3194: + goto tr420 + case 3195: + goto tr420 + case 3196: + goto tr420 + case 3197: + goto tr420 + case 3198: + goto tr420 + case 3199: + goto tr420 + case 3200: + goto tr420 + case 3201: + goto tr420 + case 3202: + goto tr420 + case 3203: + goto tr420 + case 3204: + goto tr420 + case 3205: + goto tr420 + case 3206: + goto tr420 + case 3207: + goto tr420 + case 3208: + goto tr420 + case 3209: + goto tr420 + case 3210: + goto tr420 + case 3211: + goto tr420 + case 3212: + goto tr420 + case 3213: + goto tr420 + case 3214: + goto tr420 + case 3215: + goto tr420 + case 3216: + goto tr420 + case 3217: + goto tr420 + case 4888: + goto tr4562 + case 4889: + goto tr4562 + case 4890: + goto tr4562 + case 4891: + goto tr4562 + case 4892: + goto tr4562 + case 4893: + goto tr4562 + case 4894: + goto tr4562 + case 4895: + goto tr4499 + case 4896: + goto tr4499 + case 4897: + goto tr4562 + case 4898: + goto tr4562 + case 4899: + goto tr4562 + case 4900: + goto tr4562 + case 4901: + goto tr4562 + case 4902: + goto tr4562 + case 4903: + goto tr4562 + case 4904: + goto tr4562 + case 4905: + goto tr4562 + case 4906: + goto tr4562 + case 4907: + goto tr4562 + case 4908: + goto tr4499 + case 4909: + goto tr4499 + case 4910: + goto tr4499 + case 4911: + goto tr4499 + case 4912: + goto tr4499 + case 4913: + goto tr4499 + case 4914: + goto tr4499 + case 4915: + goto tr4499 + case 4916: + goto tr4499 + case 4917: + goto tr4499 + case 4918: + goto tr4499 + case 4919: + goto tr4499 + case 4920: + goto tr4499 + case 4921: + goto tr4499 + case 4922: + goto tr4499 + case 4923: + goto tr4499 + case 4924: + goto tr4499 + case 4925: + goto tr4499 + case 4926: + goto tr4499 + case 4927: + goto tr4499 + case 4928: + goto tr4499 + case 3218: + goto tr0 
+ case 3219: + goto tr0 + case 3220: + goto tr0 + case 3221: + goto tr0 + case 3222: + goto tr0 + case 3223: + goto tr0 + case 3224: + goto tr0 + case 3225: + goto tr0 + case 3226: + goto tr0 + case 3227: + goto tr0 + case 3228: + goto tr0 + case 3229: + goto tr0 + case 3230: + goto tr0 + case 3231: + goto tr0 + case 4929: + goto tr4562 + case 4930: + goto tr4562 + case 4931: + goto tr4562 + case 4932: + goto tr4499 + case 3232: + goto tr0 + case 4933: + goto tr4562 + case 4934: + goto tr4562 + case 4935: + goto tr4562 + case 4936: + goto tr4562 + case 4937: + goto tr4562 + case 4938: + goto tr4562 + case 4939: + goto tr4562 + case 4940: + goto tr4499 + case 4941: + goto tr4499 + case 4942: + goto tr4499 + case 4943: + goto tr4499 + case 4944: + goto tr4562 + case 4945: + goto tr4562 + case 4946: + goto tr4562 + case 4947: + goto tr4562 + case 4948: + goto tr4562 + case 4949: + goto tr4562 + case 4950: + goto tr4562 + case 4951: + goto tr4562 + case 4952: + goto tr4562 + case 4953: + goto tr4562 + case 4954: + goto tr4562 + case 4955: + goto tr4562 + case 4956: + goto tr4562 + case 4957: + goto tr4562 + case 3233: + goto tr0 + case 4958: + goto tr4562 + case 4959: + goto tr4562 + case 4960: + goto tr4562 + case 4961: + goto tr4562 + case 4962: + goto tr4562 + case 4963: + goto tr4562 + case 3234: + goto tr0 + case 4964: + goto tr4562 + case 4965: + goto tr4562 + case 3235: + goto tr0 + case 4966: + goto tr4562 + case 4967: + goto tr4562 + case 4968: + goto tr4562 + case 4969: + goto tr4562 + case 4970: + goto tr4562 + case 4971: + goto tr4562 + case 4972: + goto tr4562 + case 4973: + goto tr4562 + case 4974: + goto tr4562 + case 4975: + goto tr4562 + case 4976: + goto tr4562 + case 4977: + goto tr4562 + case 4978: + goto tr4562 + case 4979: + goto tr4562 + case 4980: + goto tr4562 + case 3236: + goto tr0 + case 4981: + goto tr4562 + case 4982: + goto tr4562 + case 4983: + goto tr4562 + case 3237: + goto tr0 + case 4984: + goto tr4562 + case 4985: + goto tr4562 + 
case 4986: + goto tr4562 + case 4987: + goto tr4562 + case 4988: + goto tr4562 + case 4989: + goto tr4562 + case 3238: + goto tr0 + case 4990: + goto tr4562 + case 4991: + goto tr4562 + case 4992: + goto tr4562 + case 4993: + goto tr4562 + case 4994: + goto tr4562 + case 4995: + goto tr4562 + case 4996: + goto tr4562 + case 4997: + goto tr4562 + case 4998: + goto tr4562 + case 4999: + goto tr4562 + case 5000: + goto tr4562 + case 5001: + goto tr4562 + case 5002: + goto tr4562 + case 5003: + goto tr4562 + case 5004: + goto tr4562 + case 5005: + goto tr4562 + case 5006: + goto tr4562 + case 5007: + goto tr4562 + case 5008: + goto tr4562 + case 5009: + goto tr4562 + case 5010: + goto tr4562 + case 5011: + goto tr4562 + case 5012: + goto tr4562 + case 5013: + goto tr4562 + case 5014: + goto tr4562 + case 5015: + goto tr4562 + case 5016: + goto tr4499 + case 5017: + goto tr4499 + case 5018: + goto tr4763 + case 5019: + goto tr4763 + case 5020: + goto tr4499 + case 5021: + goto tr4499 + case 5022: + goto tr4499 + case 5023: + goto tr4499 + case 5024: + goto tr4499 + case 5025: + goto tr4499 + case 5026: + goto tr4499 + case 5027: + goto tr4499 + case 5028: + goto tr4499 + case 5029: + goto tr4763 + case 5030: + goto tr4763 + case 5031: + goto tr4763 + case 5032: + goto tr4763 + case 5033: + goto tr4763 + case 5034: + goto tr4763 + case 5035: + goto tr4763 + case 5036: + goto tr4763 + case 5037: + goto tr4499 + case 5038: + goto tr4499 + case 5039: + goto tr4499 + case 5040: + goto tr4499 + case 5041: + goto tr4499 + case 5042: + goto tr4499 + case 5043: + goto tr4499 + case 5044: + goto tr4499 + case 5045: + goto tr4499 + case 5046: + goto tr4499 + case 5047: + goto tr4499 + case 5048: + goto tr4763 + case 5049: + goto tr4763 + case 5050: + goto tr4763 + case 5051: + goto tr4763 + case 5052: + goto tr4763 + case 5053: + goto tr4763 + case 5054: + goto tr4763 + case 5055: + goto tr4763 + case 5056: + goto tr4763 + case 5057: + goto tr4763 + case 5058: + goto tr4763 + case 
5059: + goto tr4499 + case 5060: + goto tr4499 + case 5061: + goto tr4499 + case 5062: + goto tr4499 + case 5063: + goto tr4499 + case 5064: + goto tr4499 + case 5065: + goto tr4499 + case 5066: + goto tr4499 + case 5067: + goto tr4499 + case 5068: + goto tr4499 + case 5069: + goto tr4499 + case 5070: + goto tr4499 + case 5071: + goto tr4499 + case 3239: + goto tr0 + case 3240: + goto tr0 + case 3241: + goto tr0 + case 3242: + goto tr0 + case 3243: + goto tr0 + case 3244: + goto tr0 + case 3245: + goto tr0 + case 3246: + goto tr0 + case 3247: + goto tr0 + case 3248: + goto tr0 + case 3249: + goto tr0 + case 3250: + goto tr0 + case 3251: + goto tr0 + case 3252: + goto tr0 + case 3253: + goto tr0 + case 3254: + goto tr0 + case 3255: + goto tr0 + case 3256: + goto tr0 + case 3257: + goto tr0 + case 3258: + goto tr0 + case 3259: + goto tr0 + case 3260: + goto tr0 + case 3261: + goto tr0 + case 3262: + goto tr0 + case 3263: + goto tr0 + case 3264: + goto tr0 + case 3265: + goto tr0 + case 5072: + goto tr4499 + case 3266: + goto tr0 + case 3267: + goto tr0 + case 3268: + goto tr2 + case 5073: + goto tr5002 + case 3269: + goto tr2985 + case 3270: + goto tr2 + case 3271: + goto tr2985 + case 3272: + goto tr2985 + case 3273: + goto tr2985 + case 3274: + goto tr2985 + case 3275: + goto tr2985 + case 3276: + goto tr2985 + case 3277: + goto tr2985 + case 3278: + goto tr2985 + case 3279: + goto tr2985 + case 3280: + goto tr2985 + case 3281: + goto tr2985 + case 3282: + goto tr2985 + case 3283: + goto tr2 + case 3284: + goto tr2 + case 3285: + goto tr2 + case 3286: + goto tr2 + case 3287: + goto tr2 + case 3288: + goto tr2 + case 3289: + goto tr2 + case 3290: + goto tr2 + case 3291: + goto tr2 + case 3292: + goto tr2 + case 3293: + goto tr2 + case 3294: + goto tr2 + case 3295: + goto tr2 + case 3296: + goto tr2 + case 3297: + goto tr2 + case 3298: + goto tr2 + case 3299: + goto tr2 + case 3300: + goto tr2 + case 3301: + goto tr2 + case 3302: + goto tr2 + case 3303: + goto tr2 + 
case 3304: + goto tr2 + case 3305: + goto tr2 + case 3306: + goto tr2 + case 3307: + goto tr2 + case 3308: + goto tr2 + case 3309: + goto tr2 + case 3310: + goto tr2 + case 3311: + goto tr2 + case 3312: + goto tr2 + case 3313: + goto tr2 + case 3314: + goto tr2985 + case 3315: + goto tr2 + case 3316: + goto tr2 + case 3317: + goto tr2 + case 3318: + goto tr2 + case 3319: + goto tr2 + case 3320: + goto tr2 + case 3321: + goto tr2 + case 3322: + goto tr2 + case 3323: + goto tr2 + case 3324: + goto tr2 + case 3325: + goto tr2 + case 3326: + goto tr2 + case 3327: + goto tr2 + case 3328: + goto tr2 + case 3329: + goto tr2 + case 3330: + goto tr2 + case 3331: + goto tr2 + case 3332: + goto tr2 + case 3333: + goto tr2 + case 3334: + goto tr2 + case 3335: + goto tr2 + case 3336: + goto tr2 + case 3337: + goto tr2 + case 3338: + goto tr2985 + case 3339: + goto tr2 + case 3340: + goto tr2 + case 3341: + goto tr2 + case 3342: + goto tr2 + case 3343: + goto tr2 + case 3344: + goto tr2 + case 3345: + goto tr2985 + case 3346: + goto tr2 + case 3347: + goto tr2 + case 3348: + goto tr2 + case 3349: + goto tr2 + case 3350: + goto tr2 + case 5074: + goto tr5002 + case 3351: + goto tr2985 + case 3352: + goto tr2985 + case 3353: + goto tr2985 + case 3354: + goto tr2985 + case 3355: + goto tr2985 + case 3356: + goto tr2985 + case 3357: + goto tr2985 + case 3358: + goto tr2985 + case 3359: + goto tr2985 + case 3360: + goto tr2985 + case 3361: + goto tr2985 + case 3362: + goto tr2985 + case 3363: + goto tr2985 + case 3364: + goto tr2985 + case 3365: + goto tr2985 + case 3366: + goto tr2985 + case 3367: + goto tr2985 + case 3368: + goto tr2985 + case 3369: + goto tr2985 + case 3370: + goto tr2985 + case 3371: + goto tr2985 + case 3372: + goto tr2985 + case 3373: + goto tr2985 + case 3374: + goto tr2985 + case 3375: + goto tr2985 + case 3376: + goto tr2985 + case 3377: + goto tr2985 + case 3378: + goto tr2985 + case 3379: + goto tr2985 + case 3380: + goto tr2985 + case 3381: + goto tr2985 
+ case 3382: + goto tr2985 + case 3383: + goto tr2985 + case 3384: + goto tr2985 + case 3385: + goto tr2985 + case 3386: + goto tr2985 + case 3387: + goto tr2985 + case 3388: + goto tr2985 + case 3389: + goto tr2985 + case 3390: + goto tr2985 + case 3391: + goto tr2985 + case 3392: + goto tr2985 + case 3393: + goto tr2985 + case 3394: + goto tr2985 + case 3395: + goto tr2985 + case 3396: + goto tr2985 + case 3397: + goto tr2985 + case 3398: + goto tr2985 + case 3399: + goto tr2985 + case 3400: + goto tr2985 + case 3401: + goto tr2985 + case 3402: + goto tr2985 + case 3403: + goto tr2985 + case 3404: + goto tr2985 + case 3405: + goto tr2985 + case 3406: + goto tr2985 + case 3407: + goto tr2985 + case 3408: + goto tr2985 + case 3409: + goto tr2985 + case 3410: + goto tr2985 + case 3411: + goto tr2985 + case 3412: + goto tr2985 + case 3413: + goto tr2985 + case 3414: + goto tr2985 + case 3415: + goto tr2985 + case 3416: + goto tr2985 + case 3417: + goto tr2985 + case 3418: + goto tr2985 + case 3419: + goto tr2985 + case 3420: + goto tr2985 + case 3421: + goto tr2985 + case 3422: + goto tr2985 + case 3423: + goto tr2985 + case 3424: + goto tr2985 + case 3425: + goto tr2985 + case 3426: + goto tr2985 + case 3427: + goto tr2985 + case 3428: + goto tr2985 + case 3429: + goto tr2 + case 3430: + goto tr2985 + case 3431: + goto tr2985 + case 3432: + goto tr2985 + case 3433: + goto tr2985 + case 3434: + goto tr2985 + case 3435: + goto tr2985 + case 3436: + goto tr2985 + case 3437: + goto tr2985 + case 3438: + goto tr2985 + case 3439: + goto tr2985 + case 3440: + goto tr2985 + case 3441: + goto tr2985 + case 3442: + goto tr2985 + case 3443: + goto tr2985 + case 3444: + goto tr2985 + case 3445: + goto tr2985 + case 3446: + goto tr2985 + case 3447: + goto tr2985 + case 3448: + goto tr2985 + case 3449: + goto tr2985 + case 3450: + goto tr2985 + case 3451: + goto tr2985 + case 3452: + goto tr2985 + case 3453: + goto tr2985 + case 3454: + goto tr2985 + case 3455: + goto tr2985 + 
case 3456: + goto tr2985 + case 3457: + goto tr2985 + case 3458: + goto tr2985 + case 3459: + goto tr2985 + case 3460: + goto tr2985 + case 3461: + goto tr2985 + case 3462: + goto tr2985 + case 3463: + goto tr2985 + case 3464: + goto tr2985 + case 3465: + goto tr2985 + case 3466: + goto tr2985 + case 3467: + goto tr2985 + case 3468: + goto tr2985 + case 3469: + goto tr2985 + case 3470: + goto tr2985 + case 3471: + goto tr2985 + case 3472: + goto tr2985 + case 3473: + goto tr2985 + case 3474: + goto tr2985 + case 3475: + goto tr2985 + case 3476: + goto tr2985 + case 3477: + goto tr2985 + case 3478: + goto tr2985 + case 3479: + goto tr2985 + case 3480: + goto tr2985 + case 3481: + goto tr2985 + case 3482: + goto tr2985 + case 3483: + goto tr2985 + case 3484: + goto tr2985 + case 3485: + goto tr2985 + case 3486: + goto tr2985 + case 3487: + goto tr2985 + case 3488: + goto tr2985 + case 3489: + goto tr2985 + case 3490: + goto tr2985 + case 3491: + goto tr2985 + case 3492: + goto tr2985 + case 3493: + goto tr2985 + case 3494: + goto tr2985 + case 3495: + goto tr2985 + case 3496: + goto tr2985 + case 3497: + goto tr2985 + case 3498: + goto tr2985 + case 3499: + goto tr2985 + case 3500: + goto tr2985 + case 3501: + goto tr2 + case 3502: + goto tr2 + case 3503: + goto tr2 + case 3504: + goto tr2 + case 3505: + goto tr2 + case 3506: + goto tr2 + case 3507: + goto tr2 + case 3508: + goto tr2 + case 3509: + goto tr2 + case 3510: + goto tr2 + case 3511: + goto tr2 + case 3512: + goto tr2 + case 3513: + goto tr2 + case 3514: + goto tr2 + case 3515: + goto tr2 + case 3516: + goto tr2985 + case 3517: + goto tr2985 + case 3518: + goto tr2 + case 3519: + goto tr2 + case 3520: + goto tr2985 + case 3521: + goto tr2 + case 3522: + goto tr2 + case 3523: + goto tr2 + case 3524: + goto tr2 + case 3525: + goto tr2 + case 3526: + goto tr2985 + case 3527: + goto tr2 + case 3528: + goto tr2 + case 3529: + goto tr2 + case 3530: + goto tr2 + case 3531: + goto tr2 + case 3532: + goto tr2 + case 
3533: + goto tr2 + case 3534: + goto tr2 + case 3535: + goto tr2 + case 3536: + goto tr2 + case 3537: + goto tr2 + case 3538: + goto tr2 + case 3539: + goto tr2 + case 3540: + goto tr2 + case 3541: + goto tr2 + case 3542: + goto tr2 + case 3543: + goto tr2 + case 3544: + goto tr2 + case 3545: + goto tr2 + case 3546: + goto tr2 + case 3547: + goto tr2 + case 3548: + goto tr2 + case 3549: + goto tr2 + case 3550: + goto tr2 + case 3551: + goto tr2 + case 3552: + goto tr2 + case 3553: + goto tr2 + case 3554: + goto tr2 + case 3555: + goto tr2 + case 3556: + goto tr2 + case 3557: + goto tr2 + case 3558: + goto tr2 + case 3559: + goto tr2 + case 3560: + goto tr2 + case 3561: + goto tr2 + case 3562: + goto tr2 + case 3563: + goto tr2 + case 3564: + goto tr2 + case 3565: + goto tr2 + case 3566: + goto tr2 + case 3567: + goto tr2 + case 3568: + goto tr2985 + case 3569: + goto tr2 + case 3570: + goto tr2 + case 3571: + goto tr2 + case 3572: + goto tr2 + case 3573: + goto tr0 + case 3574: + goto tr0 + case 3575: + goto tr0 + case 3576: + goto tr0 + case 3577: + goto tr0 + case 3578: + goto tr0 + case 3579: + goto tr0 + case 3580: + goto tr0 + case 3581: + goto tr0 + case 3582: + goto tr0 + case 3583: + goto tr0 + case 3584: + goto tr0 + case 3585: + goto tr0 + case 3586: + goto tr0 + case 3587: + goto tr0 + case 5075: + goto tr4499 + case 3588: + goto tr0 + case 3589: + goto tr0 + case 3590: + goto tr0 + case 3591: + goto tr0 + case 3592: + goto tr0 + case 3593: + goto tr0 + case 5076: + goto tr5054 + case 3594: + goto tr3251 + case 3595: + goto tr2 + case 3596: + goto tr2 + case 3597: + goto tr3251 + case 3598: + goto tr3251 + case 3599: + goto tr3251 + case 3600: + goto tr3251 + case 3601: + goto tr3251 + case 3602: + goto tr3251 + case 3603: + goto tr3251 + case 3604: + goto tr3251 + case 3605: + goto tr3251 + case 3606: + goto tr3251 + case 3607: + goto tr3251 + case 3608: + goto tr2 + case 3609: + goto tr2 + case 3610: + goto tr2 + case 3611: + goto tr2 + case 3612: + 
goto tr2 + case 3613: + goto tr2 + case 3614: + goto tr2 + case 3615: + goto tr2 + case 3616: + goto tr2 + case 3617: + goto tr2 + case 3618: + goto tr2 + case 3619: + goto tr2 + case 3620: + goto tr2 + case 3621: + goto tr2 + case 3622: + goto tr2 + case 3623: + goto tr2 + case 3624: + goto tr2 + case 3625: + goto tr2 + case 3626: + goto tr2 + case 3627: + goto tr2 + case 3628: + goto tr2 + case 3629: + goto tr2 + case 3630: + goto tr2 + case 3631: + goto tr2 + case 3632: + goto tr2 + case 3633: + goto tr2 + case 3634: + goto tr2 + case 3635: + goto tr2 + case 3636: + goto tr3251 + case 3637: + goto tr2 + case 3638: + goto tr2 + case 3639: + goto tr2 + case 3640: + goto tr2 + case 3641: + goto tr2 + case 3642: + goto tr2 + case 3643: + goto tr2 + case 3644: + goto tr2 + case 3645: + goto tr2 + case 3646: + goto tr2 + case 3647: + goto tr2 + case 3648: + goto tr2 + case 3649: + goto tr2 + case 3650: + goto tr2 + case 3651: + goto tr2 + case 3652: + goto tr2 + case 3653: + goto tr2 + case 3654: + goto tr2 + case 3655: + goto tr2 + case 3656: + goto tr2 + case 3657: + goto tr2 + case 3658: + goto tr3251 + case 3659: + goto tr2 + case 3660: + goto tr2 + case 3661: + goto tr2 + case 3662: + goto tr2 + case 3663: + goto tr2 + case 3664: + goto tr2 + case 3665: + goto tr3251 + case 3666: + goto tr2 + case 3667: + goto tr2 + case 3668: + goto tr3251 + case 3669: + goto tr2 + case 3670: + goto tr2 + case 3671: + goto tr2 + case 3672: + goto tr2 + case 3673: + goto tr2 + case 3674: + goto tr2 + case 3675: + goto tr2 + case 3676: + goto tr2 + case 3677: + goto tr2 + case 3678: + goto tr2 + case 3679: + goto tr2 + case 3680: + goto tr2 + case 3681: + goto tr2 + case 3682: + goto tr2 + case 3683: + goto tr2 + case 3684: + goto tr3251 + case 3685: + goto tr2 + case 3686: + goto tr2 + case 3687: + goto tr2 + case 3688: + goto tr3251 + case 3689: + goto tr2 + case 3690: + goto tr2 + case 3691: + goto tr2 + case 3692: + goto tr2 + case 3693: + goto tr2 + case 3694: + goto tr2 + 
case 3695: + goto tr2 + case 3696: + goto tr2 + case 3697: + goto tr2 + case 3698: + goto tr2 + case 3699: + goto tr2 + case 3700: + goto tr2 + case 3701: + goto tr2 + case 3702: + goto tr2 + case 3703: + goto tr2 + case 3704: + goto tr2 + case 3705: + goto tr2 + case 3706: + goto tr2 + case 3707: + goto tr2 + case 3708: + goto tr2 + case 3709: + goto tr2 + case 3710: + goto tr2 + case 3711: + goto tr2 + case 3712: + goto tr2 + case 3713: + goto tr2 + case 3714: + goto tr2 + case 3715: + goto tr2 + case 3716: + goto tr2 + case 3717: + goto tr2 + case 3718: + goto tr2 + case 3719: + goto tr2 + case 3720: + goto tr2 + case 3721: + goto tr2 + case 3722: + goto tr2 + case 3723: + goto tr2 + case 3724: + goto tr2 + case 3725: + goto tr2 + case 3726: + goto tr2 + case 3727: + goto tr2 + case 3728: + goto tr2 + case 3729: + goto tr2 + case 3730: + goto tr3251 + case 3731: + goto tr2 + case 3732: + goto tr2 + case 3733: + goto tr2 + case 3734: + goto tr2 + case 3735: + goto tr0 + case 3736: + goto tr0 + case 5077: + goto tr4499 + case 3737: + goto tr0 + case 5078: + goto tr4562 + case 3738: + goto tr420 + case 3739: + goto tr420 + case 3740: + goto tr420 + case 3741: + goto tr420 + case 3742: + goto tr420 + case 3743: + goto tr420 + case 3744: + goto tr420 + case 3745: + goto tr420 + case 3746: + goto tr420 + case 3747: + goto tr420 + case 3748: + goto tr420 + case 3749: + goto tr420 + case 3750: + goto tr420 + case 3751: + goto tr420 + case 3752: + goto tr420 + case 3753: + goto tr420 + case 3754: + goto tr420 + case 3755: + goto tr420 + case 3756: + goto tr420 + case 3757: + goto tr420 + case 3758: + goto tr420 + case 3759: + goto tr420 + case 3760: + goto tr420 + case 3761: + goto tr420 + case 3762: + goto tr420 + case 3763: + goto tr420 + case 3764: + goto tr420 + case 3765: + goto tr420 + case 3766: + goto tr420 + case 3767: + goto tr420 + case 3768: + goto tr420 + case 3769: + goto tr420 + case 3770: + goto tr420 + case 3771: + goto tr420 + case 3772: + goto tr420 + 
case 3773: + goto tr420 + case 3774: + goto tr420 + case 3775: + goto tr420 + case 3776: + goto tr420 + case 3777: + goto tr420 + case 3778: + goto tr420 + case 3779: + goto tr420 + case 3780: + goto tr420 + case 3781: + goto tr420 + case 3782: + goto tr420 + case 3783: + goto tr420 + case 3784: + goto tr420 + case 3785: + goto tr420 + case 3786: + goto tr420 + case 3787: + goto tr420 + case 3788: + goto tr420 + case 3789: + goto tr420 + case 3790: + goto tr420 + case 3791: + goto tr420 + case 3792: + goto tr420 + case 3793: + goto tr420 + case 3794: + goto tr420 + case 3795: + goto tr420 + case 3796: + goto tr420 + case 3797: + goto tr420 + case 3798: + goto tr420 + case 3799: + goto tr420 + case 3800: + goto tr420 + case 3801: + goto tr420 + case 3802: + goto tr420 + case 3803: + goto tr420 + case 3804: + goto tr420 + case 3805: + goto tr420 + case 3806: + goto tr420 + case 3807: + goto tr420 + case 3808: + goto tr420 + case 3809: + goto tr420 + case 3810: + goto tr420 + case 3811: + goto tr420 + case 3812: + goto tr420 + case 3813: + goto tr420 + case 3814: + goto tr420 + case 3815: + goto tr420 + case 3816: + goto tr420 + case 3817: + goto tr420 + case 3818: + goto tr420 + case 3819: + goto tr420 + case 3820: + goto tr420 + case 3821: + goto tr420 + case 3822: + goto tr420 + case 3823: + goto tr420 + case 3824: + goto tr420 + case 3825: + goto tr420 + case 3826: + goto tr420 + case 3827: + goto tr420 + case 3828: + goto tr420 + case 3829: + goto tr420 + case 3830: + goto tr420 + case 3831: + goto tr420 + case 3832: + goto tr420 + case 3833: + goto tr420 + case 3834: + goto tr420 + case 3835: + goto tr420 + case 3836: + goto tr420 + case 3837: + goto tr420 + case 3838: + goto tr420 + case 3839: + goto tr420 + case 3840: + goto tr420 + case 3841: + goto tr420 + case 3842: + goto tr420 + case 3843: + goto tr420 + case 3844: + goto tr420 + case 3845: + goto tr420 + case 3846: + goto tr420 + case 3847: + goto tr420 + case 3848: + goto tr420 + case 3849: + goto tr420 
+ case 3850: + goto tr420 + case 3851: + goto tr420 + case 3852: + goto tr420 + case 3853: + goto tr420 + case 3854: + goto tr420 + case 3855: + goto tr420 + case 3856: + goto tr420 + case 3857: + goto tr420 + case 3858: + goto tr420 + case 3859: + goto tr420 + case 3860: + goto tr420 + case 3861: + goto tr420 + case 3862: + goto tr420 + case 3863: + goto tr420 + case 3864: + goto tr420 + case 3865: + goto tr420 + case 3866: + goto tr420 + case 3867: + goto tr420 + case 3868: + goto tr420 + case 3869: + goto tr420 + case 3870: + goto tr420 + case 3871: + goto tr420 + case 3872: + goto tr420 + case 3873: + goto tr420 + case 3874: + goto tr420 + case 3875: + goto tr420 + case 3876: + goto tr420 + case 3877: + goto tr420 + case 3878: + goto tr420 + case 3879: + goto tr420 + case 3880: + goto tr420 + case 3881: + goto tr420 + case 3882: + goto tr420 + case 3883: + goto tr420 + case 3884: + goto tr420 + case 5079: + goto tr5002 + case 3885: + goto tr2985 + case 3886: + goto tr2985 + case 3887: + goto tr2985 + case 3888: + goto tr2985 + case 3889: + goto tr2985 + case 3890: + goto tr2985 + case 3891: + goto tr2985 + case 3892: + goto tr2985 + case 3893: + goto tr2985 + case 3894: + goto tr2985 + case 3895: + goto tr2985 + case 3896: + goto tr2985 + case 3897: + goto tr2985 + case 3898: + goto tr2985 + case 3899: + goto tr2985 + case 3900: + goto tr2985 + case 3901: + goto tr2985 + case 3902: + goto tr2985 + case 3903: + goto tr2985 + case 3904: + goto tr2985 + case 3905: + goto tr2985 + case 3906: + goto tr2985 + case 3907: + goto tr2985 + case 3908: + goto tr2985 + case 3909: + goto tr2985 + case 3910: + goto tr2985 + case 3911: + goto tr2985 + case 3912: + goto tr2985 + case 3913: + goto tr2985 + case 3914: + goto tr2985 + case 3915: + goto tr2985 + case 3916: + goto tr2985 + case 3917: + goto tr2985 + case 3918: + goto tr2985 + case 3919: + goto tr2985 + case 3920: + goto tr2985 + case 3921: + goto tr2985 + case 3922: + goto tr2985 + case 3923: + goto tr2985 + case 
3924: + goto tr2985 + case 3925: + goto tr2985 + case 3926: + goto tr2985 + case 3927: + goto tr2985 + case 3928: + goto tr2985 + case 3929: + goto tr2985 + case 3930: + goto tr2985 + case 3931: + goto tr2985 + case 3932: + goto tr2985 + case 3933: + goto tr2985 + case 3934: + goto tr2985 + case 3935: + goto tr2985 + case 3936: + goto tr2985 + case 3937: + goto tr2985 + case 3938: + goto tr2985 + case 3939: + goto tr2985 + case 3940: + goto tr2985 + case 3941: + goto tr2985 + case 3942: + goto tr2985 + case 3943: + goto tr2985 + case 3944: + goto tr2985 + case 3945: + goto tr2985 + case 3946: + goto tr2985 + case 3947: + goto tr2985 + case 3948: + goto tr2985 + case 3949: + goto tr2985 + case 3950: + goto tr2985 + case 3951: + goto tr2985 + case 3952: + goto tr2985 + case 3953: + goto tr2985 + case 3954: + goto tr2985 + case 3955: + goto tr2985 + case 3956: + goto tr2985 + case 3957: + goto tr2985 + case 3958: + goto tr2985 + case 3959: + goto tr2985 + case 3960: + goto tr2985 + case 3961: + goto tr2985 + case 3962: + goto tr2985 + case 3963: + goto tr2985 + case 3964: + goto tr2985 + case 3965: + goto tr2985 + case 3966: + goto tr2985 + case 3967: + goto tr2985 + case 3968: + goto tr2985 + case 3969: + goto tr2985 + case 3970: + goto tr2985 + case 3971: + goto tr2985 + case 3972: + goto tr2985 + case 3973: + goto tr2985 + case 3974: + goto tr2985 + case 3975: + goto tr2985 + case 3976: + goto tr2985 + case 3977: + goto tr2985 + case 3978: + goto tr2985 + case 3979: + goto tr2985 + case 3980: + goto tr2985 + case 3981: + goto tr2985 + case 3982: + goto tr2985 + case 3983: + goto tr2985 + case 3984: + goto tr2985 + case 3985: + goto tr2985 + case 3986: + goto tr2985 + case 3987: + goto tr2985 + case 3988: + goto tr2985 + case 3989: + goto tr2985 + case 3990: + goto tr2985 + case 3991: + goto tr2985 + case 3992: + goto tr2985 + case 3993: + goto tr2985 + case 3994: + goto tr2985 + case 3995: + goto tr2985 + case 3996: + goto tr2985 + case 3997: + goto tr2985 + case 
3998: + goto tr2985 + case 3999: + goto tr2985 + case 4000: + goto tr2985 + case 4001: + goto tr2985 + case 4002: + goto tr2985 + case 4003: + goto tr2985 + case 4004: + goto tr2985 + case 4005: + goto tr2985 + case 4006: + goto tr2985 + case 4007: + goto tr2985 + case 4008: + goto tr2985 + case 4009: + goto tr2985 + case 4010: + goto tr2985 + case 4011: + goto tr2985 + case 4012: + goto tr2985 + case 4013: + goto tr2985 + case 4014: + goto tr2985 + case 4015: + goto tr2985 + case 4016: + goto tr2985 + case 4017: + goto tr2985 + case 4018: + goto tr2985 + case 4019: + goto tr2985 + case 4020: + goto tr2985 + case 4021: + goto tr2985 + case 4022: + goto tr2985 + case 4023: + goto tr2985 + case 4024: + goto tr2985 + case 4025: + goto tr2985 + case 4026: + goto tr2985 + case 5080: + goto tr5137 + case 4027: + goto tr3627 + case 4028: + goto tr2 + case 4029: + goto tr3627 + case 4030: + goto tr3627 + case 4031: + goto tr3627 + case 4032: + goto tr3627 + case 4033: + goto tr3627 + case 4034: + goto tr3627 + case 4035: + goto tr3627 + case 4036: + goto tr3627 + case 4037: + goto tr3627 + case 4038: + goto tr3627 + case 4039: + goto tr3627 + case 4040: + goto tr3627 + case 4041: + goto tr3627 + case 4042: + goto tr3627 + case 4043: + goto tr3627 + case 4044: + goto tr3627 + case 4045: + goto tr3627 + case 4046: + goto tr3627 + case 4047: + goto tr3627 + case 4048: + goto tr3627 + case 4049: + goto tr3627 + case 4050: + goto tr3627 + case 4051: + goto tr3627 + case 4052: + goto tr3627 + case 4053: + goto tr3627 + case 4054: + goto tr3627 + case 4055: + goto tr3627 + case 4056: + goto tr3627 + case 4057: + goto tr3627 + case 4058: + goto tr3627 + case 4059: + goto tr3627 + case 4060: + goto tr3627 + case 4061: + goto tr3627 + case 4062: + goto tr3627 + case 4063: + goto tr3627 + case 4064: + goto tr3627 + case 4065: + goto tr3627 + case 4066: + goto tr3627 + case 4067: + goto tr3627 + case 4068: + goto tr3627 + case 4069: + goto tr3627 + case 4070: + goto tr3627 + case 
4071: + goto tr3627 + case 4072: + goto tr3627 + case 4073: + goto tr3627 + case 4074: + goto tr3627 + case 4075: + goto tr3627 + case 4076: + goto tr3627 + case 4077: + goto tr3627 + case 4078: + goto tr3627 + case 4079: + goto tr3627 + case 4080: + goto tr3627 + case 4081: + goto tr3627 + case 4082: + goto tr3627 + case 4083: + goto tr3627 + case 4084: + goto tr3627 + case 4085: + goto tr3627 + case 4086: + goto tr3627 + case 4087: + goto tr3627 + case 4088: + goto tr3627 + case 4089: + goto tr3627 + case 4090: + goto tr3627 + case 4091: + goto tr3627 + case 4092: + goto tr3627 + case 4093: + goto tr3627 + case 4094: + goto tr3627 + case 4095: + goto tr3627 + case 4096: + goto tr3627 + case 4097: + goto tr3627 + case 4098: + goto tr3627 + case 4099: + goto tr3627 + case 4100: + goto tr3627 + case 4101: + goto tr2 + case 4102: + goto tr2 + case 4103: + goto tr2 + case 4104: + goto tr2 + case 4105: + goto tr3627 + case 4106: + goto tr3627 + case 4107: + goto tr3627 + case 4108: + goto tr3627 + case 4109: + goto tr3627 + case 4110: + goto tr3627 + case 4111: + goto tr3627 + case 4112: + goto tr3627 + case 4113: + goto tr3627 + case 4114: + goto tr3627 + case 4115: + goto tr3627 + case 4116: + goto tr3627 + case 4117: + goto tr3627 + case 4118: + goto tr3627 + case 4119: + goto tr3627 + case 4120: + goto tr3627 + case 4121: + goto tr3627 + case 4122: + goto tr3627 + case 4123: + goto tr3627 + case 4124: + goto tr3627 + case 4125: + goto tr3627 + case 4126: + goto tr3627 + case 4127: + goto tr3627 + case 4128: + goto tr3627 + case 4129: + goto tr3627 + case 4130: + goto tr3627 + case 4131: + goto tr3627 + case 4132: + goto tr3627 + case 4133: + goto tr3627 + case 4134: + goto tr3627 + case 4135: + goto tr3627 + case 4136: + goto tr3627 + case 4137: + goto tr3627 + case 4138: + goto tr3627 + case 4139: + goto tr3627 + case 4140: + goto tr3627 + case 4141: + goto tr3627 + case 4142: + goto tr3627 + case 4143: + goto tr3627 + case 4144: + goto tr3627 + case 4145: + goto 
tr3627 + case 4146: + goto tr3627 + case 4147: + goto tr3627 + case 4148: + goto tr3627 + case 4149: + goto tr3627 + case 4150: + goto tr3627 + case 4151: + goto tr3627 + case 4152: + goto tr3627 + case 4153: + goto tr3627 + case 4154: + goto tr3627 + case 4155: + goto tr3627 + case 4156: + goto tr3627 + case 4157: + goto tr3627 + case 4158: + goto tr3627 + case 4159: + goto tr3627 + case 4160: + goto tr3627 + case 4161: + goto tr3627 + case 4162: + goto tr3627 + case 4163: + goto tr3627 + case 4164: + goto tr3627 + case 4165: + goto tr3627 + case 4166: + goto tr3627 + case 4167: + goto tr3627 + case 4168: + goto tr3627 + case 4169: + goto tr3627 + case 4170: + goto tr3627 + case 4171: + goto tr3627 + case 4172: + goto tr3627 + case 4173: + goto tr3627 + case 4174: + goto tr3627 + case 4175: + goto tr0 + case 5081: + goto tr5157 + case 4176: + goto tr3758 + case 4177: + goto tr3758 + case 4178: + goto tr3758 + case 4179: + goto tr3758 + case 4180: + goto tr3758 + case 4181: + goto tr3758 + case 4182: + goto tr3758 + case 4183: + goto tr3758 + case 4184: + goto tr3758 + case 4185: + goto tr3758 + case 4186: + goto tr3758 + case 4187: + goto tr3758 + case 4188: + goto tr3758 + case 4189: + goto tr3758 + case 4190: + goto tr3758 + case 4191: + goto tr3758 + case 4192: + goto tr3758 + case 4193: + goto tr3758 + case 4194: + goto tr3758 + case 4195: + goto tr3758 + case 4196: + goto tr3758 + case 4197: + goto tr3758 + case 4198: + goto tr3758 + case 4199: + goto tr3758 + case 4200: + goto tr3758 + case 4201: + goto tr3758 + case 4202: + goto tr3758 + case 4203: + goto tr3758 + case 4204: + goto tr3758 + case 4205: + goto tr3758 + case 4206: + goto tr3758 + case 4207: + goto tr3758 + case 4208: + goto tr3758 + case 4209: + goto tr3758 + case 4210: + goto tr3758 + case 4211: + goto tr3758 + case 4212: + goto tr3758 + case 4213: + goto tr3758 + case 4214: + goto tr3758 + case 4215: + goto tr3758 + case 4216: + goto tr3758 + case 4217: + goto tr3758 + case 4218: + goto 
tr3758 + case 4219: + goto tr3758 + case 4220: + goto tr3758 + case 4221: + goto tr3758 + case 4222: + goto tr3758 + case 4223: + goto tr3758 + case 4224: + goto tr3758 + case 4225: + goto tr3758 + case 4226: + goto tr3758 + case 4227: + goto tr3758 + case 4228: + goto tr3758 + case 4229: + goto tr3758 + case 4230: + goto tr3758 + case 4231: + goto tr3758 + case 4232: + goto tr3758 + case 4233: + goto tr3758 + case 4234: + goto tr3758 + case 4235: + goto tr3758 + case 4236: + goto tr3758 + case 4237: + goto tr3758 + case 4238: + goto tr3758 + case 4239: + goto tr3758 + case 4240: + goto tr3758 + case 4241: + goto tr3758 + case 4242: + goto tr3758 + case 4243: + goto tr3758 + case 4244: + goto tr3758 + case 4245: + goto tr3758 + case 4246: + goto tr3758 + case 4247: + goto tr3758 + case 4248: + goto tr3758 + case 4249: + goto tr3758 + case 4250: + goto tr3758 + case 4251: + goto tr3758 + case 4252: + goto tr3758 + case 4253: + goto tr3758 + case 4254: + goto tr3758 + case 4255: + goto tr3758 + case 4256: + goto tr3758 + case 4257: + goto tr3758 + case 4258: + goto tr3758 + case 4259: + goto tr3758 + case 4260: + goto tr3758 + case 4261: + goto tr3758 + case 4262: + goto tr3758 + case 4263: + goto tr3758 + case 4264: + goto tr3758 + case 4265: + goto tr3758 + case 4266: + goto tr3758 + case 4267: + goto tr3758 + case 4268: + goto tr3758 + case 4269: + goto tr3758 + case 4270: + goto tr3758 + case 4271: + goto tr3758 + case 4272: + goto tr3758 + case 4273: + goto tr3758 + case 4274: + goto tr3758 + case 4275: + goto tr3758 + case 4276: + goto tr3758 + case 4277: + goto tr3758 + case 4278: + goto tr3758 + case 4279: + goto tr3758 + case 4280: + goto tr3758 + case 4281: + goto tr3758 + case 4282: + goto tr3758 + case 4283: + goto tr3758 + case 4284: + goto tr3758 + case 4285: + goto tr3758 + case 4286: + goto tr3758 + case 4287: + goto tr3758 + case 4288: + goto tr3758 + case 4289: + goto tr3758 + case 4290: + goto tr3758 + case 4291: + goto tr3758 + case 4292: + goto 
tr3758 + case 4293: + goto tr3758 + case 4294: + goto tr3758 + case 4295: + goto tr3758 + case 4296: + goto tr3758 + case 4297: + goto tr3758 + case 4298: + goto tr3758 + case 4299: + goto tr3758 + case 4300: + goto tr3758 + case 4301: + goto tr3758 + case 4302: + goto tr3758 + case 4303: + goto tr3758 + case 4304: + goto tr3758 + case 4305: + goto tr3758 + case 4306: + goto tr3758 + case 4307: + goto tr3758 + case 4308: + goto tr3758 + case 4309: + goto tr3758 + case 4310: + goto tr3758 + case 4311: + goto tr3758 + case 4312: + goto tr3758 + case 4313: + goto tr3758 + case 4314: + goto tr3758 + case 4315: + goto tr3758 + case 4316: + goto tr3758 + case 4317: + goto tr0 + case 4318: + goto tr0 + case 5082: + goto tr5054 + case 4319: + goto tr3251 + case 4320: + goto tr3251 + case 4321: + goto tr3251 + case 4322: + goto tr3251 + case 4323: + goto tr3251 + case 4324: + goto tr3251 + case 4325: + goto tr3251 + case 4326: + goto tr3251 + case 4327: + goto tr3251 + case 4328: + goto tr3251 + case 4329: + goto tr3251 + case 4330: + goto tr3251 + case 4331: + goto tr3251 + case 4332: + goto tr3251 + case 4333: + goto tr3251 + case 4334: + goto tr3251 + case 4335: + goto tr3251 + case 4336: + goto tr3251 + case 4337: + goto tr3251 + case 4338: + goto tr3251 + case 4339: + goto tr3251 + case 4340: + goto tr3251 + case 4341: + goto tr3251 + case 4342: + goto tr3251 + case 4343: + goto tr3251 + case 4344: + goto tr3251 + case 4345: + goto tr3251 + case 4346: + goto tr3251 + case 4347: + goto tr3251 + case 4348: + goto tr3251 + case 4349: + goto tr3251 + case 4350: + goto tr3251 + case 4351: + goto tr3251 + case 4352: + goto tr3251 + case 4353: + goto tr3251 + case 4354: + goto tr3251 + case 4355: + goto tr3251 + case 4356: + goto tr3251 + case 4357: + goto tr3251 + case 4358: + goto tr3251 + case 4359: + goto tr3251 + case 4360: + goto tr3251 + case 4361: + goto tr3251 + case 4362: + goto tr3251 + case 4363: + goto tr3251 + case 4364: + goto tr3251 + case 4365: + goto tr3251 
+ case 4366: + goto tr3251 + case 4367: + goto tr3251 + case 4368: + goto tr3251 + case 4369: + goto tr3251 + case 4370: + goto tr3251 + case 4371: + goto tr3251 + case 4372: + goto tr3251 + case 4373: + goto tr3251 + case 4374: + goto tr3251 + case 4375: + goto tr3251 + case 4376: + goto tr3251 + case 4377: + goto tr3251 + case 4378: + goto tr3251 + case 4379: + goto tr3251 + case 4380: + goto tr3251 + case 4381: + goto tr3251 + case 4382: + goto tr3251 + case 4383: + goto tr3251 + case 4384: + goto tr3251 + case 4385: + goto tr3251 + case 4386: + goto tr3251 + case 4387: + goto tr3251 + case 4388: + goto tr3251 + case 4389: + goto tr3251 + case 4390: + goto tr3251 + case 4391: + goto tr3251 + case 4392: + goto tr3251 + case 4393: + goto tr3251 + case 4394: + goto tr3251 + case 4395: + goto tr3251 + case 4396: + goto tr3251 + case 4397: + goto tr3251 + case 4398: + goto tr3251 + case 4399: + goto tr3251 + case 4400: + goto tr3251 + case 4401: + goto tr3251 + case 4402: + goto tr3251 + case 4403: + goto tr3251 + case 4404: + goto tr3251 + case 4405: + goto tr3251 + case 4406: + goto tr3251 + case 4407: + goto tr3251 + case 4408: + goto tr3251 + case 4409: + goto tr3251 + case 4410: + goto tr3251 + case 4411: + goto tr3251 + case 4412: + goto tr3251 + case 4413: + goto tr3251 + case 4414: + goto tr3251 + case 4415: + goto tr3251 + case 4416: + goto tr3251 + case 4417: + goto tr3251 + case 4418: + goto tr3251 + case 4419: + goto tr3251 + case 4420: + goto tr3251 + case 4421: + goto tr3251 + case 4422: + goto tr3251 + case 4423: + goto tr3251 + case 4424: + goto tr3251 + case 4425: + goto tr3251 + case 4426: + goto tr3251 + case 4427: + goto tr3251 + case 4428: + goto tr3251 + case 4429: + goto tr3251 + case 4430: + goto tr3251 + case 4431: + goto tr3251 + case 4432: + goto tr3251 + case 4433: + goto tr3251 + case 4434: + goto tr3251 + case 4435: + goto tr3251 + case 4436: + goto tr3251 + case 4437: + goto tr3251 + case 4438: + goto tr3251 + case 4439: + goto tr3251 + 
case 4440: + goto tr3251 + case 4441: + goto tr3251 + case 4442: + goto tr3251 + case 4443: + goto tr3251 + case 4444: + goto tr3251 + case 4445: + goto tr3251 + case 4446: + goto tr3251 + case 4447: + goto tr3251 + case 4448: + goto tr3251 + case 4449: + goto tr3251 + case 4450: + goto tr3251 + case 4451: + goto tr3251 + case 4452: + goto tr3251 + case 4453: + goto tr3251 + case 4454: + goto tr3251 + case 4455: + goto tr3251 + case 4456: + goto tr3251 + case 4457: + goto tr3251 + case 4458: + goto tr3251 + case 4459: + goto tr3251 + case 4460: + goto tr0 + case 4461: + goto tr0 + case 4462: + goto tr0 + case 4463: + goto tr0 + case 4464: + goto tr0 + case 4465: + goto tr0 + case 4466: + goto tr0 + case 4467: + goto tr0 + case 4468: + goto tr0 + case 4469: + goto tr0 + case 4470: + goto tr0 + case 4471: + goto tr0 + case 4472: + goto tr0 + case 5083: + goto tr5054 + case 5084: + goto tr5054 + case 5085: + goto tr5054 + case 5086: + goto tr4499 + case 5087: + goto tr4499 + case 5088: + goto tr5054 + case 5089: + goto tr5054 + case 5090: + goto tr4499 + case 5091: + goto tr4499 + case 5092: + goto tr4499 + case 5093: + goto tr4499 + case 5094: + goto tr4499 + case 5095: + goto tr4499 + case 5096: + goto tr4499 + case 5097: + goto tr4499 + case 5098: + goto tr5054 + case 5099: + goto tr5054 + case 5100: + goto tr5054 + case 5101: + goto tr5054 + case 5102: + goto tr5054 + case 5103: + goto tr5054 + case 5104: + goto tr5054 + case 5105: + goto tr5054 + case 5106: + goto tr4499 + case 5107: + goto tr4499 + case 5108: + goto tr4499 + case 5109: + goto tr4499 + case 5110: + goto tr4499 + case 5111: + goto tr4499 + case 5112: + goto tr4499 + case 5113: + goto tr4499 + case 5114: + goto tr4499 + case 5115: + goto tr4499 + case 5116: + goto tr4499 + case 5117: + goto tr5054 + case 5118: + goto tr5054 + case 5119: + goto tr5054 + case 5120: + goto tr5054 + case 5121: + goto tr5054 + case 5122: + goto tr5054 + case 5123: + goto tr5054 + case 5124: + goto tr5054 + case 5125: + 
goto tr5054 + case 5126: + goto tr5054 + case 5127: + goto tr5054 + case 5128: + goto tr5054 + case 5129: + goto tr5054 + case 5130: + goto tr5054 + case 5131: + goto tr5054 + case 5132: + goto tr5054 + case 5133: + goto tr5054 + case 5134: + goto tr5054 + case 5135: + goto tr5054 + case 5136: + goto tr5054 + case 5137: + goto tr5054 + case 5138: + goto tr5054 + case 5139: + goto tr5054 + case 5140: + goto tr5054 + case 5141: + goto tr5054 + case 5142: + goto tr5054 + case 5143: + goto tr5054 + case 5144: + goto tr5054 + case 5145: + goto tr5054 + case 5146: + goto tr5054 + case 5147: + goto tr5054 + case 5148: + goto tr5054 + case 5149: + goto tr5054 + case 5150: + goto tr5054 + case 5151: + goto tr5054 + case 5152: + goto tr5054 + case 4473: + goto tr0 + case 5153: + goto tr5054 + case 5154: + goto tr5054 + case 5155: + goto tr5054 + case 5156: + goto tr5054 + case 5157: + goto tr5054 + case 5158: + goto tr5054 + case 5159: + goto tr5054 + case 5160: + goto tr5054 + case 5161: + goto tr5054 + case 5162: + goto tr5054 + case 5163: + goto tr5054 + case 5164: + goto tr5054 + case 5165: + goto tr5054 + case 5166: + goto tr5054 + case 5167: + goto tr5054 + case 5168: + goto tr5054 + case 5169: + goto tr5054 + case 5170: + goto tr5054 + case 5171: + goto tr5054 + case 5172: + goto tr5054 + case 5173: + goto tr5054 + case 4474: + goto tr0 + case 5174: + goto tr5054 + case 5175: + goto tr5054 + case 5176: + goto tr5054 + case 5177: + goto tr5054 + case 5178: + goto tr5054 + case 5179: + goto tr5054 + case 4475: + goto tr0 + case 5180: + goto tr5054 + case 5181: + goto tr5054 + case 4476: + goto tr0 + case 5182: + goto tr5054 + case 5183: + goto tr5054 + case 5184: + goto tr5054 + case 5185: + goto tr5054 + case 5186: + goto tr5054 + case 5187: + goto tr5054 + case 5188: + goto tr5054 + case 5189: + goto tr5054 + case 5190: + goto tr5054 + case 5191: + goto tr5054 + case 5192: + goto tr5054 + case 5193: + goto tr5054 + case 5194: + goto tr5054 + case 5195: + goto tr5054 + 
case 5196: + goto tr5054 + case 4477: + goto tr0 + case 5197: + goto tr5054 + case 5198: + goto tr5054 + case 5199: + goto tr5054 + case 4478: + goto tr0 + case 5200: + goto tr5054 + case 5201: + goto tr5054 + case 5202: + goto tr5054 + case 5203: + goto tr5054 + case 5204: + goto tr5054 + case 5205: + goto tr5054 + case 4479: + goto tr0 + case 5206: + goto tr5054 + case 5207: + goto tr4499 + case 4480: + goto tr0 + case 5208: + goto tr4499 + case 5209: + goto tr4499 + case 5210: + goto tr4499 + case 4481: + goto tr0 + case 4482: + goto tr0 + case 4483: + goto tr0 + case 4484: + goto tr0 + case 4485: + goto tr0 + case 4486: + goto tr0 + case 4487: + goto tr0 + case 4488: + goto tr0 + case 4489: + goto tr0 + case 4490: + goto tr0 + case 4491: + goto tr0 + case 4492: + goto tr0 + case 4493: + goto tr0 + case 4494: + goto tr0 + case 4495: + goto tr0 + case 5211: + goto tr5002 + case 4496: + goto tr2985 + case 4497: + goto tr2985 + case 4498: + goto tr2985 + case 4499: + goto tr2985 + case 4500: + goto tr2985 + case 4501: + goto tr2985 + case 4502: + goto tr2985 + case 4503: + goto tr2985 + case 4504: + goto tr2985 + case 4505: + goto tr2985 + case 4506: + goto tr2985 + case 4507: + goto tr2985 + case 4508: + goto tr2985 + case 4509: + goto tr2985 + case 4510: + goto tr2985 + case 4511: + goto tr2985 + case 4512: + goto tr2985 + case 4513: + goto tr2985 + case 4514: + goto tr2985 + case 4515: + goto tr2985 + case 4516: + goto tr2985 + case 4517: + goto tr2985 + case 4518: + goto tr2985 + case 4519: + goto tr2985 + case 4520: + goto tr2985 + case 4521: + goto tr2985 + case 4522: + goto tr2985 + case 4523: + goto tr2985 + case 4524: + goto tr2985 + case 4525: + goto tr2985 + case 4526: + goto tr2985 + case 4527: + goto tr2985 + case 4528: + goto tr2985 + case 4529: + goto tr2985 + case 4530: + goto tr2985 + case 4531: + goto tr2985 + case 4532: + goto tr2985 + case 4533: + goto tr2985 + case 4534: + goto tr2985 + case 4535: + goto tr2985 + case 4536: + goto tr2985 + case 
4537: + goto tr2985 + case 4538: + goto tr2985 + case 4539: + goto tr2985 + case 4540: + goto tr2985 + case 4541: + goto tr2985 + case 4542: + goto tr2985 + case 4543: + goto tr2985 + case 4544: + goto tr2985 + case 4545: + goto tr2985 + case 4546: + goto tr2985 + case 4547: + goto tr2985 + case 4548: + goto tr2985 + case 4549: + goto tr2985 + case 4550: + goto tr2985 + case 4551: + goto tr2985 + case 4552: + goto tr2985 + case 4553: + goto tr2985 + case 4554: + goto tr2985 + case 4555: + goto tr2985 + case 4556: + goto tr2985 + case 4557: + goto tr2985 + case 4558: + goto tr2985 + case 4559: + goto tr2985 + case 4560: + goto tr2985 + case 4561: + goto tr2985 + case 4562: + goto tr2985 + case 4563: + goto tr2985 + case 4564: + goto tr2985 + case 4565: + goto tr2985 + case 4566: + goto tr2985 + case 4567: + goto tr2985 + case 4568: + goto tr2985 + case 4569: + goto tr2985 + case 4570: + goto tr2985 + case 4571: + goto tr2985 + case 4572: + goto tr2985 + case 4573: + goto tr2985 + case 4574: + goto tr2985 + case 4575: + goto tr2985 + case 4576: + goto tr2985 + case 4577: + goto tr2985 + case 4578: + goto tr2985 + case 4579: + goto tr2985 + case 4580: + goto tr2985 + case 4581: + goto tr2985 + case 4582: + goto tr2985 + case 4583: + goto tr2985 + case 4584: + goto tr2985 + case 4585: + goto tr2985 + case 4586: + goto tr2985 + case 4587: + goto tr2985 + case 4588: + goto tr2985 + case 4589: + goto tr2985 + case 4590: + goto tr2985 + case 4591: + goto tr2985 + case 4592: + goto tr2985 + case 4593: + goto tr2985 + case 4594: + goto tr2985 + case 4595: + goto tr2985 + case 4596: + goto tr2985 + case 4597: + goto tr2985 + case 4598: + goto tr2985 + case 4599: + goto tr2985 + case 4600: + goto tr2985 + case 4601: + goto tr2985 + case 4602: + goto tr2985 + case 4603: + goto tr2985 + case 4604: + goto tr2985 + case 4605: + goto tr2985 + case 4606: + goto tr2985 + case 4607: + goto tr2985 + case 4608: + goto tr2985 + case 4609: + goto tr2985 + case 4610: + goto tr2985 + case 
4611: + goto tr2985 + case 4612: + goto tr2985 + case 4613: + goto tr2985 + case 4614: + goto tr2985 + case 4615: + goto tr2985 + case 4616: + goto tr2985 + case 4617: + goto tr2985 + case 4618: + goto tr2985 + case 4619: + goto tr2985 + case 4620: + goto tr2985 + case 4621: + goto tr2985 + case 4622: + goto tr2985 + case 4623: + goto tr2985 + case 4624: + goto tr2985 + case 4625: + goto tr2985 + case 4626: + goto tr2985 + case 4627: + goto tr2985 + case 4628: + goto tr2985 + case 4629: + goto tr2985 + case 4630: + goto tr2985 + case 4631: + goto tr2985 + case 4632: + goto tr2985 + case 4633: + goto tr2985 + case 4634: + goto tr2985 + case 4635: + goto tr2985 + case 4636: + goto tr2985 + case 4637: + goto tr2985 + case 4638: + goto tr2985 + case 4639: + goto tr2985 + case 4640: + goto tr2985 + case 4641: + goto tr2985 + case 4642: + goto tr2985 + case 4643: + goto tr0 + case 4644: + goto tr0 + case 4645: + goto tr0 + case 4646: + goto tr0 + case 4647: + goto tr0 + case 4648: + goto tr0 + case 4649: + goto tr0 + case 4650: + goto tr0 + case 4651: + goto tr0 + case 4652: + goto tr0 + case 4653: + goto tr0 + case 4654: + goto tr0 + case 4655: + goto tr0 + case 5212: + goto tr5002 + case 5213: + goto tr5002 + case 5214: + goto tr5002 + case 5215: + goto tr4499 + case 5216: + goto tr4499 + case 5217: + goto tr5002 + case 5218: + goto tr5002 + case 5219: + goto tr4499 + case 5220: + goto tr4499 + case 5221: + goto tr4499 + case 5222: + goto tr4499 + case 5223: + goto tr4499 + case 5224: + goto tr4499 + case 5225: + goto tr4499 + case 5226: + goto tr4499 + case 5227: + goto tr5002 + case 5228: + goto tr5002 + case 5229: + goto tr5002 + case 5230: + goto tr5002 + case 5231: + goto tr5002 + case 5232: + goto tr5002 + case 5233: + goto tr5002 + case 5234: + goto tr5002 + case 5235: + goto tr4499 + case 5236: + goto tr4499 + case 5237: + goto tr4499 + case 5238: + goto tr4499 + case 5239: + goto tr4499 + case 5240: + goto tr4499 + case 5241: + goto tr4499 + case 5242: + goto 
tr4499 + case 4656: + goto tr0 + case 5243: + goto tr5002 + case 5244: + goto tr5002 + case 5245: + goto tr5002 + case 5246: + goto tr5002 + case 5247: + goto tr5002 + case 5248: + goto tr5002 + case 5249: + goto tr5002 + case 5250: + goto tr4499 + case 4657: + goto tr0 + case 5251: + goto tr5002 + case 5252: + goto tr5002 + case 5253: + goto tr5002 + case 5254: + goto tr5002 + case 5255: + goto tr5002 + case 5256: + goto tr5002 + case 4658: + goto tr0 + case 5257: + goto tr5002 + case 5258: + goto tr5002 + case 4659: + goto tr0 + case 5259: + goto tr5002 + case 5260: + goto tr5002 + case 5261: + goto tr5002 + case 5262: + goto tr5002 + case 5263: + goto tr5002 + case 5264: + goto tr4499 + case 5265: + goto tr4499 + case 5266: + goto tr4499 + case 5267: + goto tr4499 + case 5268: + goto tr5002 + case 5269: + goto tr5002 + case 5270: + goto tr5002 + case 5271: + goto tr5002 + case 5272: + goto tr5002 + case 5273: + goto tr5002 + case 5274: + goto tr5002 + case 5275: + goto tr5002 + case 5276: + goto tr5002 + case 5277: + goto tr5002 + case 4660: + goto tr0 + case 5278: + goto tr5002 + case 5279: + goto tr5002 + case 5280: + goto tr5002 + case 4661: + goto tr0 + case 5281: + goto tr5002 + case 5282: + goto tr5002 + case 5283: + goto tr5002 + case 5284: + goto tr5002 + case 5285: + goto tr5002 + case 5286: + goto tr5002 + case 4662: + goto tr0 + case 5287: + goto tr5002 + case 5288: + goto tr5002 + case 5289: + goto tr5002 + case 5290: + goto tr5002 + case 5291: + goto tr5002 + case 5292: + goto tr5002 + case 5293: + goto tr5002 + case 5294: + goto tr5002 + case 5295: + goto tr5002 + case 5296: + goto tr5002 + case 5297: + goto tr5002 + case 5298: + goto tr5002 + case 5299: + goto tr5002 + case 5300: + goto tr5002 + case 5301: + goto tr5002 + case 5302: + goto tr5002 + case 5303: + goto tr5002 + case 5304: + goto tr5002 + case 5305: + goto tr5002 + case 5306: + goto tr5002 + case 5307: + goto tr5002 + case 5308: + goto tr5002 + case 5309: + goto tr5002 + case 5310: + 
goto tr5002 + case 5311: + goto tr5002 + case 5312: + goto tr5002 + case 5313: + goto tr5002 + case 5314: + goto tr5002 + case 5315: + goto tr5002 + case 5316: + goto tr5002 + case 5317: + goto tr5002 + case 5318: + goto tr5002 + case 5319: + goto tr5002 + case 5320: + goto tr5002 + case 5321: + goto tr5002 + case 5322: + goto tr5002 + case 5323: + goto tr5002 + case 5324: + goto tr5002 + case 5325: + goto tr5002 + case 5326: + goto tr5002 + case 5327: + goto tr5002 + case 5328: + goto tr5002 + case 5329: + goto tr5002 + case 5330: + goto tr5002 + case 5331: + goto tr5002 + case 5332: + goto tr5002 + case 5333: + goto tr5002 + case 5334: + goto tr5002 + case 5335: + goto tr5002 + case 5336: + goto tr5002 + case 5337: + goto tr5002 + case 5338: + goto tr4499 + case 4663: + goto tr0 + case 4664: + goto tr0 + case 4665: + goto tr0 + case 4666: + goto tr0 + case 4667: + goto tr0 + case 4668: + goto tr0 + case 4669: + goto tr0 + case 4670: + goto tr0 + case 5339: + goto tr4499 + case 4671: + goto tr0 + case 4672: + goto tr0 + case 4673: + goto tr0 + case 4674: + goto tr0 + case 4675: + goto tr0 + case 4676: + goto tr0 + case 4677: + goto tr0 + case 4678: + goto tr0 + case 4679: + goto tr0 + case 4680: + goto tr0 + case 4681: + goto tr0 + case 4682: + goto tr0 + case 4683: + goto tr0 + case 4684: + goto tr0 + case 4685: + goto tr0 + case 4686: + goto tr0 + case 4687: + goto tr0 + case 4688: + goto tr0 + case 4689: + goto tr0 + case 4690: + goto tr0 + case 4691: + goto tr0 + case 4692: + goto tr0 + case 4693: + goto tr0 + case 4694: + goto tr0 + case 4695: + goto tr0 + case 4696: + goto tr0 + case 4697: + goto tr0 + case 4698: + goto tr0 + case 4699: + goto tr0 + case 4700: + goto tr0 + case 4701: + goto tr0 + case 4702: + goto tr0 + case 4703: + goto tr0 + case 4704: + goto tr0 + case 4705: + goto tr0 + case 4706: + goto tr0 + case 4707: + goto tr2 + case 5340: + goto tr5359 + case 4708: + goto tr4328 + case 4709: + goto tr4328 + case 4710: + goto tr4328 + case 4711: + 
goto tr4328 + case 4712: + goto tr4328 + case 4713: + goto tr4328 + case 4714: + goto tr4328 + case 4715: + goto tr4328 + case 4716: + goto tr4328 + case 4717: + goto tr4328 + case 4718: + goto tr4328 + case 4719: + goto tr4328 + case 4720: + goto tr4328 + case 4721: + goto tr4328 + case 4722: + goto tr4328 + case 4723: + goto tr4328 + case 4724: + goto tr4328 + case 4725: + goto tr4328 + case 4726: + goto tr4328 + case 4727: + goto tr4328 + case 4728: + goto tr4328 + case 4729: + goto tr4328 + case 4730: + goto tr4328 + case 4731: + goto tr4328 + case 4732: + goto tr4328 + case 4733: + goto tr4328 + case 4734: + goto tr4328 + case 4735: + goto tr4328 + case 4736: + goto tr4328 + case 4737: + goto tr4328 + case 4738: + goto tr4328 + case 4739: + goto tr4328 + case 4740: + goto tr4328 + case 4741: + goto tr4328 + case 4742: + goto tr4328 + case 4743: + goto tr4328 + case 4744: + goto tr4328 + case 4745: + goto tr4328 + case 4746: + goto tr4328 + case 4747: + goto tr4328 + case 4748: + goto tr4328 + case 4749: + goto tr4328 + case 4750: + goto tr4328 + case 4751: + goto tr4328 + case 4752: + goto tr4328 + case 4753: + goto tr4328 + case 4754: + goto tr4328 + case 4755: + goto tr4328 + case 4756: + goto tr4328 + case 4757: + goto tr4328 + case 4758: + goto tr4328 + case 4759: + goto tr4328 + case 4760: + goto tr4328 + case 4761: + goto tr4328 + case 4762: + goto tr4328 + case 4763: + goto tr4328 + case 4764: + goto tr4328 + case 4765: + goto tr4328 + case 4766: + goto tr4328 + case 4767: + goto tr4328 + case 4768: + goto tr4328 + case 4769: + goto tr4328 + case 4770: + goto tr4328 + case 4771: + goto tr4328 + case 4772: + goto tr4328 + case 4773: + goto tr4328 + case 4774: + goto tr4328 + case 4775: + goto tr4328 + case 4776: + goto tr4328 + case 4777: + goto tr4328 + case 4778: + goto tr4328 + case 4779: + goto tr4328 + case 4780: + goto tr4328 + case 4781: + goto tr4328 + case 4782: + goto tr4328 + case 4783: + goto tr4328 + case 4784: + goto tr4328 + case 4785: + 
goto tr4328 + case 4786: + goto tr4328 + case 4787: + goto tr4328 + case 4788: + goto tr4328 + case 4789: + goto tr4328 + case 4790: + goto tr4328 + case 4791: + goto tr4328 + case 4792: + goto tr4328 + case 4793: + goto tr4328 + case 4794: + goto tr4328 + case 4795: + goto tr4328 + case 4796: + goto tr4328 + case 4797: + goto tr4328 + case 4798: + goto tr4328 + case 4799: + goto tr4328 + case 4800: + goto tr4328 + case 4801: + goto tr4328 + case 4802: + goto tr4328 + case 4803: + goto tr4328 + case 4804: + goto tr4328 + case 4805: + goto tr4328 + case 4806: + goto tr4328 + case 4807: + goto tr4328 + case 4808: + goto tr4328 + case 4809: + goto tr4328 + case 4810: + goto tr4328 + case 4811: + goto tr4328 + case 4812: + goto tr4328 + case 4813: + goto tr4328 + case 4814: + goto tr4328 + case 4815: + goto tr4328 + case 4816: + goto tr4328 + case 4817: + goto tr4328 + case 4818: + goto tr4328 + case 4819: + goto tr4328 + case 4820: + goto tr4328 + case 4821: + goto tr4328 + case 4822: + goto tr4328 + case 4823: + goto tr4328 + case 4824: + goto tr4328 + case 4825: + goto tr4328 + case 4826: + goto tr4328 + case 4827: + goto tr4328 + case 4828: + goto tr4328 + case 4829: + goto tr4328 + case 4830: + goto tr4328 + case 4831: + goto tr4328 + case 4832: + goto tr4328 + case 4833: + goto tr4328 + case 4834: + goto tr4328 + case 4835: + goto tr4328 + case 4836: + goto tr4328 + case 4837: + goto tr4328 + case 4838: + goto tr4328 + case 4839: + goto tr4328 + case 4840: + goto tr4328 + case 4841: + goto tr4328 + case 4842: + goto tr4328 + case 4843: + goto tr4328 + case 4844: + goto tr4328 + case 4845: + goto tr4328 + case 4846: + goto tr4328 + case 4847: + goto tr4328 + case 4848: + goto tr4328 + case 4849: + goto tr4328 + case 4850: + goto tr0 + case 4851: + goto tr0 + case 4852: + goto tr0 + case 4853: + goto tr0 + case 4854: + goto tr0 + case 4855: + goto tr0 + case 4856: + goto tr0 + case 4857: + goto tr0 + case 4858: + goto tr0 + case 4859: + goto tr0 + case 4860: + goto 
tr0 + case 4861: + goto tr0 + case 4862: +//line segment_words.rl:68 + + startPos = p + +//line segment_words_prod.go:173630 + } + } + + } + +//line segment_words.rl:278 + + + if cs < s_first_final { + return val, types, totalConsumed, ParseError + } + + return val, types, totalConsumed, nil +} diff --git a/vendor/github.com/blevesearch/snowballstem/COPYING b/vendor/github.com/blevesearch/snowballstem/COPYING new file mode 100644 index 0000000..f36607f --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/COPYING @@ -0,0 +1,29 @@ +Copyright (c) 2001, Dr Martin Porter +Copyright (c) 2004,2005, Richard Boulton +Copyright (c) 2013, Yoshiki Shibukawa +Copyright (c) 2006,2007,2009,2010,2011,2014-2019, Olly Betts +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + 3. Neither the name of the Snowball project nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/blevesearch/snowballstem/README.md b/vendor/github.com/blevesearch/snowballstem/README.md new file mode 100644 index 0000000..bb4ff8a --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/README.md @@ -0,0 +1,66 @@ +# snowballstem + +This repository contains the Go stemmers generated by the [Snowball](https://github.com/snowballstem/snowball) project. They are maintained outside of the core bleve package so that they may be more easily be reused in other contexts. + +## Usage + +All these stemmers export a single `Stem()` method which operates on a snowball `Env` structure. The `Env` structure maintains all state for the stemmer. A new `Env` is created to point at an initial string. After stemming, the results of the `Stem()` operation can be retrieved using the `Current()` method. The `Env` structure can be reused for subsequent calls by using the `SetCurrent()` method. 
+ +## Example + +``` +package main + +import ( + "fmt" + + "github.com/blevesearch/snowballstem" + "github.com/blevesearch/snowballstem/english" +) + +func main() { + + // words to stem + words := []string{ + "running", + "jumping", + } + + // build new environment + env := snowballstem.NewEnv("") + + for _, word := range words { + // set up environment for word + env.SetCurrent(word) + // invoke stemmer + english.Stem(env) + // print results + fmt.Printf("%s stemmed to %s\n", word, env.Current()) + } +} +``` +Produces Output: +``` +$ ./snowtest +running stemmed to run +jumping stemmed to jump +``` + +## Testing + +The test harness for these stemmers is hosted in the main [Snowball](https://github.com/snowballstem/snowball) repository. There are functional tests built around the separate [snowballstem-data](https://github.com/snowballstem/snowball-data) repository, and there is support for fuzz-testing the stemmers there as well. + +## Generating the Stemmers + +``` +$ export SNOWBALL=/path/to/github.com/snowballstem/snowball/after/snowball/built +$ go generate +``` + +## Updated the Go Generate Commands + +A simple tool is provided to automate these from the snowball algorithms directory: + +``` +$ go run gengen.go /path/to/github.com/snowballstem/snowball/algorithms +``` diff --git a/vendor/github.com/blevesearch/snowballstem/among.go b/vendor/github.com/blevesearch/snowballstem/among.go new file mode 100644 index 0000000..1a0c702 --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/among.go @@ -0,0 +1,16 @@ +package snowballstem + +import "fmt" + +type AmongF func(env *Env, ctx interface{}) bool + +type Among struct { + Str string + A int32 + B int32 + F AmongF +} + +func (a *Among) String() string { + return fmt.Sprintf("str: `%s`, a: %d, b: %d, f: %p", a.Str, a.A, a.B, a.F) +} diff --git a/vendor/github.com/blevesearch/snowballstem/english/english_stemmer.go b/vendor/github.com/blevesearch/snowballstem/english/english_stemmer.go new file mode 100644 
index 0000000..87e1d48 --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/english/english_stemmer.go @@ -0,0 +1,1341 @@ +//! This file was generated automatically by the Snowball to Go compiler +//! http://snowballstem.org/ + +package english + +import ( + snowballRuntime "github.com/blevesearch/snowballstem" +) + +var A_0 = []*snowballRuntime.Among{ + {Str: "arsen", A: -1, B: -1, F: nil}, + {Str: "commun", A: -1, B: -1, F: nil}, + {Str: "gener", A: -1, B: -1, F: nil}, +} + +var A_1 = []*snowballRuntime.Among{ + {Str: "'", A: -1, B: 1, F: nil}, + {Str: "'s'", A: 0, B: 1, F: nil}, + {Str: "'s", A: -1, B: 1, F: nil}, +} + +var A_2 = []*snowballRuntime.Among{ + {Str: "ied", A: -1, B: 2, F: nil}, + {Str: "s", A: -1, B: 3, F: nil}, + {Str: "ies", A: 1, B: 2, F: nil}, + {Str: "sses", A: 1, B: 1, F: nil}, + {Str: "ss", A: 1, B: -1, F: nil}, + {Str: "us", A: 1, B: -1, F: nil}, +} + +var A_3 = []*snowballRuntime.Among{ + {Str: "", A: -1, B: 3, F: nil}, + {Str: "bb", A: 0, B: 2, F: nil}, + {Str: "dd", A: 0, B: 2, F: nil}, + {Str: "ff", A: 0, B: 2, F: nil}, + {Str: "gg", A: 0, B: 2, F: nil}, + {Str: "bl", A: 0, B: 1, F: nil}, + {Str: "mm", A: 0, B: 2, F: nil}, + {Str: "nn", A: 0, B: 2, F: nil}, + {Str: "pp", A: 0, B: 2, F: nil}, + {Str: "rr", A: 0, B: 2, F: nil}, + {Str: "at", A: 0, B: 1, F: nil}, + {Str: "tt", A: 0, B: 2, F: nil}, + {Str: "iz", A: 0, B: 1, F: nil}, +} + +var A_4 = []*snowballRuntime.Among{ + {Str: "ed", A: -1, B: 2, F: nil}, + {Str: "eed", A: 0, B: 1, F: nil}, + {Str: "ing", A: -1, B: 2, F: nil}, + {Str: "edly", A: -1, B: 2, F: nil}, + {Str: "eedly", A: 3, B: 1, F: nil}, + {Str: "ingly", A: -1, B: 2, F: nil}, +} + +var A_5 = []*snowballRuntime.Among{ + {Str: "anci", A: -1, B: 3, F: nil}, + {Str: "enci", A: -1, B: 2, F: nil}, + {Str: "ogi", A: -1, B: 13, F: nil}, + {Str: "li", A: -1, B: 16, F: nil}, + {Str: "bli", A: 3, B: 12, F: nil}, + {Str: "abli", A: 4, B: 4, F: nil}, + {Str: "alli", A: 3, B: 8, F: nil}, + {Str: "fulli", A: 3, B: 14, F: nil}, 
+ {Str: "lessli", A: 3, B: 15, F: nil}, + {Str: "ousli", A: 3, B: 10, F: nil}, + {Str: "entli", A: 3, B: 5, F: nil}, + {Str: "aliti", A: -1, B: 8, F: nil}, + {Str: "biliti", A: -1, B: 12, F: nil}, + {Str: "iviti", A: -1, B: 11, F: nil}, + {Str: "tional", A: -1, B: 1, F: nil}, + {Str: "ational", A: 14, B: 7, F: nil}, + {Str: "alism", A: -1, B: 8, F: nil}, + {Str: "ation", A: -1, B: 7, F: nil}, + {Str: "ization", A: 17, B: 6, F: nil}, + {Str: "izer", A: -1, B: 6, F: nil}, + {Str: "ator", A: -1, B: 7, F: nil}, + {Str: "iveness", A: -1, B: 11, F: nil}, + {Str: "fulness", A: -1, B: 9, F: nil}, + {Str: "ousness", A: -1, B: 10, F: nil}, +} + +var A_6 = []*snowballRuntime.Among{ + {Str: "icate", A: -1, B: 4, F: nil}, + {Str: "ative", A: -1, B: 6, F: nil}, + {Str: "alize", A: -1, B: 3, F: nil}, + {Str: "iciti", A: -1, B: 4, F: nil}, + {Str: "ical", A: -1, B: 4, F: nil}, + {Str: "tional", A: -1, B: 1, F: nil}, + {Str: "ational", A: 5, B: 2, F: nil}, + {Str: "ful", A: -1, B: 5, F: nil}, + {Str: "ness", A: -1, B: 5, F: nil}, +} + +var A_7 = []*snowballRuntime.Among{ + {Str: "ic", A: -1, B: 1, F: nil}, + {Str: "ance", A: -1, B: 1, F: nil}, + {Str: "ence", A: -1, B: 1, F: nil}, + {Str: "able", A: -1, B: 1, F: nil}, + {Str: "ible", A: -1, B: 1, F: nil}, + {Str: "ate", A: -1, B: 1, F: nil}, + {Str: "ive", A: -1, B: 1, F: nil}, + {Str: "ize", A: -1, B: 1, F: nil}, + {Str: "iti", A: -1, B: 1, F: nil}, + {Str: "al", A: -1, B: 1, F: nil}, + {Str: "ism", A: -1, B: 1, F: nil}, + {Str: "ion", A: -1, B: 2, F: nil}, + {Str: "er", A: -1, B: 1, F: nil}, + {Str: "ous", A: -1, B: 1, F: nil}, + {Str: "ant", A: -1, B: 1, F: nil}, + {Str: "ent", A: -1, B: 1, F: nil}, + {Str: "ment", A: 15, B: 1, F: nil}, + {Str: "ement", A: 16, B: 1, F: nil}, +} + +var A_8 = []*snowballRuntime.Among{ + {Str: "e", A: -1, B: 1, F: nil}, + {Str: "l", A: -1, B: 2, F: nil}, +} + +var A_9 = []*snowballRuntime.Among{ + {Str: "succeed", A: -1, B: -1, F: nil}, + {Str: "proceed", A: -1, B: -1, F: nil}, + {Str: "exceed", A: 
-1, B: -1, F: nil}, + {Str: "canning", A: -1, B: -1, F: nil}, + {Str: "inning", A: -1, B: -1, F: nil}, + {Str: "earring", A: -1, B: -1, F: nil}, + {Str: "herring", A: -1, B: -1, F: nil}, + {Str: "outing", A: -1, B: -1, F: nil}, +} + +var A_10 = []*snowballRuntime.Among{ + {Str: "andes", A: -1, B: -1, F: nil}, + {Str: "atlas", A: -1, B: -1, F: nil}, + {Str: "bias", A: -1, B: -1, F: nil}, + {Str: "cosmos", A: -1, B: -1, F: nil}, + {Str: "dying", A: -1, B: 3, F: nil}, + {Str: "early", A: -1, B: 9, F: nil}, + {Str: "gently", A: -1, B: 7, F: nil}, + {Str: "howe", A: -1, B: -1, F: nil}, + {Str: "idly", A: -1, B: 6, F: nil}, + {Str: "lying", A: -1, B: 4, F: nil}, + {Str: "news", A: -1, B: -1, F: nil}, + {Str: "only", A: -1, B: 10, F: nil}, + {Str: "singly", A: -1, B: 11, F: nil}, + {Str: "skies", A: -1, B: 2, F: nil}, + {Str: "skis", A: -1, B: 1, F: nil}, + {Str: "sky", A: -1, B: -1, F: nil}, + {Str: "tying", A: -1, B: 5, F: nil}, + {Str: "ugly", A: -1, B: 8, F: nil}, +} + +var G_v = []byte{17, 65, 16, 1} + +var G_v_WXY = []byte{1, 17, 65, 208, 1} + +var G_valid_LI = []byte{55, 141, 2} + +type Context struct { + b_Y_found bool + i_p2 int + i_p1 int +} + +func r_prelude(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + // (, line 25 + // unset Y_found, line 26 + context.b_Y_found = false + // do, line 27 + var v_1 = env.Cursor +lab0: + for { + // (, line 27 + // [, line 27 + env.Bra = env.Cursor + // literal, line 27 + if !env.EqS("'") { + break lab0 + } + // ], line 27 + env.Ket = env.Cursor + // delete, line 27 + if !env.SliceDel() { + return false + } + break lab0 + } + env.Cursor = v_1 + // do, line 28 + var v_2 = env.Cursor +lab1: + for { + // (, line 28 + // [, line 28 + env.Bra = env.Cursor + // literal, line 28 + if !env.EqS("y") { + break lab1 + } + // ], line 28 + env.Ket = env.Cursor + // <-, line 28 + if !env.SliceFrom("Y") { + return false + } + // set Y_found, line 28 + context.b_Y_found = true + break lab1 + } + 
env.Cursor = v_2 + // do, line 29 + var v_3 = env.Cursor +lab2: + for { + // repeat, line 29 + replab3: + for { + var v_4 = env.Cursor + lab4: + for range [2]struct{}{} { + // (, line 29 + // goto, line 29 + golab5: + for { + var v_5 = env.Cursor + lab6: + for { + // (, line 29 + if !env.InGrouping(G_v, 97, 121) { + break lab6 + } + // [, line 29 + env.Bra = env.Cursor + // literal, line 29 + if !env.EqS("y") { + break lab6 + } + // ], line 29 + env.Ket = env.Cursor + env.Cursor = v_5 + break golab5 + } + env.Cursor = v_5 + if env.Cursor >= env.Limit { + break lab4 + } + env.NextChar() + } + // <-, line 29 + if !env.SliceFrom("Y") { + return false + } + // set Y_found, line 29 + context.b_Y_found = true + continue replab3 + } + env.Cursor = v_4 + break replab3 + } + break lab2 + } + env.Cursor = v_3 + return true +} + +func r_mark_regions(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + // (, line 32 + context.i_p1 = env.Limit + context.i_p2 = env.Limit + // do, line 35 + var v_1 = env.Cursor +lab0: + for { + // (, line 35 + // or, line 41 + lab1: + for { + var v_2 = env.Cursor + lab2: + for { + // among, line 36 + if env.FindAmong(A_0, context) == 0 { + break lab2 + } + break lab1 + } + env.Cursor = v_2 + // (, line 41 + // gopast, line 41 + golab3: + for { + lab4: + for { + if !env.InGrouping(G_v, 97, 121) { + break lab4 + } + break golab3 + } + if env.Cursor >= env.Limit { + break lab0 + } + env.NextChar() + } + // gopast, line 41 + golab5: + for { + lab6: + for { + if !env.OutGrouping(G_v, 97, 121) { + break lab6 + } + break golab5 + } + if env.Cursor >= env.Limit { + break lab0 + } + env.NextChar() + } + break lab1 + } + // setmark p1, line 42 + context.i_p1 = env.Cursor + // gopast, line 43 + golab7: + for { + lab8: + for { + if !env.InGrouping(G_v, 97, 121) { + break lab8 + } + break golab7 + } + if env.Cursor >= env.Limit { + break lab0 + } + env.NextChar() + } + // gopast, line 43 + golab9: + for { + lab10: + 
for { + if !env.OutGrouping(G_v, 97, 121) { + break lab10 + } + break golab9 + } + if env.Cursor >= env.Limit { + break lab0 + } + env.NextChar() + } + // setmark p2, line 43 + context.i_p2 = env.Cursor + break lab0 + } + env.Cursor = v_1 + return true +} + +func r_shortv(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + // (, line 49 + // or, line 51 +lab0: + for { + var v_1 = env.Limit - env.Cursor + lab1: + for { + // (, line 50 + if !env.OutGroupingB(G_v_WXY, 89, 121) { + break lab1 + } + if !env.InGroupingB(G_v, 97, 121) { + break lab1 + } + if !env.OutGroupingB(G_v, 97, 121) { + break lab1 + } + break lab0 + } + env.Cursor = env.Limit - v_1 + // (, line 52 + if !env.OutGroupingB(G_v, 97, 121) { + return false + } + if !env.InGroupingB(G_v, 97, 121) { + return false + } + // atlimit, line 52 + if env.Cursor > env.LimitBackward { + return false + } + break lab0 + } + return true +} + +func r_R1(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + if !(context.i_p1 <= env.Cursor) { + return false + } + return true +} + +func r_R2(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + if !(context.i_p2 <= env.Cursor) { + return false + } + return true +} + +func r_Step_1a(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 58 + // try, line 59 + var v_1 = env.Limit - env.Cursor +lab0: + for { + // (, line 59 + // [, line 60 + env.Ket = env.Cursor + // substring, line 60 + among_var = env.FindAmongB(A_1, context) + if among_var == 0 { + env.Cursor = env.Limit - v_1 + break lab0 + } + // ], line 60 + env.Bra = env.Cursor + if among_var == 0 { + env.Cursor = env.Limit - v_1 + break lab0 + } else if among_var == 1 { + // (, line 62 + // delete, line 62 + if !env.SliceDel() { + return false + } + } + break lab0 + } + // [, line 65 + env.Ket = env.Cursor + // substring, 
line 65 + among_var = env.FindAmongB(A_2, context) + if among_var == 0 { + return false + } + // ], line 65 + env.Bra = env.Cursor + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 66 + // <-, line 66 + if !env.SliceFrom("ss") { + return false + } + } else if among_var == 2 { + // (, line 68 + // or, line 68 + lab1: + for { + var v_2 = env.Limit - env.Cursor + lab2: + for { + // (, line 68 + { + // hop, line 68 + var c = env.ByteIndexForHop(-(2)) + if int32(env.LimitBackward) > c || c > int32(env.Limit) { + break lab2 + } + env.Cursor = int(c) + } + // <-, line 68 + if !env.SliceFrom("i") { + return false + } + break lab1 + } + env.Cursor = env.Limit - v_2 + // <-, line 68 + if !env.SliceFrom("ie") { + return false + } + break lab1 + } + } else if among_var == 3 { + // (, line 69 + // next, line 69 + if env.Cursor <= env.LimitBackward { + return false + } + env.PrevChar() + // gopast, line 69 + golab3: + for { + lab4: + for { + if !env.InGroupingB(G_v, 97, 121) { + break lab4 + } + break golab3 + } + if env.Cursor <= env.LimitBackward { + return false + } + env.PrevChar() + } + // delete, line 69 + if !env.SliceDel() { + return false + } + } + return true +} + +func r_Step_1b(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 74 + // [, line 75 + env.Ket = env.Cursor + // substring, line 75 + among_var = env.FindAmongB(A_4, context) + if among_var == 0 { + return false + } + // ], line 75 + env.Bra = env.Cursor + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 77 + // call R1, line 77 + if !r_R1(env, context) { + return false + } + // <-, line 77 + if !env.SliceFrom("ee") { + return false + } + } else if among_var == 2 { + // (, line 79 + // test, line 80 + var v_1 = env.Limit - env.Cursor + // gopast, line 80 + golab0: + for { + lab1: + for { + if !env.InGroupingB(G_v, 97, 121) { + break lab1 + } + break golab0 + } + if env.Cursor 
<= env.LimitBackward { + return false + } + env.PrevChar() + } + env.Cursor = env.Limit - v_1 + // delete, line 80 + if !env.SliceDel() { + return false + } + // test, line 81 + var v_3 = env.Limit - env.Cursor + // substring, line 81 + among_var = env.FindAmongB(A_3, context) + if among_var == 0 { + return false + } + env.Cursor = env.Limit - v_3 + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 83 + { + // <+, line 83 + var c = env.Cursor + bra, ket := env.Cursor, env.Cursor + env.Insert(bra, ket, "e") + env.Cursor = c + } + } else if among_var == 2 { + // (, line 86 + // [, line 86 + env.Ket = env.Cursor + // next, line 86 + if env.Cursor <= env.LimitBackward { + return false + } + env.PrevChar() + // ], line 86 + env.Bra = env.Cursor + // delete, line 86 + if !env.SliceDel() { + return false + } + } else if among_var == 3 { + // (, line 87 + // atmark, line 87 + if env.Cursor != context.i_p1 { + return false + } + // test, line 87 + var v_4 = env.Limit - env.Cursor + // call shortv, line 87 + if !r_shortv(env, context) { + return false + } + env.Cursor = env.Limit - v_4 + { + // <+, line 87 + var c = env.Cursor + bra, ket := env.Cursor, env.Cursor + env.Insert(bra, ket, "e") + env.Cursor = c + } + } + } + return true +} + +func r_Step_1c(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + // (, line 93 + // [, line 94 + env.Ket = env.Cursor + // or, line 94 +lab0: + for { + var v_1 = env.Limit - env.Cursor + lab1: + for { + // literal, line 94 + if !env.EqSB("y") { + break lab1 + } + break lab0 + } + env.Cursor = env.Limit - v_1 + // literal, line 94 + if !env.EqSB("Y") { + return false + } + break lab0 + } + // ], line 94 + env.Bra = env.Cursor + if !env.OutGroupingB(G_v, 97, 121) { + return false + } + // not, line 95 + var v_2 = env.Limit - env.Cursor +lab2: + for { + // atlimit, line 95 + if env.Cursor > env.LimitBackward { + break lab2 + } + return false + } + env.Cursor = env.Limit - 
v_2 + // <-, line 96 + if !env.SliceFrom("i") { + return false + } + return true +} + +func r_Step_2(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 99 + // [, line 100 + env.Ket = env.Cursor + // substring, line 100 + among_var = env.FindAmongB(A_5, context) + if among_var == 0 { + return false + } + // ], line 100 + env.Bra = env.Cursor + // call R1, line 100 + if !r_R1(env, context) { + return false + } + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 101 + // <-, line 101 + if !env.SliceFrom("tion") { + return false + } + } else if among_var == 2 { + // (, line 102 + // <-, line 102 + if !env.SliceFrom("ence") { + return false + } + } else if among_var == 3 { + // (, line 103 + // <-, line 103 + if !env.SliceFrom("ance") { + return false + } + } else if among_var == 4 { + // (, line 104 + // <-, line 104 + if !env.SliceFrom("able") { + return false + } + } else if among_var == 5 { + // (, line 105 + // <-, line 105 + if !env.SliceFrom("ent") { + return false + } + } else if among_var == 6 { + // (, line 107 + // <-, line 107 + if !env.SliceFrom("ize") { + return false + } + } else if among_var == 7 { + // (, line 109 + // <-, line 109 + if !env.SliceFrom("ate") { + return false + } + } else if among_var == 8 { + // (, line 111 + // <-, line 111 + if !env.SliceFrom("al") { + return false + } + } else if among_var == 9 { + // (, line 112 + // <-, line 112 + if !env.SliceFrom("ful") { + return false + } + } else if among_var == 10 { + // (, line 114 + // <-, line 114 + if !env.SliceFrom("ous") { + return false + } + } else if among_var == 11 { + // (, line 116 + // <-, line 116 + if !env.SliceFrom("ive") { + return false + } + } else if among_var == 12 { + // (, line 118 + // <-, line 118 + if !env.SliceFrom("ble") { + return false + } + } else if among_var == 13 { + // (, line 119 + // literal, line 119 + if !env.EqSB("l") { + return false + } + // <-, 
line 119 + if !env.SliceFrom("og") { + return false + } + } else if among_var == 14 { + // (, line 120 + // <-, line 120 + if !env.SliceFrom("ful") { + return false + } + } else if among_var == 15 { + // (, line 121 + // <-, line 121 + if !env.SliceFrom("less") { + return false + } + } else if among_var == 16 { + // (, line 122 + if !env.InGroupingB(G_valid_LI, 99, 116) { + return false + } + // delete, line 122 + if !env.SliceDel() { + return false + } + } + return true +} + +func r_Step_3(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 126 + // [, line 127 + env.Ket = env.Cursor + // substring, line 127 + among_var = env.FindAmongB(A_6, context) + if among_var == 0 { + return false + } + // ], line 127 + env.Bra = env.Cursor + // call R1, line 127 + if !r_R1(env, context) { + return false + } + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 128 + // <-, line 128 + if !env.SliceFrom("tion") { + return false + } + } else if among_var == 2 { + // (, line 129 + // <-, line 129 + if !env.SliceFrom("ate") { + return false + } + } else if among_var == 3 { + // (, line 130 + // <-, line 130 + if !env.SliceFrom("al") { + return false + } + } else if among_var == 4 { + // (, line 132 + // <-, line 132 + if !env.SliceFrom("ic") { + return false + } + } else if among_var == 5 { + // (, line 134 + // delete, line 134 + if !env.SliceDel() { + return false + } + } else if among_var == 6 { + // (, line 136 + // call R2, line 136 + if !r_R2(env, context) { + return false + } + // delete, line 136 + if !env.SliceDel() { + return false + } + } + return true +} + +func r_Step_4(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 140 + // [, line 141 + env.Ket = env.Cursor + // substring, line 141 + among_var = env.FindAmongB(A_7, context) + if among_var == 0 { + return false + } + // ], line 141 + 
env.Bra = env.Cursor + // call R2, line 141 + if !r_R2(env, context) { + return false + } + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 144 + // delete, line 144 + if !env.SliceDel() { + return false + } + } else if among_var == 2 { + // (, line 145 + // or, line 145 + lab0: + for { + var v_1 = env.Limit - env.Cursor + lab1: + for { + // literal, line 145 + if !env.EqSB("s") { + break lab1 + } + break lab0 + } + env.Cursor = env.Limit - v_1 + // literal, line 145 + if !env.EqSB("t") { + return false + } + break lab0 + } + // delete, line 145 + if !env.SliceDel() { + return false + } + } + return true +} + +func r_Step_5(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 149 + // [, line 150 + env.Ket = env.Cursor + // substring, line 150 + among_var = env.FindAmongB(A_8, context) + if among_var == 0 { + return false + } + // ], line 150 + env.Bra = env.Cursor + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 151 + // or, line 151 + lab0: + for { + var v_1 = env.Limit - env.Cursor + lab1: + for { + // call R2, line 151 + if !r_R2(env, context) { + break lab1 + } + break lab0 + } + env.Cursor = env.Limit - v_1 + // (, line 151 + // call R1, line 151 + if !r_R1(env, context) { + return false + } + // not, line 151 + var v_2 = env.Limit - env.Cursor + lab2: + for { + // call shortv, line 151 + if !r_shortv(env, context) { + break lab2 + } + return false + } + env.Cursor = env.Limit - v_2 + break lab0 + } + // delete, line 151 + if !env.SliceDel() { + return false + } + } else if among_var == 2 { + // (, line 152 + // call R2, line 152 + if !r_R2(env, context) { + return false + } + // literal, line 152 + if !env.EqSB("l") { + return false + } + // delete, line 152 + if !env.SliceDel() { + return false + } + } + return true +} + +func r_exception2(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = 
context + // (, line 156 + // [, line 158 + env.Ket = env.Cursor + // substring, line 158 + if env.FindAmongB(A_9, context) == 0 { + return false + } + // ], line 158 + env.Bra = env.Cursor + // atlimit, line 158 + if env.Cursor > env.LimitBackward { + return false + } + return true +} + +func r_exception1(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + var among_var int32 + // (, line 168 + // [, line 170 + env.Bra = env.Cursor + // substring, line 170 + among_var = env.FindAmong(A_10, context) + if among_var == 0 { + return false + } + // ], line 170 + env.Ket = env.Cursor + // atlimit, line 170 + if env.Cursor < env.Limit { + return false + } + if among_var == 0 { + return false + } else if among_var == 1 { + // (, line 174 + // <-, line 174 + if !env.SliceFrom("ski") { + return false + } + } else if among_var == 2 { + // (, line 175 + // <-, line 175 + if !env.SliceFrom("sky") { + return false + } + } else if among_var == 3 { + // (, line 176 + // <-, line 176 + if !env.SliceFrom("die") { + return false + } + } else if among_var == 4 { + // (, line 177 + // <-, line 177 + if !env.SliceFrom("lie") { + return false + } + } else if among_var == 5 { + // (, line 178 + // <-, line 178 + if !env.SliceFrom("tie") { + return false + } + } else if among_var == 6 { + // (, line 182 + // <-, line 182 + if !env.SliceFrom("idl") { + return false + } + } else if among_var == 7 { + // (, line 183 + // <-, line 183 + if !env.SliceFrom("gentl") { + return false + } + } else if among_var == 8 { + // (, line 184 + // <-, line 184 + if !env.SliceFrom("ugli") { + return false + } + } else if among_var == 9 { + // (, line 185 + // <-, line 185 + if !env.SliceFrom("earli") { + return false + } + } else if among_var == 10 { + // (, line 186 + // <-, line 186 + if !env.SliceFrom("onli") { + return false + } + } else if among_var == 11 { + // (, line 187 + // <-, line 187 + if !env.SliceFrom("singl") { + return false + } + } + return true +} 
+ +func r_postlude(env *snowballRuntime.Env, ctx interface{}) bool { + context := ctx.(*Context) + _ = context + // (, line 203 + // Boolean test Y_found, line 203 + if !context.b_Y_found { + return false + } + // repeat, line 203 +replab0: + for { + var v_1 = env.Cursor + lab1: + for range [2]struct{}{} { + // (, line 203 + // goto, line 203 + golab2: + for { + var v_2 = env.Cursor + lab3: + for { + // (, line 203 + // [, line 203 + env.Bra = env.Cursor + // literal, line 203 + if !env.EqS("Y") { + break lab3 + } + // ], line 203 + env.Ket = env.Cursor + env.Cursor = v_2 + break golab2 + } + env.Cursor = v_2 + if env.Cursor >= env.Limit { + break lab1 + } + env.NextChar() + } + // <-, line 203 + if !env.SliceFrom("y") { + return false + } + continue replab0 + } + env.Cursor = v_1 + break replab0 + } + return true +} + +func Stem(env *snowballRuntime.Env) bool { + var context = &Context{ + b_Y_found: false, + i_p2: 0, + i_p1: 0, + } + _ = context + // (, line 205 + // or, line 207 +lab0: + for { + var v_1 = env.Cursor + lab1: + for { + // call exception1, line 207 + if !r_exception1(env, context) { + break lab1 + } + break lab0 + } + env.Cursor = v_1 + lab2: + for { + // not, line 208 + var v_2 = env.Cursor + lab3: + for { + { + // hop, line 208 + var c = env.ByteIndexForHop((3)) + if int32(0) > c || c > int32(env.Limit) { + break lab3 + } + env.Cursor = int(c) + } + break lab2 + } + env.Cursor = v_2 + break lab0 + } + env.Cursor = v_1 + // (, line 208 + // do, line 209 + var v_3 = env.Cursor + lab4: + for { + // call prelude, line 209 + if !r_prelude(env, context) { + break lab4 + } + break lab4 + } + env.Cursor = v_3 + // do, line 210 + var v_4 = env.Cursor + lab5: + for { + // call mark_regions, line 210 + if !r_mark_regions(env, context) { + break lab5 + } + break lab5 + } + env.Cursor = v_4 + // backwards, line 211 + env.LimitBackward = env.Cursor + env.Cursor = env.Limit + // (, line 211 + // do, line 213 + var v_5 = env.Limit - env.Cursor + lab6: + for { + 
// call Step_1a, line 213 + if !r_Step_1a(env, context) { + break lab6 + } + break lab6 + } + env.Cursor = env.Limit - v_5 + // or, line 215 + lab7: + for { + var v_6 = env.Limit - env.Cursor + lab8: + for { + // call exception2, line 215 + if !r_exception2(env, context) { + break lab8 + } + break lab7 + } + env.Cursor = env.Limit - v_6 + // (, line 215 + // do, line 217 + var v_7 = env.Limit - env.Cursor + lab9: + for { + // call Step_1b, line 217 + if !r_Step_1b(env, context) { + break lab9 + } + break lab9 + } + env.Cursor = env.Limit - v_7 + // do, line 218 + var v_8 = env.Limit - env.Cursor + lab10: + for { + // call Step_1c, line 218 + if !r_Step_1c(env, context) { + break lab10 + } + break lab10 + } + env.Cursor = env.Limit - v_8 + // do, line 220 + var v_9 = env.Limit - env.Cursor + lab11: + for { + // call Step_2, line 220 + if !r_Step_2(env, context) { + break lab11 + } + break lab11 + } + env.Cursor = env.Limit - v_9 + // do, line 221 + var v_10 = env.Limit - env.Cursor + lab12: + for { + // call Step_3, line 221 + if !r_Step_3(env, context) { + break lab12 + } + break lab12 + } + env.Cursor = env.Limit - v_10 + // do, line 222 + var v_11 = env.Limit - env.Cursor + lab13: + for { + // call Step_4, line 222 + if !r_Step_4(env, context) { + break lab13 + } + break lab13 + } + env.Cursor = env.Limit - v_11 + // do, line 224 + var v_12 = env.Limit - env.Cursor + lab14: + for { + // call Step_5, line 224 + if !r_Step_5(env, context) { + break lab14 + } + break lab14 + } + env.Cursor = env.Limit - v_12 + break lab7 + } + env.Cursor = env.LimitBackward + // do, line 227 + var v_13 = env.Cursor + lab15: + for { + // call postlude, line 227 + if !r_postlude(env, context) { + break lab15 + } + break lab15 + } + env.Cursor = v_13 + break lab0 + } + return true +} diff --git a/vendor/github.com/blevesearch/snowballstem/env.go b/vendor/github.com/blevesearch/snowballstem/env.go new file mode 100644 index 0000000..6636994 --- /dev/null +++ 
b/vendor/github.com/blevesearch/snowballstem/env.go @@ -0,0 +1,389 @@ +package snowballstem + +import ( + "log" + "strings" + "unicode/utf8" +) + +// Env represents the Snowball execution environment +type Env struct { + current string + Cursor int + Limit int + LimitBackward int + Bra int + Ket int +} + +// NewEnv creates a new Snowball execution environment on the provided string +func NewEnv(val string) *Env { + return &Env{ + current: val, + Cursor: 0, + Limit: len(val), + LimitBackward: 0, + Bra: 0, + Ket: len(val), + } +} + +func (env *Env) Current() string { + return env.current +} + +func (env *Env) SetCurrent(s string) { + env.current = s + env.Cursor = 0 + env.Limit = len(s) + env.LimitBackward = 0 + env.Bra = 0 + env.Ket = len(s) +} + +func (env *Env) ReplaceS(bra, ket int, s string) int32 { + adjustment := int32(len(s)) - (int32(ket) - int32(bra)) + result, _ := splitAt(env.current, bra) + rsplit := ket + if ket < bra { + rsplit = bra + } + _, rhs := splitAt(env.current, rsplit) + result += s + result += rhs + + newLim := int32(env.Limit) + adjustment + env.Limit = int(newLim) + + if env.Cursor >= ket { + newCur := int32(env.Cursor) + adjustment + env.Cursor = int(newCur) + } else if env.Cursor > bra { + env.Cursor = bra + } + + env.current = result + return adjustment +} + +func (env *Env) EqS(s string) bool { + if env.Cursor >= env.Limit { + return false + } + + if strings.HasPrefix(env.current[env.Cursor:], s) { + env.Cursor += len(s) + for !onCharBoundary(env.current, env.Cursor) { + env.Cursor++ + } + return true + } + return false +} + +func (env *Env) EqSB(s string) bool { + if int32(env.Cursor)-int32(env.LimitBackward) < int32(len(s)) { + return false + } else if !onCharBoundary(env.current, env.Cursor-len(s)) || + !strings.HasPrefix(env.current[env.Cursor-len(s):], s) { + return false + } else { + env.Cursor -= len(s) + return true + } +} + +func (env *Env) SliceFrom(s string) bool { + bra, ket := env.Bra, env.Ket + env.ReplaceS(bra, ket, s) + 
return true +} + +func (env *Env) NextChar() { + env.Cursor++ + for !onCharBoundary(env.current, env.Cursor) { + env.Cursor++ + } +} + +func (env *Env) PrevChar() { + env.Cursor-- + for !onCharBoundary(env.current, env.Cursor) { + env.Cursor-- + } +} + +func (env *Env) ByteIndexForHop(delta int32) int32 { + if delta > 0 { + res := env.Cursor + for delta > 0 { + res++ + delta-- + for res <= len(env.current) && !onCharBoundary(env.current, res) { + res++ + } + } + return int32(res) + } else if delta < 0 { + res := env.Cursor + for delta < 0 { + res-- + delta++ + for res >= 0 && !onCharBoundary(env.current, res) { + res-- + } + } + return int32(res) + } else { + return int32(env.Cursor) + } +} + +func (env *Env) InGrouping(chars []byte, min, max int32) bool { + if env.Cursor >= env.Limit { + return false + } + + r, _ := utf8.DecodeRuneInString(env.current[env.Cursor:]) + if r != utf8.RuneError { + if r > max || r < min { + return false + } + r -= min + if (chars[uint(r>>3)] & (0x1 << uint(r&0x7))) == 0 { + return false + } + env.NextChar() + return true + } + return false +} + +func (env *Env) InGroupingB(chars []byte, min, max int32) bool { + if env.Cursor <= env.LimitBackward { + return false + } + env.PrevChar() + r, _ := utf8.DecodeRuneInString(env.current[env.Cursor:]) + if r != utf8.RuneError { + env.NextChar() + if r > max || r < min { + return false + } + r -= min + if (chars[uint(r>>3)] & (0x1 << uint(r&0x7))) == 0 { + return false + } + env.PrevChar() + return true + } + return false +} + +func (env *Env) OutGrouping(chars []byte, min, max int32) bool { + if env.Cursor >= env.Limit { + return false + } + r, _ := utf8.DecodeRuneInString(env.current[env.Cursor:]) + if r != utf8.RuneError { + if r > max || r < min { + env.NextChar() + return true + } + r -= min + if (chars[uint(r>>3)] & (0x1 << uint(r&0x7))) == 0 { + env.NextChar() + return true + } + } + return false +} + +func (env *Env) OutGroupingB(chars []byte, min, max int32) bool { + if env.Cursor <= 
env.LimitBackward { + return false + } + env.PrevChar() + r, _ := utf8.DecodeRuneInString(env.current[env.Cursor:]) + if r != utf8.RuneError { + env.NextChar() + if r > max || r < min { + env.PrevChar() + return true + } + r -= min + if (chars[uint(r>>3)] & (0x1 << uint(r&0x7))) == 0 { + env.PrevChar() + return true + } + } + return false +} + +func (env *Env) SliceDel() bool { + return env.SliceFrom("") +} + +func (env *Env) Insert(bra, ket int, s string) { + adjustment := env.ReplaceS(bra, ket, s) + if bra <= env.Bra { + env.Bra = int(int32(env.Bra) + adjustment) + } + if bra <= env.Ket { + env.Ket = int(int32(env.Ket) + adjustment) + } +} + +func (env *Env) SliceTo() string { + return env.current[env.Bra:env.Ket] +} + +func (env *Env) FindAmong(amongs []*Among, ctx interface{}) int32 { + var i int32 + j := int32(len(amongs)) + + c := env.Cursor + l := env.Limit + + var commonI, commonJ int + + firstKeyInspected := false + for { + k := i + ((j - i) >> 1) + var diff int32 + common := min(commonI, commonJ) + w := amongs[k] + for lvar := common; lvar < len(w.Str); lvar++ { + if c+common == l { + diff-- + break + } + diff = int32(env.current[c+common]) - int32(w.Str[lvar]) + if diff != 0 { + break + } + common++ + } + if diff < 0 { + j = k + commonJ = common + } else { + i = k + commonI = common + } + if j-i <= 1 { + if i > 0 { + break + } + if j == i { + break + } + if firstKeyInspected { + break + } + firstKeyInspected = true + } + } + + for { + w := amongs[i] + if commonI >= len(w.Str) { + env.Cursor = c + len(w.Str) + if w.F != nil { + res := w.F(env, ctx) + env.Cursor = c + len(w.Str) + if res { + return w.B + } + } else { + return w.B + } + } + i = w.A + if i < 0 { + return 0 + } + } +} + +func (env *Env) FindAmongB(amongs []*Among, ctx interface{}) int32 { + var i int32 + j := int32(len(amongs)) + + c := env.Cursor + lb := env.LimitBackward + + var commonI, commonJ int + + firstKeyInspected := false + + for { + k := i + ((j - i) >> 1) + diff := int32(0) + 
common := min(commonI, commonJ) + w := amongs[k] + for lvar := len(w.Str) - int(common) - 1; lvar >= 0; lvar-- { + if c-common == lb { + diff-- + break + } + diff = int32(env.current[c-common-1]) - int32(w.Str[lvar]) + if diff != 0 { + break + } + // Count up commons. But not one character but the byte width of that char + common++ + } + if diff < 0 { + j = k + commonJ = common + } else { + i = k + commonI = common + } + if j-i <= 1 { + if i > 0 { + break + } + if j == i { + break + } + if firstKeyInspected { + break + } + firstKeyInspected = true + } + } + for { + w := amongs[i] + if commonI >= len(w.Str) { + env.Cursor = c - len(w.Str) + if w.F != nil { + res := w.F(env, ctx) + env.Cursor = c - len(w.Str) + if res { + return w.B + } + } else { + return w.B + } + } + i = w.A + if i < 0 { + return 0 + } + } +} + +func (env *Env) Debug(count, lineNumber int) { + log.Printf("snowball debug, count: %d, line: %d", count, lineNumber) +} + +func (env *Env) Clone() *Env { + clone := *env + return &clone +} + +func (env *Env) AssignTo() string { + return env.Current() +} diff --git a/vendor/github.com/blevesearch/snowballstem/gen.go b/vendor/github.com/blevesearch/snowballstem/gen.go new file mode 100644 index 0000000..92548b0 --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/gen.go @@ -0,0 +1,61 @@ +package snowballstem + +// to regenerate these commands, run +// go run gengen.go /path/to/snowball/algorithms/directory + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/arabic/stem_Unicode.sbl -go -o arabic/arabic_stemmer -gop arabic -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w arabic/arabic_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/danish/stem_ISO_8859_1.sbl -go -o danish/danish_stemmer -gop danish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w danish/danish_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/dutch/stem_ISO_8859_1.sbl -go -o dutch/dutch_stemmer -gop 
dutch -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w dutch/dutch_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/english/stem_ISO_8859_1.sbl -go -o english/english_stemmer -gop english -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w english/english_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/finnish/stem_ISO_8859_1.sbl -go -o finnish/finnish_stemmer -gop finnish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w finnish/finnish_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/french/stem_ISO_8859_1.sbl -go -o french/french_stemmer -gop french -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w french/french_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/german/stem_ISO_8859_1.sbl -go -o german/german_stemmer -gop german -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w german/german_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/hungarian/stem_Unicode.sbl -go -o hungarian/hungarian_stemmer -gop hungarian -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w hungarian/hungarian_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/irish/stem_ISO_8859_1.sbl -go -o irish/irish_stemmer -gop irish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w irish/irish_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/italian/stem_ISO_8859_1.sbl -go -o italian/italian_stemmer -gop italian -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w italian/italian_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/norwegian/stem_ISO_8859_1.sbl -go -o norwegian/norwegian_stemmer -gop norwegian -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w norwegian/norwegian_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/porter/stem_ISO_8859_1.sbl -go -o porter/porter_stemmer 
-gop porter -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w porter/porter_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/portuguese/stem_ISO_8859_1.sbl -go -o portuguese/portuguese_stemmer -gop portuguese -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w portuguese/portuguese_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/romanian/stem_Unicode.sbl -go -o romanian/romanian_stemmer -gop romanian -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w romanian/romanian_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/russian/stem_Unicode.sbl -go -o russian/russian_stemmer -gop russian -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w russian/russian_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/spanish/stem_ISO_8859_1.sbl -go -o spanish/spanish_stemmer -gop spanish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w spanish/spanish_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/swedish/stem_ISO_8859_1.sbl -go -o swedish/swedish_stemmer -gop swedish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w swedish/swedish_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/tamil/stem_Unicode.sbl -go -o tamil/tamil_stemmer -gop tamil -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w tamil/tamil_stemmer.go + +//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/turkish/stem_Unicode.sbl -go -o turkish/turkish_stemmer -gop turkish -gor github.com/blevesearch/snowballstem +//go:generate gofmt -s -w turkish/turkish_stemmer.go diff --git a/vendor/github.com/blevesearch/snowballstem/util.go b/vendor/github.com/blevesearch/snowballstem/util.go new file mode 100644 index 0000000..7c68f6e --- /dev/null +++ b/vendor/github.com/blevesearch/snowballstem/util.go @@ -0,0 +1,34 @@ +package snowballstem + +import ( + "math" + "unicode/utf8" +) + +const 
MaxInt = math.MaxInt32 +const MinInt = math.MinInt32 + +func splitAt(str string, mid int) (string, string) { + return str[:mid], str[mid:] +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func onCharBoundary(s string, pos int) bool { + if pos <= 0 || pos >= len(s) { + return true + } + return utf8.RuneStart(s[pos]) +} + +// RuneCountInString is a wrapper around utf8.RuneCountInString +// this allows us to not have to conditionally include +// the utf8 package into some stemmers and not others +func RuneCountInString(str string) int { + return utf8.RuneCountInString(str) +} diff --git a/vendor/github.com/blevesearch/zap/v11/.gitignore b/vendor/github.com/blevesearch/zap/v11/.gitignore new file mode 100644 index 0000000..46d1cfa --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/.gitignore @@ -0,0 +1,12 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +/cmd/zap/zap +*.test +tags diff --git a/vendor/github.com/blevesearch/zap/v11/LICENSE b/vendor/github.com/blevesearch/zap/v11/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/zap/v11/README.md b/vendor/github.com/blevesearch/zap/v11/README.md new file mode 100644 index 0000000..0facb66 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/README.md @@ -0,0 +1,158 @@ +# zap file format + +Advanced ZAP File Format Documentation is [here](zap.md). + +The file is written in the reverse order that we typically access data. This helps us write in one pass since later sections of the file require file offsets of things we've already written. + +Current usage: + +- mmap the entire file +- crc-32 bytes and version are in fixed position at end of the file +- reading remainder of footer could be version specific +- remainder of footer gives us: + - 3 important offsets (docValue , fields index and stored data index) + - 2 important values (number of docs and chunk factor) +- field data is processed once and memoized onto the heap so that we never have to go back to disk for it +- access to stored data by doc number means first navigating to the stored data index, then accessing a fixed position offset into that slice, which gives us the actual address of the data. the first bytes of that section tell us the size of data so that we know where it ends. 
+- access to all other indexed data follows the following pattern: + - first know the field name -> convert to id + - next navigate to term dictionary for that field + - some operations stop here and do dictionary ops + - next use dictionary to navigate to posting list for a specific term + - walk posting list + - if necessary, walk posting details as we go + - if location info is desired, consult location bitmap to see if it is there + +## stored fields section + +- for each document + - preparation phase: + - produce a slice of metadata bytes and data bytes + - produce these slices in field id order + - field value is appended to the data slice + - metadata slice is varint encoded with the following values for each field value + - field id (uint16) + - field type (byte) + - field value start offset in uncompressed data slice (uint64) + - field value length (uint64) + - field number of array positions (uint64) + - one additional value for each array position (uint64) + - compress the data slice using snappy + - file writing phase: + - remember the start offset for this document + - write out meta data length (varint uint64) + - write out compressed data length (varint uint64) + - write out the metadata bytes + - write out the compressed data bytes + +## stored fields idx + +- for each document + - write start offset (remembered from previous section) of stored data (big endian uint64) + +With this index and a known document number, we have direct access to all the stored field data. 
+ +## posting details (freq/norm) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode term frequency (uint64) + - encode norm factor (float32) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. + +## posting details (location) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode field (uint16) + - encode field pos (uint64) + - encode field start (uint64) + - encode field end (uint64) + - encode number of array positions to follow (uint64) + - encode each array position (each uint64) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. 
+ +## postings list section + +- for each posting list + - preparation phase: + - encode roaring bitmap posting list to bytes (so we know the length) + - file writing phase: + - remember the start position for this posting list + - write freq/norm details offset (remembered from previous, as varint uint64) + - write location details offset (remembered from previous, as varint uint64) + - write length of encoded roaring bitmap + - write the serialized roaring bitmap data + +## dictionary + +- for each field + - preparation phase: + - encode vellum FST with dictionary data pointing to file offset of posting list (remembered from previous) + - file writing phase: + - remember the start position of this persistDictionary + - write length of vellum data (varint uint64) + - write out vellum data + +## fields section + +- for each field + - file writing phase: + - remember start offset for each field + - write dictionary address (remembered from previous) (varint uint64) + - write length of field name (varint uint64) + - write field name bytes + +## fields idx + +- for each field + - file writing phase: + - write big endian uint64 of start offset for each field + +NOTE: currently we don't know or record the length of this fields index. Instead we rely on the fact that we know it immediately precedes a footer of known size. 
+ +## fields DocValue + +- for each field + - preparation phase: + - produce a slice containing multiple consecutive chunks, where each chunk is composed of a meta section followed by compressed columnar field data + - produce a slice remembering the length of each chunk + - file writing phase: + - remember the start position of this first field DocValue offset in the footer + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +NOTE: currently the meta header inside each chunk gives clue to the location offsets and size of the data pertaining to a given docID and any +read operation leverage that meta information to extract the document specific data from the file. + +## footer + +- file writing phase + - write number of docs (big endian uint64) + - write stored field index location (big endian uint64) + - write field index location (big endian uint64) + - write field docValue location (big endian uint64) + - write out chunk factor (big endian uint32) + - write out version (big endian uint32) + - write out file CRC of everything preceding this (big endian uint32) diff --git a/vendor/github.com/blevesearch/zap/v11/build.go b/vendor/github.com/blevesearch/zap/v11/build.go new file mode 100644 index 0000000..bac1edb --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/build.go @@ -0,0 +1,156 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bufio" + "math" + "os" + + "github.com/couchbase/vellum" +) + +const Version uint32 = 11 + +const Type string = "zap" + +const fieldNotUninverted = math.MaxUint64 + +func (sb *SegmentBase) Persist(path string) error { + return PersistSegmentBase(sb, path) +} + +// PersistSegmentBase persists SegmentBase in the zap file format. +func PersistSegmentBase(sb *SegmentBase, path string) error { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + br := bufio.NewWriter(f) + + _, err = br.Write(sb.mem) + if err != nil { + cleanup() + return err + } + + err = persistFooter(sb.numDocs, sb.storedIndexOffset, sb.fieldsIndexOffset, sb.docValueOffset, + sb.chunkFactor, sb.memCRC, br) + if err != nil { + cleanup() + return err + } + + err = br.Flush() + if err != nil { + cleanup() + return err + } + + err = f.Sync() + if err != nil { + cleanup() + return err + } + + err = f.Close() + if err != nil { + cleanup() + return err + } + + return nil +} + +func persistStoredFieldValues(fieldID int, + storedFieldValues [][]byte, stf []byte, spf [][]uint64, + curr int, metaEncode varintEncoder, data []byte) ( + int, []byte, error) { + for i := 0; i < len(storedFieldValues); i++ { + // encode field + _, err := metaEncode(uint64(fieldID)) + if err != nil { + return 0, nil, err + } + // encode type + _, err = metaEncode(uint64(stf[i])) + if err != nil { + return 0, nil, err + } + // encode start offset + _, err = metaEncode(uint64(curr)) + if err != nil { + return 0, nil, err + } + // end len + _, err = metaEncode(uint64(len(storedFieldValues[i]))) + if err != nil { + return 0, nil, err + } + // encode number of array pos + _, err = metaEncode(uint64(len(spf[i]))) + if err != nil { + return 0, nil, err + } + // encode all array 
positions + for _, pos := range spf[i] { + _, err = metaEncode(pos) + if err != nil { + return 0, nil, err + } + } + + data = append(data, storedFieldValues[i]...) + curr += len(storedFieldValues[i]) + } + + return curr, data, nil +} + +func InitSegmentBase(mem []byte, memCRC uint32, chunkFactor uint32, + fieldsMap map[string]uint16, fieldsInv []string, numDocs uint64, + storedIndexOffset uint64, fieldsIndexOffset uint64, docValueOffset uint64, + dictLocs []uint64) (*SegmentBase, error) { + sb := &SegmentBase{ + mem: mem, + memCRC: memCRC, + chunkFactor: chunkFactor, + fieldsMap: fieldsMap, + fieldsInv: fieldsInv, + numDocs: numDocs, + storedIndexOffset: storedIndexOffset, + fieldsIndexOffset: fieldsIndexOffset, + docValueOffset: docValueOffset, + dictLocs: dictLocs, + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + } + sb.updateSize() + + err := sb.loadDvReaders() + if err != nil { + return nil, err + } + + return sb, nil +} diff --git a/vendor/github.com/blevesearch/zap/v11/contentcoder.go b/vendor/github.com/blevesearch/zap/v11/contentcoder.go new file mode 100644 index 0000000..b9ff817 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/contentcoder.go @@ -0,0 +1,230 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "reflect" + + "github.com/golang/snappy" +) + +var reflectStaticSizeMetaData int + +func init() { + var md MetaData + reflectStaticSizeMetaData = int(reflect.TypeOf(md).Size()) +} + +var termSeparator byte = 0xff +var termSeparatorSplitSlice = []byte{termSeparator} + +type chunkedContentCoder struct { + final []byte + chunkSize uint64 + currChunk uint64 + chunkLens []uint64 + + w io.Writer + progressiveWrite bool + + chunkMetaBuf bytes.Buffer + chunkBuf bytes.Buffer + + chunkMeta []MetaData + + compressed []byte // temp buf for snappy compression +} + +// MetaData represents the data information inside a +// chunk. +type MetaData struct { + DocNum uint64 // docNum of the data inside the chunk + DocDvOffset uint64 // offset of data inside the chunk for the given docid +} + +// newChunkedContentCoder returns a new chunk content coder which +// packs data into chunks based on the provided chunkSize +func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, + w io.Writer, progressiveWrite bool) *chunkedContentCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedContentCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + chunkMeta: make([]MetaData, 0, total), + w: w, + progressiveWrite: progressiveWrite, + } + + return rv +} + +// Reset lets you reuse this chunked content coder. Buffers are reset +// and re used. You cannot change the chunk size. +func (c *chunkedContentCoder) Reset() { + c.currChunk = 0 + c.final = c.final[:0] + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } + c.chunkMeta = c.chunkMeta[:0] +} + +// Close indicates you are done calling Add() this allows +// the final chunk to be encoded. 
+func (c *chunkedContentCoder) Close() error { + return c.flushContents() +} + +func (c *chunkedContentCoder) flushContents() error { + // flush the contents, with meta information at first + buf := make([]byte, binary.MaxVarintLen64) + n := binary.PutUvarint(buf, uint64(len(c.chunkMeta))) + _, err := c.chunkMetaBuf.Write(buf[:n]) + if err != nil { + return err + } + + // write out the metaData slice + for _, meta := range c.chunkMeta { + _, err := writeUvarints(&c.chunkMetaBuf, meta.DocNum, meta.DocDvOffset) + if err != nil { + return err + } + } + + // write the metadata to final data + metaData := c.chunkMetaBuf.Bytes() + c.final = append(c.final, c.chunkMetaBuf.Bytes()...) + // write the compressed data to the final data + c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes()) + c.final = append(c.final, c.compressed...) + + c.chunkLens[c.currChunk] = uint64(len(c.compressed) + len(metaData)) + + if c.progressiveWrite { + _, err := c.w.Write(c.final) + if err != nil { + return err + } + c.final = c.final[:0] + } + + return nil +} + +// Add encodes the provided byte slice into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. +func (c *chunkedContentCoder) Add(docNum uint64, vals []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // flush out the previous chunk details + err := c.flushContents() + if err != nil { + return err + } + // clearing the chunk specific meta for next chunk + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + c.chunkMeta = c.chunkMeta[:0] + c.currChunk = chunk + } + + // get the starting offset for this doc + dvOffset := c.chunkBuf.Len() + dvSize, err := c.chunkBuf.Write(vals) + if err != nil { + return err + } + + c.chunkMeta = append(c.chunkMeta, MetaData{ + DocNum: docNum, + DocDvOffset: uint64(dvOffset + dvSize), + }) + return nil +} + +// Write commits all the encoded chunked contents to the provided writer. +// +// | ..... data ..... 
| chunk offsets (varints) +// | position of chunk offsets (uint64) | number of offsets (uint64) | +// +func (c *chunkedContentCoder) Write() (int, error) { + var tw int + + if c.final != nil { + // write out the data section first + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsStart := uint64(tw) + + if cap(c.final) < binary.MaxVarintLen64 { + c.final = make([]byte, binary.MaxVarintLen64) + } else { + c.final = c.final[0:binary.MaxVarintLen64] + } + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + // write out the chunk offsets + for _, chunkOffset := range chunkOffsets { + n := binary.PutUvarint(c.final, chunkOffset) + nw, err := c.w.Write(c.final[:n]) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsLen := uint64(tw) - chunkOffsetsStart + + c.final = c.final[0:8] + // write out the length of chunk offsets + binary.BigEndian.PutUint64(c.final, chunkOffsetsLen) + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + // write out the number of chunks + binary.BigEndian.PutUint64(c.final, uint64(len(c.chunkLens))) + nw, err = c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + c.final = c.final[:0] + + return tw, nil +} + +// ReadDocValueBoundary elicits the start, end offsets from a +// metaData header slice +func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = metaHeaders[chunk-1].DocDvOffset + } + return start, metaHeaders[chunk].DocDvOffset +} diff --git a/vendor/github.com/blevesearch/zap/v11/count.go b/vendor/github.com/blevesearch/zap/v11/count.go new file mode 100644 index 0000000..50290f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/count.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "hash/crc32" + "io" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// CountHashWriter is a wrapper around a Writer which counts the number of +// bytes which have been written and computes a crc32 hash +type CountHashWriter struct { + w io.Writer + crc uint32 + n int + s segment.StatsReporter +} + +// NewCountHashWriter returns a CountHashWriter which wraps the provided Writer +func NewCountHashWriter(w io.Writer) *CountHashWriter { + return &CountHashWriter{w: w} +} + +func NewCountHashWriterWithStatsReporter(w io.Writer, s segment.StatsReporter) *CountHashWriter { + return &CountHashWriter{w: w, s: s} +} + +// Write writes the provided bytes to the wrapped writer and counts the bytes +func (c *CountHashWriter) Write(b []byte) (int, error) { + n, err := c.w.Write(b) + c.crc = crc32.Update(c.crc, crc32.IEEETable, b[:n]) + c.n += n + if c.s != nil { + c.s.ReportBytesWritten(uint64(n)) + } + return n, err +} + +// Count returns the number of bytes written +func (c *CountHashWriter) Count() int { + return c.n +} + +// Sum32 returns the CRC-32 hash of the content written to this writer +func (c *CountHashWriter) Sum32() uint32 { + return c.crc +} diff --git a/vendor/github.com/blevesearch/zap/v11/dict.go b/vendor/github.com/blevesearch/zap/v11/dict.go new file mode 100644 index 0000000..ad4a8f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/dict.go @@ -0,0 +1,263 @@ +// Copyright (c) 2017 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" +) + +// Dictionary is the zap representation of the term dictionary +type Dictionary struct { + sb *SegmentBase + field string + fieldID uint16 + fst *vellum.FST + fstReader *vellum.Reader +} + +// PostingsList returns the postings list for the specified term +func (d *Dictionary) PostingsList(term []byte, except *roaring.Bitmap, + prealloc segment.PostingsList) (segment.PostingsList, error) { + var preallocPL *PostingsList + pl, ok := prealloc.(*PostingsList) + if ok && pl != nil { + preallocPL = pl + } + return d.postingsList(term, except, preallocPL) +} + +func (d *Dictionary) postingsList(term []byte, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + if d.fstReader == nil { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + postingsOffset, exists, err := d.fstReader.Get(term) + if err != nil { + return nil, fmt.Errorf("vellum err: %v", err) + } + if !exists { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + return d.postingsListFromOffset(postingsOffset, except, rv) +} + +func (d *Dictionary) 
postingsListFromOffset(postingsOffset uint64, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + rv = d.postingsListInit(rv, except) + + err := rv.read(postingsOffset, d) + if err != nil { + return nil, err + } + + return rv, nil +} + +func (d *Dictionary) postingsListInit(rv *PostingsList, except *roaring.Bitmap) *PostingsList { + if rv == nil || rv == emptyPostingsList { + rv = &PostingsList{} + } else { + postings := rv.postings + if postings != nil { + postings.Clear() + } + + *rv = PostingsList{} // clear the struct + + rv.postings = postings + } + rv.sb = d.sb + rv.except = except + return rv +} + +func (d *Dictionary) Contains(key []byte) (bool, error) { + return d.fst.Contains(key) +} + +// Iterator returns an iterator for this dictionary +func (d *Dictionary) Iterator() segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Iterator(nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// PrefixIterator returns an iterator which only visits terms having the +// the specified prefix +func (d *Dictionary) PrefixIterator(prefix string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + kBeg := []byte(prefix) + kEnd := segment.IncrementBytes(kBeg) + + if d.fst != nil { + itr, err := d.fst.Iterator(kBeg, kEnd) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// RangeIterator returns an iterator which only visits terms between the +// start and end terms. NOTE: bleve.index API specifies the end is inclusive. 
+func (d *Dictionary) RangeIterator(start, end string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + // need to increment the end position to be inclusive + var endBytes []byte + if len(end) > 0 { + endBytes = []byte(end) + if endBytes[len(endBytes)-1] < 0xff { + endBytes[len(endBytes)-1]++ + } else { + endBytes = append(endBytes, 0xff) + } + } + + if d.fst != nil { + itr, err := d.fst.Iterator([]byte(start), endBytes) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// AutomatonIterator returns an iterator which only visits terms +// having the the vellum automaton and start/end key range +func (d *Dictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Search(a, startKeyInclusive, endKeyExclusive) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +func (d *Dictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) segment.DictionaryIterator { + + rv := &DictionaryIterator{ + d: d, + omitCount: !includeCount, + } + + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + rv.err = err + return rv + } + for _, term := range onlyTerms { + err = builder.Insert(term, 0) + if err != nil { + rv.err = err + return rv + } + } + err = builder.Close() + if err != nil { + rv.err = err + return rv + } + + onlyFST, err := vellum.Load(buf.Bytes()) + if err != nil { + rv.err = err + return rv + } + + itr, err := d.fst.Search(onlyFST, nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + + return rv +} + +// DictionaryIterator is an iterator for term dictionary +type DictionaryIterator struct { + d *Dictionary + itr vellum.Iterator + err error + tmp PostingsList + entry 
index.DictEntry + omitCount bool +} + +// Next returns the next entry in the dictionary +func (i *DictionaryIterator) Next() (*index.DictEntry, error) { + if i.err != nil && i.err != vellum.ErrIteratorDone { + return nil, i.err + } else if i.itr == nil || i.err == vellum.ErrIteratorDone { + return nil, nil + } + term, postingsOffset := i.itr.Current() + i.entry.Term = string(term) + if !i.omitCount { + i.err = i.tmp.read(postingsOffset, i.d) + if i.err != nil { + return nil, i.err + } + i.entry.Count = i.tmp.Count() + } + i.err = i.itr.Next() + return &i.entry, nil +} diff --git a/vendor/github.com/blevesearch/zap/v11/docvalues.go b/vendor/github.com/blevesearch/zap/v11/docvalues.go new file mode 100644 index 0000000..2566dc6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/docvalues.go @@ -0,0 +1,307 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/golang/snappy" +) + +var reflectStaticSizedocValueReader int + +func init() { + var dvi docValueReader + reflectStaticSizedocValueReader = int(reflect.TypeOf(dvi).Size()) +} + +type docNumTermsVisitor func(docNum uint64, terms []byte) error + +type docVisitState struct { + dvrs map[uint16]*docValueReader + segment *SegmentBase +} + +type docValueReader struct { + field string + curChunkNum uint64 + chunkOffsets []uint64 + dvDataLoc uint64 + curChunkHeader []MetaData + curChunkData []byte // compressed data cache + uncompressed []byte // temp buf for snappy decompression +} + +func (di *docValueReader) size() int { + return reflectStaticSizedocValueReader + size.SizeOfPtr + + len(di.field) + + len(di.chunkOffsets)*size.SizeOfUint64 + + len(di.curChunkHeader)*reflectStaticSizeMetaData + + len(di.curChunkData) +} + +func (di *docValueReader) cloneInto(rv *docValueReader) *docValueReader { + if rv == nil { + rv = &docValueReader{} + } + + rv.field = di.field + rv.curChunkNum = math.MaxUint64 + rv.chunkOffsets = di.chunkOffsets // immutable, so it's sharable + rv.dvDataLoc = di.dvDataLoc + rv.curChunkHeader = rv.curChunkHeader[:0] + rv.curChunkData = nil + rv.uncompressed = rv.uncompressed[:0] + + return rv +} + +func (di *docValueReader) curChunkNumber() uint64 { + return di.curChunkNum +} + +func (s *SegmentBase) loadFieldDocValueReader(field string, + fieldDvLocStart, fieldDvLocEnd uint64) (*docValueReader, error) { + // get the docValue offset for the given fields + if fieldDvLocStart == fieldNotUninverted { + // no docValues found, nothing to do + return nil, nil + } + + // read the number of chunks, and chunk offsets position + var numChunks, chunkOffsetsPosition uint64 + + if fieldDvLocEnd-fieldDvLocStart > 16 { + numChunks = 
binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-8 : fieldDvLocEnd]) + // read the length of chunk offsets + chunkOffsetsLen := binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-16 : fieldDvLocEnd-8]) + // acquire position of chunk offsets + chunkOffsetsPosition = (fieldDvLocEnd - 16) - chunkOffsetsLen + } else { + return nil, fmt.Errorf("loadFieldDocValueReader: fieldDvLoc too small: %d-%d", fieldDvLocEnd, fieldDvLocStart) + } + + fdvIter := &docValueReader{ + curChunkNum: math.MaxUint64, + field: field, + chunkOffsets: make([]uint64, int(numChunks)), + } + + // read the chunk offsets + var offset uint64 + for i := 0; i < int(numChunks); i++ { + loc, read := binary.Uvarint(s.mem[chunkOffsetsPosition+offset : chunkOffsetsPosition+offset+binary.MaxVarintLen64]) + if read <= 0 { + return nil, fmt.Errorf("corrupted chunk offset during segment load") + } + fdvIter.chunkOffsets[i] = loc + offset += uint64(read) + } + + // set the data offset + fdvIter.dvDataLoc = fieldDvLocStart + + return fdvIter, nil +} + +func (di *docValueReader) loadDvChunk(chunkNumber uint64, s *SegmentBase) error { + // advance to the chunk where the docValues + // reside for the given docNum + destChunkDataLoc, curChunkEnd := di.dvDataLoc, di.dvDataLoc + start, end := readChunkBoundary(int(chunkNumber), di.chunkOffsets) + if start >= end { + di.curChunkHeader = di.curChunkHeader[:0] + di.curChunkData = nil + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil + } + + destChunkDataLoc += start + curChunkEnd += end + + // read the number of docs reside in the chunk + numDocs, read := binary.Uvarint(s.mem[destChunkDataLoc : destChunkDataLoc+binary.MaxVarintLen64]) + if read <= 0 { + return fmt.Errorf("failed to read the chunk") + } + chunkMetaLoc := destChunkDataLoc + uint64(read) + + offset := uint64(0) + if cap(di.curChunkHeader) < int(numDocs) { + di.curChunkHeader = make([]MetaData, int(numDocs)) + } else { + di.curChunkHeader = di.curChunkHeader[:int(numDocs)] + } + for i := 
0; i < int(numDocs); i++ { + di.curChunkHeader[i].DocNum, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + di.curChunkHeader[i].DocDvOffset, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + } + + compressedDataLoc := chunkMetaLoc + offset + dataLength := curChunkEnd - compressedDataLoc + di.curChunkData = s.mem[compressedDataLoc : compressedDataLoc+dataLength] + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil +} + +func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTermsVisitor) error { + for i := 0; i < len(di.chunkOffsets); i++ { + err := di.loadDvChunk(uint64(i), s) + if err != nil { + return err + } + if di.curChunkData == nil || len(di.curChunkHeader) == 0 { + continue + } + + // uncompress the already loaded data + uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + + start := uint64(0) + for _, entry := range di.curChunkHeader { + err = visitor(entry.DocNum, uncompressed[start:entry.DocDvOffset]) + if err != nil { + return err + } + + start = entry.DocDvOffset + } + } + + return nil +} + +func (di *docValueReader) visitDocValues(docNum uint64, + visitor index.DocumentFieldTermVisitor) error { + // binary search the term locations for the docNum + start, end := di.getDocValueLocs(docNum) + if start == math.MaxUint64 || end == math.MaxUint64 || start == end { + return nil + } + + var uncompressed []byte + var err error + // use the uncompressed copy if available + if len(di.uncompressed) > 0 { + uncompressed = di.uncompressed + } else { + // uncompress the already loaded data + uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + } + + // pick the 
terms for the given docNum + uncompressed = uncompressed[start:end] + for { + i := bytes.Index(uncompressed, termSeparatorSplitSlice) + if i < 0 { + break + } + + visitor(di.field, uncompressed[0:i]) + uncompressed = uncompressed[i+1:] + } + + return nil +} + +func (di *docValueReader) getDocValueLocs(docNum uint64) (uint64, uint64) { + i := sort.Search(len(di.curChunkHeader), func(i int) bool { + return di.curChunkHeader[i].DocNum >= docNum + }) + if i < len(di.curChunkHeader) && di.curChunkHeader[i].DocNum == docNum { + return ReadDocValueBoundary(i, di.curChunkHeader) + } + return math.MaxUint64, math.MaxUint64 +} + +// VisitDocumentFieldTerms is an implementation of the +// DocumentFieldTermVisitable interface +func (s *SegmentBase) VisitDocumentFieldTerms(localDocNum uint64, fields []string, + visitor index.DocumentFieldTermVisitor, dvsIn segment.DocVisitState) ( + segment.DocVisitState, error) { + dvs, ok := dvsIn.(*docVisitState) + if !ok || dvs == nil { + dvs = &docVisitState{} + } else { + if dvs.segment != s { + dvs.segment = s + dvs.dvrs = nil + } + } + + var fieldIDPlus1 uint16 + if dvs.dvrs == nil { + dvs.dvrs = make(map[uint16]*docValueReader, len(fields)) + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvIter, exists := s.fieldDvReaders[fieldID]; exists && + dvIter != nil { + dvs.dvrs[fieldID] = dvIter.cloneInto(dvs.dvrs[fieldID]) + } + } + } + + // find the chunkNumber where the docValues are stored + docInChunk := localDocNum / uint64(s.chunkFactor) + var dvr *docValueReader + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvr, ok = dvs.dvrs[fieldID]; ok && dvr != nil { + // check if the chunk is already loaded + if docInChunk != dvr.curChunkNumber() { + err := dvr.loadDvChunk(docInChunk, s) + if err != nil { + return dvs, err + } + } + + _ = dvr.visitDocValues(localDocNum, visitor) 
+ } + } + return dvs, nil +} + +// VisitableDocValueFields returns the list of fields with +// persisted doc value terms ready to be visitable using the +// VisitDocumentFieldTerms method. +func (s *SegmentBase) VisitableDocValueFields() ([]string, error) { + return s.fieldDvNames, nil +} diff --git a/vendor/github.com/blevesearch/zap/v11/enumerator.go b/vendor/github.com/blevesearch/zap/v11/enumerator.go new file mode 100644 index 0000000..cd6ff73 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/enumerator.go @@ -0,0 +1,126 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + + "github.com/couchbase/vellum" +) + +// enumerator provides an ordered traversal of multiple vellum +// iterators. Like JOIN of iterators, the enumerator produces a +// sequence of (key, iteratorIndex, value) tuples, sorted by key ASC, +// then iteratorIndex ASC, where the same key might be seen or +// repeated across multiple child iterators. 
+type enumerator struct { + itrs []vellum.Iterator + currKs [][]byte + currVs []uint64 + + lowK []byte + lowIdxs []int + lowCurr int +} + +// newEnumerator returns a new enumerator over the vellum Iterators +func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) { + rv := &enumerator{ + itrs: itrs, + currKs: make([][]byte, len(itrs)), + currVs: make([]uint64, len(itrs)), + lowIdxs: make([]int, 0, len(itrs)), + } + for i, itr := range rv.itrs { + rv.currKs[i], rv.currVs[i] = itr.Current() + } + rv.updateMatches(false) + if rv.lowK == nil && len(rv.lowIdxs) == 0 { + return rv, vellum.ErrIteratorDone + } + return rv, nil +} + +// updateMatches maintains the low key matches based on the currKs +func (m *enumerator) updateMatches(skipEmptyKey bool) { + m.lowK = nil + m.lowIdxs = m.lowIdxs[:0] + m.lowCurr = 0 + + for i, key := range m.currKs { + if (key == nil && m.currVs[i] == 0) || // in case of empty iterator + (len(key) == 0 && skipEmptyKey) { // skip empty keys + continue + } + + cmp := bytes.Compare(key, m.lowK) + if cmp < 0 || len(m.lowIdxs) == 0 { + // reached a new low + m.lowK = key + m.lowIdxs = m.lowIdxs[:0] + m.lowIdxs = append(m.lowIdxs, i) + } else if cmp == 0 { + m.lowIdxs = append(m.lowIdxs, i) + } + } +} + +// Current returns the enumerator's current key, iterator-index, and +// value. If the enumerator is not pointing at a valid value (because +// Next returned an error previously), Current will return nil,0,0. +func (m *enumerator) Current() ([]byte, int, uint64) { + var i int + var v uint64 + if m.lowCurr < len(m.lowIdxs) { + i = m.lowIdxs[m.lowCurr] + v = m.currVs[i] + } + return m.lowK, i, v +} + +// Next advances the enumerator to the next key/iterator/value result, +// else vellum.ErrIteratorDone is returned. 
+func (m *enumerator) Next() error { + m.lowCurr += 1 + if m.lowCurr >= len(m.lowIdxs) { + // move all the current low iterators forwards + for _, vi := range m.lowIdxs { + err := m.itrs[vi].Next() + if err != nil && err != vellum.ErrIteratorDone { + return err + } + m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current() + } + // can skip any empty keys encountered at this point + m.updateMatches(true) + } + if m.lowK == nil && len(m.lowIdxs) == 0 { + return vellum.ErrIteratorDone + } + return nil +} + +// Close all the underlying Iterators. The first error, if any, will +// be returned. +func (m *enumerator) Close() error { + var rv error + for _, itr := range m.itrs { + err := itr.Close() + if rv == nil { + rv = err + } + } + return rv +} diff --git a/vendor/github.com/blevesearch/zap/v11/intcoder.go b/vendor/github.com/blevesearch/zap/v11/intcoder.go new file mode 100644 index 0000000..571d06e --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/intcoder.go @@ -0,0 +1,172 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "io" +) + +type chunkedIntCoder struct { + final []byte + chunkSize uint64 + chunkBuf bytes.Buffer + chunkLens []uint64 + currChunk uint64 + + buf []byte +} + +// newChunkedIntCoder returns a new chunk int coder which packs data into +// chunks based on the provided chunkSize and supports up to the specified +// maxDocNum +func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedIntCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + final: make([]byte, 0, 64), + } + + return rv +} + +// Reset lets you reuse this chunked int coder. buffers are reset and reused +// from previous use. you cannot change the chunk size or max doc num. +func (c *chunkedIntCoder) Reset() { + c.final = c.final[:0] + c.chunkBuf.Reset() + c.currChunk = 0 + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } +} + +// Add encodes the provided integers into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. +func (c *chunkedIntCoder) Add(docNum uint64, vals ...uint64) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + if len(c.buf) < binary.MaxVarintLen64 { + c.buf = make([]byte, binary.MaxVarintLen64) + } + + for _, val := range vals { + wb := binary.PutUvarint(c.buf, val) + _, err := c.chunkBuf.Write(c.buf[:wb]) + if err != nil { + return err + } + } + + return nil +} + +func (c *chunkedIntCoder) AddBytes(docNum uint64, buf []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + _, err := c.chunkBuf.Write(buf) + return err +} + +// Close indicates you are done calling Add() this allows the final chunk +// to be encoded. 
+func (c *chunkedIntCoder) Close() { + encodingBytes := c.chunkBuf.Bytes() + c.chunkLens[c.currChunk] = uint64(len(encodingBytes)) + c.final = append(c.final, encodingBytes...) + c.currChunk = uint64(cap(c.chunkLens)) // sentinel to detect double close +} + +// Write commits all the encoded chunked integers to the provided writer. +func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { + bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) + if len(c.buf) < bufNeeded { + c.buf = make([]byte, bufNeeded) + } + buf := c.buf + + // convert the chunk lengths into chunk offsets + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + + // write out the number of chunks & each chunk offsets + n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) + for _, chunkOffset := range chunkOffsets { + n += binary.PutUvarint(buf[n:], chunkOffset) + } + + tw, err := w.Write(buf[:n]) + if err != nil { + return tw, err + } + + // write out the data + nw, err := w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + return tw, nil +} + +func (c *chunkedIntCoder) FinalSize() int { + return len(c.final) +} + +// modifyLengthsToEndOffsets converts the chunk length array +// to a chunk offset array. The readChunkBoundary +// will figure out the start and end of every chunk from +// these offsets. Starting offset of i'th index is stored +// in i-1'th position except for 0'th index and ending offset +// is stored at i'th index position. +// For 0'th element, starting position is always zero. 
+// eg: +// Lens -> 5 5 5 5 => 5 10 15 20 +// Lens -> 0 5 0 5 => 0 5 5 10 +// Lens -> 0 0 0 5 => 0 0 0 5 +// Lens -> 5 0 0 0 => 5 5 5 5 +// Lens -> 0 5 0 0 => 0 5 5 5 +// Lens -> 0 0 5 0 => 0 0 5 5 +func modifyLengthsToEndOffsets(lengths []uint64) []uint64 { + var runningOffset uint64 + var index, i int + for i = 1; i <= len(lengths); i++ { + runningOffset += lengths[i-1] + lengths[index] = runningOffset + index++ + } + return lengths +} + +func readChunkBoundary(chunk int, offsets []uint64) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = offsets[chunk-1] + } + return start, offsets[chunk] +} diff --git a/vendor/github.com/blevesearch/zap/v11/merge.go b/vendor/github.com/blevesearch/zap/v11/merge.go new file mode 100644 index 0000000..0d3f545 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/merge.go @@ -0,0 +1,860 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bufio" + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "sort" + + "github.com/RoaringBitmap/roaring" + seg "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var DefaultFileMergerBufferSize = 1024 * 1024 + +const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc + +// Merge takes a slice of segments and bit masks describing which +// documents may be dropped, and creates a new segment containing the +// remaining data. This new segment is built at the specified path. +func (*ZapPlugin) Merge(segments []seg.Segment, drops []*roaring.Bitmap, path string, + closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + + segmentBases := make([]*SegmentBase, len(segments)) + for segmenti, segment := range segments { + switch segmentx := segment.(type) { + case *Segment: + segmentBases[segmenti] = &segmentx.SegmentBase + case *SegmentBase: + segmentBases[segmenti] = segmentx + default: + panic(fmt.Sprintf("oops, unexpected segment type: %T", segment)) + } + } + return mergeSegmentBases(segmentBases, drops, path, defaultChunkFactor, closeCh, s) +} + +func mergeSegmentBases(segmentBases []*SegmentBase, drops []*roaring.Bitmap, path string, + chunkFactor uint32, closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return nil, 0, err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + // buffer the output + br := bufio.NewWriterSize(f, DefaultFileMergerBufferSize) + + // wrap it for counting (tracking offsets) + cr := NewCountHashWriterWithStatsReporter(br, s) + + newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, _, _, _, err := + MergeToWriter(segmentBases, drops, chunkFactor, cr, closeCh) + if err != nil { + cleanup() + return nil, 0, err + } + + err = 
persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, + docValueOffset, chunkFactor, cr.Sum32(), cr) + if err != nil { + cleanup() + return nil, 0, err + } + + err = br.Flush() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Sync() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Close() + if err != nil { + cleanup() + return nil, 0, err + } + + return newDocNums, uint64(cr.Count()), nil +} + +func MergeToWriter(segments []*SegmentBase, drops []*roaring.Bitmap, + chunkFactor uint32, cr *CountHashWriter, closeCh chan struct{}) ( + newDocNums [][]uint64, + numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + dictLocs []uint64, fieldsInv []string, fieldsMap map[string]uint16, + err error) { + docValueOffset = uint64(fieldNotUninverted) + + var fieldsSame bool + fieldsSame, fieldsInv = mergeFields(segments) + fieldsMap = mapFields(fieldsInv) + + numDocs = computeNewDocCount(segments, drops) + + if isClosed(closeCh) { + return nil, 0, 0, 0, 0, nil, nil, nil, seg.ErrClosed + } + + if numDocs > 0 { + storedIndexOffset, newDocNums, err = mergeStoredAndRemap(segments, drops, + fieldsMap, fieldsInv, fieldsSame, numDocs, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + dictLocs, docValueOffset, err = persistMergedRest(segments, drops, + fieldsInv, fieldsMap, fieldsSame, + newDocNums, numDocs, chunkFactor, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + } else { + dictLocs = make([]uint64, len(fieldsInv)) + } + + fieldsIndexOffset, err = persistFields(fieldsInv, cr, dictLocs) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + return newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, dictLocs, fieldsInv, fieldsMap, nil +} + +// mapFields takes the fieldsInv list and returns a map of fieldName +// to fieldID+1 +func mapFields(fields []string) map[string]uint16 { + rv := make(map[string]uint16, len(fields)) 
+ for i, fieldName := range fields { + rv[fieldName] = uint16(i) + 1 + } + return rv +} + +// computeNewDocCount determines how many documents will be in the newly +// merged segment when obsoleted docs are dropped +func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { + var newDocCount uint64 + for segI, segment := range segments { + newDocCount += segment.numDocs + if drops[segI] != nil { + newDocCount -= drops[segI].GetCardinality() + } + } + return newDocCount +} + +func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap, + fieldsInv []string, fieldsMap map[string]uint16, fieldsSame bool, + newDocNumsIn [][]uint64, newSegDocCount uint64, chunkFactor uint32, + w *CountHashWriter, closeCh chan struct{}) ([]uint64, uint64, error) { + + var bufMaxVarintLen64 []byte = make([]byte, binary.MaxVarintLen64) + var bufLoc []uint64 + + var postings *PostingsList + var postItr *PostingsIterator + + rv := make([]uint64, len(fieldsInv)) + fieldDvLocsStart := make([]uint64, len(fieldsInv)) + fieldDvLocsEnd := make([]uint64, len(fieldsInv)) + + tfEncoder := newChunkedIntCoder(uint64(chunkFactor), newSegDocCount-1) + locEncoder := newChunkedIntCoder(uint64(chunkFactor), newSegDocCount-1) + + var vellumBuf bytes.Buffer + newVellum, err := vellum.New(&vellumBuf, nil) + if err != nil { + return nil, 0, err + } + + newRoaring := roaring.NewBitmap() + + // for each field + for fieldID, fieldName := range fieldsInv { + + // collect FST iterators from all active segments for this field + var newDocNums [][]uint64 + var drops []*roaring.Bitmap + var dicts []*Dictionary + var itrs []vellum.Iterator + + var segmentsInFocus []*SegmentBase + + for segmentI, segment := range segments { + + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + dict, err2 := segment.dictionary(fieldName) + if err2 != nil { + return nil, 0, err2 + } + if dict != nil && dict.fst != nil { + itr, err2 := 
dict.fst.Iterator(nil, nil) + if err2 != nil && err2 != vellum.ErrIteratorDone { + return nil, 0, err2 + } + if itr != nil { + newDocNums = append(newDocNums, newDocNumsIn[segmentI]) + if dropsIn[segmentI] != nil && !dropsIn[segmentI].IsEmpty() { + drops = append(drops, dropsIn[segmentI]) + } else { + drops = append(drops, nil) + } + dicts = append(dicts, dict) + itrs = append(itrs, itr) + segmentsInFocus = append(segmentsInFocus, segment) + } + } + } + + var prevTerm []byte + + newRoaring.Clear() + + var lastDocNum, lastFreq, lastNorm uint64 + + // determines whether to use "1-hit" encoding optimization + // when a term appears in only 1 doc, with no loc info, + // has freq of 1, and the docNum fits into 31-bits + use1HitEncoding := func(termCardinality uint64) (bool, uint64, uint64) { + if termCardinality == uint64(1) && locEncoder.FinalSize() <= 0 { + docNum := uint64(newRoaring.Minimum()) + if under32Bits(docNum) && docNum == lastDocNum && lastFreq == 1 { + return true, docNum, lastNorm + } + } + return false, 0, 0 + } + + finishTerm := func(term []byte) error { + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := writePostings(newRoaring, + tfEncoder, locEncoder, use1HitEncoding, w, bufMaxVarintLen64) + if err != nil { + return err + } + + if postingsOffset > 0 { + err = newVellum.Insert(term, postingsOffset) + if err != nil { + return err + } + } + + newRoaring.Clear() + + tfEncoder.Reset() + locEncoder.Reset() + + lastDocNum = 0 + lastFreq = 0 + lastNorm = 0 + + return nil + } + + enumerator, err := newEnumerator(itrs) + + for err == nil { + term, itrI, postingsOffset := enumerator.Current() + + if !bytes.Equal(prevTerm, term) { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + // if the term changed, write out the info collected + // for the previous term + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + } + + postings, err = dicts[itrI].postingsListFromOffset( + 
postingsOffset, drops[itrI], postings) + if err != nil { + return nil, 0, err + } + + postItr = postings.iterator(true, true, true, postItr) + + if fieldsSame { + // can optimize by copying freq/norm/loc bytes directly + lastDocNum, lastFreq, lastNorm, err = mergeTermFreqNormLocsByCopying( + term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder) + } else { + lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs( + fieldsMap, term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder, bufLoc) + } + if err != nil { + return nil, 0, err + } + + prevTerm = prevTerm[:0] // copy to prevTerm in case Next() reuses term mem + prevTerm = append(prevTerm, term...) + + err = enumerator.Next() + } + if err != vellum.ErrIteratorDone { + return nil, 0, err + } + + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + + dictOffset := uint64(w.Count()) + + err = newVellum.Close() + if err != nil { + return nil, 0, err + } + vellumData := vellumBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(bufMaxVarintLen64, uint64(len(vellumData))) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return nil, 0, err + } + + // write this vellum to disk + _, err = w.Write(vellumData) + if err != nil { + return nil, 0, err + } + + rv[fieldID] = dictOffset + + // get the field doc value offset (start) + fieldDvLocsStart[fieldID] = uint64(w.Count()) + + // update the field doc values + fdvEncoder := newChunkedContentCoder(uint64(chunkFactor), newSegDocCount-1, w, true) + + fdvReadersAvailable := false + var dvIterClone *docValueReader + for segmentI, segment := range segmentsInFocus { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + fieldIDPlus1 := uint16(segment.fieldsMap[fieldName]) + if dvIter, exists := segment.fieldDvReaders[fieldIDPlus1-1]; exists && + dvIter != nil { + fdvReadersAvailable = true + dvIterClone = 
dvIter.cloneInto(dvIterClone) + err = dvIterClone.iterateAllDocValues(segment, func(docNum uint64, terms []byte) error { + if newDocNums[segmentI][docNum] == docDropped { + return nil + } + err := fdvEncoder.Add(newDocNums[segmentI][docNum], terms) + if err != nil { + return err + } + return nil + }) + if err != nil { + return nil, 0, err + } + } + } + + if fdvReadersAvailable { + err = fdvEncoder.Close() + if err != nil { + return nil, 0, err + } + + // persist the doc value details for this field + _, err = fdvEncoder.Write() + if err != nil { + return nil, 0, err + } + + // get the field doc value offset (end) + fieldDvLocsEnd[fieldID] = uint64(w.Count()) + } else { + fieldDvLocsStart[fieldID] = fieldNotUninverted + fieldDvLocsEnd[fieldID] = fieldNotUninverted + } + + // reset vellum buffer and vellum builder + vellumBuf.Reset() + err = newVellum.Reset(&vellumBuf) + if err != nil { + return nil, 0, err + } + } + + fieldDvLocsOffset := uint64(w.Count()) + + buf := bufMaxVarintLen64 + for i := 0; i < len(fieldDvLocsStart); i++ { + n := binary.PutUvarint(buf, fieldDvLocsStart[i]) + _, err := w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + n = binary.PutUvarint(buf, fieldDvLocsEnd[i]) + _, err = w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + } + + return rv, fieldDvLocsOffset, nil +} + +func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder, bufLoc []uint64) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, bufLocOut []uint64, err error) { + next, err := postItr.Next() + for next != nil && err == nil { + hitNewDocNum := newDocNums[next.Number()] + if hitNewDocNum == docDropped { + return 0, 0, 0, nil, fmt.Errorf("see hit with dropped docNum") + } + + newRoaring.Add(uint32(hitNewDocNum)) + + nextFreq := next.Frequency() + nextNorm := uint64(math.Float32bits(float32(next.Norm()))) + + 
locs := next.Locations() + + err = tfEncoder.Add(hitNewDocNum, + encodeFreqHasLocs(nextFreq, len(locs) > 0), nextNorm) + if err != nil { + return 0, 0, 0, nil, err + } + + if len(locs) > 0 { + numBytesLocs := 0 + for _, loc := range locs { + ap := loc.ArrayPositions() + numBytesLocs += totalUvarintBytes(uint64(fieldsMap[loc.Field()]-1), + loc.Pos(), loc.Start(), loc.End(), uint64(len(ap)), ap) + } + + err = locEncoder.Add(hitNewDocNum, uint64(numBytesLocs)) + if err != nil { + return 0, 0, 0, nil, err + } + + for _, loc := range locs { + ap := loc.ArrayPositions() + if cap(bufLoc) < 5+len(ap) { + bufLoc = make([]uint64, 0, 5+len(ap)) + } + args := bufLoc[0:5] + args[0] = uint64(fieldsMap[loc.Field()] - 1) + args[1] = loc.Pos() + args[2] = loc.Start() + args[3] = loc.End() + args[4] = uint64(len(ap)) + args = append(args, ap...) + err = locEncoder.Add(hitNewDocNum, args...) + if err != nil { + return 0, 0, 0, nil, err + } + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + next, err = postItr.Next() + } + + return lastDocNum, lastFreq, lastNorm, bufLoc, err +} + +func mergeTermFreqNormLocsByCopying(term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, err error) { + nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err := + postItr.nextBytes() + for err == nil && len(nextFreqNormBytes) > 0 { + hitNewDocNum := newDocNums[nextDocNum] + if hitNewDocNum == docDropped { + return 0, 0, 0, fmt.Errorf("see hit with dropped doc num") + } + + newRoaring.Add(uint32(hitNewDocNum)) + err = tfEncoder.AddBytes(hitNewDocNum, nextFreqNormBytes) + if err != nil { + return 0, 0, 0, err + } + + if len(nextLocBytes) > 0 { + err = locEncoder.AddBytes(hitNewDocNum, nextLocBytes) + if err != nil { + return 0, 0, 0, err + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = 
nextNorm + + nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err = + postItr.nextBytes() + } + + return lastDocNum, lastFreq, lastNorm, err +} + +func writePostings(postings *roaring.Bitmap, tfEncoder, locEncoder *chunkedIntCoder, + use1HitEncoding func(uint64) (bool, uint64, uint64), + w *CountHashWriter, bufMaxVarintLen64 []byte) ( + offset uint64, err error) { + termCardinality := postings.GetCardinality() + if termCardinality <= 0 { + return 0, nil + } + + if use1HitEncoding != nil { + encodeAs1Hit, docNum1Hit, normBits1Hit := use1HitEncoding(termCardinality) + if encodeAs1Hit { + return FSTValEncode1Hit(docNum1Hit, normBits1Hit), nil + } + } + + tfOffset := uint64(w.Count()) + _, err = tfEncoder.Write(w) + if err != nil { + return 0, err + } + + locOffset := uint64(w.Count()) + _, err = locEncoder.Write(w) + if err != nil { + return 0, err + } + + postingsOffset := uint64(w.Count()) + + n := binary.PutUvarint(bufMaxVarintLen64, tfOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + n = binary.PutUvarint(bufMaxVarintLen64, locOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + _, err = writeRoaringWithLen(postings, w, bufMaxVarintLen64) + if err != nil { + return 0, err + } + + return postingsOffset, nil +} + +type varintEncoder func(uint64) (int, error) + +func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap, + fieldsMap map[string]uint16, fieldsInv []string, fieldsSame bool, newSegDocCount uint64, + w *CountHashWriter, closeCh chan struct{}) (uint64, [][]uint64, error) { + var rv [][]uint64 // The remapped or newDocNums for each segment. 
+ + var newDocNum uint64 + + var curr int + var data, compressed []byte + var metaBuf bytes.Buffer + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return metaBuf.Write(varBuf[:wb]) + } + + vals := make([][][]byte, len(fieldsInv)) + typs := make([][]byte, len(fieldsInv)) + poss := make([][][]uint64, len(fieldsInv)) + + var posBuf []uint64 + + docNumOffsets := make([]uint64, newSegDocCount) + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + + // for each segment + for segI, segment := range segments { + // check for the closure in meantime + if isClosed(closeCh) { + return 0, nil, seg.ErrClosed + } + + segNewDocNums := make([]uint64, segment.numDocs) + + dropsI := drops[segI] + + // optimize when the field mapping is the same across all + // segments and there are no deletions, via byte-copying + // of stored docs bytes directly to the writer + if fieldsSame && (dropsI == nil || dropsI.GetCardinality() == 0) { + err := segment.copyStoredDocs(newDocNum, docNumOffsets, w) + if err != nil { + return 0, nil, err + } + + for i := uint64(0); i < segment.numDocs; i++ { + segNewDocNums[i] = newDocNum + newDocNum++ + } + rv = append(rv, segNewDocNums) + + continue + } + + // for each doc num + for docNum := uint64(0); docNum < segment.numDocs; docNum++ { + // TODO: roaring's API limits docNums to 32-bits? 
+ if dropsI != nil && dropsI.Contains(uint32(docNum)) { + segNewDocNums[docNum] = docDropped + continue + } + + segNewDocNums[docNum] = newDocNum + + curr = 0 + metaBuf.Reset() + data = data[:0] + + posTemp := posBuf + + // collect all the data + for i := 0; i < len(fieldsInv); i++ { + vals[i] = vals[i][:0] + typs[i] = typs[i][:0] + poss[i] = poss[i][:0] + } + err := segment.visitDocument(vdc, docNum, func(field string, typ byte, value []byte, pos []uint64) bool { + fieldID := int(fieldsMap[field]) - 1 + vals[fieldID] = append(vals[fieldID], value) + typs[fieldID] = append(typs[fieldID], typ) + + // copy array positions to preserve them beyond the scope of this callback + var curPos []uint64 + if len(pos) > 0 { + if cap(posTemp) < len(pos) { + posBuf = make([]uint64, len(pos)*len(fieldsInv)) + posTemp = posBuf + } + curPos = posTemp[0:len(pos)] + copy(curPos, pos) + posTemp = posTemp[len(pos):] + } + poss[fieldID] = append(poss[fieldID], curPos) + + return true + }) + if err != nil { + return 0, nil, err + } + + // _id field special case optimizes ExternalID() lookups + idFieldVal := vals[uint16(0)][0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, nil, err + } + + // now walk the non-"_id" fields in order + for fieldID := 1; fieldID < len(fieldsInv); fieldID++ { + storedFieldValues := vals[fieldID] + + stf := typs[fieldID] + spf := poss[fieldID] + + var err2 error + curr, data, err2 = persistStoredFieldValues(fieldID, + storedFieldValues, stf, spf, curr, metaEncode, data) + if err2 != nil { + return 0, nil, err2 + } + } + + metaBytes := metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + // record where we're about to start writing + docNumOffsets[newDocNum] = uint64(w.Count()) + + // write out the meta len and compressed data len + _, err = writeUvarints(w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, nil, err + } + // now write the meta + _, err 
= w.Write(metaBytes) + if err != nil { + return 0, nil, err + } + // now write the _id field val (counted as part of the 'compressed' data) + _, err = w.Write(idFieldVal) + if err != nil { + return 0, nil, err + } + // now write the compressed data + _, err = w.Write(compressed) + if err != nil { + return 0, nil, err + } + + newDocNum++ + } + + rv = append(rv, segNewDocNums) + } + + // return value is the start of the stored index + storedIndexOffset := uint64(w.Count()) + + // now write out the stored doc index + for _, docNumOffset := range docNumOffsets { + err := binary.Write(w, binary.BigEndian, docNumOffset) + if err != nil { + return 0, nil, err + } + } + + return storedIndexOffset, rv, nil +} + +// copyStoredDocs writes out a segment's stored doc info, optimized by +// using a single Write() call for the entire set of bytes. The +// newDocNumOffsets is filled with the new offsets for each doc. +func (s *SegmentBase) copyStoredDocs(newDocNum uint64, newDocNumOffsets []uint64, + w *CountHashWriter) error { + if s.numDocs <= 0 { + return nil + } + + indexOffset0, storedOffset0, _, _, _ := + s.getDocStoredOffsets(0) // the segment's first doc + + indexOffsetN, storedOffsetN, readN, metaLenN, dataLenN := + s.getDocStoredOffsets(s.numDocs - 1) // the segment's last doc + + storedOffset0New := uint64(w.Count()) + + storedBytes := s.mem[storedOffset0 : storedOffsetN+readN+metaLenN+dataLenN] + _, err := w.Write(storedBytes) + if err != nil { + return err + } + + // remap the storedOffset's for the docs into new offsets relative + // to storedOffset0New, filling the given docNumOffsetsOut array + for indexOffset := indexOffset0; indexOffset <= indexOffsetN; indexOffset += 8 { + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + storedOffsetNew := storedOffset - storedOffset0 + storedOffset0New + newDocNumOffsets[newDocNum] = storedOffsetNew + newDocNum += 1 + } + + return nil +} + +// mergeFields builds a unified list of fields used across 
all the +// input segments, and computes whether the fields are the same across +// segments (which depends on fields to be sorted in the same way +// across segments) +func mergeFields(segments []*SegmentBase) (bool, []string) { + fieldsSame := true + + var segment0Fields []string + if len(segments) > 0 { + segment0Fields = segments[0].Fields() + } + + fieldsExist := map[string]struct{}{} + for _, segment := range segments { + fields := segment.Fields() + for fieldi, field := range fields { + fieldsExist[field] = struct{}{} + if len(segment0Fields) != len(fields) || segment0Fields[fieldi] != field { + fieldsSame = false + } + } + } + + rv := make([]string, 0, len(fieldsExist)) + // ensure _id stays first + rv = append(rv, "_id") + for k := range fieldsExist { + if k != "_id" { + rv = append(rv, k) + } + } + + sort.Strings(rv[1:]) // leave _id as first + + return fieldsSame, rv +} + +func isClosed(closeCh chan struct{}) bool { + select { + case <-closeCh: + return true + default: + return false + } +} diff --git a/vendor/github.com/blevesearch/zap/v11/new.go b/vendor/github.com/blevesearch/zap/v11/new.go new file mode 100644 index 0000000..6c75f2f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/new.go @@ -0,0 +1,847 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "math" + "sort" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var NewSegmentBufferNumResultsBump int = 100 +var NewSegmentBufferNumResultsFactor float64 = 1.0 +var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0 + +// ValidateDocFields can be set by applications to perform additional checks +// on fields in a document being added to a new segment, by default it does +// nothing. +// This API is experimental and may be removed at any time. +var ValidateDocFields = func(field document.Field) error { + return nil +} + +var defaultChunkFactor uint32 = 1024 + +// AnalysisResultsToSegmentBase produces an in-memory zap-encoded +// SegmentBase from analysis results +func (z *ZapPlugin) New(results []*index.AnalysisResult) ( + segment.Segment, uint64, error) { + return z.newWithChunkFactor(results, defaultChunkFactor) +} + +func (*ZapPlugin) newWithChunkFactor(results []*index.AnalysisResult, + chunkFactor uint32) (segment.Segment, uint64, error) { + s := interimPool.Get().(*interim) + + var br bytes.Buffer + if s.lastNumDocs > 0 { + // use previous results to initialize the buf with an estimate + // size, but note that the interim instance comes from a + // global interimPool, so multiple scorch instances indexing + // different docs can lead to low quality estimates + estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * + NewSegmentBufferNumResultsFactor) + estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * + NewSegmentBufferAvgBytesPerDocFactor) + br.Grow(estimateAvgBytesPerDoc * estimateNumResults) + } + + s.results = results + s.chunkFactor = chunkFactor + s.w = NewCountHashWriter(&br) + + storedIndexOffset, 
fieldsIndexOffset, fdvIndexOffset, dictOffsets, + err := s.convert() + if err != nil { + return nil, uint64(0), err + } + + sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkFactor, + s.FieldsMap, s.FieldsInv, uint64(len(results)), + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) + + if err == nil && s.reset() == nil { + s.lastNumDocs = len(results) + s.lastOutSize = len(br.Bytes()) + interimPool.Put(s) + } + + return sb, uint64(len(br.Bytes())), err +} + +var interimPool = sync.Pool{New: func() interface{} { return &interim{} }} + +// interim holds temporary working data used while converting from +// analysis results to a zap-encoded segment +type interim struct { + results []*index.AnalysisResult + + chunkFactor uint32 + + w *CountHashWriter + + // FieldsMap adds 1 to field id to avoid zero value issues + // name -> field id + 1 + FieldsMap map[string]uint16 + + // FieldsInv is the inverse of FieldsMap + // field id -> name + FieldsInv []string + + // Term dictionaries for each field + // field id -> term -> postings list id + 1 + Dicts []map[string]uint64 + + // Terms for each field, where terms are sorted ascending + // field id -> []term + DictKeys [][]string + + // Fields whose IncludeDocValues is true + // field id -> bool + IncludeDocValues []bool + + // postings id -> bitmap of docNums + Postings []*roaring.Bitmap + + // postings id -> freq/norm's, one for each docNum in postings + FreqNorms [][]interimFreqNorm + freqNormsBacking []interimFreqNorm + + // postings id -> locs, one for each freq + Locs [][]interimLoc + locsBacking []interimLoc + + numTermsPerPostingsList []int // key is postings list id + numLocsPerPostingsList []int // key is postings list id + + builder *vellum.Builder + builderBuf bytes.Buffer + + metaBuf bytes.Buffer + + tmp0 []byte + tmp1 []byte + + lastNumDocs int + lastOutSize int +} + +func (s *interim) reset() (err error) { + s.results = nil + s.chunkFactor = 0 + s.w = nil + s.FieldsMap = nil + s.FieldsInv 
= nil + for i := range s.Dicts { + s.Dicts[i] = nil + } + s.Dicts = s.Dicts[:0] + for i := range s.DictKeys { + s.DictKeys[i] = s.DictKeys[i][:0] + } + s.DictKeys = s.DictKeys[:0] + for i := range s.IncludeDocValues { + s.IncludeDocValues[i] = false + } + s.IncludeDocValues = s.IncludeDocValues[:0] + for _, idn := range s.Postings { + idn.Clear() + } + s.Postings = s.Postings[:0] + s.FreqNorms = s.FreqNorms[:0] + for i := range s.freqNormsBacking { + s.freqNormsBacking[i] = interimFreqNorm{} + } + s.freqNormsBacking = s.freqNormsBacking[:0] + s.Locs = s.Locs[:0] + for i := range s.locsBacking { + s.locsBacking[i] = interimLoc{} + } + s.locsBacking = s.locsBacking[:0] + s.numTermsPerPostingsList = s.numTermsPerPostingsList[:0] + s.numLocsPerPostingsList = s.numLocsPerPostingsList[:0] + s.builderBuf.Reset() + if s.builder != nil { + err = s.builder.Reset(&s.builderBuf) + } + s.metaBuf.Reset() + s.tmp0 = s.tmp0[:0] + s.tmp1 = s.tmp1[:0] + s.lastNumDocs = 0 + s.lastOutSize = 0 + + return err +} + +func (s *interim) grabBuf(size int) []byte { + buf := s.tmp0 + if cap(buf) < size { + buf = make([]byte, size) + s.tmp0 = buf + } + return buf[0:size] +} + +type interimStoredField struct { + vals [][]byte + typs []byte + arrayposs [][]uint64 // array positions +} + +type interimFreqNorm struct { + freq uint64 + norm float32 + numLocs int +} + +type interimLoc struct { + fieldID uint16 + pos uint64 + start uint64 + end uint64 + arrayposs []uint64 +} + +func (s *interim) convert() (uint64, uint64, uint64, []uint64, error) { + s.FieldsMap = map[string]uint16{} + + s.getOrDefineField("_id") // _id field is fieldID 0 + + for _, result := range s.results { + for _, field := range result.Document.CompositeFields { + s.getOrDefineField(field.Name()) + } + for _, field := range result.Document.Fields { + s.getOrDefineField(field.Name()) + } + } + + sort.Strings(s.FieldsInv[1:]) // keep _id as first field + + for fieldID, fieldName := range s.FieldsInv { + s.FieldsMap[fieldName] = 
uint16(fieldID + 1) + } + + if cap(s.IncludeDocValues) >= len(s.FieldsInv) { + s.IncludeDocValues = s.IncludeDocValues[:len(s.FieldsInv)] + } else { + s.IncludeDocValues = make([]bool, len(s.FieldsInv)) + } + + s.prepareDicts() + + for _, dict := range s.DictKeys { + sort.Strings(dict) + } + + s.processDocuments() + + storedIndexOffset, err := s.writeStoredFields() + if err != nil { + return 0, 0, 0, nil, err + } + + var fdvIndexOffset uint64 + var dictOffsets []uint64 + + if len(s.results) > 0 { + fdvIndexOffset, dictOffsets, err = s.writeDicts() + if err != nil { + return 0, 0, 0, nil, err + } + } else { + dictOffsets = make([]uint64, len(s.FieldsInv)) + } + + fieldsIndexOffset, err := persistFields(s.FieldsInv, s.w, dictOffsets) + if err != nil { + return 0, 0, 0, nil, err + } + + return storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, nil +} + +func (s *interim) getOrDefineField(fieldName string) int { + fieldIDPlus1, exists := s.FieldsMap[fieldName] + if !exists { + fieldIDPlus1 = uint16(len(s.FieldsInv) + 1) + s.FieldsMap[fieldName] = fieldIDPlus1 + s.FieldsInv = append(s.FieldsInv, fieldName) + + s.Dicts = append(s.Dicts, make(map[string]uint64)) + + n := len(s.DictKeys) + if n < cap(s.DictKeys) { + s.DictKeys = s.DictKeys[:n+1] + s.DictKeys[n] = s.DictKeys[n][:0] + } else { + s.DictKeys = append(s.DictKeys, []string(nil)) + } + } + + return int(fieldIDPlus1 - 1) +} + +// fill Dicts and DictKeys from analysis results +func (s *interim) prepareDicts() { + var pidNext int + + var totTFs int + var totLocs int + + visitField := func(fieldID uint16, tfs analysis.TokenFrequencies) { + dict := s.Dicts[fieldID] + dictKeys := s.DictKeys[fieldID] + + for term, tf := range tfs { + pidPlus1, exists := dict[term] + if !exists { + pidNext++ + pidPlus1 = uint64(pidNext) + + dict[term] = pidPlus1 + dictKeys = append(dictKeys, term) + + s.numTermsPerPostingsList = append(s.numTermsPerPostingsList, 0) + s.numLocsPerPostingsList = 
append(s.numLocsPerPostingsList, 0) + } + + pid := pidPlus1 - 1 + + s.numTermsPerPostingsList[pid] += 1 + s.numLocsPerPostingsList[pid] += len(tf.Locations) + + totLocs += len(tf.Locations) + } + + totTFs += len(tfs) + + s.DictKeys[fieldID] = dictKeys + } + + for _, result := range s.results { + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + _, tf := field.Analyze() + visitField(fieldID, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + tf := result.Analyzed[i] + visitField(fieldID, tf) + } + } + + numPostingsLists := pidNext + + if cap(s.Postings) >= numPostingsLists { + s.Postings = s.Postings[:numPostingsLists] + } else { + postings := make([]*roaring.Bitmap, numPostingsLists) + copy(postings, s.Postings[:cap(s.Postings)]) + for i := 0; i < numPostingsLists; i++ { + if postings[i] == nil { + postings[i] = roaring.New() + } + } + s.Postings = postings + } + + if cap(s.FreqNorms) >= numPostingsLists { + s.FreqNorms = s.FreqNorms[:numPostingsLists] + } else { + s.FreqNorms = make([][]interimFreqNorm, numPostingsLists) + } + + if cap(s.freqNormsBacking) >= totTFs { + s.freqNormsBacking = s.freqNormsBacking[:totTFs] + } else { + s.freqNormsBacking = make([]interimFreqNorm, totTFs) + } + + freqNormsBacking := s.freqNormsBacking + for pid, numTerms := range s.numTermsPerPostingsList { + s.FreqNorms[pid] = freqNormsBacking[0:0] + freqNormsBacking = freqNormsBacking[numTerms:] + } + + if cap(s.Locs) >= numPostingsLists { + s.Locs = s.Locs[:numPostingsLists] + } else { + s.Locs = make([][]interimLoc, numPostingsLists) + } + + if cap(s.locsBacking) >= totLocs { + s.locsBacking = s.locsBacking[:totLocs] + } else { + s.locsBacking = make([]interimLoc, totLocs) + } + + locsBacking := s.locsBacking + for pid, numLocs := range s.numLocsPerPostingsList { + s.Locs[pid] = locsBacking[0:0] + locsBacking 
= locsBacking[numLocs:] + } +} + +func (s *interim) processDocuments() { + numFields := len(s.FieldsInv) + reuseFieldLens := make([]int, numFields) + reuseFieldTFs := make([]analysis.TokenFrequencies, numFields) + + for docNum, result := range s.results { + for i := 0; i < numFields; i++ { // clear these for reuse + reuseFieldLens[i] = 0 + reuseFieldTFs[i] = nil + } + + s.processDocument(uint64(docNum), result, + reuseFieldLens, reuseFieldTFs) + } +} + +func (s *interim) processDocument(docNum uint64, + result *index.AnalysisResult, + fieldLens []int, fieldTFs []analysis.TokenFrequencies) { + visitField := func(fieldID uint16, fieldName string, + ln int, tf analysis.TokenFrequencies) { + fieldLens[fieldID] += ln + + existingFreqs := fieldTFs[fieldID] + if existingFreqs != nil { + existingFreqs.MergeAll(fieldName, tf) + } else { + fieldTFs[fieldID] = tf + } + } + + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln, tf := field.Analyze() + visitField(fieldID, field.Name(), ln, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln := result.Length[i] + tf := result.Analyzed[i] + visitField(fieldID, field.Name(), ln, tf) + } + + // now that it's been rolled up into fieldTFs, walk that + for fieldID, tfs := range fieldTFs { + dict := s.Dicts[fieldID] + norm := float32(1.0 / math.Sqrt(float64(fieldLens[fieldID]))) + + for term, tf := range tfs { + pid := dict[term] - 1 + bs := s.Postings[pid] + bs.Add(uint32(docNum)) + + s.FreqNorms[pid] = append(s.FreqNorms[pid], + interimFreqNorm{ + freq: uint64(tf.Frequency()), + norm: norm, + numLocs: len(tf.Locations), + }) + + if len(tf.Locations) > 0 { + locs := s.Locs[pid] + + for _, loc := range tf.Locations { + var locf = uint16(fieldID) + if loc.Field != "" { + locf = uint16(s.getOrDefineField(loc.Field)) + } + var arrayposs []uint64 + if 
len(loc.ArrayPositions) > 0 { + arrayposs = loc.ArrayPositions + } + locs = append(locs, interimLoc{ + fieldID: locf, + pos: uint64(loc.Position), + start: uint64(loc.Start), + end: uint64(loc.End), + arrayposs: arrayposs, + }) + } + + s.Locs[pid] = locs + } + } + } +} + +func (s *interim) writeStoredFields() ( + storedIndexOffset uint64, err error) { + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return s.metaBuf.Write(varBuf[:wb]) + } + + data, compressed := s.tmp0[:0], s.tmp1[:0] + defer func() { s.tmp0, s.tmp1 = data, compressed }() + + // keyed by docNum + docStoredOffsets := make([]uint64, len(s.results)) + + // keyed by fieldID, for the current doc in the loop + docStoredFields := map[uint16]interimStoredField{} + + for docNum, result := range s.results { + for fieldID := range docStoredFields { // reset for next doc + delete(docStoredFields, fieldID) + } + + for _, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + + opts := field.Options() + + if opts.IsStored() { + isf := docStoredFields[fieldID] + isf.vals = append(isf.vals, field.Value()) + isf.typs = append(isf.typs, encodeFieldType(field)) + isf.arrayposs = append(isf.arrayposs, field.ArrayPositions()) + docStoredFields[fieldID] = isf + } + + if opts.IncludeDocValues() { + s.IncludeDocValues[fieldID] = true + } + + err := ValidateDocFields(field) + if err != nil { + return 0, err + } + } + + var curr int + + s.metaBuf.Reset() + data = data[:0] + + // _id field special case optimizes ExternalID() lookups + idFieldVal := docStoredFields[uint16(0)].vals[0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, err + } + + // handle non-"_id" fields + for fieldID := 1; fieldID < len(s.FieldsInv); fieldID++ { + isf, exists := docStoredFields[uint16(fieldID)] + if exists { + curr, data, err = persistStoredFieldValues( + fieldID, isf.vals, isf.typs, 
isf.arrayposs, + curr, metaEncode, data) + if err != nil { + return 0, err + } + } + } + + metaBytes := s.metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + docStoredOffsets[docNum] = uint64(s.w.Count()) + + _, err := writeUvarints(s.w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, err + } + + _, err = s.w.Write(metaBytes) + if err != nil { + return 0, err + } + + _, err = s.w.Write(idFieldVal) + if err != nil { + return 0, err + } + + _, err = s.w.Write(compressed) + if err != nil { + return 0, err + } + } + + storedIndexOffset = uint64(s.w.Count()) + + for _, docStoredOffset := range docStoredOffsets { + err = binary.Write(s.w, binary.BigEndian, docStoredOffset) + if err != nil { + return 0, err + } + } + + return storedIndexOffset, nil +} + +func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) { + dictOffsets = make([]uint64, len(s.FieldsInv)) + + fdvOffsetsStart := make([]uint64, len(s.FieldsInv)) + fdvOffsetsEnd := make([]uint64, len(s.FieldsInv)) + + buf := s.grabBuf(binary.MaxVarintLen64) + + tfEncoder := newChunkedIntCoder(uint64(s.chunkFactor), uint64(len(s.results)-1)) + locEncoder := newChunkedIntCoder(uint64(s.chunkFactor), uint64(len(s.results)-1)) + fdvEncoder := newChunkedContentCoder(uint64(s.chunkFactor), uint64(len(s.results)-1), s.w, false) + + var docTermMap [][]byte + + if s.builder == nil { + s.builder, err = vellum.New(&s.builderBuf, nil) + if err != nil { + return 0, nil, err + } + } + + for fieldID, terms := range s.DictKeys { + if cap(docTermMap) < len(s.results) { + docTermMap = make([][]byte, len(s.results)) + } else { + docTermMap = docTermMap[0:len(s.results)] + for docNum := range docTermMap { // reset the docTermMap + docTermMap[docNum] = docTermMap[docNum][:0] + } + } + + dict := s.Dicts[fieldID] + + for _, term := range terms { // terms are already sorted + pid := dict[term] - 1 + + postingsBS := 
s.Postings[pid] + + freqNorms := s.FreqNorms[pid] + freqNormOffset := 0 + + locs := s.Locs[pid] + locOffset := 0 + + postingsItr := postingsBS.Iterator() + for postingsItr.HasNext() { + docNum := uint64(postingsItr.Next()) + + freqNorm := freqNorms[freqNormOffset] + + err = tfEncoder.Add(docNum, + encodeFreqHasLocs(freqNorm.freq, freqNorm.numLocs > 0), + uint64(math.Float32bits(freqNorm.norm))) + if err != nil { + return 0, nil, err + } + + if freqNorm.numLocs > 0 { + numBytesLocs := 0 + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + numBytesLocs += totalUvarintBytes( + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs)), loc.arrayposs) + } + + err = locEncoder.Add(docNum, uint64(numBytesLocs)) + if err != nil { + return 0, nil, err + } + + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + err = locEncoder.Add(docNum, + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs))) + if err != nil { + return 0, nil, err + } + + err = locEncoder.Add(docNum, loc.arrayposs...) 
+ if err != nil { + return 0, nil, err + } + } + + locOffset += freqNorm.numLocs + } + + freqNormOffset++ + + docTermMap[docNum] = append( + append(docTermMap[docNum], term...), + termSeparator) + } + + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := + writePostings(postingsBS, tfEncoder, locEncoder, nil, s.w, buf) + if err != nil { + return 0, nil, err + } + + if postingsOffset > uint64(0) { + err = s.builder.Insert([]byte(term), postingsOffset) + if err != nil { + return 0, nil, err + } + } + + tfEncoder.Reset() + locEncoder.Reset() + } + + err = s.builder.Close() + if err != nil { + return 0, nil, err + } + + // record where this dictionary starts + dictOffsets[fieldID] = uint64(s.w.Count()) + + vellumData := s.builderBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(buf, uint64(len(vellumData))) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + + // write this vellum to disk + _, err = s.w.Write(vellumData) + if err != nil { + return 0, nil, err + } + + // reset vellum for reuse + s.builderBuf.Reset() + + err = s.builder.Reset(&s.builderBuf) + if err != nil { + return 0, nil, err + } + + // write the field doc values + if s.IncludeDocValues[fieldID] { + for docNum, docTerms := range docTermMap { + if len(docTerms) > 0 { + err = fdvEncoder.Add(uint64(docNum), docTerms) + if err != nil { + return 0, nil, err + } + } + } + err = fdvEncoder.Close() + if err != nil { + return 0, nil, err + } + + fdvOffsetsStart[fieldID] = uint64(s.w.Count()) + + _, err = fdvEncoder.Write() + if err != nil { + return 0, nil, err + } + + fdvOffsetsEnd[fieldID] = uint64(s.w.Count()) + + fdvEncoder.Reset() + } else { + fdvOffsetsStart[fieldID] = fieldNotUninverted + fdvOffsetsEnd[fieldID] = fieldNotUninverted + } + } + + fdvIndexOffset = uint64(s.w.Count()) + + for i := 0; i < len(fdvOffsetsStart); i++ { + n := binary.PutUvarint(buf, fdvOffsetsStart[i]) + _, err := s.w.Write(buf[:n]) + if err != nil { + return 
0, nil, err + } + n = binary.PutUvarint(buf, fdvOffsetsEnd[i]) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + } + + return fdvIndexOffset, dictOffsets, nil +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType = 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +// returns the total # of bytes needed to encode the given uint64's +// into binary.PutUVarint() encoding +func totalUvarintBytes(a, b, c, d, e uint64, more []uint64) (n int) { + n = numUvarintBytes(a) + n += numUvarintBytes(b) + n += numUvarintBytes(c) + n += numUvarintBytes(d) + n += numUvarintBytes(e) + for _, v := range more { + n += numUvarintBytes(v) + } + return n +} + +// returns # of bytes needed to encode x in binary.PutUvarint() encoding +func numUvarintBytes(x uint64) (n int) { + for x >= 0x80 { + x >>= 7 + n++ + } + return n + 1 +} diff --git a/vendor/github.com/blevesearch/zap/v11/plugin.go b/vendor/github.com/blevesearch/zap/v11/plugin.go new file mode 100644 index 0000000..38a0638 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/plugin.go @@ -0,0 +1,37 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// ZapPlugin implements the Plugin interface of +// the blevesearch/bleve/index/scorch/segment pkg +type ZapPlugin struct{} + +func (*ZapPlugin) Type() string { + return Type +} + +func (*ZapPlugin) Version() uint32 { + return Version +} + +// Plugin returns an instance segment.Plugin for use +// by the Scorch indexing scheme +func Plugin() segment.Plugin { + return &ZapPlugin{} +} diff --git a/vendor/github.com/blevesearch/zap/v11/posting.go b/vendor/github.com/blevesearch/zap/v11/posting.go new file mode 100644 index 0000000..619dc4c --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/posting.go @@ -0,0 +1,910 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + "math" + "reflect" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePostingsList int +var reflectStaticSizePostingsIterator int +var reflectStaticSizePosting int +var reflectStaticSizeLocation int + +func init() { + var pl PostingsList + reflectStaticSizePostingsList = int(reflect.TypeOf(pl).Size()) + var pi PostingsIterator + reflectStaticSizePostingsIterator = int(reflect.TypeOf(pi).Size()) + var p Posting + reflectStaticSizePosting = int(reflect.TypeOf(p).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +// FST or vellum value (uint64) encoding is determined by the top two +// highest-order or most significant bits... +// +// encoding : MSB +// name : 63 62 61...to...bit #0 (LSB) +// ----------+---+---+--------------------------------------------------- +// general : 0 | 0 | 62-bits of postingsOffset. +// ~ : 0 | 1 | reserved for future. +// 1-hit : 1 | 0 | 31-bits of positive float31 norm | 31-bits docNum. +// ~ : 1 | 1 | reserved for future. +// +// Encoding "general" is able to handle all cases, where the +// postingsOffset points to more information about the postings for +// the term. +// +// Encoding "1-hit" is used to optimize a commonly seen case when a +// term has only a single hit. For example, a term in the _id field +// will have only 1 hit. The "1-hit" encoding is used for a term +// in a field when... +// +// - term vector info is disabled for that field; +// - and, the term appears in only a single doc for that field; +// - and, the term's freq is exactly 1 in that single doc for that field; +// - and, the docNum must fit into 31-bits; +// +// Otherwise, the "general" encoding is used instead. 
+// +// In the "1-hit" encoding, the field in that single doc may have +// other terms, which is supported in the "1-hit" encoding by the +// positive float31 norm. + +const FSTValEncodingMask = uint64(0xc000000000000000) +const FSTValEncodingGeneral = uint64(0x0000000000000000) +const FSTValEncoding1Hit = uint64(0x8000000000000000) + +func FSTValEncode1Hit(docNum uint64, normBits uint64) uint64 { + return FSTValEncoding1Hit | ((mask31Bits & normBits) << 31) | (mask31Bits & docNum) +} + +func FSTValDecode1Hit(v uint64) (docNum uint64, normBits uint64) { + return (mask31Bits & v), (mask31Bits & (v >> 31)) +} + +const mask31Bits = uint64(0x000000007fffffff) + +func under32Bits(x uint64) bool { + return x <= mask31Bits +} + +const DocNum1HitFinished = math.MaxUint64 + +var NormBits1Hit = uint64(math.Float32bits(float32(1))) + +// PostingsList is an in-memory representation of a postings list +type PostingsList struct { + sb *SegmentBase + postingsOffset uint64 + freqOffset uint64 + locOffset uint64 + postings *roaring.Bitmap + except *roaring.Bitmap + + // when normBits1Hit != 0, then this postings list came from a + // 1-hit encoding, and only the docNum1Hit & normBits1Hit apply + docNum1Hit uint64 + normBits1Hit uint64 +} + +// represents an immutable, empty postings list +var emptyPostingsList = &PostingsList{} + +func (p *PostingsList) Size() int { + sizeInBytes := reflectStaticSizePostingsList + size.SizeOfPtr + + if p.except != nil { + sizeInBytes += int(p.except.GetSizeInBytes()) + } + + return sizeInBytes +} + +func (p *PostingsList) OrInto(receiver *roaring.Bitmap) { + if p.normBits1Hit != 0 { + receiver.Add(uint32(p.docNum1Hit)) + return + } + + if p.postings != nil { + receiver.Or(p.postings) + } +} + +// Iterator returns an iterator for this postings list +func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, + prealloc segment.PostingsIterator) segment.PostingsIterator { + if p.normBits1Hit == 0 && p.postings == nil { + return 
emptyPostingsIterator + } + + var preallocPI *PostingsIterator + pi, ok := prealloc.(*PostingsIterator) + if ok && pi != nil { + preallocPI = pi + } + if preallocPI == emptyPostingsIterator { + preallocPI = nil + } + + return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) +} + +func (p *PostingsList) iterator(includeFreq, includeNorm, includeLocs bool, + rv *PostingsIterator) *PostingsIterator { + if rv == nil { + rv = &PostingsIterator{} + } else { + freqNormReader := rv.freqNormReader + if freqNormReader != nil { + freqNormReader.Reset([]byte(nil)) + } + + locReader := rv.locReader + if locReader != nil { + locReader.Reset([]byte(nil)) + } + + freqChunkOffsets := rv.freqChunkOffsets[:0] + locChunkOffsets := rv.locChunkOffsets[:0] + + nextLocs := rv.nextLocs[:0] + nextSegmentLocs := rv.nextSegmentLocs[:0] + + buf := rv.buf + + *rv = PostingsIterator{} // clear the struct + + rv.freqNormReader = freqNormReader + rv.locReader = locReader + + rv.freqChunkOffsets = freqChunkOffsets + rv.locChunkOffsets = locChunkOffsets + + rv.nextLocs = nextLocs + rv.nextSegmentLocs = nextSegmentLocs + + rv.buf = buf + } + + rv.postings = p + rv.includeFreqNorm = includeFreq || includeNorm || includeLocs + rv.includeLocs = includeLocs + + if p.normBits1Hit != 0 { + // "1-hit" encoding + rv.docNum1Hit = p.docNum1Hit + rv.normBits1Hit = p.normBits1Hit + + if p.except != nil && p.except.Contains(uint32(rv.docNum1Hit)) { + rv.docNum1Hit = DocNum1HitFinished + } + + return rv + } + + // "general" encoding, check if empty + if p.postings == nil { + return rv + } + + var n uint64 + var read int + + // prepare the freq chunk details + if rv.includeFreqNorm { + var numFreqChunks uint64 + numFreqChunks, read = binary.Uvarint(p.sb.mem[p.freqOffset+n : p.freqOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + if cap(rv.freqChunkOffsets) >= int(numFreqChunks) { + rv.freqChunkOffsets = rv.freqChunkOffsets[:int(numFreqChunks)] + } else { + rv.freqChunkOffsets = make([]uint64, 
int(numFreqChunks)) + } + for i := 0; i < int(numFreqChunks); i++ { + rv.freqChunkOffsets[i], read = binary.Uvarint(p.sb.mem[p.freqOffset+n : p.freqOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + } + rv.freqChunkStart = p.freqOffset + n + } + + // prepare the loc chunk details + if rv.includeLocs { + n = 0 + var numLocChunks uint64 + numLocChunks, read = binary.Uvarint(p.sb.mem[p.locOffset+n : p.locOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + if cap(rv.locChunkOffsets) >= int(numLocChunks) { + rv.locChunkOffsets = rv.locChunkOffsets[:int(numLocChunks)] + } else { + rv.locChunkOffsets = make([]uint64, int(numLocChunks)) + } + for i := 0; i < int(numLocChunks); i++ { + rv.locChunkOffsets[i], read = binary.Uvarint(p.sb.mem[p.locOffset+n : p.locOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + } + rv.locChunkStart = p.locOffset + n + } + + rv.all = p.postings.Iterator() + if p.except != nil { + rv.ActualBM = roaring.AndNot(p.postings, p.except) + rv.Actual = rv.ActualBM.Iterator() + } else { + rv.ActualBM = p.postings + rv.Actual = rv.all // Optimize to use same iterator for all & Actual. 
+ } + + return rv +} + +// Count returns the number of items on this postings list +func (p *PostingsList) Count() uint64 { + var n, e uint64 + if p.normBits1Hit != 0 { + n = 1 + if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { + e = 1 + } + } else if p.postings != nil { + n = p.postings.GetCardinality() + if p.except != nil { + e = p.postings.AndCardinality(p.except) + } + } + return n - e +} + +func (rv *PostingsList) read(postingsOffset uint64, d *Dictionary) error { + rv.postingsOffset = postingsOffset + + // handle "1-hit" encoding special case + if rv.postingsOffset&FSTValEncodingMask == FSTValEncoding1Hit { + return rv.init1Hit(postingsOffset) + } + + // read the location of the freq/norm details + var n uint64 + var read int + + rv.freqOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+binary.MaxVarintLen64]) + n += uint64(read) + + rv.locOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + var postingsLen uint64 + postingsLen, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + roaringBytes := d.sb.mem[postingsOffset+n : postingsOffset+n+postingsLen] + + if rv.postings == nil { + rv.postings = roaring.NewBitmap() + } + _, err := rv.postings.FromBuffer(roaringBytes) + if err != nil { + return fmt.Errorf("error loading roaring bitmap: %v", err) + } + + return nil +} + +func (rv *PostingsList) init1Hit(fstVal uint64) error { + docNum, normBits := FSTValDecode1Hit(fstVal) + + rv.docNum1Hit = docNum + rv.normBits1Hit = normBits + + return nil +} + +// PostingsIterator provides a way to iterate through the postings list +type PostingsIterator struct { + postings *PostingsList + all roaring.IntPeekable + Actual roaring.IntPeekable + ActualBM *roaring.Bitmap + + currChunk uint32 + currChunkFreqNorm []byte + currChunkLoc []byte + + freqNormReader *segment.MemUvarintReader + locReader 
*segment.MemUvarintReader + + freqChunkOffsets []uint64 + freqChunkStart uint64 + + locChunkOffsets []uint64 + locChunkStart uint64 + + next Posting // reused across Next() calls + nextLocs []Location // reused across Next() calls + nextSegmentLocs []segment.Location // reused across Next() calls + + docNum1Hit uint64 + normBits1Hit uint64 + + buf []byte + + includeFreqNorm bool + includeLocs bool +} + +var emptyPostingsIterator = &PostingsIterator{} + +func (i *PostingsIterator) Size() int { + sizeInBytes := reflectStaticSizePostingsIterator + size.SizeOfPtr + + len(i.currChunkFreqNorm) + + len(i.currChunkLoc) + + len(i.freqChunkOffsets)*size.SizeOfUint64 + + len(i.locChunkOffsets)*size.SizeOfUint64 + + i.next.Size() + + for _, entry := range i.nextLocs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (i *PostingsIterator) loadChunk(chunk int) error { + if i.includeFreqNorm { + if chunk >= len(i.freqChunkOffsets) { + return fmt.Errorf("tried to load freq chunk that doesn't exist %d/(%d)", + chunk, len(i.freqChunkOffsets)) + } + + end, start := i.freqChunkStart, i.freqChunkStart + s, e := readChunkBoundary(chunk, i.freqChunkOffsets) + start += s + end += e + i.currChunkFreqNorm = i.postings.sb.mem[start:end] + if i.freqNormReader == nil { + i.freqNormReader = segment.NewMemUvarintReader(i.currChunkFreqNorm) + } else { + i.freqNormReader.Reset(i.currChunkFreqNorm) + } + } + + if i.includeLocs { + if chunk >= len(i.locChunkOffsets) { + return fmt.Errorf("tried to load loc chunk that doesn't exist %d/(%d)", + chunk, len(i.locChunkOffsets)) + } + + end, start := i.locChunkStart, i.locChunkStart + s, e := readChunkBoundary(chunk, i.locChunkOffsets) + start += s + end += e + i.currChunkLoc = i.postings.sb.mem[start:end] + if i.locReader == nil { + i.locReader = segment.NewMemUvarintReader(i.currChunkLoc) + } else { + i.locReader.Reset(i.currChunkLoc) + } + } + + i.currChunk = uint32(chunk) + return nil +} + +func (i *PostingsIterator) 
readFreqNormHasLocs() (uint64, uint64, bool, error) { + if i.normBits1Hit != 0 { + return 1, i.normBits1Hit, false, nil + } + + freqHasLocs, err := i.freqNormReader.ReadUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading frequency: %v", err) + } + + freq, hasLocs := decodeFreqHasLocs(freqHasLocs) + + normBits, err := i.freqNormReader.ReadUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading norm: %v", err) + } + + return freq, normBits, hasLocs, nil +} + +func (i *PostingsIterator) skipFreqNormReadHasLocs() (bool, error) { + if i.normBits1Hit != 0 { + return false, nil + } + + freqHasLocs, err := i.freqNormReader.ReadUvarint() + if err != nil { + return false, fmt.Errorf("error reading freqHasLocs: %v", err) + } + + i.freqNormReader.SkipUvarint() // Skip normBits. + + return freqHasLocs&0x01 != 0, nil // See decodeFreqHasLocs() / hasLocs. +} + +func encodeFreqHasLocs(freq uint64, hasLocs bool) uint64 { + rv := freq << 1 + if hasLocs { + rv = rv | 0x01 // 0'th LSB encodes whether there are locations + } + return rv +} + +func decodeFreqHasLocs(freqHasLocs uint64) (uint64, bool) { + freq := freqHasLocs >> 1 + hasLocs := freqHasLocs&0x01 != 0 + return freq, hasLocs +} + +// readLocation processes all the integers on the stream representing a single +// location. 
+func (i *PostingsIterator) readLocation(l *Location) error { + // read off field + fieldID, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location field: %v", err) + } + // read off pos + pos, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location pos: %v", err) + } + // read off start + start, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location start: %v", err) + } + // read off end + end, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location end: %v", err) + } + // read off num array pos + numArrayPos, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location num array pos: %v", err) + } + + l.field = i.postings.sb.fieldsInv[fieldID] + l.pos = pos + l.start = start + l.end = end + + if cap(l.ap) < int(numArrayPos) { + l.ap = make([]uint64, int(numArrayPos)) + } else { + l.ap = l.ap[:int(numArrayPos)] + } + + // read off array positions + for k := 0; k < int(numArrayPos); k++ { + ap, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading array position: %v", err) + } + + l.ap[k] = ap + } + + return nil +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) Next() (segment.Posting, error) { + return i.nextAtOrAfter(0) +} + +// Advance returns the posting at the specified docNum or it is not present +// the next posting, or if the end is reached, nil +func (i *PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { + return i.nextAtOrAfter(docNum) +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { + docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) + if err != nil || !exists { + return nil, err + } + + i.next = Posting{} // clear the struct + rv := &i.next 
+ rv.docNum = docNum + + if !i.includeFreqNorm { + return rv, nil + } + + var normBits uint64 + var hasLocs bool + + rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return nil, err + } + + rv.norm = math.Float32frombits(uint32(normBits)) + + if i.includeLocs && hasLocs { + // prepare locations into reused slices, where we assume + // rv.freq >= "number of locs", since in a composite field, + // some component fields might have their IncludeTermVector + // flags disabled while other component fields are enabled + if cap(i.nextLocs) >= int(rv.freq) { + i.nextLocs = i.nextLocs[0:rv.freq] + } else { + i.nextLocs = make([]Location, rv.freq, rv.freq*2) + } + if cap(i.nextSegmentLocs) < int(rv.freq) { + i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) + } + rv.locs = i.nextSegmentLocs[:0] + + numLocsBytes, err := i.locReader.ReadUvarint() + if err != nil { + return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + j := 0 + startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader + for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { + err := i.readLocation(&i.nextLocs[j]) + if err != nil { + return nil, err + } + rv.locs = append(rv.locs, &i.nextLocs[j]) + j++ + } + } + + return rv, nil +} + +var freqHasLocs1Hit = encodeFreqHasLocs(1, false) + +// nextBytes returns the docNum and the encoded freq & loc bytes for +// the next posting +func (i *PostingsIterator) nextBytes() ( + docNumOut uint64, freq uint64, normBits uint64, + bytesFreqNorm []byte, bytesLoc []byte, err error) { + docNum, exists, err := i.nextDocNumAtOrAfter(0) + if err != nil || !exists { + return 0, 0, 0, nil, nil, err + } + + if i.normBits1Hit != 0 { + if i.buf == nil { + i.buf = make([]byte, binary.MaxVarintLen64*2) + } + n := binary.PutUvarint(i.buf, freqHasLocs1Hit) + n += binary.PutUvarint(i.buf[n:], i.normBits1Hit) + return docNum, uint64(1), i.normBits1Hit, i.buf[:n], nil, nil + } + + startFreqNorm 
:= len(i.currChunkFreqNorm) - i.freqNormReader.Len() + + var hasLocs bool + + freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return 0, 0, 0, nil, nil, err + } + + endFreqNorm := len(i.currChunkFreqNorm) - i.freqNormReader.Len() + bytesFreqNorm = i.currChunkFreqNorm[startFreqNorm:endFreqNorm] + + if hasLocs { + startLoc := len(i.currChunkLoc) - i.locReader.Len() + + numLocsBytes, err := i.locReader.ReadUvarint() + if err != nil { + return 0, 0, 0, nil, nil, + fmt.Errorf("error reading location nextBytes numLocs: %v", err) + } + + // skip over all the location bytes + i.locReader.SkipBytes(int(numLocsBytes)) + + endLoc := len(i.currChunkLoc) - i.locReader.Len() + bytesLoc = i.currChunkLoc[startLoc:endLoc] + } + + return docNum, freq, normBits, bytesFreqNorm, bytesLoc, nil +} + +// nextDocNum returns the next docNum on the postings list, and also +// sets up the currChunk / loc related fields of the iterator. +func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool, error) { + if i.normBits1Hit != 0 { + if i.docNum1Hit == DocNum1HitFinished { + return 0, false, nil + } + if i.docNum1Hit < atOrAfter { + // advanced past our 1-hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return 0, false, nil + } + docNum := i.docNum1Hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return docNum, true, nil + } + + if i.Actual == nil || !i.Actual.HasNext() { + return 0, false, nil + } + + if i.postings == nil || i.postings.postings == i.ActualBM { + return i.nextDocNumAtOrAfterClean(atOrAfter) + } + + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + // couldn't find anything + return 0, false, nil + } + + n := i.Actual.Next() + allN := i.all.Next() + + nChunk := n / i.postings.sb.chunkFactor + + // when allN becomes >= to here, then allN is in the same chunk as nChunk. 
+ allNReachesNChunk := nChunk * i.postings.sb.chunkFactor + + // n is the next actual hit (excluding some postings), and + // allN is the next hit in the full postings, and + // if they don't match, move 'all' forwards until they do + for allN != n { + // we've reached same chunk, so move the freq/norm/loc decoders forward + if i.includeFreqNorm && allN >= allNReachesNChunk { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, err + } + } + + allN = i.all.Next() + } + + if i.includeFreqNorm && (i.currChunk != nChunk || i.currChunkFreqNorm == nil) { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +// optimization when the postings list is "clean" (e.g., no updates & +// no deletions) where the all bitmap is the same as the actual bitmap +func (i *PostingsIterator) nextDocNumAtOrAfterClean( + atOrAfter uint64) (uint64, bool, error) { + + if !i.includeFreqNorm { + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + return 0, false, nil // couldn't find anything + } + + return uint64(i.Actual.Next()), true, nil + } + + // freq-norm's needed, so maintain freq-norm chunk reader + sameChunkNexts := 0 // # of times we called Next() in the same chunk + n := i.Actual.Next() + nChunk := n / i.postings.sb.chunkFactor + + for uint64(n) < atOrAfter && i.Actual.HasNext() { + n = i.Actual.Next() + + nChunkPrev := nChunk + nChunk = n / i.postings.sb.chunkFactor + + if nChunk != nChunkPrev { + sameChunkNexts = 0 + } else { + sameChunkNexts += 1 + } + } + + if uint64(n) < atOrAfter { + // couldn't find anything + return 0, false, nil + } + + for j := 0; j < sameChunkNexts; j++ { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, fmt.Errorf("error optimized currChunkNext: %v", err) + } + } + + if i.currChunk != nChunk || i.currChunkFreqNorm == nil { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, 
false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +func (i *PostingsIterator) currChunkNext(nChunk uint32) error { + if i.currChunk != nChunk || i.currChunkFreqNorm == nil { + err := i.loadChunk(int(nChunk)) + if err != nil { + return fmt.Errorf("error loading chunk: %v", err) + } + } + + // read off freq/offsets even though we don't care about them + hasLocs, err := i.skipFreqNormReadHasLocs() + if err != nil { + return err + } + + if i.includeLocs && hasLocs { + numLocsBytes, err := i.locReader.ReadUvarint() + if err != nil { + return fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + // skip over all the location bytes + i.locReader.SkipBytes(int(numLocsBytes)) + } + + return nil +} + +// DocNum1Hit returns the docNum and true if this is "1-hit" optimized +// and the docNum is available. +func (p *PostingsIterator) DocNum1Hit() (uint64, bool) { + if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished { + return p.docNum1Hit, true + } + return 0, false +} + +// ActualBitmap returns the underlying actual bitmap +// which can be used up the stack for optimizations +func (p *PostingsIterator) ActualBitmap() *roaring.Bitmap { + return p.ActualBM +} + +// ReplaceActual replaces the ActualBM with the provided +// bitmap +func (p *PostingsIterator) ReplaceActual(abm *roaring.Bitmap) { + p.ActualBM = abm + p.Actual = abm.Iterator() +} + +// PostingsIteratorFromBitmap constructs a PostingsIterator given an +// "actual" bitmap. +func PostingsIteratorFromBitmap(bm *roaring.Bitmap, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + ActualBM: bm, + Actual: bm.Iterator(), + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// PostingsIteratorFrom1Hit constructs a PostingsIterator given a +// 1-hit docNum. 
+func PostingsIteratorFrom1Hit(docNum1Hit uint64, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + docNum1Hit: docNum1Hit, + normBits1Hit: NormBits1Hit, + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// Posting is a single entry in a postings list +type Posting struct { + docNum uint64 + freq uint64 + norm float32 + locs []segment.Location +} + +func (p *Posting) Size() int { + sizeInBytes := reflectStaticSizePosting + + for _, entry := range p.locs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +// Number returns the document number of this posting in this segment +func (p *Posting) Number() uint64 { + return p.docNum +} + +// Frequency returns the frequencies of occurrence of this term in this doc/field +func (p *Posting) Frequency() uint64 { + return p.freq +} + +// Norm returns the normalization factor for this posting +func (p *Posting) Norm() float64 { + return float64(p.norm) +} + +// Locations returns the location information for each occurrence +func (p *Posting) Locations() []segment.Location { + return p.locs +} + +// Location represents the location of a single occurrence +type Location struct { + field string + pos uint64 + start uint64 + end uint64 + ap []uint64 +} + +func (l *Location) Size() int { + return reflectStaticSizeLocation + + len(l.field) + + len(l.ap)*size.SizeOfUint64 +} + +// Field returns the name of the field (useful in composite fields to know +// which original field the value came from) +func (l *Location) Field() string { + return l.field +} + +// Start returns the start byte offset of this occurrence +func (l *Location) Start() uint64 { + return l.start +} + +// End returns the end byte offset of this occurrence +func (l *Location) End() uint64 { + return l.end +} + +// Pos returns the 1-based phrase position of this occurrence +func (l *Location) Pos() uint64 { + return l.pos +} + +// ArrayPositions returns the array position 
vector associated with this occurrence +func (l *Location) ArrayPositions() []uint64 { + return l.ap +} diff --git a/vendor/github.com/blevesearch/zap/v11/read.go b/vendor/github.com/blevesearch/zap/v11/read.go new file mode 100644 index 0000000..e47d4c6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/read.go @@ -0,0 +1,43 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import "encoding/binary" + +func (s *SegmentBase) getDocStoredMetaAndCompressed(docNum uint64) ([]byte, []byte) { + _, storedOffset, n, metaLen, dataLen := s.getDocStoredOffsets(docNum) + + meta := s.mem[storedOffset+n : storedOffset+n+metaLen] + data := s.mem[storedOffset+n+metaLen : storedOffset+n+metaLen+dataLen] + + return meta, data +} + +func (s *SegmentBase) getDocStoredOffsets(docNum uint64) ( + uint64, uint64, uint64, uint64, uint64) { + indexOffset := s.storedIndexOffset + (8 * docNum) + + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + + var n uint64 + + metaLen, read := binary.Uvarint(s.mem[storedOffset : storedOffset+binary.MaxVarintLen64]) + n += uint64(read) + + dataLen, read := binary.Uvarint(s.mem[storedOffset+n : storedOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + return indexOffset, storedOffset, n, metaLen, dataLen +} diff --git a/vendor/github.com/blevesearch/zap/v11/segment.go b/vendor/github.com/blevesearch/zap/v11/segment.go new file mode 100644 index 
0000000..517b6af --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/segment.go @@ -0,0 +1,572 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "os" + "sync" + "unsafe" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/couchbase/vellum" + mmap "github.com/blevesearch/mmap-go" + "github.com/golang/snappy" +) + +var reflectStaticSizeSegmentBase int + +func init() { + var sb SegmentBase + reflectStaticSizeSegmentBase = int(unsafe.Sizeof(sb)) +} + +// Open returns a zap impl of a segment +func (*ZapPlugin) Open(path string) (segment.Segment, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + + rv := &Segment{ + SegmentBase: SegmentBase{ + mem: mm[0 : len(mm)-FooterSize], + fieldsMap: make(map[string]uint16), + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + }, + f: f, + mm: mm, + path: path, + refs: 1, + } + rv.SegmentBase.updateSize() + + err = rv.loadConfig() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadFields() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadDvReaders() + if err != nil 
{ + _ = rv.Close() + return nil, err + } + + return rv, nil +} + +// SegmentBase is a memory only, read-only implementation of the +// segment.Segment interface, using zap's data representation. +type SegmentBase struct { + mem []byte + memCRC uint32 + chunkFactor uint32 + fieldsMap map[string]uint16 // fieldName -> fieldID+1 + fieldsInv []string // fieldID -> fieldName + numDocs uint64 + storedIndexOffset uint64 + fieldsIndexOffset uint64 + docValueOffset uint64 + dictLocs []uint64 + fieldDvReaders map[uint16]*docValueReader // naive chunk cache per field + fieldDvNames []string // field names cached in fieldDvReaders + size uint64 + + m sync.Mutex + fieldFSTs map[uint16]*vellum.FST +} + +func (sb *SegmentBase) Size() int { + return int(sb.size) +} + +func (sb *SegmentBase) updateSize() { + sizeInBytes := reflectStaticSizeSegmentBase + + cap(sb.mem) + + // fieldsMap + for k := range sb.fieldsMap { + sizeInBytes += (len(k) + size.SizeOfString) + size.SizeOfUint16 + } + + // fieldsInv, dictLocs + for _, entry := range sb.fieldsInv { + sizeInBytes += len(entry) + size.SizeOfString + } + sizeInBytes += len(sb.dictLocs) * size.SizeOfUint64 + + // fieldDvReaders + for _, v := range sb.fieldDvReaders { + sizeInBytes += size.SizeOfUint16 + size.SizeOfPtr + if v != nil { + sizeInBytes += v.size() + } + } + + sb.size = uint64(sizeInBytes) +} + +func (sb *SegmentBase) AddRef() {} +func (sb *SegmentBase) DecRef() (err error) { return nil } +func (sb *SegmentBase) Close() (err error) { return nil } + +// Segment implements a persisted segment.Segment interface, by +// embedding an mmap()'ed SegmentBase. +type Segment struct { + SegmentBase + + f *os.File + mm mmap.MMap + path string + version uint32 + crc uint32 + + m sync.Mutex // Protects the fields that follow. 
+ refs int64 +} + +func (s *Segment) Size() int { + // 8 /* size of file pointer */ + // 4 /* size of version -> uint32 */ + // 4 /* size of crc -> uint32 */ + sizeOfUints := 16 + + sizeInBytes := (len(s.path) + size.SizeOfString) + sizeOfUints + + // mutex, refs -> int64 + sizeInBytes += 16 + + // do not include the mmap'ed part + return sizeInBytes + s.SegmentBase.Size() - cap(s.mem) +} + +func (s *Segment) AddRef() { + s.m.Lock() + s.refs++ + s.m.Unlock() +} + +func (s *Segment) DecRef() (err error) { + s.m.Lock() + s.refs-- + if s.refs == 0 { + err = s.closeActual() + } + s.m.Unlock() + return err +} + +func (s *Segment) loadConfig() error { + crcOffset := len(s.mm) - 4 + s.crc = binary.BigEndian.Uint32(s.mm[crcOffset : crcOffset+4]) + + verOffset := crcOffset - 4 + s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4]) + if s.version != Version { + return fmt.Errorf("unsupported version %d", s.version) + } + + chunkOffset := verOffset - 4 + s.chunkFactor = binary.BigEndian.Uint32(s.mm[chunkOffset : chunkOffset+4]) + + docValueOffset := chunkOffset - 8 + s.docValueOffset = binary.BigEndian.Uint64(s.mm[docValueOffset : docValueOffset+8]) + + fieldsIndexOffset := docValueOffset - 8 + s.fieldsIndexOffset = binary.BigEndian.Uint64(s.mm[fieldsIndexOffset : fieldsIndexOffset+8]) + + storedIndexOffset := fieldsIndexOffset - 8 + s.storedIndexOffset = binary.BigEndian.Uint64(s.mm[storedIndexOffset : storedIndexOffset+8]) + + numDocsOffset := storedIndexOffset - 8 + s.numDocs = binary.BigEndian.Uint64(s.mm[numDocsOffset : numDocsOffset+8]) + return nil +} + +func (s *SegmentBase) loadFields() error { + // NOTE for now we assume the fields index immediately precedes + // the footer, and if this changes, need to adjust accordingly (or + // store explicit length), where s.mem was sliced from s.mm in Open(). 
+ fieldsIndexEnd := uint64(len(s.mem)) + + // iterate through fields index + var fieldID uint64 + for s.fieldsIndexOffset+(8*fieldID) < fieldsIndexEnd { + addr := binary.BigEndian.Uint64(s.mem[s.fieldsIndexOffset+(8*fieldID) : s.fieldsIndexOffset+(8*fieldID)+8]) + + dictLoc, read := binary.Uvarint(s.mem[addr:fieldsIndexEnd]) + n := uint64(read) + s.dictLocs = append(s.dictLocs, dictLoc) + + var nameLen uint64 + nameLen, read = binary.Uvarint(s.mem[addr+n : fieldsIndexEnd]) + n += uint64(read) + + name := string(s.mem[addr+n : addr+n+nameLen]) + s.fieldsInv = append(s.fieldsInv, name) + s.fieldsMap[name] = uint16(fieldID + 1) + + fieldID++ + } + return nil +} + +// Dictionary returns the term dictionary for the specified field +func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { + dict, err := s.dictionary(field) + if err == nil && dict == nil { + return &segment.EmptyDictionary{}, nil + } + return dict, err +} + +func (sb *SegmentBase) dictionary(field string) (rv *Dictionary, err error) { + fieldIDPlus1 := sb.fieldsMap[field] + if fieldIDPlus1 > 0 { + rv = &Dictionary{ + sb: sb, + field: field, + fieldID: fieldIDPlus1 - 1, + } + + dictStart := sb.dictLocs[rv.fieldID] + if dictStart > 0 { + var ok bool + sb.m.Lock() + if rv.fst, ok = sb.fieldFSTs[rv.fieldID]; !ok { + // read the length of the vellum data + vellumLen, read := binary.Uvarint(sb.mem[dictStart : dictStart+binary.MaxVarintLen64]) + fstBytes := sb.mem[dictStart+uint64(read) : dictStart+uint64(read)+vellumLen] + rv.fst, err = vellum.Load(fstBytes) + if err != nil { + sb.m.Unlock() + return nil, fmt.Errorf("dictionary field %s vellum err: %v", field, err) + } + + sb.fieldFSTs[rv.fieldID] = rv.fst + } + + sb.m.Unlock() + rv.fstReader, err = rv.fst.Reader() + if err != nil { + return nil, fmt.Errorf("dictionary field %s vellum reader err: %v", field, err) + } + + } + } + + return rv, nil +} + +// visitDocumentCtx holds data structures that are reusable across +// multiple 
VisitDocument() calls to avoid memory allocations +type visitDocumentCtx struct { + buf []byte + reader bytes.Reader + arrayPos []uint64 +} + +var visitDocumentCtxPool = sync.Pool{ + New: func() interface{} { + reuse := &visitDocumentCtx{} + return reuse + }, +} + +// VisitDocument invokes the DocFieldValueVistor for each stored field +// for the specified doc number +func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + return s.visitDocument(vdc, num, visitor) +} + +func (s *SegmentBase) visitDocument(vdc *visitDocumentCtx, num uint64, + visitor segment.DocumentFieldValueVisitor) error { + // first make sure this is a valid number in this segment + if num < s.numDocs { + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + idFieldVal := compressed[:idFieldValLen] + + keepGoing := visitor("_id", byte('t'), idFieldVal, nil) + if !keepGoing { + visitDocumentCtxPool.Put(vdc) + return nil + } + + // handle non-"_id" fields + compressed = compressed[idFieldValLen:] + + uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed) + if err != nil { + return err + } + + for keepGoing { + field, err := binary.ReadUvarint(&vdc.reader) + if err == io.EOF { + break + } + if err != nil { + return err + } + typ, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + offset, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + l, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + numap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + var arrayPos []uint64 + if numap > 0 { + if cap(vdc.arrayPos) < int(numap) { + vdc.arrayPos = make([]uint64, numap) + } + arrayPos = 
vdc.arrayPos[:numap] + for i := 0; i < int(numap); i++ { + ap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + arrayPos[i] = ap + } + } + + value := uncompressed[offset : offset+l] + keepGoing = visitor(s.fieldsInv[field], byte(typ), value, arrayPos) + } + + vdc.buf = uncompressed + } + return nil +} + +// DocID returns the value of the _id field for the given docNum +func (s *SegmentBase) DocID(num uint64) ([]byte, error) { + if num >= s.numDocs { + return nil, nil + } + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return nil, err + } + idFieldVal := compressed[:idFieldValLen] + + visitDocumentCtxPool.Put(vdc) + + return idFieldVal, nil +} + +// Count returns the number of documents in this segment. +func (s *SegmentBase) Count() uint64 { + return s.numDocs +} + +// DocNumbers returns a bitset corresponding to the doc numbers of all the +// provided _id strings +func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { + rv := roaring.New() + + if len(s.fieldsMap) > 0 { + idDict, err := s.dictionary("_id") + if err != nil { + return nil, err + } + + postingsList := emptyPostingsList + + sMax, err := idDict.fst.GetMaxKey() + if err != nil { + return nil, err + } + sMaxStr := string(sMax) + filteredIds := make([]string, 0, len(ids)) + for _, id := range ids { + if id <= sMaxStr { + filteredIds = append(filteredIds, id) + } + } + + for _, id := range filteredIds { + postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) + if err != nil { + return nil, err + } + postingsList.OrInto(rv) + } + } + + return rv, nil +} + +// Fields returns the field names used in this segment +func (s *SegmentBase) Fields() []string { + return s.fieldsInv +} + +// Path returns the path of this segment on disk +func (s 
*Segment) Path() string { + return s.path +} + +// Close releases all resources associated with this segment +func (s *Segment) Close() (err error) { + return s.DecRef() +} + +func (s *Segment) closeActual() (err error) { + if s.mm != nil { + err = s.mm.Unmap() + } + // try to close file even if unmap failed + if s.f != nil { + err2 := s.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +// some helpers i started adding for the command-line utility + +// Data returns the underlying mmaped data slice +func (s *Segment) Data() []byte { + return s.mm +} + +// CRC returns the CRC value stored in the file footer +func (s *Segment) CRC() uint32 { + return s.crc +} + +// Version returns the file version in the file footer +func (s *Segment) Version() uint32 { + return s.version +} + +// ChunkFactor returns the chunk factor in the file footer +func (s *Segment) ChunkFactor() uint32 { + return s.chunkFactor +} + +// FieldsIndexOffset returns the fields index offset in the file footer +func (s *Segment) FieldsIndexOffset() uint64 { + return s.fieldsIndexOffset +} + +// StoredIndexOffset returns the stored value index offset in the file footer +func (s *Segment) StoredIndexOffset() uint64 { + return s.storedIndexOffset +} + +// DocValueOffset returns the docValue offset in the file footer +func (s *Segment) DocValueOffset() uint64 { + return s.docValueOffset +} + +// NumDocs returns the number of documents in the file footer +func (s *Segment) NumDocs() uint64 { + return s.numDocs +} + +// DictAddr is a helper function to compute the file offset where the +// dictionary is stored for the specified field. 
+func (s *Segment) DictAddr(field string) (uint64, error) { + fieldIDPlus1, ok := s.fieldsMap[field] + if !ok { + return 0, fmt.Errorf("no such field '%s'", field) + } + + return s.dictLocs[fieldIDPlus1-1], nil +} + +func (s *SegmentBase) loadDvReaders() error { + if s.docValueOffset == fieldNotUninverted || s.numDocs == 0 { + return nil + } + + var read uint64 + for fieldID, field := range s.fieldsInv { + var fieldLocStart, fieldLocEnd uint64 + var n int + fieldLocStart, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset start for field %d", fieldID) + } + read += uint64(n) + fieldLocEnd, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset end for field %d", fieldID) + } + read += uint64(n) + + fieldDvReader, err := s.loadFieldDocValueReader(field, fieldLocStart, fieldLocEnd) + if err != nil { + return err + } + if fieldDvReader != nil { + s.fieldDvReaders[uint16(fieldID)] = fieldDvReader + s.fieldDvNames = append(s.fieldDvNames, field) + } + } + + return nil +} diff --git a/vendor/github.com/blevesearch/zap/v11/write.go b/vendor/github.com/blevesearch/zap/v11/write.go new file mode 100644 index 0000000..cddaedd --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/write.go @@ -0,0 +1,145 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "encoding/binary" + "io" + + "github.com/RoaringBitmap/roaring" +) + +// writes out the length of the roaring bitmap in bytes as varint +// then writes out the roaring bitmap itself +func writeRoaringWithLen(r *roaring.Bitmap, w io.Writer, + reuseBufVarint []byte) (int, error) { + buf, err := r.ToBytes() + if err != nil { + return 0, err + } + + var tw int + + // write out the length + n := binary.PutUvarint(reuseBufVarint, uint64(len(buf))) + nw, err := w.Write(reuseBufVarint[:n]) + tw += nw + if err != nil { + return tw, err + } + + // write out the roaring bytes + nw, err = w.Write(buf) + tw += nw + if err != nil { + return tw, err + } + + return tw, nil +} + +func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (uint64, error) { + var rv uint64 + var fieldsOffsets []uint64 + + for fieldID, fieldName := range fieldsInv { + // record start of this field + fieldsOffsets = append(fieldsOffsets, uint64(w.Count())) + + // write out the dict location and field name length + _, err := writeUvarints(w, dictLocs[fieldID], uint64(len(fieldName))) + if err != nil { + return 0, err + } + + // write out the field name + _, err = w.Write([]byte(fieldName)) + if err != nil { + return 0, err + } + } + + // now write out the fields index + rv = uint64(w.Count()) + for fieldID := range fieldsInv { + err := binary.Write(w, binary.BigEndian, fieldsOffsets[fieldID]) + if err != nil { + return 0, err + } + } + + return rv, nil +} + +// FooterSize is the size of the footer record in bytes +// crc + ver + chunk + field offset + stored offset + num docs + docValueOffset +const FooterSize = 4 + 4 + 4 + 8 + 8 + 8 + 8 + +func persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + chunkFactor uint32, crcBeforeFooter uint32, writerIn io.Writer) error { + w := NewCountHashWriter(writerIn) + w.crc = 
crcBeforeFooter + + // write out the number of docs + err := binary.Write(w, binary.BigEndian, numDocs) + if err != nil { + return err + } + // write out the stored field index location: + err = binary.Write(w, binary.BigEndian, storedIndexOffset) + if err != nil { + return err + } + // write out the field index location + err = binary.Write(w, binary.BigEndian, fieldsIndexOffset) + if err != nil { + return err + } + // write out the fieldDocValue location + err = binary.Write(w, binary.BigEndian, docValueOffset) + if err != nil { + return err + } + // write out 32-bit chunk factor + err = binary.Write(w, binary.BigEndian, chunkFactor) + if err != nil { + return err + } + // write out 32-bit version + err = binary.Write(w, binary.BigEndian, Version) + if err != nil { + return err + } + // write out CRC-32 of everything upto but not including this CRC + err = binary.Write(w, binary.BigEndian, w.crc) + if err != nil { + return err + } + return nil +} + +func writeUvarints(w io.Writer, vals ...uint64) (tw int, err error) { + buf := make([]byte, binary.MaxVarintLen64) + for _, val := range vals { + n := binary.PutUvarint(buf, val) + var nw int + nw, err = w.Write(buf[:n]) + tw += nw + if err != nil { + return tw, err + } + } + return tw, err +} diff --git a/vendor/github.com/blevesearch/zap/v11/zap.md b/vendor/github.com/blevesearch/zap/v11/zap.md new file mode 100644 index 0000000..d74dc54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v11/zap.md @@ -0,0 +1,177 @@ +# ZAP File Format + +## Legend + +### Sections + + |========| + | | section + |========| + +### Fixed-size fields + + |--------| |----| |--| |-| + | | uint64 | | uint32 | | uint16 | | uint8 + |--------| |----| |--| |-| + +### Varints + + |~~~~~~~~| + | | varint(up to uint64) + |~~~~~~~~| + +### Arbitrary-length fields + + |--------...---| + | | arbitrary-length field (string, vellum, roaring bitmap) + |--------...---| + +### Chunked data + + [--------] + [ ] + [--------] + +## Overview + +Footer 
section describes the configuration of particular ZAP file. The format of footer is version-dependent, so it is necessary to check `V` field before the parsing. + + |==================================================| + | Stored Fields | + |==================================================| + |-----> | Stored Fields Index | + | |==================================================| + | | Dictionaries + Postings + DocValues | + | |==================================================| + | |---> | DocValues Index | + | | |==================================================| + | | | Fields | + | | |==================================================| + | | |-> | Fields Index | + | | | |========|========|========|========|====|====|====| + | | | | D# | SF | F | FDV | CF | V | CC | (Footer) + | | | |========|====|===|====|===|====|===|====|====|====| + | | | | | | + |-+-+-----------------| | | + | |--------------------------| | + |-------------------------------------| + + D#. Number of Docs. + SF. Stored Fields Index Offset. + F. Field Index Offset. + FDV. Field DocValue Offset. + CF. Chunk Factor. + V. Version. + CC. CRC32. + +## Stored Fields + +Stored Fields Index is `D#` consecutive 64-bit unsigned integers - offsets, where relevant Stored Fields Data records are located. + + 0 [SF] [SF + D# * 8] + | Stored Fields | Stored Fields Index | + |================================|==================================| + | | | + | |--------------------| ||--------|--------|. . .|--------|| + | |-> | Stored Fields Data | || 0 | 1 | | D# - 1 || + | | |--------------------| ||--------|----|---|. . .|--------|| + | | | | | + |===|============================|==============|===================| + | | + |-------------------------------------------| + +Stored Fields Data is an arbitrary size record, which consists of metadata and [Snappy](https://github.com/golang/snappy)-compressed data. 
+ + Stored Fields Data + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + | MDS | CDS | MD | CD | + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + + MDS. Metadata size. + CDS. Compressed data size. + MD. Metadata. + CD. Snappy-compressed data. + +## Fields + +Fields Index section located between addresses `F` and `len(file) - len(footer)` and consist of `uint64` values (`F1`, `F2`, ...) which are offsets to records in Fields section. We have `F# = (len(file) - len(footer) - F) / sizeof(uint64)` fields. + + + (...) [F] [F + F#] + | Fields | Fields Index. | + |================================|================================| + | | | + | |~~~~~~~~|~~~~~~~~|---...---|||--------|--------|...|--------|| + ||->| Dict | Length | Name ||| 0 | 1 | | F# - 1 || + || |~~~~~~~~|~~~~~~~~|---...---|||--------|----|---|...|--------|| + || | | | + ||===============================|==============|=================| + | | + |----------------------------------------------| + + +## Dictionaries + Postings + +Each of fields has its own dictionary, encoded in [Vellum](https://github.com/couchbase/vellum) format. Dictionary consists of pairs `(term, offset)`, where `offset` indicates the position of postings (list of documents) for this particular term. + + |================================================================|- Dictionaries + + | | Postings + + | | DocValues + | Freq/Norm (chunked) | + | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | |->[ Freq | Norm (float32 under varint) ] | + | | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | | | + | |------------------------------------------------------------| | + | Location Details (chunked) | | + | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | |->[ Size | Pos | Start | End | Arr# | ArrPos | ... 
] | | + | | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | | | | + | |----------------------| | | + | Postings List | | | + | |~~~~~~~~|~~~~~|~~|~~~~~~~~|-----------...--| | | + | |->| F/N | LD | Length | ROARING BITMAP | | | + | | |~~~~~|~~|~~~~~~~~|~~~~~~~~|-----------...--| | | + | | |----------------------------------------------| | + | |--------------------------------------| | + | Dictionary | | + | |~~~~~~~~|--------------------------|-...-| | + | |->| Length | VELLUM DATA : (TERM -> OFFSET) | | + | | |~~~~~~~~|----------------------------...-| | + | | | + |======|=========================================================|- DocValues Index + | | | + |======|=========================================================|- Fields + | | | + | |~~~~|~~~|~~~~~~~~|---...---| | + | | Dict | Length | Name | | + | |~~~~~~~~|~~~~~~~~|---...---| | + | | + |================================================================| + +## DocValues + +DocValues Index is `F#` pairs of varints, one pair per field. Each pair of varints indicates start and end point of DocValues slice. + + |================================================================| + | |------...--| | + | |->| DocValues |<-| | + | | |------...--| | | + |==|=================|===========================================|- DocValues Index + ||~|~~~~~~~~~|~~~~~~~|~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + || DV1 START | DV1 STOP | . . . . . | DV(F#) START | DV(F#) END || + ||~~~~~~~~~~~|~~~~~~~~~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + |================================================================| + +DocValues is chunked Snappy-compressed values for each document and field. + + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + [ Doc# in Chunk | Doc1 | Offset1 | ... | DocN | OffsetN | SNAPPY COMPRESSED DATA ] + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + +Last 16 bytes are description of chunks. 
+ + |~~~~~~~~~~~~...~|----------------|----------------| + | Chunk Sizes | Chunk Size Arr | Chunk# | + |~~~~~~~~~~~~...~|----------------|----------------| diff --git a/vendor/github.com/blevesearch/zap/v12/.gitignore b/vendor/github.com/blevesearch/zap/v12/.gitignore new file mode 100644 index 0000000..46d1cfa --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/.gitignore @@ -0,0 +1,12 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +/cmd/zap/zap +*.test +tags diff --git a/vendor/github.com/blevesearch/zap/v12/LICENSE b/vendor/github.com/blevesearch/zap/v12/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/zap/v12/README.md b/vendor/github.com/blevesearch/zap/v12/README.md new file mode 100644 index 0000000..0facb66 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/README.md @@ -0,0 +1,158 @@ +# zap file format + +Advanced ZAP File Format Documentation is [here](zap.md). + +The file is written in the reverse order that we typically access data. This helps us write in one pass since later sections of the file require file offsets of things we've already written. + +Current usage: + +- mmap the entire file +- crc-32 bytes and version are in fixed position at end of the file +- reading remainder of footer could be version specific +- remainder of footer gives us: + - 3 important offsets (docValue , fields index and stored data index) + - 2 important values (number of docs and chunk factor) +- field data is processed once and memoized onto the heap so that we never have to go back to disk for it +- access to stored data by doc number means first navigating to the stored data index, then accessing a fixed position offset into that slice, which gives us the actual address of the data. the first bytes of that section tell us the size of data so that we know where it ends. 
+- access to all other indexed data follows the following pattern: + - first know the field name -> convert to id + - next navigate to term dictionary for that field + - some operations stop here and do dictionary ops + - next use dictionary to navigate to posting list for a specific term + - walk posting list + - if necessary, walk posting details as we go + - if location info is desired, consult location bitmap to see if it is there + +## stored fields section + +- for each document + - preparation phase: + - produce a slice of metadata bytes and data bytes + - produce these slices in field id order + - field value is appended to the data slice + - metadata slice is varint encoded with the following values for each field value + - field id (uint16) + - field type (byte) + - field value start offset in uncompressed data slice (uint64) + - field value length (uint64) + - field number of array positions (uint64) + - one additional value for each array position (uint64) + - compress the data slice using snappy + - file writing phase: + - remember the start offset for this document + - write out meta data length (varint uint64) + - write out compressed data length (varint uint64) + - write out the metadata bytes + - write out the compressed data bytes + +## stored fields idx + +- for each document + - write start offset (remembered from previous section) of stored data (big endian uint64) + +With this index and a known document number, we have direct access to all the stored field data. 
+ +## posting details (freq/norm) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode term frequency (uint64) + - encode norm factor (float32) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. + +## posting details (location) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode field (uint16) + - encode field pos (uint64) + - encode field start (uint64) + - encode field end (uint64) + - encode number of array positions to follow (uint64) + - encode each array position (each uint64) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. 
+ +## postings list section + +- for each posting list + - preparation phase: + - encode roaring bitmap posting list to bytes (so we know the length) + - file writing phase: + - remember the start position for this posting list + - write freq/norm details offset (remembered from previous, as varint uint64) + - write location details offset (remembered from previous, as varint uint64) + - write length of encoded roaring bitmap + - write the serialized roaring bitmap data + +## dictionary + +- for each field + - preparation phase: + - encode vellum FST with dictionary data pointing to file offset of posting list (remembered from previous) + - file writing phase: + - remember the start position of this persistDictionary + - write length of vellum data (varint uint64) + - write out vellum data + +## fields section + +- for each field + - file writing phase: + - remember start offset for each field + - write dictionary address (remembered from previous) (varint uint64) + - write length of field name (varint uint64) + - write field name bytes + +## fields idx + +- for each field + - file writing phase: + - write big endian uint64 of start offset for each field + +NOTE: currently we don't know or record the length of this fields index. Instead we rely on the fact that we know it immediately precedes a footer of known size. 
+ +## fields DocValue + +- for each field + - preparation phase: + - produce a slice containing multiple consecutive chunks, where each chunk is composed of a meta section followed by compressed columnar field data + - produce a slice remembering the length of each chunk + - file writing phase: + - remember the start position of this first field DocValue offset in the footer + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +NOTE: currently the meta header inside each chunk gives clue to the location offsets and size of the data pertaining to a given docID and any +read operation leverage that meta information to extract the document specific data from the file. + +## footer + +- file writing phase + - write number of docs (big endian uint64) + - write stored field index location (big endian uint64) + - write field index location (big endian uint64) + - write field docValue location (big endian uint64) + - write out chunk factor (big endian uint32) + - write out version (big endian uint32) + - write out file CRC of everything preceding this (big endian uint32) diff --git a/vendor/github.com/blevesearch/zap/v12/build.go b/vendor/github.com/blevesearch/zap/v12/build.go new file mode 100644 index 0000000..467e5e0 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/build.go @@ -0,0 +1,156 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bufio" + "math" + "os" + + "github.com/couchbase/vellum" +) + +const Version uint32 = 12 + +const Type string = "zap" + +const fieldNotUninverted = math.MaxUint64 + +func (sb *SegmentBase) Persist(path string) error { + return PersistSegmentBase(sb, path) +} + +// PersistSegmentBase persists SegmentBase in the zap file format. +func PersistSegmentBase(sb *SegmentBase, path string) error { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + br := bufio.NewWriter(f) + + _, err = br.Write(sb.mem) + if err != nil { + cleanup() + return err + } + + err = persistFooter(sb.numDocs, sb.storedIndexOffset, sb.fieldsIndexOffset, sb.docValueOffset, + sb.chunkMode, sb.memCRC, br) + if err != nil { + cleanup() + return err + } + + err = br.Flush() + if err != nil { + cleanup() + return err + } + + err = f.Sync() + if err != nil { + cleanup() + return err + } + + err = f.Close() + if err != nil { + cleanup() + return err + } + + return nil +} + +func persistStoredFieldValues(fieldID int, + storedFieldValues [][]byte, stf []byte, spf [][]uint64, + curr int, metaEncode varintEncoder, data []byte) ( + int, []byte, error) { + for i := 0; i < len(storedFieldValues); i++ { + // encode field + _, err := metaEncode(uint64(fieldID)) + if err != nil { + return 0, nil, err + } + // encode type + _, err = metaEncode(uint64(stf[i])) + if err != nil { + return 0, nil, err + } + // encode start offset + _, err = metaEncode(uint64(curr)) + if err != nil { + return 0, nil, err + } + // end len + _, err = metaEncode(uint64(len(storedFieldValues[i]))) + if err != nil { + return 0, nil, err + } + // encode number of array pos + _, err = metaEncode(uint64(len(spf[i]))) + if err != nil { + return 0, nil, err + } + // encode all array 
positions + for _, pos := range spf[i] { + _, err = metaEncode(pos) + if err != nil { + return 0, nil, err + } + } + + data = append(data, storedFieldValues[i]...) + curr += len(storedFieldValues[i]) + } + + return curr, data, nil +} + +func InitSegmentBase(mem []byte, memCRC uint32, chunkMode uint32, + fieldsMap map[string]uint16, fieldsInv []string, numDocs uint64, + storedIndexOffset uint64, fieldsIndexOffset uint64, docValueOffset uint64, + dictLocs []uint64) (*SegmentBase, error) { + sb := &SegmentBase{ + mem: mem, + memCRC: memCRC, + chunkMode: chunkMode, + fieldsMap: fieldsMap, + fieldsInv: fieldsInv, + numDocs: numDocs, + storedIndexOffset: storedIndexOffset, + fieldsIndexOffset: fieldsIndexOffset, + docValueOffset: docValueOffset, + dictLocs: dictLocs, + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + } + sb.updateSize() + + err := sb.loadDvReaders() + if err != nil { + return nil, err + } + + return sb, nil +} diff --git a/vendor/github.com/blevesearch/zap/v12/chunk.go b/vendor/github.com/blevesearch/zap/v12/chunk.go new file mode 100644 index 0000000..fe9f398 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/chunk.go @@ -0,0 +1,54 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "fmt" +) + +// LegacyChunkMode was the original chunk mode (always chunk size 1024) +// this mode is still used for chunking doc values. 
+var LegacyChunkMode uint32 = 1024 + +// DefaultChunkMode is the most recent improvement to chunking and should +// be used by default. +var DefaultChunkMode uint32 = 1025 + +func getChunkSize(chunkMode uint32, cardinality uint64, maxDocs uint64) (uint64, error) { + switch { + // any chunkMode <= 1024 will always chunk with chunkSize=chunkMode + case chunkMode <= 1024: + // legacy chunk size + return uint64(chunkMode), nil + + case chunkMode == 1025: + // attempt at simple improvement + // theory - the point of chunking is to put a bound on the maximum number of + // calls to Next() needed to find a random document. ie, you should be able + // to do one jump to the correct chunk, and then walk through at most + // chunk-size items + // previously 1024 was chosen as the chunk size, but this is particularly + // wasteful for low cardinality terms. the observation is that if there + // are less than 1024 items, why not put them all in one chunk, + // this way you'll still achieve the same goal of visiting at most + // chunk-size items. + // no attempt is made to tweak any other case + if cardinality <= 1024 { + return maxDocs, nil + } + return 1024, nil + } + return 0, fmt.Errorf("unknown chunk mode %d", chunkMode) +} diff --git a/vendor/github.com/blevesearch/zap/v12/contentcoder.go b/vendor/github.com/blevesearch/zap/v12/contentcoder.go new file mode 100644 index 0000000..c145b5a --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/contentcoder.go @@ -0,0 +1,243 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "reflect" + + "github.com/golang/snappy" +) + +var reflectStaticSizeMetaData int + +func init() { + var md MetaData + reflectStaticSizeMetaData = int(reflect.TypeOf(md).Size()) +} + +var termSeparator byte = 0xff +var termSeparatorSplitSlice = []byte{termSeparator} + +type chunkedContentCoder struct { + final []byte + chunkSize uint64 + currChunk uint64 + chunkLens []uint64 + + w io.Writer + progressiveWrite bool + + chunkMetaBuf bytes.Buffer + chunkBuf bytes.Buffer + + chunkMeta []MetaData + + compressed []byte // temp buf for snappy compression +} + +// MetaData represents the data information inside a +// chunk. +type MetaData struct { + DocNum uint64 // docNum of the data inside the chunk + DocDvOffset uint64 // offset of data inside the chunk for the given docid +} + +// newChunkedContentCoder returns a new chunk content coder which +// packs data into chunks based on the provided chunkSize +func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, + w io.Writer, progressiveWrite bool) *chunkedContentCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedContentCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + chunkMeta: make([]MetaData, 0, total), + w: w, + progressiveWrite: progressiveWrite, + } + + return rv +} + +// Reset lets you reuse this chunked content coder. Buffers are reset +// and re used. You cannot change the chunk size. 
+func (c *chunkedContentCoder) Reset() { + c.currChunk = 0 + c.final = c.final[:0] + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } + c.chunkMeta = c.chunkMeta[:0] +} + +func (c *chunkedContentCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) { + total := int(maxDocNum/chunkSize + 1) + c.chunkSize = chunkSize + if cap(c.chunkLens) < total { + c.chunkLens = make([]uint64, total) + } else { + c.chunkLens = c.chunkLens[:total] + } + if cap(c.chunkMeta) < total { + c.chunkMeta = make([]MetaData, 0, total) + } +} + +// Close indicates you are done calling Add() this allows +// the final chunk to be encoded. +func (c *chunkedContentCoder) Close() error { + return c.flushContents() +} + +func (c *chunkedContentCoder) flushContents() error { + // flush the contents, with meta information at first + buf := make([]byte, binary.MaxVarintLen64) + n := binary.PutUvarint(buf, uint64(len(c.chunkMeta))) + _, err := c.chunkMetaBuf.Write(buf[:n]) + if err != nil { + return err + } + + // write out the metaData slice + for _, meta := range c.chunkMeta { + _, err := writeUvarints(&c.chunkMetaBuf, meta.DocNum, meta.DocDvOffset) + if err != nil { + return err + } + } + + // write the metadata to final data + metaData := c.chunkMetaBuf.Bytes() + c.final = append(c.final, c.chunkMetaBuf.Bytes()...) + // write the compressed data to the final data + c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes()) + c.final = append(c.final, c.compressed...) + + c.chunkLens[c.currChunk] = uint64(len(c.compressed) + len(metaData)) + + if c.progressiveWrite { + _, err := c.w.Write(c.final) + if err != nil { + return err + } + c.final = c.final[:0] + } + + return nil +} + +// Add encodes the provided byte slice into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. 
+func (c *chunkedContentCoder) Add(docNum uint64, vals []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // flush out the previous chunk details + err := c.flushContents() + if err != nil { + return err + } + // clearing the chunk specific meta for next chunk + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + c.chunkMeta = c.chunkMeta[:0] + c.currChunk = chunk + } + + // get the starting offset for this doc + dvOffset := c.chunkBuf.Len() + dvSize, err := c.chunkBuf.Write(vals) + if err != nil { + return err + } + + c.chunkMeta = append(c.chunkMeta, MetaData{ + DocNum: docNum, + DocDvOffset: uint64(dvOffset + dvSize), + }) + return nil +} + +// Write commits all the encoded chunked contents to the provided writer. +// +// | ..... data ..... | chunk offsets (varints) +// | position of chunk offsets (uint64) | number of offsets (uint64) | +// +func (c *chunkedContentCoder) Write() (int, error) { + var tw int + + if c.final != nil { + // write out the data section first + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsStart := uint64(tw) + + if cap(c.final) < binary.MaxVarintLen64 { + c.final = make([]byte, binary.MaxVarintLen64) + } else { + c.final = c.final[0:binary.MaxVarintLen64] + } + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + // write out the chunk offsets + for _, chunkOffset := range chunkOffsets { + n := binary.PutUvarint(c.final, chunkOffset) + nw, err := c.w.Write(c.final[:n]) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsLen := uint64(tw) - chunkOffsetsStart + + c.final = c.final[0:8] + // write out the length of chunk offsets + binary.BigEndian.PutUint64(c.final, chunkOffsetsLen) + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + // write out the number of chunks + binary.BigEndian.PutUint64(c.final, uint64(len(c.chunkLens))) + nw, err = c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + 
+ c.final = c.final[:0] + + return tw, nil +} + +// ReadDocValueBoundary elicits the start, end offsets from a +// metaData header slice +func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = metaHeaders[chunk-1].DocDvOffset + } + return start, metaHeaders[chunk].DocDvOffset +} diff --git a/vendor/github.com/blevesearch/zap/v12/count.go b/vendor/github.com/blevesearch/zap/v12/count.go new file mode 100644 index 0000000..50290f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/count.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "hash/crc32" + "io" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// CountHashWriter is a wrapper around a Writer which counts the number of +// bytes which have been written and computes a crc32 hash +type CountHashWriter struct { + w io.Writer + crc uint32 + n int + s segment.StatsReporter +} + +// NewCountHashWriter returns a CountHashWriter which wraps the provided Writer +func NewCountHashWriter(w io.Writer) *CountHashWriter { + return &CountHashWriter{w: w} +} + +func NewCountHashWriterWithStatsReporter(w io.Writer, s segment.StatsReporter) *CountHashWriter { + return &CountHashWriter{w: w, s: s} +} + +// Write writes the provided bytes to the wrapped writer and counts the bytes +func (c *CountHashWriter) Write(b []byte) (int, error) { + n, err := c.w.Write(b) + c.crc = crc32.Update(c.crc, crc32.IEEETable, b[:n]) + c.n += n + if c.s != nil { + c.s.ReportBytesWritten(uint64(n)) + } + return n, err +} + +// Count returns the number of bytes written +func (c *CountHashWriter) Count() int { + return c.n +} + +// Sum32 returns the CRC-32 hash of the content written to this writer +func (c *CountHashWriter) Sum32() uint32 { + return c.crc +} diff --git a/vendor/github.com/blevesearch/zap/v12/dict.go b/vendor/github.com/blevesearch/zap/v12/dict.go new file mode 100644 index 0000000..ad4a8f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/dict.go @@ -0,0 +1,263 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" +) + +// Dictionary is the zap representation of the term dictionary +type Dictionary struct { + sb *SegmentBase + field string + fieldID uint16 + fst *vellum.FST + fstReader *vellum.Reader +} + +// PostingsList returns the postings list for the specified term +func (d *Dictionary) PostingsList(term []byte, except *roaring.Bitmap, + prealloc segment.PostingsList) (segment.PostingsList, error) { + var preallocPL *PostingsList + pl, ok := prealloc.(*PostingsList) + if ok && pl != nil { + preallocPL = pl + } + return d.postingsList(term, except, preallocPL) +} + +func (d *Dictionary) postingsList(term []byte, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + if d.fstReader == nil { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + postingsOffset, exists, err := d.fstReader.Get(term) + if err != nil { + return nil, fmt.Errorf("vellum err: %v", err) + } + if !exists { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + return d.postingsListFromOffset(postingsOffset, except, rv) +} + +func (d *Dictionary) postingsListFromOffset(postingsOffset uint64, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + rv = d.postingsListInit(rv, except) + + err := rv.read(postingsOffset, d) + if err != nil { + return nil, err + } + + return rv, nil +} + +func (d *Dictionary) postingsListInit(rv *PostingsList, except *roaring.Bitmap) *PostingsList { + if rv == nil || rv == emptyPostingsList { + rv = &PostingsList{} + } else { + postings := rv.postings + if postings != nil { + 
postings.Clear() + } + + *rv = PostingsList{} // clear the struct + + rv.postings = postings + } + rv.sb = d.sb + rv.except = except + return rv +} + +func (d *Dictionary) Contains(key []byte) (bool, error) { + return d.fst.Contains(key) +} + +// Iterator returns an iterator for this dictionary +func (d *Dictionary) Iterator() segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Iterator(nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// PrefixIterator returns an iterator which only visits terms having the +// the specified prefix +func (d *Dictionary) PrefixIterator(prefix string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + kBeg := []byte(prefix) + kEnd := segment.IncrementBytes(kBeg) + + if d.fst != nil { + itr, err := d.fst.Iterator(kBeg, kEnd) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// RangeIterator returns an iterator which only visits terms between the +// start and end terms. NOTE: bleve.index API specifies the end is inclusive. 
+func (d *Dictionary) RangeIterator(start, end string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + // need to increment the end position to be inclusive + var endBytes []byte + if len(end) > 0 { + endBytes = []byte(end) + if endBytes[len(endBytes)-1] < 0xff { + endBytes[len(endBytes)-1]++ + } else { + endBytes = append(endBytes, 0xff) + } + } + + if d.fst != nil { + itr, err := d.fst.Iterator([]byte(start), endBytes) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// AutomatonIterator returns an iterator which only visits terms +// having the the vellum automaton and start/end key range +func (d *Dictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Search(a, startKeyInclusive, endKeyExclusive) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +func (d *Dictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) segment.DictionaryIterator { + + rv := &DictionaryIterator{ + d: d, + omitCount: !includeCount, + } + + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + rv.err = err + return rv + } + for _, term := range onlyTerms { + err = builder.Insert(term, 0) + if err != nil { + rv.err = err + return rv + } + } + err = builder.Close() + if err != nil { + rv.err = err + return rv + } + + onlyFST, err := vellum.Load(buf.Bytes()) + if err != nil { + rv.err = err + return rv + } + + itr, err := d.fst.Search(onlyFST, nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + + return rv +} + +// DictionaryIterator is an iterator for term dictionary +type DictionaryIterator struct { + d *Dictionary + itr vellum.Iterator + err error + tmp PostingsList + entry 
index.DictEntry + omitCount bool +} + +// Next returns the next entry in the dictionary +func (i *DictionaryIterator) Next() (*index.DictEntry, error) { + if i.err != nil && i.err != vellum.ErrIteratorDone { + return nil, i.err + } else if i.itr == nil || i.err == vellum.ErrIteratorDone { + return nil, nil + } + term, postingsOffset := i.itr.Current() + i.entry.Term = string(term) + if !i.omitCount { + i.err = i.tmp.read(postingsOffset, i.d) + if i.err != nil { + return nil, i.err + } + i.entry.Count = i.tmp.Count() + } + i.err = i.itr.Next() + return &i.entry, nil +} diff --git a/vendor/github.com/blevesearch/zap/v12/docvalues.go b/vendor/github.com/blevesearch/zap/v12/docvalues.go new file mode 100644 index 0000000..793797b --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/docvalues.go @@ -0,0 +1,312 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/golang/snappy" +) + +var reflectStaticSizedocValueReader int + +func init() { + var dvi docValueReader + reflectStaticSizedocValueReader = int(reflect.TypeOf(dvi).Size()) +} + +type docNumTermsVisitor func(docNum uint64, terms []byte) error + +type docVisitState struct { + dvrs map[uint16]*docValueReader + segment *SegmentBase +} + +type docValueReader struct { + field string + curChunkNum uint64 + chunkOffsets []uint64 + dvDataLoc uint64 + curChunkHeader []MetaData + curChunkData []byte // compressed data cache + uncompressed []byte // temp buf for snappy decompression +} + +func (di *docValueReader) size() int { + return reflectStaticSizedocValueReader + size.SizeOfPtr + + len(di.field) + + len(di.chunkOffsets)*size.SizeOfUint64 + + len(di.curChunkHeader)*reflectStaticSizeMetaData + + len(di.curChunkData) +} + +func (di *docValueReader) cloneInto(rv *docValueReader) *docValueReader { + if rv == nil { + rv = &docValueReader{} + } + + rv.field = di.field + rv.curChunkNum = math.MaxUint64 + rv.chunkOffsets = di.chunkOffsets // immutable, so it's sharable + rv.dvDataLoc = di.dvDataLoc + rv.curChunkHeader = rv.curChunkHeader[:0] + rv.curChunkData = nil + rv.uncompressed = rv.uncompressed[:0] + + return rv +} + +func (di *docValueReader) curChunkNumber() uint64 { + return di.curChunkNum +} + +func (s *SegmentBase) loadFieldDocValueReader(field string, + fieldDvLocStart, fieldDvLocEnd uint64) (*docValueReader, error) { + // get the docValue offset for the given fields + if fieldDvLocStart == fieldNotUninverted { + // no docValues found, nothing to do + return nil, nil + } + + // read the number of chunks, and chunk offsets position + var numChunks, chunkOffsetsPosition uint64 + + if fieldDvLocEnd-fieldDvLocStart > 16 { + numChunks = 
binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-8 : fieldDvLocEnd]) + // read the length of chunk offsets + chunkOffsetsLen := binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-16 : fieldDvLocEnd-8]) + // acquire position of chunk offsets + chunkOffsetsPosition = (fieldDvLocEnd - 16) - chunkOffsetsLen + } else { + return nil, fmt.Errorf("loadFieldDocValueReader: fieldDvLoc too small: %d-%d", fieldDvLocEnd, fieldDvLocStart) + } + + fdvIter := &docValueReader{ + curChunkNum: math.MaxUint64, + field: field, + chunkOffsets: make([]uint64, int(numChunks)), + } + + // read the chunk offsets + var offset uint64 + for i := 0; i < int(numChunks); i++ { + loc, read := binary.Uvarint(s.mem[chunkOffsetsPosition+offset : chunkOffsetsPosition+offset+binary.MaxVarintLen64]) + if read <= 0 { + return nil, fmt.Errorf("corrupted chunk offset during segment load") + } + fdvIter.chunkOffsets[i] = loc + offset += uint64(read) + } + + // set the data offset + fdvIter.dvDataLoc = fieldDvLocStart + + return fdvIter, nil +} + +func (di *docValueReader) loadDvChunk(chunkNumber uint64, s *SegmentBase) error { + // advance to the chunk where the docValues + // reside for the given docNum + destChunkDataLoc, curChunkEnd := di.dvDataLoc, di.dvDataLoc + start, end := readChunkBoundary(int(chunkNumber), di.chunkOffsets) + if start >= end { + di.curChunkHeader = di.curChunkHeader[:0] + di.curChunkData = nil + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil + } + + destChunkDataLoc += start + curChunkEnd += end + + // read the number of docs reside in the chunk + numDocs, read := binary.Uvarint(s.mem[destChunkDataLoc : destChunkDataLoc+binary.MaxVarintLen64]) + if read <= 0 { + return fmt.Errorf("failed to read the chunk") + } + chunkMetaLoc := destChunkDataLoc + uint64(read) + + offset := uint64(0) + if cap(di.curChunkHeader) < int(numDocs) { + di.curChunkHeader = make([]MetaData, int(numDocs)) + } else { + di.curChunkHeader = di.curChunkHeader[:int(numDocs)] + } + for i := 
0; i < int(numDocs); i++ { + di.curChunkHeader[i].DocNum, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + di.curChunkHeader[i].DocDvOffset, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + } + + compressedDataLoc := chunkMetaLoc + offset + dataLength := curChunkEnd - compressedDataLoc + di.curChunkData = s.mem[compressedDataLoc : compressedDataLoc+dataLength] + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil +} + +func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTermsVisitor) error { + for i := 0; i < len(di.chunkOffsets); i++ { + err := di.loadDvChunk(uint64(i), s) + if err != nil { + return err + } + if di.curChunkData == nil || len(di.curChunkHeader) == 0 { + continue + } + + // uncompress the already loaded data + uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + + start := uint64(0) + for _, entry := range di.curChunkHeader { + err = visitor(entry.DocNum, uncompressed[start:entry.DocDvOffset]) + if err != nil { + return err + } + + start = entry.DocDvOffset + } + } + + return nil +} + +func (di *docValueReader) visitDocValues(docNum uint64, + visitor index.DocumentFieldTermVisitor) error { + // binary search the term locations for the docNum + start, end := di.getDocValueLocs(docNum) + if start == math.MaxUint64 || end == math.MaxUint64 || start == end { + return nil + } + + var uncompressed []byte + var err error + // use the uncompressed copy if available + if len(di.uncompressed) > 0 { + uncompressed = di.uncompressed + } else { + // uncompress the already loaded data + uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + } + + // pick the 
terms for the given docNum + uncompressed = uncompressed[start:end] + for { + i := bytes.Index(uncompressed, termSeparatorSplitSlice) + if i < 0 { + break + } + + visitor(di.field, uncompressed[0:i]) + uncompressed = uncompressed[i+1:] + } + + return nil +} + +func (di *docValueReader) getDocValueLocs(docNum uint64) (uint64, uint64) { + i := sort.Search(len(di.curChunkHeader), func(i int) bool { + return di.curChunkHeader[i].DocNum >= docNum + }) + if i < len(di.curChunkHeader) && di.curChunkHeader[i].DocNum == docNum { + return ReadDocValueBoundary(i, di.curChunkHeader) + } + return math.MaxUint64, math.MaxUint64 +} + +// VisitDocumentFieldTerms is an implementation of the +// DocumentFieldTermVisitable interface +func (s *SegmentBase) VisitDocumentFieldTerms(localDocNum uint64, fields []string, + visitor index.DocumentFieldTermVisitor, dvsIn segment.DocVisitState) ( + segment.DocVisitState, error) { + dvs, ok := dvsIn.(*docVisitState) + if !ok || dvs == nil { + dvs = &docVisitState{} + } else { + if dvs.segment != s { + dvs.segment = s + dvs.dvrs = nil + } + } + + var fieldIDPlus1 uint16 + if dvs.dvrs == nil { + dvs.dvrs = make(map[uint16]*docValueReader, len(fields)) + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvIter, exists := s.fieldDvReaders[fieldID]; exists && + dvIter != nil { + dvs.dvrs[fieldID] = dvIter.cloneInto(dvs.dvrs[fieldID]) + } + } + } + + // find the chunkNumber where the docValues are stored + // NOTE: doc values continue to use legacy chunk mode + chunkFactor, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, err + } + docInChunk := localDocNum / chunkFactor + var dvr *docValueReader + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvr, ok = dvs.dvrs[fieldID]; ok && dvr != nil { + // check if the chunk is already loaded + if docInChunk != 
dvr.curChunkNumber() { + err := dvr.loadDvChunk(docInChunk, s) + if err != nil { + return dvs, err + } + } + + _ = dvr.visitDocValues(localDocNum, visitor) + } + } + return dvs, nil +} + +// VisitableDocValueFields returns the list of fields with +// persisted doc value terms ready to be visitable using the +// VisitDocumentFieldTerms method. +func (s *SegmentBase) VisitableDocValueFields() ([]string, error) { + return s.fieldDvNames, nil +} diff --git a/vendor/github.com/blevesearch/zap/v12/enumerator.go b/vendor/github.com/blevesearch/zap/v12/enumerator.go new file mode 100644 index 0000000..bc5b7e6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/enumerator.go @@ -0,0 +1,138 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + + "github.com/couchbase/vellum" +) + +// enumerator provides an ordered traversal of multiple vellum +// iterators. Like JOIN of iterators, the enumerator produces a +// sequence of (key, iteratorIndex, value) tuples, sorted by key ASC, +// then iteratorIndex ASC, where the same key might be seen or +// repeated across multiple child iterators. 
+type enumerator struct { + itrs []vellum.Iterator + currKs [][]byte + currVs []uint64 + + lowK []byte + lowIdxs []int + lowCurr int +} + +// newEnumerator returns a new enumerator over the vellum Iterators +func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) { + rv := &enumerator{ + itrs: itrs, + currKs: make([][]byte, len(itrs)), + currVs: make([]uint64, len(itrs)), + lowIdxs: make([]int, 0, len(itrs)), + } + for i, itr := range rv.itrs { + rv.currKs[i], rv.currVs[i] = itr.Current() + } + rv.updateMatches(false) + if rv.lowK == nil && len(rv.lowIdxs) == 0 { + return rv, vellum.ErrIteratorDone + } + return rv, nil +} + +// updateMatches maintains the low key matches based on the currKs +func (m *enumerator) updateMatches(skipEmptyKey bool) { + m.lowK = nil + m.lowIdxs = m.lowIdxs[:0] + m.lowCurr = 0 + + for i, key := range m.currKs { + if (key == nil && m.currVs[i] == 0) || // in case of empty iterator + (len(key) == 0 && skipEmptyKey) { // skip empty keys + continue + } + + cmp := bytes.Compare(key, m.lowK) + if cmp < 0 || len(m.lowIdxs) == 0 { + // reached a new low + m.lowK = key + m.lowIdxs = m.lowIdxs[:0] + m.lowIdxs = append(m.lowIdxs, i) + } else if cmp == 0 { + m.lowIdxs = append(m.lowIdxs, i) + } + } +} + +// Current returns the enumerator's current key, iterator-index, and +// value. If the enumerator is not pointing at a valid value (because +// Next returned an error previously), Current will return nil,0,0. +func (m *enumerator) Current() ([]byte, int, uint64) { + var i int + var v uint64 + if m.lowCurr < len(m.lowIdxs) { + i = m.lowIdxs[m.lowCurr] + v = m.currVs[i] + } + return m.lowK, i, v +} + +// GetLowIdxsAndValues will return all of the iterator indices +// which point to the current key, and their corresponding +// values. This can be used by advanced caller which may need +// to peek into these other sets of data before processing. 
+func (m *enumerator) GetLowIdxsAndValues() ([]int, []uint64) { + values := make([]uint64, 0, len(m.lowIdxs)) + for _, idx := range m.lowIdxs { + values = append(values, m.currVs[idx]) + } + return m.lowIdxs, values +} + +// Next advances the enumerator to the next key/iterator/value result, +// else vellum.ErrIteratorDone is returned. +func (m *enumerator) Next() error { + m.lowCurr += 1 + if m.lowCurr >= len(m.lowIdxs) { + // move all the current low iterators forwards + for _, vi := range m.lowIdxs { + err := m.itrs[vi].Next() + if err != nil && err != vellum.ErrIteratorDone { + return err + } + m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current() + } + // can skip any empty keys encountered at this point + m.updateMatches(true) + } + if m.lowK == nil && len(m.lowIdxs) == 0 { + return vellum.ErrIteratorDone + } + return nil +} + +// Close all the underlying Iterators. The first error, if any, will +// be returned. +func (m *enumerator) Close() error { + var rv error + for _, itr := range m.itrs { + err := itr.Close() + if rv == nil { + rv = err + } + } + return rv +} diff --git a/vendor/github.com/blevesearch/zap/v12/intDecoder.go b/vendor/github.com/blevesearch/zap/v12/intDecoder.go new file mode 100644 index 0000000..4cd008f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/intDecoder.go @@ -0,0 +1,111 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +type chunkedIntDecoder struct { + startOffset uint64 + dataStartOffset uint64 + chunkOffsets []uint64 + curChunkBytes []byte + data []byte + r *segment.MemUvarintReader +} + +func newChunkedIntDecoder(buf []byte, offset uint64) *chunkedIntDecoder { + rv := &chunkedIntDecoder{startOffset: offset, data: buf} + var n, numChunks uint64 + var read int + if offset == termNotEncoded { + numChunks = 0 + } else { + numChunks, read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64]) + } + + n += uint64(read) + if cap(rv.chunkOffsets) >= int(numChunks) { + rv.chunkOffsets = rv.chunkOffsets[:int(numChunks)] + } else { + rv.chunkOffsets = make([]uint64, int(numChunks)) + } + for i := 0; i < int(numChunks); i++ { + rv.chunkOffsets[i], read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64]) + n += uint64(read) + } + rv.dataStartOffset = offset + n + return rv +} + +func (d *chunkedIntDecoder) loadChunk(chunk int) error { + if d.startOffset == termNotEncoded { + d.r = segment.NewMemUvarintReader([]byte(nil)) + return nil + } + + if chunk >= len(d.chunkOffsets) { + return fmt.Errorf("tried to load freq chunk that doesn't exist %d/(%d)", + chunk, len(d.chunkOffsets)) + } + + end, start := d.dataStartOffset, d.dataStartOffset + s, e := readChunkBoundary(chunk, d.chunkOffsets) + start += s + end += e + d.curChunkBytes = d.data[start:end] + if d.r == nil { + d.r = segment.NewMemUvarintReader(d.curChunkBytes) + } else { + d.r.Reset(d.curChunkBytes) + } + + return nil +} + +func (d *chunkedIntDecoder) reset() { + d.startOffset = 0 + d.dataStartOffset = 0 + d.chunkOffsets = d.chunkOffsets[:0] + d.curChunkBytes = d.curChunkBytes[:0] + d.data = d.data[:0] + if d.r != nil { + d.r.Reset([]byte(nil)) + } +} + +func (d *chunkedIntDecoder) isNil() bool { + return d.curChunkBytes == nil +} + +func (d *chunkedIntDecoder) readUvarint() (uint64, error) { + 
return d.r.ReadUvarint() +} + +func (d *chunkedIntDecoder) SkipUvarint() { + d.r.SkipUvarint() +} + +func (d *chunkedIntDecoder) SkipBytes(count int) { + d.r.SkipBytes(count) +} + +func (d *chunkedIntDecoder) Len() int { + return d.r.Len() +} diff --git a/vendor/github.com/blevesearch/zap/v12/intcoder.go b/vendor/github.com/blevesearch/zap/v12/intcoder.go new file mode 100644 index 0000000..7682593 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/intcoder.go @@ -0,0 +1,203 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "math" +) + +const termNotEncoded = math.MaxUint64 + +type chunkedIntCoder struct { + final []byte + chunkSize uint64 + chunkBuf bytes.Buffer + chunkLens []uint64 + currChunk uint64 + + buf []byte +} + +// newChunkedIntCoder returns a new chunk int coder which packs data into +// chunks based on the provided chunkSize and supports up to the specified +// maxDocNum +func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedIntCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + final: make([]byte, 0, 64), + } + + return rv +} + +// Reset lets you reuse this chunked int coder. buffers are reset and reused +// from previous use. you cannot change the chunk size or max doc num. 
+func (c *chunkedIntCoder) Reset() { + c.final = c.final[:0] + c.chunkBuf.Reset() + c.currChunk = 0 + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } +} + +// SetChunkSize changes the chunk size. It is only valid to do so +// with a new chunkedIntCoder, or immediately after calling Reset() +func (c *chunkedIntCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) { + total := int(maxDocNum/chunkSize + 1) + c.chunkSize = chunkSize + if cap(c.chunkLens) < total { + c.chunkLens = make([]uint64, total) + } else { + c.chunkLens = c.chunkLens[:total] + } +} + +// Add encodes the provided integers into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. +func (c *chunkedIntCoder) Add(docNum uint64, vals ...uint64) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + if len(c.buf) < binary.MaxVarintLen64 { + c.buf = make([]byte, binary.MaxVarintLen64) + } + + for _, val := range vals { + wb := binary.PutUvarint(c.buf, val) + _, err := c.chunkBuf.Write(c.buf[:wb]) + if err != nil { + return err + } + } + + return nil +} + +func (c *chunkedIntCoder) AddBytes(docNum uint64, buf []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + _, err := c.chunkBuf.Write(buf) + return err +} + +// Close indicates you are done calling Add() this allows the final chunk +// to be encoded. +func (c *chunkedIntCoder) Close() { + encodingBytes := c.chunkBuf.Bytes() + c.chunkLens[c.currChunk] = uint64(len(encodingBytes)) + c.final = append(c.final, encodingBytes...) + c.currChunk = uint64(cap(c.chunkLens)) // sentinel to detect double close +} + +// Write commits all the encoded chunked integers to the provided writer. 
+func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { + bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) + if len(c.buf) < bufNeeded { + c.buf = make([]byte, bufNeeded) + } + buf := c.buf + + // convert the chunk lengths into chunk offsets + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + + // write out the number of chunks & each chunk offsets + n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) + for _, chunkOffset := range chunkOffsets { + n += binary.PutUvarint(buf[n:], chunkOffset) + } + + tw, err := w.Write(buf[:n]) + if err != nil { + return tw, err + } + + // write out the data + nw, err := w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + return tw, nil +} + +// writeAt commits all the encoded chunked integers to the provided writer +// and returns the starting offset, total bytes written and an error +func (c *chunkedIntCoder) writeAt(w io.Writer) (uint64, int, error) { + startOffset := uint64(termNotEncoded) + if len(c.final) <= 0 { + return startOffset, 0, nil + } + + if chw := w.(*CountHashWriter); chw != nil { + startOffset = uint64(chw.Count()) + } + + tw, err := c.Write(w) + return startOffset, tw, err +} + +func (c *chunkedIntCoder) FinalSize() int { + return len(c.final) +} + +// modifyLengthsToEndOffsets converts the chunk length array +// to a chunk offset array. The readChunkBoundary +// will figure out the start and end of every chunk from +// these offsets. Starting offset of i'th index is stored +// in i-1'th position except for 0'th index and ending offset +// is stored at i'th index position. +// For 0'th element, starting position is always zero. 
+// eg: +// Lens -> 5 5 5 5 => 5 10 15 20 +// Lens -> 0 5 0 5 => 0 5 5 10 +// Lens -> 0 0 0 5 => 0 0 0 5 +// Lens -> 5 0 0 0 => 5 5 5 5 +// Lens -> 0 5 0 0 => 0 5 5 5 +// Lens -> 0 0 5 0 => 0 0 5 5 +func modifyLengthsToEndOffsets(lengths []uint64) []uint64 { + var runningOffset uint64 + var index, i int + for i = 1; i <= len(lengths); i++ { + runningOffset += lengths[i-1] + lengths[index] = runningOffset + index++ + } + return lengths +} + +func readChunkBoundary(chunk int, offsets []uint64) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = offsets[chunk-1] + } + return start, offsets[chunk] +} diff --git a/vendor/github.com/blevesearch/zap/v12/merge.go b/vendor/github.com/blevesearch/zap/v12/merge.go new file mode 100644 index 0000000..805100f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/merge.go @@ -0,0 +1,847 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bufio" + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "sort" + + "github.com/RoaringBitmap/roaring" + seg "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var DefaultFileMergerBufferSize = 1024 * 1024 + +const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc + +// Merge takes a slice of segments and bit masks describing which +// documents may be dropped, and creates a new segment containing the +// remaining data. This new segment is built at the specified path. +func (*ZapPlugin) Merge(segments []seg.Segment, drops []*roaring.Bitmap, path string, + closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + + segmentBases := make([]*SegmentBase, len(segments)) + for segmenti, segment := range segments { + switch segmentx := segment.(type) { + case *Segment: + segmentBases[segmenti] = &segmentx.SegmentBase + case *SegmentBase: + segmentBases[segmenti] = segmentx + default: + panic(fmt.Sprintf("oops, unexpected segment type: %T", segment)) + } + } + return mergeSegmentBases(segmentBases, drops, path, DefaultChunkMode, closeCh, s) +} + +func mergeSegmentBases(segmentBases []*SegmentBase, drops []*roaring.Bitmap, path string, + chunkMode uint32, closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return nil, 0, err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + // buffer the output + br := bufio.NewWriterSize(f, DefaultFileMergerBufferSize) + + // wrap it for counting (tracking offsets) + cr := NewCountHashWriterWithStatsReporter(br, s) + + newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, _, _, _, err := + MergeToWriter(segmentBases, drops, chunkMode, cr, closeCh) + if err != nil { + cleanup() + return nil, 0, err + } + + err = 
persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, + docValueOffset, chunkMode, cr.Sum32(), cr) + if err != nil { + cleanup() + return nil, 0, err + } + + err = br.Flush() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Sync() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Close() + if err != nil { + cleanup() + return nil, 0, err + } + + return newDocNums, uint64(cr.Count()), nil +} + +func MergeToWriter(segments []*SegmentBase, drops []*roaring.Bitmap, + chunkMode uint32, cr *CountHashWriter, closeCh chan struct{}) ( + newDocNums [][]uint64, + numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + dictLocs []uint64, fieldsInv []string, fieldsMap map[string]uint16, + err error) { + docValueOffset = uint64(fieldNotUninverted) + + var fieldsSame bool + fieldsSame, fieldsInv = mergeFields(segments) + fieldsMap = mapFields(fieldsInv) + + numDocs = computeNewDocCount(segments, drops) + + if isClosed(closeCh) { + return nil, 0, 0, 0, 0, nil, nil, nil, seg.ErrClosed + } + + if numDocs > 0 { + storedIndexOffset, newDocNums, err = mergeStoredAndRemap(segments, drops, + fieldsMap, fieldsInv, fieldsSame, numDocs, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + dictLocs, docValueOffset, err = persistMergedRest(segments, drops, + fieldsInv, fieldsMap, fieldsSame, + newDocNums, numDocs, chunkMode, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + } else { + dictLocs = make([]uint64, len(fieldsInv)) + } + + fieldsIndexOffset, err = persistFields(fieldsInv, cr, dictLocs) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + return newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, dictLocs, fieldsInv, fieldsMap, nil +} + +// mapFields takes the fieldsInv list and returns a map of fieldName +// to fieldID+1 +func mapFields(fields []string) map[string]uint16 { + rv := make(map[string]uint16, len(fields)) + for 
i, fieldName := range fields { + rv[fieldName] = uint16(i) + 1 + } + return rv +} + +// computeNewDocCount determines how many documents will be in the newly +// merged segment when obsoleted docs are dropped +func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { + var newDocCount uint64 + for segI, segment := range segments { + newDocCount += segment.numDocs + if drops[segI] != nil { + newDocCount -= drops[segI].GetCardinality() + } + } + return newDocCount +} + +func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap, + fieldsInv []string, fieldsMap map[string]uint16, fieldsSame bool, + newDocNumsIn [][]uint64, newSegDocCount uint64, chunkMode uint32, + w *CountHashWriter, closeCh chan struct{}) ([]uint64, uint64, error) { + + var bufMaxVarintLen64 []byte = make([]byte, binary.MaxVarintLen64) + var bufLoc []uint64 + + var postings *PostingsList + var postItr *PostingsIterator + + rv := make([]uint64, len(fieldsInv)) + fieldDvLocsStart := make([]uint64, len(fieldsInv)) + fieldDvLocsEnd := make([]uint64, len(fieldsInv)) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + locEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + + var vellumBuf bytes.Buffer + newVellum, err := vellum.New(&vellumBuf, nil) + if err != nil { + return nil, 0, err + } + + newRoaring := roaring.NewBitmap() + + // for each field + for fieldID, fieldName := range fieldsInv { + + // collect FST iterators from all active segments for this field + var newDocNums [][]uint64 + var drops []*roaring.Bitmap + var dicts []*Dictionary + var itrs []vellum.Iterator + + var segmentsInFocus []*SegmentBase + + for segmentI, segment := range segments { + + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + dict, err2 := 
segment.dictionary(fieldName) + if err2 != nil { + return nil, 0, err2 + } + if dict != nil && dict.fst != nil { + itr, err2 := dict.fst.Iterator(nil, nil) + if err2 != nil && err2 != vellum.ErrIteratorDone { + return nil, 0, err2 + } + if itr != nil { + newDocNums = append(newDocNums, newDocNumsIn[segmentI]) + if dropsIn[segmentI] != nil && !dropsIn[segmentI].IsEmpty() { + drops = append(drops, dropsIn[segmentI]) + } else { + drops = append(drops, nil) + } + dicts = append(dicts, dict) + itrs = append(itrs, itr) + segmentsInFocus = append(segmentsInFocus, segment) + } + } + } + + var prevTerm []byte + + newRoaring.Clear() + + var lastDocNum, lastFreq, lastNorm uint64 + + // determines whether to use "1-hit" encoding optimization + // when a term appears in only 1 doc, with no loc info, + // has freq of 1, and the docNum fits into 31-bits + use1HitEncoding := func(termCardinality uint64) (bool, uint64, uint64) { + if termCardinality == uint64(1) && locEncoder.FinalSize() <= 0 { + docNum := uint64(newRoaring.Minimum()) + if under32Bits(docNum) && docNum == lastDocNum && lastFreq == 1 { + return true, docNum, lastNorm + } + } + return false, 0, 0 + } + + finishTerm := func(term []byte) error { + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := writePostings(newRoaring, + tfEncoder, locEncoder, use1HitEncoding, w, bufMaxVarintLen64) + if err != nil { + return err + } + + if postingsOffset > 0 { + err = newVellum.Insert(term, postingsOffset) + if err != nil { + return err + } + } + + newRoaring.Clear() + + tfEncoder.Reset() + locEncoder.Reset() + + lastDocNum = 0 + lastFreq = 0 + lastNorm = 0 + + return nil + } + + enumerator, err := newEnumerator(itrs) + + for err == nil { + term, itrI, postingsOffset := enumerator.Current() + + if !bytes.Equal(prevTerm, term) { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + // if the term changed, write out the info collected + // for the previous term + err = 
finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + } + if !bytes.Equal(prevTerm, term) || prevTerm == nil { + // compute cardinality of field-term in new seg + var newCard uint64 + lowItrIdxs, lowItrVals := enumerator.GetLowIdxsAndValues() + for i, idx := range lowItrIdxs { + pl, err := dicts[idx].postingsListFromOffset(lowItrVals[i], drops[idx], nil) + if err != nil { + return nil, 0, err + } + newCard += pl.Count() + } + // compute correct chunk size with this + chunkSize, err := getChunkSize(chunkMode, newCard, newSegDocCount) + if err != nil { + return nil, 0, err + } + // update encoders chunk + tfEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + locEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + } + + postings, err = dicts[itrI].postingsListFromOffset( + postingsOffset, drops[itrI], postings) + if err != nil { + return nil, 0, err + } + + postItr = postings.iterator(true, true, true, postItr) + + // can no longer optimize by copying, since chunk factor could have changed + lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs( + fieldsMap, term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder, bufLoc) + + if err != nil { + return nil, 0, err + } + + prevTerm = prevTerm[:0] // copy to prevTerm in case Next() reuses term mem + prevTerm = append(prevTerm, term...) 
+ + err = enumerator.Next() + } + if err != vellum.ErrIteratorDone { + return nil, 0, err + } + + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + + dictOffset := uint64(w.Count()) + + err = newVellum.Close() + if err != nil { + return nil, 0, err + } + vellumData := vellumBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(bufMaxVarintLen64, uint64(len(vellumData))) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return nil, 0, err + } + + // write this vellum to disk + _, err = w.Write(vellumData) + if err != nil { + return nil, 0, err + } + + rv[fieldID] = dictOffset + + // get the field doc value offset (start) + fieldDvLocsStart[fieldID] = uint64(w.Count()) + + // update the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, 0, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, newSegDocCount-1, w, true) + + fdvReadersAvailable := false + var dvIterClone *docValueReader + for segmentI, segment := range segmentsInFocus { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + fieldIDPlus1 := uint16(segment.fieldsMap[fieldName]) + if dvIter, exists := segment.fieldDvReaders[fieldIDPlus1-1]; exists && + dvIter != nil { + fdvReadersAvailable = true + dvIterClone = dvIter.cloneInto(dvIterClone) + err = dvIterClone.iterateAllDocValues(segment, func(docNum uint64, terms []byte) error { + if newDocNums[segmentI][docNum] == docDropped { + return nil + } + err := fdvEncoder.Add(newDocNums[segmentI][docNum], terms) + if err != nil { + return err + } + return nil + }) + if err != nil { + return nil, 0, err + } + } + } + + if fdvReadersAvailable { + err = fdvEncoder.Close() + if err != nil { + return nil, 0, err + } + + // persist the doc value details for this field + _, err = fdvEncoder.Write() + if err != nil { + return nil, 0, err + } + + 
// get the field doc value offset (end) + fieldDvLocsEnd[fieldID] = uint64(w.Count()) + } else { + fieldDvLocsStart[fieldID] = fieldNotUninverted + fieldDvLocsEnd[fieldID] = fieldNotUninverted + } + + // reset vellum buffer and vellum builder + vellumBuf.Reset() + err = newVellum.Reset(&vellumBuf) + if err != nil { + return nil, 0, err + } + } + + fieldDvLocsOffset := uint64(w.Count()) + + buf := bufMaxVarintLen64 + for i := 0; i < len(fieldDvLocsStart); i++ { + n := binary.PutUvarint(buf, fieldDvLocsStart[i]) + _, err := w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + n = binary.PutUvarint(buf, fieldDvLocsEnd[i]) + _, err = w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + } + + return rv, fieldDvLocsOffset, nil +} + +func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder, bufLoc []uint64) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, bufLocOut []uint64, err error) { + next, err := postItr.Next() + for next != nil && err == nil { + hitNewDocNum := newDocNums[next.Number()] + if hitNewDocNum == docDropped { + return 0, 0, 0, nil, fmt.Errorf("see hit with dropped docNum") + } + + newRoaring.Add(uint32(hitNewDocNum)) + + nextFreq := next.Frequency() + nextNorm := uint64(math.Float32bits(float32(next.Norm()))) + + locs := next.Locations() + + err = tfEncoder.Add(hitNewDocNum, + encodeFreqHasLocs(nextFreq, len(locs) > 0), nextNorm) + if err != nil { + return 0, 0, 0, nil, err + } + + if len(locs) > 0 { + numBytesLocs := 0 + for _, loc := range locs { + ap := loc.ArrayPositions() + numBytesLocs += totalUvarintBytes(uint64(fieldsMap[loc.Field()]-1), + loc.Pos(), loc.Start(), loc.End(), uint64(len(ap)), ap) + } + + err = locEncoder.Add(hitNewDocNum, uint64(numBytesLocs)) + if err != nil { + return 0, 0, 0, nil, err + } + + for _, loc := range locs { + ap := loc.ArrayPositions() + if 
cap(bufLoc) < 5+len(ap) { + bufLoc = make([]uint64, 0, 5+len(ap)) + } + args := bufLoc[0:5] + args[0] = uint64(fieldsMap[loc.Field()] - 1) + args[1] = loc.Pos() + args[2] = loc.Start() + args[3] = loc.End() + args[4] = uint64(len(ap)) + args = append(args, ap...) + err = locEncoder.Add(hitNewDocNum, args...) + if err != nil { + return 0, 0, 0, nil, err + } + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + next, err = postItr.Next() + } + + return lastDocNum, lastFreq, lastNorm, bufLoc, err +} + +func writePostings(postings *roaring.Bitmap, tfEncoder, locEncoder *chunkedIntCoder, + use1HitEncoding func(uint64) (bool, uint64, uint64), + w *CountHashWriter, bufMaxVarintLen64 []byte) ( + offset uint64, err error) { + termCardinality := postings.GetCardinality() + if termCardinality <= 0 { + return 0, nil + } + + if use1HitEncoding != nil { + encodeAs1Hit, docNum1Hit, normBits1Hit := use1HitEncoding(termCardinality) + if encodeAs1Hit { + return FSTValEncode1Hit(docNum1Hit, normBits1Hit), nil + } + } + + var tfOffset uint64 + tfOffset, _, err = tfEncoder.writeAt(w) + if err != nil { + return 0, err + } + + var locOffset uint64 + locOffset, _, err = locEncoder.writeAt(w) + if err != nil { + return 0, err + } + + postingsOffset := uint64(w.Count()) + + n := binary.PutUvarint(bufMaxVarintLen64, tfOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + n = binary.PutUvarint(bufMaxVarintLen64, locOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + _, err = writeRoaringWithLen(postings, w, bufMaxVarintLen64) + if err != nil { + return 0, err + } + + return postingsOffset, nil +} + +type varintEncoder func(uint64) (int, error) + +func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap, + fieldsMap map[string]uint16, fieldsInv []string, fieldsSame bool, newSegDocCount uint64, + w *CountHashWriter, closeCh chan struct{}) (uint64, [][]uint64, error) { + 
var rv [][]uint64 // The remapped or newDocNums for each segment. + + var newDocNum uint64 + + var curr int + var data, compressed []byte + var metaBuf bytes.Buffer + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return metaBuf.Write(varBuf[:wb]) + } + + vals := make([][][]byte, len(fieldsInv)) + typs := make([][]byte, len(fieldsInv)) + poss := make([][][]uint64, len(fieldsInv)) + + var posBuf []uint64 + + docNumOffsets := make([]uint64, newSegDocCount) + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + + // for each segment + for segI, segment := range segments { + // check for the closure in meantime + if isClosed(closeCh) { + return 0, nil, seg.ErrClosed + } + + segNewDocNums := make([]uint64, segment.numDocs) + + dropsI := drops[segI] + + // optimize when the field mapping is the same across all + // segments and there are no deletions, via byte-copying + // of stored docs bytes directly to the writer + if fieldsSame && (dropsI == nil || dropsI.GetCardinality() == 0) { + err := segment.copyStoredDocs(newDocNum, docNumOffsets, w) + if err != nil { + return 0, nil, err + } + + for i := uint64(0); i < segment.numDocs; i++ { + segNewDocNums[i] = newDocNum + newDocNum++ + } + rv = append(rv, segNewDocNums) + + continue + } + + // for each doc num + for docNum := uint64(0); docNum < segment.numDocs; docNum++ { + // TODO: roaring's API limits docNums to 32-bits? 
+ if dropsI != nil && dropsI.Contains(uint32(docNum)) { + segNewDocNums[docNum] = docDropped + continue + } + + segNewDocNums[docNum] = newDocNum + + curr = 0 + metaBuf.Reset() + data = data[:0] + + posTemp := posBuf + + // collect all the data + for i := 0; i < len(fieldsInv); i++ { + vals[i] = vals[i][:0] + typs[i] = typs[i][:0] + poss[i] = poss[i][:0] + } + err := segment.visitDocument(vdc, docNum, func(field string, typ byte, value []byte, pos []uint64) bool { + fieldID := int(fieldsMap[field]) - 1 + vals[fieldID] = append(vals[fieldID], value) + typs[fieldID] = append(typs[fieldID], typ) + + // copy array positions to preserve them beyond the scope of this callback + var curPos []uint64 + if len(pos) > 0 { + if cap(posTemp) < len(pos) { + posBuf = make([]uint64, len(pos)*len(fieldsInv)) + posTemp = posBuf + } + curPos = posTemp[0:len(pos)] + copy(curPos, pos) + posTemp = posTemp[len(pos):] + } + poss[fieldID] = append(poss[fieldID], curPos) + + return true + }) + if err != nil { + return 0, nil, err + } + + // _id field special case optimizes ExternalID() lookups + idFieldVal := vals[uint16(0)][0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, nil, err + } + + // now walk the non-"_id" fields in order + for fieldID := 1; fieldID < len(fieldsInv); fieldID++ { + storedFieldValues := vals[fieldID] + + stf := typs[fieldID] + spf := poss[fieldID] + + var err2 error + curr, data, err2 = persistStoredFieldValues(fieldID, + storedFieldValues, stf, spf, curr, metaEncode, data) + if err2 != nil { + return 0, nil, err2 + } + } + + metaBytes := metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + // record where we're about to start writing + docNumOffsets[newDocNum] = uint64(w.Count()) + + // write out the meta len and compressed data len + _, err = writeUvarints(w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, nil, err + } + // now write the meta + _, err 
= w.Write(metaBytes) + if err != nil { + return 0, nil, err + } + // now write the _id field val (counted as part of the 'compressed' data) + _, err = w.Write(idFieldVal) + if err != nil { + return 0, nil, err + } + // now write the compressed data + _, err = w.Write(compressed) + if err != nil { + return 0, nil, err + } + + newDocNum++ + } + + rv = append(rv, segNewDocNums) + } + + // return value is the start of the stored index + storedIndexOffset := uint64(w.Count()) + + // now write out the stored doc index + for _, docNumOffset := range docNumOffsets { + err := binary.Write(w, binary.BigEndian, docNumOffset) + if err != nil { + return 0, nil, err + } + } + + return storedIndexOffset, rv, nil +} + +// copyStoredDocs writes out a segment's stored doc info, optimized by +// using a single Write() call for the entire set of bytes. The +// newDocNumOffsets is filled with the new offsets for each doc. +func (s *SegmentBase) copyStoredDocs(newDocNum uint64, newDocNumOffsets []uint64, + w *CountHashWriter) error { + if s.numDocs <= 0 { + return nil + } + + indexOffset0, storedOffset0, _, _, _ := + s.getDocStoredOffsets(0) // the segment's first doc + + indexOffsetN, storedOffsetN, readN, metaLenN, dataLenN := + s.getDocStoredOffsets(s.numDocs - 1) // the segment's last doc + + storedOffset0New := uint64(w.Count()) + + storedBytes := s.mem[storedOffset0 : storedOffsetN+readN+metaLenN+dataLenN] + _, err := w.Write(storedBytes) + if err != nil { + return err + } + + // remap the storedOffset's for the docs into new offsets relative + // to storedOffset0New, filling the given docNumOffsetsOut array + for indexOffset := indexOffset0; indexOffset <= indexOffsetN; indexOffset += 8 { + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + storedOffsetNew := storedOffset - storedOffset0 + storedOffset0New + newDocNumOffsets[newDocNum] = storedOffsetNew + newDocNum += 1 + } + + return nil +} + +// mergeFields builds a unified list of fields used across 
all the +// input segments, and computes whether the fields are the same across +// segments (which depends on fields to be sorted in the same way +// across segments) +func mergeFields(segments []*SegmentBase) (bool, []string) { + fieldsSame := true + + var segment0Fields []string + if len(segments) > 0 { + segment0Fields = segments[0].Fields() + } + + fieldsExist := map[string]struct{}{} + for _, segment := range segments { + fields := segment.Fields() + for fieldi, field := range fields { + fieldsExist[field] = struct{}{} + if len(segment0Fields) != len(fields) || segment0Fields[fieldi] != field { + fieldsSame = false + } + } + } + + rv := make([]string, 0, len(fieldsExist)) + // ensure _id stays first + rv = append(rv, "_id") + for k := range fieldsExist { + if k != "_id" { + rv = append(rv, k) + } + } + + sort.Strings(rv[1:]) // leave _id as first + + return fieldsSame, rv +} + +func isClosed(closeCh chan struct{}) bool { + select { + case <-closeCh: + return true + default: + return false + } +} diff --git a/vendor/github.com/blevesearch/zap/v12/new.go b/vendor/github.com/blevesearch/zap/v12/new.go new file mode 100644 index 0000000..9815818 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/new.go @@ -0,0 +1,860 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "math" + "sort" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var NewSegmentBufferNumResultsBump int = 100 +var NewSegmentBufferNumResultsFactor float64 = 1.0 +var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0 + +// ValidateDocFields can be set by applications to perform additional checks +// on fields in a document being added to a new segment, by default it does +// nothing. +// This API is experimental and may be removed at any time. +var ValidateDocFields = func(field document.Field) error { + return nil +} + +// AnalysisResultsToSegmentBase produces an in-memory zap-encoded +// SegmentBase from analysis results +func (z *ZapPlugin) New(results []*index.AnalysisResult) ( + segment.Segment, uint64, error) { + return z.newWithChunkMode(results, DefaultChunkMode) +} + +func (*ZapPlugin) newWithChunkMode(results []*index.AnalysisResult, + chunkMode uint32) (segment.Segment, uint64, error) { + s := interimPool.Get().(*interim) + + var br bytes.Buffer + if s.lastNumDocs > 0 { + // use previous results to initialize the buf with an estimate + // size, but note that the interim instance comes from a + // global interimPool, so multiple scorch instances indexing + // different docs can lead to low quality estimates + estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * + NewSegmentBufferNumResultsFactor) + estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * + NewSegmentBufferAvgBytesPerDocFactor) + br.Grow(estimateAvgBytesPerDoc * estimateNumResults) + } + + s.results = results + s.chunkMode = chunkMode + s.w = NewCountHashWriter(&br) + + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, + err := 
s.convert() + if err != nil { + return nil, uint64(0), err + } + + sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkMode, + s.FieldsMap, s.FieldsInv, uint64(len(results)), + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) + + if err == nil && s.reset() == nil { + s.lastNumDocs = len(results) + s.lastOutSize = len(br.Bytes()) + interimPool.Put(s) + } + + return sb, uint64(len(br.Bytes())), err +} + +var interimPool = sync.Pool{New: func() interface{} { return &interim{} }} + +// interim holds temporary working data used while converting from +// analysis results to a zap-encoded segment +type interim struct { + results []*index.AnalysisResult + + chunkMode uint32 + + w *CountHashWriter + + // FieldsMap adds 1 to field id to avoid zero value issues + // name -> field id + 1 + FieldsMap map[string]uint16 + + // FieldsInv is the inverse of FieldsMap + // field id -> name + FieldsInv []string + + // Term dictionaries for each field + // field id -> term -> postings list id + 1 + Dicts []map[string]uint64 + + // Terms for each field, where terms are sorted ascending + // field id -> []term + DictKeys [][]string + + // Fields whose IncludeDocValues is true + // field id -> bool + IncludeDocValues []bool + + // postings id -> bitmap of docNums + Postings []*roaring.Bitmap + + // postings id -> freq/norm's, one for each docNum in postings + FreqNorms [][]interimFreqNorm + freqNormsBacking []interimFreqNorm + + // postings id -> locs, one for each freq + Locs [][]interimLoc + locsBacking []interimLoc + + numTermsPerPostingsList []int // key is postings list id + numLocsPerPostingsList []int // key is postings list id + + builder *vellum.Builder + builderBuf bytes.Buffer + + metaBuf bytes.Buffer + + tmp0 []byte + tmp1 []byte + + lastNumDocs int + lastOutSize int +} + +func (s *interim) reset() (err error) { + s.results = nil + s.chunkMode = 0 + s.w = nil + s.FieldsMap = nil + s.FieldsInv = nil + for i := range s.Dicts { + s.Dicts[i] = nil + } + 
s.Dicts = s.Dicts[:0] + for i := range s.DictKeys { + s.DictKeys[i] = s.DictKeys[i][:0] + } + s.DictKeys = s.DictKeys[:0] + for i := range s.IncludeDocValues { + s.IncludeDocValues[i] = false + } + s.IncludeDocValues = s.IncludeDocValues[:0] + for _, idn := range s.Postings { + idn.Clear() + } + s.Postings = s.Postings[:0] + s.FreqNorms = s.FreqNorms[:0] + for i := range s.freqNormsBacking { + s.freqNormsBacking[i] = interimFreqNorm{} + } + s.freqNormsBacking = s.freqNormsBacking[:0] + s.Locs = s.Locs[:0] + for i := range s.locsBacking { + s.locsBacking[i] = interimLoc{} + } + s.locsBacking = s.locsBacking[:0] + s.numTermsPerPostingsList = s.numTermsPerPostingsList[:0] + s.numLocsPerPostingsList = s.numLocsPerPostingsList[:0] + s.builderBuf.Reset() + if s.builder != nil { + err = s.builder.Reset(&s.builderBuf) + } + s.metaBuf.Reset() + s.tmp0 = s.tmp0[:0] + s.tmp1 = s.tmp1[:0] + s.lastNumDocs = 0 + s.lastOutSize = 0 + + return err +} + +func (s *interim) grabBuf(size int) []byte { + buf := s.tmp0 + if cap(buf) < size { + buf = make([]byte, size) + s.tmp0 = buf + } + return buf[0:size] +} + +type interimStoredField struct { + vals [][]byte + typs []byte + arrayposs [][]uint64 // array positions +} + +type interimFreqNorm struct { + freq uint64 + norm float32 + numLocs int +} + +type interimLoc struct { + fieldID uint16 + pos uint64 + start uint64 + end uint64 + arrayposs []uint64 +} + +func (s *interim) convert() (uint64, uint64, uint64, []uint64, error) { + s.FieldsMap = map[string]uint16{} + + s.getOrDefineField("_id") // _id field is fieldID 0 + + for _, result := range s.results { + for _, field := range result.Document.CompositeFields { + s.getOrDefineField(field.Name()) + } + for _, field := range result.Document.Fields { + s.getOrDefineField(field.Name()) + } + } + + sort.Strings(s.FieldsInv[1:]) // keep _id as first field + + for fieldID, fieldName := range s.FieldsInv { + s.FieldsMap[fieldName] = uint16(fieldID + 1) + } + + if cap(s.IncludeDocValues) >= 
len(s.FieldsInv) { + s.IncludeDocValues = s.IncludeDocValues[:len(s.FieldsInv)] + } else { + s.IncludeDocValues = make([]bool, len(s.FieldsInv)) + } + + s.prepareDicts() + + for _, dict := range s.DictKeys { + sort.Strings(dict) + } + + s.processDocuments() + + storedIndexOffset, err := s.writeStoredFields() + if err != nil { + return 0, 0, 0, nil, err + } + + var fdvIndexOffset uint64 + var dictOffsets []uint64 + + if len(s.results) > 0 { + fdvIndexOffset, dictOffsets, err = s.writeDicts() + if err != nil { + return 0, 0, 0, nil, err + } + } else { + dictOffsets = make([]uint64, len(s.FieldsInv)) + } + + fieldsIndexOffset, err := persistFields(s.FieldsInv, s.w, dictOffsets) + if err != nil { + return 0, 0, 0, nil, err + } + + return storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, nil +} + +func (s *interim) getOrDefineField(fieldName string) int { + fieldIDPlus1, exists := s.FieldsMap[fieldName] + if !exists { + fieldIDPlus1 = uint16(len(s.FieldsInv) + 1) + s.FieldsMap[fieldName] = fieldIDPlus1 + s.FieldsInv = append(s.FieldsInv, fieldName) + + s.Dicts = append(s.Dicts, make(map[string]uint64)) + + n := len(s.DictKeys) + if n < cap(s.DictKeys) { + s.DictKeys = s.DictKeys[:n+1] + s.DictKeys[n] = s.DictKeys[n][:0] + } else { + s.DictKeys = append(s.DictKeys, []string(nil)) + } + } + + return int(fieldIDPlus1 - 1) +} + +// fill Dicts and DictKeys from analysis results +func (s *interim) prepareDicts() { + var pidNext int + + var totTFs int + var totLocs int + + visitField := func(fieldID uint16, tfs analysis.TokenFrequencies) { + dict := s.Dicts[fieldID] + dictKeys := s.DictKeys[fieldID] + + for term, tf := range tfs { + pidPlus1, exists := dict[term] + if !exists { + pidNext++ + pidPlus1 = uint64(pidNext) + + dict[term] = pidPlus1 + dictKeys = append(dictKeys, term) + + s.numTermsPerPostingsList = append(s.numTermsPerPostingsList, 0) + s.numLocsPerPostingsList = append(s.numLocsPerPostingsList, 0) + } + + pid := pidPlus1 - 1 + + 
s.numTermsPerPostingsList[pid] += 1 + s.numLocsPerPostingsList[pid] += len(tf.Locations) + + totLocs += len(tf.Locations) + } + + totTFs += len(tfs) + + s.DictKeys[fieldID] = dictKeys + } + + for _, result := range s.results { + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + _, tf := field.Analyze() + visitField(fieldID, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + tf := result.Analyzed[i] + visitField(fieldID, tf) + } + } + + numPostingsLists := pidNext + + if cap(s.Postings) >= numPostingsLists { + s.Postings = s.Postings[:numPostingsLists] + } else { + postings := make([]*roaring.Bitmap, numPostingsLists) + copy(postings, s.Postings[:cap(s.Postings)]) + for i := 0; i < numPostingsLists; i++ { + if postings[i] == nil { + postings[i] = roaring.New() + } + } + s.Postings = postings + } + + if cap(s.FreqNorms) >= numPostingsLists { + s.FreqNorms = s.FreqNorms[:numPostingsLists] + } else { + s.FreqNorms = make([][]interimFreqNorm, numPostingsLists) + } + + if cap(s.freqNormsBacking) >= totTFs { + s.freqNormsBacking = s.freqNormsBacking[:totTFs] + } else { + s.freqNormsBacking = make([]interimFreqNorm, totTFs) + } + + freqNormsBacking := s.freqNormsBacking + for pid, numTerms := range s.numTermsPerPostingsList { + s.FreqNorms[pid] = freqNormsBacking[0:0] + freqNormsBacking = freqNormsBacking[numTerms:] + } + + if cap(s.Locs) >= numPostingsLists { + s.Locs = s.Locs[:numPostingsLists] + } else { + s.Locs = make([][]interimLoc, numPostingsLists) + } + + if cap(s.locsBacking) >= totLocs { + s.locsBacking = s.locsBacking[:totLocs] + } else { + s.locsBacking = make([]interimLoc, totLocs) + } + + locsBacking := s.locsBacking + for pid, numLocs := range s.numLocsPerPostingsList { + s.Locs[pid] = locsBacking[0:0] + locsBacking = locsBacking[numLocs:] + } +} + +func (s *interim) 
processDocuments() { + numFields := len(s.FieldsInv) + reuseFieldLens := make([]int, numFields) + reuseFieldTFs := make([]analysis.TokenFrequencies, numFields) + + for docNum, result := range s.results { + for i := 0; i < numFields; i++ { // clear these for reuse + reuseFieldLens[i] = 0 + reuseFieldTFs[i] = nil + } + + s.processDocument(uint64(docNum), result, + reuseFieldLens, reuseFieldTFs) + } +} + +func (s *interim) processDocument(docNum uint64, + result *index.AnalysisResult, + fieldLens []int, fieldTFs []analysis.TokenFrequencies) { + visitField := func(fieldID uint16, fieldName string, + ln int, tf analysis.TokenFrequencies) { + fieldLens[fieldID] += ln + + existingFreqs := fieldTFs[fieldID] + if existingFreqs != nil { + existingFreqs.MergeAll(fieldName, tf) + } else { + fieldTFs[fieldID] = tf + } + } + + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln, tf := field.Analyze() + visitField(fieldID, field.Name(), ln, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln := result.Length[i] + tf := result.Analyzed[i] + visitField(fieldID, field.Name(), ln, tf) + } + + // now that it's been rolled up into fieldTFs, walk that + for fieldID, tfs := range fieldTFs { + dict := s.Dicts[fieldID] + norm := float32(1.0 / math.Sqrt(float64(fieldLens[fieldID]))) + + for term, tf := range tfs { + pid := dict[term] - 1 + bs := s.Postings[pid] + bs.Add(uint32(docNum)) + + s.FreqNorms[pid] = append(s.FreqNorms[pid], + interimFreqNorm{ + freq: uint64(tf.Frequency()), + norm: norm, + numLocs: len(tf.Locations), + }) + + if len(tf.Locations) > 0 { + locs := s.Locs[pid] + + for _, loc := range tf.Locations { + var locf = uint16(fieldID) + if loc.Field != "" { + locf = uint16(s.getOrDefineField(loc.Field)) + } + var arrayposs []uint64 + if len(loc.ArrayPositions) > 0 { + arrayposs = loc.ArrayPositions 
+ } + locs = append(locs, interimLoc{ + fieldID: locf, + pos: uint64(loc.Position), + start: uint64(loc.Start), + end: uint64(loc.End), + arrayposs: arrayposs, + }) + } + + s.Locs[pid] = locs + } + } + } +} + +func (s *interim) writeStoredFields() ( + storedIndexOffset uint64, err error) { + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return s.metaBuf.Write(varBuf[:wb]) + } + + data, compressed := s.tmp0[:0], s.tmp1[:0] + defer func() { s.tmp0, s.tmp1 = data, compressed }() + + // keyed by docNum + docStoredOffsets := make([]uint64, len(s.results)) + + // keyed by fieldID, for the current doc in the loop + docStoredFields := map[uint16]interimStoredField{} + + for docNum, result := range s.results { + for fieldID := range docStoredFields { // reset for next doc + delete(docStoredFields, fieldID) + } + + for _, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + + opts := field.Options() + + if opts.IsStored() { + isf := docStoredFields[fieldID] + isf.vals = append(isf.vals, field.Value()) + isf.typs = append(isf.typs, encodeFieldType(field)) + isf.arrayposs = append(isf.arrayposs, field.ArrayPositions()) + docStoredFields[fieldID] = isf + } + + if opts.IncludeDocValues() { + s.IncludeDocValues[fieldID] = true + } + + err := ValidateDocFields(field) + if err != nil { + return 0, err + } + } + + var curr int + + s.metaBuf.Reset() + data = data[:0] + + // _id field special case optimizes ExternalID() lookups + idFieldVal := docStoredFields[uint16(0)].vals[0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, err + } + + // handle non-"_id" fields + for fieldID := 1; fieldID < len(s.FieldsInv); fieldID++ { + isf, exists := docStoredFields[uint16(fieldID)] + if exists { + curr, data, err = persistStoredFieldValues( + fieldID, isf.vals, isf.typs, isf.arrayposs, + curr, metaEncode, data) + if err != nil { + return 
0, err + } + } + } + + metaBytes := s.metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + docStoredOffsets[docNum] = uint64(s.w.Count()) + + _, err := writeUvarints(s.w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, err + } + + _, err = s.w.Write(metaBytes) + if err != nil { + return 0, err + } + + _, err = s.w.Write(idFieldVal) + if err != nil { + return 0, err + } + + _, err = s.w.Write(compressed) + if err != nil { + return 0, err + } + } + + storedIndexOffset = uint64(s.w.Count()) + + for _, docStoredOffset := range docStoredOffsets { + err = binary.Write(s.w, binary.BigEndian, docStoredOffset) + if err != nil { + return 0, err + } + } + + return storedIndexOffset, nil +} + +func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) { + dictOffsets = make([]uint64, len(s.FieldsInv)) + + fdvOffsetsStart := make([]uint64, len(s.FieldsInv)) + fdvOffsetsEnd := make([]uint64, len(s.FieldsInv)) + + buf := s.grabBuf(binary.MaxVarintLen64) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + locEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + + var docTermMap [][]byte + + if s.builder == nil { + s.builder, err = vellum.New(&s.builderBuf, nil) + if err != nil { + return 0, nil, err + } + } + + for fieldID, terms := range s.DictKeys { + if cap(docTermMap) < len(s.results) { + docTermMap = make([][]byte, len(s.results)) + } else { + docTermMap = docTermMap[0:len(s.results)] + for docNum := range docTermMap { // reset the docTermMap + docTermMap[docNum] = docTermMap[docNum][:0] + } + } + + dict := s.Dicts[fieldID] + + for _, term := range terms { // terms are already sorted + pid := dict[term] - 1 + + postingsBS := s.Postings[pid] + + freqNorms := 
s.FreqNorms[pid] + freqNormOffset := 0 + + locs := s.Locs[pid] + locOffset := 0 + + chunkSize, err := getChunkSize(s.chunkMode, postingsBS.GetCardinality(), uint64(len(s.results))) + if err != nil { + return 0, nil, err + } + tfEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + locEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + + postingsItr := postingsBS.Iterator() + for postingsItr.HasNext() { + docNum := uint64(postingsItr.Next()) + + freqNorm := freqNorms[freqNormOffset] + + err = tfEncoder.Add(docNum, + encodeFreqHasLocs(freqNorm.freq, freqNorm.numLocs > 0), + uint64(math.Float32bits(freqNorm.norm))) + if err != nil { + return 0, nil, err + } + + if freqNorm.numLocs > 0 { + numBytesLocs := 0 + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + numBytesLocs += totalUvarintBytes( + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs)), loc.arrayposs) + } + + err = locEncoder.Add(docNum, uint64(numBytesLocs)) + if err != nil { + return 0, nil, err + } + + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + err = locEncoder.Add(docNum, + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs))) + if err != nil { + return 0, nil, err + } + + err = locEncoder.Add(docNum, loc.arrayposs...) 
+ if err != nil { + return 0, nil, err + } + } + + locOffset += freqNorm.numLocs + } + + freqNormOffset++ + + docTermMap[docNum] = append( + append(docTermMap[docNum], term...), + termSeparator) + } + + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := + writePostings(postingsBS, tfEncoder, locEncoder, nil, s.w, buf) + if err != nil { + return 0, nil, err + } + + if postingsOffset > uint64(0) { + err = s.builder.Insert([]byte(term), postingsOffset) + if err != nil { + return 0, nil, err + } + } + + tfEncoder.Reset() + locEncoder.Reset() + } + + err = s.builder.Close() + if err != nil { + return 0, nil, err + } + + // record where this dictionary starts + dictOffsets[fieldID] = uint64(s.w.Count()) + + vellumData := s.builderBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(buf, uint64(len(vellumData))) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + + // write this vellum to disk + _, err = s.w.Write(vellumData) + if err != nil { + return 0, nil, err + } + + // reset vellum for reuse + s.builderBuf.Reset() + + err = s.builder.Reset(&s.builderBuf) + if err != nil { + return 0, nil, err + } + + // write the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return 0, nil, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, uint64(len(s.results)-1), s.w, false) + if s.IncludeDocValues[fieldID] { + for docNum, docTerms := range docTermMap { + if len(docTerms) > 0 { + err = fdvEncoder.Add(uint64(docNum), docTerms) + if err != nil { + return 0, nil, err + } + } + } + err = fdvEncoder.Close() + if err != nil { + return 0, nil, err + } + + fdvOffsetsStart[fieldID] = uint64(s.w.Count()) + + _, err = fdvEncoder.Write() + if err != nil { + return 0, nil, err + } + + fdvOffsetsEnd[fieldID] = uint64(s.w.Count()) + + fdvEncoder.Reset() + } else { + fdvOffsetsStart[fieldID] = fieldNotUninverted + 
fdvOffsetsEnd[fieldID] = fieldNotUninverted + } + } + + fdvIndexOffset = uint64(s.w.Count()) + + for i := 0; i < len(fdvOffsetsStart); i++ { + n := binary.PutUvarint(buf, fdvOffsetsStart[i]) + _, err := s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + n = binary.PutUvarint(buf, fdvOffsetsEnd[i]) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + } + + return fdvIndexOffset, dictOffsets, nil +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType = 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +// returns the total # of bytes needed to encode the given uint64's +// into binary.PutUVarint() encoding +func totalUvarintBytes(a, b, c, d, e uint64, more []uint64) (n int) { + n = numUvarintBytes(a) + n += numUvarintBytes(b) + n += numUvarintBytes(c) + n += numUvarintBytes(d) + n += numUvarintBytes(e) + for _, v := range more { + n += numUvarintBytes(v) + } + return n +} + +// returns # of bytes needed to encode x in binary.PutUvarint() encoding +func numUvarintBytes(x uint64) (n int) { + for x >= 0x80 { + x >>= 7 + n++ + } + return n + 1 +} diff --git a/vendor/github.com/blevesearch/zap/v12/plugin.go b/vendor/github.com/blevesearch/zap/v12/plugin.go new file mode 100644 index 0000000..38a0638 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/plugin.go @@ -0,0 +1,37 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// ZapPlugin implements the Plugin interface of +// the blevesearch/bleve/index/scorch/segment pkg +type ZapPlugin struct{} + +func (*ZapPlugin) Type() string { + return Type +} + +func (*ZapPlugin) Version() uint32 { + return Version +} + +// Plugin returns an instance segment.Plugin for use +// by the Scorch indexing scheme +func Plugin() segment.Plugin { + return &ZapPlugin{} +} diff --git a/vendor/github.com/blevesearch/zap/v12/posting.go b/vendor/github.com/blevesearch/zap/v12/posting.go new file mode 100644 index 0000000..3a6ee54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/posting.go @@ -0,0 +1,798 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + "math" + "reflect" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePostingsList int +var reflectStaticSizePostingsIterator int +var reflectStaticSizePosting int +var reflectStaticSizeLocation int + +func init() { + var pl PostingsList + reflectStaticSizePostingsList = int(reflect.TypeOf(pl).Size()) + var pi PostingsIterator + reflectStaticSizePostingsIterator = int(reflect.TypeOf(pi).Size()) + var p Posting + reflectStaticSizePosting = int(reflect.TypeOf(p).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +// FST or vellum value (uint64) encoding is determined by the top two +// highest-order or most significant bits... +// +// encoding : MSB +// name : 63 62 61...to...bit #0 (LSB) +// ----------+---+---+--------------------------------------------------- +// general : 0 | 0 | 62-bits of postingsOffset. +// ~ : 0 | 1 | reserved for future. +// 1-hit : 1 | 0 | 31-bits of positive float31 norm | 31-bits docNum. +// ~ : 1 | 1 | reserved for future. +// +// Encoding "general" is able to handle all cases, where the +// postingsOffset points to more information about the postings for +// the term. +// +// Encoding "1-hit" is used to optimize a commonly seen case when a +// term has only a single hit. For example, a term in the _id field +// will have only 1 hit. The "1-hit" encoding is used for a term +// in a field when... +// +// - term vector info is disabled for that field; +// - and, the term appears in only a single doc for that field; +// - and, the term's freq is exactly 1 in that single doc for that field; +// - and, the docNum must fit into 31-bits; +// +// Otherwise, the "general" encoding is used instead. 
+// +// In the "1-hit" encoding, the field in that single doc may have +// other terms, which is supported in the "1-hit" encoding by the +// positive float31 norm. + +const FSTValEncodingMask = uint64(0xc000000000000000) +const FSTValEncodingGeneral = uint64(0x0000000000000000) +const FSTValEncoding1Hit = uint64(0x8000000000000000) + +func FSTValEncode1Hit(docNum uint64, normBits uint64) uint64 { + return FSTValEncoding1Hit | ((mask31Bits & normBits) << 31) | (mask31Bits & docNum) +} + +func FSTValDecode1Hit(v uint64) (docNum uint64, normBits uint64) { + return (mask31Bits & v), (mask31Bits & (v >> 31)) +} + +const mask31Bits = uint64(0x000000007fffffff) + +func under32Bits(x uint64) bool { + return x <= mask31Bits +} + +const DocNum1HitFinished = math.MaxUint64 + +var NormBits1Hit = uint64(math.Float32bits(float32(1))) + +// PostingsList is an in-memory representation of a postings list +type PostingsList struct { + sb *SegmentBase + postingsOffset uint64 + freqOffset uint64 + locOffset uint64 + postings *roaring.Bitmap + except *roaring.Bitmap + + // when normBits1Hit != 0, then this postings list came from a + // 1-hit encoding, and only the docNum1Hit & normBits1Hit apply + docNum1Hit uint64 + normBits1Hit uint64 +} + +// represents an immutable, empty postings list +var emptyPostingsList = &PostingsList{} + +func (p *PostingsList) Size() int { + sizeInBytes := reflectStaticSizePostingsList + size.SizeOfPtr + + if p.except != nil { + sizeInBytes += int(p.except.GetSizeInBytes()) + } + + return sizeInBytes +} + +func (p *PostingsList) OrInto(receiver *roaring.Bitmap) { + if p.normBits1Hit != 0 { + receiver.Add(uint32(p.docNum1Hit)) + return + } + + if p.postings != nil { + receiver.Or(p.postings) + } +} + +// Iterator returns an iterator for this postings list +func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, + prealloc segment.PostingsIterator) segment.PostingsIterator { + if p.normBits1Hit == 0 && p.postings == nil { + return 
emptyPostingsIterator + } + + var preallocPI *PostingsIterator + pi, ok := prealloc.(*PostingsIterator) + if ok && pi != nil { + preallocPI = pi + } + if preallocPI == emptyPostingsIterator { + preallocPI = nil + } + + return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) +} + +func (p *PostingsList) iterator(includeFreq, includeNorm, includeLocs bool, + rv *PostingsIterator) *PostingsIterator { + if rv == nil { + rv = &PostingsIterator{} + } else { + freqNormReader := rv.freqNormReader + if freqNormReader != nil { + freqNormReader.reset() + } + + locReader := rv.locReader + if locReader != nil { + locReader.reset() + } + + nextLocs := rv.nextLocs[:0] + nextSegmentLocs := rv.nextSegmentLocs[:0] + + buf := rv.buf + + *rv = PostingsIterator{} // clear the struct + + rv.freqNormReader = freqNormReader + rv.locReader = locReader + + rv.nextLocs = nextLocs + rv.nextSegmentLocs = nextSegmentLocs + + rv.buf = buf + } + + rv.postings = p + rv.includeFreqNorm = includeFreq || includeNorm || includeLocs + rv.includeLocs = includeLocs + + if p.normBits1Hit != 0 { + // "1-hit" encoding + rv.docNum1Hit = p.docNum1Hit + rv.normBits1Hit = p.normBits1Hit + + if p.except != nil && p.except.Contains(uint32(rv.docNum1Hit)) { + rv.docNum1Hit = DocNum1HitFinished + } + + return rv + } + + // "general" encoding, check if empty + if p.postings == nil { + return rv + } + + // initialize freq chunk reader + if rv.includeFreqNorm { + rv.freqNormReader = newChunkedIntDecoder(p.sb.mem, p.freqOffset) + } + + // initialize the loc chunk reader + if rv.includeLocs { + rv.locReader = newChunkedIntDecoder(p.sb.mem, p.locOffset) + } + + rv.all = p.postings.Iterator() + if p.except != nil { + rv.ActualBM = roaring.AndNot(p.postings, p.except) + rv.Actual = rv.ActualBM.Iterator() + } else { + rv.ActualBM = p.postings + rv.Actual = rv.all // Optimize to use same iterator for all & Actual. 
+ } + + return rv +} + +// Count returns the number of items on this postings list +func (p *PostingsList) Count() uint64 { + var n, e uint64 + if p.normBits1Hit != 0 { + n = 1 + if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { + e = 1 + } + } else if p.postings != nil { + n = p.postings.GetCardinality() + if p.except != nil { + e = p.postings.AndCardinality(p.except) + } + } + return n - e +} + +func (rv *PostingsList) read(postingsOffset uint64, d *Dictionary) error { + rv.postingsOffset = postingsOffset + + // handle "1-hit" encoding special case + if rv.postingsOffset&FSTValEncodingMask == FSTValEncoding1Hit { + return rv.init1Hit(postingsOffset) + } + + // read the location of the freq/norm details + var n uint64 + var read int + + rv.freqOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+binary.MaxVarintLen64]) + n += uint64(read) + + rv.locOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + var postingsLen uint64 + postingsLen, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + roaringBytes := d.sb.mem[postingsOffset+n : postingsOffset+n+postingsLen] + + if rv.postings == nil { + rv.postings = roaring.NewBitmap() + } + _, err := rv.postings.FromBuffer(roaringBytes) + if err != nil { + return fmt.Errorf("error loading roaring bitmap: %v", err) + } + + return nil +} + +func (rv *PostingsList) init1Hit(fstVal uint64) error { + docNum, normBits := FSTValDecode1Hit(fstVal) + + rv.docNum1Hit = docNum + rv.normBits1Hit = normBits + + return nil +} + +// PostingsIterator provides a way to iterate through the postings list +type PostingsIterator struct { + postings *PostingsList + all roaring.IntPeekable + Actual roaring.IntPeekable + ActualBM *roaring.Bitmap + + currChunk uint32 + freqNormReader *chunkedIntDecoder + locReader *chunkedIntDecoder + + next Posting // reused across Next() calls + 
nextLocs []Location // reused across Next() calls + nextSegmentLocs []segment.Location // reused across Next() calls + + docNum1Hit uint64 + normBits1Hit uint64 + + buf []byte + + includeFreqNorm bool + includeLocs bool +} + +var emptyPostingsIterator = &PostingsIterator{} + +func (i *PostingsIterator) Size() int { + sizeInBytes := reflectStaticSizePostingsIterator + size.SizeOfPtr + + i.next.Size() + // account for freqNormReader, locReader if we start using this. + for _, entry := range i.nextLocs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (i *PostingsIterator) loadChunk(chunk int) error { + if i.includeFreqNorm { + err := i.freqNormReader.loadChunk(chunk) + if err != nil { + return err + } + } + + if i.includeLocs { + err := i.locReader.loadChunk(chunk) + if err != nil { + return err + } + } + + i.currChunk = uint32(chunk) + return nil +} + +func (i *PostingsIterator) readFreqNormHasLocs() (uint64, uint64, bool, error) { + if i.normBits1Hit != 0 { + return 1, i.normBits1Hit, false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading frequency: %v", err) + } + + freq, hasLocs := decodeFreqHasLocs(freqHasLocs) + + normBits, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading norm: %v", err) + } + + return freq, normBits, hasLocs, nil +} + +func (i *PostingsIterator) skipFreqNormReadHasLocs() (bool, error) { + if i.normBits1Hit != 0 { + return false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return false, fmt.Errorf("error reading freqHasLocs: %v", err) + } + + i.freqNormReader.SkipUvarint() // Skip normBits. + + return freqHasLocs&0x01 != 0, nil // See decodeFreqHasLocs() / hasLocs. 
+} + +func encodeFreqHasLocs(freq uint64, hasLocs bool) uint64 { + rv := freq << 1 + if hasLocs { + rv = rv | 0x01 // 0'th LSB encodes whether there are locations + } + return rv +} + +func decodeFreqHasLocs(freqHasLocs uint64) (uint64, bool) { + freq := freqHasLocs >> 1 + hasLocs := freqHasLocs&0x01 != 0 + return freq, hasLocs +} + +// readLocation processes all the integers on the stream representing a single +// location. +func (i *PostingsIterator) readLocation(l *Location) error { + // read off field + fieldID, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location field: %v", err) + } + // read off pos + pos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location pos: %v", err) + } + // read off start + start, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location start: %v", err) + } + // read off end + end, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location end: %v", err) + } + // read off num array pos + numArrayPos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location num array pos: %v", err) + } + + l.field = i.postings.sb.fieldsInv[fieldID] + l.pos = pos + l.start = start + l.end = end + + if cap(l.ap) < int(numArrayPos) { + l.ap = make([]uint64, int(numArrayPos)) + } else { + l.ap = l.ap[:int(numArrayPos)] + } + + // read off array positions + for k := 0; k < int(numArrayPos); k++ { + ap, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading array position: %v", err) + } + + l.ap[k] = ap + } + + return nil +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) Next() (segment.Posting, error) { + return i.nextAtOrAfter(0) +} + +// Advance returns the posting at the specified docNum or it is not present +// the next posting, or if the end is reached, nil +func (i 
*PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { + return i.nextAtOrAfter(docNum) +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { + docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) + if err != nil || !exists { + return nil, err + } + + i.next = Posting{} // clear the struct + rv := &i.next + rv.docNum = docNum + + if !i.includeFreqNorm { + return rv, nil + } + + var normBits uint64 + var hasLocs bool + + rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return nil, err + } + + rv.norm = math.Float32frombits(uint32(normBits)) + + if i.includeLocs && hasLocs { + // prepare locations into reused slices, where we assume + // rv.freq >= "number of locs", since in a composite field, + // some component fields might have their IncludeTermVector + // flags disabled while other component fields are enabled + if cap(i.nextLocs) >= int(rv.freq) { + i.nextLocs = i.nextLocs[0:rv.freq] + } else { + i.nextLocs = make([]Location, rv.freq, rv.freq*2) + } + if cap(i.nextSegmentLocs) < int(rv.freq) { + i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) + } + rv.locs = i.nextSegmentLocs[:0] + + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + j := 0 + startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader + for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { + err := i.readLocation(&i.nextLocs[j]) + if err != nil { + return nil, err + } + rv.locs = append(rv.locs, &i.nextLocs[j]) + j++ + } + } + + return rv, nil +} + +// nextDocNum returns the next docNum on the postings list, and also +// sets up the currChunk / loc related fields of the iterator. 
+func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool, error) { + if i.normBits1Hit != 0 { + if i.docNum1Hit == DocNum1HitFinished { + return 0, false, nil + } + if i.docNum1Hit < atOrAfter { + // advanced past our 1-hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return 0, false, nil + } + docNum := i.docNum1Hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return docNum, true, nil + } + + if i.Actual == nil || !i.Actual.HasNext() { + return 0, false, nil + } + + if i.postings == nil || i.postings.postings == i.ActualBM { + return i.nextDocNumAtOrAfterClean(atOrAfter) + } + + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + // couldn't find anything + return 0, false, nil + } + + n := i.Actual.Next() + allN := i.all.Next() + + chunkSize, err := getChunkSize(i.postings.sb.chunkMode, i.postings.postings.GetCardinality(), i.postings.sb.numDocs) + if err != nil { + return 0, false, err + } + nChunk := n / uint32(chunkSize) + + // when allN becomes >= to here, then allN is in the same chunk as nChunk. 
+ allNReachesNChunk := nChunk * uint32(chunkSize) + + // n is the next actual hit (excluding some postings), and + // allN is the next hit in the full postings, and + // if they don't match, move 'all' forwards until they do + for allN != n { + // we've reached same chunk, so move the freq/norm/loc decoders forward + if i.includeFreqNorm && allN >= allNReachesNChunk { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, err + } + } + + allN = i.all.Next() + } + + if i.includeFreqNorm && (i.currChunk != nChunk || i.freqNormReader.isNil()) { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +// optimization when the postings list is "clean" (e.g., no updates & +// no deletions) where the all bitmap is the same as the actual bitmap +func (i *PostingsIterator) nextDocNumAtOrAfterClean( + atOrAfter uint64) (uint64, bool, error) { + + if !i.includeFreqNorm { + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + return 0, false, nil // couldn't find anything + } + + return uint64(i.Actual.Next()), true, nil + } + + chunkSize, err := getChunkSize(i.postings.sb.chunkMode, i.postings.postings.GetCardinality(), i.postings.sb.numDocs) + if err != nil { + return 0, false, err + } + + // freq-norm's needed, so maintain freq-norm chunk reader + sameChunkNexts := 0 // # of times we called Next() in the same chunk + n := i.Actual.Next() + nChunk := n / uint32(chunkSize) + + for uint64(n) < atOrAfter && i.Actual.HasNext() { + n = i.Actual.Next() + + nChunkPrev := nChunk + nChunk = n / uint32(chunkSize) + + if nChunk != nChunkPrev { + sameChunkNexts = 0 + } else { + sameChunkNexts += 1 + } + } + + if uint64(n) < atOrAfter { + // couldn't find anything + return 0, false, nil + } + + for j := 0; j < sameChunkNexts; j++ { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, fmt.Errorf("error optimized currChunkNext: %v", 
err) + } + } + + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +func (i *PostingsIterator) currChunkNext(nChunk uint32) error { + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return fmt.Errorf("error loading chunk: %v", err) + } + } + + // read off freq/offsets even though we don't care about them + hasLocs, err := i.skipFreqNormReadHasLocs() + if err != nil { + return err + } + + if i.includeLocs && hasLocs { + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + // skip over all the location bytes + i.locReader.SkipBytes(int(numLocsBytes)) + } + + return nil +} + +// DocNum1Hit returns the docNum and true if this is "1-hit" optimized +// and the docNum is available. +func (p *PostingsIterator) DocNum1Hit() (uint64, bool) { + if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished { + return p.docNum1Hit, true + } + return 0, false +} + +// ActualBitmap returns the underlying actual bitmap +// which can be used up the stack for optimizations +func (p *PostingsIterator) ActualBitmap() *roaring.Bitmap { + return p.ActualBM +} + +// ReplaceActual replaces the ActualBM with the provided +// bitmap +func (p *PostingsIterator) ReplaceActual(abm *roaring.Bitmap) { + p.ActualBM = abm + p.Actual = abm.Iterator() +} + +// PostingsIteratorFromBitmap constructs a PostingsIterator given an +// "actual" bitmap. 
+func PostingsIteratorFromBitmap(bm *roaring.Bitmap, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + ActualBM: bm, + Actual: bm.Iterator(), + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// PostingsIteratorFrom1Hit constructs a PostingsIterator given a +// 1-hit docNum. +func PostingsIteratorFrom1Hit(docNum1Hit uint64, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + docNum1Hit: docNum1Hit, + normBits1Hit: NormBits1Hit, + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// Posting is a single entry in a postings list +type Posting struct { + docNum uint64 + freq uint64 + norm float32 + locs []segment.Location +} + +func (p *Posting) Size() int { + sizeInBytes := reflectStaticSizePosting + + for _, entry := range p.locs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +// Number returns the document number of this posting in this segment +func (p *Posting) Number() uint64 { + return p.docNum +} + +// Frequency returns the frequencies of occurrence of this term in this doc/field +func (p *Posting) Frequency() uint64 { + return p.freq +} + +// Norm returns the normalization factor for this posting +func (p *Posting) Norm() float64 { + return float64(p.norm) +} + +// Locations returns the location information for each occurrence +func (p *Posting) Locations() []segment.Location { + return p.locs +} + +// Location represents the location of a single occurrence +type Location struct { + field string + pos uint64 + start uint64 + end uint64 + ap []uint64 +} + +func (l *Location) Size() int { + return reflectStaticSizeLocation + + len(l.field) + + len(l.ap)*size.SizeOfUint64 +} + +// Field returns the name of the field (useful in composite fields to know +// which original field the value came from) +func (l *Location) Field() string { + return l.field +} + +// Start returns the start byte 
offset of this occurrence +func (l *Location) Start() uint64 { + return l.start +} + +// End returns the end byte offset of this occurrence +func (l *Location) End() uint64 { + return l.end +} + +// Pos returns the 1-based phrase position of this occurrence +func (l *Location) Pos() uint64 { + return l.pos +} + +// ArrayPositions returns the array position vector associated with this occurrence +func (l *Location) ArrayPositions() []uint64 { + return l.ap +} diff --git a/vendor/github.com/blevesearch/zap/v12/read.go b/vendor/github.com/blevesearch/zap/v12/read.go new file mode 100644 index 0000000..e47d4c6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/read.go @@ -0,0 +1,43 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import "encoding/binary" + +func (s *SegmentBase) getDocStoredMetaAndCompressed(docNum uint64) ([]byte, []byte) { + _, storedOffset, n, metaLen, dataLen := s.getDocStoredOffsets(docNum) + + meta := s.mem[storedOffset+n : storedOffset+n+metaLen] + data := s.mem[storedOffset+n+metaLen : storedOffset+n+metaLen+dataLen] + + return meta, data +} + +func (s *SegmentBase) getDocStoredOffsets(docNum uint64) ( + uint64, uint64, uint64, uint64, uint64) { + indexOffset := s.storedIndexOffset + (8 * docNum) + + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + + var n uint64 + + metaLen, read := binary.Uvarint(s.mem[storedOffset : storedOffset+binary.MaxVarintLen64]) + n += uint64(read) + + dataLen, read := binary.Uvarint(s.mem[storedOffset+n : storedOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + return indexOffset, storedOffset, n, metaLen, dataLen +} diff --git a/vendor/github.com/blevesearch/zap/v12/segment.go b/vendor/github.com/blevesearch/zap/v12/segment.go new file mode 100644 index 0000000..e8b1f06 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/segment.go @@ -0,0 +1,572 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "os" + "sync" + "unsafe" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/couchbase/vellum" + mmap "github.com/blevesearch/mmap-go" + "github.com/golang/snappy" +) + +var reflectStaticSizeSegmentBase int + +func init() { + var sb SegmentBase + reflectStaticSizeSegmentBase = int(unsafe.Sizeof(sb)) +} + +// Open returns a zap impl of a segment +func (*ZapPlugin) Open(path string) (segment.Segment, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + + rv := &Segment{ + SegmentBase: SegmentBase{ + mem: mm[0 : len(mm)-FooterSize], + fieldsMap: make(map[string]uint16), + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + }, + f: f, + mm: mm, + path: path, + refs: 1, + } + rv.SegmentBase.updateSize() + + err = rv.loadConfig() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadFields() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadDvReaders() + if err != nil { + _ = rv.Close() + return nil, err + } + + return rv, nil +} + +// SegmentBase is a memory only, read-only implementation of the +// segment.Segment interface, using zap's data representation. 
+type SegmentBase struct { + mem []byte + memCRC uint32 + chunkMode uint32 + fieldsMap map[string]uint16 // fieldName -> fieldID+1 + fieldsInv []string // fieldID -> fieldName + numDocs uint64 + storedIndexOffset uint64 + fieldsIndexOffset uint64 + docValueOffset uint64 + dictLocs []uint64 + fieldDvReaders map[uint16]*docValueReader // naive chunk cache per field + fieldDvNames []string // field names cached in fieldDvReaders + size uint64 + + m sync.Mutex + fieldFSTs map[uint16]*vellum.FST +} + +func (sb *SegmentBase) Size() int { + return int(sb.size) +} + +func (sb *SegmentBase) updateSize() { + sizeInBytes := reflectStaticSizeSegmentBase + + cap(sb.mem) + + // fieldsMap + for k := range sb.fieldsMap { + sizeInBytes += (len(k) + size.SizeOfString) + size.SizeOfUint16 + } + + // fieldsInv, dictLocs + for _, entry := range sb.fieldsInv { + sizeInBytes += len(entry) + size.SizeOfString + } + sizeInBytes += len(sb.dictLocs) * size.SizeOfUint64 + + // fieldDvReaders + for _, v := range sb.fieldDvReaders { + sizeInBytes += size.SizeOfUint16 + size.SizeOfPtr + if v != nil { + sizeInBytes += v.size() + } + } + + sb.size = uint64(sizeInBytes) +} + +func (sb *SegmentBase) AddRef() {} +func (sb *SegmentBase) DecRef() (err error) { return nil } +func (sb *SegmentBase) Close() (err error) { return nil } + +// Segment implements a persisted segment.Segment interface, by +// embedding an mmap()'ed SegmentBase. +type Segment struct { + SegmentBase + + f *os.File + mm mmap.MMap + path string + version uint32 + crc uint32 + + m sync.Mutex // Protects the fields that follow. 
+ refs int64 +} + +func (s *Segment) Size() int { + // 8 /* size of file pointer */ + // 4 /* size of version -> uint32 */ + // 4 /* size of crc -> uint32 */ + sizeOfUints := 16 + + sizeInBytes := (len(s.path) + size.SizeOfString) + sizeOfUints + + // mutex, refs -> int64 + sizeInBytes += 16 + + // do not include the mmap'ed part + return sizeInBytes + s.SegmentBase.Size() - cap(s.mem) +} + +func (s *Segment) AddRef() { + s.m.Lock() + s.refs++ + s.m.Unlock() +} + +func (s *Segment) DecRef() (err error) { + s.m.Lock() + s.refs-- + if s.refs == 0 { + err = s.closeActual() + } + s.m.Unlock() + return err +} + +func (s *Segment) loadConfig() error { + crcOffset := len(s.mm) - 4 + s.crc = binary.BigEndian.Uint32(s.mm[crcOffset : crcOffset+4]) + + verOffset := crcOffset - 4 + s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4]) + if s.version != Version { + return fmt.Errorf("unsupported version %d", s.version) + } + + chunkOffset := verOffset - 4 + s.chunkMode = binary.BigEndian.Uint32(s.mm[chunkOffset : chunkOffset+4]) + + docValueOffset := chunkOffset - 8 + s.docValueOffset = binary.BigEndian.Uint64(s.mm[docValueOffset : docValueOffset+8]) + + fieldsIndexOffset := docValueOffset - 8 + s.fieldsIndexOffset = binary.BigEndian.Uint64(s.mm[fieldsIndexOffset : fieldsIndexOffset+8]) + + storedIndexOffset := fieldsIndexOffset - 8 + s.storedIndexOffset = binary.BigEndian.Uint64(s.mm[storedIndexOffset : storedIndexOffset+8]) + + numDocsOffset := storedIndexOffset - 8 + s.numDocs = binary.BigEndian.Uint64(s.mm[numDocsOffset : numDocsOffset+8]) + return nil +} + +func (s *SegmentBase) loadFields() error { + // NOTE for now we assume the fields index immediately precedes + // the footer, and if this changes, need to adjust accordingly (or + // store explicit length), where s.mem was sliced from s.mm in Open(). 
+ fieldsIndexEnd := uint64(len(s.mem)) + + // iterate through fields index + var fieldID uint64 + for s.fieldsIndexOffset+(8*fieldID) < fieldsIndexEnd { + addr := binary.BigEndian.Uint64(s.mem[s.fieldsIndexOffset+(8*fieldID) : s.fieldsIndexOffset+(8*fieldID)+8]) + + dictLoc, read := binary.Uvarint(s.mem[addr:fieldsIndexEnd]) + n := uint64(read) + s.dictLocs = append(s.dictLocs, dictLoc) + + var nameLen uint64 + nameLen, read = binary.Uvarint(s.mem[addr+n : fieldsIndexEnd]) + n += uint64(read) + + name := string(s.mem[addr+n : addr+n+nameLen]) + s.fieldsInv = append(s.fieldsInv, name) + s.fieldsMap[name] = uint16(fieldID + 1) + + fieldID++ + } + return nil +} + +// Dictionary returns the term dictionary for the specified field +func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { + dict, err := s.dictionary(field) + if err == nil && dict == nil { + return &segment.EmptyDictionary{}, nil + } + return dict, err +} + +func (sb *SegmentBase) dictionary(field string) (rv *Dictionary, err error) { + fieldIDPlus1 := sb.fieldsMap[field] + if fieldIDPlus1 > 0 { + rv = &Dictionary{ + sb: sb, + field: field, + fieldID: fieldIDPlus1 - 1, + } + + dictStart := sb.dictLocs[rv.fieldID] + if dictStart > 0 { + var ok bool + sb.m.Lock() + if rv.fst, ok = sb.fieldFSTs[rv.fieldID]; !ok { + // read the length of the vellum data + vellumLen, read := binary.Uvarint(sb.mem[dictStart : dictStart+binary.MaxVarintLen64]) + fstBytes := sb.mem[dictStart+uint64(read) : dictStart+uint64(read)+vellumLen] + rv.fst, err = vellum.Load(fstBytes) + if err != nil { + sb.m.Unlock() + return nil, fmt.Errorf("dictionary field %s vellum err: %v", field, err) + } + + sb.fieldFSTs[rv.fieldID] = rv.fst + } + + sb.m.Unlock() + rv.fstReader, err = rv.fst.Reader() + if err != nil { + return nil, fmt.Errorf("dictionary field %s vellum reader err: %v", field, err) + } + + } + } + + return rv, nil +} + +// visitDocumentCtx holds data structures that are reusable across +// multiple 
VisitDocument() calls to avoid memory allocations +type visitDocumentCtx struct { + buf []byte + reader bytes.Reader + arrayPos []uint64 +} + +var visitDocumentCtxPool = sync.Pool{ + New: func() interface{} { + reuse := &visitDocumentCtx{} + return reuse + }, +} + +// VisitDocument invokes the DocFieldValueVistor for each stored field +// for the specified doc number +func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + return s.visitDocument(vdc, num, visitor) +} + +func (s *SegmentBase) visitDocument(vdc *visitDocumentCtx, num uint64, + visitor segment.DocumentFieldValueVisitor) error { + // first make sure this is a valid number in this segment + if num < s.numDocs { + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + idFieldVal := compressed[:idFieldValLen] + + keepGoing := visitor("_id", byte('t'), idFieldVal, nil) + if !keepGoing { + visitDocumentCtxPool.Put(vdc) + return nil + } + + // handle non-"_id" fields + compressed = compressed[idFieldValLen:] + + uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed) + if err != nil { + return err + } + + for keepGoing { + field, err := binary.ReadUvarint(&vdc.reader) + if err == io.EOF { + break + } + if err != nil { + return err + } + typ, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + offset, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + l, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + numap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + var arrayPos []uint64 + if numap > 0 { + if cap(vdc.arrayPos) < int(numap) { + vdc.arrayPos = make([]uint64, numap) + } + arrayPos = 
vdc.arrayPos[:numap] + for i := 0; i < int(numap); i++ { + ap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + arrayPos[i] = ap + } + } + + value := uncompressed[offset : offset+l] + keepGoing = visitor(s.fieldsInv[field], byte(typ), value, arrayPos) + } + + vdc.buf = uncompressed + } + return nil +} + +// DocID returns the value of the _id field for the given docNum +func (s *SegmentBase) DocID(num uint64) ([]byte, error) { + if num >= s.numDocs { + return nil, nil + } + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return nil, err + } + idFieldVal := compressed[:idFieldValLen] + + visitDocumentCtxPool.Put(vdc) + + return idFieldVal, nil +} + +// Count returns the number of documents in this segment. +func (s *SegmentBase) Count() uint64 { + return s.numDocs +} + +// DocNumbers returns a bitset corresponding to the doc numbers of all the +// provided _id strings +func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { + rv := roaring.New() + + if len(s.fieldsMap) > 0 { + idDict, err := s.dictionary("_id") + if err != nil { + return nil, err + } + + postingsList := emptyPostingsList + + sMax, err := idDict.fst.GetMaxKey() + if err != nil { + return nil, err + } + sMaxStr := string(sMax) + filteredIds := make([]string, 0, len(ids)) + for _, id := range ids { + if id <= sMaxStr { + filteredIds = append(filteredIds, id) + } + } + + for _, id := range filteredIds { + postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) + if err != nil { + return nil, err + } + postingsList.OrInto(rv) + } + } + + return rv, nil +} + +// Fields returns the field names used in this segment +func (s *SegmentBase) Fields() []string { + return s.fieldsInv +} + +// Path returns the path of this segment on disk +func (s 
*Segment) Path() string { + return s.path +} + +// Close releases all resources associated with this segment +func (s *Segment) Close() (err error) { + return s.DecRef() +} + +func (s *Segment) closeActual() (err error) { + if s.mm != nil { + err = s.mm.Unmap() + } + // try to close file even if unmap failed + if s.f != nil { + err2 := s.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +// some helpers i started adding for the command-line utility + +// Data returns the underlying mmaped data slice +func (s *Segment) Data() []byte { + return s.mm +} + +// CRC returns the CRC value stored in the file footer +func (s *Segment) CRC() uint32 { + return s.crc +} + +// Version returns the file version in the file footer +func (s *Segment) Version() uint32 { + return s.version +} + +// ChunkFactor returns the chunk factor in the file footer +func (s *Segment) ChunkMode() uint32 { + return s.chunkMode +} + +// FieldsIndexOffset returns the fields index offset in the file footer +func (s *Segment) FieldsIndexOffset() uint64 { + return s.fieldsIndexOffset +} + +// StoredIndexOffset returns the stored value index offset in the file footer +func (s *Segment) StoredIndexOffset() uint64 { + return s.storedIndexOffset +} + +// DocValueOffset returns the docValue offset in the file footer +func (s *Segment) DocValueOffset() uint64 { + return s.docValueOffset +} + +// NumDocs returns the number of documents in the file footer +func (s *Segment) NumDocs() uint64 { + return s.numDocs +} + +// DictAddr is a helper function to compute the file offset where the +// dictionary is stored for the specified field. 
+func (s *Segment) DictAddr(field string) (uint64, error) { + fieldIDPlus1, ok := s.fieldsMap[field] + if !ok { + return 0, fmt.Errorf("no such field '%s'", field) + } + + return s.dictLocs[fieldIDPlus1-1], nil +} + +func (s *SegmentBase) loadDvReaders() error { + if s.docValueOffset == fieldNotUninverted || s.numDocs == 0 { + return nil + } + + var read uint64 + for fieldID, field := range s.fieldsInv { + var fieldLocStart, fieldLocEnd uint64 + var n int + fieldLocStart, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset start for field %d", fieldID) + } + read += uint64(n) + fieldLocEnd, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset end for field %d", fieldID) + } + read += uint64(n) + + fieldDvReader, err := s.loadFieldDocValueReader(field, fieldLocStart, fieldLocEnd) + if err != nil { + return err + } + if fieldDvReader != nil { + s.fieldDvReaders[uint16(fieldID)] = fieldDvReader + s.fieldDvNames = append(s.fieldDvNames, field) + } + } + + return nil +} diff --git a/vendor/github.com/blevesearch/zap/v12/write.go b/vendor/github.com/blevesearch/zap/v12/write.go new file mode 100644 index 0000000..77aefdb --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/write.go @@ -0,0 +1,145 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "encoding/binary" + "io" + + "github.com/RoaringBitmap/roaring" +) + +// writes out the length of the roaring bitmap in bytes as varint +// then writes out the roaring bitmap itself +func writeRoaringWithLen(r *roaring.Bitmap, w io.Writer, + reuseBufVarint []byte) (int, error) { + buf, err := r.ToBytes() + if err != nil { + return 0, err + } + + var tw int + + // write out the length + n := binary.PutUvarint(reuseBufVarint, uint64(len(buf))) + nw, err := w.Write(reuseBufVarint[:n]) + tw += nw + if err != nil { + return tw, err + } + + // write out the roaring bytes + nw, err = w.Write(buf) + tw += nw + if err != nil { + return tw, err + } + + return tw, nil +} + +func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (uint64, error) { + var rv uint64 + var fieldsOffsets []uint64 + + for fieldID, fieldName := range fieldsInv { + // record start of this field + fieldsOffsets = append(fieldsOffsets, uint64(w.Count())) + + // write out the dict location and field name length + _, err := writeUvarints(w, dictLocs[fieldID], uint64(len(fieldName))) + if err != nil { + return 0, err + } + + // write out the field name + _, err = w.Write([]byte(fieldName)) + if err != nil { + return 0, err + } + } + + // now write out the fields index + rv = uint64(w.Count()) + for fieldID := range fieldsInv { + err := binary.Write(w, binary.BigEndian, fieldsOffsets[fieldID]) + if err != nil { + return 0, err + } + } + + return rv, nil +} + +// FooterSize is the size of the footer record in bytes +// crc + ver + chunk + field offset + stored offset + num docs + docValueOffset +const FooterSize = 4 + 4 + 4 + 8 + 8 + 8 + 8 + +func persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + chunkMode uint32, crcBeforeFooter uint32, writerIn io.Writer) error { + w := NewCountHashWriter(writerIn) + w.crc = 
crcBeforeFooter + + // write out the number of docs + err := binary.Write(w, binary.BigEndian, numDocs) + if err != nil { + return err + } + // write out the stored field index location: + err = binary.Write(w, binary.BigEndian, storedIndexOffset) + if err != nil { + return err + } + // write out the field index location + err = binary.Write(w, binary.BigEndian, fieldsIndexOffset) + if err != nil { + return err + } + // write out the fieldDocValue location + err = binary.Write(w, binary.BigEndian, docValueOffset) + if err != nil { + return err + } + // write out 32-bit chunk factor + err = binary.Write(w, binary.BigEndian, chunkMode) + if err != nil { + return err + } + // write out 32-bit version + err = binary.Write(w, binary.BigEndian, Version) + if err != nil { + return err + } + // write out CRC-32 of everything upto but not including this CRC + err = binary.Write(w, binary.BigEndian, w.crc) + if err != nil { + return err + } + return nil +} + +func writeUvarints(w io.Writer, vals ...uint64) (tw int, err error) { + buf := make([]byte, binary.MaxVarintLen64) + for _, val := range vals { + n := binary.PutUvarint(buf, val) + var nw int + nw, err = w.Write(buf[:n]) + tw += nw + if err != nil { + return tw, err + } + } + return tw, err +} diff --git a/vendor/github.com/blevesearch/zap/v12/zap.md b/vendor/github.com/blevesearch/zap/v12/zap.md new file mode 100644 index 0000000..d74dc54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v12/zap.md @@ -0,0 +1,177 @@ +# ZAP File Format + +## Legend + +### Sections + + |========| + | | section + |========| + +### Fixed-size fields + + |--------| |----| |--| |-| + | | uint64 | | uint32 | | uint16 | | uint8 + |--------| |----| |--| |-| + +### Varints + + |~~~~~~~~| + | | varint(up to uint64) + |~~~~~~~~| + +### Arbitrary-length fields + + |--------...---| + | | arbitrary-length field (string, vellum, roaring bitmap) + |--------...---| + +### Chunked data + + [--------] + [ ] + [--------] + +## Overview + +Footer 
section describes the configuration of particular ZAP file. The format of footer is version-dependent, so it is necessary to check `V` field before the parsing. + + |==================================================| + | Stored Fields | + |==================================================| + |-----> | Stored Fields Index | + | |==================================================| + | | Dictionaries + Postings + DocValues | + | |==================================================| + | |---> | DocValues Index | + | | |==================================================| + | | | Fields | + | | |==================================================| + | | |-> | Fields Index | + | | | |========|========|========|========|====|====|====| + | | | | D# | SF | F | FDV | CF | V | CC | (Footer) + | | | |========|====|===|====|===|====|===|====|====|====| + | | | | | | + |-+-+-----------------| | | + | |--------------------------| | + |-------------------------------------| + + D#. Number of Docs. + SF. Stored Fields Index Offset. + F. Field Index Offset. + FDV. Field DocValue Offset. + CF. Chunk Factor. + V. Version. + CC. CRC32. + +## Stored Fields + +Stored Fields Index is `D#` consecutive 64-bit unsigned integers - offsets, where relevant Stored Fields Data records are located. + + 0 [SF] [SF + D# * 8] + | Stored Fields | Stored Fields Index | + |================================|==================================| + | | | + | |--------------------| ||--------|--------|. . .|--------|| + | |-> | Stored Fields Data | || 0 | 1 | | D# - 1 || + | | |--------------------| ||--------|----|---|. . .|--------|| + | | | | | + |===|============================|==============|===================| + | | + |-------------------------------------------| + +Stored Fields Data is an arbitrary size record, which consists of metadata and [Snappy](https://github.com/golang/snappy)-compressed data. 
+ + Stored Fields Data + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + | MDS | CDS | MD | CD | + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + + MDS. Metadata size. + CDS. Compressed data size. + MD. Metadata. + CD. Snappy-compressed data. + +## Fields + +Fields Index section located between addresses `F` and `len(file) - len(footer)` and consist of `uint64` values (`F1`, `F2`, ...) which are offsets to records in Fields section. We have `F# = (len(file) - len(footer) - F) / sizeof(uint64)` fields. + + + (...) [F] [F + F#] + | Fields | Fields Index. | + |================================|================================| + | | | + | |~~~~~~~~|~~~~~~~~|---...---|||--------|--------|...|--------|| + ||->| Dict | Length | Name ||| 0 | 1 | | F# - 1 || + || |~~~~~~~~|~~~~~~~~|---...---|||--------|----|---|...|--------|| + || | | | + ||===============================|==============|=================| + | | + |----------------------------------------------| + + +## Dictionaries + Postings + +Each of fields has its own dictionary, encoded in [Vellum](https://github.com/couchbase/vellum) format. Dictionary consists of pairs `(term, offset)`, where `offset` indicates the position of postings (list of documents) for this particular term. + + |================================================================|- Dictionaries + + | | Postings + + | | DocValues + | Freq/Norm (chunked) | + | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | |->[ Freq | Norm (float32 under varint) ] | + | | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | | | + | |------------------------------------------------------------| | + | Location Details (chunked) | | + | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | |->[ Size | Pos | Start | End | Arr# | ArrPos | ... 
] | | + | | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | | | | + | |----------------------| | | + | Postings List | | | + | |~~~~~~~~|~~~~~|~~|~~~~~~~~|-----------...--| | | + | |->| F/N | LD | Length | ROARING BITMAP | | | + | | |~~~~~|~~|~~~~~~~~|~~~~~~~~|-----------...--| | | + | | |----------------------------------------------| | + | |--------------------------------------| | + | Dictionary | | + | |~~~~~~~~|--------------------------|-...-| | + | |->| Length | VELLUM DATA : (TERM -> OFFSET) | | + | | |~~~~~~~~|----------------------------...-| | + | | | + |======|=========================================================|- DocValues Index + | | | + |======|=========================================================|- Fields + | | | + | |~~~~|~~~|~~~~~~~~|---...---| | + | | Dict | Length | Name | | + | |~~~~~~~~|~~~~~~~~|---...---| | + | | + |================================================================| + +## DocValues + +DocValues Index is `F#` pairs of varints, one pair per field. Each pair of varints indicates start and end point of DocValues slice. + + |================================================================| + | |------...--| | + | |->| DocValues |<-| | + | | |------...--| | | + |==|=================|===========================================|- DocValues Index + ||~|~~~~~~~~~|~~~~~~~|~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + || DV1 START | DV1 STOP | . . . . . | DV(F#) START | DV(F#) END || + ||~~~~~~~~~~~|~~~~~~~~~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + |================================================================| + +DocValues is chunked Snappy-compressed values for each document and field. + + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + [ Doc# in Chunk | Doc1 | Offset1 | ... | DocN | OffsetN | SNAPPY COMPRESSED DATA ] + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + +Last 16 bytes are description of chunks. 
+ + |~~~~~~~~~~~~...~|----------------|----------------| + | Chunk Sizes | Chunk Size Arr | Chunk# | + |~~~~~~~~~~~~...~|----------------|----------------| diff --git a/vendor/github.com/blevesearch/zap/v13/.gitignore b/vendor/github.com/blevesearch/zap/v13/.gitignore new file mode 100644 index 0000000..46d1cfa --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/.gitignore @@ -0,0 +1,12 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +/cmd/zap/zap +*.test +tags diff --git a/vendor/github.com/blevesearch/zap/v13/LICENSE b/vendor/github.com/blevesearch/zap/v13/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/zap/v13/README.md b/vendor/github.com/blevesearch/zap/v13/README.md new file mode 100644 index 0000000..0facb66 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/README.md @@ -0,0 +1,158 @@ +# zap file format + +Advanced ZAP File Format Documentation is [here](zap.md). + +The file is written in the reverse order that we typically access data. This helps us write in one pass since later sections of the file require file offsets of things we've already written. + +Current usage: + +- mmap the entire file +- crc-32 bytes and version are in fixed position at end of the file +- reading remainder of footer could be version specific +- remainder of footer gives us: + - 3 important offsets (docValue , fields index and stored data index) + - 2 important values (number of docs and chunk factor) +- field data is processed once and memoized onto the heap so that we never have to go back to disk for it +- access to stored data by doc number means first navigating to the stored data index, then accessing a fixed position offset into that slice, which gives us the actual address of the data. the first bytes of that section tell us the size of data so that we know where it ends. 
+- access to all other indexed data follows the following pattern: + - first know the field name -> convert to id + - next navigate to term dictionary for that field + - some operations stop here and do dictionary ops + - next use dictionary to navigate to posting list for a specific term + - walk posting list + - if necessary, walk posting details as we go + - if location info is desired, consult location bitmap to see if it is there + +## stored fields section + +- for each document + - preparation phase: + - produce a slice of metadata bytes and data bytes + - produce these slices in field id order + - field value is appended to the data slice + - metadata slice is varint encoded with the following values for each field value + - field id (uint16) + - field type (byte) + - field value start offset in uncompressed data slice (uint64) + - field value length (uint64) + - field number of array positions (uint64) + - one additional value for each array position (uint64) + - compress the data slice using snappy + - file writing phase: + - remember the start offset for this document + - write out meta data length (varint uint64) + - write out compressed data length (varint uint64) + - write out the metadata bytes + - write out the compressed data bytes + +## stored fields idx + +- for each document + - write start offset (remembered from previous section) of stored data (big endian uint64) + +With this index and a known document number, we have direct access to all the stored field data. 
+ +## posting details (freq/norm) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode term frequency (uint64) + - encode norm factor (float32) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. + +## posting details (location) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode field (uint16) + - encode field pos (uint64) + - encode field start (uint64) + - encode field end (uint64) + - encode number of array positions to follow (uint64) + - encode each array position (each uint64) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. 
+ +## postings list section + +- for each posting list + - preparation phase: + - encode roaring bitmap posting list to bytes (so we know the length) + - file writing phase: + - remember the start position for this posting list + - write freq/norm details offset (remembered from previous, as varint uint64) + - write location details offset (remembered from previous, as varint uint64) + - write length of encoded roaring bitmap + - write the serialized roaring bitmap data + +## dictionary + +- for each field + - preparation phase: + - encode vellum FST with dictionary data pointing to file offset of posting list (remembered from previous) + - file writing phase: + - remember the start position of this persistDictionary + - write length of vellum data (varint uint64) + - write out vellum data + +## fields section + +- for each field + - file writing phase: + - remember start offset for each field + - write dictionary address (remembered from previous) (varint uint64) + - write length of field name (varint uint64) + - write field name bytes + +## fields idx + +- for each field + - file writing phase: + - write big endian uint64 of start offset for each field + +NOTE: currently we don't know or record the length of this fields index. Instead we rely on the fact that we know it immediately precedes a footer of known size. 
+ +## fields DocValue + +- for each field + - preparation phase: + - produce a slice containing multiple consecutive chunks, where each chunk is composed of a meta section followed by compressed columnar field data + - produce a slice remembering the length of each chunk + - file writing phase: + - remember the start position of this first field DocValue offset in the footer + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +NOTE: currently the meta header inside each chunk gives clue to the location offsets and size of the data pertaining to a given docID and any +read operation leverage that meta information to extract the document specific data from the file. + +## footer + +- file writing phase + - write number of docs (big endian uint64) + - write stored field index location (big endian uint64) + - write field index location (big endian uint64) + - write field docValue location (big endian uint64) + - write out chunk factor (big endian uint32) + - write out version (big endian uint32) + - write out file CRC of everything preceding this (big endian uint32) diff --git a/vendor/github.com/blevesearch/zap/v13/build.go b/vendor/github.com/blevesearch/zap/v13/build.go new file mode 100644 index 0000000..58d829f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/build.go @@ -0,0 +1,156 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bufio" + "math" + "os" + + "github.com/couchbase/vellum" +) + +const Version uint32 = 13 + +const Type string = "zap" + +const fieldNotUninverted = math.MaxUint64 + +func (sb *SegmentBase) Persist(path string) error { + return PersistSegmentBase(sb, path) +} + +// PersistSegmentBase persists SegmentBase in the zap file format. +func PersistSegmentBase(sb *SegmentBase, path string) error { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + br := bufio.NewWriter(f) + + _, err = br.Write(sb.mem) + if err != nil { + cleanup() + return err + } + + err = persistFooter(sb.numDocs, sb.storedIndexOffset, sb.fieldsIndexOffset, sb.docValueOffset, + sb.chunkMode, sb.memCRC, br) + if err != nil { + cleanup() + return err + } + + err = br.Flush() + if err != nil { + cleanup() + return err + } + + err = f.Sync() + if err != nil { + cleanup() + return err + } + + err = f.Close() + if err != nil { + cleanup() + return err + } + + return nil +} + +func persistStoredFieldValues(fieldID int, + storedFieldValues [][]byte, stf []byte, spf [][]uint64, + curr int, metaEncode varintEncoder, data []byte) ( + int, []byte, error) { + for i := 0; i < len(storedFieldValues); i++ { + // encode field + _, err := metaEncode(uint64(fieldID)) + if err != nil { + return 0, nil, err + } + // encode type + _, err = metaEncode(uint64(stf[i])) + if err != nil { + return 0, nil, err + } + // encode start offset + _, err = metaEncode(uint64(curr)) + if err != nil { + return 0, nil, err + } + // end len + _, err = metaEncode(uint64(len(storedFieldValues[i]))) + if err != nil { + return 0, nil, err + } + // encode number of array pos + _, err = metaEncode(uint64(len(spf[i]))) + if err != nil { + return 0, nil, err + } + // encode all array 
positions + for _, pos := range spf[i] { + _, err = metaEncode(pos) + if err != nil { + return 0, nil, err + } + } + + data = append(data, storedFieldValues[i]...) + curr += len(storedFieldValues[i]) + } + + return curr, data, nil +} + +func InitSegmentBase(mem []byte, memCRC uint32, chunkMode uint32, + fieldsMap map[string]uint16, fieldsInv []string, numDocs uint64, + storedIndexOffset uint64, fieldsIndexOffset uint64, docValueOffset uint64, + dictLocs []uint64) (*SegmentBase, error) { + sb := &SegmentBase{ + mem: mem, + memCRC: memCRC, + chunkMode: chunkMode, + fieldsMap: fieldsMap, + fieldsInv: fieldsInv, + numDocs: numDocs, + storedIndexOffset: storedIndexOffset, + fieldsIndexOffset: fieldsIndexOffset, + docValueOffset: docValueOffset, + dictLocs: dictLocs, + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + } + sb.updateSize() + + err := sb.loadDvReaders() + if err != nil { + return nil, err + } + + return sb, nil +} diff --git a/vendor/github.com/blevesearch/zap/v13/chunk.go b/vendor/github.com/blevesearch/zap/v13/chunk.go new file mode 100644 index 0000000..fe9f398 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/chunk.go @@ -0,0 +1,54 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "fmt" +) + +// LegacyChunkMode was the original chunk mode (always chunk size 1024) +// this mode is still used for chunking doc values. 
+var LegacyChunkMode uint32 = 1024 + +// DefaultChunkMode is the most recent improvement to chunking and should +// be used by default. +var DefaultChunkMode uint32 = 1025 + +func getChunkSize(chunkMode uint32, cardinality uint64, maxDocs uint64) (uint64, error) { + switch { + // any chunkMode <= 1024 will always chunk with chunkSize=chunkMode + case chunkMode <= 1024: + // legacy chunk size + return uint64(chunkMode), nil + + case chunkMode == 1025: + // attempt at simple improvement + // theory - the point of chunking is to put a bound on the maximum number of + // calls to Next() needed to find a random document. ie, you should be able + // to do one jump to the correct chunk, and then walk through at most + // chunk-size items + // previously 1024 was chosen as the chunk size, but this is particularly + // wasteful for low cardinality terms. the observation is that if there + // are less than 1024 items, why not put them all in one chunk, + // this way you'll still achieve the same goal of visiting at most + // chunk-size items. + // no attempt is made to tweak any other case + if cardinality <= 1024 { + return maxDocs, nil + } + return 1024, nil + } + return 0, fmt.Errorf("unknown chunk mode %d", chunkMode) +} diff --git a/vendor/github.com/blevesearch/zap/v13/contentcoder.go b/vendor/github.com/blevesearch/zap/v13/contentcoder.go new file mode 100644 index 0000000..c145b5a --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/contentcoder.go @@ -0,0 +1,243 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "reflect" + + "github.com/golang/snappy" +) + +var reflectStaticSizeMetaData int + +func init() { + var md MetaData + reflectStaticSizeMetaData = int(reflect.TypeOf(md).Size()) +} + +var termSeparator byte = 0xff +var termSeparatorSplitSlice = []byte{termSeparator} + +type chunkedContentCoder struct { + final []byte + chunkSize uint64 + currChunk uint64 + chunkLens []uint64 + + w io.Writer + progressiveWrite bool + + chunkMetaBuf bytes.Buffer + chunkBuf bytes.Buffer + + chunkMeta []MetaData + + compressed []byte // temp buf for snappy compression +} + +// MetaData represents the data information inside a +// chunk. +type MetaData struct { + DocNum uint64 // docNum of the data inside the chunk + DocDvOffset uint64 // offset of data inside the chunk for the given docid +} + +// newChunkedContentCoder returns a new chunk content coder which +// packs data into chunks based on the provided chunkSize +func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, + w io.Writer, progressiveWrite bool) *chunkedContentCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedContentCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + chunkMeta: make([]MetaData, 0, total), + w: w, + progressiveWrite: progressiveWrite, + } + + return rv +} + +// Reset lets you reuse this chunked content coder. Buffers are reset +// and re used. You cannot change the chunk size. 
+func (c *chunkedContentCoder) Reset() { + c.currChunk = 0 + c.final = c.final[:0] + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } + c.chunkMeta = c.chunkMeta[:0] +} + +func (c *chunkedContentCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) { + total := int(maxDocNum/chunkSize + 1) + c.chunkSize = chunkSize + if cap(c.chunkLens) < total { + c.chunkLens = make([]uint64, total) + } else { + c.chunkLens = c.chunkLens[:total] + } + if cap(c.chunkMeta) < total { + c.chunkMeta = make([]MetaData, 0, total) + } +} + +// Close indicates you are done calling Add() this allows +// the final chunk to be encoded. +func (c *chunkedContentCoder) Close() error { + return c.flushContents() +} + +func (c *chunkedContentCoder) flushContents() error { + // flush the contents, with meta information at first + buf := make([]byte, binary.MaxVarintLen64) + n := binary.PutUvarint(buf, uint64(len(c.chunkMeta))) + _, err := c.chunkMetaBuf.Write(buf[:n]) + if err != nil { + return err + } + + // write out the metaData slice + for _, meta := range c.chunkMeta { + _, err := writeUvarints(&c.chunkMetaBuf, meta.DocNum, meta.DocDvOffset) + if err != nil { + return err + } + } + + // write the metadata to final data + metaData := c.chunkMetaBuf.Bytes() + c.final = append(c.final, c.chunkMetaBuf.Bytes()...) + // write the compressed data to the final data + c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes()) + c.final = append(c.final, c.compressed...) + + c.chunkLens[c.currChunk] = uint64(len(c.compressed) + len(metaData)) + + if c.progressiveWrite { + _, err := c.w.Write(c.final) + if err != nil { + return err + } + c.final = c.final[:0] + } + + return nil +} + +// Add encodes the provided byte slice into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. 
+func (c *chunkedContentCoder) Add(docNum uint64, vals []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // flush out the previous chunk details + err := c.flushContents() + if err != nil { + return err + } + // clearing the chunk specific meta for next chunk + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + c.chunkMeta = c.chunkMeta[:0] + c.currChunk = chunk + } + + // get the starting offset for this doc + dvOffset := c.chunkBuf.Len() + dvSize, err := c.chunkBuf.Write(vals) + if err != nil { + return err + } + + c.chunkMeta = append(c.chunkMeta, MetaData{ + DocNum: docNum, + DocDvOffset: uint64(dvOffset + dvSize), + }) + return nil +} + +// Write commits all the encoded chunked contents to the provided writer. +// +// | ..... data ..... | chunk offsets (varints) +// | position of chunk offsets (uint64) | number of offsets (uint64) | +// +func (c *chunkedContentCoder) Write() (int, error) { + var tw int + + if c.final != nil { + // write out the data section first + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsStart := uint64(tw) + + if cap(c.final) < binary.MaxVarintLen64 { + c.final = make([]byte, binary.MaxVarintLen64) + } else { + c.final = c.final[0:binary.MaxVarintLen64] + } + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + // write out the chunk offsets + for _, chunkOffset := range chunkOffsets { + n := binary.PutUvarint(c.final, chunkOffset) + nw, err := c.w.Write(c.final[:n]) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsLen := uint64(tw) - chunkOffsetsStart + + c.final = c.final[0:8] + // write out the length of chunk offsets + binary.BigEndian.PutUint64(c.final, chunkOffsetsLen) + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + // write out the number of chunks + binary.BigEndian.PutUint64(c.final, uint64(len(c.chunkLens))) + nw, err = c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + 
+ c.final = c.final[:0] + + return tw, nil +} + +// ReadDocValueBoundary elicits the start, end offsets from a +// metaData header slice +func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = metaHeaders[chunk-1].DocDvOffset + } + return start, metaHeaders[chunk].DocDvOffset +} diff --git a/vendor/github.com/blevesearch/zap/v13/count.go b/vendor/github.com/blevesearch/zap/v13/count.go new file mode 100644 index 0000000..50290f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/count.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "hash/crc32" + "io" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// CountHashWriter is a wrapper around a Writer which counts the number of +// bytes which have been written and computes a crc32 hash +type CountHashWriter struct { + w io.Writer + crc uint32 + n int + s segment.StatsReporter +} + +// NewCountHashWriter returns a CountHashWriter which wraps the provided Writer +func NewCountHashWriter(w io.Writer) *CountHashWriter { + return &CountHashWriter{w: w} +} + +func NewCountHashWriterWithStatsReporter(w io.Writer, s segment.StatsReporter) *CountHashWriter { + return &CountHashWriter{w: w, s: s} +} + +// Write writes the provided bytes to the wrapped writer and counts the bytes +func (c *CountHashWriter) Write(b []byte) (int, error) { + n, err := c.w.Write(b) + c.crc = crc32.Update(c.crc, crc32.IEEETable, b[:n]) + c.n += n + if c.s != nil { + c.s.ReportBytesWritten(uint64(n)) + } + return n, err +} + +// Count returns the number of bytes written +func (c *CountHashWriter) Count() int { + return c.n +} + +// Sum32 returns the CRC-32 hash of the content written to this writer +func (c *CountHashWriter) Sum32() uint32 { + return c.crc +} diff --git a/vendor/github.com/blevesearch/zap/v13/dict.go b/vendor/github.com/blevesearch/zap/v13/dict.go new file mode 100644 index 0000000..ad4a8f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/dict.go @@ -0,0 +1,263 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" +) + +// Dictionary is the zap representation of the term dictionary +type Dictionary struct { + sb *SegmentBase + field string + fieldID uint16 + fst *vellum.FST + fstReader *vellum.Reader +} + +// PostingsList returns the postings list for the specified term +func (d *Dictionary) PostingsList(term []byte, except *roaring.Bitmap, + prealloc segment.PostingsList) (segment.PostingsList, error) { + var preallocPL *PostingsList + pl, ok := prealloc.(*PostingsList) + if ok && pl != nil { + preallocPL = pl + } + return d.postingsList(term, except, preallocPL) +} + +func (d *Dictionary) postingsList(term []byte, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + if d.fstReader == nil { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + postingsOffset, exists, err := d.fstReader.Get(term) + if err != nil { + return nil, fmt.Errorf("vellum err: %v", err) + } + if !exists { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + return d.postingsListFromOffset(postingsOffset, except, rv) +} + +func (d *Dictionary) postingsListFromOffset(postingsOffset uint64, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + rv = d.postingsListInit(rv, except) + + err := rv.read(postingsOffset, d) + if err != nil { + return nil, err + } + + return rv, nil +} + +func (d *Dictionary) postingsListInit(rv *PostingsList, except *roaring.Bitmap) *PostingsList { + if rv == nil || rv == emptyPostingsList { + rv = &PostingsList{} + } else { + postings := rv.postings + if postings != nil { + 
postings.Clear() + } + + *rv = PostingsList{} // clear the struct + + rv.postings = postings + } + rv.sb = d.sb + rv.except = except + return rv +} + +func (d *Dictionary) Contains(key []byte) (bool, error) { + return d.fst.Contains(key) +} + +// Iterator returns an iterator for this dictionary +func (d *Dictionary) Iterator() segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Iterator(nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// PrefixIterator returns an iterator which only visits terms having the +// the specified prefix +func (d *Dictionary) PrefixIterator(prefix string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + kBeg := []byte(prefix) + kEnd := segment.IncrementBytes(kBeg) + + if d.fst != nil { + itr, err := d.fst.Iterator(kBeg, kEnd) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// RangeIterator returns an iterator which only visits terms between the +// start and end terms. NOTE: bleve.index API specifies the end is inclusive. 
+func (d *Dictionary) RangeIterator(start, end string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + // need to increment the end position to be inclusive + var endBytes []byte + if len(end) > 0 { + endBytes = []byte(end) + if endBytes[len(endBytes)-1] < 0xff { + endBytes[len(endBytes)-1]++ + } else { + endBytes = append(endBytes, 0xff) + } + } + + if d.fst != nil { + itr, err := d.fst.Iterator([]byte(start), endBytes) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// AutomatonIterator returns an iterator which only visits terms +// having the the vellum automaton and start/end key range +func (d *Dictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Search(a, startKeyInclusive, endKeyExclusive) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +func (d *Dictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) segment.DictionaryIterator { + + rv := &DictionaryIterator{ + d: d, + omitCount: !includeCount, + } + + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + rv.err = err + return rv + } + for _, term := range onlyTerms { + err = builder.Insert(term, 0) + if err != nil { + rv.err = err + return rv + } + } + err = builder.Close() + if err != nil { + rv.err = err + return rv + } + + onlyFST, err := vellum.Load(buf.Bytes()) + if err != nil { + rv.err = err + return rv + } + + itr, err := d.fst.Search(onlyFST, nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + + return rv +} + +// DictionaryIterator is an iterator for term dictionary +type DictionaryIterator struct { + d *Dictionary + itr vellum.Iterator + err error + tmp PostingsList + entry 
index.DictEntry + omitCount bool +} + +// Next returns the next entry in the dictionary +func (i *DictionaryIterator) Next() (*index.DictEntry, error) { + if i.err != nil && i.err != vellum.ErrIteratorDone { + return nil, i.err + } else if i.itr == nil || i.err == vellum.ErrIteratorDone { + return nil, nil + } + term, postingsOffset := i.itr.Current() + i.entry.Term = string(term) + if !i.omitCount { + i.err = i.tmp.read(postingsOffset, i.d) + if i.err != nil { + return nil, i.err + } + i.entry.Count = i.tmp.Count() + } + i.err = i.itr.Next() + return &i.entry, nil +} diff --git a/vendor/github.com/blevesearch/zap/v13/docvalues.go b/vendor/github.com/blevesearch/zap/v13/docvalues.go new file mode 100644 index 0000000..793797b --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/docvalues.go @@ -0,0 +1,312 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package zap

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"math"
	"reflect"
	"sort"

	"github.com/blevesearch/bleve/index"
	"github.com/blevesearch/bleve/index/scorch/segment"
	"github.com/blevesearch/bleve/size"
	"github.com/golang/snappy"
)

// reflectStaticSizedocValueReader caches the static (shallow) struct
// size of docValueReader, computed once in init, for use by size().
var reflectStaticSizedocValueReader int

func init() {
	var dvi docValueReader
	reflectStaticSizedocValueReader = int(reflect.TypeOf(dvi).Size())
}

// docNumTermsVisitor is the callback used when iterating all doc
// values; terms is the raw uncompressed term bytes for docNum.
type docNumTermsVisitor func(docNum uint64, terms []byte) error

// docVisitState carries reusable per-field doc-value readers between
// successive VisitDocumentFieldTerms calls against the same segment.
type docVisitState struct {
	dvrs    map[uint16]*docValueReader
	segment *SegmentBase
}

// docValueReader reads the chunked, snappy-compressed doc-value
// section of a segment for a single field.
type docValueReader struct {
	field          string
	curChunkNum    uint64    // chunk currently cached; MaxUint64 = none
	chunkOffsets   []uint64  // end offsets of each chunk, relative to dvDataLoc
	dvDataLoc      uint64    // start of this field's doc-value data in s.mem
	curChunkHeader []MetaData
	curChunkData   []byte // compressed data cache
	uncompressed   []byte // temp buf for snappy decompression
}

// size reports the approximate in-memory footprint of the reader.
func (di *docValueReader) size() int {
	return reflectStaticSizedocValueReader + size.SizeOfPtr +
		len(di.field) +
		len(di.chunkOffsets)*size.SizeOfUint64 +
		len(di.curChunkHeader)*reflectStaticSizeMetaData +
		len(di.curChunkData)
}

// cloneInto copies the immutable parts of di into rv (allocating rv
// when nil) and resets rv's per-chunk cache state so no chunk appears
// loaded on the clone.
func (di *docValueReader) cloneInto(rv *docValueReader) *docValueReader {
	if rv == nil {
		rv = &docValueReader{}
	}

	rv.field = di.field
	rv.curChunkNum = math.MaxUint64 // sentinel: no chunk loaded yet
	rv.chunkOffsets = di.chunkOffsets // immutable, so it's sharable
	rv.dvDataLoc = di.dvDataLoc
	rv.curChunkHeader = rv.curChunkHeader[:0]
	rv.curChunkData = nil
	rv.uncompressed = rv.uncompressed[:0]

	return rv
}

// curChunkNumber returns the chunk number currently cached on di
// (math.MaxUint64 when none has been loaded).
func (di *docValueReader) curChunkNumber() uint64 {
	return di.curChunkNum
}

// loadFieldDocValueReader builds a docValueReader for field from the
// doc-value byte range [fieldDvLocStart, fieldDvLocEnd) of s.mem.
// It returns (nil, nil) when the field was not uninverted.
func (s *SegmentBase) loadFieldDocValueReader(field string,
	fieldDvLocStart, fieldDvLocEnd uint64) (*docValueReader, error) {
	// get the docValue offset for the given fields
	if fieldDvLocStart == fieldNotUninverted {
		// no docValues found, nothing to do
		return nil, nil
	}

	// read the number of chunks, and chunk offsets position
	var numChunks, chunkOffsetsPosition uint64

	if fieldDvLocEnd-fieldDvLocStart > 16 {
		// footer layout: ... | chunk offsets | offsets-len (8B) | numChunks (8B)
		numChunks = binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-8 : fieldDvLocEnd])
		// read the length of chunk offsets
		chunkOffsetsLen := binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-16 : fieldDvLocEnd-8])
		// acquire position of chunk offsets
		chunkOffsetsPosition = (fieldDvLocEnd - 16) - chunkOffsetsLen
	} else {
		return nil, fmt.Errorf("loadFieldDocValueReader: fieldDvLoc too small: %d-%d", fieldDvLocEnd, fieldDvLocStart)
	}

	fdvIter := &docValueReader{
		curChunkNum:  math.MaxUint64,
		field:        field,
		chunkOffsets: make([]uint64, int(numChunks)),
	}

	// read the chunk offsets
	var offset uint64
	for i := 0; i < int(numChunks); i++ {
		loc, read := binary.Uvarint(s.mem[chunkOffsetsPosition+offset : chunkOffsetsPosition+offset+binary.MaxVarintLen64])
		if read <= 0 {
			return nil, fmt.Errorf("corrupted chunk offset during segment load")
		}
		fdvIter.chunkOffsets[i] = loc
		offset += uint64(read)
	}

	// set the data offset
	fdvIter.dvDataLoc = fieldDvLocStart

	return fdvIter, nil
}

// loadDvChunk decodes chunk chunkNumber's per-doc metadata headers and
// caches its (still snappy-compressed) data slice on di. An empty
// chunk simply clears the cache and records the chunk number.
func (di *docValueReader) loadDvChunk(chunkNumber uint64, s *SegmentBase) error {
	// advance to the chunk where the docValues
	// reside for the given docNum
	destChunkDataLoc, curChunkEnd := di.dvDataLoc, di.dvDataLoc
	start, end := readChunkBoundary(int(chunkNumber), di.chunkOffsets)
	if start >= end {
		// empty chunk: nothing stored for any doc in this range
		di.curChunkHeader = di.curChunkHeader[:0]
		di.curChunkData = nil
		di.curChunkNum = chunkNumber
		di.uncompressed = di.uncompressed[:0]
		return nil
	}

	destChunkDataLoc += start
	curChunkEnd += end

	// read the number of docs reside in the chunk
	numDocs, read := binary.Uvarint(s.mem[destChunkDataLoc : destChunkDataLoc+binary.MaxVarintLen64])
	if read <= 0 {
		return fmt.Errorf("failed to read the chunk")
	}
	chunkMetaLoc := destChunkDataLoc + uint64(read)

	offset := uint64(0)
	if cap(di.curChunkHeader) < int(numDocs) {
		di.curChunkHeader = make([]MetaData, int(numDocs))
	} else {
		di.curChunkHeader = di.curChunkHeader[:int(numDocs)]
	}
	// per-doc header entries: (DocNum, DocDvOffset) uvarint pairs
	for i := 0; i < int(numDocs); i++ {
		di.curChunkHeader[i].DocNum, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64])
		offset += uint64(read)
		di.curChunkHeader[i].DocDvOffset, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64])
		offset += uint64(read)
	}

	compressedDataLoc := chunkMetaLoc + offset
	dataLength := curChunkEnd - compressedDataLoc
	di.curChunkData = s.mem[compressedDataLoc : compressedDataLoc+dataLength]
	di.curChunkNum = chunkNumber
	di.uncompressed = di.uncompressed[:0]
	return nil
}

// iterateAllDocValues walks every chunk in order, decompresses it, and
// invokes visitor once per doc with that doc's term bytes.
func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTermsVisitor) error {
	for i := 0; i < len(di.chunkOffsets); i++ {
		err := di.loadDvChunk(uint64(i), s)
		if err != nil {
			return err
		}
		if di.curChunkData == nil || len(di.curChunkHeader) == 0 {
			continue
		}

		// uncompress the already loaded data
		uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
		if err != nil {
			return err
		}
		di.uncompressed = uncompressed

		// DocDvOffset entries are cumulative end offsets into the
		// uncompressed buffer, so each doc's slice is [start, end).
		start := uint64(0)
		for _, entry := range di.curChunkHeader {
			err = visitor(entry.DocNum, uncompressed[start:entry.DocDvOffset])
			if err != nil {
				return err
			}

			start = entry.DocDvOffset
		}
	}

	return nil
}

// visitDocValues feeds each term stored for docNum (within the
// currently loaded chunk) to visitor, splitting the doc's byte range
// on termSeparatorSplitSlice.
func (di *docValueReader) visitDocValues(docNum uint64,
	visitor index.DocumentFieldTermVisitor) error {
	// binary search the term locations for the docNum
	start, end := di.getDocValueLocs(docNum)
	if start == math.MaxUint64 || end == math.MaxUint64 || start == end {
		return nil
	}

	var uncompressed []byte
	var err error
	// use the uncompressed copy if available
	if len(di.uncompressed) > 0 {
		uncompressed = di.uncompressed
	} else {
		// uncompress the already loaded data
		uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData)
		if err != nil {
			return err
		}
		di.uncompressed = uncompressed
	}

	// pick the terms for the given docNum
	uncompressed = uncompressed[start:end]
	for {
		i := bytes.Index(uncompressed, termSeparatorSplitSlice)
		if i < 0 {
			break
		}

		visitor(di.field, uncompressed[0:i])
		uncompressed = uncompressed[i+1:]
	}

	return nil
}

// getDocValueLocs binary-searches the current chunk header for docNum
// and returns its [start, end) offsets within the uncompressed chunk,
// or (MaxUint64, MaxUint64) when docNum has no doc values here.
func (di *docValueReader) getDocValueLocs(docNum uint64) (uint64, uint64) {
	i := sort.Search(len(di.curChunkHeader), func(i int) bool {
		return di.curChunkHeader[i].DocNum >= docNum
	})
	if i < len(di.curChunkHeader) && di.curChunkHeader[i].DocNum == docNum {
		return ReadDocValueBoundary(i, di.curChunkHeader)
	}
	return math.MaxUint64, math.MaxUint64
}

// VisitDocumentFieldTerms is an implementation of the
// DocumentFieldTermVisitable interface
func (s *SegmentBase) VisitDocumentFieldTerms(localDocNum uint64, fields []string,
	visitor index.DocumentFieldTermVisitor, dvsIn segment.DocVisitState) (
	segment.DocVisitState, error) {
	dvs, ok := dvsIn.(*docVisitState)
	if !ok || dvs == nil {
		dvs = &docVisitState{}
	} else {
		if dvs.segment != s {
			// state belongs to a different segment; drop its readers
			dvs.segment = s
			dvs.dvrs = nil
		}
	}

	var fieldIDPlus1 uint16
	if dvs.dvrs == nil {
		dvs.dvrs = make(map[uint16]*docValueReader, len(fields))
		for _, field := range fields {
			if fieldIDPlus1, ok = s.fieldsMap[field]; !ok {
				continue
			}
			fieldID := fieldIDPlus1 - 1
			if dvIter, exists := s.fieldDvReaders[fieldID]; exists &&
				dvIter != nil {
				dvs.dvrs[fieldID] = dvIter.cloneInto(dvs.dvrs[fieldID])
			}
		}
	}

	// find the chunkNumber where the docValues are stored
	// NOTE: doc values continue to use legacy chunk mode
	chunkFactor, err := getChunkSize(LegacyChunkMode, 0, 0)
	if err != nil {
		return nil, err
	}
	docInChunk := localDocNum / chunkFactor
	var dvr *docValueReader
	for _, field := range fields {
		if fieldIDPlus1, ok = s.fieldsMap[field]; !ok {
			continue
		}
		fieldID := fieldIDPlus1 - 1
		if dvr, ok = dvs.dvrs[fieldID]; ok && dvr != nil {
			// check if the chunk is already loaded
			if docInChunk != dvr.curChunkNumber() {
				err := dvr.loadDvChunk(docInChunk, s)
				if err != nil {
					return dvs, err
				}
			}

			// NOTE(review): per-field visit errors are deliberately
			// dropped here; other fields are still visited
			_ = dvr.visitDocValues(localDocNum, visitor)
		}
	}
	return dvs, nil
}

// VisitableDocValueFields returns the list of fields with
// persisted doc value terms ready to be visitable using the
// VisitDocumentFieldTerms method.
func (s *SegmentBase) VisitableDocValueFields() ([]string, error) {
	return s.fieldDvNames, nil
}
diff --git a/vendor/github.com/blevesearch/zap/v13/enumerator.go b/vendor/github.com/blevesearch/zap/v13/enumerator.go
new file mode 100644
index 0000000..bc5b7e6
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v13/enumerator.go
@@ -0,0 +1,138 @@
// Copyright (c) 2018 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package zap

import (
	"bytes"

	"github.com/couchbase/vellum"
)

// enumerator provides an ordered traversal of multiple vellum
// iterators. Like JOIN of iterators, the enumerator produces a
// sequence of (key, iteratorIndex, value) tuples, sorted by key ASC,
// then iteratorIndex ASC, where the same key might be seen or
// repeated across multiple child iterators.
type enumerator struct {
	itrs   []vellum.Iterator
	currKs [][]byte // current key of each child iterator (nil when exhausted)
	currVs []uint64 // current value of each child iterator

	lowK    []byte // the smallest key across all non-exhausted children
	lowIdxs []int  // indices of children currently positioned on lowK
	lowCurr int    // cursor into lowIdxs for Current()/Next()
}

// newEnumerator returns a new enumerator over the vellum Iterators
func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) {
	rv := &enumerator{
		itrs:    itrs,
		currKs:  make([][]byte, len(itrs)),
		currVs:  make([]uint64, len(itrs)),
		lowIdxs: make([]int, 0, len(itrs)),
	}
	for i, itr := range rv.itrs {
		rv.currKs[i], rv.currVs[i] = itr.Current()
	}
	rv.updateMatches(false)
	// no low key and no matches means every child was already done
	if rv.lowK == nil && len(rv.lowIdxs) == 0 {
		return rv, vellum.ErrIteratorDone
	}
	return rv, nil
}

// updateMatches maintains the low key matches based on the currKs
func (m *enumerator) updateMatches(skipEmptyKey bool) {
	m.lowK = nil
	m.lowIdxs = m.lowIdxs[:0]
	m.lowCurr = 0

	for i, key := range m.currKs {
		if (key == nil && m.currVs[i] == 0) || // in case of empty iterator
			(len(key) == 0 && skipEmptyKey) { // skip empty keys
			continue
		}

		cmp := bytes.Compare(key, m.lowK)
		if cmp < 0 || len(m.lowIdxs) == 0 {
			// reached a new low
			m.lowK = key
			m.lowIdxs = m.lowIdxs[:0]
			m.lowIdxs = append(m.lowIdxs, i)
		} else if cmp == 0 {
			// ties accumulate so every child on lowK is visited
			m.lowIdxs = append(m.lowIdxs, i)
		}
	}
}

// Current returns the enumerator's current key, iterator-index, and
// value. If the enumerator is not pointing at a valid value (because
// Next returned an error previously), Current will return nil,0,0.
func (m *enumerator) Current() ([]byte, int, uint64) {
	var i int
	var v uint64
	if m.lowCurr < len(m.lowIdxs) {
		i = m.lowIdxs[m.lowCurr]
		v = m.currVs[i]
	}
	return m.lowK, i, v
}

// GetLowIdxsAndValues will return all of the iterator indices
// which point to the current key, and their corresponding
// values. This can be used by advanced caller which may need
// to peek into these other sets of data before processing.
+func (m *enumerator) GetLowIdxsAndValues() ([]int, []uint64) { + values := make([]uint64, 0, len(m.lowIdxs)) + for _, idx := range m.lowIdxs { + values = append(values, m.currVs[idx]) + } + return m.lowIdxs, values +} + +// Next advances the enumerator to the next key/iterator/value result, +// else vellum.ErrIteratorDone is returned. +func (m *enumerator) Next() error { + m.lowCurr += 1 + if m.lowCurr >= len(m.lowIdxs) { + // move all the current low iterators forwards + for _, vi := range m.lowIdxs { + err := m.itrs[vi].Next() + if err != nil && err != vellum.ErrIteratorDone { + return err + } + m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current() + } + // can skip any empty keys encountered at this point + m.updateMatches(true) + } + if m.lowK == nil && len(m.lowIdxs) == 0 { + return vellum.ErrIteratorDone + } + return nil +} + +// Close all the underlying Iterators. The first error, if any, will +// be returned. +func (m *enumerator) Close() error { + var rv error + for _, itr := range m.itrs { + err := itr.Close() + if rv == nil { + rv = err + } + } + return rv +} diff --git a/vendor/github.com/blevesearch/zap/v13/intDecoder.go b/vendor/github.com/blevesearch/zap/v13/intDecoder.go new file mode 100644 index 0000000..4cd008f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/intDecoder.go @@ -0,0 +1,111 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package zap

import (
	"encoding/binary"
	"fmt"

	"github.com/blevesearch/bleve/index/scorch/segment"
)

// chunkedIntDecoder decodes a chunked uvarint stream produced by
// chunkedIntCoder: a uvarint chunk count, per-chunk end offsets, then
// the concatenated chunk payloads.
type chunkedIntDecoder struct {
	startOffset     uint64   // where this term's encoding starts in data (termNotEncoded = none)
	dataStartOffset uint64   // first byte of chunk payloads, after the offsets header
	chunkOffsets    []uint64 // cumulative end offsets of each chunk
	curChunkBytes   []byte
	data            []byte
	r               *segment.MemUvarintReader
}

// newChunkedIntDecoder positions a decoder over buf at offset, parsing
// the chunk count and chunk offsets. An offset of termNotEncoded
// yields a decoder with zero chunks.
func newChunkedIntDecoder(buf []byte, offset uint64) *chunkedIntDecoder {
	rv := &chunkedIntDecoder{startOffset: offset, data: buf}
	var n, numChunks uint64
	var read int
	if offset == termNotEncoded {
		numChunks = 0
	} else {
		numChunks, read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64])
	}

	n += uint64(read) // read stays 0 in the termNotEncoded case
	if cap(rv.chunkOffsets) >= int(numChunks) {
		rv.chunkOffsets = rv.chunkOffsets[:int(numChunks)]
	} else {
		rv.chunkOffsets = make([]uint64, int(numChunks))
	}
	for i := 0; i < int(numChunks); i++ {
		rv.chunkOffsets[i], read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64])
		n += uint64(read)
	}
	rv.dataStartOffset = offset + n
	return rv
}

// loadChunk points the internal uvarint reader at the given chunk's
// byte range, reusing the reader when one already exists.
func (d *chunkedIntDecoder) loadChunk(chunk int) error {
	if d.startOffset == termNotEncoded {
		d.r = segment.NewMemUvarintReader([]byte(nil))
		return nil
	}

	if chunk >= len(d.chunkOffsets) {
		return fmt.Errorf("tried to load freq chunk that doesn't exist %d/(%d)",
			chunk, len(d.chunkOffsets))
	}

	end, start := d.dataStartOffset, d.dataStartOffset
	s, e := readChunkBoundary(chunk, d.chunkOffsets)
	start += s
	end += e
	d.curChunkBytes = d.data[start:end]
	if d.r == nil {
		d.r = segment.NewMemUvarintReader(d.curChunkBytes)
	} else {
		d.r.Reset(d.curChunkBytes)
	}

	return nil
}

// reset clears all decoder state for reuse, retaining the allocated
// chunkOffsets storage.
func (d *chunkedIntDecoder) reset() {
	d.startOffset = 0
	d.dataStartOffset = 0
	d.chunkOffsets = d.chunkOffsets[:0]
	d.curChunkBytes = d.curChunkBytes[:0]
	d.data = d.data[:0]
	if d.r != nil {
		d.r.Reset([]byte(nil))
	}
}

// isNil reports whether no chunk has been loaded yet.
func (d *chunkedIntDecoder) isNil() bool {
	return d.curChunkBytes == nil
}

// readUvarint reads the next uvarint from the loaded chunk.
func (d *chunkedIntDecoder) readUvarint() (uint64, error) {
	return d.r.ReadUvarint()
}

// SkipUvarint advances past the next uvarint in the loaded chunk.
func (d *chunkedIntDecoder) SkipUvarint() {
	d.r.SkipUvarint()
}

// SkipBytes advances past count raw bytes in the loaded chunk.
func (d *chunkedIntDecoder) SkipBytes(count int) {
	d.r.SkipBytes(count)
}

// Len reports the number of unread bytes left in the loaded chunk.
func (d *chunkedIntDecoder) Len() int {
	return d.r.Len()
}
diff --git a/vendor/github.com/blevesearch/zap/v13/intcoder.go b/vendor/github.com/blevesearch/zap/v13/intcoder.go
new file mode 100644
index 0000000..c3c488f
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v13/intcoder.go
@@ -0,0 +1,206 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package zap

import (
	"bytes"
	"encoding/binary"
	"io"
)

// We can safely use 0 to represent termNotEncoded since 0
// could never be a valid address for term location information.
// (stored field index is always non-empty and earlier in the file)
const termNotEncoded = 0

// chunkedIntCoder packs uvarint-encoded values into fixed-size doc
// chunks, tracking each chunk's encoded length so readers can seek
// directly to a chunk.
type chunkedIntCoder struct {
	final     []byte       // fully encoded chunks, appended on Close
	chunkSize uint64       // docs per chunk
	chunkBuf  bytes.Buffer // chunk currently being built
	chunkLens []uint64     // encoded byte length of each chunk
	currChunk uint64       // index of the chunk being built

	buf []byte // scratch space for uvarint encoding
}

// newChunkedIntCoder returns a new chunk int coder which packs data into
// chunks based on the provided chunkSize and supports up to the specified
// maxDocNum
func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder {
	numChunks := maxDocNum/chunkSize + 1
	return &chunkedIntCoder{
		chunkSize: chunkSize,
		chunkLens: make([]uint64, numChunks),
		final:     make([]byte, 0, 64),
	}
}

// Reset prepares the coder for reuse: accumulated output, the chunk
// buffer, and the chunk lengths are cleared while their backing
// storage is retained. The chunk size and max doc num stay fixed.
func (c *chunkedIntCoder) Reset() {
	c.currChunk = 0
	c.chunkBuf.Reset()
	c.final = c.final[:0]
	lens := c.chunkLens
	for i := range lens {
		lens[i] = 0
	}
}

// SetChunkSize changes the chunk size. It is only valid to do so with
// a new chunkedIntCoder, or immediately after calling Reset().
func (c *chunkedIntCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) {
	c.chunkSize = chunkSize
	needed := int(maxDocNum/chunkSize + 1)
	if needed <= cap(c.chunkLens) {
		// existing storage is big enough; just reslice
		c.chunkLens = c.chunkLens[:needed]
		return
	}
	c.chunkLens = make([]uint64, needed)
}

// Add encodes the provided integers into the correct chunk for the provided
// doc num. You MUST call Add() with increasing docNums.
+func (c *chunkedIntCoder) Add(docNum uint64, vals ...uint64) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + if len(c.buf) < binary.MaxVarintLen64 { + c.buf = make([]byte, binary.MaxVarintLen64) + } + + for _, val := range vals { + wb := binary.PutUvarint(c.buf, val) + _, err := c.chunkBuf.Write(c.buf[:wb]) + if err != nil { + return err + } + } + + return nil +} + +func (c *chunkedIntCoder) AddBytes(docNum uint64, buf []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // starting a new chunk + c.Close() + c.chunkBuf.Reset() + c.currChunk = chunk + } + + _, err := c.chunkBuf.Write(buf) + return err +} + +// Close indicates you are done calling Add() this allows the final chunk +// to be encoded. +func (c *chunkedIntCoder) Close() { + encodingBytes := c.chunkBuf.Bytes() + c.chunkLens[c.currChunk] = uint64(len(encodingBytes)) + c.final = append(c.final, encodingBytes...) + c.currChunk = uint64(cap(c.chunkLens)) // sentinel to detect double close +} + +// Write commits all the encoded chunked integers to the provided writer. 
+func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { + bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) + if len(c.buf) < bufNeeded { + c.buf = make([]byte, bufNeeded) + } + buf := c.buf + + // convert the chunk lengths into chunk offsets + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + + // write out the number of chunks & each chunk offsets + n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) + for _, chunkOffset := range chunkOffsets { + n += binary.PutUvarint(buf[n:], chunkOffset) + } + + tw, err := w.Write(buf[:n]) + if err != nil { + return tw, err + } + + // write out the data + nw, err := w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + return tw, nil +} + +// writeAt commits all the encoded chunked integers to the provided writer +// and returns the starting offset, total bytes written and an error +func (c *chunkedIntCoder) writeAt(w io.Writer) (uint64, int, error) { + startOffset := uint64(termNotEncoded) + if len(c.final) <= 0 { + return startOffset, 0, nil + } + + if chw := w.(*CountHashWriter); chw != nil { + startOffset = uint64(chw.Count()) + } + + tw, err := c.Write(w) + return startOffset, tw, err +} + +func (c *chunkedIntCoder) FinalSize() int { + return len(c.final) +} + +// modifyLengthsToEndOffsets converts the chunk length array +// to a chunk offset array. The readChunkBoundary +// will figure out the start and end of every chunk from +// these offsets. Starting offset of i'th index is stored +// in i-1'th position except for 0'th index and ending offset +// is stored at i'th index position. +// For 0'th element, starting position is always zero. 
// eg:
// Lens -> 5 5 5 5 => 5 10 15 20
// Lens -> 0 5 0 5 => 0 5 5 10
// Lens -> 0 0 0 5 => 0 0 0 5
// Lens -> 5 0 0 0 => 5 5 5 5
// Lens -> 0 5 0 0 => 0 5 5 5
// Lens -> 0 0 5 0 => 0 0 5 5
// NOTE: converts lengths in place (the returned slice is the input);
// index always tracks i-1 here.
func modifyLengthsToEndOffsets(lengths []uint64) []uint64 {
	var runningOffset uint64
	var index, i int
	for i = 1; i <= len(lengths); i++ {
		runningOffset += lengths[i-1]
		lengths[index] = runningOffset
		index++
	}
	return lengths
}

// readChunkBoundary returns chunk's [start, end) range from the
// cumulative end-offset array: the previous chunk's end offset (zero
// for chunk 0) up to this chunk's end offset.
func readChunkBoundary(chunk int, offsets []uint64) (uint64, uint64) {
	var start uint64
	if chunk > 0 {
		start = offsets[chunk-1]
	}
	return start, offsets[chunk]
}
diff --git a/vendor/github.com/blevesearch/zap/v13/merge.go b/vendor/github.com/blevesearch/zap/v13/merge.go
new file mode 100644
index 0000000..805100f
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v13/merge.go
@@ -0,0 +1,847 @@
// Copyright (c) 2017 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package zap + +import ( + "bufio" + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "sort" + + "github.com/RoaringBitmap/roaring" + seg "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var DefaultFileMergerBufferSize = 1024 * 1024 + +const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc + +// Merge takes a slice of segments and bit masks describing which +// documents may be dropped, and creates a new segment containing the +// remaining data. This new segment is built at the specified path. +func (*ZapPlugin) Merge(segments []seg.Segment, drops []*roaring.Bitmap, path string, + closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + + segmentBases := make([]*SegmentBase, len(segments)) + for segmenti, segment := range segments { + switch segmentx := segment.(type) { + case *Segment: + segmentBases[segmenti] = &segmentx.SegmentBase + case *SegmentBase: + segmentBases[segmenti] = segmentx + default: + panic(fmt.Sprintf("oops, unexpected segment type: %T", segment)) + } + } + return mergeSegmentBases(segmentBases, drops, path, DefaultChunkMode, closeCh, s) +} + +func mergeSegmentBases(segmentBases []*SegmentBase, drops []*roaring.Bitmap, path string, + chunkMode uint32, closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return nil, 0, err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + // buffer the output + br := bufio.NewWriterSize(f, DefaultFileMergerBufferSize) + + // wrap it for counting (tracking offsets) + cr := NewCountHashWriterWithStatsReporter(br, s) + + newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, _, _, _, err := + MergeToWriter(segmentBases, drops, chunkMode, cr, closeCh) + if err != nil { + cleanup() + return nil, 0, err + } + + err = 
persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, + docValueOffset, chunkMode, cr.Sum32(), cr) + if err != nil { + cleanup() + return nil, 0, err + } + + err = br.Flush() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Sync() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Close() + if err != nil { + cleanup() + return nil, 0, err + } + + return newDocNums, uint64(cr.Count()), nil +} + +func MergeToWriter(segments []*SegmentBase, drops []*roaring.Bitmap, + chunkMode uint32, cr *CountHashWriter, closeCh chan struct{}) ( + newDocNums [][]uint64, + numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + dictLocs []uint64, fieldsInv []string, fieldsMap map[string]uint16, + err error) { + docValueOffset = uint64(fieldNotUninverted) + + var fieldsSame bool + fieldsSame, fieldsInv = mergeFields(segments) + fieldsMap = mapFields(fieldsInv) + + numDocs = computeNewDocCount(segments, drops) + + if isClosed(closeCh) { + return nil, 0, 0, 0, 0, nil, nil, nil, seg.ErrClosed + } + + if numDocs > 0 { + storedIndexOffset, newDocNums, err = mergeStoredAndRemap(segments, drops, + fieldsMap, fieldsInv, fieldsSame, numDocs, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + dictLocs, docValueOffset, err = persistMergedRest(segments, drops, + fieldsInv, fieldsMap, fieldsSame, + newDocNums, numDocs, chunkMode, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + } else { + dictLocs = make([]uint64, len(fieldsInv)) + } + + fieldsIndexOffset, err = persistFields(fieldsInv, cr, dictLocs) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + return newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, dictLocs, fieldsInv, fieldsMap, nil +} + +// mapFields takes the fieldsInv list and returns a map of fieldName +// to fieldID+1 +func mapFields(fields []string) map[string]uint16 { + rv := make(map[string]uint16, len(fields)) + for 
i, fieldName := range fields { + rv[fieldName] = uint16(i) + 1 + } + return rv +} + +// computeNewDocCount determines how many documents will be in the newly +// merged segment when obsoleted docs are dropped +func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { + var newDocCount uint64 + for segI, segment := range segments { + newDocCount += segment.numDocs + if drops[segI] != nil { + newDocCount -= drops[segI].GetCardinality() + } + } + return newDocCount +} + +func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap, + fieldsInv []string, fieldsMap map[string]uint16, fieldsSame bool, + newDocNumsIn [][]uint64, newSegDocCount uint64, chunkMode uint32, + w *CountHashWriter, closeCh chan struct{}) ([]uint64, uint64, error) { + + var bufMaxVarintLen64 []byte = make([]byte, binary.MaxVarintLen64) + var bufLoc []uint64 + + var postings *PostingsList + var postItr *PostingsIterator + + rv := make([]uint64, len(fieldsInv)) + fieldDvLocsStart := make([]uint64, len(fieldsInv)) + fieldDvLocsEnd := make([]uint64, len(fieldsInv)) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + locEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + + var vellumBuf bytes.Buffer + newVellum, err := vellum.New(&vellumBuf, nil) + if err != nil { + return nil, 0, err + } + + newRoaring := roaring.NewBitmap() + + // for each field + for fieldID, fieldName := range fieldsInv { + + // collect FST iterators from all active segments for this field + var newDocNums [][]uint64 + var drops []*roaring.Bitmap + var dicts []*Dictionary + var itrs []vellum.Iterator + + var segmentsInFocus []*SegmentBase + + for segmentI, segment := range segments { + + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + dict, err2 := 
segment.dictionary(fieldName) + if err2 != nil { + return nil, 0, err2 + } + if dict != nil && dict.fst != nil { + itr, err2 := dict.fst.Iterator(nil, nil) + if err2 != nil && err2 != vellum.ErrIteratorDone { + return nil, 0, err2 + } + if itr != nil { + newDocNums = append(newDocNums, newDocNumsIn[segmentI]) + if dropsIn[segmentI] != nil && !dropsIn[segmentI].IsEmpty() { + drops = append(drops, dropsIn[segmentI]) + } else { + drops = append(drops, nil) + } + dicts = append(dicts, dict) + itrs = append(itrs, itr) + segmentsInFocus = append(segmentsInFocus, segment) + } + } + } + + var prevTerm []byte + + newRoaring.Clear() + + var lastDocNum, lastFreq, lastNorm uint64 + + // determines whether to use "1-hit" encoding optimization + // when a term appears in only 1 doc, with no loc info, + // has freq of 1, and the docNum fits into 31-bits + use1HitEncoding := func(termCardinality uint64) (bool, uint64, uint64) { + if termCardinality == uint64(1) && locEncoder.FinalSize() <= 0 { + docNum := uint64(newRoaring.Minimum()) + if under32Bits(docNum) && docNum == lastDocNum && lastFreq == 1 { + return true, docNum, lastNorm + } + } + return false, 0, 0 + } + + finishTerm := func(term []byte) error { + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := writePostings(newRoaring, + tfEncoder, locEncoder, use1HitEncoding, w, bufMaxVarintLen64) + if err != nil { + return err + } + + if postingsOffset > 0 { + err = newVellum.Insert(term, postingsOffset) + if err != nil { + return err + } + } + + newRoaring.Clear() + + tfEncoder.Reset() + locEncoder.Reset() + + lastDocNum = 0 + lastFreq = 0 + lastNorm = 0 + + return nil + } + + enumerator, err := newEnumerator(itrs) + + for err == nil { + term, itrI, postingsOffset := enumerator.Current() + + if !bytes.Equal(prevTerm, term) { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + // if the term changed, write out the info collected + // for the previous term + err = 
finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + } + if !bytes.Equal(prevTerm, term) || prevTerm == nil { + // compute cardinality of field-term in new seg + var newCard uint64 + lowItrIdxs, lowItrVals := enumerator.GetLowIdxsAndValues() + for i, idx := range lowItrIdxs { + pl, err := dicts[idx].postingsListFromOffset(lowItrVals[i], drops[idx], nil) + if err != nil { + return nil, 0, err + } + newCard += pl.Count() + } + // compute correct chunk size with this + chunkSize, err := getChunkSize(chunkMode, newCard, newSegDocCount) + if err != nil { + return nil, 0, err + } + // update encoders chunk + tfEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + locEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + } + + postings, err = dicts[itrI].postingsListFromOffset( + postingsOffset, drops[itrI], postings) + if err != nil { + return nil, 0, err + } + + postItr = postings.iterator(true, true, true, postItr) + + // can no longer optimize by copying, since chunk factor could have changed + lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs( + fieldsMap, term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder, bufLoc) + + if err != nil { + return nil, 0, err + } + + prevTerm = prevTerm[:0] // copy to prevTerm in case Next() reuses term mem + prevTerm = append(prevTerm, term...) 
+ + err = enumerator.Next() + } + if err != vellum.ErrIteratorDone { + return nil, 0, err + } + + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + + dictOffset := uint64(w.Count()) + + err = newVellum.Close() + if err != nil { + return nil, 0, err + } + vellumData := vellumBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(bufMaxVarintLen64, uint64(len(vellumData))) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return nil, 0, err + } + + // write this vellum to disk + _, err = w.Write(vellumData) + if err != nil { + return nil, 0, err + } + + rv[fieldID] = dictOffset + + // get the field doc value offset (start) + fieldDvLocsStart[fieldID] = uint64(w.Count()) + + // update the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, 0, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, newSegDocCount-1, w, true) + + fdvReadersAvailable := false + var dvIterClone *docValueReader + for segmentI, segment := range segmentsInFocus { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + fieldIDPlus1 := uint16(segment.fieldsMap[fieldName]) + if dvIter, exists := segment.fieldDvReaders[fieldIDPlus1-1]; exists && + dvIter != nil { + fdvReadersAvailable = true + dvIterClone = dvIter.cloneInto(dvIterClone) + err = dvIterClone.iterateAllDocValues(segment, func(docNum uint64, terms []byte) error { + if newDocNums[segmentI][docNum] == docDropped { + return nil + } + err := fdvEncoder.Add(newDocNums[segmentI][docNum], terms) + if err != nil { + return err + } + return nil + }) + if err != nil { + return nil, 0, err + } + } + } + + if fdvReadersAvailable { + err = fdvEncoder.Close() + if err != nil { + return nil, 0, err + } + + // persist the doc value details for this field + _, err = fdvEncoder.Write() + if err != nil { + return nil, 0, err + } + + 
// get the field doc value offset (end) + fieldDvLocsEnd[fieldID] = uint64(w.Count()) + } else { + fieldDvLocsStart[fieldID] = fieldNotUninverted + fieldDvLocsEnd[fieldID] = fieldNotUninverted + } + + // reset vellum buffer and vellum builder + vellumBuf.Reset() + err = newVellum.Reset(&vellumBuf) + if err != nil { + return nil, 0, err + } + } + + fieldDvLocsOffset := uint64(w.Count()) + + buf := bufMaxVarintLen64 + for i := 0; i < len(fieldDvLocsStart); i++ { + n := binary.PutUvarint(buf, fieldDvLocsStart[i]) + _, err := w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + n = binary.PutUvarint(buf, fieldDvLocsEnd[i]) + _, err = w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + } + + return rv, fieldDvLocsOffset, nil +} + +func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder, bufLoc []uint64) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, bufLocOut []uint64, err error) { + next, err := postItr.Next() + for next != nil && err == nil { + hitNewDocNum := newDocNums[next.Number()] + if hitNewDocNum == docDropped { + return 0, 0, 0, nil, fmt.Errorf("see hit with dropped docNum") + } + + newRoaring.Add(uint32(hitNewDocNum)) + + nextFreq := next.Frequency() + nextNorm := uint64(math.Float32bits(float32(next.Norm()))) + + locs := next.Locations() + + err = tfEncoder.Add(hitNewDocNum, + encodeFreqHasLocs(nextFreq, len(locs) > 0), nextNorm) + if err != nil { + return 0, 0, 0, nil, err + } + + if len(locs) > 0 { + numBytesLocs := 0 + for _, loc := range locs { + ap := loc.ArrayPositions() + numBytesLocs += totalUvarintBytes(uint64(fieldsMap[loc.Field()]-1), + loc.Pos(), loc.Start(), loc.End(), uint64(len(ap)), ap) + } + + err = locEncoder.Add(hitNewDocNum, uint64(numBytesLocs)) + if err != nil { + return 0, 0, 0, nil, err + } + + for _, loc := range locs { + ap := loc.ArrayPositions() + if 
cap(bufLoc) < 5+len(ap) { + bufLoc = make([]uint64, 0, 5+len(ap)) + } + args := bufLoc[0:5] + args[0] = uint64(fieldsMap[loc.Field()] - 1) + args[1] = loc.Pos() + args[2] = loc.Start() + args[3] = loc.End() + args[4] = uint64(len(ap)) + args = append(args, ap...) + err = locEncoder.Add(hitNewDocNum, args...) + if err != nil { + return 0, 0, 0, nil, err + } + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + next, err = postItr.Next() + } + + return lastDocNum, lastFreq, lastNorm, bufLoc, err +} + +func writePostings(postings *roaring.Bitmap, tfEncoder, locEncoder *chunkedIntCoder, + use1HitEncoding func(uint64) (bool, uint64, uint64), + w *CountHashWriter, bufMaxVarintLen64 []byte) ( + offset uint64, err error) { + termCardinality := postings.GetCardinality() + if termCardinality <= 0 { + return 0, nil + } + + if use1HitEncoding != nil { + encodeAs1Hit, docNum1Hit, normBits1Hit := use1HitEncoding(termCardinality) + if encodeAs1Hit { + return FSTValEncode1Hit(docNum1Hit, normBits1Hit), nil + } + } + + var tfOffset uint64 + tfOffset, _, err = tfEncoder.writeAt(w) + if err != nil { + return 0, err + } + + var locOffset uint64 + locOffset, _, err = locEncoder.writeAt(w) + if err != nil { + return 0, err + } + + postingsOffset := uint64(w.Count()) + + n := binary.PutUvarint(bufMaxVarintLen64, tfOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + n = binary.PutUvarint(bufMaxVarintLen64, locOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + _, err = writeRoaringWithLen(postings, w, bufMaxVarintLen64) + if err != nil { + return 0, err + } + + return postingsOffset, nil +} + +type varintEncoder func(uint64) (int, error) + +func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap, + fieldsMap map[string]uint16, fieldsInv []string, fieldsSame bool, newSegDocCount uint64, + w *CountHashWriter, closeCh chan struct{}) (uint64, [][]uint64, error) { + 
var rv [][]uint64 // The remapped or newDocNums for each segment. + + var newDocNum uint64 + + var curr int + var data, compressed []byte + var metaBuf bytes.Buffer + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return metaBuf.Write(varBuf[:wb]) + } + + vals := make([][][]byte, len(fieldsInv)) + typs := make([][]byte, len(fieldsInv)) + poss := make([][][]uint64, len(fieldsInv)) + + var posBuf []uint64 + + docNumOffsets := make([]uint64, newSegDocCount) + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + + // for each segment + for segI, segment := range segments { + // check for the closure in meantime + if isClosed(closeCh) { + return 0, nil, seg.ErrClosed + } + + segNewDocNums := make([]uint64, segment.numDocs) + + dropsI := drops[segI] + + // optimize when the field mapping is the same across all + // segments and there are no deletions, via byte-copying + // of stored docs bytes directly to the writer + if fieldsSame && (dropsI == nil || dropsI.GetCardinality() == 0) { + err := segment.copyStoredDocs(newDocNum, docNumOffsets, w) + if err != nil { + return 0, nil, err + } + + for i := uint64(0); i < segment.numDocs; i++ { + segNewDocNums[i] = newDocNum + newDocNum++ + } + rv = append(rv, segNewDocNums) + + continue + } + + // for each doc num + for docNum := uint64(0); docNum < segment.numDocs; docNum++ { + // TODO: roaring's API limits docNums to 32-bits? 
+ if dropsI != nil && dropsI.Contains(uint32(docNum)) { + segNewDocNums[docNum] = docDropped + continue + } + + segNewDocNums[docNum] = newDocNum + + curr = 0 + metaBuf.Reset() + data = data[:0] + + posTemp := posBuf + + // collect all the data + for i := 0; i < len(fieldsInv); i++ { + vals[i] = vals[i][:0] + typs[i] = typs[i][:0] + poss[i] = poss[i][:0] + } + err := segment.visitDocument(vdc, docNum, func(field string, typ byte, value []byte, pos []uint64) bool { + fieldID := int(fieldsMap[field]) - 1 + vals[fieldID] = append(vals[fieldID], value) + typs[fieldID] = append(typs[fieldID], typ) + + // copy array positions to preserve them beyond the scope of this callback + var curPos []uint64 + if len(pos) > 0 { + if cap(posTemp) < len(pos) { + posBuf = make([]uint64, len(pos)*len(fieldsInv)) + posTemp = posBuf + } + curPos = posTemp[0:len(pos)] + copy(curPos, pos) + posTemp = posTemp[len(pos):] + } + poss[fieldID] = append(poss[fieldID], curPos) + + return true + }) + if err != nil { + return 0, nil, err + } + + // _id field special case optimizes ExternalID() lookups + idFieldVal := vals[uint16(0)][0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, nil, err + } + + // now walk the non-"_id" fields in order + for fieldID := 1; fieldID < len(fieldsInv); fieldID++ { + storedFieldValues := vals[fieldID] + + stf := typs[fieldID] + spf := poss[fieldID] + + var err2 error + curr, data, err2 = persistStoredFieldValues(fieldID, + storedFieldValues, stf, spf, curr, metaEncode, data) + if err2 != nil { + return 0, nil, err2 + } + } + + metaBytes := metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + // record where we're about to start writing + docNumOffsets[newDocNum] = uint64(w.Count()) + + // write out the meta len and compressed data len + _, err = writeUvarints(w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, nil, err + } + // now write the meta + _, err 
= w.Write(metaBytes) + if err != nil { + return 0, nil, err + } + // now write the _id field val (counted as part of the 'compressed' data) + _, err = w.Write(idFieldVal) + if err != nil { + return 0, nil, err + } + // now write the compressed data + _, err = w.Write(compressed) + if err != nil { + return 0, nil, err + } + + newDocNum++ + } + + rv = append(rv, segNewDocNums) + } + + // return value is the start of the stored index + storedIndexOffset := uint64(w.Count()) + + // now write out the stored doc index + for _, docNumOffset := range docNumOffsets { + err := binary.Write(w, binary.BigEndian, docNumOffset) + if err != nil { + return 0, nil, err + } + } + + return storedIndexOffset, rv, nil +} + +// copyStoredDocs writes out a segment's stored doc info, optimized by +// using a single Write() call for the entire set of bytes. The +// newDocNumOffsets is filled with the new offsets for each doc. +func (s *SegmentBase) copyStoredDocs(newDocNum uint64, newDocNumOffsets []uint64, + w *CountHashWriter) error { + if s.numDocs <= 0 { + return nil + } + + indexOffset0, storedOffset0, _, _, _ := + s.getDocStoredOffsets(0) // the segment's first doc + + indexOffsetN, storedOffsetN, readN, metaLenN, dataLenN := + s.getDocStoredOffsets(s.numDocs - 1) // the segment's last doc + + storedOffset0New := uint64(w.Count()) + + storedBytes := s.mem[storedOffset0 : storedOffsetN+readN+metaLenN+dataLenN] + _, err := w.Write(storedBytes) + if err != nil { + return err + } + + // remap the storedOffset's for the docs into new offsets relative + // to storedOffset0New, filling the given docNumOffsetsOut array + for indexOffset := indexOffset0; indexOffset <= indexOffsetN; indexOffset += 8 { + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + storedOffsetNew := storedOffset - storedOffset0 + storedOffset0New + newDocNumOffsets[newDocNum] = storedOffsetNew + newDocNum += 1 + } + + return nil +} + +// mergeFields builds a unified list of fields used across 
all the +// input segments, and computes whether the fields are the same across +// segments (which depends on fields to be sorted in the same way +// across segments) +func mergeFields(segments []*SegmentBase) (bool, []string) { + fieldsSame := true + + var segment0Fields []string + if len(segments) > 0 { + segment0Fields = segments[0].Fields() + } + + fieldsExist := map[string]struct{}{} + for _, segment := range segments { + fields := segment.Fields() + for fieldi, field := range fields { + fieldsExist[field] = struct{}{} + if len(segment0Fields) != len(fields) || segment0Fields[fieldi] != field { + fieldsSame = false + } + } + } + + rv := make([]string, 0, len(fieldsExist)) + // ensure _id stays first + rv = append(rv, "_id") + for k := range fieldsExist { + if k != "_id" { + rv = append(rv, k) + } + } + + sort.Strings(rv[1:]) // leave _id as first + + return fieldsSame, rv +} + +func isClosed(closeCh chan struct{}) bool { + select { + case <-closeCh: + return true + default: + return false + } +} diff --git a/vendor/github.com/blevesearch/zap/v13/new.go b/vendor/github.com/blevesearch/zap/v13/new.go new file mode 100644 index 0000000..9815818 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/new.go @@ -0,0 +1,860 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "math" + "sort" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var NewSegmentBufferNumResultsBump int = 100 +var NewSegmentBufferNumResultsFactor float64 = 1.0 +var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0 + +// ValidateDocFields can be set by applications to perform additional checks +// on fields in a document being added to a new segment, by default it does +// nothing. +// This API is experimental and may be removed at any time. +var ValidateDocFields = func(field document.Field) error { + return nil +} + +// AnalysisResultsToSegmentBase produces an in-memory zap-encoded +// SegmentBase from analysis results +func (z *ZapPlugin) New(results []*index.AnalysisResult) ( + segment.Segment, uint64, error) { + return z.newWithChunkMode(results, DefaultChunkMode) +} + +func (*ZapPlugin) newWithChunkMode(results []*index.AnalysisResult, + chunkMode uint32) (segment.Segment, uint64, error) { + s := interimPool.Get().(*interim) + + var br bytes.Buffer + if s.lastNumDocs > 0 { + // use previous results to initialize the buf with an estimate + // size, but note that the interim instance comes from a + // global interimPool, so multiple scorch instances indexing + // different docs can lead to low quality estimates + estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * + NewSegmentBufferNumResultsFactor) + estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * + NewSegmentBufferAvgBytesPerDocFactor) + br.Grow(estimateAvgBytesPerDoc * estimateNumResults) + } + + s.results = results + s.chunkMode = chunkMode + s.w = NewCountHashWriter(&br) + + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, + err := 
s.convert() + if err != nil { + return nil, uint64(0), err + } + + sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkMode, + s.FieldsMap, s.FieldsInv, uint64(len(results)), + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) + + if err == nil && s.reset() == nil { + s.lastNumDocs = len(results) + s.lastOutSize = len(br.Bytes()) + interimPool.Put(s) + } + + return sb, uint64(len(br.Bytes())), err +} + +var interimPool = sync.Pool{New: func() interface{} { return &interim{} }} + +// interim holds temporary working data used while converting from +// analysis results to a zap-encoded segment +type interim struct { + results []*index.AnalysisResult + + chunkMode uint32 + + w *CountHashWriter + + // FieldsMap adds 1 to field id to avoid zero value issues + // name -> field id + 1 + FieldsMap map[string]uint16 + + // FieldsInv is the inverse of FieldsMap + // field id -> name + FieldsInv []string + + // Term dictionaries for each field + // field id -> term -> postings list id + 1 + Dicts []map[string]uint64 + + // Terms for each field, where terms are sorted ascending + // field id -> []term + DictKeys [][]string + + // Fields whose IncludeDocValues is true + // field id -> bool + IncludeDocValues []bool + + // postings id -> bitmap of docNums + Postings []*roaring.Bitmap + + // postings id -> freq/norm's, one for each docNum in postings + FreqNorms [][]interimFreqNorm + freqNormsBacking []interimFreqNorm + + // postings id -> locs, one for each freq + Locs [][]interimLoc + locsBacking []interimLoc + + numTermsPerPostingsList []int // key is postings list id + numLocsPerPostingsList []int // key is postings list id + + builder *vellum.Builder + builderBuf bytes.Buffer + + metaBuf bytes.Buffer + + tmp0 []byte + tmp1 []byte + + lastNumDocs int + lastOutSize int +} + +func (s *interim) reset() (err error) { + s.results = nil + s.chunkMode = 0 + s.w = nil + s.FieldsMap = nil + s.FieldsInv = nil + for i := range s.Dicts { + s.Dicts[i] = nil + } + 
s.Dicts = s.Dicts[:0] + for i := range s.DictKeys { + s.DictKeys[i] = s.DictKeys[i][:0] + } + s.DictKeys = s.DictKeys[:0] + for i := range s.IncludeDocValues { + s.IncludeDocValues[i] = false + } + s.IncludeDocValues = s.IncludeDocValues[:0] + for _, idn := range s.Postings { + idn.Clear() + } + s.Postings = s.Postings[:0] + s.FreqNorms = s.FreqNorms[:0] + for i := range s.freqNormsBacking { + s.freqNormsBacking[i] = interimFreqNorm{} + } + s.freqNormsBacking = s.freqNormsBacking[:0] + s.Locs = s.Locs[:0] + for i := range s.locsBacking { + s.locsBacking[i] = interimLoc{} + } + s.locsBacking = s.locsBacking[:0] + s.numTermsPerPostingsList = s.numTermsPerPostingsList[:0] + s.numLocsPerPostingsList = s.numLocsPerPostingsList[:0] + s.builderBuf.Reset() + if s.builder != nil { + err = s.builder.Reset(&s.builderBuf) + } + s.metaBuf.Reset() + s.tmp0 = s.tmp0[:0] + s.tmp1 = s.tmp1[:0] + s.lastNumDocs = 0 + s.lastOutSize = 0 + + return err +} + +func (s *interim) grabBuf(size int) []byte { + buf := s.tmp0 + if cap(buf) < size { + buf = make([]byte, size) + s.tmp0 = buf + } + return buf[0:size] +} + +type interimStoredField struct { + vals [][]byte + typs []byte + arrayposs [][]uint64 // array positions +} + +type interimFreqNorm struct { + freq uint64 + norm float32 + numLocs int +} + +type interimLoc struct { + fieldID uint16 + pos uint64 + start uint64 + end uint64 + arrayposs []uint64 +} + +func (s *interim) convert() (uint64, uint64, uint64, []uint64, error) { + s.FieldsMap = map[string]uint16{} + + s.getOrDefineField("_id") // _id field is fieldID 0 + + for _, result := range s.results { + for _, field := range result.Document.CompositeFields { + s.getOrDefineField(field.Name()) + } + for _, field := range result.Document.Fields { + s.getOrDefineField(field.Name()) + } + } + + sort.Strings(s.FieldsInv[1:]) // keep _id as first field + + for fieldID, fieldName := range s.FieldsInv { + s.FieldsMap[fieldName] = uint16(fieldID + 1) + } + + if cap(s.IncludeDocValues) >= 
len(s.FieldsInv) { + s.IncludeDocValues = s.IncludeDocValues[:len(s.FieldsInv)] + } else { + s.IncludeDocValues = make([]bool, len(s.FieldsInv)) + } + + s.prepareDicts() + + for _, dict := range s.DictKeys { + sort.Strings(dict) + } + + s.processDocuments() + + storedIndexOffset, err := s.writeStoredFields() + if err != nil { + return 0, 0, 0, nil, err + } + + var fdvIndexOffset uint64 + var dictOffsets []uint64 + + if len(s.results) > 0 { + fdvIndexOffset, dictOffsets, err = s.writeDicts() + if err != nil { + return 0, 0, 0, nil, err + } + } else { + dictOffsets = make([]uint64, len(s.FieldsInv)) + } + + fieldsIndexOffset, err := persistFields(s.FieldsInv, s.w, dictOffsets) + if err != nil { + return 0, 0, 0, nil, err + } + + return storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, nil +} + +func (s *interim) getOrDefineField(fieldName string) int { + fieldIDPlus1, exists := s.FieldsMap[fieldName] + if !exists { + fieldIDPlus1 = uint16(len(s.FieldsInv) + 1) + s.FieldsMap[fieldName] = fieldIDPlus1 + s.FieldsInv = append(s.FieldsInv, fieldName) + + s.Dicts = append(s.Dicts, make(map[string]uint64)) + + n := len(s.DictKeys) + if n < cap(s.DictKeys) { + s.DictKeys = s.DictKeys[:n+1] + s.DictKeys[n] = s.DictKeys[n][:0] + } else { + s.DictKeys = append(s.DictKeys, []string(nil)) + } + } + + return int(fieldIDPlus1 - 1) +} + +// fill Dicts and DictKeys from analysis results +func (s *interim) prepareDicts() { + var pidNext int + + var totTFs int + var totLocs int + + visitField := func(fieldID uint16, tfs analysis.TokenFrequencies) { + dict := s.Dicts[fieldID] + dictKeys := s.DictKeys[fieldID] + + for term, tf := range tfs { + pidPlus1, exists := dict[term] + if !exists { + pidNext++ + pidPlus1 = uint64(pidNext) + + dict[term] = pidPlus1 + dictKeys = append(dictKeys, term) + + s.numTermsPerPostingsList = append(s.numTermsPerPostingsList, 0) + s.numLocsPerPostingsList = append(s.numLocsPerPostingsList, 0) + } + + pid := pidPlus1 - 1 + + 
s.numTermsPerPostingsList[pid] += 1 + s.numLocsPerPostingsList[pid] += len(tf.Locations) + + totLocs += len(tf.Locations) + } + + totTFs += len(tfs) + + s.DictKeys[fieldID] = dictKeys + } + + for _, result := range s.results { + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + _, tf := field.Analyze() + visitField(fieldID, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + tf := result.Analyzed[i] + visitField(fieldID, tf) + } + } + + numPostingsLists := pidNext + + if cap(s.Postings) >= numPostingsLists { + s.Postings = s.Postings[:numPostingsLists] + } else { + postings := make([]*roaring.Bitmap, numPostingsLists) + copy(postings, s.Postings[:cap(s.Postings)]) + for i := 0; i < numPostingsLists; i++ { + if postings[i] == nil { + postings[i] = roaring.New() + } + } + s.Postings = postings + } + + if cap(s.FreqNorms) >= numPostingsLists { + s.FreqNorms = s.FreqNorms[:numPostingsLists] + } else { + s.FreqNorms = make([][]interimFreqNorm, numPostingsLists) + } + + if cap(s.freqNormsBacking) >= totTFs { + s.freqNormsBacking = s.freqNormsBacking[:totTFs] + } else { + s.freqNormsBacking = make([]interimFreqNorm, totTFs) + } + + freqNormsBacking := s.freqNormsBacking + for pid, numTerms := range s.numTermsPerPostingsList { + s.FreqNorms[pid] = freqNormsBacking[0:0] + freqNormsBacking = freqNormsBacking[numTerms:] + } + + if cap(s.Locs) >= numPostingsLists { + s.Locs = s.Locs[:numPostingsLists] + } else { + s.Locs = make([][]interimLoc, numPostingsLists) + } + + if cap(s.locsBacking) >= totLocs { + s.locsBacking = s.locsBacking[:totLocs] + } else { + s.locsBacking = make([]interimLoc, totLocs) + } + + locsBacking := s.locsBacking + for pid, numLocs := range s.numLocsPerPostingsList { + s.Locs[pid] = locsBacking[0:0] + locsBacking = locsBacking[numLocs:] + } +} + +func (s *interim) 
processDocuments() { + numFields := len(s.FieldsInv) + reuseFieldLens := make([]int, numFields) + reuseFieldTFs := make([]analysis.TokenFrequencies, numFields) + + for docNum, result := range s.results { + for i := 0; i < numFields; i++ { // clear these for reuse + reuseFieldLens[i] = 0 + reuseFieldTFs[i] = nil + } + + s.processDocument(uint64(docNum), result, + reuseFieldLens, reuseFieldTFs) + } +} + +func (s *interim) processDocument(docNum uint64, + result *index.AnalysisResult, + fieldLens []int, fieldTFs []analysis.TokenFrequencies) { + visitField := func(fieldID uint16, fieldName string, + ln int, tf analysis.TokenFrequencies) { + fieldLens[fieldID] += ln + + existingFreqs := fieldTFs[fieldID] + if existingFreqs != nil { + existingFreqs.MergeAll(fieldName, tf) + } else { + fieldTFs[fieldID] = tf + } + } + + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln, tf := field.Analyze() + visitField(fieldID, field.Name(), ln, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln := result.Length[i] + tf := result.Analyzed[i] + visitField(fieldID, field.Name(), ln, tf) + } + + // now that it's been rolled up into fieldTFs, walk that + for fieldID, tfs := range fieldTFs { + dict := s.Dicts[fieldID] + norm := float32(1.0 / math.Sqrt(float64(fieldLens[fieldID]))) + + for term, tf := range tfs { + pid := dict[term] - 1 + bs := s.Postings[pid] + bs.Add(uint32(docNum)) + + s.FreqNorms[pid] = append(s.FreqNorms[pid], + interimFreqNorm{ + freq: uint64(tf.Frequency()), + norm: norm, + numLocs: len(tf.Locations), + }) + + if len(tf.Locations) > 0 { + locs := s.Locs[pid] + + for _, loc := range tf.Locations { + var locf = uint16(fieldID) + if loc.Field != "" { + locf = uint16(s.getOrDefineField(loc.Field)) + } + var arrayposs []uint64 + if len(loc.ArrayPositions) > 0 { + arrayposs = loc.ArrayPositions 
+ } + locs = append(locs, interimLoc{ + fieldID: locf, + pos: uint64(loc.Position), + start: uint64(loc.Start), + end: uint64(loc.End), + arrayposs: arrayposs, + }) + } + + s.Locs[pid] = locs + } + } + } +} + +func (s *interim) writeStoredFields() ( + storedIndexOffset uint64, err error) { + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return s.metaBuf.Write(varBuf[:wb]) + } + + data, compressed := s.tmp0[:0], s.tmp1[:0] + defer func() { s.tmp0, s.tmp1 = data, compressed }() + + // keyed by docNum + docStoredOffsets := make([]uint64, len(s.results)) + + // keyed by fieldID, for the current doc in the loop + docStoredFields := map[uint16]interimStoredField{} + + for docNum, result := range s.results { + for fieldID := range docStoredFields { // reset for next doc + delete(docStoredFields, fieldID) + } + + for _, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + + opts := field.Options() + + if opts.IsStored() { + isf := docStoredFields[fieldID] + isf.vals = append(isf.vals, field.Value()) + isf.typs = append(isf.typs, encodeFieldType(field)) + isf.arrayposs = append(isf.arrayposs, field.ArrayPositions()) + docStoredFields[fieldID] = isf + } + + if opts.IncludeDocValues() { + s.IncludeDocValues[fieldID] = true + } + + err := ValidateDocFields(field) + if err != nil { + return 0, err + } + } + + var curr int + + s.metaBuf.Reset() + data = data[:0] + + // _id field special case optimizes ExternalID() lookups + idFieldVal := docStoredFields[uint16(0)].vals[0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, err + } + + // handle non-"_id" fields + for fieldID := 1; fieldID < len(s.FieldsInv); fieldID++ { + isf, exists := docStoredFields[uint16(fieldID)] + if exists { + curr, data, err = persistStoredFieldValues( + fieldID, isf.vals, isf.typs, isf.arrayposs, + curr, metaEncode, data) + if err != nil { + return 
0, err + } + } + } + + metaBytes := s.metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + docStoredOffsets[docNum] = uint64(s.w.Count()) + + _, err := writeUvarints(s.w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, err + } + + _, err = s.w.Write(metaBytes) + if err != nil { + return 0, err + } + + _, err = s.w.Write(idFieldVal) + if err != nil { + return 0, err + } + + _, err = s.w.Write(compressed) + if err != nil { + return 0, err + } + } + + storedIndexOffset = uint64(s.w.Count()) + + for _, docStoredOffset := range docStoredOffsets { + err = binary.Write(s.w, binary.BigEndian, docStoredOffset) + if err != nil { + return 0, err + } + } + + return storedIndexOffset, nil +} + +func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) { + dictOffsets = make([]uint64, len(s.FieldsInv)) + + fdvOffsetsStart := make([]uint64, len(s.FieldsInv)) + fdvOffsetsEnd := make([]uint64, len(s.FieldsInv)) + + buf := s.grabBuf(binary.MaxVarintLen64) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + locEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + + var docTermMap [][]byte + + if s.builder == nil { + s.builder, err = vellum.New(&s.builderBuf, nil) + if err != nil { + return 0, nil, err + } + } + + for fieldID, terms := range s.DictKeys { + if cap(docTermMap) < len(s.results) { + docTermMap = make([][]byte, len(s.results)) + } else { + docTermMap = docTermMap[0:len(s.results)] + for docNum := range docTermMap { // reset the docTermMap + docTermMap[docNum] = docTermMap[docNum][:0] + } + } + + dict := s.Dicts[fieldID] + + for _, term := range terms { // terms are already sorted + pid := dict[term] - 1 + + postingsBS := s.Postings[pid] + + freqNorms := 
s.FreqNorms[pid] + freqNormOffset := 0 + + locs := s.Locs[pid] + locOffset := 0 + + chunkSize, err := getChunkSize(s.chunkMode, postingsBS.GetCardinality(), uint64(len(s.results))) + if err != nil { + return 0, nil, err + } + tfEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + locEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + + postingsItr := postingsBS.Iterator() + for postingsItr.HasNext() { + docNum := uint64(postingsItr.Next()) + + freqNorm := freqNorms[freqNormOffset] + + err = tfEncoder.Add(docNum, + encodeFreqHasLocs(freqNorm.freq, freqNorm.numLocs > 0), + uint64(math.Float32bits(freqNorm.norm))) + if err != nil { + return 0, nil, err + } + + if freqNorm.numLocs > 0 { + numBytesLocs := 0 + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + numBytesLocs += totalUvarintBytes( + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs)), loc.arrayposs) + } + + err = locEncoder.Add(docNum, uint64(numBytesLocs)) + if err != nil { + return 0, nil, err + } + + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + err = locEncoder.Add(docNum, + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs))) + if err != nil { + return 0, nil, err + } + + err = locEncoder.Add(docNum, loc.arrayposs...) 
+ if err != nil { + return 0, nil, err + } + } + + locOffset += freqNorm.numLocs + } + + freqNormOffset++ + + docTermMap[docNum] = append( + append(docTermMap[docNum], term...), + termSeparator) + } + + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := + writePostings(postingsBS, tfEncoder, locEncoder, nil, s.w, buf) + if err != nil { + return 0, nil, err + } + + if postingsOffset > uint64(0) { + err = s.builder.Insert([]byte(term), postingsOffset) + if err != nil { + return 0, nil, err + } + } + + tfEncoder.Reset() + locEncoder.Reset() + } + + err = s.builder.Close() + if err != nil { + return 0, nil, err + } + + // record where this dictionary starts + dictOffsets[fieldID] = uint64(s.w.Count()) + + vellumData := s.builderBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(buf, uint64(len(vellumData))) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + + // write this vellum to disk + _, err = s.w.Write(vellumData) + if err != nil { + return 0, nil, err + } + + // reset vellum for reuse + s.builderBuf.Reset() + + err = s.builder.Reset(&s.builderBuf) + if err != nil { + return 0, nil, err + } + + // write the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return 0, nil, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, uint64(len(s.results)-1), s.w, false) + if s.IncludeDocValues[fieldID] { + for docNum, docTerms := range docTermMap { + if len(docTerms) > 0 { + err = fdvEncoder.Add(uint64(docNum), docTerms) + if err != nil { + return 0, nil, err + } + } + } + err = fdvEncoder.Close() + if err != nil { + return 0, nil, err + } + + fdvOffsetsStart[fieldID] = uint64(s.w.Count()) + + _, err = fdvEncoder.Write() + if err != nil { + return 0, nil, err + } + + fdvOffsetsEnd[fieldID] = uint64(s.w.Count()) + + fdvEncoder.Reset() + } else { + fdvOffsetsStart[fieldID] = fieldNotUninverted + 
fdvOffsetsEnd[fieldID] = fieldNotUninverted + } + } + + fdvIndexOffset = uint64(s.w.Count()) + + for i := 0; i < len(fdvOffsetsStart); i++ { + n := binary.PutUvarint(buf, fdvOffsetsStart[i]) + _, err := s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + n = binary.PutUvarint(buf, fdvOffsetsEnd[i]) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + } + + return fdvIndexOffset, dictOffsets, nil +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType = 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +// returns the total # of bytes needed to encode the given uint64's +// into binary.PutUVarint() encoding +func totalUvarintBytes(a, b, c, d, e uint64, more []uint64) (n int) { + n = numUvarintBytes(a) + n += numUvarintBytes(b) + n += numUvarintBytes(c) + n += numUvarintBytes(d) + n += numUvarintBytes(e) + for _, v := range more { + n += numUvarintBytes(v) + } + return n +} + +// returns # of bytes needed to encode x in binary.PutUvarint() encoding +func numUvarintBytes(x uint64) (n int) { + for x >= 0x80 { + x >>= 7 + n++ + } + return n + 1 +} diff --git a/vendor/github.com/blevesearch/zap/v13/plugin.go b/vendor/github.com/blevesearch/zap/v13/plugin.go new file mode 100644 index 0000000..38a0638 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/plugin.go @@ -0,0 +1,37 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// ZapPlugin implements the Plugin interface of +// the blevesearch/bleve/index/scorch/segment pkg +type ZapPlugin struct{} + +func (*ZapPlugin) Type() string { + return Type +} + +func (*ZapPlugin) Version() uint32 { + return Version +} + +// Plugin returns an instance segment.Plugin for use +// by the Scorch indexing scheme +func Plugin() segment.Plugin { + return &ZapPlugin{} +} diff --git a/vendor/github.com/blevesearch/zap/v13/posting.go b/vendor/github.com/blevesearch/zap/v13/posting.go new file mode 100644 index 0000000..3a6ee54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/posting.go @@ -0,0 +1,798 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + "math" + "reflect" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePostingsList int +var reflectStaticSizePostingsIterator int +var reflectStaticSizePosting int +var reflectStaticSizeLocation int + +func init() { + var pl PostingsList + reflectStaticSizePostingsList = int(reflect.TypeOf(pl).Size()) + var pi PostingsIterator + reflectStaticSizePostingsIterator = int(reflect.TypeOf(pi).Size()) + var p Posting + reflectStaticSizePosting = int(reflect.TypeOf(p).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +// FST or vellum value (uint64) encoding is determined by the top two +// highest-order or most significant bits... +// +// encoding : MSB +// name : 63 62 61...to...bit #0 (LSB) +// ----------+---+---+--------------------------------------------------- +// general : 0 | 0 | 62-bits of postingsOffset. +// ~ : 0 | 1 | reserved for future. +// 1-hit : 1 | 0 | 31-bits of positive float31 norm | 31-bits docNum. +// ~ : 1 | 1 | reserved for future. +// +// Encoding "general" is able to handle all cases, where the +// postingsOffset points to more information about the postings for +// the term. +// +// Encoding "1-hit" is used to optimize a commonly seen case when a +// term has only a single hit. For example, a term in the _id field +// will have only 1 hit. The "1-hit" encoding is used for a term +// in a field when... +// +// - term vector info is disabled for that field; +// - and, the term appears in only a single doc for that field; +// - and, the term's freq is exactly 1 in that single doc for that field; +// - and, the docNum must fit into 31-bits; +// +// Otherwise, the "general" encoding is used instead. 
+// +// In the "1-hit" encoding, the field in that single doc may have +// other terms, which is supported in the "1-hit" encoding by the +// positive float31 norm. + +const FSTValEncodingMask = uint64(0xc000000000000000) +const FSTValEncodingGeneral = uint64(0x0000000000000000) +const FSTValEncoding1Hit = uint64(0x8000000000000000) + +func FSTValEncode1Hit(docNum uint64, normBits uint64) uint64 { + return FSTValEncoding1Hit | ((mask31Bits & normBits) << 31) | (mask31Bits & docNum) +} + +func FSTValDecode1Hit(v uint64) (docNum uint64, normBits uint64) { + return (mask31Bits & v), (mask31Bits & (v >> 31)) +} + +const mask31Bits = uint64(0x000000007fffffff) + +func under32Bits(x uint64) bool { + return x <= mask31Bits +} + +const DocNum1HitFinished = math.MaxUint64 + +var NormBits1Hit = uint64(math.Float32bits(float32(1))) + +// PostingsList is an in-memory representation of a postings list +type PostingsList struct { + sb *SegmentBase + postingsOffset uint64 + freqOffset uint64 + locOffset uint64 + postings *roaring.Bitmap + except *roaring.Bitmap + + // when normBits1Hit != 0, then this postings list came from a + // 1-hit encoding, and only the docNum1Hit & normBits1Hit apply + docNum1Hit uint64 + normBits1Hit uint64 +} + +// represents an immutable, empty postings list +var emptyPostingsList = &PostingsList{} + +func (p *PostingsList) Size() int { + sizeInBytes := reflectStaticSizePostingsList + size.SizeOfPtr + + if p.except != nil { + sizeInBytes += int(p.except.GetSizeInBytes()) + } + + return sizeInBytes +} + +func (p *PostingsList) OrInto(receiver *roaring.Bitmap) { + if p.normBits1Hit != 0 { + receiver.Add(uint32(p.docNum1Hit)) + return + } + + if p.postings != nil { + receiver.Or(p.postings) + } +} + +// Iterator returns an iterator for this postings list +func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, + prealloc segment.PostingsIterator) segment.PostingsIterator { + if p.normBits1Hit == 0 && p.postings == nil { + return 
emptyPostingsIterator + } + + var preallocPI *PostingsIterator + pi, ok := prealloc.(*PostingsIterator) + if ok && pi != nil { + preallocPI = pi + } + if preallocPI == emptyPostingsIterator { + preallocPI = nil + } + + return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) +} + +func (p *PostingsList) iterator(includeFreq, includeNorm, includeLocs bool, + rv *PostingsIterator) *PostingsIterator { + if rv == nil { + rv = &PostingsIterator{} + } else { + freqNormReader := rv.freqNormReader + if freqNormReader != nil { + freqNormReader.reset() + } + + locReader := rv.locReader + if locReader != nil { + locReader.reset() + } + + nextLocs := rv.nextLocs[:0] + nextSegmentLocs := rv.nextSegmentLocs[:0] + + buf := rv.buf + + *rv = PostingsIterator{} // clear the struct + + rv.freqNormReader = freqNormReader + rv.locReader = locReader + + rv.nextLocs = nextLocs + rv.nextSegmentLocs = nextSegmentLocs + + rv.buf = buf + } + + rv.postings = p + rv.includeFreqNorm = includeFreq || includeNorm || includeLocs + rv.includeLocs = includeLocs + + if p.normBits1Hit != 0 { + // "1-hit" encoding + rv.docNum1Hit = p.docNum1Hit + rv.normBits1Hit = p.normBits1Hit + + if p.except != nil && p.except.Contains(uint32(rv.docNum1Hit)) { + rv.docNum1Hit = DocNum1HitFinished + } + + return rv + } + + // "general" encoding, check if empty + if p.postings == nil { + return rv + } + + // initialize freq chunk reader + if rv.includeFreqNorm { + rv.freqNormReader = newChunkedIntDecoder(p.sb.mem, p.freqOffset) + } + + // initialize the loc chunk reader + if rv.includeLocs { + rv.locReader = newChunkedIntDecoder(p.sb.mem, p.locOffset) + } + + rv.all = p.postings.Iterator() + if p.except != nil { + rv.ActualBM = roaring.AndNot(p.postings, p.except) + rv.Actual = rv.ActualBM.Iterator() + } else { + rv.ActualBM = p.postings + rv.Actual = rv.all // Optimize to use same iterator for all & Actual. 
+ } + + return rv +} + +// Count returns the number of items on this postings list +func (p *PostingsList) Count() uint64 { + var n, e uint64 + if p.normBits1Hit != 0 { + n = 1 + if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { + e = 1 + } + } else if p.postings != nil { + n = p.postings.GetCardinality() + if p.except != nil { + e = p.postings.AndCardinality(p.except) + } + } + return n - e +} + +func (rv *PostingsList) read(postingsOffset uint64, d *Dictionary) error { + rv.postingsOffset = postingsOffset + + // handle "1-hit" encoding special case + if rv.postingsOffset&FSTValEncodingMask == FSTValEncoding1Hit { + return rv.init1Hit(postingsOffset) + } + + // read the location of the freq/norm details + var n uint64 + var read int + + rv.freqOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+binary.MaxVarintLen64]) + n += uint64(read) + + rv.locOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + var postingsLen uint64 + postingsLen, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + roaringBytes := d.sb.mem[postingsOffset+n : postingsOffset+n+postingsLen] + + if rv.postings == nil { + rv.postings = roaring.NewBitmap() + } + _, err := rv.postings.FromBuffer(roaringBytes) + if err != nil { + return fmt.Errorf("error loading roaring bitmap: %v", err) + } + + return nil +} + +func (rv *PostingsList) init1Hit(fstVal uint64) error { + docNum, normBits := FSTValDecode1Hit(fstVal) + + rv.docNum1Hit = docNum + rv.normBits1Hit = normBits + + return nil +} + +// PostingsIterator provides a way to iterate through the postings list +type PostingsIterator struct { + postings *PostingsList + all roaring.IntPeekable + Actual roaring.IntPeekable + ActualBM *roaring.Bitmap + + currChunk uint32 + freqNormReader *chunkedIntDecoder + locReader *chunkedIntDecoder + + next Posting // reused across Next() calls + 
nextLocs []Location // reused across Next() calls + nextSegmentLocs []segment.Location // reused across Next() calls + + docNum1Hit uint64 + normBits1Hit uint64 + + buf []byte + + includeFreqNorm bool + includeLocs bool +} + +var emptyPostingsIterator = &PostingsIterator{} + +func (i *PostingsIterator) Size() int { + sizeInBytes := reflectStaticSizePostingsIterator + size.SizeOfPtr + + i.next.Size() + // account for freqNormReader, locReader if we start using this. + for _, entry := range i.nextLocs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (i *PostingsIterator) loadChunk(chunk int) error { + if i.includeFreqNorm { + err := i.freqNormReader.loadChunk(chunk) + if err != nil { + return err + } + } + + if i.includeLocs { + err := i.locReader.loadChunk(chunk) + if err != nil { + return err + } + } + + i.currChunk = uint32(chunk) + return nil +} + +func (i *PostingsIterator) readFreqNormHasLocs() (uint64, uint64, bool, error) { + if i.normBits1Hit != 0 { + return 1, i.normBits1Hit, false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading frequency: %v", err) + } + + freq, hasLocs := decodeFreqHasLocs(freqHasLocs) + + normBits, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading norm: %v", err) + } + + return freq, normBits, hasLocs, nil +} + +func (i *PostingsIterator) skipFreqNormReadHasLocs() (bool, error) { + if i.normBits1Hit != 0 { + return false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return false, fmt.Errorf("error reading freqHasLocs: %v", err) + } + + i.freqNormReader.SkipUvarint() // Skip normBits. + + return freqHasLocs&0x01 != 0, nil // See decodeFreqHasLocs() / hasLocs. 
+} + +func encodeFreqHasLocs(freq uint64, hasLocs bool) uint64 { + rv := freq << 1 + if hasLocs { + rv = rv | 0x01 // 0'th LSB encodes whether there are locations + } + return rv +} + +func decodeFreqHasLocs(freqHasLocs uint64) (uint64, bool) { + freq := freqHasLocs >> 1 + hasLocs := freqHasLocs&0x01 != 0 + return freq, hasLocs +} + +// readLocation processes all the integers on the stream representing a single +// location. +func (i *PostingsIterator) readLocation(l *Location) error { + // read off field + fieldID, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location field: %v", err) + } + // read off pos + pos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location pos: %v", err) + } + // read off start + start, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location start: %v", err) + } + // read off end + end, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location end: %v", err) + } + // read off num array pos + numArrayPos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location num array pos: %v", err) + } + + l.field = i.postings.sb.fieldsInv[fieldID] + l.pos = pos + l.start = start + l.end = end + + if cap(l.ap) < int(numArrayPos) { + l.ap = make([]uint64, int(numArrayPos)) + } else { + l.ap = l.ap[:int(numArrayPos)] + } + + // read off array positions + for k := 0; k < int(numArrayPos); k++ { + ap, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading array position: %v", err) + } + + l.ap[k] = ap + } + + return nil +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) Next() (segment.Posting, error) { + return i.nextAtOrAfter(0) +} + +// Advance returns the posting at the specified docNum or it is not present +// the next posting, or if the end is reached, nil +func (i 
*PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { + return i.nextAtOrAfter(docNum) +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { + docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) + if err != nil || !exists { + return nil, err + } + + i.next = Posting{} // clear the struct + rv := &i.next + rv.docNum = docNum + + if !i.includeFreqNorm { + return rv, nil + } + + var normBits uint64 + var hasLocs bool + + rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return nil, err + } + + rv.norm = math.Float32frombits(uint32(normBits)) + + if i.includeLocs && hasLocs { + // prepare locations into reused slices, where we assume + // rv.freq >= "number of locs", since in a composite field, + // some component fields might have their IncludeTermVector + // flags disabled while other component fields are enabled + if cap(i.nextLocs) >= int(rv.freq) { + i.nextLocs = i.nextLocs[0:rv.freq] + } else { + i.nextLocs = make([]Location, rv.freq, rv.freq*2) + } + if cap(i.nextSegmentLocs) < int(rv.freq) { + i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) + } + rv.locs = i.nextSegmentLocs[:0] + + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + j := 0 + startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader + for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { + err := i.readLocation(&i.nextLocs[j]) + if err != nil { + return nil, err + } + rv.locs = append(rv.locs, &i.nextLocs[j]) + j++ + } + } + + return rv, nil +} + +// nextDocNum returns the next docNum on the postings list, and also +// sets up the currChunk / loc related fields of the iterator. 
+func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool, error) { + if i.normBits1Hit != 0 { + if i.docNum1Hit == DocNum1HitFinished { + return 0, false, nil + } + if i.docNum1Hit < atOrAfter { + // advanced past our 1-hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return 0, false, nil + } + docNum := i.docNum1Hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return docNum, true, nil + } + + if i.Actual == nil || !i.Actual.HasNext() { + return 0, false, nil + } + + if i.postings == nil || i.postings.postings == i.ActualBM { + return i.nextDocNumAtOrAfterClean(atOrAfter) + } + + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + // couldn't find anything + return 0, false, nil + } + + n := i.Actual.Next() + allN := i.all.Next() + + chunkSize, err := getChunkSize(i.postings.sb.chunkMode, i.postings.postings.GetCardinality(), i.postings.sb.numDocs) + if err != nil { + return 0, false, err + } + nChunk := n / uint32(chunkSize) + + // when allN becomes >= to here, then allN is in the same chunk as nChunk. 
+ allNReachesNChunk := nChunk * uint32(chunkSize) + + // n is the next actual hit (excluding some postings), and + // allN is the next hit in the full postings, and + // if they don't match, move 'all' forwards until they do + for allN != n { + // we've reached same chunk, so move the freq/norm/loc decoders forward + if i.includeFreqNorm && allN >= allNReachesNChunk { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, err + } + } + + allN = i.all.Next() + } + + if i.includeFreqNorm && (i.currChunk != nChunk || i.freqNormReader.isNil()) { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +// optimization when the postings list is "clean" (e.g., no updates & +// no deletions) where the all bitmap is the same as the actual bitmap +func (i *PostingsIterator) nextDocNumAtOrAfterClean( + atOrAfter uint64) (uint64, bool, error) { + + if !i.includeFreqNorm { + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + return 0, false, nil // couldn't find anything + } + + return uint64(i.Actual.Next()), true, nil + } + + chunkSize, err := getChunkSize(i.postings.sb.chunkMode, i.postings.postings.GetCardinality(), i.postings.sb.numDocs) + if err != nil { + return 0, false, err + } + + // freq-norm's needed, so maintain freq-norm chunk reader + sameChunkNexts := 0 // # of times we called Next() in the same chunk + n := i.Actual.Next() + nChunk := n / uint32(chunkSize) + + for uint64(n) < atOrAfter && i.Actual.HasNext() { + n = i.Actual.Next() + + nChunkPrev := nChunk + nChunk = n / uint32(chunkSize) + + if nChunk != nChunkPrev { + sameChunkNexts = 0 + } else { + sameChunkNexts += 1 + } + } + + if uint64(n) < atOrAfter { + // couldn't find anything + return 0, false, nil + } + + for j := 0; j < sameChunkNexts; j++ { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, fmt.Errorf("error optimized currChunkNext: %v", 
err) + } + } + + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +func (i *PostingsIterator) currChunkNext(nChunk uint32) error { + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return fmt.Errorf("error loading chunk: %v", err) + } + } + + // read off freq/offsets even though we don't care about them + hasLocs, err := i.skipFreqNormReadHasLocs() + if err != nil { + return err + } + + if i.includeLocs && hasLocs { + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + // skip over all the location bytes + i.locReader.SkipBytes(int(numLocsBytes)) + } + + return nil +} + +// DocNum1Hit returns the docNum and true if this is "1-hit" optimized +// and the docNum is available. +func (p *PostingsIterator) DocNum1Hit() (uint64, bool) { + if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished { + return p.docNum1Hit, true + } + return 0, false +} + +// ActualBitmap returns the underlying actual bitmap +// which can be used up the stack for optimizations +func (p *PostingsIterator) ActualBitmap() *roaring.Bitmap { + return p.ActualBM +} + +// ReplaceActual replaces the ActualBM with the provided +// bitmap +func (p *PostingsIterator) ReplaceActual(abm *roaring.Bitmap) { + p.ActualBM = abm + p.Actual = abm.Iterator() +} + +// PostingsIteratorFromBitmap constructs a PostingsIterator given an +// "actual" bitmap. 
+func PostingsIteratorFromBitmap(bm *roaring.Bitmap, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + ActualBM: bm, + Actual: bm.Iterator(), + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// PostingsIteratorFrom1Hit constructs a PostingsIterator given a +// 1-hit docNum. +func PostingsIteratorFrom1Hit(docNum1Hit uint64, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + docNum1Hit: docNum1Hit, + normBits1Hit: NormBits1Hit, + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// Posting is a single entry in a postings list +type Posting struct { + docNum uint64 + freq uint64 + norm float32 + locs []segment.Location +} + +func (p *Posting) Size() int { + sizeInBytes := reflectStaticSizePosting + + for _, entry := range p.locs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +// Number returns the document number of this posting in this segment +func (p *Posting) Number() uint64 { + return p.docNum +} + +// Frequency returns the frequencies of occurrence of this term in this doc/field +func (p *Posting) Frequency() uint64 { + return p.freq +} + +// Norm returns the normalization factor for this posting +func (p *Posting) Norm() float64 { + return float64(p.norm) +} + +// Locations returns the location information for each occurrence +func (p *Posting) Locations() []segment.Location { + return p.locs +} + +// Location represents the location of a single occurrence +type Location struct { + field string + pos uint64 + start uint64 + end uint64 + ap []uint64 +} + +func (l *Location) Size() int { + return reflectStaticSizeLocation + + len(l.field) + + len(l.ap)*size.SizeOfUint64 +} + +// Field returns the name of the field (useful in composite fields to know +// which original field the value came from) +func (l *Location) Field() string { + return l.field +} + +// Start returns the start byte 
offset of this occurrence +func (l *Location) Start() uint64 { + return l.start +} + +// End returns the end byte offset of this occurrence +func (l *Location) End() uint64 { + return l.end +} + +// Pos returns the 1-based phrase position of this occurrence +func (l *Location) Pos() uint64 { + return l.pos +} + +// ArrayPositions returns the array position vector associated with this occurrence +func (l *Location) ArrayPositions() []uint64 { + return l.ap +} diff --git a/vendor/github.com/blevesearch/zap/v13/read.go b/vendor/github.com/blevesearch/zap/v13/read.go new file mode 100644 index 0000000..e47d4c6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/read.go @@ -0,0 +1,43 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import "encoding/binary" + +func (s *SegmentBase) getDocStoredMetaAndCompressed(docNum uint64) ([]byte, []byte) { + _, storedOffset, n, metaLen, dataLen := s.getDocStoredOffsets(docNum) + + meta := s.mem[storedOffset+n : storedOffset+n+metaLen] + data := s.mem[storedOffset+n+metaLen : storedOffset+n+metaLen+dataLen] + + return meta, data +} + +func (s *SegmentBase) getDocStoredOffsets(docNum uint64) ( + uint64, uint64, uint64, uint64, uint64) { + indexOffset := s.storedIndexOffset + (8 * docNum) + + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + + var n uint64 + + metaLen, read := binary.Uvarint(s.mem[storedOffset : storedOffset+binary.MaxVarintLen64]) + n += uint64(read) + + dataLen, read := binary.Uvarint(s.mem[storedOffset+n : storedOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + return indexOffset, storedOffset, n, metaLen, dataLen +} diff --git a/vendor/github.com/blevesearch/zap/v13/segment.go b/vendor/github.com/blevesearch/zap/v13/segment.go new file mode 100644 index 0000000..e8b1f06 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/segment.go @@ -0,0 +1,572 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "os" + "sync" + "unsafe" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/couchbase/vellum" + mmap "github.com/blevesearch/mmap-go" + "github.com/golang/snappy" +) + +var reflectStaticSizeSegmentBase int + +func init() { + var sb SegmentBase + reflectStaticSizeSegmentBase = int(unsafe.Sizeof(sb)) +} + +// Open returns a zap impl of a segment +func (*ZapPlugin) Open(path string) (segment.Segment, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + + rv := &Segment{ + SegmentBase: SegmentBase{ + mem: mm[0 : len(mm)-FooterSize], + fieldsMap: make(map[string]uint16), + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + }, + f: f, + mm: mm, + path: path, + refs: 1, + } + rv.SegmentBase.updateSize() + + err = rv.loadConfig() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadFields() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadDvReaders() + if err != nil { + _ = rv.Close() + return nil, err + } + + return rv, nil +} + +// SegmentBase is a memory only, read-only implementation of the +// segment.Segment interface, using zap's data representation. 
+type SegmentBase struct { + mem []byte + memCRC uint32 + chunkMode uint32 + fieldsMap map[string]uint16 // fieldName -> fieldID+1 + fieldsInv []string // fieldID -> fieldName + numDocs uint64 + storedIndexOffset uint64 + fieldsIndexOffset uint64 + docValueOffset uint64 + dictLocs []uint64 + fieldDvReaders map[uint16]*docValueReader // naive chunk cache per field + fieldDvNames []string // field names cached in fieldDvReaders + size uint64 + + m sync.Mutex + fieldFSTs map[uint16]*vellum.FST +} + +func (sb *SegmentBase) Size() int { + return int(sb.size) +} + +func (sb *SegmentBase) updateSize() { + sizeInBytes := reflectStaticSizeSegmentBase + + cap(sb.mem) + + // fieldsMap + for k := range sb.fieldsMap { + sizeInBytes += (len(k) + size.SizeOfString) + size.SizeOfUint16 + } + + // fieldsInv, dictLocs + for _, entry := range sb.fieldsInv { + sizeInBytes += len(entry) + size.SizeOfString + } + sizeInBytes += len(sb.dictLocs) * size.SizeOfUint64 + + // fieldDvReaders + for _, v := range sb.fieldDvReaders { + sizeInBytes += size.SizeOfUint16 + size.SizeOfPtr + if v != nil { + sizeInBytes += v.size() + } + } + + sb.size = uint64(sizeInBytes) +} + +func (sb *SegmentBase) AddRef() {} +func (sb *SegmentBase) DecRef() (err error) { return nil } +func (sb *SegmentBase) Close() (err error) { return nil } + +// Segment implements a persisted segment.Segment interface, by +// embedding an mmap()'ed SegmentBase. +type Segment struct { + SegmentBase + + f *os.File + mm mmap.MMap + path string + version uint32 + crc uint32 + + m sync.Mutex // Protects the fields that follow. 
+ refs int64 +} + +func (s *Segment) Size() int { + // 8 /* size of file pointer */ + // 4 /* size of version -> uint32 */ + // 4 /* size of crc -> uint32 */ + sizeOfUints := 16 + + sizeInBytes := (len(s.path) + size.SizeOfString) + sizeOfUints + + // mutex, refs -> int64 + sizeInBytes += 16 + + // do not include the mmap'ed part + return sizeInBytes + s.SegmentBase.Size() - cap(s.mem) +} + +func (s *Segment) AddRef() { + s.m.Lock() + s.refs++ + s.m.Unlock() +} + +func (s *Segment) DecRef() (err error) { + s.m.Lock() + s.refs-- + if s.refs == 0 { + err = s.closeActual() + } + s.m.Unlock() + return err +} + +func (s *Segment) loadConfig() error { + crcOffset := len(s.mm) - 4 + s.crc = binary.BigEndian.Uint32(s.mm[crcOffset : crcOffset+4]) + + verOffset := crcOffset - 4 + s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4]) + if s.version != Version { + return fmt.Errorf("unsupported version %d", s.version) + } + + chunkOffset := verOffset - 4 + s.chunkMode = binary.BigEndian.Uint32(s.mm[chunkOffset : chunkOffset+4]) + + docValueOffset := chunkOffset - 8 + s.docValueOffset = binary.BigEndian.Uint64(s.mm[docValueOffset : docValueOffset+8]) + + fieldsIndexOffset := docValueOffset - 8 + s.fieldsIndexOffset = binary.BigEndian.Uint64(s.mm[fieldsIndexOffset : fieldsIndexOffset+8]) + + storedIndexOffset := fieldsIndexOffset - 8 + s.storedIndexOffset = binary.BigEndian.Uint64(s.mm[storedIndexOffset : storedIndexOffset+8]) + + numDocsOffset := storedIndexOffset - 8 + s.numDocs = binary.BigEndian.Uint64(s.mm[numDocsOffset : numDocsOffset+8]) + return nil +} + +func (s *SegmentBase) loadFields() error { + // NOTE for now we assume the fields index immediately precedes + // the footer, and if this changes, need to adjust accordingly (or + // store explicit length), where s.mem was sliced from s.mm in Open(). 
+ fieldsIndexEnd := uint64(len(s.mem)) + + // iterate through fields index + var fieldID uint64 + for s.fieldsIndexOffset+(8*fieldID) < fieldsIndexEnd { + addr := binary.BigEndian.Uint64(s.mem[s.fieldsIndexOffset+(8*fieldID) : s.fieldsIndexOffset+(8*fieldID)+8]) + + dictLoc, read := binary.Uvarint(s.mem[addr:fieldsIndexEnd]) + n := uint64(read) + s.dictLocs = append(s.dictLocs, dictLoc) + + var nameLen uint64 + nameLen, read = binary.Uvarint(s.mem[addr+n : fieldsIndexEnd]) + n += uint64(read) + + name := string(s.mem[addr+n : addr+n+nameLen]) + s.fieldsInv = append(s.fieldsInv, name) + s.fieldsMap[name] = uint16(fieldID + 1) + + fieldID++ + } + return nil +} + +// Dictionary returns the term dictionary for the specified field +func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { + dict, err := s.dictionary(field) + if err == nil && dict == nil { + return &segment.EmptyDictionary{}, nil + } + return dict, err +} + +func (sb *SegmentBase) dictionary(field string) (rv *Dictionary, err error) { + fieldIDPlus1 := sb.fieldsMap[field] + if fieldIDPlus1 > 0 { + rv = &Dictionary{ + sb: sb, + field: field, + fieldID: fieldIDPlus1 - 1, + } + + dictStart := sb.dictLocs[rv.fieldID] + if dictStart > 0 { + var ok bool + sb.m.Lock() + if rv.fst, ok = sb.fieldFSTs[rv.fieldID]; !ok { + // read the length of the vellum data + vellumLen, read := binary.Uvarint(sb.mem[dictStart : dictStart+binary.MaxVarintLen64]) + fstBytes := sb.mem[dictStart+uint64(read) : dictStart+uint64(read)+vellumLen] + rv.fst, err = vellum.Load(fstBytes) + if err != nil { + sb.m.Unlock() + return nil, fmt.Errorf("dictionary field %s vellum err: %v", field, err) + } + + sb.fieldFSTs[rv.fieldID] = rv.fst + } + + sb.m.Unlock() + rv.fstReader, err = rv.fst.Reader() + if err != nil { + return nil, fmt.Errorf("dictionary field %s vellum reader err: %v", field, err) + } + + } + } + + return rv, nil +} + +// visitDocumentCtx holds data structures that are reusable across +// multiple 
VisitDocument() calls to avoid memory allocations +type visitDocumentCtx struct { + buf []byte + reader bytes.Reader + arrayPos []uint64 +} + +var visitDocumentCtxPool = sync.Pool{ + New: func() interface{} { + reuse := &visitDocumentCtx{} + return reuse + }, +} + +// VisitDocument invokes the DocFieldValueVistor for each stored field +// for the specified doc number +func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + return s.visitDocument(vdc, num, visitor) +} + +func (s *SegmentBase) visitDocument(vdc *visitDocumentCtx, num uint64, + visitor segment.DocumentFieldValueVisitor) error { + // first make sure this is a valid number in this segment + if num < s.numDocs { + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + idFieldVal := compressed[:idFieldValLen] + + keepGoing := visitor("_id", byte('t'), idFieldVal, nil) + if !keepGoing { + visitDocumentCtxPool.Put(vdc) + return nil + } + + // handle non-"_id" fields + compressed = compressed[idFieldValLen:] + + uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed) + if err != nil { + return err + } + + for keepGoing { + field, err := binary.ReadUvarint(&vdc.reader) + if err == io.EOF { + break + } + if err != nil { + return err + } + typ, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + offset, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + l, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + numap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + var arrayPos []uint64 + if numap > 0 { + if cap(vdc.arrayPos) < int(numap) { + vdc.arrayPos = make([]uint64, numap) + } + arrayPos = 
vdc.arrayPos[:numap] + for i := 0; i < int(numap); i++ { + ap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + arrayPos[i] = ap + } + } + + value := uncompressed[offset : offset+l] + keepGoing = visitor(s.fieldsInv[field], byte(typ), value, arrayPos) + } + + vdc.buf = uncompressed + } + return nil +} + +// DocID returns the value of the _id field for the given docNum +func (s *SegmentBase) DocID(num uint64) ([]byte, error) { + if num >= s.numDocs { + return nil, nil + } + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return nil, err + } + idFieldVal := compressed[:idFieldValLen] + + visitDocumentCtxPool.Put(vdc) + + return idFieldVal, nil +} + +// Count returns the number of documents in this segment. +func (s *SegmentBase) Count() uint64 { + return s.numDocs +} + +// DocNumbers returns a bitset corresponding to the doc numbers of all the +// provided _id strings +func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { + rv := roaring.New() + + if len(s.fieldsMap) > 0 { + idDict, err := s.dictionary("_id") + if err != nil { + return nil, err + } + + postingsList := emptyPostingsList + + sMax, err := idDict.fst.GetMaxKey() + if err != nil { + return nil, err + } + sMaxStr := string(sMax) + filteredIds := make([]string, 0, len(ids)) + for _, id := range ids { + if id <= sMaxStr { + filteredIds = append(filteredIds, id) + } + } + + for _, id := range filteredIds { + postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) + if err != nil { + return nil, err + } + postingsList.OrInto(rv) + } + } + + return rv, nil +} + +// Fields returns the field names used in this segment +func (s *SegmentBase) Fields() []string { + return s.fieldsInv +} + +// Path returns the path of this segment on disk +func (s 
*Segment) Path() string { + return s.path +} + +// Close releases all resources associated with this segment +func (s *Segment) Close() (err error) { + return s.DecRef() +} + +func (s *Segment) closeActual() (err error) { + if s.mm != nil { + err = s.mm.Unmap() + } + // try to close file even if unmap failed + if s.f != nil { + err2 := s.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +// some helpers i started adding for the command-line utility + +// Data returns the underlying mmaped data slice +func (s *Segment) Data() []byte { + return s.mm +} + +// CRC returns the CRC value stored in the file footer +func (s *Segment) CRC() uint32 { + return s.crc +} + +// Version returns the file version in the file footer +func (s *Segment) Version() uint32 { + return s.version +} + +// ChunkFactor returns the chunk factor in the file footer +func (s *Segment) ChunkMode() uint32 { + return s.chunkMode +} + +// FieldsIndexOffset returns the fields index offset in the file footer +func (s *Segment) FieldsIndexOffset() uint64 { + return s.fieldsIndexOffset +} + +// StoredIndexOffset returns the stored value index offset in the file footer +func (s *Segment) StoredIndexOffset() uint64 { + return s.storedIndexOffset +} + +// DocValueOffset returns the docValue offset in the file footer +func (s *Segment) DocValueOffset() uint64 { + return s.docValueOffset +} + +// NumDocs returns the number of documents in the file footer +func (s *Segment) NumDocs() uint64 { + return s.numDocs +} + +// DictAddr is a helper function to compute the file offset where the +// dictionary is stored for the specified field. 
+func (s *Segment) DictAddr(field string) (uint64, error) { + fieldIDPlus1, ok := s.fieldsMap[field] + if !ok { + return 0, fmt.Errorf("no such field '%s'", field) + } + + return s.dictLocs[fieldIDPlus1-1], nil +} + +func (s *SegmentBase) loadDvReaders() error { + if s.docValueOffset == fieldNotUninverted || s.numDocs == 0 { + return nil + } + + var read uint64 + for fieldID, field := range s.fieldsInv { + var fieldLocStart, fieldLocEnd uint64 + var n int + fieldLocStart, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset start for field %d", fieldID) + } + read += uint64(n) + fieldLocEnd, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset end for field %d", fieldID) + } + read += uint64(n) + + fieldDvReader, err := s.loadFieldDocValueReader(field, fieldLocStart, fieldLocEnd) + if err != nil { + return err + } + if fieldDvReader != nil { + s.fieldDvReaders[uint16(fieldID)] = fieldDvReader + s.fieldDvNames = append(s.fieldDvNames, field) + } + } + + return nil +} diff --git a/vendor/github.com/blevesearch/zap/v13/write.go b/vendor/github.com/blevesearch/zap/v13/write.go new file mode 100644 index 0000000..77aefdb --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/write.go @@ -0,0 +1,145 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "encoding/binary" + "io" + + "github.com/RoaringBitmap/roaring" +) + +// writes out the length of the roaring bitmap in bytes as varint +// then writes out the roaring bitmap itself +func writeRoaringWithLen(r *roaring.Bitmap, w io.Writer, + reuseBufVarint []byte) (int, error) { + buf, err := r.ToBytes() + if err != nil { + return 0, err + } + + var tw int + + // write out the length + n := binary.PutUvarint(reuseBufVarint, uint64(len(buf))) + nw, err := w.Write(reuseBufVarint[:n]) + tw += nw + if err != nil { + return tw, err + } + + // write out the roaring bytes + nw, err = w.Write(buf) + tw += nw + if err != nil { + return tw, err + } + + return tw, nil +} + +func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (uint64, error) { + var rv uint64 + var fieldsOffsets []uint64 + + for fieldID, fieldName := range fieldsInv { + // record start of this field + fieldsOffsets = append(fieldsOffsets, uint64(w.Count())) + + // write out the dict location and field name length + _, err := writeUvarints(w, dictLocs[fieldID], uint64(len(fieldName))) + if err != nil { + return 0, err + } + + // write out the field name + _, err = w.Write([]byte(fieldName)) + if err != nil { + return 0, err + } + } + + // now write out the fields index + rv = uint64(w.Count()) + for fieldID := range fieldsInv { + err := binary.Write(w, binary.BigEndian, fieldsOffsets[fieldID]) + if err != nil { + return 0, err + } + } + + return rv, nil +} + +// FooterSize is the size of the footer record in bytes +// crc + ver + chunk + field offset + stored offset + num docs + docValueOffset +const FooterSize = 4 + 4 + 4 + 8 + 8 + 8 + 8 + +func persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + chunkMode uint32, crcBeforeFooter uint32, writerIn io.Writer) error { + w := NewCountHashWriter(writerIn) + w.crc = 
crcBeforeFooter + + // write out the number of docs + err := binary.Write(w, binary.BigEndian, numDocs) + if err != nil { + return err + } + // write out the stored field index location: + err = binary.Write(w, binary.BigEndian, storedIndexOffset) + if err != nil { + return err + } + // write out the field index location + err = binary.Write(w, binary.BigEndian, fieldsIndexOffset) + if err != nil { + return err + } + // write out the fieldDocValue location + err = binary.Write(w, binary.BigEndian, docValueOffset) + if err != nil { + return err + } + // write out 32-bit chunk factor + err = binary.Write(w, binary.BigEndian, chunkMode) + if err != nil { + return err + } + // write out 32-bit version + err = binary.Write(w, binary.BigEndian, Version) + if err != nil { + return err + } + // write out CRC-32 of everything upto but not including this CRC + err = binary.Write(w, binary.BigEndian, w.crc) + if err != nil { + return err + } + return nil +} + +func writeUvarints(w io.Writer, vals ...uint64) (tw int, err error) { + buf := make([]byte, binary.MaxVarintLen64) + for _, val := range vals { + n := binary.PutUvarint(buf, val) + var nw int + nw, err = w.Write(buf[:n]) + tw += nw + if err != nil { + return tw, err + } + } + return tw, err +} diff --git a/vendor/github.com/blevesearch/zap/v13/zap.md b/vendor/github.com/blevesearch/zap/v13/zap.md new file mode 100644 index 0000000..d74dc54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v13/zap.md @@ -0,0 +1,177 @@ +# ZAP File Format + +## Legend + +### Sections + + |========| + | | section + |========| + +### Fixed-size fields + + |--------| |----| |--| |-| + | | uint64 | | uint32 | | uint16 | | uint8 + |--------| |----| |--| |-| + +### Varints + + |~~~~~~~~| + | | varint(up to uint64) + |~~~~~~~~| + +### Arbitrary-length fields + + |--------...---| + | | arbitrary-length field (string, vellum, roaring bitmap) + |--------...---| + +### Chunked data + + [--------] + [ ] + [--------] + +## Overview + +Footer 
section describes the configuration of particular ZAP file. The format of footer is version-dependent, so it is necessary to check `V` field before the parsing. + + |==================================================| + | Stored Fields | + |==================================================| + |-----> | Stored Fields Index | + | |==================================================| + | | Dictionaries + Postings + DocValues | + | |==================================================| + | |---> | DocValues Index | + | | |==================================================| + | | | Fields | + | | |==================================================| + | | |-> | Fields Index | + | | | |========|========|========|========|====|====|====| + | | | | D# | SF | F | FDV | CF | V | CC | (Footer) + | | | |========|====|===|====|===|====|===|====|====|====| + | | | | | | + |-+-+-----------------| | | + | |--------------------------| | + |-------------------------------------| + + D#. Number of Docs. + SF. Stored Fields Index Offset. + F. Field Index Offset. + FDV. Field DocValue Offset. + CF. Chunk Factor. + V. Version. + CC. CRC32. + +## Stored Fields + +Stored Fields Index is `D#` consecutive 64-bit unsigned integers - offsets, where relevant Stored Fields Data records are located. + + 0 [SF] [SF + D# * 8] + | Stored Fields | Stored Fields Index | + |================================|==================================| + | | | + | |--------------------| ||--------|--------|. . .|--------|| + | |-> | Stored Fields Data | || 0 | 1 | | D# - 1 || + | | |--------------------| ||--------|----|---|. . .|--------|| + | | | | | + |===|============================|==============|===================| + | | + |-------------------------------------------| + +Stored Fields Data is an arbitrary size record, which consists of metadata and [Snappy](https://github.com/golang/snappy)-compressed data. 
+ + Stored Fields Data + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + | MDS | CDS | MD | CD | + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + + MDS. Metadata size. + CDS. Compressed data size. + MD. Metadata. + CD. Snappy-compressed data. + +## Fields + +Fields Index section located between addresses `F` and `len(file) - len(footer)` and consist of `uint64` values (`F1`, `F2`, ...) which are offsets to records in Fields section. We have `F# = (len(file) - len(footer) - F) / sizeof(uint64)` fields. + + + (...) [F] [F + F#] + | Fields | Fields Index. | + |================================|================================| + | | | + | |~~~~~~~~|~~~~~~~~|---...---|||--------|--------|...|--------|| + ||->| Dict | Length | Name ||| 0 | 1 | | F# - 1 || + || |~~~~~~~~|~~~~~~~~|---...---|||--------|----|---|...|--------|| + || | | | + ||===============================|==============|=================| + | | + |----------------------------------------------| + + +## Dictionaries + Postings + +Each of fields has its own dictionary, encoded in [Vellum](https://github.com/couchbase/vellum) format. Dictionary consists of pairs `(term, offset)`, where `offset` indicates the position of postings (list of documents) for this particular term. + + |================================================================|- Dictionaries + + | | Postings + + | | DocValues + | Freq/Norm (chunked) | + | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | |->[ Freq | Norm (float32 under varint) ] | + | | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | | | + | |------------------------------------------------------------| | + | Location Details (chunked) | | + | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | |->[ Size | Pos | Start | End | Arr# | ArrPos | ... 
] | | + | | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | | | | + | |----------------------| | | + | Postings List | | | + | |~~~~~~~~|~~~~~|~~|~~~~~~~~|-----------...--| | | + | |->| F/N | LD | Length | ROARING BITMAP | | | + | | |~~~~~|~~|~~~~~~~~|~~~~~~~~|-----------...--| | | + | | |----------------------------------------------| | + | |--------------------------------------| | + | Dictionary | | + | |~~~~~~~~|--------------------------|-...-| | + | |->| Length | VELLUM DATA : (TERM -> OFFSET) | | + | | |~~~~~~~~|----------------------------...-| | + | | | + |======|=========================================================|- DocValues Index + | | | + |======|=========================================================|- Fields + | | | + | |~~~~|~~~|~~~~~~~~|---...---| | + | | Dict | Length | Name | | + | |~~~~~~~~|~~~~~~~~|---...---| | + | | + |================================================================| + +## DocValues + +DocValues Index is `F#` pairs of varints, one pair per field. Each pair of varints indicates start and end point of DocValues slice. + + |================================================================| + | |------...--| | + | |->| DocValues |<-| | + | | |------...--| | | + |==|=================|===========================================|- DocValues Index + ||~|~~~~~~~~~|~~~~~~~|~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + || DV1 START | DV1 STOP | . . . . . | DV(F#) START | DV(F#) END || + ||~~~~~~~~~~~|~~~~~~~~~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + |================================================================| + +DocValues is chunked Snappy-compressed values for each document and field. + + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + [ Doc# in Chunk | Doc1 | Offset1 | ... | DocN | OffsetN | SNAPPY COMPRESSED DATA ] + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + +Last 16 bytes are description of chunks. 
+ + |~~~~~~~~~~~~...~|----------------|----------------| + | Chunk Sizes | Chunk Size Arr | Chunk# | + |~~~~~~~~~~~~...~|----------------|----------------| diff --git a/vendor/github.com/blevesearch/zap/v14/.gitignore b/vendor/github.com/blevesearch/zap/v14/.gitignore new file mode 100644 index 0000000..46d1cfa --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/.gitignore @@ -0,0 +1,12 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +/cmd/zap/zap +*.test +tags diff --git a/vendor/github.com/blevesearch/zap/v14/LICENSE b/vendor/github.com/blevesearch/zap/v14/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/zap/v14/README.md b/vendor/github.com/blevesearch/zap/v14/README.md new file mode 100644 index 0000000..0facb66 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/README.md @@ -0,0 +1,158 @@ +# zap file format + +Advanced ZAP File Format Documentation is [here](zap.md). + +The file is written in the reverse order that we typically access data. This helps us write in one pass since later sections of the file require file offsets of things we've already written. + +Current usage: + +- mmap the entire file +- crc-32 bytes and version are in fixed position at end of the file +- reading remainder of footer could be version specific +- remainder of footer gives us: + - 3 important offsets (docValue , fields index and stored data index) + - 2 important values (number of docs and chunk factor) +- field data is processed once and memoized onto the heap so that we never have to go back to disk for it +- access to stored data by doc number means first navigating to the stored data index, then accessing a fixed position offset into that slice, which gives us the actual address of the data. the first bytes of that section tell us the size of data so that we know where it ends. 
+- access to all other indexed data follows the following pattern: + - first know the field name -> convert to id + - next navigate to term dictionary for that field + - some operations stop here and do dictionary ops + - next use dictionary to navigate to posting list for a specific term + - walk posting list + - if necessary, walk posting details as we go + - if location info is desired, consult location bitmap to see if it is there + +## stored fields section + +- for each document + - preparation phase: + - produce a slice of metadata bytes and data bytes + - produce these slices in field id order + - field value is appended to the data slice + - metadata slice is varint encoded with the following values for each field value + - field id (uint16) + - field type (byte) + - field value start offset in uncompressed data slice (uint64) + - field value length (uint64) + - field number of array positions (uint64) + - one additional value for each array position (uint64) + - compress the data slice using snappy + - file writing phase: + - remember the start offset for this document + - write out meta data length (varint uint64) + - write out compressed data length (varint uint64) + - write out the metadata bytes + - write out the compressed data bytes + +## stored fields idx + +- for each document + - write start offset (remembered from previous section) of stored data (big endian uint64) + +With this index and a known document number, we have direct access to all the stored field data. 
+ +## posting details (freq/norm) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode term frequency (uint64) + - encode norm factor (float32) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. + +## posting details (location) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode field (uint16) + - encode field pos (uint64) + - encode field start (uint64) + - encode field end (uint64) + - encode number of array positions to follow (uint64) + - encode each array position (each uint64) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. 
+ +## postings list section + +- for each posting list + - preparation phase: + - encode roaring bitmap posting list to bytes (so we know the length) + - file writing phase: + - remember the start position for this posting list + - write freq/norm details offset (remembered from previous, as varint uint64) + - write location details offset (remembered from previous, as varint uint64) + - write length of encoded roaring bitmap + - write the serialized roaring bitmap data + +## dictionary + +- for each field + - preparation phase: + - encode vellum FST with dictionary data pointing to file offset of posting list (remembered from previous) + - file writing phase: + - remember the start position of this persistDictionary + - write length of vellum data (varint uint64) + - write out vellum data + +## fields section + +- for each field + - file writing phase: + - remember start offset for each field + - write dictionary address (remembered from previous) (varint uint64) + - write length of field name (varint uint64) + - write field name bytes + +## fields idx + +- for each field + - file writing phase: + - write big endian uint64 of start offset for each field + +NOTE: currently we don't know or record the length of this fields index. Instead we rely on the fact that we know it immediately precedes a footer of known size. 
+ +## fields DocValue + +- for each field + - preparation phase: + - produce a slice containing multiple consecutive chunks, where each chunk is composed of a meta section followed by compressed columnar field data + - produce a slice remembering the length of each chunk + - file writing phase: + - remember the start position of this first field DocValue offset in the footer + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +NOTE: currently the meta header inside each chunk gives clue to the location offsets and size of the data pertaining to a given docID and any +read operation leverage that meta information to extract the document specific data from the file. + +## footer + +- file writing phase + - write number of docs (big endian uint64) + - write stored field index location (big endian uint64) + - write field index location (big endian uint64) + - write field docValue location (big endian uint64) + - write out chunk factor (big endian uint32) + - write out version (big endian uint32) + - write out file CRC of everything preceding this (big endian uint32) diff --git a/vendor/github.com/blevesearch/zap/v14/build.go b/vendor/github.com/blevesearch/zap/v14/build.go new file mode 100644 index 0000000..7a8dce0 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/build.go @@ -0,0 +1,156 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bufio" + "math" + "os" + + "github.com/couchbase/vellum" +) + +const Version uint32 = 14 + +const Type string = "zap" + +const fieldNotUninverted = math.MaxUint64 + +func (sb *SegmentBase) Persist(path string) error { + return PersistSegmentBase(sb, path) +} + +// PersistSegmentBase persists SegmentBase in the zap file format. +func PersistSegmentBase(sb *SegmentBase, path string) error { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + br := bufio.NewWriter(f) + + _, err = br.Write(sb.mem) + if err != nil { + cleanup() + return err + } + + err = persistFooter(sb.numDocs, sb.storedIndexOffset, sb.fieldsIndexOffset, sb.docValueOffset, + sb.chunkMode, sb.memCRC, br) + if err != nil { + cleanup() + return err + } + + err = br.Flush() + if err != nil { + cleanup() + return err + } + + err = f.Sync() + if err != nil { + cleanup() + return err + } + + err = f.Close() + if err != nil { + cleanup() + return err + } + + return nil +} + +func persistStoredFieldValues(fieldID int, + storedFieldValues [][]byte, stf []byte, spf [][]uint64, + curr int, metaEncode varintEncoder, data []byte) ( + int, []byte, error) { + for i := 0; i < len(storedFieldValues); i++ { + // encode field + _, err := metaEncode(uint64(fieldID)) + if err != nil { + return 0, nil, err + } + // encode type + _, err = metaEncode(uint64(stf[i])) + if err != nil { + return 0, nil, err + } + // encode start offset + _, err = metaEncode(uint64(curr)) + if err != nil { + return 0, nil, err + } + // end len + _, err = metaEncode(uint64(len(storedFieldValues[i]))) + if err != nil { + return 0, nil, err + } + // encode number of array pos + _, err = metaEncode(uint64(len(spf[i]))) + if err != nil { + return 0, nil, err + } + // encode all array 
positions + for _, pos := range spf[i] { + _, err = metaEncode(pos) + if err != nil { + return 0, nil, err + } + } + + data = append(data, storedFieldValues[i]...) + curr += len(storedFieldValues[i]) + } + + return curr, data, nil +} + +func InitSegmentBase(mem []byte, memCRC uint32, chunkMode uint32, + fieldsMap map[string]uint16, fieldsInv []string, numDocs uint64, + storedIndexOffset uint64, fieldsIndexOffset uint64, docValueOffset uint64, + dictLocs []uint64) (*SegmentBase, error) { + sb := &SegmentBase{ + mem: mem, + memCRC: memCRC, + chunkMode: chunkMode, + fieldsMap: fieldsMap, + fieldsInv: fieldsInv, + numDocs: numDocs, + storedIndexOffset: storedIndexOffset, + fieldsIndexOffset: fieldsIndexOffset, + docValueOffset: docValueOffset, + dictLocs: dictLocs, + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + } + sb.updateSize() + + err := sb.loadDvReaders() + if err != nil { + return nil, err + } + + return sb, nil +} diff --git a/vendor/github.com/blevesearch/zap/v14/chunk.go b/vendor/github.com/blevesearch/zap/v14/chunk.go new file mode 100644 index 0000000..4307d0e --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/chunk.go @@ -0,0 +1,67 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "fmt" +) + +// LegacyChunkMode was the original chunk mode (always chunk size 1024) +// this mode is still used for chunking doc values. 
+var LegacyChunkMode uint32 = 1024 + +// DefaultChunkMode is the most recent improvement to chunking and should +// be used by default. +var DefaultChunkMode uint32 = 1026 + +func getChunkSize(chunkMode uint32, cardinality uint64, maxDocs uint64) (uint64, error) { + switch { + // any chunkMode <= 1024 will always chunk with chunkSize=chunkMode + case chunkMode <= 1024: + // legacy chunk size + return uint64(chunkMode), nil + + case chunkMode == 1025: + // attempt at simple improvement + // theory - the point of chunking is to put a bound on the maximum number of + // calls to Next() needed to find a random document. ie, you should be able + // to do one jump to the correct chunk, and then walk through at most + // chunk-size items + // previously 1024 was chosen as the chunk size, but this is particularly + // wasteful for low cardinality terms. the observation is that if there + // are less than 1024 items, why not put them all in one chunk, + // this way you'll still achieve the same goal of visiting at most + // chunk-size items. + // no attempt is made to tweak any other case + if cardinality <= 1024 { + return maxDocs, nil + } + return 1024, nil + + case chunkMode == 1026: + // improve upon the ideas tested in chunkMode 1025 + // the observation that the fewest number of dense chunks is the most + // desirable layout, given the built-in assumptions of chunking + // (that we want to put an upper-bound on the number of items you must + // walk over without skipping, currently tuned to 1024) + // + // 1. compute the number of chunks needed (max 1024/chunk) + // 2. 
convert to chunkSize, dividing into maxDocs + numChunks := (cardinality / 1024) + 1 + chunkSize := maxDocs / numChunks + return chunkSize, nil + } + return 0, fmt.Errorf("unknown chunk mode %d", chunkMode) +} diff --git a/vendor/github.com/blevesearch/zap/v14/contentcoder.go b/vendor/github.com/blevesearch/zap/v14/contentcoder.go new file mode 100644 index 0000000..c145b5a --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/contentcoder.go @@ -0,0 +1,243 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "reflect" + + "github.com/golang/snappy" +) + +var reflectStaticSizeMetaData int + +func init() { + var md MetaData + reflectStaticSizeMetaData = int(reflect.TypeOf(md).Size()) +} + +var termSeparator byte = 0xff +var termSeparatorSplitSlice = []byte{termSeparator} + +type chunkedContentCoder struct { + final []byte + chunkSize uint64 + currChunk uint64 + chunkLens []uint64 + + w io.Writer + progressiveWrite bool + + chunkMetaBuf bytes.Buffer + chunkBuf bytes.Buffer + + chunkMeta []MetaData + + compressed []byte // temp buf for snappy compression +} + +// MetaData represents the data information inside a +// chunk. 
+type MetaData struct { + DocNum uint64 // docNum of the data inside the chunk + DocDvOffset uint64 // offset of data inside the chunk for the given docid +} + +// newChunkedContentCoder returns a new chunk content coder which +// packs data into chunks based on the provided chunkSize +func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, + w io.Writer, progressiveWrite bool) *chunkedContentCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedContentCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + chunkMeta: make([]MetaData, 0, total), + w: w, + progressiveWrite: progressiveWrite, + } + + return rv +} + +// Reset lets you reuse this chunked content coder. Buffers are reset +// and re used. You cannot change the chunk size. +func (c *chunkedContentCoder) Reset() { + c.currChunk = 0 + c.final = c.final[:0] + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } + c.chunkMeta = c.chunkMeta[:0] +} + +func (c *chunkedContentCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) { + total := int(maxDocNum/chunkSize + 1) + c.chunkSize = chunkSize + if cap(c.chunkLens) < total { + c.chunkLens = make([]uint64, total) + } else { + c.chunkLens = c.chunkLens[:total] + } + if cap(c.chunkMeta) < total { + c.chunkMeta = make([]MetaData, 0, total) + } +} + +// Close indicates you are done calling Add() this allows +// the final chunk to be encoded. 
+func (c *chunkedContentCoder) Close() error { + return c.flushContents() +} + +func (c *chunkedContentCoder) flushContents() error { + // flush the contents, with meta information at first + buf := make([]byte, binary.MaxVarintLen64) + n := binary.PutUvarint(buf, uint64(len(c.chunkMeta))) + _, err := c.chunkMetaBuf.Write(buf[:n]) + if err != nil { + return err + } + + // write out the metaData slice + for _, meta := range c.chunkMeta { + _, err := writeUvarints(&c.chunkMetaBuf, meta.DocNum, meta.DocDvOffset) + if err != nil { + return err + } + } + + // write the metadata to final data + metaData := c.chunkMetaBuf.Bytes() + c.final = append(c.final, c.chunkMetaBuf.Bytes()...) + // write the compressed data to the final data + c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes()) + c.final = append(c.final, c.compressed...) + + c.chunkLens[c.currChunk] = uint64(len(c.compressed) + len(metaData)) + + if c.progressiveWrite { + _, err := c.w.Write(c.final) + if err != nil { + return err + } + c.final = c.final[:0] + } + + return nil +} + +// Add encodes the provided byte slice into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. +func (c *chunkedContentCoder) Add(docNum uint64, vals []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // flush out the previous chunk details + err := c.flushContents() + if err != nil { + return err + } + // clearing the chunk specific meta for next chunk + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + c.chunkMeta = c.chunkMeta[:0] + c.currChunk = chunk + } + + // get the starting offset for this doc + dvOffset := c.chunkBuf.Len() + dvSize, err := c.chunkBuf.Write(vals) + if err != nil { + return err + } + + c.chunkMeta = append(c.chunkMeta, MetaData{ + DocNum: docNum, + DocDvOffset: uint64(dvOffset + dvSize), + }) + return nil +} + +// Write commits all the encoded chunked contents to the provided writer. +// +// | ..... data ..... 
| chunk offsets (varints) +// | position of chunk offsets (uint64) | number of offsets (uint64) | +// +func (c *chunkedContentCoder) Write() (int, error) { + var tw int + + if c.final != nil { + // write out the data section first + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsStart := uint64(tw) + + if cap(c.final) < binary.MaxVarintLen64 { + c.final = make([]byte, binary.MaxVarintLen64) + } else { + c.final = c.final[0:binary.MaxVarintLen64] + } + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + // write out the chunk offsets + for _, chunkOffset := range chunkOffsets { + n := binary.PutUvarint(c.final, chunkOffset) + nw, err := c.w.Write(c.final[:n]) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsLen := uint64(tw) - chunkOffsetsStart + + c.final = c.final[0:8] + // write out the length of chunk offsets + binary.BigEndian.PutUint64(c.final, chunkOffsetsLen) + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + // write out the number of chunks + binary.BigEndian.PutUint64(c.final, uint64(len(c.chunkLens))) + nw, err = c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + c.final = c.final[:0] + + return tw, nil +} + +// ReadDocValueBoundary elicits the start, end offsets from a +// metaData header slice +func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = metaHeaders[chunk-1].DocDvOffset + } + return start, metaHeaders[chunk].DocDvOffset +} diff --git a/vendor/github.com/blevesearch/zap/v14/count.go b/vendor/github.com/blevesearch/zap/v14/count.go new file mode 100644 index 0000000..50290f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/count.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "hash/crc32" + "io" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// CountHashWriter is a wrapper around a Writer which counts the number of +// bytes which have been written and computes a crc32 hash +type CountHashWriter struct { + w io.Writer + crc uint32 + n int + s segment.StatsReporter +} + +// NewCountHashWriter returns a CountHashWriter which wraps the provided Writer +func NewCountHashWriter(w io.Writer) *CountHashWriter { + return &CountHashWriter{w: w} +} + +func NewCountHashWriterWithStatsReporter(w io.Writer, s segment.StatsReporter) *CountHashWriter { + return &CountHashWriter{w: w, s: s} +} + +// Write writes the provided bytes to the wrapped writer and counts the bytes +func (c *CountHashWriter) Write(b []byte) (int, error) { + n, err := c.w.Write(b) + c.crc = crc32.Update(c.crc, crc32.IEEETable, b[:n]) + c.n += n + if c.s != nil { + c.s.ReportBytesWritten(uint64(n)) + } + return n, err +} + +// Count returns the number of bytes written +func (c *CountHashWriter) Count() int { + return c.n +} + +// Sum32 returns the CRC-32 hash of the content written to this writer +func (c *CountHashWriter) Sum32() uint32 { + return c.crc +} diff --git a/vendor/github.com/blevesearch/zap/v14/dict.go b/vendor/github.com/blevesearch/zap/v14/dict.go new file mode 100644 index 0000000..ad4a8f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/dict.go @@ -0,0 +1,263 @@ +// Copyright (c) 2017 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" +) + +// Dictionary is the zap representation of the term dictionary +type Dictionary struct { + sb *SegmentBase + field string + fieldID uint16 + fst *vellum.FST + fstReader *vellum.Reader +} + +// PostingsList returns the postings list for the specified term +func (d *Dictionary) PostingsList(term []byte, except *roaring.Bitmap, + prealloc segment.PostingsList) (segment.PostingsList, error) { + var preallocPL *PostingsList + pl, ok := prealloc.(*PostingsList) + if ok && pl != nil { + preallocPL = pl + } + return d.postingsList(term, except, preallocPL) +} + +func (d *Dictionary) postingsList(term []byte, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + if d.fstReader == nil { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + postingsOffset, exists, err := d.fstReader.Get(term) + if err != nil { + return nil, fmt.Errorf("vellum err: %v", err) + } + if !exists { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + return d.postingsListFromOffset(postingsOffset, except, rv) +} + +func (d *Dictionary) 
postingsListFromOffset(postingsOffset uint64, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + rv = d.postingsListInit(rv, except) + + err := rv.read(postingsOffset, d) + if err != nil { + return nil, err + } + + return rv, nil +} + +func (d *Dictionary) postingsListInit(rv *PostingsList, except *roaring.Bitmap) *PostingsList { + if rv == nil || rv == emptyPostingsList { + rv = &PostingsList{} + } else { + postings := rv.postings + if postings != nil { + postings.Clear() + } + + *rv = PostingsList{} // clear the struct + + rv.postings = postings + } + rv.sb = d.sb + rv.except = except + return rv +} + +func (d *Dictionary) Contains(key []byte) (bool, error) { + return d.fst.Contains(key) +} + +// Iterator returns an iterator for this dictionary +func (d *Dictionary) Iterator() segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Iterator(nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// PrefixIterator returns an iterator which only visits terms having the +// the specified prefix +func (d *Dictionary) PrefixIterator(prefix string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + kBeg := []byte(prefix) + kEnd := segment.IncrementBytes(kBeg) + + if d.fst != nil { + itr, err := d.fst.Iterator(kBeg, kEnd) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// RangeIterator returns an iterator which only visits terms between the +// start and end terms. NOTE: bleve.index API specifies the end is inclusive. 
+func (d *Dictionary) RangeIterator(start, end string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + // need to increment the end position to be inclusive + var endBytes []byte + if len(end) > 0 { + endBytes = []byte(end) + if endBytes[len(endBytes)-1] < 0xff { + endBytes[len(endBytes)-1]++ + } else { + endBytes = append(endBytes, 0xff) + } + } + + if d.fst != nil { + itr, err := d.fst.Iterator([]byte(start), endBytes) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// AutomatonIterator returns an iterator which only visits terms +// having the the vellum automaton and start/end key range +func (d *Dictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Search(a, startKeyInclusive, endKeyExclusive) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +func (d *Dictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) segment.DictionaryIterator { + + rv := &DictionaryIterator{ + d: d, + omitCount: !includeCount, + } + + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + rv.err = err + return rv + } + for _, term := range onlyTerms { + err = builder.Insert(term, 0) + if err != nil { + rv.err = err + return rv + } + } + err = builder.Close() + if err != nil { + rv.err = err + return rv + } + + onlyFST, err := vellum.Load(buf.Bytes()) + if err != nil { + rv.err = err + return rv + } + + itr, err := d.fst.Search(onlyFST, nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + + return rv +} + +// DictionaryIterator is an iterator for term dictionary +type DictionaryIterator struct { + d *Dictionary + itr vellum.Iterator + err error + tmp PostingsList + entry 
index.DictEntry + omitCount bool +} + +// Next returns the next entry in the dictionary +func (i *DictionaryIterator) Next() (*index.DictEntry, error) { + if i.err != nil && i.err != vellum.ErrIteratorDone { + return nil, i.err + } else if i.itr == nil || i.err == vellum.ErrIteratorDone { + return nil, nil + } + term, postingsOffset := i.itr.Current() + i.entry.Term = string(term) + if !i.omitCount { + i.err = i.tmp.read(postingsOffset, i.d) + if i.err != nil { + return nil, i.err + } + i.entry.Count = i.tmp.Count() + } + i.err = i.itr.Next() + return &i.entry, nil +} diff --git a/vendor/github.com/blevesearch/zap/v14/docvalues.go b/vendor/github.com/blevesearch/zap/v14/docvalues.go new file mode 100644 index 0000000..793797b --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/docvalues.go @@ -0,0 +1,312 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/golang/snappy" +) + +var reflectStaticSizedocValueReader int + +func init() { + var dvi docValueReader + reflectStaticSizedocValueReader = int(reflect.TypeOf(dvi).Size()) +} + +type docNumTermsVisitor func(docNum uint64, terms []byte) error + +type docVisitState struct { + dvrs map[uint16]*docValueReader + segment *SegmentBase +} + +type docValueReader struct { + field string + curChunkNum uint64 + chunkOffsets []uint64 + dvDataLoc uint64 + curChunkHeader []MetaData + curChunkData []byte // compressed data cache + uncompressed []byte // temp buf for snappy decompression +} + +func (di *docValueReader) size() int { + return reflectStaticSizedocValueReader + size.SizeOfPtr + + len(di.field) + + len(di.chunkOffsets)*size.SizeOfUint64 + + len(di.curChunkHeader)*reflectStaticSizeMetaData + + len(di.curChunkData) +} + +func (di *docValueReader) cloneInto(rv *docValueReader) *docValueReader { + if rv == nil { + rv = &docValueReader{} + } + + rv.field = di.field + rv.curChunkNum = math.MaxUint64 + rv.chunkOffsets = di.chunkOffsets // immutable, so it's sharable + rv.dvDataLoc = di.dvDataLoc + rv.curChunkHeader = rv.curChunkHeader[:0] + rv.curChunkData = nil + rv.uncompressed = rv.uncompressed[:0] + + return rv +} + +func (di *docValueReader) curChunkNumber() uint64 { + return di.curChunkNum +} + +func (s *SegmentBase) loadFieldDocValueReader(field string, + fieldDvLocStart, fieldDvLocEnd uint64) (*docValueReader, error) { + // get the docValue offset for the given fields + if fieldDvLocStart == fieldNotUninverted { + // no docValues found, nothing to do + return nil, nil + } + + // read the number of chunks, and chunk offsets position + var numChunks, chunkOffsetsPosition uint64 + + if fieldDvLocEnd-fieldDvLocStart > 16 { + numChunks = 
binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-8 : fieldDvLocEnd]) + // read the length of chunk offsets + chunkOffsetsLen := binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-16 : fieldDvLocEnd-8]) + // acquire position of chunk offsets + chunkOffsetsPosition = (fieldDvLocEnd - 16) - chunkOffsetsLen + } else { + return nil, fmt.Errorf("loadFieldDocValueReader: fieldDvLoc too small: %d-%d", fieldDvLocEnd, fieldDvLocStart) + } + + fdvIter := &docValueReader{ + curChunkNum: math.MaxUint64, + field: field, + chunkOffsets: make([]uint64, int(numChunks)), + } + + // read the chunk offsets + var offset uint64 + for i := 0; i < int(numChunks); i++ { + loc, read := binary.Uvarint(s.mem[chunkOffsetsPosition+offset : chunkOffsetsPosition+offset+binary.MaxVarintLen64]) + if read <= 0 { + return nil, fmt.Errorf("corrupted chunk offset during segment load") + } + fdvIter.chunkOffsets[i] = loc + offset += uint64(read) + } + + // set the data offset + fdvIter.dvDataLoc = fieldDvLocStart + + return fdvIter, nil +} + +func (di *docValueReader) loadDvChunk(chunkNumber uint64, s *SegmentBase) error { + // advance to the chunk where the docValues + // reside for the given docNum + destChunkDataLoc, curChunkEnd := di.dvDataLoc, di.dvDataLoc + start, end := readChunkBoundary(int(chunkNumber), di.chunkOffsets) + if start >= end { + di.curChunkHeader = di.curChunkHeader[:0] + di.curChunkData = nil + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil + } + + destChunkDataLoc += start + curChunkEnd += end + + // read the number of docs reside in the chunk + numDocs, read := binary.Uvarint(s.mem[destChunkDataLoc : destChunkDataLoc+binary.MaxVarintLen64]) + if read <= 0 { + return fmt.Errorf("failed to read the chunk") + } + chunkMetaLoc := destChunkDataLoc + uint64(read) + + offset := uint64(0) + if cap(di.curChunkHeader) < int(numDocs) { + di.curChunkHeader = make([]MetaData, int(numDocs)) + } else { + di.curChunkHeader = di.curChunkHeader[:int(numDocs)] + } + for i := 
0; i < int(numDocs); i++ { + di.curChunkHeader[i].DocNum, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + di.curChunkHeader[i].DocDvOffset, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + } + + compressedDataLoc := chunkMetaLoc + offset + dataLength := curChunkEnd - compressedDataLoc + di.curChunkData = s.mem[compressedDataLoc : compressedDataLoc+dataLength] + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil +} + +func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTermsVisitor) error { + for i := 0; i < len(di.chunkOffsets); i++ { + err := di.loadDvChunk(uint64(i), s) + if err != nil { + return err + } + if di.curChunkData == nil || len(di.curChunkHeader) == 0 { + continue + } + + // uncompress the already loaded data + uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + + start := uint64(0) + for _, entry := range di.curChunkHeader { + err = visitor(entry.DocNum, uncompressed[start:entry.DocDvOffset]) + if err != nil { + return err + } + + start = entry.DocDvOffset + } + } + + return nil +} + +func (di *docValueReader) visitDocValues(docNum uint64, + visitor index.DocumentFieldTermVisitor) error { + // binary search the term locations for the docNum + start, end := di.getDocValueLocs(docNum) + if start == math.MaxUint64 || end == math.MaxUint64 || start == end { + return nil + } + + var uncompressed []byte + var err error + // use the uncompressed copy if available + if len(di.uncompressed) > 0 { + uncompressed = di.uncompressed + } else { + // uncompress the already loaded data + uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + } + + // pick the 
terms for the given docNum + uncompressed = uncompressed[start:end] + for { + i := bytes.Index(uncompressed, termSeparatorSplitSlice) + if i < 0 { + break + } + + visitor(di.field, uncompressed[0:i]) + uncompressed = uncompressed[i+1:] + } + + return nil +} + +func (di *docValueReader) getDocValueLocs(docNum uint64) (uint64, uint64) { + i := sort.Search(len(di.curChunkHeader), func(i int) bool { + return di.curChunkHeader[i].DocNum >= docNum + }) + if i < len(di.curChunkHeader) && di.curChunkHeader[i].DocNum == docNum { + return ReadDocValueBoundary(i, di.curChunkHeader) + } + return math.MaxUint64, math.MaxUint64 +} + +// VisitDocumentFieldTerms is an implementation of the +// DocumentFieldTermVisitable interface +func (s *SegmentBase) VisitDocumentFieldTerms(localDocNum uint64, fields []string, + visitor index.DocumentFieldTermVisitor, dvsIn segment.DocVisitState) ( + segment.DocVisitState, error) { + dvs, ok := dvsIn.(*docVisitState) + if !ok || dvs == nil { + dvs = &docVisitState{} + } else { + if dvs.segment != s { + dvs.segment = s + dvs.dvrs = nil + } + } + + var fieldIDPlus1 uint16 + if dvs.dvrs == nil { + dvs.dvrs = make(map[uint16]*docValueReader, len(fields)) + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvIter, exists := s.fieldDvReaders[fieldID]; exists && + dvIter != nil { + dvs.dvrs[fieldID] = dvIter.cloneInto(dvs.dvrs[fieldID]) + } + } + } + + // find the chunkNumber where the docValues are stored + // NOTE: doc values continue to use legacy chunk mode + chunkFactor, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, err + } + docInChunk := localDocNum / chunkFactor + var dvr *docValueReader + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvr, ok = dvs.dvrs[fieldID]; ok && dvr != nil { + // check if the chunk is already loaded + if docInChunk != 
dvr.curChunkNumber() { + err := dvr.loadDvChunk(docInChunk, s) + if err != nil { + return dvs, err + } + } + + _ = dvr.visitDocValues(localDocNum, visitor) + } + } + return dvs, nil +} + +// VisitableDocValueFields returns the list of fields with +// persisted doc value terms ready to be visitable using the +// VisitDocumentFieldTerms method. +func (s *SegmentBase) VisitableDocValueFields() ([]string, error) { + return s.fieldDvNames, nil +} diff --git a/vendor/github.com/blevesearch/zap/v14/enumerator.go b/vendor/github.com/blevesearch/zap/v14/enumerator.go new file mode 100644 index 0000000..bc5b7e6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/enumerator.go @@ -0,0 +1,138 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + + "github.com/couchbase/vellum" +) + +// enumerator provides an ordered traversal of multiple vellum +// iterators. Like JOIN of iterators, the enumerator produces a +// sequence of (key, iteratorIndex, value) tuples, sorted by key ASC, +// then iteratorIndex ASC, where the same key might be seen or +// repeated across multiple child iterators. 
+type enumerator struct {
+	itrs   []vellum.Iterator
+	currKs [][]byte
+	currVs []uint64
+
+	lowK    []byte
+	lowIdxs []int
+	lowCurr int
+}
+
+// newEnumerator returns a new enumerator over the vellum Iterators
+func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) {
+	rv := &enumerator{
+		itrs:    itrs,
+		currKs:  make([][]byte, len(itrs)),
+		currVs:  make([]uint64, len(itrs)),
+		lowIdxs: make([]int, 0, len(itrs)),
+	}
+	for i, itr := range rv.itrs {
+		rv.currKs[i], rv.currVs[i] = itr.Current()
+	}
+	rv.updateMatches(false) // false: empty keys are kept on the initial pass
+	if rv.lowK == nil && len(rv.lowIdxs) == 0 {
+		return rv, vellum.ErrIteratorDone
+	}
+	return rv, nil
+}
+
+// updateMatches maintains the low key matches based on the currKs
+func (m *enumerator) updateMatches(skipEmptyKey bool) {
+	m.lowK = nil
+	m.lowIdxs = m.lowIdxs[:0]
+	m.lowCurr = 0
+
+	for i, key := range m.currKs {
+		if (key == nil && m.currVs[i] == 0) || // in case of empty iterator
+			(len(key) == 0 && skipEmptyKey) { // skip empty keys
+			continue
+		}
+
+		cmp := bytes.Compare(key, m.lowK) // nil lowK compares less than any key, hence the len check below
+		if cmp < 0 || len(m.lowIdxs) == 0 {
+			// reached a new low
+			m.lowK = key
+			m.lowIdxs = m.lowIdxs[:0]
+			m.lowIdxs = append(m.lowIdxs, i)
+		} else if cmp == 0 {
+			m.lowIdxs = append(m.lowIdxs, i)
+		}
+	}
+}
+
+// Current returns the enumerator's current key, iterator-index, and
+// value. If the enumerator is not pointing at a valid value (because
+// Next returned an error previously), Current will return nil,0,0.
+func (m *enumerator) Current() ([]byte, int, uint64) {
+	var i int
+	var v uint64
+	if m.lowCurr < len(m.lowIdxs) {
+		i = m.lowIdxs[m.lowCurr]
+		v = m.currVs[i]
+	}
+	return m.lowK, i, v
+}
+
+// GetLowIdxsAndValues will return all of the iterator indices
+// which point to the current key, and their corresponding
+// values. This can be used by advanced caller which may need
+// to peek into these other sets of data before processing.
+func (m *enumerator) GetLowIdxsAndValues() ([]int, []uint64) {
+	values := make([]uint64, 0, len(m.lowIdxs))
+	for _, idx := range m.lowIdxs {
+		values = append(values, m.currVs[idx])
+	}
+	return m.lowIdxs, values
+}
+
+// Next advances the enumerator to the next key/iterator/value result,
+// else vellum.ErrIteratorDone is returned.
+func (m *enumerator) Next() error {
+	m.lowCurr += 1 // first consume any remaining iterators tied on the current low key
+	if m.lowCurr >= len(m.lowIdxs) {
+		// move all the current low iterators forwards
+		for _, vi := range m.lowIdxs {
+			err := m.itrs[vi].Next()
+			if err != nil && err != vellum.ErrIteratorDone { // an exhausted child is not an error for the join
+				return err
+			}
+			m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current()
+		}
+		// can skip any empty keys encountered at this point
+		m.updateMatches(true)
+	}
+	if m.lowK == nil && len(m.lowIdxs) == 0 {
+		return vellum.ErrIteratorDone
+	}
+	return nil
+}
+
+// Close all the underlying Iterators. The first error, if any, will
+// be returned.
+func (m *enumerator) Close() error {
+	var rv error
+	for _, itr := range m.itrs {
+		err := itr.Close()
+		if rv == nil { // keep only the first error; still close every iterator
+			rv = err
+		}
+	}
+	return rv
+}
diff --git a/vendor/github.com/blevesearch/zap/v14/intDecoder.go b/vendor/github.com/blevesearch/zap/v14/intDecoder.go
new file mode 100644
index 0000000..3baa0c2
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v14/intDecoder.go
@@ -0,0 +1,118 @@
+// Copyright (c) 2019 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package zap
+
+import (
+	"encoding/binary"
+	"fmt"
+
+	"github.com/blevesearch/bleve/index/scorch/segment"
+)
+
+type chunkedIntDecoder struct {
+	startOffset     uint64
+	dataStartOffset uint64
+	chunkOffsets    []uint64
+	curChunkBytes   []byte
+	data            []byte
+	r               *segment.MemUvarintReader
+}
+
+// newChunkedIntDecoder expects an optional or reset chunkedIntDecoder for better reuse.
+func newChunkedIntDecoder(buf []byte, offset uint64, rv *chunkedIntDecoder) *chunkedIntDecoder {
+	if rv == nil {
+		rv = &chunkedIntDecoder{startOffset: offset, data: buf}
+	} else {
+		rv.startOffset = offset
+		rv.data = buf
+	}
+
+	var n, numChunks uint64
+	var read int
+	if offset == termNotEncoded { // see intcoder.go: 0 marks a term with no encoded data
+		numChunks = 0
+	} else {
+		numChunks, read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64])
+	}
+
+	n += uint64(read)
+	if cap(rv.chunkOffsets) >= int(numChunks) { // reuse prior allocation when large enough
+		rv.chunkOffsets = rv.chunkOffsets[:int(numChunks)]
+	} else {
+		rv.chunkOffsets = make([]uint64, int(numChunks))
+	}
+	for i := 0; i < int(numChunks); i++ {
+		rv.chunkOffsets[i], read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64])
+		n += uint64(read)
+	}
+	rv.dataStartOffset = offset + n
+	return rv
+}
+
+func (d *chunkedIntDecoder) loadChunk(chunk int) error {
+	if d.startOffset == termNotEncoded {
+		d.r = segment.NewMemUvarintReader([]byte(nil))
+		return nil
+	}
+
+	if chunk >= len(d.chunkOffsets) {
+		return fmt.Errorf("tried to load freq chunk that doesn't exist %d/(%d)",
+			chunk, len(d.chunkOffsets))
+	}
+
+	end, start := d.dataStartOffset, d.dataStartOffset // both relative to the start of the data section
+	s, e := readChunkBoundary(chunk, d.chunkOffsets)
+	start += s
+	end += e
+	d.curChunkBytes = d.data[start:end]
+	if d.r == nil {
+		d.r = segment.NewMemUvarintReader(d.curChunkBytes)
+	} else {
+		d.r.Reset(d.curChunkBytes)
+	}
+
+	return nil
+}
+
+func (d *chunkedIntDecoder) reset() {
+	d.startOffset = 0
+	d.dataStartOffset = 0
+	d.chunkOffsets = d.chunkOffsets[:0]
+	d.curChunkBytes = d.curChunkBytes[:0]
+	d.data = d.data[:0]
+	if d.r != nil {
+		d.r.Reset([]byte(nil))
+	}
+}
+
+func (d *chunkedIntDecoder) isNil() bool {
+	return d.curChunkBytes == nil || len(d.curChunkBytes) == 0 // NOTE(review): len(nil) == 0, so the nil check is redundant
+}
+
+func (d *chunkedIntDecoder) readUvarint() (uint64, error) {
+	return d.r.ReadUvarint()
+}
+
+func (d *chunkedIntDecoder) SkipUvarint() {
+	d.r.SkipUvarint()
+}
+
+func (d *chunkedIntDecoder) SkipBytes(count int) {
+	d.r.SkipBytes(count)
+}
+
+func (d *chunkedIntDecoder) Len() int {
+	return d.r.Len()
+}
diff --git a/vendor/github.com/blevesearch/zap/v14/intcoder.go b/vendor/github.com/blevesearch/zap/v14/intcoder.go
new file mode 100644
index 0000000..c3c488f
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v14/intcoder.go
@@ -0,0 +1,206 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package zap
+
+import (
+	"bytes"
+	"encoding/binary"
+	"io"
+)
+
+// We can safely use 0 to represent termNotEncoded since 0
+// could never be a valid address for term location information.
+// (stored field index is always non-empty and earlier in the
+// file)
+const termNotEncoded = 0
+
+type chunkedIntCoder struct {
+	final     []byte
+	chunkSize uint64
+	chunkBuf  bytes.Buffer
+	chunkLens []uint64
+	currChunk uint64
+
+	buf []byte
+}
+
+// newChunkedIntCoder returns a new chunk int coder which packs data into
+// chunks based on the provided chunkSize and supports up to the specified
+// maxDocNum
+func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder {
+	total := maxDocNum/chunkSize + 1 // one length slot per chunk, inclusive of maxDocNum's chunk
+	rv := &chunkedIntCoder{
+		chunkSize: chunkSize,
+		chunkLens: make([]uint64, total),
+		final:     make([]byte, 0, 64),
+	}
+
+	return rv
+}
+
+// Reset lets you reuse this chunked int coder. buffers are reset and reused
+// from previous use. you cannot change the chunk size or max doc num.
+func (c *chunkedIntCoder) Reset() {
+	c.final = c.final[:0]
+	c.chunkBuf.Reset()
+	c.currChunk = 0
+	for i := range c.chunkLens {
+		c.chunkLens[i] = 0
+	}
+}
+
+// SetChunkSize changes the chunk size.  It is only valid to do so
+// with a new chunkedIntCoder, or immediately after calling Reset()
+func (c *chunkedIntCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) {
+	total := int(maxDocNum/chunkSize + 1)
+	c.chunkSize = chunkSize
+	if cap(c.chunkLens) < total {
+		c.chunkLens = make([]uint64, total)
+	} else {
+		c.chunkLens = c.chunkLens[:total]
+	}
+}
+
+// Add encodes the provided integers into the correct chunk for the provided
+// doc num.  You MUST call Add() with increasing docNums.
+func (c *chunkedIntCoder) Add(docNum uint64, vals ...uint64) error {
+	chunk := docNum / c.chunkSize
+	if chunk != c.currChunk {
+		// starting a new chunk
+		c.Close() // flushes the previous chunk's bytes into c.final and records its length
+		c.chunkBuf.Reset()
+		c.currChunk = chunk
+	}
+
+	if len(c.buf) < binary.MaxVarintLen64 {
+		c.buf = make([]byte, binary.MaxVarintLen64)
+	}
+
+	for _, val := range vals {
+		wb := binary.PutUvarint(c.buf, val)
+		_, err := c.chunkBuf.Write(c.buf[:wb])
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (c *chunkedIntCoder) AddBytes(docNum uint64, buf []byte) error {
+	chunk := docNum / c.chunkSize
+	if chunk != c.currChunk {
+		// starting a new chunk
+		c.Close()
+		c.chunkBuf.Reset()
+		c.currChunk = chunk
+	}
+
+	_, err := c.chunkBuf.Write(buf) // buf is assumed to be pre-encoded; written verbatim
+	return err
+}
+
+// Close indicates you are done calling Add() this allows the final chunk
+// to be encoded.
+func (c *chunkedIntCoder) Close() {
+	encodingBytes := c.chunkBuf.Bytes()
+	c.chunkLens[c.currChunk] = uint64(len(encodingBytes))
+	c.final = append(c.final, encodingBytes...)
+	c.currChunk = uint64(cap(c.chunkLens)) // sentinel to detect double close
+}
+
+// Write commits all the encoded chunked integers to the provided writer.
+func (c *chunkedIntCoder) Write(w io.Writer) (int, error) {
+	bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) // worst case: chunk count + every offset at max varint width
+	if len(c.buf) < bufNeeded {
+		c.buf = make([]byte, bufNeeded)
+	}
+	buf := c.buf
+
+	// convert the chunk lengths into chunk offsets
+	chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens)
+
+	// write out the number of chunks & each chunk offsets
+	n := binary.PutUvarint(buf, uint64(len(chunkOffsets)))
+	for _, chunkOffset := range chunkOffsets {
+		n += binary.PutUvarint(buf[n:], chunkOffset)
+	}
+
+	tw, err := w.Write(buf[:n])
+	if err != nil {
+		return tw, err
+	}
+
+	// write out the data
+	nw, err := w.Write(c.final)
+	tw += nw
+	if err != nil {
+		return tw, err
+	}
+	return tw, nil
+}
+
+// writeAt commits all the encoded chunked integers to the provided writer
+// and returns the starting offset, total bytes written and an error
+func (c *chunkedIntCoder) writeAt(w io.Writer) (uint64, int, error) {
+	startOffset := uint64(termNotEncoded)
+	if len(c.final) <= 0 {
+		return startOffset, 0, nil
+	}
+
+	if chw := w.(*CountHashWriter); chw != nil { // NOTE(review): non-comma-ok assertion panics if w is not a *CountHashWriter — confirm all callers pass one
+		startOffset = uint64(chw.Count())
+	}
+
+	tw, err := c.Write(w)
+	return startOffset, tw, err
+}
+
+func (c *chunkedIntCoder) FinalSize() int {
+	return len(c.final)
+}
+
+// modifyLengthsToEndOffsets converts the chunk length array
+// to a chunk offset array. The readChunkBoundary
+// will figure out the start and end of every chunk from
+// these offsets. Starting offset of i'th index is stored
+// in i-1'th position except for 0'th index and ending offset
+// is stored at i'th index position.
+// For 0'th element, starting position is always zero.
+// eg:
+// Lens -> 5 5 5 5 => 5 10 15 20
+// Lens -> 0 5 0 5 => 0 5 5 10
+// Lens -> 0 0 0 5 => 0 0 0 5
+// Lens -> 5 0 0 0 => 5 5 5 5
+// Lens -> 0 5 0 0 => 0 5 5 5
+// Lens -> 0 0 5 0 => 0 0 5 5
+func modifyLengthsToEndOffsets(lengths []uint64) []uint64 {
+	var runningOffset uint64
+	var index, i int
+	for i = 1; i <= len(lengths); i++ {
+		runningOffset += lengths[i-1]
+		lengths[index] = runningOffset
+		index++
+	}
+	return lengths // converted in place; the input slice is mutated
+}
+
+func readChunkBoundary(chunk int, offsets []uint64) (uint64, uint64) {
+	var start uint64
+	if chunk > 0 {
+		start = offsets[chunk-1] // a chunk starts where its predecessor ends
+	}
+	return start, offsets[chunk]
+}
diff --git a/vendor/github.com/blevesearch/zap/v14/merge.go b/vendor/github.com/blevesearch/zap/v14/merge.go
new file mode 100644
index 0000000..805100f
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v14/merge.go
@@ -0,0 +1,847 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package zap + +import ( + "bufio" + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "sort" + + "github.com/RoaringBitmap/roaring" + seg "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var DefaultFileMergerBufferSize = 1024 * 1024 + +const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc + +// Merge takes a slice of segments and bit masks describing which +// documents may be dropped, and creates a new segment containing the +// remaining data. This new segment is built at the specified path. +func (*ZapPlugin) Merge(segments []seg.Segment, drops []*roaring.Bitmap, path string, + closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + + segmentBases := make([]*SegmentBase, len(segments)) + for segmenti, segment := range segments { + switch segmentx := segment.(type) { + case *Segment: + segmentBases[segmenti] = &segmentx.SegmentBase + case *SegmentBase: + segmentBases[segmenti] = segmentx + default: + panic(fmt.Sprintf("oops, unexpected segment type: %T", segment)) + } + } + return mergeSegmentBases(segmentBases, drops, path, DefaultChunkMode, closeCh, s) +} + +func mergeSegmentBases(segmentBases []*SegmentBase, drops []*roaring.Bitmap, path string, + chunkMode uint32, closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return nil, 0, err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + // buffer the output + br := bufio.NewWriterSize(f, DefaultFileMergerBufferSize) + + // wrap it for counting (tracking offsets) + cr := NewCountHashWriterWithStatsReporter(br, s) + + newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, _, _, _, err := + MergeToWriter(segmentBases, drops, chunkMode, cr, closeCh) + if err != nil { + cleanup() + return nil, 0, err + } + + err = 
persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, + docValueOffset, chunkMode, cr.Sum32(), cr) + if err != nil { + cleanup() + return nil, 0, err + } + + err = br.Flush() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Sync() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Close() + if err != nil { + cleanup() + return nil, 0, err + } + + return newDocNums, uint64(cr.Count()), nil +} + +func MergeToWriter(segments []*SegmentBase, drops []*roaring.Bitmap, + chunkMode uint32, cr *CountHashWriter, closeCh chan struct{}) ( + newDocNums [][]uint64, + numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + dictLocs []uint64, fieldsInv []string, fieldsMap map[string]uint16, + err error) { + docValueOffset = uint64(fieldNotUninverted) + + var fieldsSame bool + fieldsSame, fieldsInv = mergeFields(segments) + fieldsMap = mapFields(fieldsInv) + + numDocs = computeNewDocCount(segments, drops) + + if isClosed(closeCh) { + return nil, 0, 0, 0, 0, nil, nil, nil, seg.ErrClosed + } + + if numDocs > 0 { + storedIndexOffset, newDocNums, err = mergeStoredAndRemap(segments, drops, + fieldsMap, fieldsInv, fieldsSame, numDocs, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + dictLocs, docValueOffset, err = persistMergedRest(segments, drops, + fieldsInv, fieldsMap, fieldsSame, + newDocNums, numDocs, chunkMode, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + } else { + dictLocs = make([]uint64, len(fieldsInv)) + } + + fieldsIndexOffset, err = persistFields(fieldsInv, cr, dictLocs) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + return newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, dictLocs, fieldsInv, fieldsMap, nil +} + +// mapFields takes the fieldsInv list and returns a map of fieldName +// to fieldID+1 +func mapFields(fields []string) map[string]uint16 { + rv := make(map[string]uint16, len(fields)) + for 
i, fieldName := range fields { + rv[fieldName] = uint16(i) + 1 + } + return rv +} + +// computeNewDocCount determines how many documents will be in the newly +// merged segment when obsoleted docs are dropped +func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { + var newDocCount uint64 + for segI, segment := range segments { + newDocCount += segment.numDocs + if drops[segI] != nil { + newDocCount -= drops[segI].GetCardinality() + } + } + return newDocCount +} + +func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap, + fieldsInv []string, fieldsMap map[string]uint16, fieldsSame bool, + newDocNumsIn [][]uint64, newSegDocCount uint64, chunkMode uint32, + w *CountHashWriter, closeCh chan struct{}) ([]uint64, uint64, error) { + + var bufMaxVarintLen64 []byte = make([]byte, binary.MaxVarintLen64) + var bufLoc []uint64 + + var postings *PostingsList + var postItr *PostingsIterator + + rv := make([]uint64, len(fieldsInv)) + fieldDvLocsStart := make([]uint64, len(fieldsInv)) + fieldDvLocsEnd := make([]uint64, len(fieldsInv)) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + locEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + + var vellumBuf bytes.Buffer + newVellum, err := vellum.New(&vellumBuf, nil) + if err != nil { + return nil, 0, err + } + + newRoaring := roaring.NewBitmap() + + // for each field + for fieldID, fieldName := range fieldsInv { + + // collect FST iterators from all active segments for this field + var newDocNums [][]uint64 + var drops []*roaring.Bitmap + var dicts []*Dictionary + var itrs []vellum.Iterator + + var segmentsInFocus []*SegmentBase + + for segmentI, segment := range segments { + + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + dict, err2 := 
segment.dictionary(fieldName) + if err2 != nil { + return nil, 0, err2 + } + if dict != nil && dict.fst != nil { + itr, err2 := dict.fst.Iterator(nil, nil) + if err2 != nil && err2 != vellum.ErrIteratorDone { + return nil, 0, err2 + } + if itr != nil { + newDocNums = append(newDocNums, newDocNumsIn[segmentI]) + if dropsIn[segmentI] != nil && !dropsIn[segmentI].IsEmpty() { + drops = append(drops, dropsIn[segmentI]) + } else { + drops = append(drops, nil) + } + dicts = append(dicts, dict) + itrs = append(itrs, itr) + segmentsInFocus = append(segmentsInFocus, segment) + } + } + } + + var prevTerm []byte + + newRoaring.Clear() + + var lastDocNum, lastFreq, lastNorm uint64 + + // determines whether to use "1-hit" encoding optimization + // when a term appears in only 1 doc, with no loc info, + // has freq of 1, and the docNum fits into 31-bits + use1HitEncoding := func(termCardinality uint64) (bool, uint64, uint64) { + if termCardinality == uint64(1) && locEncoder.FinalSize() <= 0 { + docNum := uint64(newRoaring.Minimum()) + if under32Bits(docNum) && docNum == lastDocNum && lastFreq == 1 { + return true, docNum, lastNorm + } + } + return false, 0, 0 + } + + finishTerm := func(term []byte) error { + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := writePostings(newRoaring, + tfEncoder, locEncoder, use1HitEncoding, w, bufMaxVarintLen64) + if err != nil { + return err + } + + if postingsOffset > 0 { + err = newVellum.Insert(term, postingsOffset) + if err != nil { + return err + } + } + + newRoaring.Clear() + + tfEncoder.Reset() + locEncoder.Reset() + + lastDocNum = 0 + lastFreq = 0 + lastNorm = 0 + + return nil + } + + enumerator, err := newEnumerator(itrs) + + for err == nil { + term, itrI, postingsOffset := enumerator.Current() + + if !bytes.Equal(prevTerm, term) { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + // if the term changed, write out the info collected + // for the previous term + err = 
finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + } + if !bytes.Equal(prevTerm, term) || prevTerm == nil { + // compute cardinality of field-term in new seg + var newCard uint64 + lowItrIdxs, lowItrVals := enumerator.GetLowIdxsAndValues() + for i, idx := range lowItrIdxs { + pl, err := dicts[idx].postingsListFromOffset(lowItrVals[i], drops[idx], nil) + if err != nil { + return nil, 0, err + } + newCard += pl.Count() + } + // compute correct chunk size with this + chunkSize, err := getChunkSize(chunkMode, newCard, newSegDocCount) + if err != nil { + return nil, 0, err + } + // update encoders chunk + tfEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + locEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + } + + postings, err = dicts[itrI].postingsListFromOffset( + postingsOffset, drops[itrI], postings) + if err != nil { + return nil, 0, err + } + + postItr = postings.iterator(true, true, true, postItr) + + // can no longer optimize by copying, since chunk factor could have changed + lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs( + fieldsMap, term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder, bufLoc) + + if err != nil { + return nil, 0, err + } + + prevTerm = prevTerm[:0] // copy to prevTerm in case Next() reuses term mem + prevTerm = append(prevTerm, term...) 
+ + err = enumerator.Next() + } + if err != vellum.ErrIteratorDone { + return nil, 0, err + } + + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + + dictOffset := uint64(w.Count()) + + err = newVellum.Close() + if err != nil { + return nil, 0, err + } + vellumData := vellumBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(bufMaxVarintLen64, uint64(len(vellumData))) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return nil, 0, err + } + + // write this vellum to disk + _, err = w.Write(vellumData) + if err != nil { + return nil, 0, err + } + + rv[fieldID] = dictOffset + + // get the field doc value offset (start) + fieldDvLocsStart[fieldID] = uint64(w.Count()) + + // update the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, 0, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, newSegDocCount-1, w, true) + + fdvReadersAvailable := false + var dvIterClone *docValueReader + for segmentI, segment := range segmentsInFocus { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + fieldIDPlus1 := uint16(segment.fieldsMap[fieldName]) + if dvIter, exists := segment.fieldDvReaders[fieldIDPlus1-1]; exists && + dvIter != nil { + fdvReadersAvailable = true + dvIterClone = dvIter.cloneInto(dvIterClone) + err = dvIterClone.iterateAllDocValues(segment, func(docNum uint64, terms []byte) error { + if newDocNums[segmentI][docNum] == docDropped { + return nil + } + err := fdvEncoder.Add(newDocNums[segmentI][docNum], terms) + if err != nil { + return err + } + return nil + }) + if err != nil { + return nil, 0, err + } + } + } + + if fdvReadersAvailable { + err = fdvEncoder.Close() + if err != nil { + return nil, 0, err + } + + // persist the doc value details for this field + _, err = fdvEncoder.Write() + if err != nil { + return nil, 0, err + } + + 
// get the field doc value offset (end) + fieldDvLocsEnd[fieldID] = uint64(w.Count()) + } else { + fieldDvLocsStart[fieldID] = fieldNotUninverted + fieldDvLocsEnd[fieldID] = fieldNotUninverted + } + + // reset vellum buffer and vellum builder + vellumBuf.Reset() + err = newVellum.Reset(&vellumBuf) + if err != nil { + return nil, 0, err + } + } + + fieldDvLocsOffset := uint64(w.Count()) + + buf := bufMaxVarintLen64 + for i := 0; i < len(fieldDvLocsStart); i++ { + n := binary.PutUvarint(buf, fieldDvLocsStart[i]) + _, err := w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + n = binary.PutUvarint(buf, fieldDvLocsEnd[i]) + _, err = w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + } + + return rv, fieldDvLocsOffset, nil +} + +func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder, bufLoc []uint64) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, bufLocOut []uint64, err error) { + next, err := postItr.Next() + for next != nil && err == nil { + hitNewDocNum := newDocNums[next.Number()] + if hitNewDocNum == docDropped { + return 0, 0, 0, nil, fmt.Errorf("see hit with dropped docNum") + } + + newRoaring.Add(uint32(hitNewDocNum)) + + nextFreq := next.Frequency() + nextNorm := uint64(math.Float32bits(float32(next.Norm()))) + + locs := next.Locations() + + err = tfEncoder.Add(hitNewDocNum, + encodeFreqHasLocs(nextFreq, len(locs) > 0), nextNorm) + if err != nil { + return 0, 0, 0, nil, err + } + + if len(locs) > 0 { + numBytesLocs := 0 + for _, loc := range locs { + ap := loc.ArrayPositions() + numBytesLocs += totalUvarintBytes(uint64(fieldsMap[loc.Field()]-1), + loc.Pos(), loc.Start(), loc.End(), uint64(len(ap)), ap) + } + + err = locEncoder.Add(hitNewDocNum, uint64(numBytesLocs)) + if err != nil { + return 0, 0, 0, nil, err + } + + for _, loc := range locs { + ap := loc.ArrayPositions() + if 
cap(bufLoc) < 5+len(ap) { + bufLoc = make([]uint64, 0, 5+len(ap)) + } + args := bufLoc[0:5] + args[0] = uint64(fieldsMap[loc.Field()] - 1) + args[1] = loc.Pos() + args[2] = loc.Start() + args[3] = loc.End() + args[4] = uint64(len(ap)) + args = append(args, ap...) + err = locEncoder.Add(hitNewDocNum, args...) + if err != nil { + return 0, 0, 0, nil, err + } + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + next, err = postItr.Next() + } + + return lastDocNum, lastFreq, lastNorm, bufLoc, err +} + +func writePostings(postings *roaring.Bitmap, tfEncoder, locEncoder *chunkedIntCoder, + use1HitEncoding func(uint64) (bool, uint64, uint64), + w *CountHashWriter, bufMaxVarintLen64 []byte) ( + offset uint64, err error) { + termCardinality := postings.GetCardinality() + if termCardinality <= 0 { + return 0, nil + } + + if use1HitEncoding != nil { + encodeAs1Hit, docNum1Hit, normBits1Hit := use1HitEncoding(termCardinality) + if encodeAs1Hit { + return FSTValEncode1Hit(docNum1Hit, normBits1Hit), nil + } + } + + var tfOffset uint64 + tfOffset, _, err = tfEncoder.writeAt(w) + if err != nil { + return 0, err + } + + var locOffset uint64 + locOffset, _, err = locEncoder.writeAt(w) + if err != nil { + return 0, err + } + + postingsOffset := uint64(w.Count()) + + n := binary.PutUvarint(bufMaxVarintLen64, tfOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + n = binary.PutUvarint(bufMaxVarintLen64, locOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + _, err = writeRoaringWithLen(postings, w, bufMaxVarintLen64) + if err != nil { + return 0, err + } + + return postingsOffset, nil +} + +type varintEncoder func(uint64) (int, error) + +func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap, + fieldsMap map[string]uint16, fieldsInv []string, fieldsSame bool, newSegDocCount uint64, + w *CountHashWriter, closeCh chan struct{}) (uint64, [][]uint64, error) { + 
var rv [][]uint64 // The remapped or newDocNums for each segment. + + var newDocNum uint64 + + var curr int + var data, compressed []byte + var metaBuf bytes.Buffer + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return metaBuf.Write(varBuf[:wb]) + } + + vals := make([][][]byte, len(fieldsInv)) + typs := make([][]byte, len(fieldsInv)) + poss := make([][][]uint64, len(fieldsInv)) + + var posBuf []uint64 + + docNumOffsets := make([]uint64, newSegDocCount) + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + + // for each segment + for segI, segment := range segments { + // check for the closure in meantime + if isClosed(closeCh) { + return 0, nil, seg.ErrClosed + } + + segNewDocNums := make([]uint64, segment.numDocs) + + dropsI := drops[segI] + + // optimize when the field mapping is the same across all + // segments and there are no deletions, via byte-copying + // of stored docs bytes directly to the writer + if fieldsSame && (dropsI == nil || dropsI.GetCardinality() == 0) { + err := segment.copyStoredDocs(newDocNum, docNumOffsets, w) + if err != nil { + return 0, nil, err + } + + for i := uint64(0); i < segment.numDocs; i++ { + segNewDocNums[i] = newDocNum + newDocNum++ + } + rv = append(rv, segNewDocNums) + + continue + } + + // for each doc num + for docNum := uint64(0); docNum < segment.numDocs; docNum++ { + // TODO: roaring's API limits docNums to 32-bits? 
+ if dropsI != nil && dropsI.Contains(uint32(docNum)) { + segNewDocNums[docNum] = docDropped + continue + } + + segNewDocNums[docNum] = newDocNum + + curr = 0 + metaBuf.Reset() + data = data[:0] + + posTemp := posBuf + + // collect all the data + for i := 0; i < len(fieldsInv); i++ { + vals[i] = vals[i][:0] + typs[i] = typs[i][:0] + poss[i] = poss[i][:0] + } + err := segment.visitDocument(vdc, docNum, func(field string, typ byte, value []byte, pos []uint64) bool { + fieldID := int(fieldsMap[field]) - 1 + vals[fieldID] = append(vals[fieldID], value) + typs[fieldID] = append(typs[fieldID], typ) + + // copy array positions to preserve them beyond the scope of this callback + var curPos []uint64 + if len(pos) > 0 { + if cap(posTemp) < len(pos) { + posBuf = make([]uint64, len(pos)*len(fieldsInv)) + posTemp = posBuf + } + curPos = posTemp[0:len(pos)] + copy(curPos, pos) + posTemp = posTemp[len(pos):] + } + poss[fieldID] = append(poss[fieldID], curPos) + + return true + }) + if err != nil { + return 0, nil, err + } + + // _id field special case optimizes ExternalID() lookups + idFieldVal := vals[uint16(0)][0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, nil, err + } + + // now walk the non-"_id" fields in order + for fieldID := 1; fieldID < len(fieldsInv); fieldID++ { + storedFieldValues := vals[fieldID] + + stf := typs[fieldID] + spf := poss[fieldID] + + var err2 error + curr, data, err2 = persistStoredFieldValues(fieldID, + storedFieldValues, stf, spf, curr, metaEncode, data) + if err2 != nil { + return 0, nil, err2 + } + } + + metaBytes := metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + // record where we're about to start writing + docNumOffsets[newDocNum] = uint64(w.Count()) + + // write out the meta len and compressed data len + _, err = writeUvarints(w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, nil, err + } + // now write the meta + _, err 
= w.Write(metaBytes) + if err != nil { + return 0, nil, err + } + // now write the _id field val (counted as part of the 'compressed' data) + _, err = w.Write(idFieldVal) + if err != nil { + return 0, nil, err + } + // now write the compressed data + _, err = w.Write(compressed) + if err != nil { + return 0, nil, err + } + + newDocNum++ + } + + rv = append(rv, segNewDocNums) + } + + // return value is the start of the stored index + storedIndexOffset := uint64(w.Count()) + + // now write out the stored doc index + for _, docNumOffset := range docNumOffsets { + err := binary.Write(w, binary.BigEndian, docNumOffset) + if err != nil { + return 0, nil, err + } + } + + return storedIndexOffset, rv, nil +} + +// copyStoredDocs writes out a segment's stored doc info, optimized by +// using a single Write() call for the entire set of bytes. The +// newDocNumOffsets is filled with the new offsets for each doc. +func (s *SegmentBase) copyStoredDocs(newDocNum uint64, newDocNumOffsets []uint64, + w *CountHashWriter) error { + if s.numDocs <= 0 { + return nil + } + + indexOffset0, storedOffset0, _, _, _ := + s.getDocStoredOffsets(0) // the segment's first doc + + indexOffsetN, storedOffsetN, readN, metaLenN, dataLenN := + s.getDocStoredOffsets(s.numDocs - 1) // the segment's last doc + + storedOffset0New := uint64(w.Count()) + + storedBytes := s.mem[storedOffset0 : storedOffsetN+readN+metaLenN+dataLenN] + _, err := w.Write(storedBytes) + if err != nil { + return err + } + + // remap the storedOffset's for the docs into new offsets relative + // to storedOffset0New, filling the given docNumOffsetsOut array + for indexOffset := indexOffset0; indexOffset <= indexOffsetN; indexOffset += 8 { + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + storedOffsetNew := storedOffset - storedOffset0 + storedOffset0New + newDocNumOffsets[newDocNum] = storedOffsetNew + newDocNum += 1 + } + + return nil +} + +// mergeFields builds a unified list of fields used across 
all the +// input segments, and computes whether the fields are the same across +// segments (which depends on fields to be sorted in the same way +// across segments) +func mergeFields(segments []*SegmentBase) (bool, []string) { + fieldsSame := true + + var segment0Fields []string + if len(segments) > 0 { + segment0Fields = segments[0].Fields() + } + + fieldsExist := map[string]struct{}{} + for _, segment := range segments { + fields := segment.Fields() + for fieldi, field := range fields { + fieldsExist[field] = struct{}{} + if len(segment0Fields) != len(fields) || segment0Fields[fieldi] != field { + fieldsSame = false + } + } + } + + rv := make([]string, 0, len(fieldsExist)) + // ensure _id stays first + rv = append(rv, "_id") + for k := range fieldsExist { + if k != "_id" { + rv = append(rv, k) + } + } + + sort.Strings(rv[1:]) // leave _id as first + + return fieldsSame, rv +} + +func isClosed(closeCh chan struct{}) bool { + select { + case <-closeCh: + return true + default: + return false + } +} diff --git a/vendor/github.com/blevesearch/zap/v14/new.go b/vendor/github.com/blevesearch/zap/v14/new.go new file mode 100644 index 0000000..9815818 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/new.go @@ -0,0 +1,860 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "math" + "sort" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var NewSegmentBufferNumResultsBump int = 100 +var NewSegmentBufferNumResultsFactor float64 = 1.0 +var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0 + +// ValidateDocFields can be set by applications to perform additional checks +// on fields in a document being added to a new segment, by default it does +// nothing. +// This API is experimental and may be removed at any time. +var ValidateDocFields = func(field document.Field) error { + return nil +} + +// AnalysisResultsToSegmentBase produces an in-memory zap-encoded +// SegmentBase from analysis results +func (z *ZapPlugin) New(results []*index.AnalysisResult) ( + segment.Segment, uint64, error) { + return z.newWithChunkMode(results, DefaultChunkMode) +} + +func (*ZapPlugin) newWithChunkMode(results []*index.AnalysisResult, + chunkMode uint32) (segment.Segment, uint64, error) { + s := interimPool.Get().(*interim) + + var br bytes.Buffer + if s.lastNumDocs > 0 { + // use previous results to initialize the buf with an estimate + // size, but note that the interim instance comes from a + // global interimPool, so multiple scorch instances indexing + // different docs can lead to low quality estimates + estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * + NewSegmentBufferNumResultsFactor) + estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * + NewSegmentBufferAvgBytesPerDocFactor) + br.Grow(estimateAvgBytesPerDoc * estimateNumResults) + } + + s.results = results + s.chunkMode = chunkMode + s.w = NewCountHashWriter(&br) + + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, + err := 
s.convert() + if err != nil { + return nil, uint64(0), err + } + + sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkMode, + s.FieldsMap, s.FieldsInv, uint64(len(results)), + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) + + if err == nil && s.reset() == nil { + s.lastNumDocs = len(results) + s.lastOutSize = len(br.Bytes()) + interimPool.Put(s) + } + + return sb, uint64(len(br.Bytes())), err +} + +var interimPool = sync.Pool{New: func() interface{} { return &interim{} }} + +// interim holds temporary working data used while converting from +// analysis results to a zap-encoded segment +type interim struct { + results []*index.AnalysisResult + + chunkMode uint32 + + w *CountHashWriter + + // FieldsMap adds 1 to field id to avoid zero value issues + // name -> field id + 1 + FieldsMap map[string]uint16 + + // FieldsInv is the inverse of FieldsMap + // field id -> name + FieldsInv []string + + // Term dictionaries for each field + // field id -> term -> postings list id + 1 + Dicts []map[string]uint64 + + // Terms for each field, where terms are sorted ascending + // field id -> []term + DictKeys [][]string + + // Fields whose IncludeDocValues is true + // field id -> bool + IncludeDocValues []bool + + // postings id -> bitmap of docNums + Postings []*roaring.Bitmap + + // postings id -> freq/norm's, one for each docNum in postings + FreqNorms [][]interimFreqNorm + freqNormsBacking []interimFreqNorm + + // postings id -> locs, one for each freq + Locs [][]interimLoc + locsBacking []interimLoc + + numTermsPerPostingsList []int // key is postings list id + numLocsPerPostingsList []int // key is postings list id + + builder *vellum.Builder + builderBuf bytes.Buffer + + metaBuf bytes.Buffer + + tmp0 []byte + tmp1 []byte + + lastNumDocs int + lastOutSize int +} + +func (s *interim) reset() (err error) { + s.results = nil + s.chunkMode = 0 + s.w = nil + s.FieldsMap = nil + s.FieldsInv = nil + for i := range s.Dicts { + s.Dicts[i] = nil + } + 
s.Dicts = s.Dicts[:0] + for i := range s.DictKeys { + s.DictKeys[i] = s.DictKeys[i][:0] + } + s.DictKeys = s.DictKeys[:0] + for i := range s.IncludeDocValues { + s.IncludeDocValues[i] = false + } + s.IncludeDocValues = s.IncludeDocValues[:0] + for _, idn := range s.Postings { + idn.Clear() + } + s.Postings = s.Postings[:0] + s.FreqNorms = s.FreqNorms[:0] + for i := range s.freqNormsBacking { + s.freqNormsBacking[i] = interimFreqNorm{} + } + s.freqNormsBacking = s.freqNormsBacking[:0] + s.Locs = s.Locs[:0] + for i := range s.locsBacking { + s.locsBacking[i] = interimLoc{} + } + s.locsBacking = s.locsBacking[:0] + s.numTermsPerPostingsList = s.numTermsPerPostingsList[:0] + s.numLocsPerPostingsList = s.numLocsPerPostingsList[:0] + s.builderBuf.Reset() + if s.builder != nil { + err = s.builder.Reset(&s.builderBuf) + } + s.metaBuf.Reset() + s.tmp0 = s.tmp0[:0] + s.tmp1 = s.tmp1[:0] + s.lastNumDocs = 0 + s.lastOutSize = 0 + + return err +} + +func (s *interim) grabBuf(size int) []byte { + buf := s.tmp0 + if cap(buf) < size { + buf = make([]byte, size) + s.tmp0 = buf + } + return buf[0:size] +} + +type interimStoredField struct { + vals [][]byte + typs []byte + arrayposs [][]uint64 // array positions +} + +type interimFreqNorm struct { + freq uint64 + norm float32 + numLocs int +} + +type interimLoc struct { + fieldID uint16 + pos uint64 + start uint64 + end uint64 + arrayposs []uint64 +} + +func (s *interim) convert() (uint64, uint64, uint64, []uint64, error) { + s.FieldsMap = map[string]uint16{} + + s.getOrDefineField("_id") // _id field is fieldID 0 + + for _, result := range s.results { + for _, field := range result.Document.CompositeFields { + s.getOrDefineField(field.Name()) + } + for _, field := range result.Document.Fields { + s.getOrDefineField(field.Name()) + } + } + + sort.Strings(s.FieldsInv[1:]) // keep _id as first field + + for fieldID, fieldName := range s.FieldsInv { + s.FieldsMap[fieldName] = uint16(fieldID + 1) + } + + if cap(s.IncludeDocValues) >= 
len(s.FieldsInv) { + s.IncludeDocValues = s.IncludeDocValues[:len(s.FieldsInv)] + } else { + s.IncludeDocValues = make([]bool, len(s.FieldsInv)) + } + + s.prepareDicts() + + for _, dict := range s.DictKeys { + sort.Strings(dict) + } + + s.processDocuments() + + storedIndexOffset, err := s.writeStoredFields() + if err != nil { + return 0, 0, 0, nil, err + } + + var fdvIndexOffset uint64 + var dictOffsets []uint64 + + if len(s.results) > 0 { + fdvIndexOffset, dictOffsets, err = s.writeDicts() + if err != nil { + return 0, 0, 0, nil, err + } + } else { + dictOffsets = make([]uint64, len(s.FieldsInv)) + } + + fieldsIndexOffset, err := persistFields(s.FieldsInv, s.w, dictOffsets) + if err != nil { + return 0, 0, 0, nil, err + } + + return storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, nil +} + +func (s *interim) getOrDefineField(fieldName string) int { + fieldIDPlus1, exists := s.FieldsMap[fieldName] + if !exists { + fieldIDPlus1 = uint16(len(s.FieldsInv) + 1) + s.FieldsMap[fieldName] = fieldIDPlus1 + s.FieldsInv = append(s.FieldsInv, fieldName) + + s.Dicts = append(s.Dicts, make(map[string]uint64)) + + n := len(s.DictKeys) + if n < cap(s.DictKeys) { + s.DictKeys = s.DictKeys[:n+1] + s.DictKeys[n] = s.DictKeys[n][:0] + } else { + s.DictKeys = append(s.DictKeys, []string(nil)) + } + } + + return int(fieldIDPlus1 - 1) +} + +// fill Dicts and DictKeys from analysis results +func (s *interim) prepareDicts() { + var pidNext int + + var totTFs int + var totLocs int + + visitField := func(fieldID uint16, tfs analysis.TokenFrequencies) { + dict := s.Dicts[fieldID] + dictKeys := s.DictKeys[fieldID] + + for term, tf := range tfs { + pidPlus1, exists := dict[term] + if !exists { + pidNext++ + pidPlus1 = uint64(pidNext) + + dict[term] = pidPlus1 + dictKeys = append(dictKeys, term) + + s.numTermsPerPostingsList = append(s.numTermsPerPostingsList, 0) + s.numLocsPerPostingsList = append(s.numLocsPerPostingsList, 0) + } + + pid := pidPlus1 - 1 + + 
s.numTermsPerPostingsList[pid] += 1 + s.numLocsPerPostingsList[pid] += len(tf.Locations) + + totLocs += len(tf.Locations) + } + + totTFs += len(tfs) + + s.DictKeys[fieldID] = dictKeys + } + + for _, result := range s.results { + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + _, tf := field.Analyze() + visitField(fieldID, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + tf := result.Analyzed[i] + visitField(fieldID, tf) + } + } + + numPostingsLists := pidNext + + if cap(s.Postings) >= numPostingsLists { + s.Postings = s.Postings[:numPostingsLists] + } else { + postings := make([]*roaring.Bitmap, numPostingsLists) + copy(postings, s.Postings[:cap(s.Postings)]) + for i := 0; i < numPostingsLists; i++ { + if postings[i] == nil { + postings[i] = roaring.New() + } + } + s.Postings = postings + } + + if cap(s.FreqNorms) >= numPostingsLists { + s.FreqNorms = s.FreqNorms[:numPostingsLists] + } else { + s.FreqNorms = make([][]interimFreqNorm, numPostingsLists) + } + + if cap(s.freqNormsBacking) >= totTFs { + s.freqNormsBacking = s.freqNormsBacking[:totTFs] + } else { + s.freqNormsBacking = make([]interimFreqNorm, totTFs) + } + + freqNormsBacking := s.freqNormsBacking + for pid, numTerms := range s.numTermsPerPostingsList { + s.FreqNorms[pid] = freqNormsBacking[0:0] + freqNormsBacking = freqNormsBacking[numTerms:] + } + + if cap(s.Locs) >= numPostingsLists { + s.Locs = s.Locs[:numPostingsLists] + } else { + s.Locs = make([][]interimLoc, numPostingsLists) + } + + if cap(s.locsBacking) >= totLocs { + s.locsBacking = s.locsBacking[:totLocs] + } else { + s.locsBacking = make([]interimLoc, totLocs) + } + + locsBacking := s.locsBacking + for pid, numLocs := range s.numLocsPerPostingsList { + s.Locs[pid] = locsBacking[0:0] + locsBacking = locsBacking[numLocs:] + } +} + +func (s *interim) 
processDocuments() { + numFields := len(s.FieldsInv) + reuseFieldLens := make([]int, numFields) + reuseFieldTFs := make([]analysis.TokenFrequencies, numFields) + + for docNum, result := range s.results { + for i := 0; i < numFields; i++ { // clear these for reuse + reuseFieldLens[i] = 0 + reuseFieldTFs[i] = nil + } + + s.processDocument(uint64(docNum), result, + reuseFieldLens, reuseFieldTFs) + } +} + +func (s *interim) processDocument(docNum uint64, + result *index.AnalysisResult, + fieldLens []int, fieldTFs []analysis.TokenFrequencies) { + visitField := func(fieldID uint16, fieldName string, + ln int, tf analysis.TokenFrequencies) { + fieldLens[fieldID] += ln + + existingFreqs := fieldTFs[fieldID] + if existingFreqs != nil { + existingFreqs.MergeAll(fieldName, tf) + } else { + fieldTFs[fieldID] = tf + } + } + + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln, tf := field.Analyze() + visitField(fieldID, field.Name(), ln, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln := result.Length[i] + tf := result.Analyzed[i] + visitField(fieldID, field.Name(), ln, tf) + } + + // now that it's been rolled up into fieldTFs, walk that + for fieldID, tfs := range fieldTFs { + dict := s.Dicts[fieldID] + norm := float32(1.0 / math.Sqrt(float64(fieldLens[fieldID]))) + + for term, tf := range tfs { + pid := dict[term] - 1 + bs := s.Postings[pid] + bs.Add(uint32(docNum)) + + s.FreqNorms[pid] = append(s.FreqNorms[pid], + interimFreqNorm{ + freq: uint64(tf.Frequency()), + norm: norm, + numLocs: len(tf.Locations), + }) + + if len(tf.Locations) > 0 { + locs := s.Locs[pid] + + for _, loc := range tf.Locations { + var locf = uint16(fieldID) + if loc.Field != "" { + locf = uint16(s.getOrDefineField(loc.Field)) + } + var arrayposs []uint64 + if len(loc.ArrayPositions) > 0 { + arrayposs = loc.ArrayPositions 
+ } + locs = append(locs, interimLoc{ + fieldID: locf, + pos: uint64(loc.Position), + start: uint64(loc.Start), + end: uint64(loc.End), + arrayposs: arrayposs, + }) + } + + s.Locs[pid] = locs + } + } + } +} + +func (s *interim) writeStoredFields() ( + storedIndexOffset uint64, err error) { + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return s.metaBuf.Write(varBuf[:wb]) + } + + data, compressed := s.tmp0[:0], s.tmp1[:0] + defer func() { s.tmp0, s.tmp1 = data, compressed }() + + // keyed by docNum + docStoredOffsets := make([]uint64, len(s.results)) + + // keyed by fieldID, for the current doc in the loop + docStoredFields := map[uint16]interimStoredField{} + + for docNum, result := range s.results { + for fieldID := range docStoredFields { // reset for next doc + delete(docStoredFields, fieldID) + } + + for _, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + + opts := field.Options() + + if opts.IsStored() { + isf := docStoredFields[fieldID] + isf.vals = append(isf.vals, field.Value()) + isf.typs = append(isf.typs, encodeFieldType(field)) + isf.arrayposs = append(isf.arrayposs, field.ArrayPositions()) + docStoredFields[fieldID] = isf + } + + if opts.IncludeDocValues() { + s.IncludeDocValues[fieldID] = true + } + + err := ValidateDocFields(field) + if err != nil { + return 0, err + } + } + + var curr int + + s.metaBuf.Reset() + data = data[:0] + + // _id field special case optimizes ExternalID() lookups + idFieldVal := docStoredFields[uint16(0)].vals[0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, err + } + + // handle non-"_id" fields + for fieldID := 1; fieldID < len(s.FieldsInv); fieldID++ { + isf, exists := docStoredFields[uint16(fieldID)] + if exists { + curr, data, err = persistStoredFieldValues( + fieldID, isf.vals, isf.typs, isf.arrayposs, + curr, metaEncode, data) + if err != nil { + return 
0, err + } + } + } + + metaBytes := s.metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + docStoredOffsets[docNum] = uint64(s.w.Count()) + + _, err := writeUvarints(s.w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, err + } + + _, err = s.w.Write(metaBytes) + if err != nil { + return 0, err + } + + _, err = s.w.Write(idFieldVal) + if err != nil { + return 0, err + } + + _, err = s.w.Write(compressed) + if err != nil { + return 0, err + } + } + + storedIndexOffset = uint64(s.w.Count()) + + for _, docStoredOffset := range docStoredOffsets { + err = binary.Write(s.w, binary.BigEndian, docStoredOffset) + if err != nil { + return 0, err + } + } + + return storedIndexOffset, nil +} + +func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) { + dictOffsets = make([]uint64, len(s.FieldsInv)) + + fdvOffsetsStart := make([]uint64, len(s.FieldsInv)) + fdvOffsetsEnd := make([]uint64, len(s.FieldsInv)) + + buf := s.grabBuf(binary.MaxVarintLen64) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + locEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + + var docTermMap [][]byte + + if s.builder == nil { + s.builder, err = vellum.New(&s.builderBuf, nil) + if err != nil { + return 0, nil, err + } + } + + for fieldID, terms := range s.DictKeys { + if cap(docTermMap) < len(s.results) { + docTermMap = make([][]byte, len(s.results)) + } else { + docTermMap = docTermMap[0:len(s.results)] + for docNum := range docTermMap { // reset the docTermMap + docTermMap[docNum] = docTermMap[docNum][:0] + } + } + + dict := s.Dicts[fieldID] + + for _, term := range terms { // terms are already sorted + pid := dict[term] - 1 + + postingsBS := s.Postings[pid] + + freqNorms := 
s.FreqNorms[pid] + freqNormOffset := 0 + + locs := s.Locs[pid] + locOffset := 0 + + chunkSize, err := getChunkSize(s.chunkMode, postingsBS.GetCardinality(), uint64(len(s.results))) + if err != nil { + return 0, nil, err + } + tfEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + locEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + + postingsItr := postingsBS.Iterator() + for postingsItr.HasNext() { + docNum := uint64(postingsItr.Next()) + + freqNorm := freqNorms[freqNormOffset] + + err = tfEncoder.Add(docNum, + encodeFreqHasLocs(freqNorm.freq, freqNorm.numLocs > 0), + uint64(math.Float32bits(freqNorm.norm))) + if err != nil { + return 0, nil, err + } + + if freqNorm.numLocs > 0 { + numBytesLocs := 0 + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + numBytesLocs += totalUvarintBytes( + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs)), loc.arrayposs) + } + + err = locEncoder.Add(docNum, uint64(numBytesLocs)) + if err != nil { + return 0, nil, err + } + + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + err = locEncoder.Add(docNum, + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs))) + if err != nil { + return 0, nil, err + } + + err = locEncoder.Add(docNum, loc.arrayposs...) 
+ if err != nil { + return 0, nil, err + } + } + + locOffset += freqNorm.numLocs + } + + freqNormOffset++ + + docTermMap[docNum] = append( + append(docTermMap[docNum], term...), + termSeparator) + } + + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := + writePostings(postingsBS, tfEncoder, locEncoder, nil, s.w, buf) + if err != nil { + return 0, nil, err + } + + if postingsOffset > uint64(0) { + err = s.builder.Insert([]byte(term), postingsOffset) + if err != nil { + return 0, nil, err + } + } + + tfEncoder.Reset() + locEncoder.Reset() + } + + err = s.builder.Close() + if err != nil { + return 0, nil, err + } + + // record where this dictionary starts + dictOffsets[fieldID] = uint64(s.w.Count()) + + vellumData := s.builderBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(buf, uint64(len(vellumData))) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + + // write this vellum to disk + _, err = s.w.Write(vellumData) + if err != nil { + return 0, nil, err + } + + // reset vellum for reuse + s.builderBuf.Reset() + + err = s.builder.Reset(&s.builderBuf) + if err != nil { + return 0, nil, err + } + + // write the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return 0, nil, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, uint64(len(s.results)-1), s.w, false) + if s.IncludeDocValues[fieldID] { + for docNum, docTerms := range docTermMap { + if len(docTerms) > 0 { + err = fdvEncoder.Add(uint64(docNum), docTerms) + if err != nil { + return 0, nil, err + } + } + } + err = fdvEncoder.Close() + if err != nil { + return 0, nil, err + } + + fdvOffsetsStart[fieldID] = uint64(s.w.Count()) + + _, err = fdvEncoder.Write() + if err != nil { + return 0, nil, err + } + + fdvOffsetsEnd[fieldID] = uint64(s.w.Count()) + + fdvEncoder.Reset() + } else { + fdvOffsetsStart[fieldID] = fieldNotUninverted + 
fdvOffsetsEnd[fieldID] = fieldNotUninverted + } + } + + fdvIndexOffset = uint64(s.w.Count()) + + for i := 0; i < len(fdvOffsetsStart); i++ { + n := binary.PutUvarint(buf, fdvOffsetsStart[i]) + _, err := s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + n = binary.PutUvarint(buf, fdvOffsetsEnd[i]) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + } + + return fdvIndexOffset, dictOffsets, nil +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType = 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +// returns the total # of bytes needed to encode the given uint64's +// into binary.PutUVarint() encoding +func totalUvarintBytes(a, b, c, d, e uint64, more []uint64) (n int) { + n = numUvarintBytes(a) + n += numUvarintBytes(b) + n += numUvarintBytes(c) + n += numUvarintBytes(d) + n += numUvarintBytes(e) + for _, v := range more { + n += numUvarintBytes(v) + } + return n +} + +// returns # of bytes needed to encode x in binary.PutUvarint() encoding +func numUvarintBytes(x uint64) (n int) { + for x >= 0x80 { + x >>= 7 + n++ + } + return n + 1 +} diff --git a/vendor/github.com/blevesearch/zap/v14/plugin.go b/vendor/github.com/blevesearch/zap/v14/plugin.go new file mode 100644 index 0000000..38a0638 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/plugin.go @@ -0,0 +1,37 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// ZapPlugin implements the Plugin interface of +// the blevesearch/bleve/index/scorch/segment pkg +type ZapPlugin struct{} + +func (*ZapPlugin) Type() string { + return Type +} + +func (*ZapPlugin) Version() uint32 { + return Version +} + +// Plugin returns an instance segment.Plugin for use +// by the Scorch indexing scheme +func Plugin() segment.Plugin { + return &ZapPlugin{} +} diff --git a/vendor/github.com/blevesearch/zap/v14/posting.go b/vendor/github.com/blevesearch/zap/v14/posting.go new file mode 100644 index 0000000..88b2b95 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/posting.go @@ -0,0 +1,796 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + "math" + "reflect" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePostingsList int +var reflectStaticSizePostingsIterator int +var reflectStaticSizePosting int +var reflectStaticSizeLocation int + +func init() { + var pl PostingsList + reflectStaticSizePostingsList = int(reflect.TypeOf(pl).Size()) + var pi PostingsIterator + reflectStaticSizePostingsIterator = int(reflect.TypeOf(pi).Size()) + var p Posting + reflectStaticSizePosting = int(reflect.TypeOf(p).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +// FST or vellum value (uint64) encoding is determined by the top two +// highest-order or most significant bits... +// +// encoding : MSB +// name : 63 62 61...to...bit #0 (LSB) +// ----------+---+---+--------------------------------------------------- +// general : 0 | 0 | 62-bits of postingsOffset. +// ~ : 0 | 1 | reserved for future. +// 1-hit : 1 | 0 | 31-bits of positive float31 norm | 31-bits docNum. +// ~ : 1 | 1 | reserved for future. +// +// Encoding "general" is able to handle all cases, where the +// postingsOffset points to more information about the postings for +// the term. +// +// Encoding "1-hit" is used to optimize a commonly seen case when a +// term has only a single hit. For example, a term in the _id field +// will have only 1 hit. The "1-hit" encoding is used for a term +// in a field when... +// +// - term vector info is disabled for that field; +// - and, the term appears in only a single doc for that field; +// - and, the term's freq is exactly 1 in that single doc for that field; +// - and, the docNum must fit into 31-bits; +// +// Otherwise, the "general" encoding is used instead. 
+// +// In the "1-hit" encoding, the field in that single doc may have +// other terms, which is supported in the "1-hit" encoding by the +// positive float31 norm. + +const FSTValEncodingMask = uint64(0xc000000000000000) +const FSTValEncodingGeneral = uint64(0x0000000000000000) +const FSTValEncoding1Hit = uint64(0x8000000000000000) + +func FSTValEncode1Hit(docNum uint64, normBits uint64) uint64 { + return FSTValEncoding1Hit | ((mask31Bits & normBits) << 31) | (mask31Bits & docNum) +} + +func FSTValDecode1Hit(v uint64) (docNum uint64, normBits uint64) { + return (mask31Bits & v), (mask31Bits & (v >> 31)) +} + +const mask31Bits = uint64(0x000000007fffffff) + +func under32Bits(x uint64) bool { + return x <= mask31Bits +} + +const DocNum1HitFinished = math.MaxUint64 + +var NormBits1Hit = uint64(math.Float32bits(float32(1))) + +// PostingsList is an in-memory representation of a postings list +type PostingsList struct { + sb *SegmentBase + postingsOffset uint64 + freqOffset uint64 + locOffset uint64 + postings *roaring.Bitmap + except *roaring.Bitmap + + // when normBits1Hit != 0, then this postings list came from a + // 1-hit encoding, and only the docNum1Hit & normBits1Hit apply + docNum1Hit uint64 + normBits1Hit uint64 + + chunkSize uint64 +} + +// represents an immutable, empty postings list +var emptyPostingsList = &PostingsList{} + +func (p *PostingsList) Size() int { + sizeInBytes := reflectStaticSizePostingsList + size.SizeOfPtr + + if p.except != nil { + sizeInBytes += int(p.except.GetSizeInBytes()) + } + + return sizeInBytes +} + +func (p *PostingsList) OrInto(receiver *roaring.Bitmap) { + if p.normBits1Hit != 0 { + receiver.Add(uint32(p.docNum1Hit)) + return + } + + if p.postings != nil { + receiver.Or(p.postings) + } +} + +// Iterator returns an iterator for this postings list +func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, + prealloc segment.PostingsIterator) segment.PostingsIterator { + if p.normBits1Hit == 0 && p.postings == 
nil { + return emptyPostingsIterator + } + + var preallocPI *PostingsIterator + pi, ok := prealloc.(*PostingsIterator) + if ok && pi != nil { + preallocPI = pi + } + if preallocPI == emptyPostingsIterator { + preallocPI = nil + } + + return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) +} + +func (p *PostingsList) iterator(includeFreq, includeNorm, includeLocs bool, + rv *PostingsIterator) *PostingsIterator { + if rv == nil { + rv = &PostingsIterator{} + } else { + freqNormReader := rv.freqNormReader + if freqNormReader != nil { + freqNormReader.reset() + } + + locReader := rv.locReader + if locReader != nil { + locReader.reset() + } + + nextLocs := rv.nextLocs[:0] + nextSegmentLocs := rv.nextSegmentLocs[:0] + + buf := rv.buf + + *rv = PostingsIterator{} // clear the struct + + rv.freqNormReader = freqNormReader + rv.locReader = locReader + + rv.nextLocs = nextLocs + rv.nextSegmentLocs = nextSegmentLocs + + rv.buf = buf + } + + rv.postings = p + rv.includeFreqNorm = includeFreq || includeNorm || includeLocs + rv.includeLocs = includeLocs + + if p.normBits1Hit != 0 { + // "1-hit" encoding + rv.docNum1Hit = p.docNum1Hit + rv.normBits1Hit = p.normBits1Hit + + if p.except != nil && p.except.Contains(uint32(rv.docNum1Hit)) { + rv.docNum1Hit = DocNum1HitFinished + } + + return rv + } + + // "general" encoding, check if empty + if p.postings == nil { + return rv + } + + // initialize freq chunk reader + if rv.includeFreqNorm { + rv.freqNormReader = newChunkedIntDecoder(p.sb.mem, p.freqOffset, rv.freqNormReader) + } + + // initialize the loc chunk reader + if rv.includeLocs { + rv.locReader = newChunkedIntDecoder(p.sb.mem, p.locOffset, rv.locReader) + } + + rv.all = p.postings.Iterator() + if p.except != nil { + rv.ActualBM = roaring.AndNot(p.postings, p.except) + rv.Actual = rv.ActualBM.Iterator() + } else { + rv.ActualBM = p.postings + rv.Actual = rv.all // Optimize to use same iterator for all & Actual. 
+ } + + return rv +} + +// Count returns the number of items on this postings list +func (p *PostingsList) Count() uint64 { + var n, e uint64 + if p.normBits1Hit != 0 { + n = 1 + if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { + e = 1 + } + } else if p.postings != nil { + n = p.postings.GetCardinality() + if p.except != nil { + e = p.postings.AndCardinality(p.except) + } + } + return n - e +} + +func (rv *PostingsList) read(postingsOffset uint64, d *Dictionary) error { + rv.postingsOffset = postingsOffset + + // handle "1-hit" encoding special case + if rv.postingsOffset&FSTValEncodingMask == FSTValEncoding1Hit { + return rv.init1Hit(postingsOffset) + } + + // read the location of the freq/norm details + var n uint64 + var read int + + rv.freqOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+binary.MaxVarintLen64]) + n += uint64(read) + + rv.locOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + var postingsLen uint64 + postingsLen, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + roaringBytes := d.sb.mem[postingsOffset+n : postingsOffset+n+postingsLen] + + if rv.postings == nil { + rv.postings = roaring.NewBitmap() + } + _, err := rv.postings.FromBuffer(roaringBytes) + if err != nil { + return fmt.Errorf("error loading roaring bitmap: %v", err) + } + + rv.chunkSize, err = getChunkSize(d.sb.chunkMode, + rv.postings.GetCardinality(), d.sb.numDocs) + if err != nil { + return err + } + + return nil +} + +func (rv *PostingsList) init1Hit(fstVal uint64) error { + docNum, normBits := FSTValDecode1Hit(fstVal) + + rv.docNum1Hit = docNum + rv.normBits1Hit = normBits + + return nil +} + +// PostingsIterator provides a way to iterate through the postings list +type PostingsIterator struct { + postings *PostingsList + all roaring.IntPeekable + Actual roaring.IntPeekable + ActualBM *roaring.Bitmap + + 
currChunk uint32 + freqNormReader *chunkedIntDecoder + locReader *chunkedIntDecoder + + next Posting // reused across Next() calls + nextLocs []Location // reused across Next() calls + nextSegmentLocs []segment.Location // reused across Next() calls + + docNum1Hit uint64 + normBits1Hit uint64 + + buf []byte + + includeFreqNorm bool + includeLocs bool +} + +var emptyPostingsIterator = &PostingsIterator{} + +func (i *PostingsIterator) Size() int { + sizeInBytes := reflectStaticSizePostingsIterator + size.SizeOfPtr + + i.next.Size() + // account for freqNormReader, locReader if we start using this. + for _, entry := range i.nextLocs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (i *PostingsIterator) loadChunk(chunk int) error { + if i.includeFreqNorm { + err := i.freqNormReader.loadChunk(chunk) + if err != nil { + return err + } + } + + if i.includeLocs { + err := i.locReader.loadChunk(chunk) + if err != nil { + return err + } + } + + i.currChunk = uint32(chunk) + return nil +} + +func (i *PostingsIterator) readFreqNormHasLocs() (uint64, uint64, bool, error) { + if i.normBits1Hit != 0 { + return 1, i.normBits1Hit, false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading frequency: %v", err) + } + + freq, hasLocs := decodeFreqHasLocs(freqHasLocs) + + normBits, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading norm: %v", err) + } + + return freq, normBits, hasLocs, nil +} + +func (i *PostingsIterator) skipFreqNormReadHasLocs() (bool, error) { + if i.normBits1Hit != 0 { + return false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return false, fmt.Errorf("error reading freqHasLocs: %v", err) + } + + i.freqNormReader.SkipUvarint() // Skip normBits. + + return freqHasLocs&0x01 != 0, nil // See decodeFreqHasLocs() / hasLocs. 
+} + +func encodeFreqHasLocs(freq uint64, hasLocs bool) uint64 { + rv := freq << 1 + if hasLocs { + rv = rv | 0x01 // 0'th LSB encodes whether there are locations + } + return rv +} + +func decodeFreqHasLocs(freqHasLocs uint64) (uint64, bool) { + freq := freqHasLocs >> 1 + hasLocs := freqHasLocs&0x01 != 0 + return freq, hasLocs +} + +// readLocation processes all the integers on the stream representing a single +// location. +func (i *PostingsIterator) readLocation(l *Location) error { + // read off field + fieldID, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location field: %v", err) + } + // read off pos + pos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location pos: %v", err) + } + // read off start + start, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location start: %v", err) + } + // read off end + end, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location end: %v", err) + } + // read off num array pos + numArrayPos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location num array pos: %v", err) + } + + l.field = i.postings.sb.fieldsInv[fieldID] + l.pos = pos + l.start = start + l.end = end + + if cap(l.ap) < int(numArrayPos) { + l.ap = make([]uint64, int(numArrayPos)) + } else { + l.ap = l.ap[:int(numArrayPos)] + } + + // read off array positions + for k := 0; k < int(numArrayPos); k++ { + ap, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading array position: %v", err) + } + + l.ap[k] = ap + } + + return nil +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) Next() (segment.Posting, error) { + return i.nextAtOrAfter(0) +} + +// Advance returns the posting at the specified docNum or it is not present +// the next posting, or if the end is reached, nil +func (i 
*PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { + return i.nextAtOrAfter(docNum) +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { + docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) + if err != nil || !exists { + return nil, err + } + + i.next = Posting{} // clear the struct + rv := &i.next + rv.docNum = docNum + + if !i.includeFreqNorm { + return rv, nil + } + + var normBits uint64 + var hasLocs bool + + rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return nil, err + } + + rv.norm = math.Float32frombits(uint32(normBits)) + + if i.includeLocs && hasLocs { + // prepare locations into reused slices, where we assume + // rv.freq >= "number of locs", since in a composite field, + // some component fields might have their IncludeTermVector + // flags disabled while other component fields are enabled + if cap(i.nextLocs) >= int(rv.freq) { + i.nextLocs = i.nextLocs[0:rv.freq] + } else { + i.nextLocs = make([]Location, rv.freq, rv.freq*2) + } + if cap(i.nextSegmentLocs) < int(rv.freq) { + i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) + } + rv.locs = i.nextSegmentLocs[:0] + + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + j := 0 + startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader + for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { + err := i.readLocation(&i.nextLocs[j]) + if err != nil { + return nil, err + } + rv.locs = append(rv.locs, &i.nextLocs[j]) + j++ + } + } + + return rv, nil +} + +// nextDocNum returns the next docNum on the postings list, and also +// sets up the currChunk / loc related fields of the iterator. 
+func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool, error) { + if i.normBits1Hit != 0 { + if i.docNum1Hit == DocNum1HitFinished { + return 0, false, nil + } + if i.docNum1Hit < atOrAfter { + // advanced past our 1-hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return 0, false, nil + } + docNum := i.docNum1Hit + i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum + return docNum, true, nil + } + + if i.Actual == nil || !i.Actual.HasNext() { + return 0, false, nil + } + + if i.postings == nil || i.postings.postings == i.ActualBM { + return i.nextDocNumAtOrAfterClean(atOrAfter) + } + + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) + + if !i.Actual.HasNext() { + // couldn't find anything + return 0, false, nil + } + + n := i.Actual.Next() + allN := i.all.Next() + nChunk := n / uint32(i.postings.chunkSize) + + // when allN becomes >= to here, then allN is in the same chunk as nChunk. + allNReachesNChunk := nChunk * uint32(i.postings.chunkSize) + + // n is the next actual hit (excluding some postings), and + // allN is the next hit in the full postings, and + // if they don't match, move 'all' forwards until they do + for allN != n { + // we've reached same chunk, so move the freq/norm/loc decoders forward + if i.includeFreqNorm && allN >= allNReachesNChunk { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, err + } + } + + allN = i.all.Next() + } + + if i.includeFreqNorm && (i.currChunk != nChunk || i.freqNormReader.isNil()) { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +// optimization when the postings list is "clean" (e.g., no updates & +// no deletions) where the all bitmap is the same as the actual bitmap +func (i *PostingsIterator) nextDocNumAtOrAfterClean( + atOrAfter uint64) (uint64, bool, error) { + + if !i.includeFreqNorm { + i.Actual.AdvanceIfNeeded(uint32(atOrAfter)) 
+ + if !i.Actual.HasNext() { + return 0, false, nil // couldn't find anything + } + + return uint64(i.Actual.Next()), true, nil + } + + // freq-norm's needed, so maintain freq-norm chunk reader + sameChunkNexts := 0 // # of times we called Next() in the same chunk + n := i.Actual.Next() + nChunk := n / uint32(i.postings.chunkSize) + + for uint64(n) < atOrAfter && i.Actual.HasNext() { + n = i.Actual.Next() + + nChunkPrev := nChunk + nChunk = n / uint32(i.postings.chunkSize) + + if nChunk != nChunkPrev { + sameChunkNexts = 0 + } else { + sameChunkNexts += 1 + } + } + + if uint64(n) < atOrAfter { + // couldn't find anything + return 0, false, nil + } + + for j := 0; j < sameChunkNexts; j++ { + err := i.currChunkNext(nChunk) + if err != nil { + return 0, false, fmt.Errorf("error optimized currChunkNext: %v", err) + } + } + + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return 0, false, fmt.Errorf("error loading chunk: %v", err) + } + } + + return uint64(n), true, nil +} + +func (i *PostingsIterator) currChunkNext(nChunk uint32) error { + if i.currChunk != nChunk || i.freqNormReader.isNil() { + err := i.loadChunk(int(nChunk)) + if err != nil { + return fmt.Errorf("error loading chunk: %v", err) + } + } + + // read off freq/offsets even though we don't care about them + hasLocs, err := i.skipFreqNormReadHasLocs() + if err != nil { + return err + } + + if i.includeLocs && hasLocs { + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + // skip over all the location bytes + i.locReader.SkipBytes(int(numLocsBytes)) + } + + return nil +} + +// DocNum1Hit returns the docNum and true if this is "1-hit" optimized +// and the docNum is available. 
+func (p *PostingsIterator) DocNum1Hit() (uint64, bool) { + if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished { + return p.docNum1Hit, true + } + return 0, false +} + +// ActualBitmap returns the underlying actual bitmap +// which can be used up the stack for optimizations +func (p *PostingsIterator) ActualBitmap() *roaring.Bitmap { + return p.ActualBM +} + +// ReplaceActual replaces the ActualBM with the provided +// bitmap +func (p *PostingsIterator) ReplaceActual(abm *roaring.Bitmap) { + p.ActualBM = abm + p.Actual = abm.Iterator() +} + +// PostingsIteratorFromBitmap constructs a PostingsIterator given an +// "actual" bitmap. +func PostingsIteratorFromBitmap(bm *roaring.Bitmap, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + ActualBM: bm, + Actual: bm.Iterator(), + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// PostingsIteratorFrom1Hit constructs a PostingsIterator given a +// 1-hit docNum. 
+func PostingsIteratorFrom1Hit(docNum1Hit uint64, + includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) { + return &PostingsIterator{ + docNum1Hit: docNum1Hit, + normBits1Hit: NormBits1Hit, + includeFreqNorm: includeFreqNorm, + includeLocs: includeLocs, + }, nil +} + +// Posting is a single entry in a postings list +type Posting struct { + docNum uint64 + freq uint64 + norm float32 + locs []segment.Location +} + +func (p *Posting) Size() int { + sizeInBytes := reflectStaticSizePosting + + for _, entry := range p.locs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +// Number returns the document number of this posting in this segment +func (p *Posting) Number() uint64 { + return p.docNum +} + +// Frequency returns the frequencies of occurrence of this term in this doc/field +func (p *Posting) Frequency() uint64 { + return p.freq +} + +// Norm returns the normalization factor for this posting +func (p *Posting) Norm() float64 { + return float64(p.norm) +} + +// Locations returns the location information for each occurrence +func (p *Posting) Locations() []segment.Location { + return p.locs +} + +// Location represents the location of a single occurrence +type Location struct { + field string + pos uint64 + start uint64 + end uint64 + ap []uint64 +} + +func (l *Location) Size() int { + return reflectStaticSizeLocation + + len(l.field) + + len(l.ap)*size.SizeOfUint64 +} + +// Field returns the name of the field (useful in composite fields to know +// which original field the value came from) +func (l *Location) Field() string { + return l.field +} + +// Start returns the start byte offset of this occurrence +func (l *Location) Start() uint64 { + return l.start +} + +// End returns the end byte offset of this occurrence +func (l *Location) End() uint64 { + return l.end +} + +// Pos returns the 1-based phrase position of this occurrence +func (l *Location) Pos() uint64 { + return l.pos +} + +// ArrayPositions returns the array position 
vector associated with this occurrence +func (l *Location) ArrayPositions() []uint64 { + return l.ap +} diff --git a/vendor/github.com/blevesearch/zap/v14/read.go b/vendor/github.com/blevesearch/zap/v14/read.go new file mode 100644 index 0000000..e47d4c6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/read.go @@ -0,0 +1,43 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import "encoding/binary" + +func (s *SegmentBase) getDocStoredMetaAndCompressed(docNum uint64) ([]byte, []byte) { + _, storedOffset, n, metaLen, dataLen := s.getDocStoredOffsets(docNum) + + meta := s.mem[storedOffset+n : storedOffset+n+metaLen] + data := s.mem[storedOffset+n+metaLen : storedOffset+n+metaLen+dataLen] + + return meta, data +} + +func (s *SegmentBase) getDocStoredOffsets(docNum uint64) ( + uint64, uint64, uint64, uint64, uint64) { + indexOffset := s.storedIndexOffset + (8 * docNum) + + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + + var n uint64 + + metaLen, read := binary.Uvarint(s.mem[storedOffset : storedOffset+binary.MaxVarintLen64]) + n += uint64(read) + + dataLen, read := binary.Uvarint(s.mem[storedOffset+n : storedOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + return indexOffset, storedOffset, n, metaLen, dataLen +} diff --git a/vendor/github.com/blevesearch/zap/v14/segment.go b/vendor/github.com/blevesearch/zap/v14/segment.go new file mode 100644 index 
0000000..e8b1f06 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/segment.go @@ -0,0 +1,572 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "os" + "sync" + "unsafe" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/couchbase/vellum" + mmap "github.com/blevesearch/mmap-go" + "github.com/golang/snappy" +) + +var reflectStaticSizeSegmentBase int + +func init() { + var sb SegmentBase + reflectStaticSizeSegmentBase = int(unsafe.Sizeof(sb)) +} + +// Open returns a zap impl of a segment +func (*ZapPlugin) Open(path string) (segment.Segment, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + + rv := &Segment{ + SegmentBase: SegmentBase{ + mem: mm[0 : len(mm)-FooterSize], + fieldsMap: make(map[string]uint16), + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + }, + f: f, + mm: mm, + path: path, + refs: 1, + } + rv.SegmentBase.updateSize() + + err = rv.loadConfig() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadFields() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadDvReaders() + if err != nil 
{ + _ = rv.Close() + return nil, err + } + + return rv, nil +} + +// SegmentBase is a memory only, read-only implementation of the +// segment.Segment interface, using zap's data representation. +type SegmentBase struct { + mem []byte + memCRC uint32 + chunkMode uint32 + fieldsMap map[string]uint16 // fieldName -> fieldID+1 + fieldsInv []string // fieldID -> fieldName + numDocs uint64 + storedIndexOffset uint64 + fieldsIndexOffset uint64 + docValueOffset uint64 + dictLocs []uint64 + fieldDvReaders map[uint16]*docValueReader // naive chunk cache per field + fieldDvNames []string // field names cached in fieldDvReaders + size uint64 + + m sync.Mutex + fieldFSTs map[uint16]*vellum.FST +} + +func (sb *SegmentBase) Size() int { + return int(sb.size) +} + +func (sb *SegmentBase) updateSize() { + sizeInBytes := reflectStaticSizeSegmentBase + + cap(sb.mem) + + // fieldsMap + for k := range sb.fieldsMap { + sizeInBytes += (len(k) + size.SizeOfString) + size.SizeOfUint16 + } + + // fieldsInv, dictLocs + for _, entry := range sb.fieldsInv { + sizeInBytes += len(entry) + size.SizeOfString + } + sizeInBytes += len(sb.dictLocs) * size.SizeOfUint64 + + // fieldDvReaders + for _, v := range sb.fieldDvReaders { + sizeInBytes += size.SizeOfUint16 + size.SizeOfPtr + if v != nil { + sizeInBytes += v.size() + } + } + + sb.size = uint64(sizeInBytes) +} + +func (sb *SegmentBase) AddRef() {} +func (sb *SegmentBase) DecRef() (err error) { return nil } +func (sb *SegmentBase) Close() (err error) { return nil } + +// Segment implements a persisted segment.Segment interface, by +// embedding an mmap()'ed SegmentBase. +type Segment struct { + SegmentBase + + f *os.File + mm mmap.MMap + path string + version uint32 + crc uint32 + + m sync.Mutex // Protects the fields that follow. 
+ refs int64 +} + +func (s *Segment) Size() int { + // 8 /* size of file pointer */ + // 4 /* size of version -> uint32 */ + // 4 /* size of crc -> uint32 */ + sizeOfUints := 16 + + sizeInBytes := (len(s.path) + size.SizeOfString) + sizeOfUints + + // mutex, refs -> int64 + sizeInBytes += 16 + + // do not include the mmap'ed part + return sizeInBytes + s.SegmentBase.Size() - cap(s.mem) +} + +func (s *Segment) AddRef() { + s.m.Lock() + s.refs++ + s.m.Unlock() +} + +func (s *Segment) DecRef() (err error) { + s.m.Lock() + s.refs-- + if s.refs == 0 { + err = s.closeActual() + } + s.m.Unlock() + return err +} + +func (s *Segment) loadConfig() error { + crcOffset := len(s.mm) - 4 + s.crc = binary.BigEndian.Uint32(s.mm[crcOffset : crcOffset+4]) + + verOffset := crcOffset - 4 + s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4]) + if s.version != Version { + return fmt.Errorf("unsupported version %d", s.version) + } + + chunkOffset := verOffset - 4 + s.chunkMode = binary.BigEndian.Uint32(s.mm[chunkOffset : chunkOffset+4]) + + docValueOffset := chunkOffset - 8 + s.docValueOffset = binary.BigEndian.Uint64(s.mm[docValueOffset : docValueOffset+8]) + + fieldsIndexOffset := docValueOffset - 8 + s.fieldsIndexOffset = binary.BigEndian.Uint64(s.mm[fieldsIndexOffset : fieldsIndexOffset+8]) + + storedIndexOffset := fieldsIndexOffset - 8 + s.storedIndexOffset = binary.BigEndian.Uint64(s.mm[storedIndexOffset : storedIndexOffset+8]) + + numDocsOffset := storedIndexOffset - 8 + s.numDocs = binary.BigEndian.Uint64(s.mm[numDocsOffset : numDocsOffset+8]) + return nil +} + +func (s *SegmentBase) loadFields() error { + // NOTE for now we assume the fields index immediately precedes + // the footer, and if this changes, need to adjust accordingly (or + // store explicit length), where s.mem was sliced from s.mm in Open(). 
+ fieldsIndexEnd := uint64(len(s.mem)) + + // iterate through fields index + var fieldID uint64 + for s.fieldsIndexOffset+(8*fieldID) < fieldsIndexEnd { + addr := binary.BigEndian.Uint64(s.mem[s.fieldsIndexOffset+(8*fieldID) : s.fieldsIndexOffset+(8*fieldID)+8]) + + dictLoc, read := binary.Uvarint(s.mem[addr:fieldsIndexEnd]) + n := uint64(read) + s.dictLocs = append(s.dictLocs, dictLoc) + + var nameLen uint64 + nameLen, read = binary.Uvarint(s.mem[addr+n : fieldsIndexEnd]) + n += uint64(read) + + name := string(s.mem[addr+n : addr+n+nameLen]) + s.fieldsInv = append(s.fieldsInv, name) + s.fieldsMap[name] = uint16(fieldID + 1) + + fieldID++ + } + return nil +} + +// Dictionary returns the term dictionary for the specified field +func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { + dict, err := s.dictionary(field) + if err == nil && dict == nil { + return &segment.EmptyDictionary{}, nil + } + return dict, err +} + +func (sb *SegmentBase) dictionary(field string) (rv *Dictionary, err error) { + fieldIDPlus1 := sb.fieldsMap[field] + if fieldIDPlus1 > 0 { + rv = &Dictionary{ + sb: sb, + field: field, + fieldID: fieldIDPlus1 - 1, + } + + dictStart := sb.dictLocs[rv.fieldID] + if dictStart > 0 { + var ok bool + sb.m.Lock() + if rv.fst, ok = sb.fieldFSTs[rv.fieldID]; !ok { + // read the length of the vellum data + vellumLen, read := binary.Uvarint(sb.mem[dictStart : dictStart+binary.MaxVarintLen64]) + fstBytes := sb.mem[dictStart+uint64(read) : dictStart+uint64(read)+vellumLen] + rv.fst, err = vellum.Load(fstBytes) + if err != nil { + sb.m.Unlock() + return nil, fmt.Errorf("dictionary field %s vellum err: %v", field, err) + } + + sb.fieldFSTs[rv.fieldID] = rv.fst + } + + sb.m.Unlock() + rv.fstReader, err = rv.fst.Reader() + if err != nil { + return nil, fmt.Errorf("dictionary field %s vellum reader err: %v", field, err) + } + + } + } + + return rv, nil +} + +// visitDocumentCtx holds data structures that are reusable across +// multiple 
VisitDocument() calls to avoid memory allocations +type visitDocumentCtx struct { + buf []byte + reader bytes.Reader + arrayPos []uint64 +} + +var visitDocumentCtxPool = sync.Pool{ + New: func() interface{} { + reuse := &visitDocumentCtx{} + return reuse + }, +} + +// VisitDocument invokes the DocFieldValueVistor for each stored field +// for the specified doc number +func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + return s.visitDocument(vdc, num, visitor) +} + +func (s *SegmentBase) visitDocument(vdc *visitDocumentCtx, num uint64, + visitor segment.DocumentFieldValueVisitor) error { + // first make sure this is a valid number in this segment + if num < s.numDocs { + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + idFieldVal := compressed[:idFieldValLen] + + keepGoing := visitor("_id", byte('t'), idFieldVal, nil) + if !keepGoing { + visitDocumentCtxPool.Put(vdc) + return nil + } + + // handle non-"_id" fields + compressed = compressed[idFieldValLen:] + + uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed) + if err != nil { + return err + } + + for keepGoing { + field, err := binary.ReadUvarint(&vdc.reader) + if err == io.EOF { + break + } + if err != nil { + return err + } + typ, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + offset, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + l, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + numap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + var arrayPos []uint64 + if numap > 0 { + if cap(vdc.arrayPos) < int(numap) { + vdc.arrayPos = make([]uint64, numap) + } + arrayPos = 
vdc.arrayPos[:numap] + for i := 0; i < int(numap); i++ { + ap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + arrayPos[i] = ap + } + } + + value := uncompressed[offset : offset+l] + keepGoing = visitor(s.fieldsInv[field], byte(typ), value, arrayPos) + } + + vdc.buf = uncompressed + } + return nil +} + +// DocID returns the value of the _id field for the given docNum +func (s *SegmentBase) DocID(num uint64) ([]byte, error) { + if num >= s.numDocs { + return nil, nil + } + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return nil, err + } + idFieldVal := compressed[:idFieldValLen] + + visitDocumentCtxPool.Put(vdc) + + return idFieldVal, nil +} + +// Count returns the number of documents in this segment. +func (s *SegmentBase) Count() uint64 { + return s.numDocs +} + +// DocNumbers returns a bitset corresponding to the doc numbers of all the +// provided _id strings +func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { + rv := roaring.New() + + if len(s.fieldsMap) > 0 { + idDict, err := s.dictionary("_id") + if err != nil { + return nil, err + } + + postingsList := emptyPostingsList + + sMax, err := idDict.fst.GetMaxKey() + if err != nil { + return nil, err + } + sMaxStr := string(sMax) + filteredIds := make([]string, 0, len(ids)) + for _, id := range ids { + if id <= sMaxStr { + filteredIds = append(filteredIds, id) + } + } + + for _, id := range filteredIds { + postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) + if err != nil { + return nil, err + } + postingsList.OrInto(rv) + } + } + + return rv, nil +} + +// Fields returns the field names used in this segment +func (s *SegmentBase) Fields() []string { + return s.fieldsInv +} + +// Path returns the path of this segment on disk +func (s 
*Segment) Path() string { + return s.path +} + +// Close releases all resources associated with this segment +func (s *Segment) Close() (err error) { + return s.DecRef() +} + +func (s *Segment) closeActual() (err error) { + if s.mm != nil { + err = s.mm.Unmap() + } + // try to close file even if unmap failed + if s.f != nil { + err2 := s.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +// some helpers i started adding for the command-line utility + +// Data returns the underlying mmaped data slice +func (s *Segment) Data() []byte { + return s.mm +} + +// CRC returns the CRC value stored in the file footer +func (s *Segment) CRC() uint32 { + return s.crc +} + +// Version returns the file version in the file footer +func (s *Segment) Version() uint32 { + return s.version +} + +// ChunkFactor returns the chunk factor in the file footer +func (s *Segment) ChunkMode() uint32 { + return s.chunkMode +} + +// FieldsIndexOffset returns the fields index offset in the file footer +func (s *Segment) FieldsIndexOffset() uint64 { + return s.fieldsIndexOffset +} + +// StoredIndexOffset returns the stored value index offset in the file footer +func (s *Segment) StoredIndexOffset() uint64 { + return s.storedIndexOffset +} + +// DocValueOffset returns the docValue offset in the file footer +func (s *Segment) DocValueOffset() uint64 { + return s.docValueOffset +} + +// NumDocs returns the number of documents in the file footer +func (s *Segment) NumDocs() uint64 { + return s.numDocs +} + +// DictAddr is a helper function to compute the file offset where the +// dictionary is stored for the specified field. 
+func (s *Segment) DictAddr(field string) (uint64, error) { + fieldIDPlus1, ok := s.fieldsMap[field] + if !ok { + return 0, fmt.Errorf("no such field '%s'", field) + } + + return s.dictLocs[fieldIDPlus1-1], nil +} + +func (s *SegmentBase) loadDvReaders() error { + if s.docValueOffset == fieldNotUninverted || s.numDocs == 0 { + return nil + } + + var read uint64 + for fieldID, field := range s.fieldsInv { + var fieldLocStart, fieldLocEnd uint64 + var n int + fieldLocStart, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset start for field %d", fieldID) + } + read += uint64(n) + fieldLocEnd, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset end for field %d", fieldID) + } + read += uint64(n) + + fieldDvReader, err := s.loadFieldDocValueReader(field, fieldLocStart, fieldLocEnd) + if err != nil { + return err + } + if fieldDvReader != nil { + s.fieldDvReaders[uint16(fieldID)] = fieldDvReader + s.fieldDvNames = append(s.fieldDvNames, field) + } + } + + return nil +} diff --git a/vendor/github.com/blevesearch/zap/v14/write.go b/vendor/github.com/blevesearch/zap/v14/write.go new file mode 100644 index 0000000..77aefdb --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/write.go @@ -0,0 +1,145 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "encoding/binary" + "io" + + "github.com/RoaringBitmap/roaring" +) + +// writes out the length of the roaring bitmap in bytes as varint +// then writes out the roaring bitmap itself +func writeRoaringWithLen(r *roaring.Bitmap, w io.Writer, + reuseBufVarint []byte) (int, error) { + buf, err := r.ToBytes() + if err != nil { + return 0, err + } + + var tw int + + // write out the length + n := binary.PutUvarint(reuseBufVarint, uint64(len(buf))) + nw, err := w.Write(reuseBufVarint[:n]) + tw += nw + if err != nil { + return tw, err + } + + // write out the roaring bytes + nw, err = w.Write(buf) + tw += nw + if err != nil { + return tw, err + } + + return tw, nil +} + +func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (uint64, error) { + var rv uint64 + var fieldsOffsets []uint64 + + for fieldID, fieldName := range fieldsInv { + // record start of this field + fieldsOffsets = append(fieldsOffsets, uint64(w.Count())) + + // write out the dict location and field name length + _, err := writeUvarints(w, dictLocs[fieldID], uint64(len(fieldName))) + if err != nil { + return 0, err + } + + // write out the field name + _, err = w.Write([]byte(fieldName)) + if err != nil { + return 0, err + } + } + + // now write out the fields index + rv = uint64(w.Count()) + for fieldID := range fieldsInv { + err := binary.Write(w, binary.BigEndian, fieldsOffsets[fieldID]) + if err != nil { + return 0, err + } + } + + return rv, nil +} + +// FooterSize is the size of the footer record in bytes +// crc + ver + chunk + field offset + stored offset + num docs + docValueOffset +const FooterSize = 4 + 4 + 4 + 8 + 8 + 8 + 8 + +func persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + chunkMode uint32, crcBeforeFooter uint32, writerIn io.Writer) error { + w := NewCountHashWriter(writerIn) + w.crc = 
crcBeforeFooter + + // write out the number of docs + err := binary.Write(w, binary.BigEndian, numDocs) + if err != nil { + return err + } + // write out the stored field index location: + err = binary.Write(w, binary.BigEndian, storedIndexOffset) + if err != nil { + return err + } + // write out the field index location + err = binary.Write(w, binary.BigEndian, fieldsIndexOffset) + if err != nil { + return err + } + // write out the fieldDocValue location + err = binary.Write(w, binary.BigEndian, docValueOffset) + if err != nil { + return err + } + // write out 32-bit chunk factor + err = binary.Write(w, binary.BigEndian, chunkMode) + if err != nil { + return err + } + // write out 32-bit version + err = binary.Write(w, binary.BigEndian, Version) + if err != nil { + return err + } + // write out CRC-32 of everything upto but not including this CRC + err = binary.Write(w, binary.BigEndian, w.crc) + if err != nil { + return err + } + return nil +} + +func writeUvarints(w io.Writer, vals ...uint64) (tw int, err error) { + buf := make([]byte, binary.MaxVarintLen64) + for _, val := range vals { + n := binary.PutUvarint(buf, val) + var nw int + nw, err = w.Write(buf[:n]) + tw += nw + if err != nil { + return tw, err + } + } + return tw, err +} diff --git a/vendor/github.com/blevesearch/zap/v14/zap.md b/vendor/github.com/blevesearch/zap/v14/zap.md new file mode 100644 index 0000000..d74dc54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v14/zap.md @@ -0,0 +1,177 @@ +# ZAP File Format + +## Legend + +### Sections + + |========| + | | section + |========| + +### Fixed-size fields + + |--------| |----| |--| |-| + | | uint64 | | uint32 | | uint16 | | uint8 + |--------| |----| |--| |-| + +### Varints + + |~~~~~~~~| + | | varint(up to uint64) + |~~~~~~~~| + +### Arbitrary-length fields + + |--------...---| + | | arbitrary-length field (string, vellum, roaring bitmap) + |--------...---| + +### Chunked data + + [--------] + [ ] + [--------] + +## Overview + +Footer 
section describes the configuration of particular ZAP file. The format of footer is version-dependent, so it is necessary to check `V` field before the parsing. + + |==================================================| + | Stored Fields | + |==================================================| + |-----> | Stored Fields Index | + | |==================================================| + | | Dictionaries + Postings + DocValues | + | |==================================================| + | |---> | DocValues Index | + | | |==================================================| + | | | Fields | + | | |==================================================| + | | |-> | Fields Index | + | | | |========|========|========|========|====|====|====| + | | | | D# | SF | F | FDV | CF | V | CC | (Footer) + | | | |========|====|===|====|===|====|===|====|====|====| + | | | | | | + |-+-+-----------------| | | + | |--------------------------| | + |-------------------------------------| + + D#. Number of Docs. + SF. Stored Fields Index Offset. + F. Field Index Offset. + FDV. Field DocValue Offset. + CF. Chunk Factor. + V. Version. + CC. CRC32. + +## Stored Fields + +Stored Fields Index is `D#` consecutive 64-bit unsigned integers - offsets, where relevant Stored Fields Data records are located. + + 0 [SF] [SF + D# * 8] + | Stored Fields | Stored Fields Index | + |================================|==================================| + | | | + | |--------------------| ||--------|--------|. . .|--------|| + | |-> | Stored Fields Data | || 0 | 1 | | D# - 1 || + | | |--------------------| ||--------|----|---|. . .|--------|| + | | | | | + |===|============================|==============|===================| + | | + |-------------------------------------------| + +Stored Fields Data is an arbitrary size record, which consists of metadata and [Snappy](https://github.com/golang/snappy)-compressed data. 
+ + Stored Fields Data + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + | MDS | CDS | MD | CD | + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + + MDS. Metadata size. + CDS. Compressed data size. + MD. Metadata. + CD. Snappy-compressed data. + +## Fields + +Fields Index section located between addresses `F` and `len(file) - len(footer)` and consist of `uint64` values (`F1`, `F2`, ...) which are offsets to records in Fields section. We have `F# = (len(file) - len(footer) - F) / sizeof(uint64)` fields. + + + (...) [F] [F + F#] + | Fields | Fields Index. | + |================================|================================| + | | | + | |~~~~~~~~|~~~~~~~~|---...---|||--------|--------|...|--------|| + ||->| Dict | Length | Name ||| 0 | 1 | | F# - 1 || + || |~~~~~~~~|~~~~~~~~|---...---|||--------|----|---|...|--------|| + || | | | + ||===============================|==============|=================| + | | + |----------------------------------------------| + + +## Dictionaries + Postings + +Each of fields has its own dictionary, encoded in [Vellum](https://github.com/couchbase/vellum) format. Dictionary consists of pairs `(term, offset)`, where `offset` indicates the position of postings (list of documents) for this particular term. + + |================================================================|- Dictionaries + + | | Postings + + | | DocValues + | Freq/Norm (chunked) | + | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | |->[ Freq | Norm (float32 under varint) ] | + | | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | | | + | |------------------------------------------------------------| | + | Location Details (chunked) | | + | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | |->[ Size | Pos | Start | End | Arr# | ArrPos | ... 
] | | + | | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | | | | + | |----------------------| | | + | Postings List | | | + | |~~~~~~~~|~~~~~|~~|~~~~~~~~|-----------...--| | | + | |->| F/N | LD | Length | ROARING BITMAP | | | + | | |~~~~~|~~|~~~~~~~~|~~~~~~~~|-----------...--| | | + | | |----------------------------------------------| | + | |--------------------------------------| | + | Dictionary | | + | |~~~~~~~~|--------------------------|-...-| | + | |->| Length | VELLUM DATA : (TERM -> OFFSET) | | + | | |~~~~~~~~|----------------------------...-| | + | | | + |======|=========================================================|- DocValues Index + | | | + |======|=========================================================|- Fields + | | | + | |~~~~|~~~|~~~~~~~~|---...---| | + | | Dict | Length | Name | | + | |~~~~~~~~|~~~~~~~~|---...---| | + | | + |================================================================| + +## DocValues + +DocValues Index is `F#` pairs of varints, one pair per field. Each pair of varints indicates start and end point of DocValues slice. + + |================================================================| + | |------...--| | + | |->| DocValues |<-| | + | | |------...--| | | + |==|=================|===========================================|- DocValues Index + ||~|~~~~~~~~~|~~~~~~~|~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + || DV1 START | DV1 STOP | . . . . . | DV(F#) START | DV(F#) END || + ||~~~~~~~~~~~|~~~~~~~~~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + |================================================================| + +DocValues is chunked Snappy-compressed values for each document and field. + + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + [ Doc# in Chunk | Doc1 | Offset1 | ... | DocN | OffsetN | SNAPPY COMPRESSED DATA ] + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + +Last 16 bytes are description of chunks. 
+ + |~~~~~~~~~~~~...~|----------------|----------------| + | Chunk Sizes | Chunk Size Arr | Chunk# | + |~~~~~~~~~~~~...~|----------------|----------------| diff --git a/vendor/github.com/blevesearch/zap/v15/.gitignore b/vendor/github.com/blevesearch/zap/v15/.gitignore new file mode 100644 index 0000000..46d1cfa --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/.gitignore @@ -0,0 +1,12 @@ +#* +*.sublime-* +*~ +.#* +.project +.settings +**/.idea/ +**/*.iml +.DS_Store +/cmd/zap/zap +*.test +tags diff --git a/vendor/github.com/blevesearch/zap/v15/LICENSE b/vendor/github.com/blevesearch/zap/v15/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/blevesearch/zap/v15/README.md b/vendor/github.com/blevesearch/zap/v15/README.md new file mode 100644 index 0000000..0facb66 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/README.md @@ -0,0 +1,158 @@ +# zap file format + +Advanced ZAP File Format Documentation is [here](zap.md). + +The file is written in the reverse order that we typically access data. This helps us write in one pass since later sections of the file require file offsets of things we've already written. + +Current usage: + +- mmap the entire file +- crc-32 bytes and version are in fixed position at end of the file +- reading remainder of footer could be version specific +- remainder of footer gives us: + - 3 important offsets (docValue , fields index and stored data index) + - 2 important values (number of docs and chunk factor) +- field data is processed once and memoized onto the heap so that we never have to go back to disk for it +- access to stored data by doc number means first navigating to the stored data index, then accessing a fixed position offset into that slice, which gives us the actual address of the data. the first bytes of that section tell us the size of data so that we know where it ends. 
+- access to all other indexed data follows the following pattern: + - first know the field name -> convert to id + - next navigate to term dictionary for that field + - some operations stop here and do dictionary ops + - next use dictionary to navigate to posting list for a specific term + - walk posting list + - if necessary, walk posting details as we go + - if location info is desired, consult location bitmap to see if it is there + +## stored fields section + +- for each document + - preparation phase: + - produce a slice of metadata bytes and data bytes + - produce these slices in field id order + - field value is appended to the data slice + - metadata slice is varint encoded with the following values for each field value + - field id (uint16) + - field type (byte) + - field value start offset in uncompressed data slice (uint64) + - field value length (uint64) + - field number of array positions (uint64) + - one additional value for each array position (uint64) + - compress the data slice using snappy + - file writing phase: + - remember the start offset for this document + - write out meta data length (varint uint64) + - write out compressed data length (varint uint64) + - write out the metadata bytes + - write out the compressed data bytes + +## stored fields idx + +- for each document + - write start offset (remembered from previous section) of stored data (big endian uint64) + +With this index and a known document number, we have direct access to all the stored field data. 
+ +## posting details (freq/norm) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode term frequency (uint64) + - encode norm factor (float32) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. + +## posting details (location) section + +- for each posting list + - produce a slice containing multiple consecutive chunks (each chunk is varint stream) + - produce a slice remembering offsets of where each chunk starts + - preparation phase: + - for each hit in the posting list + - if this hit is in next chunk close out encoding of last chunk and record offset start of next + - encode field (uint16) + - encode field pos (uint64) + - encode field start (uint64) + - encode field end (uint64) + - encode number of array positions to follow (uint64) + - encode each array position (each uint64) + - file writing phase: + - remember start position for this posting list details + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +If you know the doc number you're interested in, this format lets you jump to the correct chunk (docNum/chunkFactor) directly and then seek within that chunk until you find it. 
+ +## postings list section + +- for each posting list + - preparation phase: + - encode roaring bitmap posting list to bytes (so we know the length) + - file writing phase: + - remember the start position for this posting list + - write freq/norm details offset (remembered from previous, as varint uint64) + - write location details offset (remembered from previous, as varint uint64) + - write length of encoded roaring bitmap + - write the serialized roaring bitmap data + +## dictionary + +- for each field + - preparation phase: + - encode vellum FST with dictionary data pointing to file offset of posting list (remembered from previous) + - file writing phase: + - remember the start position of this persistDictionary + - write length of vellum data (varint uint64) + - write out vellum data + +## fields section + +- for each field + - file writing phase: + - remember start offset for each field + - write dictionary address (remembered from previous) (varint uint64) + - write length of field name (varint uint64) + - write field name bytes + +## fields idx + +- for each field + - file writing phase: + - write big endian uint64 of start offset for each field + +NOTE: currently we don't know or record the length of this fields index. Instead we rely on the fact that we know it immediately precedes a footer of known size. 
+ +## fields DocValue + +- for each field + - preparation phase: + - produce a slice containing multiple consecutive chunks, where each chunk is composed of a meta section followed by compressed columnar field data + - produce a slice remembering the length of each chunk + - file writing phase: + - remember the start position of this first field DocValue offset in the footer + - write out number of chunks that follow (varint uint64) + - write out length of each chunk (each a varint uint64) + - write out the byte slice containing all the chunk data + +NOTE: currently the meta header inside each chunk gives clue to the location offsets and size of the data pertaining to a given docID and any +read operation leverage that meta information to extract the document specific data from the file. + +## footer + +- file writing phase + - write number of docs (big endian uint64) + - write stored field index location (big endian uint64) + - write field index location (big endian uint64) + - write field docValue location (big endian uint64) + - write out chunk factor (big endian uint32) + - write out version (big endian uint32) + - write out file CRC of everything preceding this (big endian uint32) diff --git a/vendor/github.com/blevesearch/zap/v15/build.go b/vendor/github.com/blevesearch/zap/v15/build.go new file mode 100644 index 0000000..1b73183 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/build.go @@ -0,0 +1,156 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bufio" + "math" + "os" + + "github.com/couchbase/vellum" +) + +const Version uint32 = 15 + +const Type string = "zap" + +const fieldNotUninverted = math.MaxUint64 + +func (sb *SegmentBase) Persist(path string) error { + return PersistSegmentBase(sb, path) +} + +// PersistSegmentBase persists SegmentBase in the zap file format. +func PersistSegmentBase(sb *SegmentBase, path string) error { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + br := bufio.NewWriter(f) + + _, err = br.Write(sb.mem) + if err != nil { + cleanup() + return err + } + + err = persistFooter(sb.numDocs, sb.storedIndexOffset, sb.fieldsIndexOffset, sb.docValueOffset, + sb.chunkMode, sb.memCRC, br) + if err != nil { + cleanup() + return err + } + + err = br.Flush() + if err != nil { + cleanup() + return err + } + + err = f.Sync() + if err != nil { + cleanup() + return err + } + + err = f.Close() + if err != nil { + cleanup() + return err + } + + return nil +} + +func persistStoredFieldValues(fieldID int, + storedFieldValues [][]byte, stf []byte, spf [][]uint64, + curr int, metaEncode varintEncoder, data []byte) ( + int, []byte, error) { + for i := 0; i < len(storedFieldValues); i++ { + // encode field + _, err := metaEncode(uint64(fieldID)) + if err != nil { + return 0, nil, err + } + // encode type + _, err = metaEncode(uint64(stf[i])) + if err != nil { + return 0, nil, err + } + // encode start offset + _, err = metaEncode(uint64(curr)) + if err != nil { + return 0, nil, err + } + // end len + _, err = metaEncode(uint64(len(storedFieldValues[i]))) + if err != nil { + return 0, nil, err + } + // encode number of array pos + _, err = metaEncode(uint64(len(spf[i]))) + if err != nil { + return 0, nil, err + } + // encode all array 
positions + for _, pos := range spf[i] { + _, err = metaEncode(pos) + if err != nil { + return 0, nil, err + } + } + + data = append(data, storedFieldValues[i]...) + curr += len(storedFieldValues[i]) + } + + return curr, data, nil +} + +func InitSegmentBase(mem []byte, memCRC uint32, chunkMode uint32, + fieldsMap map[string]uint16, fieldsInv []string, numDocs uint64, + storedIndexOffset uint64, fieldsIndexOffset uint64, docValueOffset uint64, + dictLocs []uint64) (*SegmentBase, error) { + sb := &SegmentBase{ + mem: mem, + memCRC: memCRC, + chunkMode: chunkMode, + fieldsMap: fieldsMap, + fieldsInv: fieldsInv, + numDocs: numDocs, + storedIndexOffset: storedIndexOffset, + fieldsIndexOffset: fieldsIndexOffset, + docValueOffset: docValueOffset, + dictLocs: dictLocs, + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + } + sb.updateSize() + + err := sb.loadDvReaders() + if err != nil { + return nil, err + } + + return sb, nil +} diff --git a/vendor/github.com/blevesearch/zap/v15/chunk.go b/vendor/github.com/blevesearch/zap/v15/chunk.go new file mode 100644 index 0000000..4307d0e --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/chunk.go @@ -0,0 +1,67 @@ +// Copyright (c) 2019 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "fmt" +) + +// LegacyChunkMode was the original chunk mode (always chunk size 1024) +// this mode is still used for chunking doc values. 
+var LegacyChunkMode uint32 = 1024 + +// DefaultChunkMode is the most recent improvement to chunking and should +// be used by default. +var DefaultChunkMode uint32 = 1026 + +func getChunkSize(chunkMode uint32, cardinality uint64, maxDocs uint64) (uint64, error) { + switch { + // any chunkMode <= 1024 will always chunk with chunkSize=chunkMode + case chunkMode <= 1024: + // legacy chunk size + return uint64(chunkMode), nil + + case chunkMode == 1025: + // attempt at simple improvement + // theory - the point of chunking is to put a bound on the maximum number of + // calls to Next() needed to find a random document. ie, you should be able + // to do one jump to the correct chunk, and then walk through at most + // chunk-size items + // previously 1024 was chosen as the chunk size, but this is particularly + // wasteful for low cardinality terms. the observation is that if there + // are less than 1024 items, why not put them all in one chunk, + // this way you'll still achieve the same goal of visiting at most + // chunk-size items. + // no attempt is made to tweak any other case + if cardinality <= 1024 { + return maxDocs, nil + } + return 1024, nil + + case chunkMode == 1026: + // improve upon the ideas tested in chunkMode 1025 + // the observation that the fewest number of dense chunks is the most + // desirable layout, given the built-in assumptions of chunking + // (that we want to put an upper-bound on the number of items you must + // walk over without skipping, currently tuned to 1024) + // + // 1. compute the number of chunks needed (max 1024/chunk) + // 2. 
convert to chunkSize, dividing into maxDocs + numChunks := (cardinality / 1024) + 1 + chunkSize := maxDocs / numChunks + return chunkSize, nil + } + return 0, fmt.Errorf("unknown chunk mode %d", chunkMode) +} diff --git a/vendor/github.com/blevesearch/zap/v15/contentcoder.go b/vendor/github.com/blevesearch/zap/v15/contentcoder.go new file mode 100644 index 0000000..c145b5a --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/contentcoder.go @@ -0,0 +1,243 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" + "reflect" + + "github.com/golang/snappy" +) + +var reflectStaticSizeMetaData int + +func init() { + var md MetaData + reflectStaticSizeMetaData = int(reflect.TypeOf(md).Size()) +} + +var termSeparator byte = 0xff +var termSeparatorSplitSlice = []byte{termSeparator} + +type chunkedContentCoder struct { + final []byte + chunkSize uint64 + currChunk uint64 + chunkLens []uint64 + + w io.Writer + progressiveWrite bool + + chunkMetaBuf bytes.Buffer + chunkBuf bytes.Buffer + + chunkMeta []MetaData + + compressed []byte // temp buf for snappy compression +} + +// MetaData represents the data information inside a +// chunk. 
+type MetaData struct { + DocNum uint64 // docNum of the data inside the chunk + DocDvOffset uint64 // offset of data inside the chunk for the given docid +} + +// newChunkedContentCoder returns a new chunk content coder which +// packs data into chunks based on the provided chunkSize +func newChunkedContentCoder(chunkSize uint64, maxDocNum uint64, + w io.Writer, progressiveWrite bool) *chunkedContentCoder { + total := maxDocNum/chunkSize + 1 + rv := &chunkedContentCoder{ + chunkSize: chunkSize, + chunkLens: make([]uint64, total), + chunkMeta: make([]MetaData, 0, total), + w: w, + progressiveWrite: progressiveWrite, + } + + return rv +} + +// Reset lets you reuse this chunked content coder. Buffers are reset +// and re used. You cannot change the chunk size. +func (c *chunkedContentCoder) Reset() { + c.currChunk = 0 + c.final = c.final[:0] + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + for i := range c.chunkLens { + c.chunkLens[i] = 0 + } + c.chunkMeta = c.chunkMeta[:0] +} + +func (c *chunkedContentCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) { + total := int(maxDocNum/chunkSize + 1) + c.chunkSize = chunkSize + if cap(c.chunkLens) < total { + c.chunkLens = make([]uint64, total) + } else { + c.chunkLens = c.chunkLens[:total] + } + if cap(c.chunkMeta) < total { + c.chunkMeta = make([]MetaData, 0, total) + } +} + +// Close indicates you are done calling Add() this allows +// the final chunk to be encoded. 
+func (c *chunkedContentCoder) Close() error { + return c.flushContents() +} + +func (c *chunkedContentCoder) flushContents() error { + // flush the contents, with meta information at first + buf := make([]byte, binary.MaxVarintLen64) + n := binary.PutUvarint(buf, uint64(len(c.chunkMeta))) + _, err := c.chunkMetaBuf.Write(buf[:n]) + if err != nil { + return err + } + + // write out the metaData slice + for _, meta := range c.chunkMeta { + _, err := writeUvarints(&c.chunkMetaBuf, meta.DocNum, meta.DocDvOffset) + if err != nil { + return err + } + } + + // write the metadata to final data + metaData := c.chunkMetaBuf.Bytes() + c.final = append(c.final, c.chunkMetaBuf.Bytes()...) + // write the compressed data to the final data + c.compressed = snappy.Encode(c.compressed[:cap(c.compressed)], c.chunkBuf.Bytes()) + c.final = append(c.final, c.compressed...) + + c.chunkLens[c.currChunk] = uint64(len(c.compressed) + len(metaData)) + + if c.progressiveWrite { + _, err := c.w.Write(c.final) + if err != nil { + return err + } + c.final = c.final[:0] + } + + return nil +} + +// Add encodes the provided byte slice into the correct chunk for the provided +// doc num. You MUST call Add() with increasing docNums. +func (c *chunkedContentCoder) Add(docNum uint64, vals []byte) error { + chunk := docNum / c.chunkSize + if chunk != c.currChunk { + // flush out the previous chunk details + err := c.flushContents() + if err != nil { + return err + } + // clearing the chunk specific meta for next chunk + c.chunkBuf.Reset() + c.chunkMetaBuf.Reset() + c.chunkMeta = c.chunkMeta[:0] + c.currChunk = chunk + } + + // get the starting offset for this doc + dvOffset := c.chunkBuf.Len() + dvSize, err := c.chunkBuf.Write(vals) + if err != nil { + return err + } + + c.chunkMeta = append(c.chunkMeta, MetaData{ + DocNum: docNum, + DocDvOffset: uint64(dvOffset + dvSize), + }) + return nil +} + +// Write commits all the encoded chunked contents to the provided writer. +// +// | ..... data ..... 
| chunk offsets (varints) +// | position of chunk offsets (uint64) | number of offsets (uint64) | +// +func (c *chunkedContentCoder) Write() (int, error) { + var tw int + + if c.final != nil { + // write out the data section first + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsStart := uint64(tw) + + if cap(c.final) < binary.MaxVarintLen64 { + c.final = make([]byte, binary.MaxVarintLen64) + } else { + c.final = c.final[0:binary.MaxVarintLen64] + } + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + // write out the chunk offsets + for _, chunkOffset := range chunkOffsets { + n := binary.PutUvarint(c.final, chunkOffset) + nw, err := c.w.Write(c.final[:n]) + tw += nw + if err != nil { + return tw, err + } + } + + chunkOffsetsLen := uint64(tw) - chunkOffsetsStart + + c.final = c.final[0:8] + // write out the length of chunk offsets + binary.BigEndian.PutUint64(c.final, chunkOffsetsLen) + nw, err := c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + // write out the number of chunks + binary.BigEndian.PutUint64(c.final, uint64(len(c.chunkLens))) + nw, err = c.w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + + c.final = c.final[:0] + + return tw, nil +} + +// ReadDocValueBoundary elicits the start, end offsets from a +// metaData header slice +func ReadDocValueBoundary(chunk int, metaHeaders []MetaData) (uint64, uint64) { + var start uint64 + if chunk > 0 { + start = metaHeaders[chunk-1].DocDvOffset + } + return start, metaHeaders[chunk].DocDvOffset +} diff --git a/vendor/github.com/blevesearch/zap/v15/count.go b/vendor/github.com/blevesearch/zap/v15/count.go new file mode 100644 index 0000000..50290f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/count.go @@ -0,0 +1,61 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "hash/crc32" + "io" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// CountHashWriter is a wrapper around a Writer which counts the number of +// bytes which have been written and computes a crc32 hash +type CountHashWriter struct { + w io.Writer + crc uint32 + n int + s segment.StatsReporter +} + +// NewCountHashWriter returns a CountHashWriter which wraps the provided Writer +func NewCountHashWriter(w io.Writer) *CountHashWriter { + return &CountHashWriter{w: w} +} + +func NewCountHashWriterWithStatsReporter(w io.Writer, s segment.StatsReporter) *CountHashWriter { + return &CountHashWriter{w: w, s: s} +} + +// Write writes the provided bytes to the wrapped writer and counts the bytes +func (c *CountHashWriter) Write(b []byte) (int, error) { + n, err := c.w.Write(b) + c.crc = crc32.Update(c.crc, crc32.IEEETable, b[:n]) + c.n += n + if c.s != nil { + c.s.ReportBytesWritten(uint64(n)) + } + return n, err +} + +// Count returns the number of bytes written +func (c *CountHashWriter) Count() int { + return c.n +} + +// Sum32 returns the CRC-32 hash of the content written to this writer +func (c *CountHashWriter) Sum32() uint32 { + return c.crc +} diff --git a/vendor/github.com/blevesearch/zap/v15/dict.go b/vendor/github.com/blevesearch/zap/v15/dict.go new file mode 100644 index 0000000..ad4a8f8 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/dict.go @@ -0,0 +1,263 @@ +// Copyright (c) 2017 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "fmt" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" +) + +// Dictionary is the zap representation of the term dictionary +type Dictionary struct { + sb *SegmentBase + field string + fieldID uint16 + fst *vellum.FST + fstReader *vellum.Reader +} + +// PostingsList returns the postings list for the specified term +func (d *Dictionary) PostingsList(term []byte, except *roaring.Bitmap, + prealloc segment.PostingsList) (segment.PostingsList, error) { + var preallocPL *PostingsList + pl, ok := prealloc.(*PostingsList) + if ok && pl != nil { + preallocPL = pl + } + return d.postingsList(term, except, preallocPL) +} + +func (d *Dictionary) postingsList(term []byte, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + if d.fstReader == nil { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + postingsOffset, exists, err := d.fstReader.Get(term) + if err != nil { + return nil, fmt.Errorf("vellum err: %v", err) + } + if !exists { + if rv == nil || rv == emptyPostingsList { + return emptyPostingsList, nil + } + return d.postingsListInit(rv, except), nil + } + + return d.postingsListFromOffset(postingsOffset, except, rv) +} + +func (d *Dictionary) 
postingsListFromOffset(postingsOffset uint64, except *roaring.Bitmap, rv *PostingsList) (*PostingsList, error) { + rv = d.postingsListInit(rv, except) + + err := rv.read(postingsOffset, d) + if err != nil { + return nil, err + } + + return rv, nil +} + +func (d *Dictionary) postingsListInit(rv *PostingsList, except *roaring.Bitmap) *PostingsList { + if rv == nil || rv == emptyPostingsList { + rv = &PostingsList{} + } else { + postings := rv.postings + if postings != nil { + postings.Clear() + } + + *rv = PostingsList{} // clear the struct + + rv.postings = postings + } + rv.sb = d.sb + rv.except = except + return rv +} + +func (d *Dictionary) Contains(key []byte) (bool, error) { + return d.fst.Contains(key) +} + +// Iterator returns an iterator for this dictionary +func (d *Dictionary) Iterator() segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Iterator(nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// PrefixIterator returns an iterator which only visits terms having the +// the specified prefix +func (d *Dictionary) PrefixIterator(prefix string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + kBeg := []byte(prefix) + kEnd := segment.IncrementBytes(kBeg) + + if d.fst != nil { + itr, err := d.fst.Iterator(kBeg, kEnd) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// RangeIterator returns an iterator which only visits terms between the +// start and end terms. NOTE: bleve.index API specifies the end is inclusive. 
+func (d *Dictionary) RangeIterator(start, end string) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + // need to increment the end position to be inclusive + var endBytes []byte + if len(end) > 0 { + endBytes = []byte(end) + if endBytes[len(endBytes)-1] < 0xff { + endBytes[len(endBytes)-1]++ + } else { + endBytes = append(endBytes, 0xff) + } + } + + if d.fst != nil { + itr, err := d.fst.Iterator([]byte(start), endBytes) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +// AutomatonIterator returns an iterator which only visits terms +// having the the vellum automaton and start/end key range +func (d *Dictionary) AutomatonIterator(a vellum.Automaton, + startKeyInclusive, endKeyExclusive []byte) segment.DictionaryIterator { + rv := &DictionaryIterator{ + d: d, + } + + if d.fst != nil { + itr, err := d.fst.Search(a, startKeyInclusive, endKeyExclusive) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + } + + return rv +} + +func (d *Dictionary) OnlyIterator(onlyTerms [][]byte, + includeCount bool) segment.DictionaryIterator { + + rv := &DictionaryIterator{ + d: d, + omitCount: !includeCount, + } + + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + rv.err = err + return rv + } + for _, term := range onlyTerms { + err = builder.Insert(term, 0) + if err != nil { + rv.err = err + return rv + } + } + err = builder.Close() + if err != nil { + rv.err = err + return rv + } + + onlyFST, err := vellum.Load(buf.Bytes()) + if err != nil { + rv.err = err + return rv + } + + itr, err := d.fst.Search(onlyFST, nil, nil) + if err == nil { + rv.itr = itr + } else if err != vellum.ErrIteratorDone { + rv.err = err + } + + return rv +} + +// DictionaryIterator is an iterator for term dictionary +type DictionaryIterator struct { + d *Dictionary + itr vellum.Iterator + err error + tmp PostingsList + entry 
index.DictEntry + omitCount bool +} + +// Next returns the next entry in the dictionary +func (i *DictionaryIterator) Next() (*index.DictEntry, error) { + if i.err != nil && i.err != vellum.ErrIteratorDone { + return nil, i.err + } else if i.itr == nil || i.err == vellum.ErrIteratorDone { + return nil, nil + } + term, postingsOffset := i.itr.Current() + i.entry.Term = string(term) + if !i.omitCount { + i.err = i.tmp.read(postingsOffset, i.d) + if i.err != nil { + return nil, i.err + } + i.entry.Count = i.tmp.Count() + } + i.err = i.itr.Next() + return &i.entry, nil +} diff --git a/vendor/github.com/blevesearch/zap/v15/docvalues.go b/vendor/github.com/blevesearch/zap/v15/docvalues.go new file mode 100644 index 0000000..793797b --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/docvalues.go @@ -0,0 +1,312 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "math" + "reflect" + "sort" + + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/golang/snappy" +) + +var reflectStaticSizedocValueReader int + +func init() { + var dvi docValueReader + reflectStaticSizedocValueReader = int(reflect.TypeOf(dvi).Size()) +} + +type docNumTermsVisitor func(docNum uint64, terms []byte) error + +type docVisitState struct { + dvrs map[uint16]*docValueReader + segment *SegmentBase +} + +type docValueReader struct { + field string + curChunkNum uint64 + chunkOffsets []uint64 + dvDataLoc uint64 + curChunkHeader []MetaData + curChunkData []byte // compressed data cache + uncompressed []byte // temp buf for snappy decompression +} + +func (di *docValueReader) size() int { + return reflectStaticSizedocValueReader + size.SizeOfPtr + + len(di.field) + + len(di.chunkOffsets)*size.SizeOfUint64 + + len(di.curChunkHeader)*reflectStaticSizeMetaData + + len(di.curChunkData) +} + +func (di *docValueReader) cloneInto(rv *docValueReader) *docValueReader { + if rv == nil { + rv = &docValueReader{} + } + + rv.field = di.field + rv.curChunkNum = math.MaxUint64 + rv.chunkOffsets = di.chunkOffsets // immutable, so it's sharable + rv.dvDataLoc = di.dvDataLoc + rv.curChunkHeader = rv.curChunkHeader[:0] + rv.curChunkData = nil + rv.uncompressed = rv.uncompressed[:0] + + return rv +} + +func (di *docValueReader) curChunkNumber() uint64 { + return di.curChunkNum +} + +func (s *SegmentBase) loadFieldDocValueReader(field string, + fieldDvLocStart, fieldDvLocEnd uint64) (*docValueReader, error) { + // get the docValue offset for the given fields + if fieldDvLocStart == fieldNotUninverted { + // no docValues found, nothing to do + return nil, nil + } + + // read the number of chunks, and chunk offsets position + var numChunks, chunkOffsetsPosition uint64 + + if fieldDvLocEnd-fieldDvLocStart > 16 { + numChunks = 
binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-8 : fieldDvLocEnd]) + // read the length of chunk offsets + chunkOffsetsLen := binary.BigEndian.Uint64(s.mem[fieldDvLocEnd-16 : fieldDvLocEnd-8]) + // acquire position of chunk offsets + chunkOffsetsPosition = (fieldDvLocEnd - 16) - chunkOffsetsLen + } else { + return nil, fmt.Errorf("loadFieldDocValueReader: fieldDvLoc too small: %d-%d", fieldDvLocEnd, fieldDvLocStart) + } + + fdvIter := &docValueReader{ + curChunkNum: math.MaxUint64, + field: field, + chunkOffsets: make([]uint64, int(numChunks)), + } + + // read the chunk offsets + var offset uint64 + for i := 0; i < int(numChunks); i++ { + loc, read := binary.Uvarint(s.mem[chunkOffsetsPosition+offset : chunkOffsetsPosition+offset+binary.MaxVarintLen64]) + if read <= 0 { + return nil, fmt.Errorf("corrupted chunk offset during segment load") + } + fdvIter.chunkOffsets[i] = loc + offset += uint64(read) + } + + // set the data offset + fdvIter.dvDataLoc = fieldDvLocStart + + return fdvIter, nil +} + +func (di *docValueReader) loadDvChunk(chunkNumber uint64, s *SegmentBase) error { + // advance to the chunk where the docValues + // reside for the given docNum + destChunkDataLoc, curChunkEnd := di.dvDataLoc, di.dvDataLoc + start, end := readChunkBoundary(int(chunkNumber), di.chunkOffsets) + if start >= end { + di.curChunkHeader = di.curChunkHeader[:0] + di.curChunkData = nil + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil + } + + destChunkDataLoc += start + curChunkEnd += end + + // read the number of docs reside in the chunk + numDocs, read := binary.Uvarint(s.mem[destChunkDataLoc : destChunkDataLoc+binary.MaxVarintLen64]) + if read <= 0 { + return fmt.Errorf("failed to read the chunk") + } + chunkMetaLoc := destChunkDataLoc + uint64(read) + + offset := uint64(0) + if cap(di.curChunkHeader) < int(numDocs) { + di.curChunkHeader = make([]MetaData, int(numDocs)) + } else { + di.curChunkHeader = di.curChunkHeader[:int(numDocs)] + } + for i := 
0; i < int(numDocs); i++ { + di.curChunkHeader[i].DocNum, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + di.curChunkHeader[i].DocDvOffset, read = binary.Uvarint(s.mem[chunkMetaLoc+offset : chunkMetaLoc+offset+binary.MaxVarintLen64]) + offset += uint64(read) + } + + compressedDataLoc := chunkMetaLoc + offset + dataLength := curChunkEnd - compressedDataLoc + di.curChunkData = s.mem[compressedDataLoc : compressedDataLoc+dataLength] + di.curChunkNum = chunkNumber + di.uncompressed = di.uncompressed[:0] + return nil +} + +func (di *docValueReader) iterateAllDocValues(s *SegmentBase, visitor docNumTermsVisitor) error { + for i := 0; i < len(di.chunkOffsets); i++ { + err := di.loadDvChunk(uint64(i), s) + if err != nil { + return err + } + if di.curChunkData == nil || len(di.curChunkHeader) == 0 { + continue + } + + // uncompress the already loaded data + uncompressed, err := snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + + start := uint64(0) + for _, entry := range di.curChunkHeader { + err = visitor(entry.DocNum, uncompressed[start:entry.DocDvOffset]) + if err != nil { + return err + } + + start = entry.DocDvOffset + } + } + + return nil +} + +func (di *docValueReader) visitDocValues(docNum uint64, + visitor index.DocumentFieldTermVisitor) error { + // binary search the term locations for the docNum + start, end := di.getDocValueLocs(docNum) + if start == math.MaxUint64 || end == math.MaxUint64 || start == end { + return nil + } + + var uncompressed []byte + var err error + // use the uncompressed copy if available + if len(di.uncompressed) > 0 { + uncompressed = di.uncompressed + } else { + // uncompress the already loaded data + uncompressed, err = snappy.Decode(di.uncompressed[:cap(di.uncompressed)], di.curChunkData) + if err != nil { + return err + } + di.uncompressed = uncompressed + } + + // pick the 
terms for the given docNum + uncompressed = uncompressed[start:end] + for { + i := bytes.Index(uncompressed, termSeparatorSplitSlice) + if i < 0 { + break + } + + visitor(di.field, uncompressed[0:i]) + uncompressed = uncompressed[i+1:] + } + + return nil +} + +func (di *docValueReader) getDocValueLocs(docNum uint64) (uint64, uint64) { + i := sort.Search(len(di.curChunkHeader), func(i int) bool { + return di.curChunkHeader[i].DocNum >= docNum + }) + if i < len(di.curChunkHeader) && di.curChunkHeader[i].DocNum == docNum { + return ReadDocValueBoundary(i, di.curChunkHeader) + } + return math.MaxUint64, math.MaxUint64 +} + +// VisitDocumentFieldTerms is an implementation of the +// DocumentFieldTermVisitable interface +func (s *SegmentBase) VisitDocumentFieldTerms(localDocNum uint64, fields []string, + visitor index.DocumentFieldTermVisitor, dvsIn segment.DocVisitState) ( + segment.DocVisitState, error) { + dvs, ok := dvsIn.(*docVisitState) + if !ok || dvs == nil { + dvs = &docVisitState{} + } else { + if dvs.segment != s { + dvs.segment = s + dvs.dvrs = nil + } + } + + var fieldIDPlus1 uint16 + if dvs.dvrs == nil { + dvs.dvrs = make(map[uint16]*docValueReader, len(fields)) + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvIter, exists := s.fieldDvReaders[fieldID]; exists && + dvIter != nil { + dvs.dvrs[fieldID] = dvIter.cloneInto(dvs.dvrs[fieldID]) + } + } + } + + // find the chunkNumber where the docValues are stored + // NOTE: doc values continue to use legacy chunk mode + chunkFactor, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, err + } + docInChunk := localDocNum / chunkFactor + var dvr *docValueReader + for _, field := range fields { + if fieldIDPlus1, ok = s.fieldsMap[field]; !ok { + continue + } + fieldID := fieldIDPlus1 - 1 + if dvr, ok = dvs.dvrs[fieldID]; ok && dvr != nil { + // check if the chunk is already loaded + if docInChunk != 
dvr.curChunkNumber() { + err := dvr.loadDvChunk(docInChunk, s) + if err != nil { + return dvs, err + } + } + + _ = dvr.visitDocValues(localDocNum, visitor) + } + } + return dvs, nil +} + +// VisitableDocValueFields returns the list of fields with +// persisted doc value terms ready to be visitable using the +// VisitDocumentFieldTerms method. +func (s *SegmentBase) VisitableDocValueFields() ([]string, error) { + return s.fieldDvNames, nil +} diff --git a/vendor/github.com/blevesearch/zap/v15/enumerator.go b/vendor/github.com/blevesearch/zap/v15/enumerator.go new file mode 100644 index 0000000..bc5b7e6 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/enumerator.go @@ -0,0 +1,138 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + + "github.com/couchbase/vellum" +) + +// enumerator provides an ordered traversal of multiple vellum +// iterators. Like JOIN of iterators, the enumerator produces a +// sequence of (key, iteratorIndex, value) tuples, sorted by key ASC, +// then iteratorIndex ASC, where the same key might be seen or +// repeated across multiple child iterators. 
+type enumerator struct { + itrs []vellum.Iterator + currKs [][]byte + currVs []uint64 + + lowK []byte + lowIdxs []int + lowCurr int +} + +// newEnumerator returns a new enumerator over the vellum Iterators +func newEnumerator(itrs []vellum.Iterator) (*enumerator, error) { + rv := &enumerator{ + itrs: itrs, + currKs: make([][]byte, len(itrs)), + currVs: make([]uint64, len(itrs)), + lowIdxs: make([]int, 0, len(itrs)), + } + for i, itr := range rv.itrs { + rv.currKs[i], rv.currVs[i] = itr.Current() + } + rv.updateMatches(false) + if rv.lowK == nil && len(rv.lowIdxs) == 0 { + return rv, vellum.ErrIteratorDone + } + return rv, nil +} + +// updateMatches maintains the low key matches based on the currKs +func (m *enumerator) updateMatches(skipEmptyKey bool) { + m.lowK = nil + m.lowIdxs = m.lowIdxs[:0] + m.lowCurr = 0 + + for i, key := range m.currKs { + if (key == nil && m.currVs[i] == 0) || // in case of empty iterator + (len(key) == 0 && skipEmptyKey) { // skip empty keys + continue + } + + cmp := bytes.Compare(key, m.lowK) + if cmp < 0 || len(m.lowIdxs) == 0 { + // reached a new low + m.lowK = key + m.lowIdxs = m.lowIdxs[:0] + m.lowIdxs = append(m.lowIdxs, i) + } else if cmp == 0 { + m.lowIdxs = append(m.lowIdxs, i) + } + } +} + +// Current returns the enumerator's current key, iterator-index, and +// value. If the enumerator is not pointing at a valid value (because +// Next returned an error previously), Current will return nil,0,0. +func (m *enumerator) Current() ([]byte, int, uint64) { + var i int + var v uint64 + if m.lowCurr < len(m.lowIdxs) { + i = m.lowIdxs[m.lowCurr] + v = m.currVs[i] + } + return m.lowK, i, v +} + +// GetLowIdxsAndValues will return all of the iterator indices +// which point to the current key, and their corresponding +// values. This can be used by advanced caller which may need +// to peek into these other sets of data before processing. 
func (m *enumerator) GetLowIdxsAndValues() ([]int, []uint64) {
	values := make([]uint64, 0, len(m.lowIdxs))
	for _, idx := range m.lowIdxs {
		values = append(values, m.currVs[idx])
	}
	return m.lowIdxs, values
}

// Next advances the enumerator to the next key/iterator/value result,
// else vellum.ErrIteratorDone is returned.
func (m *enumerator) Next() error {
	m.lowCurr += 1
	if m.lowCurr >= len(m.lowIdxs) {
		// move all the current low iterators forwards
		for _, vi := range m.lowIdxs {
			err := m.itrs[vi].Next()
			if err != nil && err != vellum.ErrIteratorDone {
				return err
			}
			// ErrIteratorDone leaves Current() as (nil, 0), which
			// updateMatches recognizes as an exhausted child
			m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current()
		}
		// can skip any empty keys encountered at this point
		m.updateMatches(true)
	}
	if m.lowK == nil && len(m.lowIdxs) == 0 {
		return vellum.ErrIteratorDone
	}
	return nil
}

// Close all the underlying Iterators. The first error, if any, will
// be returned.
func (m *enumerator) Close() error {
	var rv error
	// close every child even after an error, remembering only the first
	for _, itr := range m.itrs {
		err := itr.Close()
		if rv == nil {
			rv = err
		}
	}
	return rv
}
diff --git a/vendor/github.com/blevesearch/zap/v15/intDecoder.go b/vendor/github.com/blevesearch/zap/v15/intDecoder.go
new file mode 100644
index 0000000..ea8021d
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v15/intDecoder.go
@@ -0,0 +1,126 @@
// Copyright (c) 2019 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 		http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
+ +package zap + +import ( + "encoding/binary" + "fmt" + + "github.com/blevesearch/bleve/index/scorch/segment" +) + +type chunkedIntDecoder struct { + startOffset uint64 + dataStartOffset uint64 + chunkOffsets []uint64 + curChunkBytes []byte + data []byte + r *segment.MemUvarintReader +} + +// newChunkedIntDecoder expects an optional or reset chunkedIntDecoder for better reuse. +func newChunkedIntDecoder(buf []byte, offset uint64, rv *chunkedIntDecoder) *chunkedIntDecoder { + if rv == nil { + rv = &chunkedIntDecoder{startOffset: offset, data: buf} + } else { + rv.startOffset = offset + rv.data = buf + } + + var n, numChunks uint64 + var read int + if offset == termNotEncoded { + numChunks = 0 + } else { + numChunks, read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64]) + } + + n += uint64(read) + if cap(rv.chunkOffsets) >= int(numChunks) { + rv.chunkOffsets = rv.chunkOffsets[:int(numChunks)] + } else { + rv.chunkOffsets = make([]uint64, int(numChunks)) + } + for i := 0; i < int(numChunks); i++ { + rv.chunkOffsets[i], read = binary.Uvarint(buf[offset+n : offset+n+binary.MaxVarintLen64]) + n += uint64(read) + } + rv.dataStartOffset = offset + n + return rv +} + +func (d *chunkedIntDecoder) loadChunk(chunk int) error { + if d.startOffset == termNotEncoded { + d.r = segment.NewMemUvarintReader([]byte(nil)) + return nil + } + + if chunk >= len(d.chunkOffsets) { + return fmt.Errorf("tried to load freq chunk that doesn't exist %d/(%d)", + chunk, len(d.chunkOffsets)) + } + + end, start := d.dataStartOffset, d.dataStartOffset + s, e := readChunkBoundary(chunk, d.chunkOffsets) + start += s + end += e + d.curChunkBytes = d.data[start:end] + if d.r == nil { + d.r = segment.NewMemUvarintReader(d.curChunkBytes) + } else { + d.r.Reset(d.curChunkBytes) + } + + return nil +} + +func (d *chunkedIntDecoder) reset() { + d.startOffset = 0 + d.dataStartOffset = 0 + d.chunkOffsets = d.chunkOffsets[:0] + d.curChunkBytes = d.curChunkBytes[:0] + d.data = d.data[:0] + if d.r 
!= nil { + d.r.Reset([]byte(nil)) + } +} + +func (d *chunkedIntDecoder) isNil() bool { + return d.curChunkBytes == nil || len(d.curChunkBytes) == 0 +} + +func (d *chunkedIntDecoder) readUvarint() (uint64, error) { + return d.r.ReadUvarint() +} + +func (d *chunkedIntDecoder) readBytes(start, end int) []byte { + return d.curChunkBytes[start:end] +} + +func (d *chunkedIntDecoder) SkipUvarint() { + d.r.SkipUvarint() +} + +func (d *chunkedIntDecoder) SkipBytes(count int) { + d.r.SkipBytes(count) +} + +func (d *chunkedIntDecoder) Len() int { + return d.r.Len() +} + +func (d *chunkedIntDecoder) remainingLen() int { + return len(d.curChunkBytes) - d.r.Len() +} diff --git a/vendor/github.com/blevesearch/zap/v15/intcoder.go b/vendor/github.com/blevesearch/zap/v15/intcoder.go new file mode 100644 index 0000000..c3c488f --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/intcoder.go @@ -0,0 +1,206 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "bytes" + "encoding/binary" + "io" +) + +// We can safely use 0 to represent termNotEncoded since 0 +// could never be a valid address for term location information. 
// (stored field index is always non-empty and earlier in the
// file)
const termNotEncoded = 0

// chunkedIntCoder packs uvarint-encoded integers into chunks, where a
// chunk covers a fixed span of document numbers. Closed chunks are
// appended to final; chunkLens records each chunk's encoded length.
type chunkedIntCoder struct {
	final     []byte       // concatenated encoding of all closed chunks
	chunkSize uint64       // number of docNums covered by one chunk
	chunkBuf  bytes.Buffer // scratch buffer for the chunk being built
	chunkLens []uint64     // encoded byte length of each chunk
	currChunk uint64       // index of the chunk being built

	buf []byte // scratch space for uvarint encoding
}

// newChunkedIntCoder returns a new chunk int coder which packs data into
// chunks based on the provided chunkSize and supports up to the specified
// maxDocNum
func newChunkedIntCoder(chunkSize uint64, maxDocNum uint64) *chunkedIntCoder {
	total := maxDocNum/chunkSize + 1
	rv := &chunkedIntCoder{
		chunkSize: chunkSize,
		chunkLens: make([]uint64, total),
		final:     make([]byte, 0, 64),
	}

	return rv
}

// Reset lets you reuse this chunked int coder. buffers are reset and reused
// from previous use. you cannot change the chunk size or max doc num.
func (c *chunkedIntCoder) Reset() {
	c.final = c.final[:0]
	c.chunkBuf.Reset()
	c.currChunk = 0
	for i := range c.chunkLens {
		c.chunkLens[i] = 0
	}
}

// SetChunkSize changes the chunk size. It is only valid to do so
// with a new chunkedIntCoder, or immediately after calling Reset()
func (c *chunkedIntCoder) SetChunkSize(chunkSize uint64, maxDocNum uint64) {
	total := int(maxDocNum/chunkSize + 1)
	c.chunkSize = chunkSize
	// reuse the existing lens slice when capacity allows
	if cap(c.chunkLens) < total {
		c.chunkLens = make([]uint64, total)
	} else {
		c.chunkLens = c.chunkLens[:total]
	}
}

// startChunk closes out the in-progress chunk and begins a new one when
// the given chunk index differs from the chunk currently being built.
// FIX: this rollover logic was previously duplicated verbatim in both
// Add and AddBytes; it is factored out here so the two stay in sync.
func (c *chunkedIntCoder) startChunk(chunk uint64) {
	if chunk != c.currChunk {
		c.Close()
		c.chunkBuf.Reset()
		c.currChunk = chunk
	}
}

// Add encodes the provided integers into the correct chunk for the provided
// doc num. You MUST call Add() with increasing docNums.
func (c *chunkedIntCoder) Add(docNum uint64, vals ...uint64) error {
	c.startChunk(docNum / c.chunkSize)

	// lazily size the uvarint scratch buffer
	if len(c.buf) < binary.MaxVarintLen64 {
		c.buf = make([]byte, binary.MaxVarintLen64)
	}

	for _, val := range vals {
		wb := binary.PutUvarint(c.buf, val)
		if _, err := c.chunkBuf.Write(c.buf[:wb]); err != nil {
			return err
		}
	}

	return nil
}

// AddBytes appends pre-encoded bytes into the correct chunk for the
// provided doc num. The same increasing-docNum contract as Add applies.
func (c *chunkedIntCoder) AddBytes(docNum uint64, buf []byte) error {
	c.startChunk(docNum / c.chunkSize)

	_, err := c.chunkBuf.Write(buf)
	return err
}

// Close indicates you are done calling Add() this allows the final chunk
// to be encoded.
func (c *chunkedIntCoder) Close() {
	encodingBytes := c.chunkBuf.Bytes()
	c.chunkLens[c.currChunk] = uint64(len(encodingBytes))
	c.final = append(c.final, encodingBytes...)
	c.currChunk = uint64(cap(c.chunkLens)) // sentinel to detect double close
}

// Write commits all the encoded chunked integers to the provided writer.
+func (c *chunkedIntCoder) Write(w io.Writer) (int, error) { + bufNeeded := binary.MaxVarintLen64 * (1 + len(c.chunkLens)) + if len(c.buf) < bufNeeded { + c.buf = make([]byte, bufNeeded) + } + buf := c.buf + + // convert the chunk lengths into chunk offsets + chunkOffsets := modifyLengthsToEndOffsets(c.chunkLens) + + // write out the number of chunks & each chunk offsets + n := binary.PutUvarint(buf, uint64(len(chunkOffsets))) + for _, chunkOffset := range chunkOffsets { + n += binary.PutUvarint(buf[n:], chunkOffset) + } + + tw, err := w.Write(buf[:n]) + if err != nil { + return tw, err + } + + // write out the data + nw, err := w.Write(c.final) + tw += nw + if err != nil { + return tw, err + } + return tw, nil +} + +// writeAt commits all the encoded chunked integers to the provided writer +// and returns the starting offset, total bytes written and an error +func (c *chunkedIntCoder) writeAt(w io.Writer) (uint64, int, error) { + startOffset := uint64(termNotEncoded) + if len(c.final) <= 0 { + return startOffset, 0, nil + } + + if chw := w.(*CountHashWriter); chw != nil { + startOffset = uint64(chw.Count()) + } + + tw, err := c.Write(w) + return startOffset, tw, err +} + +func (c *chunkedIntCoder) FinalSize() int { + return len(c.final) +} + +// modifyLengthsToEndOffsets converts the chunk length array +// to a chunk offset array. The readChunkBoundary +// will figure out the start and end of every chunk from +// these offsets. Starting offset of i'th index is stored +// in i-1'th position except for 0'th index and ending offset +// is stored at i'th index position. +// For 0'th element, starting position is always zero. 
// eg:
// Lens -> 5 5 5 5 => 5 10 15 20
// Lens -> 0 5 0 5 => 0 5 5 10
// Lens -> 0 0 0 5 => 0 0 0 5
// Lens -> 5 0 0 0 => 5 5 5 5
// Lens -> 0 5 0 0 => 0 5 5 5
// Lens -> 0 0 5 0 => 0 0 5 5
func modifyLengthsToEndOffsets(lengths []uint64) []uint64 {
	// In-place running (prefix) sum: element i becomes the end offset
	// of chunk i. FIX(idiom): the original tracked a separate write
	// index that was provably always i-1; a single range loop expresses
	// the identical computation without the extra bookkeeping.
	var runningOffset uint64
	for i, length := range lengths {
		runningOffset += length
		lengths[i] = runningOffset
	}
	return lengths
}

// readChunkBoundary returns the [start, end) byte offsets of the given
// chunk within the data region, derived from the end-offset array
// produced by modifyLengthsToEndOffsets (chunk 0 always starts at 0).
func readChunkBoundary(chunk int, offsets []uint64) (uint64, uint64) {
	var start uint64
	if chunk > 0 {
		start = offsets[chunk-1]
	}
	return start, offsets[chunk]
}
+ +package zap + +import ( + "bufio" + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "sort" + + "github.com/RoaringBitmap/roaring" + seg "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var DefaultFileMergerBufferSize = 1024 * 1024 + +const docDropped = math.MaxUint64 // sentinel docNum to represent a deleted doc + +// Merge takes a slice of segments and bit masks describing which +// documents may be dropped, and creates a new segment containing the +// remaining data. This new segment is built at the specified path. +func (*ZapPlugin) Merge(segments []seg.Segment, drops []*roaring.Bitmap, path string, + closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + + segmentBases := make([]*SegmentBase, len(segments)) + for segmenti, segment := range segments { + switch segmentx := segment.(type) { + case *Segment: + segmentBases[segmenti] = &segmentx.SegmentBase + case *SegmentBase: + segmentBases[segmenti] = segmentx + default: + panic(fmt.Sprintf("oops, unexpected segment type: %T", segment)) + } + } + return mergeSegmentBases(segmentBases, drops, path, DefaultChunkMode, closeCh, s) +} + +func mergeSegmentBases(segmentBases []*SegmentBase, drops []*roaring.Bitmap, path string, + chunkMode uint32, closeCh chan struct{}, s seg.StatsReporter) ( + [][]uint64, uint64, error) { + flag := os.O_RDWR | os.O_CREATE + + f, err := os.OpenFile(path, flag, 0600) + if err != nil { + return nil, 0, err + } + + cleanup := func() { + _ = f.Close() + _ = os.Remove(path) + } + + // buffer the output + br := bufio.NewWriterSize(f, DefaultFileMergerBufferSize) + + // wrap it for counting (tracking offsets) + cr := NewCountHashWriterWithStatsReporter(br, s) + + newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, _, _, _, err := + MergeToWriter(segmentBases, drops, chunkMode, cr, closeCh) + if err != nil { + cleanup() + return nil, 0, err + } + + err = 
persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, + docValueOffset, chunkMode, cr.Sum32(), cr) + if err != nil { + cleanup() + return nil, 0, err + } + + err = br.Flush() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Sync() + if err != nil { + cleanup() + return nil, 0, err + } + + err = f.Close() + if err != nil { + cleanup() + return nil, 0, err + } + + return newDocNums, uint64(cr.Count()), nil +} + +func MergeToWriter(segments []*SegmentBase, drops []*roaring.Bitmap, + chunkMode uint32, cr *CountHashWriter, closeCh chan struct{}) ( + newDocNums [][]uint64, + numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + dictLocs []uint64, fieldsInv []string, fieldsMap map[string]uint16, + err error) { + docValueOffset = uint64(fieldNotUninverted) + + var fieldsSame bool + fieldsSame, fieldsInv = mergeFields(segments) + fieldsMap = mapFields(fieldsInv) + + numDocs = computeNewDocCount(segments, drops) + + if isClosed(closeCh) { + return nil, 0, 0, 0, 0, nil, nil, nil, seg.ErrClosed + } + + if numDocs > 0 { + storedIndexOffset, newDocNums, err = mergeStoredAndRemap(segments, drops, + fieldsMap, fieldsInv, fieldsSame, numDocs, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + dictLocs, docValueOffset, err = persistMergedRest(segments, drops, + fieldsInv, fieldsMap, fieldsSame, + newDocNums, numDocs, chunkMode, cr, closeCh) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + } else { + dictLocs = make([]uint64, len(fieldsInv)) + } + + fieldsIndexOffset, err = persistFields(fieldsInv, cr, dictLocs) + if err != nil { + return nil, 0, 0, 0, 0, nil, nil, nil, err + } + + return newDocNums, numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset, dictLocs, fieldsInv, fieldsMap, nil +} + +// mapFields takes the fieldsInv list and returns a map of fieldName +// to fieldID+1 +func mapFields(fields []string) map[string]uint16 { + rv := make(map[string]uint16, len(fields)) + for 
i, fieldName := range fields { + rv[fieldName] = uint16(i) + 1 + } + return rv +} + +// computeNewDocCount determines how many documents will be in the newly +// merged segment when obsoleted docs are dropped +func computeNewDocCount(segments []*SegmentBase, drops []*roaring.Bitmap) uint64 { + var newDocCount uint64 + for segI, segment := range segments { + newDocCount += segment.numDocs + if drops[segI] != nil { + newDocCount -= drops[segI].GetCardinality() + } + } + return newDocCount +} + +func persistMergedRest(segments []*SegmentBase, dropsIn []*roaring.Bitmap, + fieldsInv []string, fieldsMap map[string]uint16, fieldsSame bool, + newDocNumsIn [][]uint64, newSegDocCount uint64, chunkMode uint32, + w *CountHashWriter, closeCh chan struct{}) ([]uint64, uint64, error) { + + var bufMaxVarintLen64 []byte = make([]byte, binary.MaxVarintLen64) + var bufLoc []uint64 + + var postings *PostingsList + var postItr *PostingsIterator + + rv := make([]uint64, len(fieldsInv)) + fieldDvLocsStart := make([]uint64, len(fieldsInv)) + fieldDvLocsEnd := make([]uint64, len(fieldsInv)) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + locEncoder := newChunkedIntCoder(1024, newSegDocCount-1) + + var vellumBuf bytes.Buffer + newVellum, err := vellum.New(&vellumBuf, nil) + if err != nil { + return nil, 0, err + } + + newRoaring := roaring.NewBitmap() + + // for each field + for fieldID, fieldName := range fieldsInv { + + // collect FST iterators from all active segments for this field + var newDocNums [][]uint64 + var drops []*roaring.Bitmap + var dicts []*Dictionary + var itrs []vellum.Iterator + + var segmentsInFocus []*SegmentBase + + for segmentI, segment := range segments { + + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + dict, err2 := 
segment.dictionary(fieldName) + if err2 != nil { + return nil, 0, err2 + } + if dict != nil && dict.fst != nil { + itr, err2 := dict.fst.Iterator(nil, nil) + if err2 != nil && err2 != vellum.ErrIteratorDone { + return nil, 0, err2 + } + if itr != nil { + newDocNums = append(newDocNums, newDocNumsIn[segmentI]) + if dropsIn[segmentI] != nil && !dropsIn[segmentI].IsEmpty() { + drops = append(drops, dropsIn[segmentI]) + } else { + drops = append(drops, nil) + } + dicts = append(dicts, dict) + itrs = append(itrs, itr) + segmentsInFocus = append(segmentsInFocus, segment) + } + } + } + + var prevTerm []byte + + newRoaring.Clear() + + var lastDocNum, lastFreq, lastNorm uint64 + + // determines whether to use "1-hit" encoding optimization + // when a term appears in only 1 doc, with no loc info, + // has freq of 1, and the docNum fits into 31-bits + use1HitEncoding := func(termCardinality uint64) (bool, uint64, uint64) { + if termCardinality == uint64(1) && locEncoder.FinalSize() <= 0 { + docNum := uint64(newRoaring.Minimum()) + if under32Bits(docNum) && docNum == lastDocNum && lastFreq == 1 { + return true, docNum, lastNorm + } + } + return false, 0, 0 + } + + finishTerm := func(term []byte) error { + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := writePostings(newRoaring, + tfEncoder, locEncoder, use1HitEncoding, w, bufMaxVarintLen64) + if err != nil { + return err + } + + if postingsOffset > 0 { + err = newVellum.Insert(term, postingsOffset) + if err != nil { + return err + } + } + + newRoaring.Clear() + + tfEncoder.Reset() + locEncoder.Reset() + + lastDocNum = 0 + lastFreq = 0 + lastNorm = 0 + + return nil + } + + enumerator, err := newEnumerator(itrs) + + for err == nil { + term, itrI, postingsOffset := enumerator.Current() + + if !bytes.Equal(prevTerm, term) { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + // if the term changed, write out the info collected + // for the previous term + err = 
finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + } + if !bytes.Equal(prevTerm, term) || prevTerm == nil { + // compute cardinality of field-term in new seg + var newCard uint64 + lowItrIdxs, lowItrVals := enumerator.GetLowIdxsAndValues() + for i, idx := range lowItrIdxs { + pl, err := dicts[idx].postingsListFromOffset(lowItrVals[i], drops[idx], nil) + if err != nil { + return nil, 0, err + } + newCard += pl.Count() + } + // compute correct chunk size with this + chunkSize, err := getChunkSize(chunkMode, newCard, newSegDocCount) + if err != nil { + return nil, 0, err + } + // update encoders chunk + tfEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + locEncoder.SetChunkSize(chunkSize, newSegDocCount-1) + } + + postings, err = dicts[itrI].postingsListFromOffset( + postingsOffset, drops[itrI], postings) + if err != nil { + return nil, 0, err + } + + postItr = postings.iterator(true, true, true, postItr) + + if fieldsSame { + // can optimize by copying freq/norm/loc bytes directly + lastDocNum, lastFreq, lastNorm, err = mergeTermFreqNormLocsByCopying( + term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder) + } else { + lastDocNum, lastFreq, lastNorm, bufLoc, err = mergeTermFreqNormLocs( + fieldsMap, term, postItr, newDocNums[itrI], newRoaring, + tfEncoder, locEncoder, bufLoc) + } + if err != nil { + return nil, 0, err + } + + prevTerm = prevTerm[:0] // copy to prevTerm in case Next() reuses term mem + prevTerm = append(prevTerm, term...) 
+ + err = enumerator.Next() + } + if err != vellum.ErrIteratorDone { + return nil, 0, err + } + + err = finishTerm(prevTerm) + if err != nil { + return nil, 0, err + } + + dictOffset := uint64(w.Count()) + + err = newVellum.Close() + if err != nil { + return nil, 0, err + } + vellumData := vellumBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(bufMaxVarintLen64, uint64(len(vellumData))) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return nil, 0, err + } + + // write this vellum to disk + _, err = w.Write(vellumData) + if err != nil { + return nil, 0, err + } + + rv[fieldID] = dictOffset + + // get the field doc value offset (start) + fieldDvLocsStart[fieldID] = uint64(w.Count()) + + // update the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return nil, 0, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, newSegDocCount-1, w, true) + + fdvReadersAvailable := false + var dvIterClone *docValueReader + for segmentI, segment := range segmentsInFocus { + // check for the closure in meantime + if isClosed(closeCh) { + return nil, 0, seg.ErrClosed + } + + fieldIDPlus1 := uint16(segment.fieldsMap[fieldName]) + if dvIter, exists := segment.fieldDvReaders[fieldIDPlus1-1]; exists && + dvIter != nil { + fdvReadersAvailable = true + dvIterClone = dvIter.cloneInto(dvIterClone) + err = dvIterClone.iterateAllDocValues(segment, func(docNum uint64, terms []byte) error { + if newDocNums[segmentI][docNum] == docDropped { + return nil + } + err := fdvEncoder.Add(newDocNums[segmentI][docNum], terms) + if err != nil { + return err + } + return nil + }) + if err != nil { + return nil, 0, err + } + } + } + + if fdvReadersAvailable { + err = fdvEncoder.Close() + if err != nil { + return nil, 0, err + } + + // persist the doc value details for this field + _, err = fdvEncoder.Write() + if err != nil { + return nil, 0, err + } + + 
// get the field doc value offset (end) + fieldDvLocsEnd[fieldID] = uint64(w.Count()) + } else { + fieldDvLocsStart[fieldID] = fieldNotUninverted + fieldDvLocsEnd[fieldID] = fieldNotUninverted + } + + // reset vellum buffer and vellum builder + vellumBuf.Reset() + err = newVellum.Reset(&vellumBuf) + if err != nil { + return nil, 0, err + } + } + + fieldDvLocsOffset := uint64(w.Count()) + + buf := bufMaxVarintLen64 + for i := 0; i < len(fieldDvLocsStart); i++ { + n := binary.PutUvarint(buf, fieldDvLocsStart[i]) + _, err := w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + n = binary.PutUvarint(buf, fieldDvLocsEnd[i]) + _, err = w.Write(buf[:n]) + if err != nil { + return nil, 0, err + } + } + + return rv, fieldDvLocsOffset, nil +} + +func mergeTermFreqNormLocsByCopying(term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder *chunkedIntCoder) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, err error) { + nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err := + postItr.nextBytes() + for err == nil && len(nextFreqNormBytes) > 0 { + hitNewDocNum := newDocNums[nextDocNum] + if hitNewDocNum == docDropped { + return 0, 0, 0, fmt.Errorf("see hit with dropped doc num") + } + + newRoaring.Add(uint32(hitNewDocNum)) + err = tfEncoder.AddBytes(hitNewDocNum, nextFreqNormBytes) + if err != nil { + return 0, 0, 0, err + } + + if len(nextLocBytes) > 0 { + err = locEncoder.AddBytes(hitNewDocNum, nextLocBytes) + if err != nil { + return 0, 0, 0, err + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + nextDocNum, nextFreq, nextNorm, nextFreqNormBytes, nextLocBytes, err = + postItr.nextBytes() + } + + return lastDocNum, lastFreq, lastNorm, err +} + +func mergeTermFreqNormLocs(fieldsMap map[string]uint16, term []byte, postItr *PostingsIterator, + newDocNums []uint64, newRoaring *roaring.Bitmap, + tfEncoder *chunkedIntCoder, locEncoder 
*chunkedIntCoder, bufLoc []uint64) ( + lastDocNum uint64, lastFreq uint64, lastNorm uint64, bufLocOut []uint64, err error) { + next, err := postItr.Next() + for next != nil && err == nil { + hitNewDocNum := newDocNums[next.Number()] + if hitNewDocNum == docDropped { + return 0, 0, 0, nil, fmt.Errorf("see hit with dropped docNum") + } + + newRoaring.Add(uint32(hitNewDocNum)) + + nextFreq := next.Frequency() + var nextNorm uint64 + if pi, ok := next.(*Posting); ok { + nextNorm = pi.NormUint64() + } else { + return 0, 0, 0, nil, fmt.Errorf("unexpected posting type %T", next) + } + + locs := next.Locations() + + err = tfEncoder.Add(hitNewDocNum, + encodeFreqHasLocs(nextFreq, len(locs) > 0), nextNorm) + if err != nil { + return 0, 0, 0, nil, err + } + + if len(locs) > 0 { + numBytesLocs := 0 + for _, loc := range locs { + ap := loc.ArrayPositions() + numBytesLocs += totalUvarintBytes(uint64(fieldsMap[loc.Field()]-1), + loc.Pos(), loc.Start(), loc.End(), uint64(len(ap)), ap) + } + + err = locEncoder.Add(hitNewDocNum, uint64(numBytesLocs)) + if err != nil { + return 0, 0, 0, nil, err + } + + for _, loc := range locs { + ap := loc.ArrayPositions() + if cap(bufLoc) < 5+len(ap) { + bufLoc = make([]uint64, 0, 5+len(ap)) + } + args := bufLoc[0:5] + args[0] = uint64(fieldsMap[loc.Field()] - 1) + args[1] = loc.Pos() + args[2] = loc.Start() + args[3] = loc.End() + args[4] = uint64(len(ap)) + args = append(args, ap...) + err = locEncoder.Add(hitNewDocNum, args...) 
+ if err != nil { + return 0, 0, 0, nil, err + } + } + } + + lastDocNum = hitNewDocNum + lastFreq = nextFreq + lastNorm = nextNorm + + next, err = postItr.Next() + } + + return lastDocNum, lastFreq, lastNorm, bufLoc, err +} + +func writePostings(postings *roaring.Bitmap, tfEncoder, locEncoder *chunkedIntCoder, + use1HitEncoding func(uint64) (bool, uint64, uint64), + w *CountHashWriter, bufMaxVarintLen64 []byte) ( + offset uint64, err error) { + termCardinality := postings.GetCardinality() + if termCardinality <= 0 { + return 0, nil + } + + if use1HitEncoding != nil { + encodeAs1Hit, docNum1Hit, normBits1Hit := use1HitEncoding(termCardinality) + if encodeAs1Hit { + return FSTValEncode1Hit(docNum1Hit, normBits1Hit), nil + } + } + + var tfOffset uint64 + tfOffset, _, err = tfEncoder.writeAt(w) + if err != nil { + return 0, err + } + + var locOffset uint64 + locOffset, _, err = locEncoder.writeAt(w) + if err != nil { + return 0, err + } + + postingsOffset := uint64(w.Count()) + + n := binary.PutUvarint(bufMaxVarintLen64, tfOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + n = binary.PutUvarint(bufMaxVarintLen64, locOffset) + _, err = w.Write(bufMaxVarintLen64[:n]) + if err != nil { + return 0, err + } + + _, err = writeRoaringWithLen(postings, w, bufMaxVarintLen64) + if err != nil { + return 0, err + } + + return postingsOffset, nil +} + +type varintEncoder func(uint64) (int, error) + +func mergeStoredAndRemap(segments []*SegmentBase, drops []*roaring.Bitmap, + fieldsMap map[string]uint16, fieldsInv []string, fieldsSame bool, newSegDocCount uint64, + w *CountHashWriter, closeCh chan struct{}) (uint64, [][]uint64, error) { + var rv [][]uint64 // The remapped or newDocNums for each segment. 
+ + var newDocNum uint64 + + var curr int + var data, compressed []byte + var metaBuf bytes.Buffer + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return metaBuf.Write(varBuf[:wb]) + } + + vals := make([][][]byte, len(fieldsInv)) + typs := make([][]byte, len(fieldsInv)) + poss := make([][][]uint64, len(fieldsInv)) + + var posBuf []uint64 + + docNumOffsets := make([]uint64, newSegDocCount) + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + + // for each segment + for segI, segment := range segments { + // check for the closure in meantime + if isClosed(closeCh) { + return 0, nil, seg.ErrClosed + } + + segNewDocNums := make([]uint64, segment.numDocs) + + dropsI := drops[segI] + + // optimize when the field mapping is the same across all + // segments and there are no deletions, via byte-copying + // of stored docs bytes directly to the writer + if fieldsSame && (dropsI == nil || dropsI.GetCardinality() == 0) { + err := segment.copyStoredDocs(newDocNum, docNumOffsets, w) + if err != nil { + return 0, nil, err + } + + for i := uint64(0); i < segment.numDocs; i++ { + segNewDocNums[i] = newDocNum + newDocNum++ + } + rv = append(rv, segNewDocNums) + + continue + } + + // for each doc num + for docNum := uint64(0); docNum < segment.numDocs; docNum++ { + // TODO: roaring's API limits docNums to 32-bits? 
+ if dropsI != nil && dropsI.Contains(uint32(docNum)) { + segNewDocNums[docNum] = docDropped + continue + } + + segNewDocNums[docNum] = newDocNum + + curr = 0 + metaBuf.Reset() + data = data[:0] + + posTemp := posBuf + + // collect all the data + for i := 0; i < len(fieldsInv); i++ { + vals[i] = vals[i][:0] + typs[i] = typs[i][:0] + poss[i] = poss[i][:0] + } + err := segment.visitDocument(vdc, docNum, func(field string, typ byte, value []byte, pos []uint64) bool { + fieldID := int(fieldsMap[field]) - 1 + vals[fieldID] = append(vals[fieldID], value) + typs[fieldID] = append(typs[fieldID], typ) + + // copy array positions to preserve them beyond the scope of this callback + var curPos []uint64 + if len(pos) > 0 { + if cap(posTemp) < len(pos) { + posBuf = make([]uint64, len(pos)*len(fieldsInv)) + posTemp = posBuf + } + curPos = posTemp[0:len(pos)] + copy(curPos, pos) + posTemp = posTemp[len(pos):] + } + poss[fieldID] = append(poss[fieldID], curPos) + + return true + }) + if err != nil { + return 0, nil, err + } + + // _id field special case optimizes ExternalID() lookups + idFieldVal := vals[uint16(0)][0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, nil, err + } + + // now walk the non-"_id" fields in order + for fieldID := 1; fieldID < len(fieldsInv); fieldID++ { + storedFieldValues := vals[fieldID] + + stf := typs[fieldID] + spf := poss[fieldID] + + var err2 error + curr, data, err2 = persistStoredFieldValues(fieldID, + storedFieldValues, stf, spf, curr, metaEncode, data) + if err2 != nil { + return 0, nil, err2 + } + } + + metaBytes := metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + // record where we're about to start writing + docNumOffsets[newDocNum] = uint64(w.Count()) + + // write out the meta len and compressed data len + _, err = writeUvarints(w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, nil, err + } + // now write the meta + _, err 
= w.Write(metaBytes) + if err != nil { + return 0, nil, err + } + // now write the _id field val (counted as part of the 'compressed' data) + _, err = w.Write(idFieldVal) + if err != nil { + return 0, nil, err + } + // now write the compressed data + _, err = w.Write(compressed) + if err != nil { + return 0, nil, err + } + + newDocNum++ + } + + rv = append(rv, segNewDocNums) + } + + // return value is the start of the stored index + storedIndexOffset := uint64(w.Count()) + + // now write out the stored doc index + for _, docNumOffset := range docNumOffsets { + err := binary.Write(w, binary.BigEndian, docNumOffset) + if err != nil { + return 0, nil, err + } + } + + return storedIndexOffset, rv, nil +} + +// copyStoredDocs writes out a segment's stored doc info, optimized by +// using a single Write() call for the entire set of bytes. The +// newDocNumOffsets is filled with the new offsets for each doc. +func (s *SegmentBase) copyStoredDocs(newDocNum uint64, newDocNumOffsets []uint64, + w *CountHashWriter) error { + if s.numDocs <= 0 { + return nil + } + + indexOffset0, storedOffset0, _, _, _ := + s.getDocStoredOffsets(0) // the segment's first doc + + indexOffsetN, storedOffsetN, readN, metaLenN, dataLenN := + s.getDocStoredOffsets(s.numDocs - 1) // the segment's last doc + + storedOffset0New := uint64(w.Count()) + + storedBytes := s.mem[storedOffset0 : storedOffsetN+readN+metaLenN+dataLenN] + _, err := w.Write(storedBytes) + if err != nil { + return err + } + + // remap the storedOffset's for the docs into new offsets relative + // to storedOffset0New, filling the given docNumOffsetsOut array + for indexOffset := indexOffset0; indexOffset <= indexOffsetN; indexOffset += 8 { + storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8]) + storedOffsetNew := storedOffset - storedOffset0 + storedOffset0New + newDocNumOffsets[newDocNum] = storedOffsetNew + newDocNum += 1 + } + + return nil +} + +// mergeFields builds a unified list of fields used across 
all the +// input segments, and computes whether the fields are the same across +// segments (which depends on fields to be sorted in the same way +// across segments) +func mergeFields(segments []*SegmentBase) (bool, []string) { + fieldsSame := true + + var segment0Fields []string + if len(segments) > 0 { + segment0Fields = segments[0].Fields() + } + + fieldsExist := map[string]struct{}{} + for _, segment := range segments { + fields := segment.Fields() + for fieldi, field := range fields { + fieldsExist[field] = struct{}{} + if len(segment0Fields) != len(fields) || segment0Fields[fieldi] != field { + fieldsSame = false + } + } + } + + rv := make([]string, 0, len(fieldsExist)) + // ensure _id stays first + rv = append(rv, "_id") + for k := range fieldsExist { + if k != "_id" { + rv = append(rv, k) + } + } + + sort.Strings(rv[1:]) // leave _id as first + + return fieldsSame, rv +} + +func isClosed(closeCh chan struct{}) bool { + select { + case <-closeCh: + return true + default: + return false + } +} diff --git a/vendor/github.com/blevesearch/zap/v15/new.go b/vendor/github.com/blevesearch/zap/v15/new.go new file mode 100644 index 0000000..c10a6a0 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/new.go @@ -0,0 +1,860 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "bytes" + "encoding/binary" + "math" + "sort" + "sync" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/analysis" + "github.com/blevesearch/bleve/document" + "github.com/blevesearch/bleve/index" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/couchbase/vellum" + "github.com/golang/snappy" +) + +var NewSegmentBufferNumResultsBump int = 100 +var NewSegmentBufferNumResultsFactor float64 = 1.0 +var NewSegmentBufferAvgBytesPerDocFactor float64 = 1.0 + +// ValidateDocFields can be set by applications to perform additional checks +// on fields in a document being added to a new segment, by default it does +// nothing. +// This API is experimental and may be removed at any time. +var ValidateDocFields = func(field document.Field) error { + return nil +} + +// AnalysisResultsToSegmentBase produces an in-memory zap-encoded +// SegmentBase from analysis results +func (z *ZapPlugin) New(results []*index.AnalysisResult) ( + segment.Segment, uint64, error) { + return z.newWithChunkMode(results, DefaultChunkMode) +} + +func (*ZapPlugin) newWithChunkMode(results []*index.AnalysisResult, + chunkMode uint32) (segment.Segment, uint64, error) { + s := interimPool.Get().(*interim) + + var br bytes.Buffer + if s.lastNumDocs > 0 { + // use previous results to initialize the buf with an estimate + // size, but note that the interim instance comes from a + // global interimPool, so multiple scorch instances indexing + // different docs can lead to low quality estimates + estimateAvgBytesPerDoc := int(float64(s.lastOutSize/s.lastNumDocs) * + NewSegmentBufferNumResultsFactor) + estimateNumResults := int(float64(len(results)+NewSegmentBufferNumResultsBump) * + NewSegmentBufferAvgBytesPerDocFactor) + br.Grow(estimateAvgBytesPerDoc * estimateNumResults) + } + + s.results = results + s.chunkMode = chunkMode + s.w = NewCountHashWriter(&br) + + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, + err := 
s.convert() + if err != nil { + return nil, uint64(0), err + } + + sb, err := InitSegmentBase(br.Bytes(), s.w.Sum32(), chunkMode, + s.FieldsMap, s.FieldsInv, uint64(len(results)), + storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets) + + if err == nil && s.reset() == nil { + s.lastNumDocs = len(results) + s.lastOutSize = len(br.Bytes()) + interimPool.Put(s) + } + + return sb, uint64(len(br.Bytes())), err +} + +var interimPool = sync.Pool{New: func() interface{} { return &interim{} }} + +// interim holds temporary working data used while converting from +// analysis results to a zap-encoded segment +type interim struct { + results []*index.AnalysisResult + + chunkMode uint32 + + w *CountHashWriter + + // FieldsMap adds 1 to field id to avoid zero value issues + // name -> field id + 1 + FieldsMap map[string]uint16 + + // FieldsInv is the inverse of FieldsMap + // field id -> name + FieldsInv []string + + // Term dictionaries for each field + // field id -> term -> postings list id + 1 + Dicts []map[string]uint64 + + // Terms for each field, where terms are sorted ascending + // field id -> []term + DictKeys [][]string + + // Fields whose IncludeDocValues is true + // field id -> bool + IncludeDocValues []bool + + // postings id -> bitmap of docNums + Postings []*roaring.Bitmap + + // postings id -> freq/norm's, one for each docNum in postings + FreqNorms [][]interimFreqNorm + freqNormsBacking []interimFreqNorm + + // postings id -> locs, one for each freq + Locs [][]interimLoc + locsBacking []interimLoc + + numTermsPerPostingsList []int // key is postings list id + numLocsPerPostingsList []int // key is postings list id + + builder *vellum.Builder + builderBuf bytes.Buffer + + metaBuf bytes.Buffer + + tmp0 []byte + tmp1 []byte + + lastNumDocs int + lastOutSize int +} + +func (s *interim) reset() (err error) { + s.results = nil + s.chunkMode = 0 + s.w = nil + s.FieldsMap = nil + s.FieldsInv = nil + for i := range s.Dicts { + s.Dicts[i] = nil + } + 
s.Dicts = s.Dicts[:0] + for i := range s.DictKeys { + s.DictKeys[i] = s.DictKeys[i][:0] + } + s.DictKeys = s.DictKeys[:0] + for i := range s.IncludeDocValues { + s.IncludeDocValues[i] = false + } + s.IncludeDocValues = s.IncludeDocValues[:0] + for _, idn := range s.Postings { + idn.Clear() + } + s.Postings = s.Postings[:0] + s.FreqNorms = s.FreqNorms[:0] + for i := range s.freqNormsBacking { + s.freqNormsBacking[i] = interimFreqNorm{} + } + s.freqNormsBacking = s.freqNormsBacking[:0] + s.Locs = s.Locs[:0] + for i := range s.locsBacking { + s.locsBacking[i] = interimLoc{} + } + s.locsBacking = s.locsBacking[:0] + s.numTermsPerPostingsList = s.numTermsPerPostingsList[:0] + s.numLocsPerPostingsList = s.numLocsPerPostingsList[:0] + s.builderBuf.Reset() + if s.builder != nil { + err = s.builder.Reset(&s.builderBuf) + } + s.metaBuf.Reset() + s.tmp0 = s.tmp0[:0] + s.tmp1 = s.tmp1[:0] + s.lastNumDocs = 0 + s.lastOutSize = 0 + + return err +} + +func (s *interim) grabBuf(size int) []byte { + buf := s.tmp0 + if cap(buf) < size { + buf = make([]byte, size) + s.tmp0 = buf + } + return buf[0:size] +} + +type interimStoredField struct { + vals [][]byte + typs []byte + arrayposs [][]uint64 // array positions +} + +type interimFreqNorm struct { + freq uint64 + norm float32 + numLocs int +} + +type interimLoc struct { + fieldID uint16 + pos uint64 + start uint64 + end uint64 + arrayposs []uint64 +} + +func (s *interim) convert() (uint64, uint64, uint64, []uint64, error) { + s.FieldsMap = map[string]uint16{} + + s.getOrDefineField("_id") // _id field is fieldID 0 + + for _, result := range s.results { + for _, field := range result.Document.CompositeFields { + s.getOrDefineField(field.Name()) + } + for _, field := range result.Document.Fields { + s.getOrDefineField(field.Name()) + } + } + + sort.Strings(s.FieldsInv[1:]) // keep _id as first field + + for fieldID, fieldName := range s.FieldsInv { + s.FieldsMap[fieldName] = uint16(fieldID + 1) + } + + if cap(s.IncludeDocValues) >= 
len(s.FieldsInv) { + s.IncludeDocValues = s.IncludeDocValues[:len(s.FieldsInv)] + } else { + s.IncludeDocValues = make([]bool, len(s.FieldsInv)) + } + + s.prepareDicts() + + for _, dict := range s.DictKeys { + sort.Strings(dict) + } + + s.processDocuments() + + storedIndexOffset, err := s.writeStoredFields() + if err != nil { + return 0, 0, 0, nil, err + } + + var fdvIndexOffset uint64 + var dictOffsets []uint64 + + if len(s.results) > 0 { + fdvIndexOffset, dictOffsets, err = s.writeDicts() + if err != nil { + return 0, 0, 0, nil, err + } + } else { + dictOffsets = make([]uint64, len(s.FieldsInv)) + } + + fieldsIndexOffset, err := persistFields(s.FieldsInv, s.w, dictOffsets) + if err != nil { + return 0, 0, 0, nil, err + } + + return storedIndexOffset, fieldsIndexOffset, fdvIndexOffset, dictOffsets, nil +} + +func (s *interim) getOrDefineField(fieldName string) int { + fieldIDPlus1, exists := s.FieldsMap[fieldName] + if !exists { + fieldIDPlus1 = uint16(len(s.FieldsInv) + 1) + s.FieldsMap[fieldName] = fieldIDPlus1 + s.FieldsInv = append(s.FieldsInv, fieldName) + + s.Dicts = append(s.Dicts, make(map[string]uint64)) + + n := len(s.DictKeys) + if n < cap(s.DictKeys) { + s.DictKeys = s.DictKeys[:n+1] + s.DictKeys[n] = s.DictKeys[n][:0] + } else { + s.DictKeys = append(s.DictKeys, []string(nil)) + } + } + + return int(fieldIDPlus1 - 1) +} + +// fill Dicts and DictKeys from analysis results +func (s *interim) prepareDicts() { + var pidNext int + + var totTFs int + var totLocs int + + visitField := func(fieldID uint16, tfs analysis.TokenFrequencies) { + dict := s.Dicts[fieldID] + dictKeys := s.DictKeys[fieldID] + + for term, tf := range tfs { + pidPlus1, exists := dict[term] + if !exists { + pidNext++ + pidPlus1 = uint64(pidNext) + + dict[term] = pidPlus1 + dictKeys = append(dictKeys, term) + + s.numTermsPerPostingsList = append(s.numTermsPerPostingsList, 0) + s.numLocsPerPostingsList = append(s.numLocsPerPostingsList, 0) + } + + pid := pidPlus1 - 1 + + 
s.numTermsPerPostingsList[pid] += 1 + s.numLocsPerPostingsList[pid] += len(tf.Locations) + + totLocs += len(tf.Locations) + } + + totTFs += len(tfs) + + s.DictKeys[fieldID] = dictKeys + } + + for _, result := range s.results { + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + _, tf := field.Analyze() + visitField(fieldID, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + tf := result.Analyzed[i] + visitField(fieldID, tf) + } + } + + numPostingsLists := pidNext + + if cap(s.Postings) >= numPostingsLists { + s.Postings = s.Postings[:numPostingsLists] + } else { + postings := make([]*roaring.Bitmap, numPostingsLists) + copy(postings, s.Postings[:cap(s.Postings)]) + for i := 0; i < numPostingsLists; i++ { + if postings[i] == nil { + postings[i] = roaring.New() + } + } + s.Postings = postings + } + + if cap(s.FreqNorms) >= numPostingsLists { + s.FreqNorms = s.FreqNorms[:numPostingsLists] + } else { + s.FreqNorms = make([][]interimFreqNorm, numPostingsLists) + } + + if cap(s.freqNormsBacking) >= totTFs { + s.freqNormsBacking = s.freqNormsBacking[:totTFs] + } else { + s.freqNormsBacking = make([]interimFreqNorm, totTFs) + } + + freqNormsBacking := s.freqNormsBacking + for pid, numTerms := range s.numTermsPerPostingsList { + s.FreqNorms[pid] = freqNormsBacking[0:0] + freqNormsBacking = freqNormsBacking[numTerms:] + } + + if cap(s.Locs) >= numPostingsLists { + s.Locs = s.Locs[:numPostingsLists] + } else { + s.Locs = make([][]interimLoc, numPostingsLists) + } + + if cap(s.locsBacking) >= totLocs { + s.locsBacking = s.locsBacking[:totLocs] + } else { + s.locsBacking = make([]interimLoc, totLocs) + } + + locsBacking := s.locsBacking + for pid, numLocs := range s.numLocsPerPostingsList { + s.Locs[pid] = locsBacking[0:0] + locsBacking = locsBacking[numLocs:] + } +} + +func (s *interim) 
processDocuments() { + numFields := len(s.FieldsInv) + reuseFieldLens := make([]int, numFields) + reuseFieldTFs := make([]analysis.TokenFrequencies, numFields) + + for docNum, result := range s.results { + for i := 0; i < numFields; i++ { // clear these for reuse + reuseFieldLens[i] = 0 + reuseFieldTFs[i] = nil + } + + s.processDocument(uint64(docNum), result, + reuseFieldLens, reuseFieldTFs) + } +} + +func (s *interim) processDocument(docNum uint64, + result *index.AnalysisResult, + fieldLens []int, fieldTFs []analysis.TokenFrequencies) { + visitField := func(fieldID uint16, fieldName string, + ln int, tf analysis.TokenFrequencies) { + fieldLens[fieldID] += ln + + existingFreqs := fieldTFs[fieldID] + if existingFreqs != nil { + existingFreqs.MergeAll(fieldName, tf) + } else { + fieldTFs[fieldID] = tf + } + } + + // walk each composite field + for _, field := range result.Document.CompositeFields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln, tf := field.Analyze() + visitField(fieldID, field.Name(), ln, tf) + } + + // walk each field + for i, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + ln := result.Length[i] + tf := result.Analyzed[i] + visitField(fieldID, field.Name(), ln, tf) + } + + // now that it's been rolled up into fieldTFs, walk that + for fieldID, tfs := range fieldTFs { + dict := s.Dicts[fieldID] + norm := math.Float32frombits(uint32(fieldLens[fieldID])) + + for term, tf := range tfs { + pid := dict[term] - 1 + bs := s.Postings[pid] + bs.Add(uint32(docNum)) + + s.FreqNorms[pid] = append(s.FreqNorms[pid], + interimFreqNorm{ + freq: uint64(tf.Frequency()), + norm: norm, + numLocs: len(tf.Locations), + }) + + if len(tf.Locations) > 0 { + locs := s.Locs[pid] + + for _, loc := range tf.Locations { + var locf = uint16(fieldID) + if loc.Field != "" { + locf = uint16(s.getOrDefineField(loc.Field)) + } + var arrayposs []uint64 + if len(loc.ArrayPositions) > 0 { + arrayposs = loc.ArrayPositions + } 
+ locs = append(locs, interimLoc{ + fieldID: locf, + pos: uint64(loc.Position), + start: uint64(loc.Start), + end: uint64(loc.End), + arrayposs: arrayposs, + }) + } + + s.Locs[pid] = locs + } + } + } +} + +func (s *interim) writeStoredFields() ( + storedIndexOffset uint64, err error) { + varBuf := make([]byte, binary.MaxVarintLen64) + metaEncode := func(val uint64) (int, error) { + wb := binary.PutUvarint(varBuf, val) + return s.metaBuf.Write(varBuf[:wb]) + } + + data, compressed := s.tmp0[:0], s.tmp1[:0] + defer func() { s.tmp0, s.tmp1 = data, compressed }() + + // keyed by docNum + docStoredOffsets := make([]uint64, len(s.results)) + + // keyed by fieldID, for the current doc in the loop + docStoredFields := map[uint16]interimStoredField{} + + for docNum, result := range s.results { + for fieldID := range docStoredFields { // reset for next doc + delete(docStoredFields, fieldID) + } + + for _, field := range result.Document.Fields { + fieldID := uint16(s.getOrDefineField(field.Name())) + + opts := field.Options() + + if opts.IsStored() { + isf := docStoredFields[fieldID] + isf.vals = append(isf.vals, field.Value()) + isf.typs = append(isf.typs, encodeFieldType(field)) + isf.arrayposs = append(isf.arrayposs, field.ArrayPositions()) + docStoredFields[fieldID] = isf + } + + if opts.IncludeDocValues() { + s.IncludeDocValues[fieldID] = true + } + + err := ValidateDocFields(field) + if err != nil { + return 0, err + } + } + + var curr int + + s.metaBuf.Reset() + data = data[:0] + + // _id field special case optimizes ExternalID() lookups + idFieldVal := docStoredFields[uint16(0)].vals[0] + _, err = metaEncode(uint64(len(idFieldVal))) + if err != nil { + return 0, err + } + + // handle non-"_id" fields + for fieldID := 1; fieldID < len(s.FieldsInv); fieldID++ { + isf, exists := docStoredFields[uint16(fieldID)] + if exists { + curr, data, err = persistStoredFieldValues( + fieldID, isf.vals, isf.typs, isf.arrayposs, + curr, metaEncode, data) + if err != nil { + return 0, 
err + } + } + } + + metaBytes := s.metaBuf.Bytes() + + compressed = snappy.Encode(compressed[:cap(compressed)], data) + + docStoredOffsets[docNum] = uint64(s.w.Count()) + + _, err := writeUvarints(s.w, + uint64(len(metaBytes)), + uint64(len(idFieldVal)+len(compressed))) + if err != nil { + return 0, err + } + + _, err = s.w.Write(metaBytes) + if err != nil { + return 0, err + } + + _, err = s.w.Write(idFieldVal) + if err != nil { + return 0, err + } + + _, err = s.w.Write(compressed) + if err != nil { + return 0, err + } + } + + storedIndexOffset = uint64(s.w.Count()) + + for _, docStoredOffset := range docStoredOffsets { + err = binary.Write(s.w, binary.BigEndian, docStoredOffset) + if err != nil { + return 0, err + } + } + + return storedIndexOffset, nil +} + +func (s *interim) writeDicts() (fdvIndexOffset uint64, dictOffsets []uint64, err error) { + dictOffsets = make([]uint64, len(s.FieldsInv)) + + fdvOffsetsStart := make([]uint64, len(s.FieldsInv)) + fdvOffsetsEnd := make([]uint64, len(s.FieldsInv)) + + buf := s.grabBuf(binary.MaxVarintLen64) + + // these int coders are initialized with chunk size 1024 + // however this will be reset to the correct chunk size + // while processing each individual field-term section + tfEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + locEncoder := newChunkedIntCoder(1024, uint64(len(s.results)-1)) + + var docTermMap [][]byte + + if s.builder == nil { + s.builder, err = vellum.New(&s.builderBuf, nil) + if err != nil { + return 0, nil, err + } + } + + for fieldID, terms := range s.DictKeys { + if cap(docTermMap) < len(s.results) { + docTermMap = make([][]byte, len(s.results)) + } else { + docTermMap = docTermMap[0:len(s.results)] + for docNum := range docTermMap { // reset the docTermMap + docTermMap[docNum] = docTermMap[docNum][:0] + } + } + + dict := s.Dicts[fieldID] + + for _, term := range terms { // terms are already sorted + pid := dict[term] - 1 + + postingsBS := s.Postings[pid] + + freqNorms := 
s.FreqNorms[pid] + freqNormOffset := 0 + + locs := s.Locs[pid] + locOffset := 0 + + chunkSize, err := getChunkSize(s.chunkMode, postingsBS.GetCardinality(), uint64(len(s.results))) + if err != nil { + return 0, nil, err + } + tfEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + locEncoder.SetChunkSize(chunkSize, uint64(len(s.results)-1)) + + postingsItr := postingsBS.Iterator() + for postingsItr.HasNext() { + docNum := uint64(postingsItr.Next()) + + freqNorm := freqNorms[freqNormOffset] + + err = tfEncoder.Add(docNum, + encodeFreqHasLocs(freqNorm.freq, freqNorm.numLocs > 0), + uint64(math.Float32bits(freqNorm.norm))) + if err != nil { + return 0, nil, err + } + + if freqNorm.numLocs > 0 { + numBytesLocs := 0 + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + numBytesLocs += totalUvarintBytes( + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs)), loc.arrayposs) + } + + err = locEncoder.Add(docNum, uint64(numBytesLocs)) + if err != nil { + return 0, nil, err + } + + for _, loc := range locs[locOffset : locOffset+freqNorm.numLocs] { + err = locEncoder.Add(docNum, + uint64(loc.fieldID), loc.pos, loc.start, loc.end, + uint64(len(loc.arrayposs))) + if err != nil { + return 0, nil, err + } + + err = locEncoder.Add(docNum, loc.arrayposs...) 
+ if err != nil { + return 0, nil, err + } + } + + locOffset += freqNorm.numLocs + } + + freqNormOffset++ + + docTermMap[docNum] = append( + append(docTermMap[docNum], term...), + termSeparator) + } + + tfEncoder.Close() + locEncoder.Close() + + postingsOffset, err := + writePostings(postingsBS, tfEncoder, locEncoder, nil, s.w, buf) + if err != nil { + return 0, nil, err + } + + if postingsOffset > uint64(0) { + err = s.builder.Insert([]byte(term), postingsOffset) + if err != nil { + return 0, nil, err + } + } + + tfEncoder.Reset() + locEncoder.Reset() + } + + err = s.builder.Close() + if err != nil { + return 0, nil, err + } + + // record where this dictionary starts + dictOffsets[fieldID] = uint64(s.w.Count()) + + vellumData := s.builderBuf.Bytes() + + // write out the length of the vellum data + n := binary.PutUvarint(buf, uint64(len(vellumData))) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + + // write this vellum to disk + _, err = s.w.Write(vellumData) + if err != nil { + return 0, nil, err + } + + // reset vellum for reuse + s.builderBuf.Reset() + + err = s.builder.Reset(&s.builderBuf) + if err != nil { + return 0, nil, err + } + + // write the field doc values + // NOTE: doc values continue to use legacy chunk mode + chunkSize, err := getChunkSize(LegacyChunkMode, 0, 0) + if err != nil { + return 0, nil, err + } + fdvEncoder := newChunkedContentCoder(chunkSize, uint64(len(s.results)-1), s.w, false) + if s.IncludeDocValues[fieldID] { + for docNum, docTerms := range docTermMap { + if len(docTerms) > 0 { + err = fdvEncoder.Add(uint64(docNum), docTerms) + if err != nil { + return 0, nil, err + } + } + } + err = fdvEncoder.Close() + if err != nil { + return 0, nil, err + } + + fdvOffsetsStart[fieldID] = uint64(s.w.Count()) + + _, err = fdvEncoder.Write() + if err != nil { + return 0, nil, err + } + + fdvOffsetsEnd[fieldID] = uint64(s.w.Count()) + + fdvEncoder.Reset() + } else { + fdvOffsetsStart[fieldID] = fieldNotUninverted + 
fdvOffsetsEnd[fieldID] = fieldNotUninverted + } + } + + fdvIndexOffset = uint64(s.w.Count()) + + for i := 0; i < len(fdvOffsetsStart); i++ { + n := binary.PutUvarint(buf, fdvOffsetsStart[i]) + _, err := s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + n = binary.PutUvarint(buf, fdvOffsetsEnd[i]) + _, err = s.w.Write(buf[:n]) + if err != nil { + return 0, nil, err + } + } + + return fdvIndexOffset, dictOffsets, nil +} + +func encodeFieldType(f document.Field) byte { + fieldType := byte('x') + switch f.(type) { + case *document.TextField: + fieldType = 't' + case *document.NumericField: + fieldType = 'n' + case *document.DateTimeField: + fieldType = 'd' + case *document.BooleanField: + fieldType = 'b' + case *document.GeoPointField: + fieldType = 'g' + case *document.CompositeField: + fieldType = 'c' + } + return fieldType +} + +// returns the total # of bytes needed to encode the given uint64's +// into binary.PutUVarint() encoding +func totalUvarintBytes(a, b, c, d, e uint64, more []uint64) (n int) { + n = numUvarintBytes(a) + n += numUvarintBytes(b) + n += numUvarintBytes(c) + n += numUvarintBytes(d) + n += numUvarintBytes(e) + for _, v := range more { + n += numUvarintBytes(v) + } + return n +} + +// returns # of bytes needed to encode x in binary.PutUvarint() encoding +func numUvarintBytes(x uint64) (n int) { + for x >= 0x80 { + x >>= 7 + n++ + } + return n + 1 +} diff --git a/vendor/github.com/blevesearch/zap/v15/plugin.go b/vendor/github.com/blevesearch/zap/v15/plugin.go new file mode 100644 index 0000000..38a0638 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/plugin.go @@ -0,0 +1,37 @@ +// Copyright (c) 2020 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "github.com/blevesearch/bleve/index/scorch/segment" +) + +// ZapPlugin implements the Plugin interface of +// the blevesearch/bleve/index/scorch/segment pkg +type ZapPlugin struct{} + +func (*ZapPlugin) Type() string { + return Type +} + +func (*ZapPlugin) Version() uint32 { + return Version +} + +// Plugin returns an instance segment.Plugin for use +// by the Scorch indexing scheme +func Plugin() segment.Plugin { + return &ZapPlugin{} +} diff --git a/vendor/github.com/blevesearch/zap/v15/posting.go b/vendor/github.com/blevesearch/zap/v15/posting.go new file mode 100644 index 0000000..75faa5d --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/posting.go @@ -0,0 +1,853 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package zap + +import ( + "encoding/binary" + "fmt" + "math" + "reflect" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" +) + +var reflectStaticSizePostingsList int +var reflectStaticSizePostingsIterator int +var reflectStaticSizePosting int +var reflectStaticSizeLocation int + +func init() { + var pl PostingsList + reflectStaticSizePostingsList = int(reflect.TypeOf(pl).Size()) + var pi PostingsIterator + reflectStaticSizePostingsIterator = int(reflect.TypeOf(pi).Size()) + var p Posting + reflectStaticSizePosting = int(reflect.TypeOf(p).Size()) + var l Location + reflectStaticSizeLocation = int(reflect.TypeOf(l).Size()) +} + +// FST or vellum value (uint64) encoding is determined by the top two +// highest-order or most significant bits... +// +// encoding : MSB +// name : 63 62 61...to...bit #0 (LSB) +// ----------+---+---+--------------------------------------------------- +// general : 0 | 0 | 62-bits of postingsOffset. +// ~ : 0 | 1 | reserved for future. +// 1-hit : 1 | 0 | 31-bits of positive float31 norm | 31-bits docNum. +// ~ : 1 | 1 | reserved for future. +// +// Encoding "general" is able to handle all cases, where the +// postingsOffset points to more information about the postings for +// the term. +// +// Encoding "1-hit" is used to optimize a commonly seen case when a +// term has only a single hit. For example, a term in the _id field +// will have only 1 hit. The "1-hit" encoding is used for a term +// in a field when... +// +// - term vector info is disabled for that field; +// - and, the term appears in only a single doc for that field; +// - and, the term's freq is exactly 1 in that single doc for that field; +// - and, the docNum must fit into 31-bits; +// +// Otherwise, the "general" encoding is used instead. 
+// +// In the "1-hit" encoding, the field in that single doc may have +// other terms, which is supported in the "1-hit" encoding by the +// positive float31 norm. + +const FSTValEncodingMask = uint64(0xc000000000000000) +const FSTValEncodingGeneral = uint64(0x0000000000000000) +const FSTValEncoding1Hit = uint64(0x8000000000000000) + +func FSTValEncode1Hit(docNum uint64, normBits uint64) uint64 { + return FSTValEncoding1Hit | ((mask31Bits & normBits) << 31) | (mask31Bits & docNum) +} + +func FSTValDecode1Hit(v uint64) (docNum uint64, normBits uint64) { + return (mask31Bits & v), (mask31Bits & (v >> 31)) +} + +const mask31Bits = uint64(0x000000007fffffff) + +func under32Bits(x uint64) bool { + return x <= mask31Bits +} + +const DocNum1HitFinished = math.MaxUint64 + +var NormBits1Hit = uint64(1) + +// PostingsList is an in-memory representation of a postings list +type PostingsList struct { + sb *SegmentBase + postingsOffset uint64 + freqOffset uint64 + locOffset uint64 + postings *roaring.Bitmap + except *roaring.Bitmap + + // when normBits1Hit != 0, then this postings list came from a + // 1-hit encoding, and only the docNum1Hit & normBits1Hit apply + docNum1Hit uint64 + normBits1Hit uint64 + + chunkSize uint64 +} + +// represents an immutable, empty postings list +var emptyPostingsList = &PostingsList{} + +func (p *PostingsList) Size() int { + sizeInBytes := reflectStaticSizePostingsList + size.SizeOfPtr + + if p.except != nil { + sizeInBytes += int(p.except.GetSizeInBytes()) + } + + return sizeInBytes +} + +func (p *PostingsList) OrInto(receiver *roaring.Bitmap) { + if p.normBits1Hit != 0 { + receiver.Add(uint32(p.docNum1Hit)) + return + } + + if p.postings != nil { + receiver.Or(p.postings) + } +} + +// Iterator returns an iterator for this postings list +func (p *PostingsList) Iterator(includeFreq, includeNorm, includeLocs bool, + prealloc segment.PostingsIterator) segment.PostingsIterator { + if p.normBits1Hit == 0 && p.postings == nil { + return 
emptyPostingsIterator + } + + var preallocPI *PostingsIterator + pi, ok := prealloc.(*PostingsIterator) + if ok && pi != nil { + preallocPI = pi + } + if preallocPI == emptyPostingsIterator { + preallocPI = nil + } + + return p.iterator(includeFreq, includeNorm, includeLocs, preallocPI) +} + +func (p *PostingsList) iterator(includeFreq, includeNorm, includeLocs bool, + rv *PostingsIterator) *PostingsIterator { + if rv == nil { + rv = &PostingsIterator{} + } else { + freqNormReader := rv.freqNormReader + if freqNormReader != nil { + freqNormReader.reset() + } + + locReader := rv.locReader + if locReader != nil { + locReader.reset() + } + + nextLocs := rv.nextLocs[:0] + nextSegmentLocs := rv.nextSegmentLocs[:0] + + buf := rv.buf + + *rv = PostingsIterator{} // clear the struct + + rv.freqNormReader = freqNormReader + rv.locReader = locReader + + rv.nextLocs = nextLocs + rv.nextSegmentLocs = nextSegmentLocs + + rv.buf = buf + } + + rv.postings = p + rv.includeFreqNorm = includeFreq || includeNorm || includeLocs + rv.includeLocs = includeLocs + + if p.normBits1Hit != 0 { + // "1-hit" encoding + rv.docNum1Hit = p.docNum1Hit + rv.normBits1Hit = p.normBits1Hit + + if p.except != nil && p.except.Contains(uint32(rv.docNum1Hit)) { + rv.docNum1Hit = DocNum1HitFinished + } + + return rv + } + + // "general" encoding, check if empty + if p.postings == nil { + return rv + } + + // initialize freq chunk reader + if rv.includeFreqNorm { + rv.freqNormReader = newChunkedIntDecoder(p.sb.mem, p.freqOffset, rv.freqNormReader) + } + + // initialize the loc chunk reader + if rv.includeLocs { + rv.locReader = newChunkedIntDecoder(p.sb.mem, p.locOffset, rv.locReader) + } + + rv.all = p.postings.Iterator() + if p.except != nil { + rv.ActualBM = roaring.AndNot(p.postings, p.except) + rv.Actual = rv.ActualBM.Iterator() + } else { + rv.ActualBM = p.postings + rv.Actual = rv.all // Optimize to use same iterator for all & Actual. 
+ } + + return rv +} + +// Count returns the number of items on this postings list +func (p *PostingsList) Count() uint64 { + var n, e uint64 + if p.normBits1Hit != 0 { + n = 1 + if p.except != nil && p.except.Contains(uint32(p.docNum1Hit)) { + e = 1 + } + } else if p.postings != nil { + n = p.postings.GetCardinality() + if p.except != nil { + e = p.postings.AndCardinality(p.except) + } + } + return n - e +} + +func (rv *PostingsList) read(postingsOffset uint64, d *Dictionary) error { + rv.postingsOffset = postingsOffset + + // handle "1-hit" encoding special case + if rv.postingsOffset&FSTValEncodingMask == FSTValEncoding1Hit { + return rv.init1Hit(postingsOffset) + } + + // read the location of the freq/norm details + var n uint64 + var read int + + rv.freqOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+binary.MaxVarintLen64]) + n += uint64(read) + + rv.locOffset, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + var postingsLen uint64 + postingsLen, read = binary.Uvarint(d.sb.mem[postingsOffset+n : postingsOffset+n+binary.MaxVarintLen64]) + n += uint64(read) + + roaringBytes := d.sb.mem[postingsOffset+n : postingsOffset+n+postingsLen] + + if rv.postings == nil { + rv.postings = roaring.NewBitmap() + } + _, err := rv.postings.FromBuffer(roaringBytes) + if err != nil { + return fmt.Errorf("error loading roaring bitmap: %v", err) + } + + rv.chunkSize, err = getChunkSize(d.sb.chunkMode, + rv.postings.GetCardinality(), d.sb.numDocs) + if err != nil { + return err + } + + return nil +} + +func (rv *PostingsList) init1Hit(fstVal uint64) error { + docNum, normBits := FSTValDecode1Hit(fstVal) + + rv.docNum1Hit = docNum + rv.normBits1Hit = normBits + + return nil +} + +// PostingsIterator provides a way to iterate through the postings list +type PostingsIterator struct { + postings *PostingsList + all roaring.IntPeekable + Actual roaring.IntPeekable + ActualBM *roaring.Bitmap + + 
currChunk uint32 + freqNormReader *chunkedIntDecoder + locReader *chunkedIntDecoder + + next Posting // reused across Next() calls + nextLocs []Location // reused across Next() calls + nextSegmentLocs []segment.Location // reused across Next() calls + + docNum1Hit uint64 + normBits1Hit uint64 + + buf []byte + + includeFreqNorm bool + includeLocs bool +} + +var emptyPostingsIterator = &PostingsIterator{} + +func (i *PostingsIterator) Size() int { + sizeInBytes := reflectStaticSizePostingsIterator + size.SizeOfPtr + + i.next.Size() + // account for freqNormReader, locReader if we start using this. + for _, entry := range i.nextLocs { + sizeInBytes += entry.Size() + } + + return sizeInBytes +} + +func (i *PostingsIterator) loadChunk(chunk int) error { + if i.includeFreqNorm { + err := i.freqNormReader.loadChunk(chunk) + if err != nil { + return err + } + } + + if i.includeLocs { + err := i.locReader.loadChunk(chunk) + if err != nil { + return err + } + } + + i.currChunk = uint32(chunk) + return nil +} + +func (i *PostingsIterator) readFreqNormHasLocs() (uint64, uint64, bool, error) { + if i.normBits1Hit != 0 { + return 1, i.normBits1Hit, false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading frequency: %v", err) + } + + freq, hasLocs := decodeFreqHasLocs(freqHasLocs) + + normBits, err := i.freqNormReader.readUvarint() + if err != nil { + return 0, 0, false, fmt.Errorf("error reading norm: %v", err) + } + + return freq, normBits, hasLocs, nil +} + +func (i *PostingsIterator) skipFreqNormReadHasLocs() (bool, error) { + if i.normBits1Hit != 0 { + return false, nil + } + + freqHasLocs, err := i.freqNormReader.readUvarint() + if err != nil { + return false, fmt.Errorf("error reading freqHasLocs: %v", err) + } + + i.freqNormReader.SkipUvarint() // Skip normBits. + + return freqHasLocs&0x01 != 0, nil // See decodeFreqHasLocs() / hasLocs. 
+} + +func encodeFreqHasLocs(freq uint64, hasLocs bool) uint64 { + rv := freq << 1 + if hasLocs { + rv = rv | 0x01 // 0'th LSB encodes whether there are locations + } + return rv +} + +func decodeFreqHasLocs(freqHasLocs uint64) (uint64, bool) { + freq := freqHasLocs >> 1 + hasLocs := freqHasLocs&0x01 != 0 + return freq, hasLocs +} + +// readLocation processes all the integers on the stream representing a single +// location. +func (i *PostingsIterator) readLocation(l *Location) error { + // read off field + fieldID, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location field: %v", err) + } + // read off pos + pos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location pos: %v", err) + } + // read off start + start, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location start: %v", err) + } + // read off end + end, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location end: %v", err) + } + // read off num array pos + numArrayPos, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading location num array pos: %v", err) + } + + l.field = i.postings.sb.fieldsInv[fieldID] + l.pos = pos + l.start = start + l.end = end + + if cap(l.ap) < int(numArrayPos) { + l.ap = make([]uint64, int(numArrayPos)) + } else { + l.ap = l.ap[:int(numArrayPos)] + } + + // read off array positions + for k := 0; k < int(numArrayPos); k++ { + ap, err := i.locReader.readUvarint() + if err != nil { + return fmt.Errorf("error reading array position: %v", err) + } + + l.ap[k] = ap + } + + return nil +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) Next() (segment.Posting, error) { + return i.nextAtOrAfter(0) +} + +// Advance returns the posting at the specified docNum or it is not present +// the next posting, or if the end is reached, nil +func (i 
*PostingsIterator) Advance(docNum uint64) (segment.Posting, error) { + return i.nextAtOrAfter(docNum) +} + +// Next returns the next posting on the postings list, or nil at the end +func (i *PostingsIterator) nextAtOrAfter(atOrAfter uint64) (segment.Posting, error) { + docNum, exists, err := i.nextDocNumAtOrAfter(atOrAfter) + if err != nil || !exists { + return nil, err + } + + i.next = Posting{} // clear the struct + rv := &i.next + rv.docNum = docNum + + if !i.includeFreqNorm { + return rv, nil + } + + var normBits uint64 + var hasLocs bool + + rv.freq, normBits, hasLocs, err = i.readFreqNormHasLocs() + if err != nil { + return nil, err + } + + rv.norm = math.Float32frombits(uint32(normBits)) + + if i.includeLocs && hasLocs { + // prepare locations into reused slices, where we assume + // rv.freq >= "number of locs", since in a composite field, + // some component fields might have their IncludeTermVector + // flags disabled while other component fields are enabled + if cap(i.nextLocs) >= int(rv.freq) { + i.nextLocs = i.nextLocs[0:rv.freq] + } else { + i.nextLocs = make([]Location, rv.freq, rv.freq*2) + } + if cap(i.nextSegmentLocs) < int(rv.freq) { + i.nextSegmentLocs = make([]segment.Location, rv.freq, rv.freq*2) + } + rv.locs = i.nextSegmentLocs[:0] + + numLocsBytes, err := i.locReader.readUvarint() + if err != nil { + return nil, fmt.Errorf("error reading location numLocsBytes: %v", err) + } + + j := 0 + startBytesRemaining := i.locReader.Len() // # bytes remaining in the locReader + for startBytesRemaining-i.locReader.Len() < int(numLocsBytes) { + err := i.readLocation(&i.nextLocs[j]) + if err != nil { + return nil, err + } + rv.locs = append(rv.locs, &i.nextLocs[j]) + j++ + } + } + + return rv, nil +} + +// nextDocNum returns the next docNum on the postings list, and also +// sets up the currChunk / loc related fields of the iterator. 
+func (i *PostingsIterator) nextDocNumAtOrAfter(atOrAfter uint64) (uint64, bool, error) {
+	// "1-hit" special case: the iterator carries at most one docNum inline
+	// (docNum1Hit), so there is no bitmap or chunk decoder to consult.
+	if i.normBits1Hit != 0 {
+		if i.docNum1Hit == DocNum1HitFinished {
+			return 0, false, nil
+		}
+		if i.docNum1Hit < atOrAfter {
+			// advanced past our 1-hit
+			i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum
+			return 0, false, nil
+		}
+		docNum := i.docNum1Hit
+		i.docNum1Hit = DocNum1HitFinished // consume our 1-hit docNum
+		return docNum, true, nil
+	}
+
+	if i.Actual == nil || !i.Actual.HasNext() {
+		return 0, false, nil
+	}
+
+	// When the "actual" bitmap is the same object as the full postings
+	// bitmap (no except/deletion filtering was applied in iterator()),
+	// take the cheaper "clean" path that avoids walking 'all' in lockstep.
+	if i.postings == nil || i.postings.postings == i.ActualBM {
+		return i.nextDocNumAtOrAfterClean(atOrAfter)
+	}
+
+	i.Actual.AdvanceIfNeeded(uint32(atOrAfter))
+
+	if !i.Actual.HasNext() {
+		// couldn't find anything
+		return 0, false, nil
+	}
+
+	n := i.Actual.Next()
+	allN := i.all.Next()
+	nChunk := n / uint32(i.postings.chunkSize)
+
+	// when allN becomes >= to here, then allN is in the same chunk as nChunk.
+	allNReachesNChunk := nChunk * uint32(i.postings.chunkSize)
+
+	// n is the next actual hit (excluding some postings), and
+	// allN is the next hit in the full postings, and
+	// if they don't match, move 'all' forwards until they do
+	for allN != n {
+		// we've reached same chunk, so move the freq/norm/loc decoders forward
+		if i.includeFreqNorm && allN >= allNReachesNChunk {
+			err := i.currChunkNext(nChunk)
+			if err != nil {
+				return 0, false, err
+			}
+		}
+
+		allN = i.all.Next()
+	}
+
+	// Make sure the freq/norm (and loc) decoders are positioned on n's chunk.
+	if i.includeFreqNorm && (i.currChunk != nChunk || i.freqNormReader.isNil()) {
+		err := i.loadChunk(int(nChunk))
+		if err != nil {
+			return 0, false, fmt.Errorf("error loading chunk: %v", err)
+		}
+	}
+
+	return uint64(n), true, nil
+}
+
+// freqHasLocs1Hit is the pre-encoded freq/hasLocs value (freq == 1,
+// no locations) reused by nextBytes for the 1-hit fast path.
+var freqHasLocs1Hit = encodeFreqHasLocs(1, false)
+
+// nextBytes returns the docNum and the encoded freq & loc bytes for
+// the next posting
+func (i *PostingsIterator) nextBytes() (
+	docNumOut uint64, freq uint64, normBits uint64,
+	bytesFreqNorm []byte, bytesLoc []byte, err error) {
+	docNum, exists, err := i.nextDocNumAtOrAfter(0)
+	if err != nil || !exists {
+		return 0, 0, 0, nil, nil, err
+	}
+
+	if i.normBits1Hit != 0 {
+		// 1-hit: synthesize the freq/norm encoding into a reused buffer,
+		// since there is no underlying freqNormReader to slice from.
+		if i.buf == nil {
+			i.buf = make([]byte, binary.MaxVarintLen64*2)
+		}
+		n := binary.PutUvarint(i.buf, freqHasLocs1Hit)
+		n += binary.PutUvarint(i.buf[n:], i.normBits1Hit)
+		return docNum, uint64(1), i.normBits1Hit, i.buf[:n], nil, nil
+	}
+
+	startFreqNorm := i.freqNormReader.remainingLen()
+
+	var hasLocs bool
+
+	freq, normBits, hasLocs, err = i.readFreqNormHasLocs()
+	if err != nil {
+		return 0, 0, 0, nil, nil, err
+	}
+
+	// The bytes consumed between the two remainingLen() snapshots are
+	// exactly the raw freq/norm encoding for this posting.
+	endFreqNorm := i.freqNormReader.remainingLen()
+	bytesFreqNorm = i.freqNormReader.readBytes(startFreqNorm, endFreqNorm)
+
+	if hasLocs {
+		startLoc := i.locReader.remainingLen()
+
+		numLocsBytes, err := i.locReader.readUvarint()
+		if err != nil {
+			return 0, 0, 0, nil, nil,
+				fmt.Errorf("error reading location nextBytes numLocs: %v", err)
+		}
+
+		// skip over all the location bytes
+		i.locReader.SkipBytes(int(numLocsBytes))
+
+		endLoc := i.locReader.remainingLen()
+		bytesLoc = i.locReader.readBytes(startLoc, endLoc)
+	}
+
+	return docNum, freq, normBits, bytesFreqNorm, bytesLoc, nil
+}
+
+// optimization when the postings list is "clean" (e.g., no updates &
+// no deletions) where the all bitmap is the same as the actual bitmap
+func (i *PostingsIterator) nextDocNumAtOrAfterClean(
+	atOrAfter uint64) (uint64, bool, error) {
+
+	if !i.includeFreqNorm {
+		// No decoder state to maintain; just advance the bitmap iterator.
+		i.Actual.AdvanceIfNeeded(uint32(atOrAfter))
+
+		if !i.Actual.HasNext() {
+			return 0, false, nil // couldn't find anything
+		}
+
+		return uint64(i.Actual.Next()), true, nil
+	}
+
+	// freq-norm's needed, so maintain freq-norm chunk reader
+	sameChunkNexts := 0 // # of times we called Next() in the same chunk
+	n := i.Actual.Next()
+	nChunk := n / uint32(i.postings.chunkSize)
+
+	for uint64(n) < atOrAfter && i.Actual.HasNext() {
+		n = i.Actual.Next()
+
+		nChunkPrev := nChunk
+		nChunk = n / uint32(i.postings.chunkSize)
+
+		if nChunk != nChunkPrev {
+			sameChunkNexts = 0
+		} else {
+			sameChunkNexts += 1
+		}
+	}
+
+	if uint64(n) < atOrAfter {
+		// couldn't find anything
+		return 0, false, nil
+	}
+
+	// Replay the hits we skipped within n's chunk so the freq/norm/loc
+	// decoders stay aligned with the bitmap position.
+	for j := 0; j < sameChunkNexts; j++ {
+		err := i.currChunkNext(nChunk)
+		if err != nil {
+			return 0, false, fmt.Errorf("error optimized currChunkNext: %v", err)
+		}
+	}
+
+	if i.currChunk != nChunk || i.freqNormReader.isNil() {
+		err := i.loadChunk(int(nChunk))
+		if err != nil {
+			return 0, false, fmt.Errorf("error loading chunk: %v", err)
+		}
+	}
+
+	return uint64(n), true, nil
+}
+
+// currChunkNext positions the decoders on nChunk (loading it if not
+// current) and consumes one posting's freq/norm — and, if present and
+// requested, its location bytes — without materializing the values.
+func (i *PostingsIterator) currChunkNext(nChunk uint32) error {
+	if i.currChunk != nChunk || i.freqNormReader.isNil() {
+		err := i.loadChunk(int(nChunk))
+		if err != nil {
+			return fmt.Errorf("error loading chunk: %v", err)
+		}
+	}
+
+	// read off freq/offsets even though we don't care about them
+	hasLocs, err := i.skipFreqNormReadHasLocs()
+	if err != nil {
+		return err
+	}
+
+	if i.includeLocs && hasLocs {
+		numLocsBytes, err := i.locReader.readUvarint()
+		if err != nil {
+			return fmt.Errorf("error reading location numLocsBytes: %v", err)
+		}
+
+		// skip over all the location bytes
+		i.locReader.SkipBytes(int(numLocsBytes))
+	}
+
+	return nil
+}
+
+// DocNum1Hit returns the docNum and true if this is "1-hit" optimized
+// and the docNum is available.
+func (p *PostingsIterator) DocNum1Hit() (uint64, bool) {
+	if p.normBits1Hit != 0 && p.docNum1Hit != DocNum1HitFinished {
+		return p.docNum1Hit, true
+	}
+	return 0, false
+}
+
+// ActualBitmap returns the underlying actual bitmap
+// which can be used up the stack for optimizations
+func (p *PostingsIterator) ActualBitmap() *roaring.Bitmap {
+	return p.ActualBM
+}
+
+// ReplaceActual replaces the ActualBM with the provided
+// bitmap
+func (p *PostingsIterator) ReplaceActual(abm *roaring.Bitmap) {
+	p.ActualBM = abm
+	p.Actual = abm.Iterator()
+}
+
+// PostingsIteratorFromBitmap constructs a PostingsIterator given an
+// "actual" bitmap.
+func PostingsIteratorFromBitmap(bm *roaring.Bitmap,
+	includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) {
+	return &PostingsIterator{
+		ActualBM:        bm,
+		Actual:          bm.Iterator(),
+		includeFreqNorm: includeFreqNorm,
+		includeLocs:     includeLocs,
+	}, nil
+}
+
+// PostingsIteratorFrom1Hit constructs a PostingsIterator given a
+// 1-hit docNum.
+func PostingsIteratorFrom1Hit(docNum1Hit uint64,
+	includeFreqNorm, includeLocs bool) (segment.PostingsIterator, error) {
+	return &PostingsIterator{
+		docNum1Hit:      docNum1Hit,
+		normBits1Hit:    NormBits1Hit,
+		includeFreqNorm: includeFreqNorm,
+		includeLocs:     includeLocs,
+	}, nil
+}
+
+// Posting is a single entry in a postings list
+type Posting struct {
+	docNum uint64
+	freq   uint64
+	norm   float32
+	locs   []segment.Location
+}
+
+// Size returns the memory footprint of this posting in bytes,
+// including its locations.
+func (p *Posting) Size() int {
+	sizeInBytes := reflectStaticSizePosting
+
+	for _, entry := range p.locs {
+		sizeInBytes += entry.Size()
+	}
+
+	return sizeInBytes
+}
+
+// Number returns the document number of this posting in this segment
+func (p *Posting) Number() uint64 {
+	return p.docNum
+}
+
+// Frequency returns the frequencies of occurrence of this term in this doc/field
+func (p *Posting) Frequency() uint64 {
+	return p.freq
+}
+
+// Norm returns the normalization factor for this posting
+// NOTE(review): p.norm holds raw bits (see Float32frombits in
+// nextAtOrAfter); Norm reinterprets those bits as an integer and
+// computes 1/sqrt(bits) — presumably the bits encode the field
+// length in this format version. Confirm against upstream zap v15.
+func (p *Posting) Norm() float64 {
+	return float64(float32(1.0 / math.Sqrt(float64(math.Float32bits(p.norm)))))
+}
+
+// Locations returns the location information for each occurrence
+func (p *Posting) Locations() []segment.Location {
+	return p.locs
+}
+
+// NormUint64 returns the norm value as uint64
+// (the raw stored bits, undoing the Float32frombits done at read time).
+func (p *Posting) NormUint64() uint64 {
+	return uint64(math.Float32bits(p.norm))
+}
+
+// Location represents the location of a single occurrence
+type Location struct {
+	field string
+	pos   uint64
+	start uint64
+	end   uint64
+	ap    []uint64
+}
+
+// Size returns the memory footprint of this location in bytes.
+func (l *Location) Size() int {
+	return reflectStaticSizeLocation +
+		len(l.field) +
+		len(l.ap)*size.SizeOfUint64
+}
+
+// Field returns the name of the field (useful in composite fields to know
+// which original field the value came from)
+func (l *Location) Field() string {
+	return l.field
+}
+
+// Start returns the start byte offset of this occurrence
+func (l *Location) Start() uint64 {
+	return l.start
+}
+
+// End returns the end byte offset of this occurrence
+func (l *Location) End() uint64 {
+	return l.end
+}
+
+// Pos returns the 1-based phrase position of this occurrence
+func (l *Location) Pos() uint64 {
+	return l.pos
+}
+
+// ArrayPositions returns the array position vector associated with this occurrence
+func (l *Location) ArrayPositions() []uint64 {
+	return l.ap
+}
diff --git a/vendor/github.com/blevesearch/zap/v15/read.go b/vendor/github.com/blevesearch/zap/v15/read.go
new file mode 100644
index 0000000..e47d4c6
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v15/read.go
@@ -0,0 +1,43 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package zap
+
+import "encoding/binary"
+
+// getDocStoredMetaAndCompressed returns the stored-fields metadata
+// bytes and the stored-fields data bytes for the given docNum, both as
+// sub-slices of the segment's mmap'ed memory (no copies are made).
+func (s *SegmentBase) getDocStoredMetaAndCompressed(docNum uint64) ([]byte, []byte) {
+	_, storedOffset, n, metaLen, dataLen := s.getDocStoredOffsets(docNum)
+
+	meta := s.mem[storedOffset+n : storedOffset+n+metaLen]
+	data := s.mem[storedOffset+n+metaLen : storedOffset+n+metaLen+dataLen]
+
+	return meta, data
+}
+
+// getDocStoredOffsets returns (indexOffset, storedOffset, n, metaLen,
+// dataLen) for docNum's stored-fields record: indexOffset is the slot
+// in the stored-fields index (8 bytes per doc), storedOffset is the
+// record's start, and n is the number of header bytes consumed by the
+// two uvarint-encoded lengths that precede the meta/data payloads.
+func (s *SegmentBase) getDocStoredOffsets(docNum uint64) (
+	uint64, uint64, uint64, uint64, uint64) {
+	indexOffset := s.storedIndexOffset + (8 * docNum)
+
+	storedOffset := binary.BigEndian.Uint64(s.mem[indexOffset : indexOffset+8])
+
+	var n uint64
+
+	metaLen, read := binary.Uvarint(s.mem[storedOffset : storedOffset+binary.MaxVarintLen64])
+	n += uint64(read)
+
+	dataLen, read := binary.Uvarint(s.mem[storedOffset+n : storedOffset+n+binary.MaxVarintLen64])
+	n += uint64(read)
+
+	return indexOffset, storedOffset, n, metaLen, dataLen
+}
diff --git a/vendor/github.com/blevesearch/zap/v15/segment.go b/vendor/github.com/blevesearch/zap/v15/segment.go
new file mode 100644
index 0000000..2d158e8
--- /dev/null
+++ b/vendor/github.com/blevesearch/zap/v15/segment.go
@@ -0,0 +1,572 @@
+// Copyright (c) 2017 Couchbase, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 		http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +package zap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "os" + "sync" + "unsafe" + + "github.com/RoaringBitmap/roaring" + "github.com/blevesearch/bleve/index/scorch/segment" + "github.com/blevesearch/bleve/size" + "github.com/couchbase/vellum" + mmap "github.com/blevesearch/mmap-go" + "github.com/golang/snappy" +) + +var reflectStaticSizeSegmentBase int + +func init() { + var sb SegmentBase + reflectStaticSizeSegmentBase = int(unsafe.Sizeof(sb)) +} + +// Open returns a zap impl of a segment +func (*ZapPlugin) Open(path string) (segment.Segment, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + + rv := &Segment{ + SegmentBase: SegmentBase{ + mem: mm[0 : len(mm)-FooterSize], + fieldsMap: make(map[string]uint16), + fieldDvReaders: make(map[uint16]*docValueReader), + fieldFSTs: make(map[uint16]*vellum.FST), + }, + f: f, + mm: mm, + path: path, + refs: 1, + } + rv.SegmentBase.updateSize() + + err = rv.loadConfig() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadFields() + if err != nil { + _ = rv.Close() + return nil, err + } + + err = rv.loadDvReaders() + if err != nil { + _ = rv.Close() + return nil, err + } + + return rv, nil +} + +// SegmentBase is a memory only, read-only implementation of the +// segment.Segment interface, using zap's data representation. 
+type SegmentBase struct { + mem []byte + memCRC uint32 + chunkMode uint32 + fieldsMap map[string]uint16 // fieldName -> fieldID+1 + fieldsInv []string // fieldID -> fieldName + numDocs uint64 + storedIndexOffset uint64 + fieldsIndexOffset uint64 + docValueOffset uint64 + dictLocs []uint64 + fieldDvReaders map[uint16]*docValueReader // naive chunk cache per field + fieldDvNames []string // field names cached in fieldDvReaders + size uint64 + + m sync.Mutex + fieldFSTs map[uint16]*vellum.FST +} + +func (sb *SegmentBase) Size() int { + return int(sb.size) +} + +func (sb *SegmentBase) updateSize() { + sizeInBytes := reflectStaticSizeSegmentBase + + cap(sb.mem) + + // fieldsMap + for k := range sb.fieldsMap { + sizeInBytes += (len(k) + size.SizeOfString) + size.SizeOfUint16 + } + + // fieldsInv, dictLocs + for _, entry := range sb.fieldsInv { + sizeInBytes += len(entry) + size.SizeOfString + } + sizeInBytes += len(sb.dictLocs) * size.SizeOfUint64 + + // fieldDvReaders + for _, v := range sb.fieldDvReaders { + sizeInBytes += size.SizeOfUint16 + size.SizeOfPtr + if v != nil { + sizeInBytes += v.size() + } + } + + sb.size = uint64(sizeInBytes) +} + +func (sb *SegmentBase) AddRef() {} +func (sb *SegmentBase) DecRef() (err error) { return nil } +func (sb *SegmentBase) Close() (err error) { return nil } + +// Segment implements a persisted segment.Segment interface, by +// embedding an mmap()'ed SegmentBase. +type Segment struct { + SegmentBase + + f *os.File + mm mmap.MMap + path string + version uint32 + crc uint32 + + m sync.Mutex // Protects the fields that follow. 
+ refs int64 +} + +func (s *Segment) Size() int { + // 8 /* size of file pointer */ + // 4 /* size of version -> uint32 */ + // 4 /* size of crc -> uint32 */ + sizeOfUints := 16 + + sizeInBytes := (len(s.path) + size.SizeOfString) + sizeOfUints + + // mutex, refs -> int64 + sizeInBytes += 16 + + // do not include the mmap'ed part + return sizeInBytes + s.SegmentBase.Size() - cap(s.mem) +} + +func (s *Segment) AddRef() { + s.m.Lock() + s.refs++ + s.m.Unlock() +} + +func (s *Segment) DecRef() (err error) { + s.m.Lock() + s.refs-- + if s.refs == 0 { + err = s.closeActual() + } + s.m.Unlock() + return err +} + +func (s *Segment) loadConfig() error { + crcOffset := len(s.mm) - 4 + s.crc = binary.BigEndian.Uint32(s.mm[crcOffset : crcOffset+4]) + + verOffset := crcOffset - 4 + s.version = binary.BigEndian.Uint32(s.mm[verOffset : verOffset+4]) + if s.version != Version { + return fmt.Errorf("unsupported version %d != %d", s.version, Version) + } + + chunkOffset := verOffset - 4 + s.chunkMode = binary.BigEndian.Uint32(s.mm[chunkOffset : chunkOffset+4]) + + docValueOffset := chunkOffset - 8 + s.docValueOffset = binary.BigEndian.Uint64(s.mm[docValueOffset : docValueOffset+8]) + + fieldsIndexOffset := docValueOffset - 8 + s.fieldsIndexOffset = binary.BigEndian.Uint64(s.mm[fieldsIndexOffset : fieldsIndexOffset+8]) + + storedIndexOffset := fieldsIndexOffset - 8 + s.storedIndexOffset = binary.BigEndian.Uint64(s.mm[storedIndexOffset : storedIndexOffset+8]) + + numDocsOffset := storedIndexOffset - 8 + s.numDocs = binary.BigEndian.Uint64(s.mm[numDocsOffset : numDocsOffset+8]) + return nil +} + +func (s *SegmentBase) loadFields() error { + // NOTE for now we assume the fields index immediately precedes + // the footer, and if this changes, need to adjust accordingly (or + // store explicit length), where s.mem was sliced from s.mm in Open(). 
+ fieldsIndexEnd := uint64(len(s.mem)) + + // iterate through fields index + var fieldID uint64 + for s.fieldsIndexOffset+(8*fieldID) < fieldsIndexEnd { + addr := binary.BigEndian.Uint64(s.mem[s.fieldsIndexOffset+(8*fieldID) : s.fieldsIndexOffset+(8*fieldID)+8]) + + dictLoc, read := binary.Uvarint(s.mem[addr:fieldsIndexEnd]) + n := uint64(read) + s.dictLocs = append(s.dictLocs, dictLoc) + + var nameLen uint64 + nameLen, read = binary.Uvarint(s.mem[addr+n : fieldsIndexEnd]) + n += uint64(read) + + name := string(s.mem[addr+n : addr+n+nameLen]) + s.fieldsInv = append(s.fieldsInv, name) + s.fieldsMap[name] = uint16(fieldID + 1) + + fieldID++ + } + return nil +} + +// Dictionary returns the term dictionary for the specified field +func (s *SegmentBase) Dictionary(field string) (segment.TermDictionary, error) { + dict, err := s.dictionary(field) + if err == nil && dict == nil { + return &segment.EmptyDictionary{}, nil + } + return dict, err +} + +func (sb *SegmentBase) dictionary(field string) (rv *Dictionary, err error) { + fieldIDPlus1 := sb.fieldsMap[field] + if fieldIDPlus1 > 0 { + rv = &Dictionary{ + sb: sb, + field: field, + fieldID: fieldIDPlus1 - 1, + } + + dictStart := sb.dictLocs[rv.fieldID] + if dictStart > 0 { + var ok bool + sb.m.Lock() + if rv.fst, ok = sb.fieldFSTs[rv.fieldID]; !ok { + // read the length of the vellum data + vellumLen, read := binary.Uvarint(sb.mem[dictStart : dictStart+binary.MaxVarintLen64]) + fstBytes := sb.mem[dictStart+uint64(read) : dictStart+uint64(read)+vellumLen] + rv.fst, err = vellum.Load(fstBytes) + if err != nil { + sb.m.Unlock() + return nil, fmt.Errorf("dictionary field %s vellum err: %v", field, err) + } + + sb.fieldFSTs[rv.fieldID] = rv.fst + } + + sb.m.Unlock() + rv.fstReader, err = rv.fst.Reader() + if err != nil { + return nil, fmt.Errorf("dictionary field %s vellum reader err: %v", field, err) + } + + } + } + + return rv, nil +} + +// visitDocumentCtx holds data structures that are reusable across +// multiple 
VisitDocument() calls to avoid memory allocations +type visitDocumentCtx struct { + buf []byte + reader bytes.Reader + arrayPos []uint64 +} + +var visitDocumentCtxPool = sync.Pool{ + New: func() interface{} { + reuse := &visitDocumentCtx{} + return reuse + }, +} + +// VisitDocument invokes the DocFieldValueVistor for each stored field +// for the specified doc number +func (s *SegmentBase) VisitDocument(num uint64, visitor segment.DocumentFieldValueVisitor) error { + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + defer visitDocumentCtxPool.Put(vdc) + return s.visitDocument(vdc, num, visitor) +} + +func (s *SegmentBase) visitDocument(vdc *visitDocumentCtx, num uint64, + visitor segment.DocumentFieldValueVisitor) error { + // first make sure this is a valid number in this segment + if num < s.numDocs { + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + idFieldVal := compressed[:idFieldValLen] + + keepGoing := visitor("_id", byte('t'), idFieldVal, nil) + if !keepGoing { + visitDocumentCtxPool.Put(vdc) + return nil + } + + // handle non-"_id" fields + compressed = compressed[idFieldValLen:] + + uncompressed, err := snappy.Decode(vdc.buf[:cap(vdc.buf)], compressed) + if err != nil { + return err + } + + for keepGoing { + field, err := binary.ReadUvarint(&vdc.reader) + if err == io.EOF { + break + } + if err != nil { + return err + } + typ, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + offset, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + l, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + numap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + var arrayPos []uint64 + if numap > 0 { + if cap(vdc.arrayPos) < int(numap) { + vdc.arrayPos = make([]uint64, numap) + } + arrayPos = 
vdc.arrayPos[:numap] + for i := 0; i < int(numap); i++ { + ap, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return err + } + arrayPos[i] = ap + } + } + + value := uncompressed[offset : offset+l] + keepGoing = visitor(s.fieldsInv[field], byte(typ), value, arrayPos) + } + + vdc.buf = uncompressed + } + return nil +} + +// DocID returns the value of the _id field for the given docNum +func (s *SegmentBase) DocID(num uint64) ([]byte, error) { + if num >= s.numDocs { + return nil, nil + } + + vdc := visitDocumentCtxPool.Get().(*visitDocumentCtx) + + meta, compressed := s.getDocStoredMetaAndCompressed(num) + + vdc.reader.Reset(meta) + + // handle _id field special case + idFieldValLen, err := binary.ReadUvarint(&vdc.reader) + if err != nil { + return nil, err + } + idFieldVal := compressed[:idFieldValLen] + + visitDocumentCtxPool.Put(vdc) + + return idFieldVal, nil +} + +// Count returns the number of documents in this segment. +func (s *SegmentBase) Count() uint64 { + return s.numDocs +} + +// DocNumbers returns a bitset corresponding to the doc numbers of all the +// provided _id strings +func (s *SegmentBase) DocNumbers(ids []string) (*roaring.Bitmap, error) { + rv := roaring.New() + + if len(s.fieldsMap) > 0 { + idDict, err := s.dictionary("_id") + if err != nil { + return nil, err + } + + postingsList := emptyPostingsList + + sMax, err := idDict.fst.GetMaxKey() + if err != nil { + return nil, err + } + sMaxStr := string(sMax) + filteredIds := make([]string, 0, len(ids)) + for _, id := range ids { + if id <= sMaxStr { + filteredIds = append(filteredIds, id) + } + } + + for _, id := range filteredIds { + postingsList, err = idDict.postingsList([]byte(id), nil, postingsList) + if err != nil { + return nil, err + } + postingsList.OrInto(rv) + } + } + + return rv, nil +} + +// Fields returns the field names used in this segment +func (s *SegmentBase) Fields() []string { + return s.fieldsInv +} + +// Path returns the path of this segment on disk +func (s 
*Segment) Path() string { + return s.path +} + +// Close releases all resources associated with this segment +func (s *Segment) Close() (err error) { + return s.DecRef() +} + +func (s *Segment) closeActual() (err error) { + if s.mm != nil { + err = s.mm.Unmap() + } + // try to close file even if unmap failed + if s.f != nil { + err2 := s.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +// some helpers i started adding for the command-line utility + +// Data returns the underlying mmaped data slice +func (s *Segment) Data() []byte { + return s.mm +} + +// CRC returns the CRC value stored in the file footer +func (s *Segment) CRC() uint32 { + return s.crc +} + +// Version returns the file version in the file footer +func (s *Segment) Version() uint32 { + return s.version +} + +// ChunkFactor returns the chunk factor in the file footer +func (s *Segment) ChunkMode() uint32 { + return s.chunkMode +} + +// FieldsIndexOffset returns the fields index offset in the file footer +func (s *Segment) FieldsIndexOffset() uint64 { + return s.fieldsIndexOffset +} + +// StoredIndexOffset returns the stored value index offset in the file footer +func (s *Segment) StoredIndexOffset() uint64 { + return s.storedIndexOffset +} + +// DocValueOffset returns the docValue offset in the file footer +func (s *Segment) DocValueOffset() uint64 { + return s.docValueOffset +} + +// NumDocs returns the number of documents in the file footer +func (s *Segment) NumDocs() uint64 { + return s.numDocs +} + +// DictAddr is a helper function to compute the file offset where the +// dictionary is stored for the specified field. 
+func (s *Segment) DictAddr(field string) (uint64, error) { + fieldIDPlus1, ok := s.fieldsMap[field] + if !ok { + return 0, fmt.Errorf("no such field '%s'", field) + } + + return s.dictLocs[fieldIDPlus1-1], nil +} + +func (s *SegmentBase) loadDvReaders() error { + if s.docValueOffset == fieldNotUninverted || s.numDocs == 0 { + return nil + } + + var read uint64 + for fieldID, field := range s.fieldsInv { + var fieldLocStart, fieldLocEnd uint64 + var n int + fieldLocStart, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset start for field %d", fieldID) + } + read += uint64(n) + fieldLocEnd, n = binary.Uvarint(s.mem[s.docValueOffset+read : s.docValueOffset+read+binary.MaxVarintLen64]) + if n <= 0 { + return fmt.Errorf("loadDvReaders: failed to read the docvalue offset end for field %d", fieldID) + } + read += uint64(n) + + fieldDvReader, err := s.loadFieldDocValueReader(field, fieldLocStart, fieldLocEnd) + if err != nil { + return err + } + if fieldDvReader != nil { + s.fieldDvReaders[uint16(fieldID)] = fieldDvReader + s.fieldDvNames = append(s.fieldDvNames, field) + } + } + + return nil +} diff --git a/vendor/github.com/blevesearch/zap/v15/write.go b/vendor/github.com/blevesearch/zap/v15/write.go new file mode 100644 index 0000000..77aefdb --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/write.go @@ -0,0 +1,145 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package zap + +import ( + "encoding/binary" + "io" + + "github.com/RoaringBitmap/roaring" +) + +// writes out the length of the roaring bitmap in bytes as varint +// then writes out the roaring bitmap itself +func writeRoaringWithLen(r *roaring.Bitmap, w io.Writer, + reuseBufVarint []byte) (int, error) { + buf, err := r.ToBytes() + if err != nil { + return 0, err + } + + var tw int + + // write out the length + n := binary.PutUvarint(reuseBufVarint, uint64(len(buf))) + nw, err := w.Write(reuseBufVarint[:n]) + tw += nw + if err != nil { + return tw, err + } + + // write out the roaring bytes + nw, err = w.Write(buf) + tw += nw + if err != nil { + return tw, err + } + + return tw, nil +} + +func persistFields(fieldsInv []string, w *CountHashWriter, dictLocs []uint64) (uint64, error) { + var rv uint64 + var fieldsOffsets []uint64 + + for fieldID, fieldName := range fieldsInv { + // record start of this field + fieldsOffsets = append(fieldsOffsets, uint64(w.Count())) + + // write out the dict location and field name length + _, err := writeUvarints(w, dictLocs[fieldID], uint64(len(fieldName))) + if err != nil { + return 0, err + } + + // write out the field name + _, err = w.Write([]byte(fieldName)) + if err != nil { + return 0, err + } + } + + // now write out the fields index + rv = uint64(w.Count()) + for fieldID := range fieldsInv { + err := binary.Write(w, binary.BigEndian, fieldsOffsets[fieldID]) + if err != nil { + return 0, err + } + } + + return rv, nil +} + +// FooterSize is the size of the footer record in bytes +// crc + ver + chunk + field offset + stored offset + num docs + docValueOffset +const FooterSize = 4 + 4 + 4 + 8 + 8 + 8 + 8 + +func persistFooter(numDocs, storedIndexOffset, fieldsIndexOffset, docValueOffset uint64, + chunkMode uint32, crcBeforeFooter uint32, writerIn io.Writer) error { + w := NewCountHashWriter(writerIn) + w.crc = 
crcBeforeFooter + + // write out the number of docs + err := binary.Write(w, binary.BigEndian, numDocs) + if err != nil { + return err + } + // write out the stored field index location: + err = binary.Write(w, binary.BigEndian, storedIndexOffset) + if err != nil { + return err + } + // write out the field index location + err = binary.Write(w, binary.BigEndian, fieldsIndexOffset) + if err != nil { + return err + } + // write out the fieldDocValue location + err = binary.Write(w, binary.BigEndian, docValueOffset) + if err != nil { + return err + } + // write out 32-bit chunk factor + err = binary.Write(w, binary.BigEndian, chunkMode) + if err != nil { + return err + } + // write out 32-bit version + err = binary.Write(w, binary.BigEndian, Version) + if err != nil { + return err + } + // write out CRC-32 of everything upto but not including this CRC + err = binary.Write(w, binary.BigEndian, w.crc) + if err != nil { + return err + } + return nil +} + +func writeUvarints(w io.Writer, vals ...uint64) (tw int, err error) { + buf := make([]byte, binary.MaxVarintLen64) + for _, val := range vals { + n := binary.PutUvarint(buf, val) + var nw int + nw, err = w.Write(buf[:n]) + tw += nw + if err != nil { + return tw, err + } + } + return tw, err +} diff --git a/vendor/github.com/blevesearch/zap/v15/zap.md b/vendor/github.com/blevesearch/zap/v15/zap.md new file mode 100644 index 0000000..d74dc54 --- /dev/null +++ b/vendor/github.com/blevesearch/zap/v15/zap.md @@ -0,0 +1,177 @@ +# ZAP File Format + +## Legend + +### Sections + + |========| + | | section + |========| + +### Fixed-size fields + + |--------| |----| |--| |-| + | | uint64 | | uint32 | | uint16 | | uint8 + |--------| |----| |--| |-| + +### Varints + + |~~~~~~~~| + | | varint(up to uint64) + |~~~~~~~~| + +### Arbitrary-length fields + + |--------...---| + | | arbitrary-length field (string, vellum, roaring bitmap) + |--------...---| + +### Chunked data + + [--------] + [ ] + [--------] + +## Overview + +Footer 
section describes the configuration of particular ZAP file. The format of footer is version-dependent, so it is necessary to check `V` field before the parsing. + + |==================================================| + | Stored Fields | + |==================================================| + |-----> | Stored Fields Index | + | |==================================================| + | | Dictionaries + Postings + DocValues | + | |==================================================| + | |---> | DocValues Index | + | | |==================================================| + | | | Fields | + | | |==================================================| + | | |-> | Fields Index | + | | | |========|========|========|========|====|====|====| + | | | | D# | SF | F | FDV | CF | V | CC | (Footer) + | | | |========|====|===|====|===|====|===|====|====|====| + | | | | | | + |-+-+-----------------| | | + | |--------------------------| | + |-------------------------------------| + + D#. Number of Docs. + SF. Stored Fields Index Offset. + F. Field Index Offset. + FDV. Field DocValue Offset. + CF. Chunk Factor. + V. Version. + CC. CRC32. + +## Stored Fields + +Stored Fields Index is `D#` consecutive 64-bit unsigned integers - offsets, where relevant Stored Fields Data records are located. + + 0 [SF] [SF + D# * 8] + | Stored Fields | Stored Fields Index | + |================================|==================================| + | | | + | |--------------------| ||--------|--------|. . .|--------|| + | |-> | Stored Fields Data | || 0 | 1 | | D# - 1 || + | | |--------------------| ||--------|----|---|. . .|--------|| + | | | | | + |===|============================|==============|===================| + | | + |-------------------------------------------| + +Stored Fields Data is an arbitrary size record, which consists of metadata and [Snappy](https://github.com/golang/snappy)-compressed data. 
+ + Stored Fields Data + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + | MDS | CDS | MD | CD | + |~~~~~~~~|~~~~~~~~|~~~~~~~~...~~~~~~~~|~~~~~~~~...~~~~~~~~| + + MDS. Metadata size. + CDS. Compressed data size. + MD. Metadata. + CD. Snappy-compressed data. + +## Fields + +Fields Index section located between addresses `F` and `len(file) - len(footer)` and consist of `uint64` values (`F1`, `F2`, ...) which are offsets to records in Fields section. We have `F# = (len(file) - len(footer) - F) / sizeof(uint64)` fields. + + + (...) [F] [F + F#] + | Fields | Fields Index. | + |================================|================================| + | | | + | |~~~~~~~~|~~~~~~~~|---...---|||--------|--------|...|--------|| + ||->| Dict | Length | Name ||| 0 | 1 | | F# - 1 || + || |~~~~~~~~|~~~~~~~~|---...---|||--------|----|---|...|--------|| + || | | | + ||===============================|==============|=================| + | | + |----------------------------------------------| + + +## Dictionaries + Postings + +Each of fields has its own dictionary, encoded in [Vellum](https://github.com/couchbase/vellum) format. Dictionary consists of pairs `(term, offset)`, where `offset` indicates the position of postings (list of documents) for this particular term. + + |================================================================|- Dictionaries + + | | Postings + + | | DocValues + | Freq/Norm (chunked) | + | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | |->[ Freq | Norm (float32 under varint) ] | + | | [~~~~~~|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~] | + | | | + | |------------------------------------------------------------| | + | Location Details (chunked) | | + | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | |->[ Size | Pos | Start | End | Arr# | ArrPos | ... 
] | | + | | [~~~~~~|~~~~~|~~~~~~~|~~~~~|~~~~~~|~~~~~~~~|~~~~~] | | + | | | | + | |----------------------| | | + | Postings List | | | + | |~~~~~~~~|~~~~~|~~|~~~~~~~~|-----------...--| | | + | |->| F/N | LD | Length | ROARING BITMAP | | | + | | |~~~~~|~~|~~~~~~~~|~~~~~~~~|-----------...--| | | + | | |----------------------------------------------| | + | |--------------------------------------| | + | Dictionary | | + | |~~~~~~~~|--------------------------|-...-| | + | |->| Length | VELLUM DATA : (TERM -> OFFSET) | | + | | |~~~~~~~~|----------------------------...-| | + | | | + |======|=========================================================|- DocValues Index + | | | + |======|=========================================================|- Fields + | | | + | |~~~~|~~~|~~~~~~~~|---...---| | + | | Dict | Length | Name | | + | |~~~~~~~~|~~~~~~~~|---...---| | + | | + |================================================================| + +## DocValues + +DocValues Index is `F#` pairs of varints, one pair per field. Each pair of varints indicates start and end point of DocValues slice. + + |================================================================| + | |------...--| | + | |->| DocValues |<-| | + | | |------...--| | | + |==|=================|===========================================|- DocValues Index + ||~|~~~~~~~~~|~~~~~~~|~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + || DV1 START | DV1 STOP | . . . . . | DV(F#) START | DV(F#) END || + ||~~~~~~~~~~~|~~~~~~~~~~| |~~~~~~~~~~~~~~|~~~~~~~~~~~~|| + |================================================================| + +DocValues is chunked Snappy-compressed values for each document and field. + + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + [ Doc# in Chunk | Doc1 | Offset1 | ... | DocN | OffsetN | SNAPPY COMPRESSED DATA ] + [~~~~~~~~~~~~~~~|~~~~~~|~~~~~~~~~|-...-|~~~~~~|~~~~~~~~~|--------------------...-] + +Last 16 bytes are description of chunks. 
+ + |~~~~~~~~~~~~...~|----------------|----------------| + | Chunk Sizes | Chunk Size Arr | Chunk# | + |~~~~~~~~~~~~...~|----------------|----------------| diff --git a/vendor/github.com/cenkalti/backoff/v4/.gitignore b/vendor/github.com/cenkalti/backoff/v4/.gitignore new file mode 100644 index 0000000..0026861 --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/.gitignore @@ -0,0 +1,22 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe diff --git a/vendor/github.com/cenkalti/backoff/v4/.travis.yml b/vendor/github.com/cenkalti/backoff/v4/.travis.yml new file mode 100644 index 0000000..871150c --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/.travis.yml @@ -0,0 +1,10 @@ +language: go +go: + - 1.12 + - 1.x + - tip +before_install: + - go get github.com/mattn/goveralls + - go get golang.org/x/tools/cmd/cover +script: + - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/cenkalti/backoff/v4/LICENSE b/vendor/github.com/cenkalti/backoff/v4/LICENSE new file mode 100644 index 0000000..89b8179 --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Cenk Altı + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/cenkalti/backoff/v4/README.md b/vendor/github.com/cenkalti/backoff/v4/README.md new file mode 100644 index 0000000..cabfc9c --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/README.md @@ -0,0 +1,33 @@ +# Exponential Backoff [![GoDoc][godoc image]][godoc] [![Build Status][travis image]][travis] [![Coverage Status][coveralls image]][coveralls] + +This is a Go port of the exponential backoff algorithm from [Google's HTTP Client Library for Java][google-http-java-client]. + +[Exponential backoff][exponential backoff wiki] +is an algorithm that uses feedback to multiplicatively decrease the rate of some process, +in order to gradually find an acceptable rate. +The retries exponentially increase and stop increasing when a certain threshold is met. + +## Usage + +Import path is `github.com/cenkalti/backoff/v4`. Please note the version part at the end. + +godoc.org does not support modules yet, +so you can use https://godoc.org/gopkg.in/cenkalti/backoff.v4 to view the documentation. + +## Contributing + +* I would like to keep this library as small as possible. +* Please don't send a PR without opening an issue and discussing it first. +* If proposed change is not a common use case, I will probably not accept it. 
+ +[godoc]: https://godoc.org/github.com/cenkalti/backoff +[godoc image]: https://godoc.org/github.com/cenkalti/backoff?status.png +[travis]: https://travis-ci.org/cenkalti/backoff +[travis image]: https://travis-ci.org/cenkalti/backoff.png?branch=master +[coveralls]: https://coveralls.io/github/cenkalti/backoff?branch=master +[coveralls image]: https://coveralls.io/repos/github/cenkalti/backoff/badge.svg?branch=master + +[google-http-java-client]: https://github.com/google/google-http-java-client/blob/da1aa993e90285ec18579f1553339b00e19b3ab5/google-http-client/src/main/java/com/google/api/client/util/ExponentialBackOff.java +[exponential backoff wiki]: http://en.wikipedia.org/wiki/Exponential_backoff + +[advanced example]: https://godoc.org/github.com/cenkalti/backoff#example_ diff --git a/vendor/github.com/cenkalti/backoff/v4/backoff.go b/vendor/github.com/cenkalti/backoff/v4/backoff.go new file mode 100644 index 0000000..3676ee4 --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/backoff.go @@ -0,0 +1,66 @@ +// Package backoff implements backoff algorithms for retrying operations. +// +// Use Retry function for retrying operations that may fail. +// If Retry does not meet your needs, +// copy/paste the function into your project and modify as you wish. +// +// There is also Ticker type similar to time.Ticker. +// You can use it if you need to work with channels. +// +// See Examples section below for usage examples. +package backoff + +import "time" + +// BackOff is a backoff policy for retrying an operation. +type BackOff interface { + // NextBackOff returns the duration to wait before retrying the operation, + // or backoff. Stop to indicate that no more retries should be made. + // + // Example usage: + // + // duration := backoff.NextBackOff(); + // if (duration == backoff.Stop) { + // // Do not retry operation. + // } else { + // // Sleep for duration and retry operation. + // } + // + NextBackOff() time.Duration + + // Reset to initial state. 
+ Reset() +} + +// Stop indicates that no more retries should be made for use in NextBackOff(). +const Stop time.Duration = -1 + +// ZeroBackOff is a fixed backoff policy whose backoff time is always zero, +// meaning that the operation is retried immediately without waiting, indefinitely. +type ZeroBackOff struct{} + +func (b *ZeroBackOff) Reset() {} + +func (b *ZeroBackOff) NextBackOff() time.Duration { return 0 } + +// StopBackOff is a fixed backoff policy that always returns backoff.Stop for +// NextBackOff(), meaning that the operation should never be retried. +type StopBackOff struct{} + +func (b *StopBackOff) Reset() {} + +func (b *StopBackOff) NextBackOff() time.Duration { return Stop } + +// ConstantBackOff is a backoff policy that always returns the same backoff delay. +// This is in contrast to an exponential backoff policy, +// which returns a delay that grows longer as you call NextBackOff() over and over again. +type ConstantBackOff struct { + Interval time.Duration +} + +func (b *ConstantBackOff) Reset() {} +func (b *ConstantBackOff) NextBackOff() time.Duration { return b.Interval } + +func NewConstantBackOff(d time.Duration) *ConstantBackOff { + return &ConstantBackOff{Interval: d} +} diff --git a/vendor/github.com/cenkalti/backoff/v4/context.go b/vendor/github.com/cenkalti/backoff/v4/context.go new file mode 100644 index 0000000..fcff86c --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/context.go @@ -0,0 +1,66 @@ +package backoff + +import ( + "context" + "time" +) + +// BackOffContext is a backoff policy that stops retrying after the context +// is canceled. 
+type BackOffContext interface { // nolint: golint + BackOff + Context() context.Context +} + +type backOffContext struct { + BackOff + ctx context.Context +} + +// WithContext returns a BackOffContext with context ctx +// +// ctx must not be nil +func WithContext(b BackOff, ctx context.Context) BackOffContext { // nolint: golint + if ctx == nil { + panic("nil context") + } + + if b, ok := b.(*backOffContext); ok { + return &backOffContext{ + BackOff: b.BackOff, + ctx: ctx, + } + } + + return &backOffContext{ + BackOff: b, + ctx: ctx, + } +} + +func getContext(b BackOff) context.Context { + if cb, ok := b.(BackOffContext); ok { + return cb.Context() + } + if tb, ok := b.(*backOffTries); ok { + return getContext(tb.delegate) + } + return context.Background() +} + +func (b *backOffContext) Context() context.Context { + return b.ctx +} + +func (b *backOffContext) NextBackOff() time.Duration { + select { + case <-b.ctx.Done(): + return Stop + default: + } + next := b.BackOff.NextBackOff() + if deadline, ok := b.ctx.Deadline(); ok && deadline.Sub(time.Now()) < next { // nolint: gosimple + return Stop + } + return next +} diff --git a/vendor/github.com/cenkalti/backoff/v4/exponential.go b/vendor/github.com/cenkalti/backoff/v4/exponential.go new file mode 100644 index 0000000..3d34532 --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/exponential.go @@ -0,0 +1,158 @@ +package backoff + +import ( + "math/rand" + "time" +) + +/* +ExponentialBackOff is a backoff implementation that increases the backoff +period for each retry attempt using a randomization function that grows exponentially. + +NextBackOff() is calculated using the following formula: + + randomized interval = + RetryInterval * (random value in range [1 - RandomizationFactor, 1 + RandomizationFactor]) + +In other words NextBackOff() will range between the randomization factor +percentage below and above the retry interval. 
+ +For example, given the following parameters: + + RetryInterval = 2 + RandomizationFactor = 0.5 + Multiplier = 2 + +the actual backoff period used in the next retry attempt will range between 1 and 3 seconds, +multiplied by the exponential, that is, between 2 and 6 seconds. + +Note: MaxInterval caps the RetryInterval and not the randomized interval. + +If the time elapsed since an ExponentialBackOff instance is created goes past the +MaxElapsedTime, then the method NextBackOff() starts returning backoff.Stop. + +The elapsed time can be reset by calling Reset(). + +Example: Given the following default arguments, for 10 tries the sequence will be, +and assuming we go over the MaxElapsedTime on the 10th try: + + Request # RetryInterval (seconds) Randomized Interval (seconds) + + 1 0.5 [0.25, 0.75] + 2 0.75 [0.375, 1.125] + 3 1.125 [0.562, 1.687] + 4 1.687 [0.8435, 2.53] + 5 2.53 [1.265, 3.795] + 6 3.795 [1.897, 5.692] + 7 5.692 [2.846, 8.538] + 8 8.538 [4.269, 12.807] + 9 12.807 [6.403, 19.210] + 10 19.210 backoff.Stop + +Note: Implementation is not thread-safe. +*/ +type ExponentialBackOff struct { + InitialInterval time.Duration + RandomizationFactor float64 + Multiplier float64 + MaxInterval time.Duration + // After MaxElapsedTime the ExponentialBackOff returns Stop. + // It never stops if MaxElapsedTime == 0. + MaxElapsedTime time.Duration + Stop time.Duration + Clock Clock + + currentInterval time.Duration + startTime time.Time +} + +// Clock is an interface that returns current time for BackOff. +type Clock interface { + Now() time.Time +} + +// Default values for ExponentialBackOff. +const ( + DefaultInitialInterval = 500 * time.Millisecond + DefaultRandomizationFactor = 0.5 + DefaultMultiplier = 1.5 + DefaultMaxInterval = 60 * time.Second + DefaultMaxElapsedTime = 15 * time.Minute +) + +// NewExponentialBackOff creates an instance of ExponentialBackOff using default values. 
+func NewExponentialBackOff() *ExponentialBackOff { + b := &ExponentialBackOff{ + InitialInterval: DefaultInitialInterval, + RandomizationFactor: DefaultRandomizationFactor, + Multiplier: DefaultMultiplier, + MaxInterval: DefaultMaxInterval, + MaxElapsedTime: DefaultMaxElapsedTime, + Stop: Stop, + Clock: SystemClock, + } + b.Reset() + return b +} + +type systemClock struct{} + +func (t systemClock) Now() time.Time { + return time.Now() +} + +// SystemClock implements Clock interface that uses time.Now(). +var SystemClock = systemClock{} + +// Reset the interval back to the initial retry interval and restarts the timer. +// Reset must be called before using b. +func (b *ExponentialBackOff) Reset() { + b.currentInterval = b.InitialInterval + b.startTime = b.Clock.Now() +} + +// NextBackOff calculates the next backoff interval using the formula: +// Randomized interval = RetryInterval * (1 ± RandomizationFactor) +func (b *ExponentialBackOff) NextBackOff() time.Duration { + // Make sure we have not gone over the maximum elapsed time. + elapsed := b.GetElapsedTime() + next := getRandomValueFromInterval(b.RandomizationFactor, rand.Float64(), b.currentInterval) + b.incrementCurrentInterval() + if b.MaxElapsedTime != 0 && elapsed+next > b.MaxElapsedTime { + return b.Stop + } + return next +} + +// GetElapsedTime returns the elapsed time since an ExponentialBackOff instance +// is created and is reset when Reset() is called. +// +// The elapsed time is computed using time.Now().UnixNano(). It is +// safe to call even while the backoff policy is used by a running +// ticker. +func (b *ExponentialBackOff) GetElapsedTime() time.Duration { + return b.Clock.Now().Sub(b.startTime) +} + +// Increments the current interval by multiplying it with the multiplier. +func (b *ExponentialBackOff) incrementCurrentInterval() { + // Check for overflow, if overflow is detected set the current interval to the max interval. 
+ if float64(b.currentInterval) >= float64(b.MaxInterval)/b.Multiplier { + b.currentInterval = b.MaxInterval + } else { + b.currentInterval = time.Duration(float64(b.currentInterval) * b.Multiplier) + } +} + +// Returns a random value from the following interval: +// [currentInterval - randomizationFactor * currentInterval, currentInterval + randomizationFactor * currentInterval]. +func getRandomValueFromInterval(randomizationFactor, random float64, currentInterval time.Duration) time.Duration { + var delta = randomizationFactor * float64(currentInterval) + var minInterval = float64(currentInterval) - delta + var maxInterval = float64(currentInterval) + delta + + // Get a random value from the range [minInterval, maxInterval]. + // The formula used below has a +1 because if the minInterval is 1 and the maxInterval is 3 then + // we want a 33% chance for selecting either 1, 2 or 3. + return time.Duration(minInterval + (random * (maxInterval - minInterval + 1))) +} diff --git a/vendor/github.com/cenkalti/backoff/v4/retry.go b/vendor/github.com/cenkalti/backoff/v4/retry.go new file mode 100644 index 0000000..6c776cc --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/retry.go @@ -0,0 +1,96 @@ +package backoff + +import "time" + +// An Operation is executing by Retry() or RetryNotify(). +// The operation will be retried using a backoff policy if it returns an error. +type Operation func() error + +// Notify is a notify-on-error function. It receives an operation error and +// backoff delay if the operation failed (with an error). +// +// NOTE that if the backoff policy stated to stop retrying, +// the notify function isn't called. +type Notify func(error, time.Duration) + +// Retry the operation o until it does not return error or BackOff stops. +// o is guaranteed to be run at least once. +// +// If o returns a *PermanentError, the operation is not retried, and the +// wrapped error is returned. 
+// +// Retry sleeps the goroutine for the duration returned by BackOff after a +// failed operation returns. +func Retry(o Operation, b BackOff) error { + return RetryNotify(o, b, nil) +} + +// RetryNotify calls notify function with the error and wait duration +// for each failed attempt before sleep. +func RetryNotify(operation Operation, b BackOff, notify Notify) error { + return RetryNotifyWithTimer(operation, b, notify, nil) +} + +// RetryNotifyWithTimer calls notify function with the error and wait duration using the given Timer +// for each failed attempt before sleep. +// A default timer that uses system timer is used when nil is passed. +func RetryNotifyWithTimer(operation Operation, b BackOff, notify Notify, t Timer) error { + var err error + var next time.Duration + if t == nil { + t = &defaultTimer{} + } + + defer func() { + t.Stop() + }() + + ctx := getContext(b) + + b.Reset() + for { + if err = operation(); err == nil { + return nil + } + + if permanent, ok := err.(*PermanentError); ok { + return permanent.Err + } + + if next = b.NextBackOff(); next == Stop { + return err + } + + if notify != nil { + notify(err, next) + } + + t.Start(next) + + select { + case <-ctx.Done(): + return ctx.Err() + case <-t.C(): + } + } +} + +// PermanentError signals that the operation should not be retried. +type PermanentError struct { + Err error +} + +func (e *PermanentError) Error() string { + return e.Err.Error() +} + +func (e *PermanentError) Unwrap() error { + return e.Err +} + +// Permanent wraps the given err in a *PermanentError. 
+func Permanent(err error) *PermanentError { + return &PermanentError{ + Err: err, + } +} diff --git a/vendor/github.com/cenkalti/backoff/v4/ticker.go b/vendor/github.com/cenkalti/backoff/v4/ticker.go new file mode 100644 index 0000000..df9d68b --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/ticker.go @@ -0,0 +1,97 @@ +package backoff + +import ( + "context" + "sync" + "time" +) + +// Ticker holds a channel that delivers `ticks' of a clock at times reported by a BackOff. +// +// Ticks will continue to arrive when the previous operation is still running, +// so operations that take a while to fail could run in quick succession. +type Ticker struct { + C <-chan time.Time + c chan time.Time + b BackOff + ctx context.Context + timer Timer + stop chan struct{} + stopOnce sync.Once +} + +// NewTicker returns a new Ticker containing a channel that will send +// the time at times specified by the BackOff argument. Ticker is +// guaranteed to tick at least once. The channel is closed when Stop +// method is called or BackOff stops. It is not safe to manipulate the +// provided backoff policy (notably calling NextBackOff or Reset) +// while the ticker is running. +func NewTicker(b BackOff) *Ticker { + return NewTickerWithTimer(b, &defaultTimer{}) +} + +// NewTickerWithTimer returns a new Ticker with a custom timer. +// A default timer that uses system timer is used when nil is passed. +func NewTickerWithTimer(b BackOff, timer Timer) *Ticker { + if timer == nil { + timer = &defaultTimer{} + } + c := make(chan time.Time) + t := &Ticker{ + C: c, + c: c, + b: b, + ctx: getContext(b), + timer: timer, + stop: make(chan struct{}), + } + t.b.Reset() + go t.run() + return t +} + +// Stop turns off a ticker. After Stop, no more ticks will be sent. +func (t *Ticker) Stop() { + t.stopOnce.Do(func() { close(t.stop) }) +} + +func (t *Ticker) run() { + c := t.c + defer close(c) + + // Ticker is guaranteed to tick at least once. 
+ afterC := t.send(time.Now()) + + for { + if afterC == nil { + return + } + + select { + case tick := <-afterC: + afterC = t.send(tick) + case <-t.stop: + t.c = nil // Prevent future ticks from being sent to the channel. + return + case <-t.ctx.Done(): + return + } + } +} + +func (t *Ticker) send(tick time.Time) <-chan time.Time { + select { + case t.c <- tick: + case <-t.stop: + return nil + } + + next := t.b.NextBackOff() + if next == Stop { + t.Stop() + return nil + } + + t.timer.Start(next) + return t.timer.C() +} diff --git a/vendor/github.com/cenkalti/backoff/v4/timer.go b/vendor/github.com/cenkalti/backoff/v4/timer.go new file mode 100644 index 0000000..8120d02 --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/timer.go @@ -0,0 +1,35 @@ +package backoff + +import "time" + +type Timer interface { + Start(duration time.Duration) + Stop() + C() <-chan time.Time +} + +// defaultTimer implements Timer interface using time.Timer +type defaultTimer struct { + timer *time.Timer +} + +// C returns the timers channel which receives the current time when the timer fires. +func (t *defaultTimer) C() <-chan time.Time { + return t.timer.C +} + +// Start starts the timer to fire after the given duration +func (t *defaultTimer) Start(duration time.Duration) { + if t.timer == nil { + t.timer = time.NewTimer(duration) + } else { + t.timer.Reset(duration) + } +} + +// Stop is called when the timer is not used anymore and resources may be freed. 
+func (t *defaultTimer) Stop() { + if t.timer != nil { + t.timer.Stop() + } +} diff --git a/vendor/github.com/cenkalti/backoff/v4/tries.go b/vendor/github.com/cenkalti/backoff/v4/tries.go new file mode 100644 index 0000000..28d58ca --- /dev/null +++ b/vendor/github.com/cenkalti/backoff/v4/tries.go @@ -0,0 +1,38 @@ +package backoff + +import "time" + +/* +WithMaxRetries creates a wrapper around another BackOff, which will +return Stop if NextBackOff() has been called too many times since +the last time Reset() was called + +Note: Implementation is not thread-safe. +*/ +func WithMaxRetries(b BackOff, max uint64) BackOff { + return &backOffTries{delegate: b, maxTries: max} +} + +type backOffTries struct { + delegate BackOff + maxTries uint64 + numTries uint64 +} + +func (b *backOffTries) NextBackOff() time.Duration { + if b.maxTries == 0 { + return Stop + } + if b.maxTries > 0 { + if b.maxTries <= b.numTries { + return Stop + } + b.numTries++ + } + return b.delegate.NextBackOff() +} + +func (b *backOffTries) Reset() { + b.numTries = 0 + b.delegate.Reset() +} diff --git a/vendor/github.com/clbanning/mxj/v2/.travis.yml b/vendor/github.com/clbanning/mxj/v2/.travis.yml new file mode 100644 index 0000000..9c86115 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/.travis.yml @@ -0,0 +1,4 @@ +language: go + +go: +- 1.x \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/v2/LICENSE b/vendor/github.com/clbanning/mxj/v2/LICENSE new file mode 100644 index 0000000..1ada880 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2012-2021 Charles Banning . All rights reserved. 
+ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/clbanning/mxj/v2/anyxml.go b/vendor/github.com/clbanning/mxj/v2/anyxml.go new file mode 100644 index 0000000..63970ee --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/anyxml.go @@ -0,0 +1,201 @@ +package mxj + +import ( + "bytes" + "encoding/xml" + "reflect" +) + +const ( + DefaultElementTag = "element" +) + +// Encode arbitrary value as XML. +// +// Note: unmarshaling the resultant +// XML may not return the original value, since tag labels may have been injected +// to create the XML representation of the value. +/* + Encode an arbitrary JSON object. 
+ package main + + import ( + "encoding/json" + "fmt" + "github.com/clbanning/mxj" + ) + + func main() { + jsondata := []byte(`[ + { "somekey":"somevalue" }, + "string", + 3.14159265, + true + ]`) + var i interface{} + err := json.Unmarshal(jsondata, &i) + if err != nil { + // do something + } + x, err := mxj.AnyXmlIndent(i, "", " ", "mydoc") + if err != nil { + // do something else + } + fmt.Println(string(x)) + } + + output: + + somevalue + string + 3.14159265 + true + + +An extreme example is available in examples/goofy_map.go. +*/ +// Alternative values for DefaultRootTag and DefaultElementTag can be set as: +// AnyXml( v, myRootTag, myElementTag). +func AnyXml(v interface{}, tags ...string) ([]byte, error) { + var rt, et string + if len(tags) == 1 || len(tags) == 2 { + rt = tags[0] + } else { + rt = DefaultRootTag + } + if len(tags) == 2 { + et = tags[1] + } else { + et = DefaultElementTag + } + + if v == nil { + if useGoXmlEmptyElemSyntax { + return []byte("<" + rt + ">"), nil + } + return []byte("<" + rt + "/>"), nil + } + if reflect.TypeOf(v).Kind() == reflect.Struct { + return xml.Marshal(v) + } + + var err error + s := new(bytes.Buffer) + p := new(pretty) + + var b []byte + switch v.(type) { + case []interface{}: + if _, err = s.WriteString("<" + rt + ">"); err != nil { + return nil, err + } + for _, vv := range v.([]interface{}) { + switch vv.(type) { + case map[string]interface{}: + m := vv.(map[string]interface{}) + if len(m) == 1 { + for tag, val := range m { + err = marshalMapToXmlIndent(false, s, tag, val, p) + } + } else { + err = marshalMapToXmlIndent(false, s, et, vv, p) + } + default: + err = marshalMapToXmlIndent(false, s, et, vv, p) + } + if err != nil { + break + } + } + if _, err = s.WriteString(""); err != nil { + return nil, err + } + b = s.Bytes() + case map[string]interface{}: + m := Map(v.(map[string]interface{})) + b, err = m.Xml(rt) + default: + err = marshalMapToXmlIndent(false, s, rt, v, p) + b = s.Bytes() + } + + return b, err +} + 
+// Encode an arbitrary value as a pretty XML string. +// Alternative values for DefaultRootTag and DefaultElementTag can be set as: +// AnyXmlIndent( v, "", " ", myRootTag, myElementTag). +func AnyXmlIndent(v interface{}, prefix, indent string, tags ...string) ([]byte, error) { + var rt, et string + if len(tags) == 1 || len(tags) == 2 { + rt = tags[0] + } else { + rt = DefaultRootTag + } + if len(tags) == 2 { + et = tags[1] + } else { + et = DefaultElementTag + } + + if v == nil { + if useGoXmlEmptyElemSyntax { + return []byte(prefix + "<" + rt + ">"), nil + } + return []byte(prefix + "<" + rt + "/>"), nil + } + if reflect.TypeOf(v).Kind() == reflect.Struct { + return xml.MarshalIndent(v, prefix, indent) + } + + var err error + s := new(bytes.Buffer) + p := new(pretty) + p.indent = indent + p.padding = prefix + + var b []byte + switch v.(type) { + case []interface{}: + if _, err = s.WriteString("<" + rt + ">\n"); err != nil { + return nil, err + } + p.Indent() + for _, vv := range v.([]interface{}) { + switch vv.(type) { + case map[string]interface{}: + m := vv.(map[string]interface{}) + if len(m) == 1 { + for tag, val := range m { + err = marshalMapToXmlIndent(true, s, tag, val, p) + } + } else { + p.start = 1 // we 1 tag in + err = marshalMapToXmlIndent(true, s, et, vv, p) + // *s += "\n" + if _, err = s.WriteString("\n"); err != nil { + return nil, err + } + } + default: + p.start = 0 // in case trailing p.start = 1 + err = marshalMapToXmlIndent(true, s, et, vv, p) + } + if err != nil { + break + } + } + if _, err = s.WriteString(``); err != nil { + return nil, err + } + b = s.Bytes() + case map[string]interface{}: + m := Map(v.(map[string]interface{})) + b, err = m.XmlIndent(prefix, indent, rt) + default: + err = marshalMapToXmlIndent(true, s, rt, v, p) + b = s.Bytes() + } + + return b, err +} diff --git a/vendor/github.com/clbanning/mxj/v2/atomFeedString.xml b/vendor/github.com/clbanning/mxj/v2/atomFeedString.xml new file mode 100644 index 0000000..474575a 
--- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/atomFeedString.xml @@ -0,0 +1,54 @@ + +Code Review - My issueshttp://codereview.appspot.com/rietveld<>rietveld: an attempt at pubsubhubbub +2009-10-04T01:35:58+00:00email-address-removedurn:md5:134d9179c41f806be79b3a5f7877d19a + An attempt at adding pubsubhubbub support to Rietveld. +http://code.google.com/p/pubsubhubbub +http://code.google.com/p/rietveld/issues/detail?id=155 + +The server side of the protocol is trivial: + 1. add a &lt;link rel=&quot;hub&quot; href=&quot;hub-server&quot;&gt; tag to all + feeds that will be pubsubhubbubbed. + 2. every time one of those feeds changes, tell the hub + with a simple POST request. + +I have tested this by adding debug prints to a local hub +server and checking that the server got the right publish +requests. + +I can&#39;t quite get the server to work, but I think the bug +is not in my code. I think that the server expects to be +able to grab the feed and see the feed&#39;s actual URL in +the link rel=&quot;self&quot;, but the default value for that drops +the :port from the URL, and I cannot for the life of me +figure out how to get the Atom generator deep inside +django not to do that, or even where it is doing that, +or even what code is running to generate the Atom feed. +(I thought I knew but I added some assert False statements +and it kept running!) + +Ignoring that particular problem, I would appreciate +feedback on the right way to get the two values at +the top of feeds.py marked NOTE(rsc). + + +rietveld: correct tab handling +2009-10-03T23:02:17+00:00email-address-removedurn:md5:0a2a4f19bb815101f0ba2904aed7c35a + This fixes the buggy tab rendering that can be seen at +http://codereview.appspot.com/116075/diff/1/2 + +The fundamental problem was that the tab code was +not being told what column the text began in, so it +didn&#39;t know where to put the tab stops. 
Another problem +was that some of the code assumed that string byte +offsets were the same as column offsets, which is only +true if there are no tabs. + +In the process of fixing this, I cleaned up the arguments +to Fold and ExpandTabs and renamed them Break and +_ExpandTabs so that I could be sure that I found all the +call sites. I also wanted to verify that ExpandTabs was +not being used from outside intra_region_diff.py. + + + ` + diff --git a/vendor/github.com/clbanning/mxj/v2/doc.go b/vendor/github.com/clbanning/mxj/v2/doc.go new file mode 100644 index 0000000..07ac098 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/doc.go @@ -0,0 +1,143 @@ +// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. +// Copyright 2012-2019, Charles Banning. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file + +/* +Marshal/Unmarshal XML to/from map[string]interface{} values (and JSON); extract/modify values from maps by key or key-path, including wildcards. + +mxj supplants the legacy x2j and j2x packages. The subpackage x2j-wrapper is provided to facilitate migrating from the x2j package. The x2j and j2x subpackages provide similar functionality of the old packages but are not function-name compatible with them. + +Note: this library was designed for processing ad hoc anonymous messages. Bulk processing large data sets may be much more efficiently performed using the encoding/xml or encoding/json packages from Go's standard library directly. + +Related Packages: + checkxml: github.com/clbanning/checkxml provides functions for validating XML data. 

Notes:
	2022.11.28: v2.7 - add SetGlobalKeyMapPrefix to change default prefix, '#', for default keys
	2022.11.20: v2.6 - add NewMapFormattedXmlSeq for XML docs formatted with whitespace character
	2021.02.02: v2.5 - add XmlCheckIsValid toggle to force checking that the encoded XML is valid
	2020.12.14: v2.4 - add XMLEscapeCharsDecoder to preserve XML escaped characters in Map values
	2020.10.28: v2.3 - add TrimWhiteSpace option
	2020.05.01: v2.2 - optimize map to XML encoding for large XML docs.
	2019.07.04: v2.0 - remove unnecessary methods - mv.XmlWriterRaw, mv.XmlIndentWriterRaw - for Map and MapSeq.
	2019.07.04: Add MapSeq type and move associated functions and methods from Map to MapSeq.
	2019.01.21: DecodeSimpleValuesAsMap - decode to map[<tag>:map["#text":<value>]] rather than map[<tag>:<value>].
	2018.04.18: mv.Xml/mv.XmlIndent encodes non-map[string]interface{} map values - map[string]string, map[int]uint, etc.
	2018.03.29: mv.Gob/NewMapGob support gob encoding/decoding of Maps.
	2018.03.26: Added mxj/x2j-wrapper sub-package for migrating from legacy x2j package.
	2017.02.22: LeafNode paths can use ".N" syntax rather than "[N]" for list member indexing.
	2017.02.21: github.com/clbanning/checkxml provides functions for validating XML data.
	2017.02.10: SetFieldSeparator changes field separator for args in UpdateValuesForPath, ValuesFor... methods.
	2017.02.06: Support XMPP stream processing - HandleXMPPStreamTag().
	2016.11.07: Preserve name space prefix syntax in XmlSeq parser - NewMapXmlSeq(), etc.
	2016.06.25: Support overriding default XML attribute prefix, "-", in Map keys - SetAttrPrefix().
	2016.05.26: Support customization of xml.Decoder by exposing CustomDecoder variable.
	2016.03.19: Escape invalid chars when encoding XML attribute and element values - XMLEscapeChars().
	2016.03.02: By default decoding XML with float64 and bool value casting will not cast "NaN", "Inf", and "-Inf".
	            To cast them to float64, first set flag with CastNanInf(true).
+ 2016.02.22: New mv.Root(), mv.Elements(), mv.Attributes methods let you examine XML document structure. + 2016.02.16: Add CoerceKeysToLower() option to handle tags with mixed capitalization. + 2016.02.12: Seek for first xml.StartElement token; only return error if io.EOF is reached first (handles BOM). + 2015-12-02: NewMapXmlSeq() with mv.XmlSeq() & co. will try to preserve structure of XML doc when re-encoding. + 2014-08-02: AnyXml() and AnyXmlIndent() will try to marshal arbitrary values to XML. + +SUMMARY + + type Map map[string]interface{} + + Create a Map value, 'mv', from any map[string]interface{} value, 'v': + mv := Map(v) + + Unmarshal / marshal XML as a Map value, 'mv': + mv, err := NewMapXml(xmlValue) // unmarshal + xmlValue, err := mv.Xml() // marshal + + Unmarshal XML from an io.Reader as a Map value, 'mv': + mv, err := NewMapXmlReader(xmlReader) // repeated calls, as with an os.File Reader, will process stream + mv, raw, err := NewMapXmlReaderRaw(xmlReader) // 'raw' is the raw XML that was decoded + + Marshal Map value, 'mv', to an XML Writer (io.Writer): + err := mv.XmlWriter(xmlWriter) + raw, err := mv.XmlWriterRaw(xmlWriter) // 'raw' is the raw XML that was written on xmlWriter + + Also, for prettified output: + xmlValue, err := mv.XmlIndent(prefix, indent, ...) + err := mv.XmlIndentWriter(xmlWriter, prefix, indent, ...) + raw, err := mv.XmlIndentWriterRaw(xmlWriter, prefix, indent, ...) + + Bulk process XML with error handling (note: handlers must return a boolean value): + err := HandleXmlReader(xmlReader, mapHandler(Map), errHandler(error)) + err := HandleXmlReaderRaw(xmlReader, mapHandler(Map, []byte), errHandler(error, []byte)) + + Converting XML to JSON: see Examples for NewMapXml and HandleXmlReader. + + There are comparable functions and methods for JSON processing. 
+ + Arbitrary structure values can be decoded to / encoded from Map values: + mv, err := NewMapStruct(structVal) + err := mv.Struct(structPointer) + + To work with XML tag values, JSON or Map key values or structure field values, decode the XML, JSON + or structure to a Map value, 'mv', or cast a map[string]interface{} value to a Map value, 'mv', then: + paths := mv.PathsForKey(key) + path := mv.PathForKeyShortest(key) + values, err := mv.ValuesForKey(key, subkeys) + values, err := mv.ValuesForPath(path, subkeys) // 'path' can be dot-notation with wildcards and indexed arrays. + count, err := mv.UpdateValuesForPath(newVal, path, subkeys) + + Get everything at once, irrespective of path depth: + leafnodes := mv.LeafNodes() + leafvalues := mv.LeafValues() + + A new Map with whatever keys are desired can be created from the current Map and then encoded in XML + or JSON. (Note: keys can use dot-notation. 'oldKey' can also use wildcards and indexed arrays.) + newMap, err := mv.NewMap("oldKey_1:newKey_1", "oldKey_2:newKey_2", ..., "oldKey_N:newKey_N") + newMap, err := mv.NewMap("oldKey1", "oldKey3", "oldKey5") // a subset of 'mv'; see "examples/partial.go" + newXml, err := newMap.Xml() // for example + newJson, err := newMap.Json() // ditto + +XML PARSING CONVENTIONS + + Using NewMapXml() + + - Attributes are parsed to `map[string]interface{}` values by prefixing a hyphen, `-`, + to the attribute label. (Unless overridden by `PrependAttrWithHyphen(false)` or + `SetAttrPrefix()`.) + - If the element is a simple element and has attributes, the element value + is given the key `#text` for its `map[string]interface{}` representation. (See + the 'atomFeedString.xml' test data, below.) + - XML comments, directives, and process instructions are ignored. + - If CoerceKeysToLower() has been called, then the resultant keys will be lower case. 
+ + Using NewMapXmlSeq() + + - Attributes are parsed to `map["#attr"]map[]map[string]interface{}`values + where the `` value has "#text" and "#seq" keys - the "#text" key holds the + value for ``. + - All elements, except for the root, have a "#seq" key. + - Comments, directives, and process instructions are unmarshalled into the Map using the + keys "#comment", "#directive", and "#procinst", respectively. (See documentation for more + specifics.) + - Name space syntax is preserved: + - something parses to map["ns:key"]interface{}{"something"} + - xmlns:ns="http://myns.com/ns" parses to map["xmlns:ns"]interface{}{"http://myns.com/ns"} + + Both + + - By default, "Nan", "Inf", and "-Inf" values are not cast to float64. If you want them + to be cast, set a flag to cast them using CastNanInf(true). + +XML ENCODING CONVENTIONS + + - 'nil' Map values, which may represent 'null' JSON values, are encoded as "". + NOTE: the operation is not symmetric as "" elements are decoded as 'tag:""' Map values, + which, then, encode in JSON as '"tag":""' values.. + - ALSO: there is no guarantee that the encoded XML doc will be the same as the decoded one. (Go + randomizes the walk through map[string]interface{} values.) If you plan to re-encode the + Map value to XML and want the same sequencing of elements look at NewMapXmlSeq() and + mv.XmlSeq() - these try to preserve the element sequencing but with added complexity when + working with the Map representation. + +*/ +package mxj diff --git a/vendor/github.com/clbanning/mxj/v2/escapechars.go b/vendor/github.com/clbanning/mxj/v2/escapechars.go new file mode 100644 index 0000000..eeb3d25 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/escapechars.go @@ -0,0 +1,93 @@ +// Copyright 2016 Charles Banning. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +package mxj + +import ( + "bytes" +) + +var xmlEscapeChars bool + +// XMLEscapeChars(true) forces escaping invalid characters in attribute and element values. +// NOTE: this is brute force with NO interrogation of '&' being escaped already; if it is +// then '&' will be re-escaped as '&amp;'. +// +/* + The values are: + " " + ' ' + < < + > > + & & +*/ +// +// Note: if XMLEscapeCharsDecoder(true) has been called - or the default, 'false,' value +// has been toggled to 'true' - then XMLEscapeChars(true) is ignored. If XMLEscapeChars(true) +// has already been called before XMLEscapeCharsDecoder(true), XMLEscapeChars(false) is called +// to turn escape encoding on mv.Xml, etc., to prevent double escaping ampersands, '&'. +func XMLEscapeChars(b ...bool) { + var bb bool + if len(b) == 0 { + bb = !xmlEscapeChars + } else { + bb = b[0] + } + if bb == true && xmlEscapeCharsDecoder == false { + xmlEscapeChars = true + } else { + xmlEscapeChars = false + } +} + +// Scan for '&' first, since 's' may contain "&" that is parsed to "&amp;" +// - or "<" that is parsed to "&lt;". +var escapechars = [][2][]byte{ + {[]byte(`&`), []byte(`&`)}, + {[]byte(`<`), []byte(`<`)}, + {[]byte(`>`), []byte(`>`)}, + {[]byte(`"`), []byte(`"`)}, + {[]byte(`'`), []byte(`'`)}, +} + +func escapeChars(s string) string { + if len(s) == 0 { + return s + } + + b := []byte(s) + for _, v := range escapechars { + n := bytes.Count(b, v[0]) + if n == 0 { + continue + } + b = bytes.Replace(b, v[0], v[1], n) + } + return string(b) +} + +// per issue #84, escape CharData values from xml.Decoder + +var xmlEscapeCharsDecoder bool + +// XMLEscapeCharsDecoder(b ...bool) escapes XML characters in xml.CharData values +// returned by Decoder.Token. Thus, the internal Map values will contain escaped +// values, and you do not need to set XMLEscapeChars for proper encoding. 
+// +// By default, the Map values have the non-escaped values returned by Decoder.Token. +// XMLEscapeCharsDecoder(true) - or, XMLEscapeCharsDecoder() - will toggle escape +// encoding 'on.' +// +// Note: if XMLEscapeCharDecoder(true) is call then XMLEscapeChars(false) is +// called to prevent re-escaping the values on encoding using mv.Xml, etc. +func XMLEscapeCharsDecoder(b ...bool) { + if len(b) == 0 { + xmlEscapeCharsDecoder = !xmlEscapeCharsDecoder + } else { + xmlEscapeCharsDecoder = b[0] + } + if xmlEscapeCharsDecoder == true && xmlEscapeChars == true { + xmlEscapeChars = false + } +} diff --git a/vendor/github.com/clbanning/mxj/v2/exists.go b/vendor/github.com/clbanning/mxj/v2/exists.go new file mode 100644 index 0000000..07aeda4 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/exists.go @@ -0,0 +1,9 @@ +package mxj + +// Checks whether the path exists. If err != nil then 'false' is returned +// along with the error encountered parsing either the "path" or "subkeys" +// argument. +func (mv Map) Exists(path string, subkeys ...string) (bool, error) { + v, err := mv.ValuesForPath(path, subkeys...) + return (err == nil && len(v) > 0), err +} diff --git a/vendor/github.com/clbanning/mxj/v2/files.go b/vendor/github.com/clbanning/mxj/v2/files.go new file mode 100644 index 0000000..27e06e1 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files.go @@ -0,0 +1,287 @@ +package mxj + +import ( + "fmt" + "io" + "os" +) + +type Maps []Map + +func NewMaps() Maps { + return make(Maps, 0) +} + +type MapRaw struct { + M Map + R []byte +} + +// NewMapsFromXmlFile - creates an array from a file of JSON values. 
+func NewMapsFromJsonFile(name string) (Maps, error) { + fi, err := os.Stat(name) + if err != nil { + return nil, err + } + if !fi.Mode().IsRegular() { + return nil, fmt.Errorf("file %s is not a regular file", name) + } + + fh, err := os.Open(name) + if err != nil { + return nil, err + } + defer fh.Close() + + am := make([]Map, 0) + for { + m, raw, err := NewMapJsonReaderRaw(fh) + if err != nil && err != io.EOF { + return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(raw)) + } + if len(m) > 0 { + am = append(am, m) + } + if err == io.EOF { + break + } + } + return am, nil +} + +// ReadMapsFromJsonFileRaw - creates an array of MapRaw from a file of JSON values. +func NewMapsFromJsonFileRaw(name string) ([]MapRaw, error) { + fi, err := os.Stat(name) + if err != nil { + return nil, err + } + if !fi.Mode().IsRegular() { + return nil, fmt.Errorf("file %s is not a regular file", name) + } + + fh, err := os.Open(name) + if err != nil { + return nil, err + } + defer fh.Close() + + am := make([]MapRaw, 0) + for { + mr := new(MapRaw) + mr.M, mr.R, err = NewMapJsonReaderRaw(fh) + if err != nil && err != io.EOF { + return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(mr.R)) + } + if len(mr.M) > 0 { + am = append(am, *mr) + } + if err == io.EOF { + break + } + } + return am, nil +} + +// NewMapsFromXmlFile - creates an array from a file of XML values. 
+func NewMapsFromXmlFile(name string) (Maps, error) { + fi, err := os.Stat(name) + if err != nil { + return nil, err + } + if !fi.Mode().IsRegular() { + return nil, fmt.Errorf("file %s is not a regular file", name) + } + + fh, err := os.Open(name) + if err != nil { + return nil, err + } + defer fh.Close() + + am := make([]Map, 0) + for { + m, raw, err := NewMapXmlReaderRaw(fh) + if err != nil && err != io.EOF { + return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(raw)) + } + if len(m) > 0 { + am = append(am, m) + } + if err == io.EOF { + break + } + } + return am, nil +} + +// NewMapsFromXmlFileRaw - creates an array of MapRaw from a file of XML values. +// NOTE: the slice with the raw XML is clean with no extra capacity - unlike NewMapXmlReaderRaw(). +// It is slow at parsing a file from disk and is intended for relatively small utility files. +func NewMapsFromXmlFileRaw(name string) ([]MapRaw, error) { + fi, err := os.Stat(name) + if err != nil { + return nil, err + } + if !fi.Mode().IsRegular() { + return nil, fmt.Errorf("file %s is not a regular file", name) + } + + fh, err := os.Open(name) + if err != nil { + return nil, err + } + defer fh.Close() + + am := make([]MapRaw, 0) + for { + mr := new(MapRaw) + mr.M, mr.R, err = NewMapXmlReaderRaw(fh) + if err != nil && err != io.EOF { + return am, fmt.Errorf("error: %s - reading: %s", err.Error(), string(mr.R)) + } + if len(mr.M) > 0 { + am = append(am, *mr) + } + if err == io.EOF { + break + } + } + return am, nil +} + +// ------------------------ Maps writing ------------------------- +// These are handy-dandy methods for dumping configuration data, etc. 
+ +// JsonString - analogous to mv.Json() +func (mvs Maps) JsonString(safeEncoding ...bool) (string, error) { + var s string + for _, v := range mvs { + j, err := v.Json() + if err != nil { + return s, err + } + s += string(j) + } + return s, nil +} + +// JsonStringIndent - analogous to mv.JsonIndent() +func (mvs Maps) JsonStringIndent(prefix, indent string, safeEncoding ...bool) (string, error) { + var s string + var haveFirst bool + for _, v := range mvs { + j, err := v.JsonIndent(prefix, indent) + if err != nil { + return s, err + } + if haveFirst { + s += "\n" + } else { + haveFirst = true + } + s += string(j) + } + return s, nil +} + +// XmlString - analogous to mv.Xml() +func (mvs Maps) XmlString() (string, error) { + var s string + for _, v := range mvs { + x, err := v.Xml() + if err != nil { + return s, err + } + s += string(x) + } + return s, nil +} + +// XmlStringIndent - analogous to mv.XmlIndent() +func (mvs Maps) XmlStringIndent(prefix, indent string) (string, error) { + var s string + for _, v := range mvs { + x, err := v.XmlIndent(prefix, indent) + if err != nil { + return s, err + } + s += string(x) + } + return s, nil +} + +// JsonFile - write Maps to named file as JSON +// Note: the file will be created, if necessary; if it exists it will be truncated. +// If you need to append to a file, open it and use JsonWriter method. +func (mvs Maps) JsonFile(file string, safeEncoding ...bool) error { + var encoding bool + if len(safeEncoding) == 1 { + encoding = safeEncoding[0] + } + s, err := mvs.JsonString(encoding) + if err != nil { + return err + } + fh, err := os.Create(file) + if err != nil { + return err + } + defer fh.Close() + fh.WriteString(s) + return nil +} + +// JsonFileIndent - write Maps to named file as pretty JSON +// Note: the file will be created, if necessary; if it exists it will be truncated. +// If you need to append to a file, open it and use JsonIndentWriter method. 
+func (mvs Maps) JsonFileIndent(file, prefix, indent string, safeEncoding ...bool) error { + var encoding bool + if len(safeEncoding) == 1 { + encoding = safeEncoding[0] + } + s, err := mvs.JsonStringIndent(prefix, indent, encoding) + if err != nil { + return err + } + fh, err := os.Create(file) + if err != nil { + return err + } + defer fh.Close() + fh.WriteString(s) + return nil +} + +// XmlFile - write Maps to named file as XML +// Note: the file will be created, if necessary; if it exists it will be truncated. +// If you need to append to a file, open it and use XmlWriter method. +func (mvs Maps) XmlFile(file string) error { + s, err := mvs.XmlString() + if err != nil { + return err + } + fh, err := os.Create(file) + if err != nil { + return err + } + defer fh.Close() + fh.WriteString(s) + return nil +} + +// XmlFileIndent - write Maps to named file as pretty XML +// Note: the file will be created,if necessary; if it exists it will be truncated. +// If you need to append to a file, open it and use XmlIndentWriter method. 
+func (mvs Maps) XmlFileIndent(file, prefix, indent string) error { + s, err := mvs.XmlStringIndent(prefix, indent) + if err != nil { + return err + } + fh, err := os.Create(file) + if err != nil { + return err + } + defer fh.Close() + fh.WriteString(s) + return nil +} diff --git a/vendor/github.com/clbanning/mxj/v2/files_test.badjson b/vendor/github.com/clbanning/mxj/v2/files_test.badjson new file mode 100644 index 0000000..d187200 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test.badjson @@ -0,0 +1,2 @@ +{ "this":"is", "a":"test", "file":"for", "files_test.go":"case" } +{ "with":"some", "bad":JSON, "in":"it" } diff --git a/vendor/github.com/clbanning/mxj/v2/files_test.badxml b/vendor/github.com/clbanning/mxj/v2/files_test.badxml new file mode 100644 index 0000000..4736ef9 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test.badxml @@ -0,0 +1,9 @@ + + test + for files.go + + + some + doc + test case + diff --git a/vendor/github.com/clbanning/mxj/v2/files_test.json b/vendor/github.com/clbanning/mxj/v2/files_test.json new file mode 100644 index 0000000..e9a3ddf --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test.json @@ -0,0 +1,2 @@ +{ "this":"is", "a":"test", "file":"for", "files_test.go":"case" } +{ "with":"just", "two":2, "JSON":"values", "true":true } diff --git a/vendor/github.com/clbanning/mxj/v2/files_test.xml b/vendor/github.com/clbanning/mxj/v2/files_test.xml new file mode 100644 index 0000000..65cf021 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test.xml @@ -0,0 +1,9 @@ + + test + for files.go + + + some + doc + test case + diff --git a/vendor/github.com/clbanning/mxj/v2/files_test_dup.json b/vendor/github.com/clbanning/mxj/v2/files_test_dup.json new file mode 100644 index 0000000..2becb6a --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test_dup.json @@ -0,0 +1 @@ +{"a":"test","file":"for","files_test.go":"case","this":"is"}{"JSON":"values","true":true,"two":2,"with":"just"} \ No 
newline at end of file diff --git a/vendor/github.com/clbanning/mxj/v2/files_test_dup.xml b/vendor/github.com/clbanning/mxj/v2/files_test_dup.xml new file mode 100644 index 0000000..f68d22e --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test_dup.xml @@ -0,0 +1 @@ +for files.gotestdoctest casesome \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/v2/files_test_indent.json b/vendor/github.com/clbanning/mxj/v2/files_test_indent.json new file mode 100644 index 0000000..6fde156 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test_indent.json @@ -0,0 +1,12 @@ +{ + "a": "test", + "file": "for", + "files_test.go": "case", + "this": "is" +} +{ + "JSON": "values", + "true": true, + "two": 2, + "with": "just" +} \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/v2/files_test_indent.xml b/vendor/github.com/clbanning/mxj/v2/files_test_indent.xml new file mode 100644 index 0000000..8c91a1d --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/files_test_indent.xml @@ -0,0 +1,8 @@ + + for files.go + test + + doc + test case + some + \ No newline at end of file diff --git a/vendor/github.com/clbanning/mxj/v2/gob.go b/vendor/github.com/clbanning/mxj/v2/gob.go new file mode 100644 index 0000000..d56c2fd --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/gob.go @@ -0,0 +1,35 @@ +// gob.go - Encode/Decode a Map into a gob object. + +package mxj + +import ( + "bytes" + "encoding/gob" +) + +// NewMapGob returns a Map value for a gob object that has been +// encoded from a map[string]interface{} (or compatible type) value. +// It is intended to provide symmetric handling of Maps that have +// been encoded using mv.Gob. 
+func NewMapGob(gobj []byte) (Map, error) { + m := make(map[string]interface{}, 0) + if len(gobj) == 0 { + return m, nil + } + r := bytes.NewReader(gobj) + dec := gob.NewDecoder(r) + if err := dec.Decode(&m); err != nil { + return m, err + } + return m, nil +} + +// Gob returns a gob-encoded value for the Map 'mv'. +func (mv Map) Gob() ([]byte, error) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + if err := enc.Encode(map[string]interface{}(mv)); err != nil { + return nil, err + } + return buf.Bytes(), nil +} diff --git a/vendor/github.com/clbanning/mxj/v2/json.go b/vendor/github.com/clbanning/mxj/v2/json.go new file mode 100644 index 0000000..eb2c05a --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/json.go @@ -0,0 +1,323 @@ +// Copyright 2012-2014 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +package mxj + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "time" +) + +// ------------------------------ write JSON ----------------------- + +// Just a wrapper on json.Marshal. +// If option safeEncoding is'true' then safe encoding of '<', '>' and '&' +// is preserved. (see encoding/json#Marshal, encoding/json#Encode) +func (mv Map) Json(safeEncoding ...bool) ([]byte, error) { + var s bool + if len(safeEncoding) == 1 { + s = safeEncoding[0] + } + + b, err := json.Marshal(mv) + + if !s { + b = bytes.Replace(b, []byte("\\u003c"), []byte("<"), -1) + b = bytes.Replace(b, []byte("\\u003e"), []byte(">"), -1) + b = bytes.Replace(b, []byte("\\u0026"), []byte("&"), -1) + } + return b, err +} + +// Just a wrapper on json.MarshalIndent. +// If option safeEncoding is'true' then safe encoding of '<' , '>' and '&' +// is preserved. 
(see encoding/json#Marshal, encoding/json#Encode) +func (mv Map) JsonIndent(prefix, indent string, safeEncoding ...bool) ([]byte, error) { + var s bool + if len(safeEncoding) == 1 { + s = safeEncoding[0] + } + + b, err := json.MarshalIndent(mv, prefix, indent) + if !s { + b = bytes.Replace(b, []byte("\\u003c"), []byte("<"), -1) + b = bytes.Replace(b, []byte("\\u003e"), []byte(">"), -1) + b = bytes.Replace(b, []byte("\\u0026"), []byte("&"), -1) + } + return b, err +} + +// The following implementation is provided for symmetry with NewMapJsonReader[Raw] +// The names will also provide a key for the number of return arguments. + +// Writes the Map as JSON on the Writer. +// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. +func (mv Map) JsonWriter(jsonWriter io.Writer, safeEncoding ...bool) error { + b, err := mv.Json(safeEncoding...) + if err != nil { + return err + } + + _, err = jsonWriter.Write(b) + return err +} + +// Writes the Map as JSON on the Writer. []byte is the raw JSON that was written. +// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. +func (mv Map) JsonWriterRaw(jsonWriter io.Writer, safeEncoding ...bool) ([]byte, error) { + b, err := mv.Json(safeEncoding...) + if err != nil { + return b, err + } + + _, err = jsonWriter.Write(b) + return b, err +} + +// Writes the Map as pretty JSON on the Writer. +// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. +func (mv Map) JsonIndentWriter(jsonWriter io.Writer, prefix, indent string, safeEncoding ...bool) error { + b, err := mv.JsonIndent(prefix, indent, safeEncoding...) + if err != nil { + return err + } + + _, err = jsonWriter.Write(b) + return err +} + +// Writes the Map as pretty JSON on the Writer. []byte is the raw JSON that was written. +// If 'safeEncoding' is 'true', then "safe" encoding of '<', '>' and '&' is preserved. 
+func (mv Map) JsonIndentWriterRaw(jsonWriter io.Writer, prefix, indent string, safeEncoding ...bool) ([]byte, error) { + b, err := mv.JsonIndent(prefix, indent, safeEncoding...) + if err != nil { + return b, err + } + + _, err = jsonWriter.Write(b) + return b, err +} + +// --------------------------- read JSON ----------------------------- + +// Decode numericvalues as json.Number type Map values - see encoding/json#Number. +// NOTE: this is for decoding JSON into a Map with NewMapJson(), NewMapJsonReader(), +// etc.; it does not affect NewMapXml(), etc. The XML encoders mv.Xml() and mv.XmlIndent() +// do recognize json.Number types; a JSON object can be decoded to a Map with json.Number +// value types and the resulting Map can be correctly encoded into a XML object. +var JsonUseNumber bool + +// Just a wrapper on json.Unmarshal +// Converting JSON to XML is a simple as: +// ... +// mapVal, merr := mxj.NewMapJson(jsonVal) +// if merr != nil { +// // handle error +// } +// xmlVal, xerr := mapVal.Xml() +// if xerr != nil { +// // handle error +// } +// NOTE: as a special case, passing a list, e.g., [{"some-null-value":"", "a-non-null-value":"bar"}], +// will be interpreted as having the root key 'object' prepended - {"object":[ ... ]} - to unmarshal to a Map. +// See mxj/j2x/j2x_test.go. +func NewMapJson(jsonVal []byte) (Map, error) { + // empty or nil begets empty + if len(jsonVal) == 0 { + m := make(map[string]interface{}, 0) + return m, nil + } + // handle a goofy case ... + if jsonVal[0] == '[' { + jsonVal = []byte(`{"object":` + string(jsonVal) + `}`) + } + m := make(map[string]interface{}) + // err := json.Unmarshal(jsonVal, &m) + buf := bytes.NewReader(jsonVal) + dec := json.NewDecoder(buf) + if JsonUseNumber { + dec.UseNumber() + } + err := dec.Decode(&m) + return m, err +} + +// Retrieve a Map value from an io.Reader. +// NOTE: The raw JSON off the reader is buffered to []byte using a ByteReader. 
If the io.Reader is an +// os.File, there may be significant performance impact. If the io.Reader is wrapping a []byte +// value in-memory, however, such as http.Request.Body you CAN use it to efficiently unmarshal +// a JSON object. +func NewMapJsonReader(jsonReader io.Reader) (Map, error) { + jb, err := getJson(jsonReader) + if err != nil || len(*jb) == 0 { + return nil, err + } + + // Unmarshal the 'presumed' JSON string + return NewMapJson(*jb) +} + +// Retrieve a Map value and raw JSON - []byte - from an io.Reader. +// NOTE: The raw JSON off the reader is buffered to []byte using a ByteReader. If the io.Reader is an +// os.File, there may be significant performance impact. If the io.Reader is wrapping a []byte +// value in-memory, however, such as http.Request.Body you CAN use it to efficiently unmarshal +// a JSON object and retrieve the raw JSON in a single call. +func NewMapJsonReaderRaw(jsonReader io.Reader) (Map, []byte, error) { + jb, err := getJson(jsonReader) + if err != nil || len(*jb) == 0 { + return nil, *jb, err + } + + // Unmarshal the 'presumed' JSON string + m, merr := NewMapJson(*jb) + return m, *jb, merr +} + +// Pull the next JSON string off the stream: just read from first '{' to its closing '}'. +// Returning a pointer to the slice saves 16 bytes - maybe unnecessary, but internal to package. +func getJson(rdr io.Reader) (*[]byte, error) { + bval := make([]byte, 1) + jb := make([]byte, 0) + var inQuote, inJson bool + var parenCnt int + var previous byte + + // scan the input for a matched set of {...} + // json.Unmarshal will handle syntax checking. 
+ for { + _, err := rdr.Read(bval) + if err != nil { + if err == io.EOF && inJson && parenCnt > 0 { + return &jb, fmt.Errorf("no closing } for JSON string: %s", string(jb)) + } + return &jb, err + } + switch bval[0] { + case '{': + if !inQuote { + parenCnt++ + inJson = true + } + case '}': + if !inQuote { + parenCnt-- + } + if parenCnt < 0 { + return nil, fmt.Errorf("closing } without opening {: %s", string(jb)) + } + case '"': + if inQuote { + if previous == '\\' { + break + } + inQuote = false + } else { + inQuote = true + } + case '\n', '\r', '\t', ' ': + if !inQuote { + continue + } + } + if inJson { + jb = append(jb, bval[0]) + if parenCnt == 0 { + break + } + } + previous = bval[0] + } + + return &jb, nil +} + +// ------------------------------- JSON Reader handler via Map values ----------------------- + +// Default poll delay to keep Handler from spinning on an open stream +// like sitting on os.Stdin waiting for imput. +var jhandlerPollInterval = time.Duration(1e6) + +// While unnecessary, we make HandleJsonReader() have the same signature as HandleXmlReader(). +// This avoids treating one or other as a special case and discussing the underlying stdlib logic. + +// Bulk process JSON using handlers that process a Map value. +// 'rdr' is an io.Reader for the JSON (stream). +// 'mapHandler' is the Map processing handler. Return of 'false' stops io.Reader processing. +// 'errHandler' is the error processor. Return of 'false' stops io.Reader processing and returns the error. +// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. +// This means that you can stop reading the file on error or after processing a particular message. +// To have reading and handling run concurrently, pass argument to a go routine in handler and return 'true'. 
+func HandleJsonReader(jsonReader io.Reader, mapHandler func(Map) bool, errHandler func(error) bool) error { + var n int + for { + m, merr := NewMapJsonReader(jsonReader) + n++ + + // handle error condition with errhandler + if merr != nil && merr != io.EOF { + merr = fmt.Errorf("[jsonReader: %d] %s", n, merr.Error()) + if ok := errHandler(merr); !ok { + // caused reader termination + return merr + } + continue + } + + // pass to maphandler + if len(m) != 0 { + if ok := mapHandler(m); !ok { + break + } + } else if merr != io.EOF { + <-time.After(jhandlerPollInterval) + } + + if merr == io.EOF { + break + } + } + return nil +} + +// Bulk process JSON using handlers that process a Map value and the raw JSON. +// 'rdr' is an io.Reader for the JSON (stream). +// 'mapHandler' is the Map and raw JSON - []byte - processor. Return of 'false' stops io.Reader processing. +// 'errHandler' is the error and raw JSON processor. Return of 'false' stops io.Reader processing and returns the error. +// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. +// This means that you can stop reading the file on error or after processing a particular message. +// To have reading and handling run concurrently, pass argument(s) to a go routine in handler and return 'true'. 
+func HandleJsonReaderRaw(jsonReader io.Reader, mapHandler func(Map, []byte) bool, errHandler func(error, []byte) bool) error { + var n int + for { + m, raw, merr := NewMapJsonReaderRaw(jsonReader) + n++ + + // handle error condition with errhandler + if merr != nil && merr != io.EOF { + merr = fmt.Errorf("[jsonReader: %d] %s", n, merr.Error()) + if ok := errHandler(merr, raw); !ok { + // caused reader termination + return merr + } + continue + } + + // pass to maphandler + if len(m) != 0 { + if ok := mapHandler(m, raw); !ok { + break + } + } else if merr != io.EOF { + <-time.After(jhandlerPollInterval) + } + + if merr == io.EOF { + break + } + } + return nil +} diff --git a/vendor/github.com/clbanning/mxj/v2/keyvalues.go b/vendor/github.com/clbanning/mxj/v2/keyvalues.go new file mode 100644 index 0000000..55620ca --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/keyvalues.go @@ -0,0 +1,668 @@ +// Copyright 2012-2014 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// keyvalues.go: Extract values from an arbitrary XML doc. Tag path can include wildcard characters. + +package mxj + +import ( + "errors" + "fmt" + "strconv" + "strings" +) + +// ----------------------------- get everything FOR a single key ------------------------- + +const ( + minArraySize = 32 +) + +var defaultArraySize int = minArraySize + +// SetArraySize adjust the buffers for expected number of values to return from ValuesForKey() and ValuesForPath(). +// This can have the effect of significantly reducing memory allocation-copy functions for large data sets. +// Returns the initial buffer size. +func SetArraySize(size int) int { + if size > minArraySize { + defaultArraySize = size + } else { + defaultArraySize = minArraySize + } + return defaultArraySize +} + +// ValuesForKey return all values in Map, 'mv', associated with a 'key'. If len(returned_values) == 0, then no match. 
+// On error, the returned slice is 'nil'. NOTE: 'key' can be wildcard, "*". +// 'subkeys' (optional) are "key:val[:type]" strings representing attributes or elements in a list. +// - By default 'val' is of type string. "key:val:bool" and "key:val:float" to coerce them. +// - For attributes prefix the label with the attribute prefix character, by default a +// hyphen, '-', e.g., "-seq:3". (See SetAttrPrefix function.) +// - If the 'key' refers to a list, then "key:value" could select a list member of the list. +// - The subkey can be wildcarded - "key:*" - to require that it's there with some value. +// - If a subkey is preceeded with the '!' character, the key:value[:type] entry is treated as an +// exclusion critera - e.g., "!author:William T. Gaddis". +// - If val contains ":" symbol, use SetFieldSeparator to a unused symbol, perhaps "|". +func (mv Map) ValuesForKey(key string, subkeys ...string) ([]interface{}, error) { + m := map[string]interface{}(mv) + var subKeyMap map[string]interface{} + if len(subkeys) > 0 { + var err error + subKeyMap, err = getSubKeyMap(subkeys...) + if err != nil { + return nil, err + } + } + + ret := make([]interface{}, 0, defaultArraySize) + var cnt int + hasKey(m, key, &ret, &cnt, subKeyMap) + return ret[:cnt], nil +} + +var KeyNotExistError = errors.New("Key does not exist") + +// ValueForKey is a wrapper on ValuesForKey. It returns the first member of []interface{}, if any. +// If there is no value, "nil, nil" is returned. +func (mv Map) ValueForKey(key string, subkeys ...string) (interface{}, error) { + vals, err := mv.ValuesForKey(key, subkeys...) 
+ if err != nil { + return nil, err + } + if len(vals) == 0 { + return nil, KeyNotExistError + } + return vals[0], nil +} + +// hasKey - if the map 'key' exists append it to array +// if it doesn't do nothing except scan array and map values +func hasKey(iv interface{}, key string, ret *[]interface{}, cnt *int, subkeys map[string]interface{}) { + // func hasKey(iv interface{}, key string, ret *[]interface{}, subkeys map[string]interface{}) { + switch iv.(type) { + case map[string]interface{}: + vv := iv.(map[string]interface{}) + // see if the current value is of interest + if v, ok := vv[key]; ok { + switch v.(type) { + case map[string]interface{}: + if hasSubKeys(v, subkeys) { + *ret = append(*ret, v) + *cnt++ + } + case []interface{}: + for _, av := range v.([]interface{}) { + if hasSubKeys(av, subkeys) { + *ret = append(*ret, av) + *cnt++ + } + } + default: + if len(subkeys) == 0 { + *ret = append(*ret, v) + *cnt++ + } + } + } + + // wildcard case + if key == "*" { + for _, v := range vv { + switch v.(type) { + case map[string]interface{}: + if hasSubKeys(v, subkeys) { + *ret = append(*ret, v) + *cnt++ + } + case []interface{}: + for _, av := range v.([]interface{}) { + if hasSubKeys(av, subkeys) { + *ret = append(*ret, av) + *cnt++ + } + } + default: + if len(subkeys) == 0 { + *ret = append(*ret, v) + *cnt++ + } + } + } + } + + // scan the rest + for _, v := range vv { + hasKey(v, key, ret, cnt, subkeys) + } + case []interface{}: + for _, v := range iv.([]interface{}) { + hasKey(v, key, ret, cnt, subkeys) + } + } +} + +// ----------------------- get everything for a node in the Map --------------------------- + +// Allow indexed arrays in "path" specification. (Request from Abhijit Kadam - abhijitk100@gmail.com.) +// 2014.04.28 - implementation note. +// Implemented as a wrapper of (old)ValuesForPath() because we need look-ahead logic to handle expansion +// of wildcards and unindexed arrays. 
Embedding such logic into valuesForKeyPath() would have made the +// code much more complicated; this wrapper is straightforward, easy to debug, and doesn't add significant overhead. + +// ValuesForPatb retrieves all values for a path from the Map. If len(returned_values) == 0, then no match. +// On error, the returned array is 'nil'. +// 'path' is a dot-separated path of key values. +// - If a node in the path is '*', then everything beyond is walked. +// - 'path' can contain indexed array references, such as, "*.data[1]" and "msgs[2].data[0].field" - +// even "*[2].*[0].field". +// 'subkeys' (optional) are "key:val[:type]" strings representing attributes or elements in a list. +// - By default 'val' is of type string. "key:val:bool" and "key:val:float" to coerce them. +// - For attributes prefix the label with the attribute prefix character, by default a +// hyphen, '-', e.g., "-seq:3". (See SetAttrPrefix function.) +// - If the 'path' refers to a list, then "tag:value" would return member of the list. +// - The subkey can be wildcarded - "key:*" - to require that it's there with some value. +// - If a subkey is preceeded with the '!' character, the key:value[:type] entry is treated as an +// exclusion critera - e.g., "!author:William T. Gaddis". +// - If val contains ":" symbol, use SetFieldSeparator to a unused symbol, perhaps "|". +func (mv Map) ValuesForPath(path string, subkeys ...string) ([]interface{}, error) { + // If there are no array indexes in path, use legacy ValuesForPath() logic. + if strings.Index(path, "[") < 0 { + return mv.oldValuesForPath(path, subkeys...) + } + + var subKeyMap map[string]interface{} + if len(subkeys) > 0 { + var err error + subKeyMap, err = getSubKeyMap(subkeys...) + if err != nil { + return nil, err + } + } + + keys, kerr := parsePath(path) + if kerr != nil { + return nil, kerr + } + + vals, verr := valuesForArray(keys, mv) + if verr != nil { + return nil, verr // Vals may be nil, but return empty array. 
+ } + + // Need to handle subkeys ... only return members of vals that satisfy conditions. + retvals := make([]interface{}, 0) + for _, v := range vals { + if hasSubKeys(v, subKeyMap) { + retvals = append(retvals, v) + } + } + return retvals, nil +} + +func valuesForArray(keys []*key, m Map) ([]interface{}, error) { + var tmppath string + var haveFirst bool + var vals []interface{} + var verr error + + lastkey := len(keys) - 1 + for i := 0; i <= lastkey; i++ { + if !haveFirst { + tmppath = keys[i].name + haveFirst = true + } else { + tmppath += "." + keys[i].name + } + + // Look-ahead: explode wildcards and unindexed arrays. + // Need to handle un-indexed list recursively: + // e.g., path is "stuff.data[0]" rather than "stuff[0].data[0]". + // Need to treat it as "stuff[0].data[0]", "stuff[1].data[0]", ... + if !keys[i].isArray && i < lastkey && keys[i+1].isArray { + // Can't pass subkeys because we may not be at literal end of path. + vv, vverr := m.oldValuesForPath(tmppath) + if vverr != nil { + return nil, vverr + } + for _, v := range vv { + // See if we can walk the value. + am, ok := v.(map[string]interface{}) + if !ok { + continue + } + // Work the backend. + nvals, nvalserr := valuesForArray(keys[i+1:], Map(am)) + if nvalserr != nil { + return nil, nvalserr + } + vals = append(vals, nvals...) + } + break // have recursed the whole path - return + } + + if keys[i].isArray || i == lastkey { + // Don't pass subkeys because may not be at literal end of path. + vals, verr = m.oldValuesForPath(tmppath) + } else { + continue + } + if verr != nil { + return nil, verr + } + + if i == lastkey && !keys[i].isArray { + break + } + + // Now we're looking at an array - supposedly. + // Is index in range of vals? + if len(vals) <= keys[i].position { + vals = nil + break + } + + // Return the array member of interest, if at end of path. + if i == lastkey { + vals = vals[keys[i].position:(keys[i].position + 1)] + break + } + + // Extract the array member of interest. 
+ am := vals[keys[i].position:(keys[i].position + 1)] + + // must be a map[string]interface{} value so we can keep walking the path + amm, ok := am[0].(map[string]interface{}) + if !ok { + vals = nil + break + } + + m = Map(amm) + haveFirst = false + } + + return vals, nil +} + +type key struct { + name string + isArray bool + position int +} + +func parsePath(s string) ([]*key, error) { + keys := strings.Split(s, ".") + + ret := make([]*key, 0) + + for i := 0; i < len(keys); i++ { + if keys[i] == "" { + continue + } + + newkey := new(key) + if strings.Index(keys[i], "[") < 0 { + newkey.name = keys[i] + ret = append(ret, newkey) + continue + } + + p := strings.Split(keys[i], "[") + newkey.name = p[0] + p = strings.Split(p[1], "]") + if p[0] == "" { // no right bracket + return nil, fmt.Errorf("no right bracket on key index: %s", keys[i]) + } + // convert p[0] to a int value + pos, nerr := strconv.ParseInt(p[0], 10, 32) + if nerr != nil { + return nil, fmt.Errorf("cannot convert index to int value: %s", p[0]) + } + newkey.position = int(pos) + newkey.isArray = true + ret = append(ret, newkey) + } + + return ret, nil +} + +// legacy ValuesForPath() - now wrapped to handle special case of indexed arrays in 'path'. +func (mv Map) oldValuesForPath(path string, subkeys ...string) ([]interface{}, error) { + m := map[string]interface{}(mv) + var subKeyMap map[string]interface{} + if len(subkeys) > 0 { + var err error + subKeyMap, err = getSubKeyMap(subkeys...) 
+ if err != nil { + return nil, err + } + } + + keys := strings.Split(path, ".") + if keys[len(keys)-1] == "" { + keys = keys[:len(keys)-1] + } + ivals := make([]interface{}, 0, defaultArraySize) + var cnt int + valuesForKeyPath(&ivals, &cnt, m, keys, subKeyMap) + return ivals[:cnt], nil +} + +func valuesForKeyPath(ret *[]interface{}, cnt *int, m interface{}, keys []string, subkeys map[string]interface{}) { + lenKeys := len(keys) + + // load 'm' values into 'ret' + // expand any lists + if lenKeys == 0 { + switch m.(type) { + case map[string]interface{}: + if subkeys != nil { + if ok := hasSubKeys(m, subkeys); !ok { + return + } + } + *ret = append(*ret, m) + *cnt++ + case []interface{}: + for i, v := range m.([]interface{}) { + if subkeys != nil { + if ok := hasSubKeys(v, subkeys); !ok { + continue // only load list members with subkeys + } + } + *ret = append(*ret, (m.([]interface{}))[i]) + *cnt++ + } + default: + if subkeys != nil { + return // must be map[string]interface{} if there are subkeys + } + *ret = append(*ret, m) + *cnt++ + } + return + } + + // key of interest + key := keys[0] + switch key { + case "*": // wildcard - scan all values + switch m.(type) { + case map[string]interface{}: + for _, v := range m.(map[string]interface{}) { + // valuesForKeyPath(ret, v, keys[1:], subkeys) + valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) + } + case []interface{}: + for _, v := range m.([]interface{}) { + switch v.(type) { + // flatten out a list of maps - keys are processed + case map[string]interface{}: + for _, vv := range v.(map[string]interface{}) { + // valuesForKeyPath(ret, vv, keys[1:], subkeys) + valuesForKeyPath(ret, cnt, vv, keys[1:], subkeys) + } + default: + // valuesForKeyPath(ret, v, keys[1:], subkeys) + valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) + } + } + } + default: // key - must be map[string]interface{} + switch m.(type) { + case map[string]interface{}: + if v, ok := m.(map[string]interface{})[key]; ok { + // valuesForKeyPath(ret, v, 
keys[1:], subkeys) + valuesForKeyPath(ret, cnt, v, keys[1:], subkeys) + } + case []interface{}: // may be buried in list + for _, v := range m.([]interface{}) { + switch v.(type) { + case map[string]interface{}: + if vv, ok := v.(map[string]interface{})[key]; ok { + // valuesForKeyPath(ret, vv, keys[1:], subkeys) + valuesForKeyPath(ret, cnt, vv, keys[1:], subkeys) + } + } + } + } + } +} + +// hasSubKeys() - interface{} equality works for string, float64, bool +// 'v' must be a map[string]interface{} value to have subkeys +// 'a' can have k:v pairs with v.(string) == "*", which is treated like a wildcard. +func hasSubKeys(v interface{}, subkeys map[string]interface{}) bool { + if len(subkeys) == 0 { + return true + } + + switch v.(type) { + case map[string]interface{}: + // do all subKey name:value pairs match? + mv := v.(map[string]interface{}) + for skey, sval := range subkeys { + isNotKey := false + if skey[:1] == "!" { // a NOT-key + skey = skey[1:] + isNotKey = true + } + vv, ok := mv[skey] + if !ok { // key doesn't exist + if isNotKey { // key not there, but that's what we want + if kv, ok := sval.(string); ok && kv == "*" { + continue + } + } + return false + } + // wildcard check + if kv, ok := sval.(string); ok && kv == "*" { + if isNotKey { // key is there, and we don't want it + return false + } + continue + } + switch sval.(type) { + case string: + if s, ok := vv.(string); ok && s == sval.(string) { + if isNotKey { + return false + } + continue + } + case bool: + if b, ok := vv.(bool); ok && b == sval.(bool) { + if isNotKey { + return false + } + continue + } + case float64: + if f, ok := vv.(float64); ok && f == sval.(float64) { + if isNotKey { + return false + } + continue + } + } + // key there but didn't match subkey value + if isNotKey { // that's what we want + continue + } + return false + } + // all subkeys matched + return true + } + + // not a map[string]interface{} value, can't have subkeys + return false +} + +// Generate map of key:value 
entries as map[string]string. +// 'kv' arguments are "name:value" pairs: attribute keys are designated with prepended hyphen, '-'. +// If len(kv) == 0, the return is (nil, nil). +func getSubKeyMap(kv ...string) (map[string]interface{}, error) { + if len(kv) == 0 { + return nil, nil + } + m := make(map[string]interface{}, 0) + for _, v := range kv { + vv := strings.Split(v, fieldSep) + switch len(vv) { + case 2: + m[vv[0]] = interface{}(vv[1]) + case 3: + switch vv[2] { + case "string", "char", "text": + m[vv[0]] = interface{}(vv[1]) + case "bool", "boolean": + // ParseBool treats "1"==true & "0"==false + b, err := strconv.ParseBool(vv[1]) + if err != nil { + return nil, fmt.Errorf("can't convert subkey value to bool: %s", vv[1]) + } + m[vv[0]] = interface{}(b) + case "float", "float64", "num", "number", "numeric": + f, err := strconv.ParseFloat(vv[1], 64) + if err != nil { + return nil, fmt.Errorf("can't convert subkey value to float: %s", vv[1]) + } + m[vv[0]] = interface{}(f) + default: + return nil, fmt.Errorf("unknown subkey conversion spec: %s", v) + } + default: + return nil, fmt.Errorf("unknown subkey spec: %s", v) + } + } + return m, nil +} + +// ------------------------------- END of valuesFor ... ---------------------------- + +// ----------------------- locate where a key value is in the tree ------------------- + +//----------------------------- find all paths to a key -------------------------------- + +// PathsForKey returns all paths through Map, 'mv', (in dot-notation) that terminate with the specified key. +// Results can be used with ValuesForPath. 
+func (mv Map) PathsForKey(key string) []string { + m := map[string]interface{}(mv) + breadbasket := make(map[string]bool, 0) + breadcrumbs := "" + + hasKeyPath(breadcrumbs, m, key, breadbasket) + if len(breadbasket) == 0 { + return nil + } + + // unpack map keys to return + res := make([]string, len(breadbasket)) + var i int + for k := range breadbasket { + res[i] = k + i++ + } + + return res +} + +// PathForKeyShortest extracts the shortest path from all possible paths - from PathsForKey() - in Map, 'mv'.. +// Paths are strings using dot-notation. +func (mv Map) PathForKeyShortest(key string) string { + paths := mv.PathsForKey(key) + + lp := len(paths) + if lp == 0 { + return "" + } + if lp == 1 { + return paths[0] + } + + shortest := paths[0] + shortestLen := len(strings.Split(shortest, ".")) + + for i := 1; i < len(paths); i++ { + vlen := len(strings.Split(paths[i], ".")) + if vlen < shortestLen { + shortest = paths[i] + shortestLen = vlen + } + } + + return shortest +} + +// hasKeyPath - if the map 'key' exists append it to KeyPath.path and increment KeyPath.depth +// This is really just a breadcrumber that saves all trails that hit the prescribed 'key'. +func hasKeyPath(crumbs string, iv interface{}, key string, basket map[string]bool) { + switch iv.(type) { + case map[string]interface{}: + vv := iv.(map[string]interface{}) + if _, ok := vv[key]; ok { + // create a new breadcrumb, intialized with the one we have + var nbc string + if crumbs == "" { + nbc = key + } else { + nbc = crumbs + "." + key + } + basket[nbc] = true + } + // walk on down the path, key could occur again at deeper node + for k, v := range vv { + // create a new breadcrumb, intialized with the one we have + var nbc string + if crumbs == "" { + nbc = k + } else { + nbc = crumbs + "." 
+ k + } + hasKeyPath(nbc, v, key, basket) + } + case []interface{}: + // crumb-trail doesn't change, pass it on + for _, v := range iv.([]interface{}) { + hasKeyPath(crumbs, v, key, basket) + } + } +} + +var PathNotExistError = errors.New("Path does not exist") + +// ValueForPath wraps ValuesFor Path and returns the first value returned. +// If no value is found it returns 'nil' and PathNotExistError. +func (mv Map) ValueForPath(path string) (interface{}, error) { + vals, err := mv.ValuesForPath(path) + if err != nil { + return nil, err + } + if len(vals) == 0 { + return nil, PathNotExistError + } + return vals[0], nil +} + +// ValuesForPathString returns the first found value for the path as a string. +func (mv Map) ValueForPathString(path string) (string, error) { + vals, err := mv.ValuesForPath(path) + if err != nil { + return "", err + } + if len(vals) == 0 { + return "", errors.New("ValueForPath: path not found") + } + val := vals[0] + return fmt.Sprintf("%v", val), nil +} + +// ValueOrEmptyForPathString returns the first found value for the path as a string. +// If the path is not found then it returns an empty string. +func (mv Map) ValueOrEmptyForPathString(path string) string { + str, _ := mv.ValueForPathString(path) + return str +} diff --git a/vendor/github.com/clbanning/mxj/v2/leafnode.go b/vendor/github.com/clbanning/mxj/v2/leafnode.go new file mode 100644 index 0000000..1bc814f --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/leafnode.go @@ -0,0 +1,112 @@ +package mxj + +// leafnode.go - return leaf nodes with paths and values for the Map +// inspired by: https://groups.google.com/forum/#!topic/golang-nuts/3JhuVKRuBbw + +import ( + "strconv" + "strings" +) + +const ( + NoAttributes = true // suppress LeafNode values that are attributes +) + +// LeafNode - a terminal path value in a Map. +// For XML Map values it represents an attribute or simple element value - of type +// string unless Map was created using Cast flag. 
For JSON Map values it represents +// a string, numeric, boolean, or null value. +type LeafNode struct { + Path string // a dot-notation representation of the path with array subscripting + Value interface{} // the value at the path termination +} + +// LeafNodes - returns an array of all LeafNode values for the Map. +// The option no_attr argument suppresses attribute values (keys with prepended hyphen, '-') +// as well as the "#text" key for the associated simple element value. +// +// PrependAttrWithHypen(false) will result in attributes having .attr-name as +// terminal node in 'path' while the path for the element value, itself, will be +// the base path w/o "#text". +// +// LeafUseDotNotation(true) causes list members to be identified using ".N" syntax +// rather than "[N]" syntax. +func (mv Map) LeafNodes(no_attr ...bool) []LeafNode { + var a bool + if len(no_attr) == 1 { + a = no_attr[0] + } + + l := make([]LeafNode, 0) + getLeafNodes("", "", map[string]interface{}(mv), &l, a) + return l +} + +func getLeafNodes(path, node string, mv interface{}, l *[]LeafNode, noattr bool) { + // if stripping attributes, then also strip "#text" key + if !noattr || node != textK { + if path != "" && node[:1] != "[" { + path += "." + } + path += node + } + switch mv.(type) { + case map[string]interface{}: + for k, v := range mv.(map[string]interface{}) { + // if noattr && k[:1] == "-" { + if noattr && len(attrPrefix) > 0 && strings.Index(k, attrPrefix) == 0 { + continue + } + getLeafNodes(path, k, v, l, noattr) + } + case []interface{}: + for i, v := range mv.([]interface{}) { + if useDotNotation { + getLeafNodes(path, strconv.Itoa(i), v, l, noattr) + } else { + getLeafNodes(path, "["+strconv.Itoa(i)+"]", v, l, noattr) + } + } + default: + // can't walk any further, so create leaf + n := LeafNode{path, mv} + *l = append(*l, n) + } +} + +// LeafPaths - all paths that terminate in LeafNode values. 
+func (mv Map) LeafPaths(no_attr ...bool) []string {
+	ln := mv.LeafNodes(no_attr...)
+	ss := make([]string, len(ln))
+	for i := 0; i < len(ln); i++ {
+		ss[i] = ln[i].Path
+	}
+	return ss
+}
+
+// LeafValues - all terminal values in the Map.
+func (mv Map) LeafValues(no_attr ...bool) []interface{} {
+	ln := mv.LeafNodes(no_attr...)
+	vv := make([]interface{}, len(ln))
+	for i := 0; i < len(ln); i++ {
+		vv[i] = ln[i].Value
+	}
+	return vv
+}
+
+// ====================== utilities ======================
+
+// https://groups.google.com/forum/#!topic/golang-nuts/pj0C5IrZk4I
+var useDotNotation bool
+
+// LeafUseDotNotation sets a flag that list members in LeafNode paths
+// should be identified using ".N" syntax rather than the default "[N]"
+// syntax. Calling LeafUseDotNotation with no arguments toggles the
+// flag on/off; otherwise, the argument sets the flag value 'true'/'false'.
+func LeafUseDotNotation(b ...bool) {
+	if len(b) == 0 {
+		useDotNotation = !useDotNotation
+		return
+	}
+	useDotNotation = b[0]
+}
diff --git a/vendor/github.com/clbanning/mxj/v2/misc.go b/vendor/github.com/clbanning/mxj/v2/misc.go
new file mode 100644
index 0000000..5b4fab2
--- /dev/null
+++ b/vendor/github.com/clbanning/mxj/v2/misc.go
@@ -0,0 +1,86 @@
+// Copyright 2016 Charles Banning. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file
+
+// misc.go - mimic functions (+others) called out in:
+// https://groups.google.com/forum/#!topic/golang-nuts/jm_aGsJNbdQ
+// Primarily these methods let you retrieve XML structure information.
+
+package mxj
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+)
+
+// Return the root element of the Map. If there is not a single key in Map,
+// then an error is returned.
+func (mv Map) Root() (string, error) {
+	mm := map[string]interface{}(mv)
+	if len(mm) != 1 {
+		return "", fmt.Errorf("Map does not have singleton root. Len: %d.", len(mm))
+	}
+	for k, _ := range mm {
+		return k, nil
+	}
+	return "", nil
+}
+
+// If the path is an element with sub-elements, return a list of the sub-element
+// keys. (The list is alphabetically sorted.) NOTE: Map keys that are prefixed with
+// '-', a hyphen, are considered attributes; see m.Attributes(path).
+func (mv Map) Elements(path string) ([]string, error) {
+	e, err := mv.ValueForPath(path)
+	if err != nil {
+		return nil, err
+	}
+	switch e.(type) {
+	case map[string]interface{}:
+		ee := e.(map[string]interface{})
+		elems := make([]string, len(ee))
+		var i int
+		for k, _ := range ee {
+			if len(attrPrefix) > 0 && strings.Index(k, attrPrefix) == 0 {
+				continue // skip attributes
+			}
+			elems[i] = k
+			i++
+		}
+		elems = elems[:i]
+		// alphabetic sort keeps things tidy
+		sort.Strings(elems)
+		return elems, nil
+	}
+	return nil, fmt.Errorf("no elements for path: %s", path)
+}
+
+// If the path is an element with attributes, return a list of the attribute
+// keys. (The list is alphabetically sorted.) NOTE: Map keys that are not prefixed with
+// '-', a hyphen, are not treated as attributes; see m.Elements(path). Also, if the
+// attribute prefix is "" - SetAttrPrefix("") or PrependAttrWithHyphen(false) - then
+// there are no identifiable attributes.
+func (mv Map) Attributes(path string) ([]string, error) { + a, err := mv.ValueForPath(path) + if err != nil { + return nil, err + } + switch a.(type) { + case map[string]interface{}: + aa := a.(map[string]interface{}) + attrs := make([]string, len(aa)) + var i int + for k, _ := range aa { + if len(attrPrefix) == 0 || strings.Index(k, attrPrefix) != 0 { + continue // skip non-attributes + } + attrs[i] = k[len(attrPrefix):] + i++ + } + attrs = attrs[:i] + // alphabetic sort keeps things tidy + sort.Strings(attrs) + return attrs, nil + } + return nil, fmt.Errorf("no attributes for path: %s", path) +} diff --git a/vendor/github.com/clbanning/mxj/v2/mxj.go b/vendor/github.com/clbanning/mxj/v2/mxj.go new file mode 100644 index 0000000..f0592f0 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/mxj.go @@ -0,0 +1,128 @@ +// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. +// Copyright 2012-2014 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +package mxj + +import ( + "fmt" + "sort" +) + +const ( + Cast = true // for clarity - e.g., mxj.NewMapXml(doc, mxj.Cast) + SafeEncoding = true // ditto - e.g., mv.Json(mxj.SafeEncoding) +) + +type Map map[string]interface{} + +// Allocate a Map. +func New() Map { + m := make(map[string]interface{}, 0) + return m +} + +// Cast a Map to map[string]interface{} +func (mv Map) Old() map[string]interface{} { + return mv +} + +// Return a copy of mv as a newly allocated Map. If the Map only contains string, +// numeric, map[string]interface{}, and []interface{} values, then it can be thought +// of as a "deep copy." Copying a structure (or structure reference) value is subject +// to the noted restrictions. 
+// NOTE: If 'mv' includes structure values with, possibly, JSON encoding tags +// then only public fields of the structure are in the new Map - and with +// keys that conform to any encoding tag instructions. The structure itself will +// be represented as a map[string]interface{} value. +func (mv Map) Copy() (Map, error) { + // this is the poor-man's deep copy + // not efficient, but it works + j, jerr := mv.Json() + // must handle, we don't know how mv got built + if jerr != nil { + return nil, jerr + } + return NewMapJson(j) +} + +// --------------- StringIndent ... from x2j.WriteMap ------------- + +// Pretty print a Map. +func (mv Map) StringIndent(offset ...int) string { + return writeMap(map[string]interface{}(mv), true, true, offset...) +} + +// Pretty print a Map without the value type information - just key:value entries. +func (mv Map) StringIndentNoTypeInfo(offset ...int) string { + return writeMap(map[string]interface{}(mv), false, true, offset...) +} + +// writeMap - dumps the map[string]interface{} for examination. +// 'typeInfo' causes value type to be printed. +// 'offset' is initial indentation count; typically: Write(m). 
+func writeMap(m interface{}, typeInfo, root bool, offset ...int) string { + var indent int + if len(offset) == 1 { + indent = offset[0] + } + + var s string + switch m.(type) { + case []interface{}: + if typeInfo { + s += "[[]interface{}]" + } + for _, v := range m.([]interface{}) { + s += "\n" + for i := 0; i < indent; i++ { + s += " " + } + s += writeMap(v, typeInfo, false, indent+1) + } + case map[string]interface{}: + list := make([][2]string, len(m.(map[string]interface{}))) + var n int + for k, v := range m.(map[string]interface{}) { + list[n][0] = k + list[n][1] = writeMap(v, typeInfo, false, indent+1) + n++ + } + sort.Sort(mapList(list)) + for _, v := range list { + if root { + root = false + } else { + s += "\n" + } + for i := 0; i < indent; i++ { + s += " " + } + s += v[0] + " : " + v[1] + } + default: + if typeInfo { + s += fmt.Sprintf("[%T] %+v", m, m) + } else { + s += fmt.Sprintf("%+v", m) + } + } + return s +} + +// ======================== utility =============== + +type mapList [][2]string + +func (ml mapList) Len() int { + return len(ml) +} + +func (ml mapList) Swap(i, j int) { + ml[i], ml[j] = ml[j], ml[i] +} + +func (ml mapList) Less(i, j int) bool { + return ml[i][0] <= ml[j][0] +} diff --git a/vendor/github.com/clbanning/mxj/v2/newmap.go b/vendor/github.com/clbanning/mxj/v2/newmap.go new file mode 100644 index 0000000..b293949 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/newmap.go @@ -0,0 +1,184 @@ +// mxj - A collection of map[string]interface{} and associated XML and JSON utilities. +// Copyright 2012-2014, 2018 Charles Banning. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// remap.go - build a new Map from the current Map based on keyOld:keyNew mappings +// keys can use dot-notation, keyOld can use wildcard, '*' +// +// Computational strategy - +// Using the key path - []string - traverse a new map[string]interface{} and +// insert the oldVal as the newVal when we arrive at the end of the path. +// If the type at the end is nil, then that is newVal +// If the type at the end is a singleton (string, float64, bool) an array is created. +// If the type at the end is an array, newVal is just appended. +// If the type at the end is a map, it is inserted if possible or the map value +// is converted into an array if necessary. + +package mxj + +import ( + "errors" + "strings" +) + +// (Map)NewMap - create a new Map from data in the current Map. +// 'keypairs' are key mappings "oldKey:newKey" and specify that the current value of 'oldKey' +// should be the value for 'newKey' in the returned Map. +// - 'oldKey' supports dot-notation as described for (Map)ValuesForPath() +// - 'newKey' supports dot-notation but with no wildcards, '*', or indexed arrays +// - "oldKey" is shorthand for the keypair value "oldKey:oldKey" +// - "oldKey:" and ":newKey" are invalid keypair values +// - if 'oldKey' does not exist in the current Map, it is not written to the new Map. +// "null" is not supported unless it is the current Map. +// - see newmap_test.go for several syntax examples +// - mv.NewMap() == mxj.New() +// +// NOTE: "examples/partial.go" shows how to create arbitrary sub-docs of an XML doc. 
+func (mv Map) NewMap(keypairs ...string) (Map, error) { + n := make(map[string]interface{}, 0) + if len(keypairs) == 0 { + return n, nil + } + + // loop through the pairs + var oldKey, newKey string + var path []string + for _, v := range keypairs { + if len(v) == 0 { + continue // just skip over empty keypair arguments + } + + // initialize oldKey, newKey and check + vv := strings.Split(v, ":") + if len(vv) > 2 { + return n, errors.New("oldKey:newKey keypair value not valid - " + v) + } + if len(vv) == 1 { + oldKey, newKey = vv[0], vv[0] + } else { + oldKey, newKey = vv[0], vv[1] + } + strings.TrimSpace(oldKey) + strings.TrimSpace(newKey) + if i := strings.Index(newKey, "*"); i > -1 { + return n, errors.New("newKey value cannot contain wildcard character - " + v) + } + if i := strings.Index(newKey, "["); i > -1 { + return n, errors.New("newKey value cannot contain indexed arrays - " + v) + } + if oldKey == "" || newKey == "" { + return n, errors.New("oldKey or newKey is not specified - " + v) + } + + // get oldKey value + oldVal, err := mv.ValuesForPath(oldKey) + if err != nil { + return n, err + } + if len(oldVal) == 0 { + continue // oldKey has no value, may not exist in mv + } + + // break down path + path = strings.Split(newKey, ".") + if path[len(path)-1] == "" { // ignore a trailing dot in newKey spec + path = path[:len(path)-1] + } + + addNewVal(&n, path, oldVal) + } + + return n, nil +} + +// navigate 'n' to end of path and add val +func addNewVal(n *map[string]interface{}, path []string, val []interface{}) { + // newVal - either singleton or array + var newVal interface{} + if len(val) == 1 { + newVal = val[0] // is type interface{} + } else { + newVal = interface{}(val) + } + + // walk to the position of interest, create it if necessary + m := (*n) // initialize map walker + var k string // key for m + lp := len(path) - 1 // when to stop looking + for i := 0; i < len(path); i++ { + k = path[i] + if i == lp { + break + } + var nm map[string]interface{} 
// holds position of next-map + switch m[k].(type) { + case nil: // need a map for next node in path, so go there + nm = make(map[string]interface{}, 0) + m[k] = interface{}(nm) + m = m[k].(map[string]interface{}) + case map[string]interface{}: + // OK - got somewhere to walk to, go there + m = m[k].(map[string]interface{}) + case []interface{}: + // add a map and nm points to new map unless there's already + // a map in the array, then nm points there + // The placement of the next value in the array is dependent + // on the sequence of members - could land on a map or a nil + // value first. TODO: how to test this. + a := make([]interface{}, 0) + var foundmap bool + for _, vv := range m[k].([]interface{}) { + switch vv.(type) { + case nil: // doesn't appear that this occurs, need a test case + if foundmap { // use the first one in array + a = append(a, vv) + continue + } + nm = make(map[string]interface{}, 0) + a = append(a, interface{}(nm)) + foundmap = true + case map[string]interface{}: + if foundmap { // use the first one in array + a = append(a, vv) + continue + } + nm = vv.(map[string]interface{}) + a = append(a, vv) + foundmap = true + default: + a = append(a, vv) + } + } + // no map found in array + if !foundmap { + nm = make(map[string]interface{}, 0) + a = append(a, interface{}(nm)) + } + m[k] = interface{}(a) // must insert in map + m = nm + default: // it's a string, float, bool, etc. + aa := make([]interface{}, 0) + nm = make(map[string]interface{}, 0) + aa = append(aa, m[k], nm) + m[k] = interface{}(aa) + m = nm + } + } + + // value is nil, array or a singleton of some kind + // initially m.(type) == map[string]interface{} + v := m[k] + switch v.(type) { + case nil: // initialized + m[k] = newVal + case []interface{}: + a := m[k].([]interface{}) + a = append(a, newVal) + m[k] = interface{}(a) + default: // v exists:string, float64, bool, map[string]interface, etc. 
+ a := make([]interface{}, 0) + a = append(a, v, newVal) + m[k] = interface{}(a) + } +} diff --git a/vendor/github.com/clbanning/mxj/v2/readme.md b/vendor/github.com/clbanning/mxj/v2/readme.md new file mode 100644 index 0000000..0e0a09a --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/readme.md @@ -0,0 +1,209 @@ +

mxj - to/from maps, XML and JSON

+Decode/encode XML to/from map[string]interface{} (or JSON) values, and extract/modify values from maps by key or key-path, including wildcards. + +mxj supplants the legacy x2j and j2x packages. If you want the old syntax, use mxj/x2j and mxj/j2x packages. + +

Installation

+Using go.mod: +
+go get github.com/clbanning/mxj/v2@v2.7	
+
+ +
+import "github.com/clbanning/mxj/v2"
+
+ +... or just vendor the package. + +

Related Packages

+ +https://github.com/clbanning/checkxml provides functions for validating XML data. + +

Refactor Encoder - 2020.05.01

+Issue #70 highlighted that encoding large maps does not scale well, since the original logic used string appends operations. Using bytes.Buffer results in linear scaling for very large XML docs. (Metrics based on MacBook Pro i7 w/ 16 GB.) + + Nodes m.XML() time + 54809 12.53708ms + 109780 32.403183ms + 164678 59.826412ms + 482598 109.358007ms + +

Refactor Decoder - 2015.11.15

+For over a year I've wanted to refactor the XML-to-map[string]interface{} decoder to make it more performant. I recently took the time to do that, since we were using github.com/clbanning/mxj in a production system that could be deployed on a Raspberry Pi. Now the decoder is comparable to the stdlib JSON-to-map[string]interface{} decoder in terms of its additional processing overhead relative to decoding to a structure value. As shown by: + + BenchmarkNewMapXml-4 100000 18043 ns/op + BenchmarkNewStructXml-4 100000 14892 ns/op + BenchmarkNewMapJson-4 300000 4633 ns/op + BenchmarkNewStructJson-4 300000 3427 ns/op + BenchmarkNewMapXmlBooks-4 20000 82850 ns/op + BenchmarkNewStructXmlBooks-4 20000 67822 ns/op + BenchmarkNewMapJsonBooks-4 100000 17222 ns/op + BenchmarkNewStructJsonBooks-4 100000 15309 ns/op + +

Notices

+ + 2022.11.28: v2.7 - add SetGlobalKeyMapPrefix to change default prefix, '#', for default keys + 2022.11.20: v2.6 - add NewMapForattedXmlSeq for XML docs formatted with whitespace character + 2021.02.02: v2.5 - add XmlCheckIsValid toggle to force checking that the encoded XML is valid + 2020.12.14: v2.4 - add XMLEscapeCharsDecoder to preserve XML escaped characters in Map values + 2020.10.28: v2.3 - add TrimWhiteSpace option + 2020.05.01: v2.2 - optimize map to XML encoding for large XML docs. + 2019.07.04: v2.0 - remove unnecessary methods - mv.XmlWriterRaw, mv.XmlIndentWriterRaw - for Map and MapSeq. + 2019.07.04: Add MapSeq type and move associated functions and methods from Map to MapSeq. + 2019.01.21: DecodeSimpleValuesAsMap - decode to map[:map["#text":]] rather than map[:] + 2018.04.18: mv.Xml/mv.XmlIndent encodes non-map[string]interface{} map values - map[string]string, map[int]uint, etc. + 2018.03.29: mv.Gob/NewMapGob support gob encoding/decoding of Maps. + 2018.03.26: Added mxj/x2j-wrapper sub-package for migrating from legacy x2j package. + 2017.02.22: LeafNode paths can use ".N" syntax rather than "[N]" for list member indexing. + 2017.02.10: SetFieldSeparator changes field separator for args in UpdateValuesForPath, ValuesFor... methods. + 2017.02.06: Support XMPP stream processing - HandleXMPPStreamTag(). + 2016.11.07: Preserve name space prefix syntax in XmlSeq parser - NewMapXmlSeq(), etc. + 2016.06.25: Support overriding default XML attribute prefix, "-", in Map keys - SetAttrPrefix(). + 2016.05.26: Support customization of xml.Decoder by exposing CustomDecoder variable. + 2016.03.19: Escape invalid chars when encoding XML attribute and element values - XMLEscapeChars(). + 2016.03.02: By default decoding XML with float64 and bool value casting will not cast "NaN", "Inf", and "-Inf". + To cast them to float64, first set flag with CastNanInf(true). 
+ 2016.02.22: New mv.Root(), mv.Elements(), mv.Attributes methods let you examine XML document structure. + 2016.02.16: Add CoerceKeysToLower() option to handle tags with mixed capitalization. + 2016.02.12: Seek for first xml.StartElement token; only return error if io.EOF is reached first (handles BOM). + 2015.12.02: XML decoding/encoding that preserves original structure of document. See NewMapXmlSeq() + and mv.XmlSeq() / mv.XmlSeqIndent(). + 2015-05-20: New: mv.StringIndentNoTypeInfo(). + Also, alphabetically sort map[string]interface{} values by key to prettify output for mv.Xml(), + mv.XmlIndent(), mv.StringIndent(), mv.StringIndentNoTypeInfo(). + 2014-11-09: IncludeTagSeqNum() adds "_seq" key with XML doc positional information. + (NOTE: PreserveXmlList() is similar and will be here soon.) + 2014-09-18: inspired by NYTimes fork, added PrependAttrWithHyphen() to allow stripping hyphen from attribute tag. + 2014-08-02: AnyXml() and AnyXmlIndent() will try to marshal arbitrary values to XML. + 2014-04-28: ValuesForPath() and NewMap() now accept path with indexed array references. + +

Basic Unmarshal XML to map[string]interface{}

+
type Map map[string]interface{}
+ +Create a `Map` value, 'mv', from any `map[string]interface{}` value, 'v': +
mv := Map(v)
+ +Unmarshal / marshal XML as a `Map` value, 'mv': +
mv, err := NewMapXml(xmlValue) // unmarshal
+xmlValue, err := mv.Xml()      // marshal
+ +Unmarshal XML from an `io.Reader` as a `Map` value, 'mv': +
mv, err := NewMapXmlReader(xmlReader)         // repeated calls, as with an os.File Reader, will process stream
+mv, raw, err := NewMapXmlReaderRaw(xmlReader) // 'raw' is the raw XML that was decoded
+ +Marshal `Map` value, 'mv', to an XML Writer (`io.Writer`): +
err := mv.XmlWriter(xmlWriter)
+raw, err := mv.XmlWriterRaw(xmlWriter) // 'raw' is the raw XML that was written on xmlWriter
+ +Also, for prettified output: +
xmlValue, err := mv.XmlIndent(prefix, indent, ...)
+err := mv.XmlIndentWriter(xmlWriter, prefix, indent, ...)
+raw, err := mv.XmlIndentWriterRaw(xmlWriter, prefix, indent, ...)
+ +Bulk process XML with error handling (note: handlers must return a boolean value): +
err := HandleXmlReader(xmlReader, mapHandler(Map), errHandler(error))
+err := HandleXmlReaderRaw(xmlReader, mapHandler(Map, []byte), errHandler(error, []byte))
+ +Converting XML to JSON: see Examples for `NewMapXml` and `HandleXmlReader`. + +There are comparable functions and methods for JSON processing. + +Arbitrary structure values can be decoded to / encoded from `Map` values: +
mv, err := NewMapStruct(structVal)
+err := mv.Struct(structPointer)
+ +

Extract / modify Map values

+To work with XML tag values, JSON or Map key values or structure field values, decode the XML, JSON +or structure to a `Map` value, 'mv', or cast a `map[string]interface{}` value to a `Map` value, 'mv', then: +
paths := mv.PathsForKey(key)
+path := mv.PathForKeyShortest(key)
+values, err := mv.ValuesForKey(key, subkeys)
+values, err := mv.ValuesForPath(path, subkeys)
+count, err := mv.UpdateValuesForPath(newVal, path, subkeys)
+ +Get everything at once, irrespective of path depth: +
leafnodes := mv.LeafNodes()
+leafvalues := mv.LeafValues()
+ +A new `Map` with whatever keys are desired can be created from the current `Map` and then encoded in XML +or JSON. (Note: keys can use dot-notation.) +
newMap, err := mv.NewMap("oldKey_1:newKey_1", "oldKey_2:newKey_2", ..., "oldKey_N:newKey_N")
+newMap, err := mv.NewMap("oldKey1", "oldKey3", "oldKey5") // a subset of 'mv'; see "examples/partial.go"
+newXml, err := newMap.Xml()   // for example
+newJson, err := newMap.Json() // ditto
+ +

Usage

+ +The package is fairly well [self-documented with examples](http://godoc.org/github.com/clbanning/mxj). + +Also, the subdirectory "examples" contains a wide range of examples, several taken from golang-nuts discussions. + +

XML parsing conventions

+ +Using NewMapXml() + + - Attributes are parsed to `map[string]interface{}` values by prefixing a hyphen, `-`, + to the attribute label. (Unless overridden by `PrependAttrWithHyphen(false)` or + `SetAttrPrefix()`.) + - If the element is a simple element and has attributes, the element value + is given the key `#text` for its `map[string]interface{}` representation. (See + the 'atomFeedString.xml' test data, below.) + - XML comments, directives, and process instructions are ignored. + - If CoerceKeysToLower() has been called, then the resultant keys will be lower case. + +Using NewMapXmlSeq() + + - Attributes are parsed to `map["#attr"]map[]map[string]interface{}`values + where the `` value has "#text" and "#seq" keys - the "#text" key holds the + value for ``. + - All elements, except for the root, have a "#seq" key. + - Comments, directives, and process instructions are unmarshalled into the Map using the + keys "#comment", "#directive", and "#procinst", respectively. (See documentation for more + specifics.) + - Name space syntax is preserved: + - `something` parses to `map["ns:key"]interface{}{"something"}` + - `xmlns:ns="http://myns.com/ns"` parses to `map["xmlns:ns"]interface{}{"http://myns.com/ns"}` + +Both + + - By default, "Nan", "Inf", and "-Inf" values are not cast to float64. If you want them + to be cast, set a flag to cast them using CastNanInf(true). + +

XML encoding conventions

+ + - 'nil' `Map` values, which may represent 'null' JSON values, are encoded as ``. + NOTE: the operation is not symmetric as `` elements are decoded as `tag:""` `Map` values, + which, then, encode in JSON as `"tag":""` values. + - ALSO: there is no guarantee that the encoded XML doc will be the same as the decoded one. (Go + randomizes the walk through map[string]interface{} values.) If you plan to re-encode the + Map value to XML and want the same sequencing of elements look at NewMapXmlSeq() and + mv.XmlSeq() - these try to preserve the element sequencing but with added complexity when + working with the Map representation. + +

Running "go test"

+ +Because there are no guarantees on the sequence map elements are retrieved, the tests have been +written for visual verification in most cases. One advantage is that you can easily use the +output from running "go test" as examples of calling the various functions and methods. + +

Motivation

+ +I make extensive use of JSON for messaging and typically unmarshal the messages into +`map[string]interface{}` values. This is easily done using `json.Unmarshal` from the +standard Go libraries. Unfortunately, many legacy solutions use structured +XML messages; in those environments the applications would have to be refactored to +interoperate with my components. + +The better solution is to just provide an alternative HTTP handler that receives +XML messages and parses it into a `map[string]interface{}` value and then reuse +all the JSON-based code. The Go `xml.Unmarshal()` function does not provide the same +option of unmarshaling XML messages into `map[string]interface{}` values. So I wrote +a couple of small functions to fill this gap and released them as the x2j package. + +Over the next year and a half additional features were added, and the companion j2x +package was released to address XML encoding of arbitrary JSON and `map[string]interface{}` +values. As part of a refactoring of our production system and looking at how we had been +using the x2j and j2x packages we found that we rarely performed direct XML-to-JSON or +JSON-to_XML conversion and that working with the XML or JSON as `map[string]interface{}` +values was the primary value. Thus, everything was refactored into the mxj package. + diff --git a/vendor/github.com/clbanning/mxj/v2/remove.go b/vendor/github.com/clbanning/mxj/v2/remove.go new file mode 100644 index 0000000..8362ab1 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/remove.go @@ -0,0 +1,37 @@ +package mxj + +import "strings" + +// Removes the path. +func (mv Map) Remove(path string) error { + m := map[string]interface{}(mv) + return remove(m, path) +} + +func remove(m interface{}, path string) error { + val, err := prevValueByPath(m, path) + if err != nil { + return err + } + + lastKey := lastKey(path) + delete(val, lastKey) + + return nil +} + +// returns the last key of the path. 
+// lastKey("a.b.c") would have returned "c" +func lastKey(path string) string { + keys := strings.Split(path, ".") + key := keys[len(keys)-1] + return key +} + +// returns the path without the last key +// parentPath("a.b.c") would have returned "a.b" +func parentPath(path string) string { + keys := strings.Split(path, ".") + parentPath := strings.Join(keys[0:len(keys)-1], ".") + return parentPath +} diff --git a/vendor/github.com/clbanning/mxj/v2/rename.go b/vendor/github.com/clbanning/mxj/v2/rename.go new file mode 100644 index 0000000..4c655ed --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/rename.go @@ -0,0 +1,61 @@ +package mxj + +import ( + "errors" + "strings" +) + +// RenameKey renames a key in a Map. +// It works only for nested maps. +// It doesn't work for cases when the key is in a list. +func (mv Map) RenameKey(path string, newName string) error { + var v bool + var err error + if v, err = mv.Exists(path); err == nil && !v { + return errors.New("RenameKey: path not found: " + path) + } else if err != nil { + return err + } + if v, err = mv.Exists(parentPath(path) + "." 
+ newName); err == nil && v { + return errors.New("RenameKey: key already exists: " + newName) + } else if err != nil { + return err + } + + m := map[string]interface{}(mv) + return renameKey(m, path, newName) +} + +func renameKey(m interface{}, path string, newName string) error { + val, err := prevValueByPath(m, path) + if err != nil { + return err + } + + oldName := lastKey(path) + val[newName] = val[oldName] + delete(val, oldName) + + return nil +} + +// returns a value which contains a last key in the path +// For example: prevValueByPath("a.b.c", {a{b{c: 3}}}) returns {c: 3} +func prevValueByPath(m interface{}, path string) (map[string]interface{}, error) { + keys := strings.Split(path, ".") + + switch mValue := m.(type) { + case map[string]interface{}: + for key, value := range mValue { + if key == keys[0] { + if len(keys) == 1 { + return mValue, nil + } else { + // keep looking for the full path to the key + return prevValueByPath(value, strings.Join(keys[1:], ".")) + } + } + } + } + return nil, errors.New("prevValueByPath: didn't find path – " + path) +} diff --git a/vendor/github.com/clbanning/mxj/v2/set.go b/vendor/github.com/clbanning/mxj/v2/set.go new file mode 100644 index 0000000..a297fc3 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/set.go @@ -0,0 +1,26 @@ +package mxj + +import ( + "strings" +) + +// Sets the value for the path +func (mv Map) SetValueForPath(value interface{}, path string) error { + pathAry := strings.Split(path, ".") + parentPathAry := pathAry[0 : len(pathAry)-1] + parentPath := strings.Join(parentPathAry, ".") + + val, err := mv.ValueForPath(parentPath) + if err != nil { + return err + } + if val == nil { + return nil // we just ignore the request if there's no val + } + + key := pathAry[len(pathAry)-1] + cVal := val.(map[string]interface{}) + cVal[key] = value + + return nil +} diff --git a/vendor/github.com/clbanning/mxj/v2/setfieldsep.go b/vendor/github.com/clbanning/mxj/v2/setfieldsep.go new file mode 100644 index 
0000000..b70715e --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/setfieldsep.go @@ -0,0 +1,20 @@ +package mxj + +// Per: https://github.com/clbanning/mxj/issues/37#issuecomment-278651862 +var fieldSep string = ":" + +// SetFieldSeparator changes the default field separator, ":", for the +// newVal argument in mv.UpdateValuesForPath and the optional 'subkey' arguments +// in mv.ValuesForKey and mv.ValuesForPath. +// +// E.g., if the newVal value is "http://blah/blah", setting the field separator +// to "|" will allow the newVal specification, "|http://blah/blah" to parse +// properly. If called with no argument or an empty string value, the field +// separator is set to the default, ":". +func SetFieldSeparator(s ...string) { + if len(s) == 0 || s[0] == "" { + fieldSep = ":" // the default + return + } + fieldSep = s[0] +} diff --git a/vendor/github.com/clbanning/mxj/v2/songtext.xml b/vendor/github.com/clbanning/mxj/v2/songtext.xml new file mode 100644 index 0000000..8c0f2be --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/songtext.xml @@ -0,0 +1,29 @@ + + help me! + + + + Henry was a renegade + Didn't like to play it safe + One component at a time + There's got to be a better way + Oh, people came from miles around + Searching for a steady job + Welcome to the Motor Town + Booming like an atom bomb + + + Oh, Henry was the end of the story + Then everything went wrong + And we'll return it to its former glory + But it just takes so long + + + + It's going to take a long time + It's going to take it, but we'll make it one day + It's going to take a long time + It's going to take it, but we'll make it one day + + + diff --git a/vendor/github.com/clbanning/mxj/v2/strict.go b/vendor/github.com/clbanning/mxj/v2/strict.go new file mode 100644 index 0000000..1e76956 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/strict.go @@ -0,0 +1,30 @@ +// Copyright 2016 Charles Banning. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// strict.go actually addresses setting xml.Decoder attribute +// values. This'll let you parse non-standard XML. + +package mxj + +import ( + "encoding/xml" +) + +// CustomDecoder can be used to specify xml.Decoder attribute +// values, e.g., Strict:false, to be used. By default CustomDecoder +// is nil. If CustomeDecoder != nil, then mxj.XmlCharsetReader variable is +// ignored and must be set as part of the CustomDecoder value, if needed. +// Usage: +// mxj.CustomDecoder = &xml.Decoder{Strict:false} +var CustomDecoder *xml.Decoder + +// useCustomDecoder copy over public attributes from customDecoder +func useCustomDecoder(d *xml.Decoder) { + d.Strict = CustomDecoder.Strict + d.AutoClose = CustomDecoder.AutoClose + d.Entity = CustomDecoder.Entity + d.CharsetReader = CustomDecoder.CharsetReader + d.DefaultSpace = CustomDecoder.DefaultSpace +} + diff --git a/vendor/github.com/clbanning/mxj/v2/struct.go b/vendor/github.com/clbanning/mxj/v2/struct.go new file mode 100644 index 0000000..9be636c --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/struct.go @@ -0,0 +1,54 @@ +// Copyright 2012-2017 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +package mxj + +import ( + "encoding/json" + "errors" + "reflect" + + // "github.com/fatih/structs" +) + +// Create a new Map value from a structure. Error returned if argument is not a structure. +// Only public structure fields are decoded in the Map value. See github.com/fatih/structs#Map +// for handling of "structs" tags. + +// DEPRECATED - import github.com/fatih/structs and cast result of structs.Map to mxj.Map. +// import "github.com/fatih/structs" +// ... +// sm, err := structs.Map() +// if err != nil { +// // handle error +// } +// m := mxj.Map(sm) +// Alernatively uncomment the old source and import in struct.go. 
+func NewMapStruct(structVal interface{}) (Map, error) { + return nil, errors.New("deprecated - see package documentation") + /* + if !structs.IsStruct(structVal) { + return nil, errors.New("NewMapStruct() error: argument is not type Struct") + } + return structs.Map(structVal), nil + */ +} + +// Marshal a map[string]interface{} into a structure referenced by 'structPtr'. Error returned +// if argument is not a pointer or if json.Unmarshal returns an error. +// json.Unmarshal structure encoding rules are followed to encode public structure fields. +func (mv Map) Struct(structPtr interface{}) error { + // should check that we're getting a pointer. + if reflect.ValueOf(structPtr).Kind() != reflect.Ptr { + return errors.New("mv.Struct() error: argument is not type Ptr") + } + + m := map[string]interface{}(mv) + j, err := json.Marshal(m) + if err != nil { + return err + } + + return json.Unmarshal(j, structPtr) +} diff --git a/vendor/github.com/clbanning/mxj/v2/updatevalues.go b/vendor/github.com/clbanning/mxj/v2/updatevalues.go new file mode 100644 index 0000000..9e10d84 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/updatevalues.go @@ -0,0 +1,258 @@ +// Copyright 2012-2014, 2017 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// updatevalues.go - modify a value based on path and possibly sub-keys +// TODO(clb): handle simple elements with attributes and NewMapXmlSeq Map values. + +package mxj + +import ( + "fmt" + "strconv" + "strings" +) + +// Update value based on path and possible sub-key values. +// A count of the number of values changed and any error are returned. +// If the count == 0, then no path (and subkeys) matched. +// 'newVal' can be a Map or map[string]interface{} value with a single 'key' that is the key to be modified +// or a string value "key:value[:type]" where type is "bool" or "num" to cast the value. 
+// 'path' is dot-notation list of keys to traverse; last key in path can be newVal key +// NOTE: 'path' spec does not currently support indexed array references. +// 'subkeys' are "key:value[:type]" entries that must match for path node +// - For attributes prefix the label with the attribute prefix character, by default a +// hyphen, '-', e.g., "-seq:3". (See SetAttrPrefix function.) +// - The subkey can be wildcarded - "key:*" - to require that it's there with some value. +// - If a subkey is preceeded with the '!' character, the key:value[:type] entry is treated as an +// exclusion critera - e.g., "!author:William T. Gaddis". +// +// NOTES: +// 1. Simple elements with attributes need a path terminated as ".#text" to modify the actual value. +// 2. Values in Maps created using NewMapXmlSeq are map[string]interface{} values with a "#text" key. +// 3. If values in 'newVal' or 'subkeys' args contain ":", use SetFieldSeparator to an unused symbol, +// perhaps "|". +func (mv Map) UpdateValuesForPath(newVal interface{}, path string, subkeys ...string) (int, error) { + m := map[string]interface{}(mv) + + // extract the subkeys + var subKeyMap map[string]interface{} + if len(subkeys) > 0 { + var err error + subKeyMap, err = getSubKeyMap(subkeys...) 
+ if err != nil { + return 0, err + } + } + + // extract key and value from newVal + var key string + var val interface{} + switch newVal.(type) { + case map[string]interface{}, Map: + switch newVal.(type) { // "fallthrough is not permitted in type switch" (Spec) + case Map: + newVal = newVal.(Map).Old() + } + if len(newVal.(map[string]interface{})) != 1 { + return 0, fmt.Errorf("newVal map can only have len == 1 - %+v", newVal) + } + for key, val = range newVal.(map[string]interface{}) { + } + case string: // split it as a key:value pair + ss := strings.Split(newVal.(string), fieldSep) + n := len(ss) + if n < 2 || n > 3 { + return 0, fmt.Errorf("unknown newVal spec - %+v", newVal) + } + key = ss[0] + if n == 2 { + val = interface{}(ss[1]) + } else if n == 3 { + switch ss[2] { + case "bool", "boolean": + nv, err := strconv.ParseBool(ss[1]) + if err != nil { + return 0, fmt.Errorf("can't convert newVal to bool - %+v", newVal) + } + val = interface{}(nv) + case "num", "numeric", "float", "int": + nv, err := strconv.ParseFloat(ss[1], 64) + if err != nil { + return 0, fmt.Errorf("can't convert newVal to float64 - %+v", newVal) + } + val = interface{}(nv) + default: + return 0, fmt.Errorf("unknown type for newVal value - %+v", newVal) + } + } + default: + return 0, fmt.Errorf("invalid newVal type - %+v", newVal) + } + + // parse path + keys := strings.Split(path, ".") + + var count int + updateValuesForKeyPath(key, val, m, keys, subKeyMap, &count) + + return count, nil +} + +// navigate the path +func updateValuesForKeyPath(key string, value interface{}, m interface{}, keys []string, subkeys map[string]interface{}, cnt *int) { + // ----- at end node: looking at possible node to get 'key' ---- + if len(keys) == 1 { + updateValue(key, value, m, keys[0], subkeys, cnt) + return + } + + // ----- here we are navigating the path thru the penultimate node -------- + // key of interest is keys[0] - the next in the path + switch keys[0] { + case "*": // wildcard - scan all values 
+ switch m.(type) { + case map[string]interface{}: + for _, v := range m.(map[string]interface{}) { + updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) + } + case []interface{}: + for _, v := range m.([]interface{}) { + switch v.(type) { + // flatten out a list of maps - keys are processed + case map[string]interface{}: + for _, vv := range v.(map[string]interface{}) { + updateValuesForKeyPath(key, value, vv, keys[1:], subkeys, cnt) + } + default: + updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) + } + } + } + default: // key - must be map[string]interface{} + switch m.(type) { + case map[string]interface{}: + if v, ok := m.(map[string]interface{})[keys[0]]; ok { + updateValuesForKeyPath(key, value, v, keys[1:], subkeys, cnt) + } + case []interface{}: // may be buried in list + for _, v := range m.([]interface{}) { + switch v.(type) { + case map[string]interface{}: + if vv, ok := v.(map[string]interface{})[keys[0]]; ok { + updateValuesForKeyPath(key, value, vv, keys[1:], subkeys, cnt) + } + } + } + } + } +} + +// change value if key and subkeys are present +func updateValue(key string, value interface{}, m interface{}, keys0 string, subkeys map[string]interface{}, cnt *int) { + // there are two possible options for the value of 'keys0': map[string]interface, []interface{} + // and 'key' is a key in the map or is a key in a map in a list. 
+ switch m.(type) { + case map[string]interface{}: // gotta have the last key + if keys0 == "*" { + for k := range m.(map[string]interface{}) { + updateValue(key, value, m, k, subkeys, cnt) + } + return + } + endVal, _ := m.(map[string]interface{})[keys0] + + // if newV key is the end of path, replace the value for path-end + // may be []interface{} - means replace just an entry w/ subkeys + // otherwise replace the keys0 value if subkeys are there + // NOTE: this will replace the subkeys, also + if key == keys0 { + switch endVal.(type) { + case map[string]interface{}: + if hasSubKeys(m, subkeys) { + (m.(map[string]interface{}))[keys0] = value + (*cnt)++ + } + case []interface{}: + // without subkeys can't select list member to modify + // so key:value spec is it ... + if hasSubKeys(m, subkeys) { + (m.(map[string]interface{}))[keys0] = value + (*cnt)++ + break + } + nv := make([]interface{}, 0) + var valmodified bool + for _, v := range endVal.([]interface{}) { + // check entry subkeys + if hasSubKeys(v, subkeys) { + // replace v with value + nv = append(nv, value) + valmodified = true + (*cnt)++ + continue + } + nv = append(nv, v) + } + if valmodified { + (m.(map[string]interface{}))[keys0] = interface{}(nv) + } + default: // anything else is a strict replacement + if hasSubKeys(m, subkeys) { + (m.(map[string]interface{}))[keys0] = value + (*cnt)++ + } + } + return + } + + // so value is for an element of endVal + // if endVal is a map then 'key' must be there w/ subkeys + // if endVal is a list then 'key' must be in a list member w/ subkeys + switch endVal.(type) { + case map[string]interface{}: + if !hasSubKeys(endVal, subkeys) { + return + } + if _, ok := (endVal.(map[string]interface{}))[key]; ok { + (endVal.(map[string]interface{}))[key] = value + (*cnt)++ + } + case []interface{}: // keys0 points to a list, check subkeys + for _, v := range endVal.([]interface{}) { + // got to be a map so we can replace value for 'key' + vv, vok := v.(map[string]interface{}) 
+ if !vok { + continue + } + if _, ok := vv[key]; !ok { + continue + } + if !hasSubKeys(vv, subkeys) { + continue + } + vv[key] = value + (*cnt)++ + } + } + case []interface{}: // key may be in a list member + // don't need to handle keys0 == "*"; we're looking at everything, anyway. + for _, v := range m.([]interface{}) { + // only map values - we're looking for 'key' + mm, ok := v.(map[string]interface{}) + if !ok { + continue + } + if _, ok := mm[key]; !ok { + continue + } + if !hasSubKeys(mm, subkeys) { + continue + } + mm[key] = value + (*cnt)++ + } + } + + // return +} diff --git a/vendor/github.com/clbanning/mxj/v2/xml.go b/vendor/github.com/clbanning/mxj/v2/xml.go new file mode 100644 index 0000000..b72a146 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/xml.go @@ -0,0 +1,1440 @@ +// Copyright 2012-2016, 2018-2019 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// xml.go - basically the core of X2j for map[string]interface{} values. +// NewMapXml, NewMapXmlReader, mv.Xml, mv.XmlWriter +// see x2j and j2x for wrappers to provide end-to-end transformation of XML and JSON messages. 
+ +package mxj + +import ( + "bytes" + "encoding/json" + "encoding/xml" + "errors" + "fmt" + "io" + "reflect" + "sort" + "strconv" + "strings" + "time" +) + +var ( + textK = "#text" + seqK = "#seq" + commentK = "#comment" + attrK = "#attr" + directiveK = "#directive" + procinstK = "#procinst" + targetK = "#target" + instK = "#inst" +) + +// Support overriding default Map keys prefix + +func SetGlobalKeyMapPrefix(s string) { + textK = strings.ReplaceAll(textK, textK[0:1], s) + seqK = strings.ReplaceAll(seqK, seqK[0:1], s) + commentK = strings.ReplaceAll(commentK, commentK[0:1], s) + directiveK = strings.ReplaceAll(directiveK, directiveK[0:1], s) + procinstK = strings.ReplaceAll(procinstK, procinstK[0:1], s) + targetK = strings.ReplaceAll(targetK, targetK[0:1], s) + instK = strings.ReplaceAll(instK, instK[0:1], s) + attrK = strings.ReplaceAll(attrK, attrK[0:1], s) +} + +// ------------------- NewMapXml & NewMapXmlReader ... ------------------------- + +// If XmlCharsetReader != nil, it will be used to decode the XML, if required. +// Note: if CustomDecoder != nil, then XmlCharsetReader is ignored; +// set the CustomDecoder attribute instead. +// import ( +// charset "code.google.com/p/go-charset/charset" +// github.com/clbanning/mxj +// ) +// ... +// mxj.XmlCharsetReader = charset.NewReader +// m, merr := mxj.NewMapXml(xmlValue) +var XmlCharsetReader func(charset string, input io.Reader) (io.Reader, error) + +// NewMapXml - convert a XML doc into a Map +// (This is analogous to unmarshalling a JSON string to map[string]interface{} using json.Unmarshal().) +// If the optional argument 'cast' is 'true', then values will be converted to boolean or float64 if possible. +// +// Converting XML to JSON is a simple as: +// ... +// mapVal, merr := mxj.NewMapXml(xmlVal) +// if merr != nil { +// // handle error +// } +// jsonVal, jerr := mapVal.Json() +// if jerr != nil { +// // handle error +// } +// +// NOTES: +// 1. 
Declarations, directives, process instructions and comments are NOT parsed. +// 2. The 'xmlVal' will be parsed looking for an xml.StartElement, so BOM and other +// extraneous xml.CharData will be ignored unless io.EOF is reached first. +// 3. If CoerceKeysToLower() has been called, then all key values will be lower case. +// 4. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. +// 5. If DisableTrimWhiteSpace(b bool) has been called, then all values will be trimmed or not. 'true' by default. +func NewMapXml(xmlVal []byte, cast ...bool) (Map, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + return xmlToMap(xmlVal, r) +} + +// Get next XML doc from an io.Reader as a Map value. Returns Map value. +// NOTES: +// 1. Declarations, directives, process instructions and comments are NOT parsed. +// 2. The 'xmlReader' will be parsed looking for an xml.StartElement, so BOM and other +// extraneous xml.CharData will be ignored unless io.EOF is reached first. +// 3. If CoerceKeysToLower() has been called, then all key values will be lower case. +// 4. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. +func NewMapXmlReader(xmlReader io.Reader, cast ...bool) (Map, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + + // We need to put an *os.File reader in a ByteReader or the xml.NewDecoder + // will wrap it in a bufio.Reader and seek on the file beyond where the + // xml.Decoder parses! + if _, ok := xmlReader.(io.ByteReader); !ok { + xmlReader = myByteReader(xmlReader) // see code at EOF + } + + // build the map + return xmlReaderToMap(xmlReader, r) +} + +// Get next XML doc from an io.Reader as a Map value. Returns Map value and slice with the raw XML. +// NOTES: +// 1. Declarations, directives, process instructions and comments are NOT parsed. +// 2. 
Due to the implementation of xml.Decoder, the raw XML off the reader is buffered to []byte +// using a ByteReader. If the io.Reader is an os.File, there may be significant performance impact. +// See the examples - getmetrics1.go through getmetrics4.go - for comparative use cases on a large +// data set. If the io.Reader is wrapping a []byte value in-memory, however, such as http.Request.Body +// you CAN use it to efficiently unmarshal a XML doc and retrieve the raw XML in a single call. +// 3. The 'raw' return value may be larger than the XML text value. +// 4. The 'xmlReader' will be parsed looking for an xml.StartElement, so BOM and other +// extraneous xml.CharData will be ignored unless io.EOF is reached first. +// 5. If CoerceKeysToLower() has been called, then all key values will be lower case. +// 6. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. +func NewMapXmlReaderRaw(xmlReader io.Reader, cast ...bool) (Map, []byte, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + // create TeeReader so we can retrieve raw XML + buf := make([]byte, 0) + wb := bytes.NewBuffer(buf) + trdr := myTeeReader(xmlReader, wb) // see code at EOF + + m, err := xmlReaderToMap(trdr, r) + + // retrieve the raw XML that was decoded + b := wb.Bytes() + + if err != nil { + return nil, b, err + } + + return m, b, nil +} + +// xmlReaderToMap() - parse a XML io.Reader to a map[string]interface{} value +func xmlReaderToMap(rdr io.Reader, r bool) (map[string]interface{}, error) { + // parse the Reader + p := xml.NewDecoder(rdr) + if CustomDecoder != nil { + useCustomDecoder(p) + } else { + p.CharsetReader = XmlCharsetReader + } + return xmlToMapParser("", nil, p, r) +} + +// xmlToMap - convert a XML doc into map[string]interface{} value +func xmlToMap(doc []byte, r bool) (map[string]interface{}, error) { + b := bytes.NewReader(doc) + p := xml.NewDecoder(b) + if CustomDecoder != nil { + useCustomDecoder(p) + } else { + 
p.CharsetReader = XmlCharsetReader + } + return xmlToMapParser("", nil, p, r) +} + +// ===================================== where the work happens ============================= + +// PrependAttrWithHyphen. Prepend attribute tags with a hyphen. +// Default is 'true'. (Not applicable to NewMapXmlSeq(), mv.XmlSeq(), etc.) +// Note: +// If 'false', unmarshaling and marshaling is not symmetric. Attributes will be +// marshal'd as attr and may be part of a list. +func PrependAttrWithHyphen(v bool) { + if v { + attrPrefix = "-" + lenAttrPrefix = len(attrPrefix) + return + } + attrPrefix = "" + lenAttrPrefix = len(attrPrefix) +} + +// Include sequence id with inner tags. - per Sean Murphy, murphysean84@gmail.com. +var includeTagSeqNum bool + +// IncludeTagSeqNum - include a "_seq":N key:value pair with each inner tag, denoting +// its position when parsed. This is of limited usefulness, since list values cannot +// be tagged with "_seq" without changing their depth in the Map. +// So THIS SHOULD BE USED WITH CAUTION - see the test cases. Here's a sample of what +// you get. +/* + + + + + hello + + + parses as: + + { + Obj:{ + "-c":"la", + "-h":"da", + "-x":"dee", + "intObj":[ + { + "-id"="3", + "_seq":"0" // if mxj.Cast is passed, then: "_seq":0 + }, + { + "-id"="2", + "_seq":"2" + }], + "intObj1":{ + "-id":"1", + "_seq":"1" + }, + "StrObj":{ + "#text":"hello", // simple element value gets "#text" tag + "_seq":"3" + } + } + } +*/ +func IncludeTagSeqNum(b ...bool) { + if len(b) == 0 { + includeTagSeqNum = !includeTagSeqNum + } else if len(b) == 1 { + includeTagSeqNum = b[0] + } +} + +// all keys will be "lower case" +var lowerCase bool + +// Coerce all tag values to keys in lower case. This is useful if you've got sources with variable +// tag capitalization, and you want to use m.ValuesForKeys(), etc., with the key or path spec +// in lower case. 
+// CoerceKeysToLower() will toggle the coercion flag true|false - on|off +// CoerceKeysToLower(true|false) will set the coercion flag on|off +// +// NOTE: only recognized by NewMapXml, NewMapXmlReader, and NewMapXmlReaderRaw functions as well as +// the associated HandleXmlReader and HandleXmlReaderRaw. +func CoerceKeysToLower(b ...bool) { + if len(b) == 0 { + lowerCase = !lowerCase + } else if len(b) == 1 { + lowerCase = b[0] + } +} + +// disableTrimWhiteSpace sets if the white space should be removed or not +var disableTrimWhiteSpace bool +var trimRunes = "\t\r\b\n " + +// DisableTrimWhiteSpace set if the white space should be trimmed or not. By default white space is always trimmed. If +// no argument is provided, trim white space will be disabled. +func DisableTrimWhiteSpace(b ...bool) { + if len(b) == 0 { + disableTrimWhiteSpace = true + } else { + disableTrimWhiteSpace = b[0] + } + + if disableTrimWhiteSpace { + trimRunes = "\t\r\b\n" + } else { + trimRunes = "\t\r\b\n " + } +} + +// 25jun16: Allow user to specify the "prefix" character for XML attribute key labels. +// We do this by replacing '`' constant with attrPrefix var, replacing useHyphen with attrPrefix = "", +// and adding a SetAttrPrefix(s string) function. + +var attrPrefix string = `-` // the default +var lenAttrPrefix int = 1 // the default + +// SetAttrPrefix changes the default, "-", to the specified value, s. +// SetAttrPrefix("") is the same as PrependAttrWithHyphen(false). +// (Not applicable for NewMapXmlSeq(), mv.XmlSeq(), etc.) +func SetAttrPrefix(s string) { + attrPrefix = s + lenAttrPrefix = len(attrPrefix) +} + +// 18jan17: Allows user to specify if the map keys should be in snake case instead +// of the default hyphenated notation. +var snakeCaseKeys bool + +// CoerceKeysToSnakeCase changes the default, false, to the specified value, b. +// Note: the attribute prefix will be a hyphen, '-', or what ever string value has +// been specified using SetAttrPrefix. 
+func CoerceKeysToSnakeCase(b ...bool) { + if len(b) == 0 { + snakeCaseKeys = !snakeCaseKeys + } else if len(b) == 1 { + snakeCaseKeys = b[0] + } +} + +// 10jan19: use of pull request #57 should be conditional - legacy code assumes +// numeric values are float64. +var castToInt bool + +// CastValuesToInt tries to coerce numeric valus to int64 or uint64 instead of the +// default float64. Repeated calls with no argument will toggle this on/off, or this +// handling will be set with the value of 'b'. +func CastValuesToInt(b ...bool) { + if len(b) == 0 { + castToInt = !castToInt + } else if len(b) == 1 { + castToInt = b[0] + } +} + +// 05feb17: support processing XMPP streams (issue #36) +var handleXMPPStreamTag bool + +// HandleXMPPStreamTag causes decoder to parse XMPP elements. +// If called with no argument, XMPP stream element handling is toggled on/off. +// (See xmppStream_test.go for example.) +// If called with NewMapXml, NewMapXmlReader, New MapXmlReaderRaw the "stream" +// element will be returned as: +// map["stream"]interface{}{map[-]interface{}}. +// If called with NewMapSeq, NewMapSeqReader, NewMapSeqReaderRaw the "stream" +// element will be returned as: +// map["stream:stream"]interface{}{map["#attr"]interface{}{map[string]interface{}}} +// where the "#attr" values have "#text" and "#seq" keys. (See NewMapXmlSeq.) +func HandleXMPPStreamTag(b ...bool) { + if len(b) == 0 { + handleXMPPStreamTag = !handleXMPPStreamTag + } else if len(b) == 1 { + handleXMPPStreamTag = b[0] + } +} + +// 21jan18 - decode all values as map["#text":value] (issue #56) +var decodeSimpleValuesAsMap bool + +// DecodeSimpleValuesAsMap forces all values to be decoded as map["#text":]. +// If called with no argument, the decoding is toggled on/off. +// +// By default the NewMapXml functions decode simple values without attributes as +// map[:]. This function causes simple values without attributes to be +// decoded the same as simple values with attributes - map[:map["#text":]]. 
+func DecodeSimpleValuesAsMap(b ...bool) { + if len(b) == 0 { + decodeSimpleValuesAsMap = !decodeSimpleValuesAsMap + } else if len(b) == 1 { + decodeSimpleValuesAsMap = b[0] + } +} + +// xmlToMapParser (2015.11.12) - load a 'clean' XML doc into a map[string]interface{} directly. +// A refactoring of xmlToTreeParser(), markDuplicate() and treeToMap() - here, all-in-one. +// We've removed the intermediate *node tree with the allocation and subsequent rescanning. +func xmlToMapParser(skey string, a []xml.Attr, p *xml.Decoder, r bool) (map[string]interface{}, error) { + if lowerCase { + skey = strings.ToLower(skey) + } + if snakeCaseKeys { + skey = strings.Replace(skey, "-", "_", -1) + } + + // NOTE: all attributes and sub-elements parsed into 'na', 'na' is returned as value for 'skey' in 'n'. + // Unless 'skey' is a simple element w/o attributes, in which case the xml.CharData value is the value. + var n, na map[string]interface{} + var seq int // for includeTagSeqNum + + // Allocate maps and load attributes, if any. + // NOTE: on entry from NewMapXml(), etc., skey=="", and we fall through + // to get StartElement then recurse with skey==xml.StartElement.Name.Local + // where we begin allocating map[string]interface{} values 'n' and 'na'. + if skey != "" { + n = make(map[string]interface{}) // old n + na = make(map[string]interface{}) // old n.nodes + if len(a) > 0 { + for _, v := range a { + if snakeCaseKeys { + v.Name.Local = strings.Replace(v.Name.Local, "-", "_", -1) + } + var key string + key = attrPrefix + v.Name.Local + if lowerCase { + key = strings.ToLower(key) + } + if xmlEscapeCharsDecoder { // per issue#84 + v.Value = escapeChars(v.Value) + } + na[key] = cast(v.Value, r, key) + } + } + } + // Return XMPP message. 
+ if handleXMPPStreamTag && skey == "stream" { + n[skey] = na + return n, nil + } + + for { + t, err := p.Token() + if err != nil { + if err != io.EOF { + return nil, errors.New("xml.Decoder.Token() - " + err.Error()) + } + return nil, err + } + switch t.(type) { + case xml.StartElement: + tt := t.(xml.StartElement) + + // First call to xmlToMapParser() doesn't pass xml.StartElement - the map key. + // So when the loop is first entered, the first token is the root tag along + // with any attributes, which we process here. + // + // Subsequent calls to xmlToMapParser() will pass in tag+attributes for + // processing before getting the next token which is the element value, + // which is done above. + if skey == "" { + return xmlToMapParser(tt.Name.Local, tt.Attr, p, r) + } + + // If not initializing the map, parse the element. + // len(nn) == 1, necessarily - it is just an 'n'. + nn, err := xmlToMapParser(tt.Name.Local, tt.Attr, p, r) + if err != nil { + return nil, err + } + + // The nn map[string]interface{} value is a na[nn_key] value. + // We need to see if nn_key already exists - means we're parsing a list. + // This may require converting na[nn_key] value into []interface{} type. + // First, extract the key:val for the map - it's a singleton. + // Note: + // * if CoerceKeysToLower() called, then key will be lower case. + // * if CoerceKeysToSnakeCase() called, then key will be converted to snake case. + var key string + var val interface{} + for key, val = range nn { + break + } + + // IncludeTagSeqNum requests that the element be augmented with a "_seq" sub-element. + // In theory, we don't need this if len(na) == 1. But, we don't know what might + // come next - we're only parsing forward. So if you ask for 'includeTagSeqNum' you + // get it on every element. (Personally, I never liked this, but I added it on request + // and did get a $50 Amazon gift card in return - now we support it for backwards compatibility!) 
+ if includeTagSeqNum { + switch val.(type) { + case []interface{}: + // noop - There's no clean way to handle this w/o changing message structure. + case map[string]interface{}: + val.(map[string]interface{})["_seq"] = seq // will overwrite an "_seq" XML tag + seq++ + case interface{}: // a non-nil simple element: string, float64, bool + v := map[string]interface{}{textK: val} + v["_seq"] = seq + seq++ + val = v + } + } + + // 'na' holding sub-elements of n. + // See if 'key' already exists. + // If 'key' exists, then this is a list, if not just add key:val to na. + if v, ok := na[key]; ok { + var a []interface{} + switch v.(type) { + case []interface{}: + a = v.([]interface{}) + default: // anything else - note: v.(type) != nil + a = []interface{}{v} + } + a = append(a, val) + na[key] = a + } else { + na[key] = val // save it as a singleton + } + case xml.EndElement: + // len(n) > 0 if this is a simple element w/o xml.Attrs - see xml.CharData case. + if len(n) == 0 { + // If len(na)==0 we have an empty element == ""; + // it has no xml.Attr nor xml.CharData. + // Note: in original node-tree parser, val defaulted to ""; + // so we always had the default if len(node.nodes) == 0. 
+ if len(na) > 0 { + n[skey] = na + } else { + n[skey] = "" // empty element + } + } else if len(n) == 1 && len(na) > 0 { + // it's a simple element w/ no attributes w/ subelements + for _, v := range n { + na[textK] = v + } + n[skey] = na + } + return n, nil + case xml.CharData: + // clean up possible noise + tt := strings.Trim(string(t.(xml.CharData)), trimRunes) + if xmlEscapeCharsDecoder { // issue#84 + tt = escapeChars(tt) + } + if len(tt) > 0 { + if len(na) > 0 || decodeSimpleValuesAsMap { + na[textK] = cast(tt, r, textK) + } else if skey != "" { + n[skey] = cast(tt, r, skey) + } else { + // per Adrian (http://www.adrianlungu.com/) catch stray text + // in decoder stream - + // https://github.com/clbanning/mxj/pull/14#issuecomment-182816374 + // NOTE: CharSetReader must be set to non-UTF-8 CharSet or you'll get + // a p.Token() decoding error when the BOM is UTF-16 or UTF-32. + continue + } + } + default: + // noop + } + } +} + +var castNanInf bool + +// Cast "Nan", "Inf", "-Inf" XML values to 'float64'. +// By default, these values will be decoded as 'string'. 
+func CastNanInf(b ...bool) { + if len(b) == 0 { + castNanInf = !castNanInf + } else if len(b) == 1 { + castNanInf = b[0] + } +} + +// cast - try to cast string values to bool or float64 +// 't' is the tag key that can be checked for 'not-casting' +func cast(s string, r bool, t string) interface{} { + if checkTagToSkip != nil && t != "" && checkTagToSkip(t) { + // call the check-function here with 't[0]' + // if 'true' return s + return s + } + + if r { + // handle nan and inf + if !castNanInf { + switch strings.ToLower(s) { + case "nan", "inf", "-inf": + return s + } + } + + // handle numeric strings ahead of boolean + if castToInt { + if f, err := strconv.ParseInt(s, 10, 64); err == nil { + return f + } + if f, err := strconv.ParseUint(s, 10, 64); err == nil { + return f + } + } + + if castToFloat { + if f, err := strconv.ParseFloat(s, 64); err == nil { + return f + } + } + + // ParseBool treats "1"==true & "0"==false, we've already scanned those + // values as float64. See if value has 't' or 'f' as initial screen to + // minimize calls to ParseBool; also, see if len(s) < 6. + if castToBool { + if len(s) > 0 && len(s) < 6 { + switch s[:1] { + case "t", "T", "f", "F": + if b, err := strconv.ParseBool(s); err == nil { + return b + } + } + } + } + } + return s +} + +// pull request, #59 +var castToFloat = true + +// CastValuesToFloat can be used to skip casting to float64 when +// "cast" argument is 'true' in NewMapXml, etc. +// Default is true. +func CastValuesToFloat(b ...bool) { + if len(b) == 0 { + castToFloat = !castToFloat + } else if len(b) == 1 { + castToFloat = b[0] + } +} + +var castToBool = true + +// CastValuesToBool can be used to skip casting to bool when +// "cast" argument is 'true' in NewMapXml, etc. +// Default is true. 
+func CastValuesToBool(b ...bool) { + if len(b) == 0 { + castToBool = !castToBool + } else if len(b) == 1 { + castToBool = b[0] + } +} + +// checkTagToSkip - switch to address Issue #58 + +var checkTagToSkip func(string) bool + +// SetCheckTagToSkipFunc registers function to test whether the value +// for a tag should be cast to bool or float64 when "cast" argument is 'true'. +// (Dot tag path notation is not supported.) +// NOTE: key may be "#text" if it's a simple element with attributes +// or "decodeSimpleValuesAsMap == true". +// NOTE: does not apply to NewMapXmlSeq... functions. +func SetCheckTagToSkipFunc(fn func(string) bool) { + checkTagToSkip = fn +} + +// ------------------ END: NewMapXml & NewMapXmlReader ------------------------- + +// ------------------ mv.Xml & mv.XmlWriter - from j2x ------------------------ + +const ( + DefaultRootTag = "doc" +) + +var useGoXmlEmptyElemSyntax bool + +// XmlGoEmptyElemSyntax() - rather than . +// Go's encoding/xml package marshals empty XML elements as . By default this package +// encodes empty elements as . If you're marshaling Map values that include structures +// (which are passed to xml.Marshal for encoding), this will let you conform to the standard package. +func XmlGoEmptyElemSyntax() { + useGoXmlEmptyElemSyntax = true +} + +// XmlDefaultEmptyElemSyntax() - rather than . +// Return XML encoding for empty elements to the default package setting. +// Reverses effect of XmlGoEmptyElemSyntax(). +func XmlDefaultEmptyElemSyntax() { + useGoXmlEmptyElemSyntax = false +} + +// ------- issue #88 ---------- +// xmlCheckIsValid set switch to force decoding the encoded XML to +// see if it is valid XML. +var xmlCheckIsValid bool + +// XmlCheckIsValid forces the encoded XML to be checked for validity. +func XmlCheckIsValid(b ...bool) { + if len(b) == 1 { + xmlCheckIsValid = b[0] + return + } + xmlCheckIsValid = !xmlCheckIsValid +} + +// Encode a Map as XML. The companion of NewMapXml(). +// The following rules apply. 
+// - The key label "#text" is treated as the value for a simple element with attributes. +// - Map keys that begin with a hyphen, '-', are interpreted as attributes. +// It is an error if the attribute doesn't have a []byte, string, number, or boolean value. +// - Map value type encoding: +// > string, bool, float64, int, int32, int64, float32: per "%v" formating +// > []bool, []uint8: by casting to string +// > structures, etc.: handed to xml.Marshal() - if there is an error, the element +// value is "UNKNOWN" +// - Elements with only attribute values or are null are terminated using "/>". +// - If len(mv) == 1 and no rootTag is provided, then the map key is used as the root tag, possible. +// Thus, `{ "key":"value" }` encodes as "value". +// - To encode empty elements in a syntax consistent with encoding/xml call UseGoXmlEmptyElementSyntax(). +// The attributes tag=value pairs are alphabetized by "tag". Also, when encoding map[string]interface{} values - +// complex elements, etc. - the key:value pairs are alphabetized by key so the resulting tags will appear sorted. 
+func (mv Map) Xml(rootTag ...string) ([]byte, error) { + m := map[string]interface{}(mv) + var err error + b := new(bytes.Buffer) + p := new(pretty) // just a stub + + if len(m) == 1 && len(rootTag) == 0 { + for key, value := range m { + // if it an array, see if all values are map[string]interface{} + // we force a new root tag if we'll end up with no key:value in the list + // so: key:[string_val, bool:true] --> string_valtrue + switch value.(type) { + case []interface{}: + for _, v := range value.([]interface{}) { + switch v.(type) { + case map[string]interface{}: // noop + default: // anything else + err = marshalMapToXmlIndent(false, b, DefaultRootTag, m, p) + goto done + } + } + } + err = marshalMapToXmlIndent(false, b, key, value, p) + } + } else if len(rootTag) == 1 { + err = marshalMapToXmlIndent(false, b, rootTag[0], m, p) + } else { + err = marshalMapToXmlIndent(false, b, DefaultRootTag, m, p) + } +done: + if xmlCheckIsValid { + d := xml.NewDecoder(bytes.NewReader(b.Bytes())) + for { + _, err = d.Token() + if err == io.EOF { + err = nil + break + } else if err != nil { + return nil, err + } + } + } + return b.Bytes(), err +} + +// The following implementation is provided only for symmetry with NewMapXmlReader[Raw] +// The names will also provide a key for the number of return arguments. + +// Writes the Map as XML on the Writer. +// See Xml() for encoding rules. +func (mv Map) XmlWriter(xmlWriter io.Writer, rootTag ...string) error { + x, err := mv.Xml(rootTag...) + if err != nil { + return err + } + + _, err = xmlWriter.Write(x) + return err +} + +// Writes the Map as XML on the Writer. []byte is the raw XML that was written. +// See Xml() for encoding rules. +/* +func (mv Map) XmlWriterRaw(xmlWriter io.Writer, rootTag ...string) ([]byte, error) { + x, err := mv.Xml(rootTag...) + if err != nil { + return x, err + } + + _, err = xmlWriter.Write(x) + return x, err +} +*/ + +// Writes the Map as pretty XML on the Writer. +// See Xml() for encoding rules. 
+func (mv Map) XmlIndentWriter(xmlWriter io.Writer, prefix, indent string, rootTag ...string) error { + x, err := mv.XmlIndent(prefix, indent, rootTag...) + if err != nil { + return err + } + + _, err = xmlWriter.Write(x) + return err +} + +// Writes the Map as pretty XML on the Writer. []byte is the raw XML that was written. +// See Xml() for encoding rules. +/* +func (mv Map) XmlIndentWriterRaw(xmlWriter io.Writer, prefix, indent string, rootTag ...string) ([]byte, error) { + x, err := mv.XmlIndent(prefix, indent, rootTag...) + if err != nil { + return x, err + } + + _, err = xmlWriter.Write(x) + return x, err +} +*/ + +// -------------------- END: mv.Xml & mv.XmlWriter ------------------------------- + +// -------------- Handle XML stream by processing Map value -------------------- + +// Default poll delay to keep Handler from spinning on an open stream +// like sitting on os.Stdin waiting for imput. +var xhandlerPollInterval = time.Millisecond + +// Bulk process XML using handlers that process a Map value. +// 'rdr' is an io.Reader for XML (stream) +// 'mapHandler' is the Map processor. Return of 'false' stops io.Reader processing. +// 'errHandler' is the error processor. Return of 'false' stops io.Reader processing and returns the error. +// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. +// This means that you can stop reading the file on error or after processing a particular message. +// To have reading and handling run concurrently, pass argument to a go routine in handler and return 'true'. 
+func HandleXmlReader(xmlReader io.Reader, mapHandler func(Map) bool, errHandler func(error) bool) error { + var n int + for { + m, merr := NewMapXmlReader(xmlReader) + n++ + + // handle error condition with errhandler + if merr != nil && merr != io.EOF { + merr = fmt.Errorf("[xmlReader: %d] %s", n, merr.Error()) + if ok := errHandler(merr); !ok { + // caused reader termination + return merr + } + continue + } + + // pass to maphandler + if len(m) != 0 { + if ok := mapHandler(m); !ok { + break + } + } else if merr != io.EOF { + time.Sleep(xhandlerPollInterval) + } + + if merr == io.EOF { + break + } + } + return nil +} + +// Bulk process XML using handlers that process a Map value and the raw XML. +// 'rdr' is an io.Reader for XML (stream) +// 'mapHandler' is the Map and raw XML - []byte - processor. Return of 'false' stops io.Reader processing. +// 'errHandler' is the error and raw XML processor. Return of 'false' stops io.Reader processing and returns the error. +// Note: mapHandler() and errHandler() calls are blocking, so reading and processing of messages is serialized. +// This means that you can stop reading the file on error or after processing a particular message. +// To have reading and handling run concurrently, pass argument(s) to a go routine in handler and return 'true'. +// See NewMapXmlReaderRaw for comment on performance associated with retrieving raw XML from a Reader. 
+func HandleXmlReaderRaw(xmlReader io.Reader, mapHandler func(Map, []byte) bool, errHandler func(error, []byte) bool) error { + var n int + for { + m, raw, merr := NewMapXmlReaderRaw(xmlReader) + n++ + + // handle error condition with errhandler + if merr != nil && merr != io.EOF { + merr = fmt.Errorf("[xmlReader: %d] %s", n, merr.Error()) + if ok := errHandler(merr, raw); !ok { + // caused reader termination + return merr + } + continue + } + + // pass to maphandler + if len(m) != 0 { + if ok := mapHandler(m, raw); !ok { + break + } + } else if merr != io.EOF { + time.Sleep(xhandlerPollInterval) + } + + if merr == io.EOF { + break + } + } + return nil +} + +// ----------------- END: Handle XML stream by processing Map value -------------- + +// -------- a hack of io.TeeReader ... need one that's an io.ByteReader for xml.NewDecoder() ---------- + +// This is a clone of io.TeeReader with the additional method t.ReadByte(). +// Thus, this TeeReader is also an io.ByteReader. +// This is necessary because xml.NewDecoder uses a ByteReader not a Reader. It appears to have been written +// with bufio.Reader or bytes.Reader in mind ... not a generic io.Reader, which doesn't have to have ReadByte().. +// If NewDecoder is passed a Reader that does not satisfy ByteReader() it wraps the Reader with +// bufio.NewReader and uses ReadByte rather than Read that runs the TeeReader pipe logic. + +type teeReader struct { + r io.Reader + w io.Writer + b []byte +} + +func myTeeReader(r io.Reader, w io.Writer) io.Reader { + b := make([]byte, 1) + return &teeReader{r, w, b} +} + +// need for io.Reader - but we don't use it ... +func (t *teeReader) Read(p []byte) (int, error) { + return 0, nil +} + +func (t *teeReader) ReadByte() (byte, error) { + n, err := t.r.Read(t.b) + if n > 0 { + if _, err := t.w.Write(t.b[:1]); err != nil { + return t.b[0], err + } + } + return t.b[0], err +} + +// For use with NewMapXmlReader & NewMapXmlSeqReader. 
+type byteReader struct { + r io.Reader + b []byte +} + +func myByteReader(r io.Reader) io.Reader { + b := make([]byte, 1) + return &byteReader{r, b} +} + +// Need for io.Reader interface ... +// Needed if reading a malformed http.Request.Body - issue #38. +func (b *byteReader) Read(p []byte) (int, error) { + return b.r.Read(p) +} + +func (b *byteReader) ReadByte() (byte, error) { + _, err := b.r.Read(b.b) + if len(b.b) > 0 { + // issue #38 + return b.b[0], err + } + var c byte + return c, err +} + +// ----------------------- END: io.TeeReader hack ----------------------------------- + +// ---------------------- XmlIndent - from j2x package ---------------------------- + +// Encode a map[string]interface{} as a pretty XML string. +// See Xml for encoding rules. +func (mv Map) XmlIndent(prefix, indent string, rootTag ...string) ([]byte, error) { + m := map[string]interface{}(mv) + + var err error + b := new(bytes.Buffer) + p := new(pretty) + p.indent = indent + p.padding = prefix + + if len(m) == 1 && len(rootTag) == 0 { + // this can extract the key for the single map element + // use it if it isn't a key for a list + for key, value := range m { + if _, ok := value.([]interface{}); ok { + err = marshalMapToXmlIndent(true, b, DefaultRootTag, m, p) + } else { + err = marshalMapToXmlIndent(true, b, key, value, p) + } + } + } else if len(rootTag) == 1 { + err = marshalMapToXmlIndent(true, b, rootTag[0], m, p) + } else { + err = marshalMapToXmlIndent(true, b, DefaultRootTag, m, p) + } + if xmlCheckIsValid { + d := xml.NewDecoder(bytes.NewReader(b.Bytes())) + for { + _, err = d.Token() + if err == io.EOF { + err = nil + break + } else if err != nil { + return nil, err + } + } + } + return b.Bytes(), err +} + +type pretty struct { + indent string + cnt int + padding string + mapDepth int + start int +} + +func (p *pretty) Indent() { + p.padding += p.indent + p.cnt++ +} + +func (p *pretty) Outdent() { + if p.cnt > 0 { + p.padding = p.padding[:len(p.padding)-len(p.indent)] 
+ p.cnt-- + } +} + +// where the work actually happens +// returns an error if an attribute is not atomic +// NOTE: 01may20 - replaces mapToXmlIndent(); uses bytes.Buffer instead for string appends. +func marshalMapToXmlIndent(doIndent bool, b *bytes.Buffer, key string, value interface{}, pp *pretty) error { + var err error + var endTag bool + var isSimple bool + var elen int + p := &pretty{pp.indent, pp.cnt, pp.padding, pp.mapDepth, pp.start} + + // per issue #48, 18apr18 - try and coerce maps to map[string]interface{} + // Don't need for mapToXmlSeqIndent, since maps there are decoded by NewMapXmlSeq(). + if reflect.ValueOf(value).Kind() == reflect.Map { + switch value.(type) { + case map[string]interface{}: + default: + val := make(map[string]interface{}) + vv := reflect.ValueOf(value) + keys := vv.MapKeys() + for _, k := range keys { + val[fmt.Sprint(k)] = vv.MapIndex(k).Interface() + } + value = val + } + } + + // 14jul20. The following block of code has become something of a catch all for odd stuff + // that might be passed in as a result of casting an arbitrary map[] to an mxj.Map + // value and then call m.Xml or m.XmlIndent. See issue #71 (and #73) for such edge cases. 
+ switch value.(type) { + // these types are handled during encoding + case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32, json.Number: + case []map[string]interface{}, []string, []float64, []bool, []int, []int32, []int64, []float32, []json.Number: + case []interface{}: + case nil: + value = "" + default: + // see if value is a struct, if so marshal using encoding/xml package + if reflect.ValueOf(value).Kind() == reflect.Struct { + if v, err := xml.Marshal(value); err != nil { + return err + } else { + value = string(v) + } + } else { + // coerce eveything else into a string value + value = fmt.Sprint(value) + } + } + + // start the XML tag with required indentaton and padding + if doIndent { + switch value.(type) { + case []interface{}, []string: + // list processing handles indentation for all elements + default: + if _, err = b.WriteString(p.padding); err != nil { + return err + } + } + } + switch value.(type) { + case []interface{}: + default: + if _, err = b.WriteString(`<` + key); err != nil { + return err + } + } + + switch value.(type) { + case map[string]interface{}: + vv := value.(map[string]interface{}) + lenvv := len(vv) + // scan out attributes - attribute keys have prepended attrPrefix + attrlist := make([][2]string, len(vv)) + var n int + var ss string + for k, v := range vv { + if lenAttrPrefix > 0 && lenAttrPrefix < len(k) && k[:lenAttrPrefix] == attrPrefix { + switch v.(type) { + case string: + if xmlEscapeChars { + ss = escapeChars(v.(string)) + } else { + ss = v.(string) + } + attrlist[n][0] = k[lenAttrPrefix:] + attrlist[n][1] = ss + case float64, bool, int, int32, int64, float32, json.Number: + attrlist[n][0] = k[lenAttrPrefix:] + attrlist[n][1] = fmt.Sprintf("%v", v) + case []byte: + if xmlEscapeChars { + ss = escapeChars(string(v.([]byte))) + } else { + ss = string(v.([]byte)) + } + attrlist[n][0] = k[lenAttrPrefix:] + attrlist[n][1] = ss + default: + return fmt.Errorf("invalid attribute value for: 
%s:<%T>", k, v) + } + n++ + } + } + if n > 0 { + attrlist = attrlist[:n] + sort.Sort(attrList(attrlist)) + for _, v := range attrlist { + if _, err = b.WriteString(` ` + v[0] + `="` + v[1] + `"`); err != nil { + return err + } + } + } + // only attributes? + if n == lenvv { + if useGoXmlEmptyElemSyntax { + if _, err = b.WriteString(`"); err != nil { + return err + } + } else { + if _, err = b.WriteString(`/>`); err != nil { + return err + } + } + break + } + + // simple element? Note: '#text" is an invalid XML tag. + isComplex := false + if v, ok := vv[textK]; ok && n+1 == lenvv { + // just the value and attributes + switch v.(type) { + case string: + if xmlEscapeChars { + v = escapeChars(v.(string)) + } else { + v = v.(string) + } + case []byte: + if xmlEscapeChars { + v = escapeChars(string(v.([]byte))) + } else { + v = string(v.([]byte)) + } + } + if _, err = b.WriteString(">" + fmt.Sprintf("%v", v)); err != nil { + return err + } + endTag = true + elen = 1 + isSimple = true + break + } else if ok { + // need to handle when there are subelements in addition to the simple element value + // issue #90 + switch v.(type) { + case string: + if xmlEscapeChars { + v = escapeChars(v.(string)) + } else { + v = v.(string) + } + case []byte: + if xmlEscapeChars { + v = escapeChars(string(v.([]byte))) + } else { + v = string(v.([]byte)) + } + } + if _, err = b.WriteString(">" + fmt.Sprintf("%v", v)); err != nil { + return err + } + isComplex = true + } + + // close tag with possible attributes + if !isComplex { + if _, err = b.WriteString(">"); err != nil { + return err + } + } + if doIndent { + // *s += "\n" + if _, err = b.WriteString("\n"); err != nil { + return err + } + } + // something more complex + p.mapDepth++ + // extract the map k:v pairs and sort on key + elemlist := make([][2]interface{}, len(vv)) + n = 0 + for k, v := range vv { + if k == textK { + // simple element handled above + continue + } + if lenAttrPrefix > 0 && lenAttrPrefix < len(k) && 
k[:lenAttrPrefix] == attrPrefix { + continue + } + elemlist[n][0] = k + elemlist[n][1] = v + n++ + } + elemlist = elemlist[:n] + sort.Sort(elemList(elemlist)) + var i int + for _, v := range elemlist { + switch v[1].(type) { + case []interface{}: + default: + if i == 0 && doIndent { + p.Indent() + } + } + i++ + if err := marshalMapToXmlIndent(doIndent, b, v[0].(string), v[1], p); err != nil { + return err + } + switch v[1].(type) { + case []interface{}: // handled in []interface{} case + default: + if doIndent { + p.Outdent() + } + } + i-- + } + p.mapDepth-- + endTag = true + elen = 1 // we do have some content ... + case []interface{}: + // special case - found during implementing Issue #23 + if len(value.([]interface{})) == 0 { + if doIndent { + if _, err = b.WriteString(p.padding + p.indent); err != nil { + return err + } + } + if _, err = b.WriteString("<" + key); err != nil { + return err + } + elen = 0 + endTag = true + break + } + for _, v := range value.([]interface{}) { + if doIndent { + p.Indent() + } + if err := marshalMapToXmlIndent(doIndent, b, key, v, p); err != nil { + return err + } + if doIndent { + p.Outdent() + } + } + return nil + case []string: + // This was added by https://github.com/slotix ... not a type that + // would be encountered if mv generated from NewMapXml, NewMapJson. + // Could be encountered in AnyXml(), so we'll let it stay, though + // it should be merged with case []interface{}, above. 
+ //quick fix for []string type + //[]string should be treated exaclty as []interface{} + if len(value.([]string)) == 0 { + if doIndent { + if _, err = b.WriteString(p.padding + p.indent); err != nil { + return err + } + } + if _, err = b.WriteString("<" + key); err != nil { + return err + } + elen = 0 + endTag = true + break + } + for _, v := range value.([]string) { + if doIndent { + p.Indent() + } + if err := marshalMapToXmlIndent(doIndent, b, key, v, p); err != nil { + return err + } + if doIndent { + p.Outdent() + } + } + return nil + case nil: + // terminate the tag + if doIndent { + // *s += p.padding + if _, err = b.WriteString(p.padding); err != nil { + return err + } + } + if _, err = b.WriteString("<" + key); err != nil { + return err + } + endTag, isSimple = true, true + break + default: // handle anything - even goofy stuff + elen = 0 + switch value.(type) { + case string: + v := value.(string) + if xmlEscapeChars { + v = escapeChars(v) + } + elen = len(v) + if elen > 0 { + // *s += ">" + v + if _, err = b.WriteString(">" + v); err != nil { + return err + } + } + case float64, bool, int, int32, int64, float32, json.Number: + v := fmt.Sprintf("%v", value) + elen = len(v) // always > 0 + if _, err = b.WriteString(">" + v); err != nil { + return err + } + case []byte: // NOTE: byte is just an alias for uint8 + // similar to how xml.Marshal handles []byte structure members + v := string(value.([]byte)) + if xmlEscapeChars { + v = escapeChars(v) + } + elen = len(v) + if elen > 0 { + // *s += ">" + v + if _, err = b.WriteString(">" + v); err != nil { + return err + } + } + default: + if _, err = b.WriteString(">"); err != nil { + return err + } + var v []byte + var err error + if doIndent { + v, err = xml.MarshalIndent(value, p.padding, p.indent) + } else { + v, err = xml.Marshal(value) + } + if err != nil { + if _, err = b.WriteString(">UNKNOWN"); err != nil { + return err + } + } else { + elen = len(v) + if elen > 0 { + if _, err = b.Write(v); err != nil { 
+ return err + } + } + } + } + isSimple = true + endTag = true + } + if endTag { + if doIndent { + if !isSimple { + if _, err = b.WriteString(p.padding); err != nil { + return err + } + } + } + if elen > 0 || useGoXmlEmptyElemSyntax { + if elen == 0 { + if _, err = b.WriteString(">"); err != nil { + return err + } + } + if _, err = b.WriteString(`"); err != nil { + return err + } + } else { + if _, err = b.WriteString(`/>`); err != nil { + return err + } + } + } + if doIndent { + if p.cnt > p.start { + if _, err = b.WriteString("\n"); err != nil { + return err + } + } + p.Outdent() + } + + return nil +} + +// ============================ sort interface implementation ================= + +type attrList [][2]string + +func (a attrList) Len() int { + return len(a) +} + +func (a attrList) Swap(i, j int) { + a[i], a[j] = a[j], a[i] +} + +func (a attrList) Less(i, j int) bool { + return a[i][0] <= a[j][0] +} + +type elemList [][2]interface{} + +func (e elemList) Len() int { + return len(e) +} + +func (e elemList) Swap(i, j int) { + e[i], e[j] = e[j], e[i] +} + +func (e elemList) Less(i, j int) bool { + return e[i][0].(string) <= e[j][0].(string) +} diff --git a/vendor/github.com/clbanning/mxj/v2/xmlseq.go b/vendor/github.com/clbanning/mxj/v2/xmlseq.go new file mode 100644 index 0000000..9732dec --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/xmlseq.go @@ -0,0 +1,902 @@ +// Copyright 2012-2016, 2019 Charles Banning. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +// xmlseq.go - version of xml.go with sequence # injection on Decoding and sorting on Encoding. +// Also, handles comments, directives and process instructions. 
+ +package mxj + +import ( + "bytes" + "encoding/xml" + "errors" + "fmt" + "io" + "regexp" + "sort" + "strings" +) + +// MapSeq is like Map but contains seqencing indices to allow recovering the original order of +// the XML elements when the map[string]interface{} is marshaled. Element attributes are +// stored as a map["#attr"]map[]map[string]interface{}{"#text":"", "#seq":} +// value instead of denoting the keys with a prefix character. Also, comments, directives and +// process instructions are preserved. +type MapSeq map[string]interface{} + +// NoRoot is returned by NewXmlSeq, etc., when a comment, directive or procinstr element is parsed +// in the XML data stream and the element is not contained in an XML object with a root element. +var NoRoot = errors.New("no root key") +var NO_ROOT = NoRoot // maintain backwards compatibility + +// ------------------- NewMapXmlSeq & NewMapXmlSeqReader ... ------------------------- + +// NewMapXmlSeq converts a XML doc into a MapSeq value with elements id'd with decoding sequence key represented +// as map["#seq"]. +// If the optional argument 'cast' is 'true', then values will be converted to boolean or float64 if possible. +// NOTE: "#seq" key/value pairs are removed on encoding with msv.Xml() / msv.XmlIndent(). +// • attributes are a map - map["#attr"]map["attr_key"]map[string]interface{}{"#text":, "#seq":} +// • all simple elements are decoded as map["#text"]interface{} with a "#seq" k:v pair, as well. +// • lists always decode as map["list_tag"][]map[string]interface{} where the array elements are maps that +// include a "#seq" k:v pair based on sequence they are decoded. 
Thus, XML like: +// +// value 1 +// value 2 +// value 3 +// +// is decoded as: +// doc : +// ltag :[[]interface{}] +// [item: 0] +// #seq :[int] 0 +// #text :[string] value 1 +// [item: 1] +// #seq :[int] 2 +// #text :[string] value 3 +// newtag : +// #seq :[int] 1 +// #text :[string] value 2 +// It will encode in proper sequence even though the MapSeq representation merges all "ltag" elements in an array. +// • comments - "" - are decoded as map["#comment"]map["#text"]"cmnt_text" with a "#seq" k:v pair. +// • directives - "" - are decoded as map["#directive"]map[#text"]"directive_text" with a "#seq" k:v pair. +// • process instructions - "" - are decoded as map["#procinst"]interface{} where the #procinst value +// is of map[string]interface{} type with the following keys: #target, #inst, and #seq. +// • comments, directives, and procinsts that are NOT part of a document with a root key will be returned as +// map[string]interface{} and the error value 'NoRoot'. +// • note: ": tag preserve the +// ":" notation rather than stripping it as with NewMapXml(). +// 2. Attribute keys for name space prefix declarations preserve "xmlns:" notation. +// +// ERRORS: +// 1. If a NoRoot error, "no root key," is returned, check the initial map key for a "#comment", +// "#directive" or #procinst" key. +// 2. Unmarshaling an XML doc that is formatted using the whitespace character, " ", will error, since +// Decoder.RawToken treats such occurances as significant. See NewMapFormattedXmlSeq(). +func NewMapXmlSeq(xmlVal []byte, cast ...bool) (MapSeq, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + return xmlSeqToMap(xmlVal, r) +} + +// NewMapFormattedXmlSeq performs the same as NewMapXmlSeq but is useful for processing XML objects that +// are formatted using the whitespace character, " ". 
(The stdlib xml.Decoder, by default, treats all +// whitespace as significant; Decoder.Token() and Decoder.RawToken() will return strings of one or more +// whitespace characters and without alphanumeric or punctuation characters as xml.CharData values.) +// +// If you're processing such XML, then this will convert all occurrences of whitespace-only strings +// into an empty string, "", prior to parsing the XML - irrespective of whether the occurrence is +// formatting or is a actual element value. +func NewMapFormattedXmlSeq(xmlVal []byte, cast ...bool) (MapSeq, error) { + var c bool + if len(cast) == 1 { + c = cast[0] + } + + // Per PR #104 - clean out formatting characters so they don't show up in Decoder.RawToken() stream. + // NOTE: Also replaces element values that are solely comprised of formatting/whitespace characters + // with empty string, "". + r := regexp.MustCompile(`>[\n\t\r ]*<`) + xmlVal = r.ReplaceAll(xmlVal, []byte("><")) + return xmlSeqToMap(xmlVal, c) +} + +// NewMpaXmlSeqReader returns next XML doc from an io.Reader as a MapSeq value. +// NOTES: +// 1. The 'xmlReader' will be parsed looking for an xml.StartElement, xml.Comment, etc., so BOM and other +// extraneous xml.CharData will be ignored unless io.EOF is reached first. +// 2. CoerceKeysToLower() is NOT recognized, since the intent here is to eventually call m.XmlSeq() to +// re-encode the message in its original structure. +// 3. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. +// +// ERRORS: +// 1. If a NoRoot error, "no root key," is returned, check the initial map key for a "#comment", +// "#directive" or #procinst" key. +func NewMapXmlSeqReader(xmlReader io.Reader, cast ...bool) (MapSeq, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + + // We need to put an *os.File reader in a ByteReader or the xml.NewDecoder + // will wrap it in a bufio.Reader and seek on the file beyond where the + // xml.Decoder parses! 
+ if _, ok := xmlReader.(io.ByteReader); !ok { + xmlReader = myByteReader(xmlReader) // see code at EOF + } + + // build the map + return xmlSeqReaderToMap(xmlReader, r) +} + +// NewMapXmlSeqReaderRaw returns the next XML doc from an io.Reader as a MapSeq value. +// Returns MapSeq value, slice with the raw XML, and any error. +// NOTES: +// 1. Due to the implementation of xml.Decoder, the raw XML off the reader is buffered to []byte +// using a ByteReader. If the io.Reader is an os.File, there may be significant performance impact. +// See the examples - getmetrics1.go through getmetrics4.go - for comparative use cases on a large +// data set. If the io.Reader is wrapping a []byte value in-memory, however, such as http.Request.Body +// you CAN use it to efficiently unmarshal a XML doc and retrieve the raw XML in a single call. +// 2. The 'raw' return value may be larger than the XML text value. +// 3. The 'xmlReader' will be parsed looking for an xml.StartElement, xml.Comment, etc., so BOM and other +// extraneous xml.CharData will be ignored unless io.EOF is reached first. +// 4. CoerceKeysToLower() is NOT recognized, since the intent here is to eventually call m.XmlSeq() to +// re-encode the message in its original structure. +// 5. If CoerceKeysToSnakeCase() has been called, then all key values will be converted to snake case. +// +// ERRORS: +// 1. If a NoRoot error, "no root key," is returned, check if the initial map key is "#comment", +// "#directive" or #procinst" key. 
+func NewMapXmlSeqReaderRaw(xmlReader io.Reader, cast ...bool) (MapSeq, []byte, error) { + var r bool + if len(cast) == 1 { + r = cast[0] + } + // create TeeReader so we can retrieve raw XML + buf := make([]byte, 0) + wb := bytes.NewBuffer(buf) + trdr := myTeeReader(xmlReader, wb) + + m, err := xmlSeqReaderToMap(trdr, r) + + // retrieve the raw XML that was decoded + b := wb.Bytes() + + // err may be NoRoot + return m, b, err +} + +// xmlSeqReaderToMap() - parse a XML io.Reader to a map[string]interface{} value +func xmlSeqReaderToMap(rdr io.Reader, r bool) (map[string]interface{}, error) { + // parse the Reader + p := xml.NewDecoder(rdr) + if CustomDecoder != nil { + useCustomDecoder(p) + } else { + p.CharsetReader = XmlCharsetReader + } + return xmlSeqToMapParser("", nil, p, r) +} + +// xmlSeqToMap - convert a XML doc into map[string]interface{} value +func xmlSeqToMap(doc []byte, r bool) (map[string]interface{}, error) { + b := bytes.NewReader(doc) + p := xml.NewDecoder(b) + if CustomDecoder != nil { + useCustomDecoder(p) + } else { + p.CharsetReader = XmlCharsetReader + } + return xmlSeqToMapParser("", nil, p, r) +} + +// ===================================== where the work happens ============================= + +// xmlSeqToMapParser - load a 'clean' XML doc into a map[string]interface{} directly. +// Add #seq tag value for each element decoded - to be used for Encoding later. +func xmlSeqToMapParser(skey string, a []xml.Attr, p *xml.Decoder, r bool) (map[string]interface{}, error) { + if snakeCaseKeys { + skey = strings.Replace(skey, "-", "_", -1) + } + + // NOTE: all attributes and sub-elements parsed into 'na', 'na' is returned as value for 'skey' in 'n'. + var n, na map[string]interface{} + var seq int // for including seq num when decoding + + // Allocate maps and load attributes, if any. 
+ // NOTE: on entry from NewMapXml(), etc., skey=="", and we fall through + // to get StartElement then recurse with skey==xml.StartElement.Name.Local + // where we begin allocating map[string]interface{} values 'n' and 'na'. + if skey != "" { + // 'n' only needs one slot - save call to runtime•hashGrow() + // 'na' we don't know + n = make(map[string]interface{}, 1) + na = make(map[string]interface{}) + if len(a) > 0 { + // xml.Attr is decoded into: map["#attr"]map[]interface{} + // where interface{} is map[string]interface{}{"#text":, "#seq":} + aa := make(map[string]interface{}, len(a)) + for i, v := range a { + if snakeCaseKeys { + v.Name.Local = strings.Replace(v.Name.Local, "-", "_", -1) + } + if xmlEscapeCharsDecoder { // per issue#84 + v.Value = escapeChars(v.Value) + } + if len(v.Name.Space) > 0 { + aa[v.Name.Space+`:`+v.Name.Local] = map[string]interface{}{textK: cast(v.Value, r, ""), seqK: i} + } else { + aa[v.Name.Local] = map[string]interface{}{textK: cast(v.Value, r, ""), seqK: i} + } + } + na[attrK] = aa + } + } + + // Return XMPP message. + if handleXMPPStreamTag && skey == "stream:stream" { + n[skey] = na + return n, nil + } + + for { + t, err := p.RawToken() + if err != nil { + if err != io.EOF { + return nil, errors.New("xml.Decoder.Token() - " + err.Error()) + } + return nil, err + } + switch t.(type) { + case xml.StartElement: + tt := t.(xml.StartElement) + + // First call to xmlSeqToMapParser() doesn't pass xml.StartElement - the map key. + // So when the loop is first entered, the first token is the root tag along + // with any attributes, which we process here. + // + // Subsequent calls to xmlSeqToMapParser() will pass in tag+attributes for + // processing before getting the next token which is the element value, + // which is done above. 
+ if skey == "" { + if len(tt.Name.Space) > 0 { + return xmlSeqToMapParser(tt.Name.Space+`:`+tt.Name.Local, tt.Attr, p, r) + } else { + return xmlSeqToMapParser(tt.Name.Local, tt.Attr, p, r) + } + } + + // If not initializing the map, parse the element. + // len(nn) == 1, necessarily - it is just an 'n'. + var nn map[string]interface{} + if len(tt.Name.Space) > 0 { + nn, err = xmlSeqToMapParser(tt.Name.Space+`:`+tt.Name.Local, tt.Attr, p, r) + } else { + nn, err = xmlSeqToMapParser(tt.Name.Local, tt.Attr, p, r) + } + if err != nil { + return nil, err + } + + // The nn map[string]interface{} value is a na[nn_key] value. + // We need to see if nn_key already exists - means we're parsing a list. + // This may require converting na[nn_key] value into []interface{} type. + // First, extract the key:val for the map - it's a singleton. + var key string + var val interface{} + for key, val = range nn { + break + } + + // add "#seq" k:v pair - + // Sequence number included even in list elements - this should allow us + // to properly resequence even something goofy like: + // item 1 + // item 2 + // item 3 + // where all the "list" subelements are decoded into an array. + switch val.(type) { + case map[string]interface{}: + val.(map[string]interface{})[seqK] = seq + seq++ + case interface{}: // a non-nil simple element: string, float64, bool + v := map[string]interface{}{textK: val, seqK: seq} + seq++ + val = v + } + + // 'na' holding sub-elements of n. + // See if 'key' already exists. + // If 'key' exists, then this is a list, if not just add key:val to na. 
+ if v, ok := na[key]; ok { + var a []interface{} + switch v.(type) { + case []interface{}: + a = v.([]interface{}) + default: // anything else - note: v.(type) != nil + a = []interface{}{v} + } + a = append(a, val) + na[key] = a + } else { + na[key] = val // save it as a singleton + } + case xml.EndElement: + if skey != "" { + tt := t.(xml.EndElement) + if snakeCaseKeys { + tt.Name.Local = strings.Replace(tt.Name.Local, "-", "_", -1) + } + var name string + if len(tt.Name.Space) > 0 { + name = tt.Name.Space + `:` + tt.Name.Local + } else { + name = tt.Name.Local + } + if skey != name { + return nil, fmt.Errorf("element %s not properly terminated, got %s at #%d", + skey, name, p.InputOffset()) + } + } + // len(n) > 0 if this is a simple element w/o xml.Attrs - see xml.CharData case. + if len(n) == 0 { + // If len(na)==0 we have an empty element == ""; + // it has no xml.Attr nor xml.CharData. + // Empty element content will be map["etag"]map["#text"]"" + // after #seq injection - map["etag"]map["#seq"]seq - after return. + if len(na) > 0 { + n[skey] = na + } else { + n[skey] = "" // empty element + } + } + return n, nil + case xml.CharData: + // clean up possible noise + tt := strings.Trim(string(t.(xml.CharData)), trimRunes) + if xmlEscapeCharsDecoder { // issue#84 + tt = escapeChars(tt) + } + if skey == "" { + // per Adrian (http://www.adrianlungu.com/) catch stray text + // in decoder stream - + // https://github.com/clbanning/mxj/pull/14#issuecomment-182816374 + // NOTE: CharSetReader must be set to non-UTF-8 CharSet or you'll get + // a p.Token() decoding error when the BOM is UTF-16 or UTF-32. 
+ continue + } + if len(tt) > 0 { + // every simple element is a #text and has #seq associated with it + na[textK] = cast(tt, r, "") + na[seqK] = seq + seq++ + } + case xml.Comment: + if n == nil { // no root 'key' + n = map[string]interface{}{commentK: string(t.(xml.Comment))} + return n, NoRoot + } + cm := make(map[string]interface{}, 2) + cm[textK] = string(t.(xml.Comment)) + cm[seqK] = seq + seq++ + na[commentK] = cm + case xml.Directive: + if n == nil { // no root 'key' + n = map[string]interface{}{directiveK: string(t.(xml.Directive))} + return n, NoRoot + } + dm := make(map[string]interface{}, 2) + dm[textK] = string(t.(xml.Directive)) + dm[seqK] = seq + seq++ + na[directiveK] = dm + case xml.ProcInst: + if n == nil { + na = map[string]interface{}{targetK: t.(xml.ProcInst).Target, instK: string(t.(xml.ProcInst).Inst)} + n = map[string]interface{}{procinstK: na} + return n, NoRoot + } + pm := make(map[string]interface{}, 3) + pm[targetK] = t.(xml.ProcInst).Target + pm[instK] = string(t.(xml.ProcInst).Inst) + pm[seqK] = seq + seq++ + na[procinstK] = pm + default: + // noop - shouldn't ever get here, now, since we handle all token types + } + } +} + +// ------------------ END: NewMapXml & NewMapXmlReader ------------------------- + +// --------------------- mv.XmlSeq & mv.XmlSeqWriter ------------------------- + +// Xml encodes a MapSeq as XML with elements sorted on #seq. The companion of NewMapXmlSeq(). +// The following rules apply. +// - The "#seq" key value is used to seqence the subelements or attributes only. +// - The "#attr" map key identifies the map of attribute map[string]interface{} values with "#text" key. +// - The "#comment" map key identifies a comment in the value "#text" map entry - . +// - The "#directive" map key identifies a directive in the value "#text" map entry - . +// - The "#procinst" map key identifies a process instruction in the value "#target" and "#inst" +// map entries - . 
+// - Value type encoding: +// > string, bool, float64, int, int32, int64, float32: per "%v" formating +// > []bool, []uint8: by casting to string +// > structures, etc.: handed to xml.Marshal() - if there is an error, the element +// value is "UNKNOWN" +// - Elements with only attribute values or are null are terminated using "/>" unless XmlGoEmptyElemSystax() called. +// - If len(mv) == 1 and no rootTag is provided, then the map key is used as the root tag, possible. +// Thus, `{ "key":"value" }` encodes as "value". +func (mv MapSeq) Xml(rootTag ...string) ([]byte, error) { + m := map[string]interface{}(mv) + var err error + s := new(string) + p := new(pretty) // just a stub + + if len(m) == 1 && len(rootTag) == 0 { + for key, value := range m { + // if it's an array, see if all values are map[string]interface{} + // we force a new root tag if we'll end up with no key:value in the list + // so: key:[string_val, bool:true] --> string_valtrue + switch value.(type) { + case []interface{}: + for _, v := range value.([]interface{}) { + switch v.(type) { + case map[string]interface{}: // noop + default: // anything else + err = mapToXmlSeqIndent(false, s, DefaultRootTag, m, p) + goto done + } + } + } + err = mapToXmlSeqIndent(false, s, key, value, p) + } + } else if len(rootTag) == 1 { + err = mapToXmlSeqIndent(false, s, rootTag[0], m, p) + } else { + err = mapToXmlSeqIndent(false, s, DefaultRootTag, m, p) + } +done: + if xmlCheckIsValid { + d := xml.NewDecoder(bytes.NewReader([]byte(*s))) + for { + _, err = d.Token() + if err == io.EOF { + err = nil + break + } else if err != nil { + return nil, err + } + } + } + return []byte(*s), err +} + +// The following implementation is provided only for symmetry with NewMapXmlReader[Raw] +// The names will also provide a key for the number of return arguments. + +// XmlWriter Writes the MapSeq value as XML on the Writer. +// See MapSeq.Xml() for encoding rules. 
+func (mv MapSeq) XmlWriter(xmlWriter io.Writer, rootTag ...string) error { + x, err := mv.Xml(rootTag...) + if err != nil { + return err + } + + _, err = xmlWriter.Write(x) + return err +} + +// XmlWriteRaw writes the MapSeq value as XML on the Writer. []byte is the raw XML that was written. +// See Map.XmlSeq() for encoding rules. +/* +func (mv MapSeq) XmlWriterRaw(xmlWriter io.Writer, rootTag ...string) ([]byte, error) { + x, err := mv.Xml(rootTag...) + if err != nil { + return x, err + } + + _, err = xmlWriter.Write(x) + return x, err +} +*/ + +// XmlIndentWriter writes the MapSeq value as pretty XML on the Writer. +// See MapSeq.Xml() for encoding rules. +func (mv MapSeq) XmlIndentWriter(xmlWriter io.Writer, prefix, indent string, rootTag ...string) error { + x, err := mv.XmlIndent(prefix, indent, rootTag...) + if err != nil { + return err + } + + _, err = xmlWriter.Write(x) + return err +} + +// XmlIndentWriterRaw writes the Map as pretty XML on the Writer. []byte is the raw XML that was written. +// See Map.XmlSeq() for encoding rules. +/* +func (mv MapSeq) XmlIndentWriterRaw(xmlWriter io.Writer, prefix, indent string, rootTag ...string) ([]byte, error) { + x, err := mv.XmlSeqIndent(prefix, indent, rootTag...) + if err != nil { + return x, err + } + + _, err = xmlWriter.Write(x) + return x, err +} +*/ + +// -------------------- END: mv.Xml & mv.XmlWriter ------------------------------- + +// ---------------------- XmlSeqIndent ---------------------------- + +// XmlIndent encodes a map[string]interface{} as a pretty XML string. +// See MapSeq.XmlSeq() for encoding rules. 
+func (mv MapSeq) XmlIndent(prefix, indent string, rootTag ...string) ([]byte, error) { + m := map[string]interface{}(mv) + + var err error + s := new(string) + p := new(pretty) + p.indent = indent + p.padding = prefix + + if len(m) == 1 && len(rootTag) == 0 { + // this can extract the key for the single map element + // use it if it isn't a key for a list + for key, value := range m { + if _, ok := value.([]interface{}); ok { + err = mapToXmlSeqIndent(true, s, DefaultRootTag, m, p) + } else { + err = mapToXmlSeqIndent(true, s, key, value, p) + } + } + } else if len(rootTag) == 1 { + err = mapToXmlSeqIndent(true, s, rootTag[0], m, p) + } else { + err = mapToXmlSeqIndent(true, s, DefaultRootTag, m, p) + } + if xmlCheckIsValid { + if _, err = NewMapXml([]byte(*s)); err != nil { + return nil, err + } + d := xml.NewDecoder(bytes.NewReader([]byte(*s))) + for { + _, err = d.Token() + if err == io.EOF { + err = nil + break + } else if err != nil { + return nil, err + } + } + } + return []byte(*s), err +} + +// where the work actually happens +// returns an error if an attribute is not atomic +func mapToXmlSeqIndent(doIndent bool, s *string, key string, value interface{}, pp *pretty) error { + var endTag bool + var isSimple bool + var noEndTag bool + var elen int + var ss string + p := &pretty{pp.indent, pp.cnt, pp.padding, pp.mapDepth, pp.start} + + switch value.(type) { + case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32: + if doIndent { + *s += p.padding + } + if key != commentK && key != directiveK && key != procinstK { + *s += `<` + key + } + } + switch value.(type) { + case map[string]interface{}: + val := value.(map[string]interface{}) + + if key == commentK { + *s += `` + noEndTag = true + break + } + + if key == directiveK { + *s += `` + noEndTag = true + break + } + + if key == procinstK { + *s += `` + noEndTag = true + break + } + + haveAttrs := false + // process attributes first + if v, ok := 
val[attrK].(map[string]interface{}); ok { + // First, unroll the map[string]interface{} into a []keyval array. + // Then sequence it. + kv := make([]keyval, len(v)) + n := 0 + for ak, av := range v { + kv[n] = keyval{ak, av} + n++ + } + sort.Sort(elemListSeq(kv)) + // Now encode the attributes in original decoding sequence, using keyval array. + for _, a := range kv { + vv := a.v.(map[string]interface{}) + switch vv[textK].(type) { + case string: + if xmlEscapeChars { + ss = escapeChars(vv[textK].(string)) + } else { + ss = vv[textK].(string) + } + *s += ` ` + a.k + `="` + ss + `"` + case float64, bool, int, int32, int64, float32: + *s += ` ` + a.k + `="` + fmt.Sprintf("%v", vv[textK]) + `"` + case []byte: + if xmlEscapeChars { + ss = escapeChars(string(vv[textK].([]byte))) + } else { + ss = string(vv[textK].([]byte)) + } + *s += ` ` + a.k + `="` + ss + `"` + default: + return fmt.Errorf("invalid attribute value for: %s", a.k) + } + } + haveAttrs = true + } + + // simple element? + // every map value has, at least, "#seq" and, perhaps, "#text" and/or "#attr" + _, seqOK := val[seqK] // have key + if v, ok := val[textK]; ok && ((len(val) == 3 && haveAttrs) || (len(val) == 2 && !haveAttrs)) && seqOK { + if stmp, ok := v.(string); ok && stmp != "" { + if xmlEscapeChars { + stmp = escapeChars(stmp) + } + *s += ">" + stmp + endTag = true + elen = 1 + } + isSimple = true + break + } else if !ok && ((len(val) == 2 && haveAttrs) || (len(val) == 1 && !haveAttrs)) && seqOK { + // here no #text but have #seq or #seq+#attr + endTag = false + break + } + + // we now need to sequence everything except attributes + // 'kv' will hold everything that needs to be written + kv := make([]keyval, 0) + for k, v := range val { + if k == attrK { // already processed + continue + } + if k == seqK { // ignore - just for sorting + continue + } + switch v.(type) { + case []interface{}: + // unwind the array as separate entries + for _, vv := range v.([]interface{}) { + kv = append(kv, 
keyval{k, vv}) + } + default: + kv = append(kv, keyval{k, v}) + } + } + + // close tag with possible attributes + *s += ">" + if doIndent { + *s += "\n" + } + // something more complex + p.mapDepth++ + sort.Sort(elemListSeq(kv)) + i := 0 + for _, v := range kv { + switch v.v.(type) { + case []interface{}: + default: + if i == 0 && doIndent { + p.Indent() + } + } + i++ + if err := mapToXmlSeqIndent(doIndent, s, v.k, v.v, p); err != nil { + return err + } + switch v.v.(type) { + case []interface{}: // handled in []interface{} case + default: + if doIndent { + p.Outdent() + } + } + i-- + } + p.mapDepth-- + endTag = true + elen = 1 // we do have some content other than attrs + case []interface{}: + for _, v := range value.([]interface{}) { + if doIndent { + p.Indent() + } + if err := mapToXmlSeqIndent(doIndent, s, key, v, p); err != nil { + return err + } + if doIndent { + p.Outdent() + } + } + return nil + case nil: + // terminate the tag + if doIndent { + *s += p.padding + } + *s += "<" + key + endTag, isSimple = true, true + break + default: // handle anything - even goofy stuff + elen = 0 + switch value.(type) { + case string: + if xmlEscapeChars { + ss = escapeChars(value.(string)) + } else { + ss = value.(string) + } + elen = len(ss) + if elen > 0 { + *s += ">" + ss + } + case float64, bool, int, int32, int64, float32: + v := fmt.Sprintf("%v", value) + elen = len(v) + if elen > 0 { + *s += ">" + v + } + case []byte: // NOTE: byte is just an alias for uint8 + // similar to how xml.Marshal handles []byte structure members + if xmlEscapeChars { + ss = escapeChars(string(value.([]byte))) + } else { + ss = string(value.([]byte)) + } + elen = len(ss) + if elen > 0 { + *s += ">" + ss + } + default: + var v []byte + var err error + if doIndent { + v, err = xml.MarshalIndent(value, p.padding, p.indent) + } else { + v, err = xml.Marshal(value) + } + if err != nil { + *s += ">UNKNOWN" + } else { + elen = len(v) + if elen > 0 { + *s += string(v) + } + } + } + isSimple = true 
+ endTag = true + } + if endTag && !noEndTag { + if doIndent { + if !isSimple { + *s += p.padding + } + } + switch value.(type) { + case map[string]interface{}, []byte, string, float64, bool, int, int32, int64, float32: + if elen > 0 || useGoXmlEmptyElemSyntax { + if elen == 0 { + *s += ">" + } + *s += `" + } else { + *s += `/>` + } + } + } else if !noEndTag { + if useGoXmlEmptyElemSyntax { + *s += `" + // *s += ">" + } else { + *s += "/>" + } + } + if doIndent { + if p.cnt > p.start { + *s += "\n" + } + p.Outdent() + } + + return nil +} + +// the element sort implementation + +type keyval struct { + k string + v interface{} +} +type elemListSeq []keyval + +func (e elemListSeq) Len() int { + return len(e) +} + +func (e elemListSeq) Swap(i, j int) { + e[i], e[j] = e[j], e[i] +} + +func (e elemListSeq) Less(i, j int) bool { + var iseq, jseq int + var fiseq, fjseq float64 + var ok bool + if iseq, ok = e[i].v.(map[string]interface{})[seqK].(int); !ok { + if fiseq, ok = e[i].v.(map[string]interface{})[seqK].(float64); ok { + iseq = int(fiseq) + } else { + iseq = 9999999 + } + } + + if jseq, ok = e[j].v.(map[string]interface{})[seqK].(int); !ok { + if fjseq, ok = e[j].v.(map[string]interface{})[seqK].(float64); ok { + jseq = int(fjseq) + } else { + jseq = 9999999 + } + } + + return iseq <= jseq +} + +// =============== https://groups.google.com/forum/#!topic/golang-nuts/lHPOHD-8qio + +// BeautifyXml (re)formats an XML doc similar to Map.XmlIndent(). +// It preserves comments, directives and process instructions, +func BeautifyXml(b []byte, prefix, indent string) ([]byte, error) { + x, err := NewMapXmlSeq(b) + if err != nil { + return nil, err + } + return x.XmlIndent(prefix, indent) +} diff --git a/vendor/github.com/clbanning/mxj/v2/xmlseq2.go b/vendor/github.com/clbanning/mxj/v2/xmlseq2.go new file mode 100644 index 0000000..467fd07 --- /dev/null +++ b/vendor/github.com/clbanning/mxj/v2/xmlseq2.go @@ -0,0 +1,18 @@ +// Copyright 2012-2016, 2019 Charles Banning. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file + +package mxj + +// ---------------- expose Map methods to MapSeq type --------------------------- + +// Pretty print a Map. +func (msv MapSeq) StringIndent(offset ...int) string { + return writeMap(map[string]interface{}(msv), true, true, offset...) +} + +// Pretty print a Map without the value type information - just key:value entries. +func (msv MapSeq) StringIndentNoTypeInfo(offset ...int) string { + return writeMap(map[string]interface{}(msv), false, true, offset...) +} + diff --git a/vendor/github.com/cli/safeexec/LICENSE b/vendor/github.com/cli/safeexec/LICENSE new file mode 100644 index 0000000..ca49857 --- /dev/null +++ b/vendor/github.com/cli/safeexec/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2020, GitHub Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/cli/safeexec/README.md b/vendor/github.com/cli/safeexec/README.md new file mode 100644 index 0000000..4ff1c2a --- /dev/null +++ b/vendor/github.com/cli/safeexec/README.md @@ -0,0 +1,48 @@ +# safeexec + +A Go module that provides a stabler alternative to `exec.LookPath()` that: +- Avoids a Windows security risk of executing commands found in the current directory; and +- Allows executing commands found in PATH, even if they come from relative PATH entries. + +This is an alternative to [`golang.org/x/sys/execabs`](https://pkg.go.dev/golang.org/x/sys/execabs). + +## Usage +```go +import ( + "os/exec" + "github.com/cli/safeexec" +) + +func gitStatus() error { + gitBin, err := safeexec.LookPath("git") + if err != nil { + return err + } + cmd := exec.Command(gitBin, "status") + return cmd.Run() +} +``` + +## Background +### Windows security vulnerability with Go <= 1.18 +Go 1.18 (and older) standard library has a security vulnerability when executing programs: +```go +import "os/exec" + +func gitStatus() error { + // On Windows, this will result in `.\git.exe` or `.\git.bat` being executed + // if either were found in the current working directory. + cmd := exec.Command("git", "status") + return cmd.Run() +} +``` + +For historic reasons, Go used to implicitly [include the current directory](https://github.com/golang/go/issues/38736) in the PATH resolution on Windows. 
The `safeexec` package avoids searching the current directory on Windows. + +### Relative PATH entries with Go 1.19+ + +Go 1.19 (and newer) standard library [throws an error](https://github.com/golang/go/issues/43724) if `exec.LookPath("git")` resolved to an executable relative to the current directory. This can happen on other platforms if the PATH environment variable contains relative entries, e.g. `PATH=./bin:$PATH`. The `safeexec` package allows respecting relative PATH entries as it assumes that the responsibility for keeping PATH safe lies outside of the Go program. + +## TODO + +Ideally, this module would also provide `exec.Command()` and `exec.CommandContext()` equivalents that delegate to the patched version of `LookPath`. However, this doesn't seem possible since `LookPath` may return an error, while `exec.Command/CommandContext()` themselves do not return an error. In the standard library, the resulting `exec.Cmd` struct stores the LookPath error in a private field, but that functionality isn't available to us. 
diff --git a/vendor/github.com/cli/safeexec/lookpath.go b/vendor/github.com/cli/safeexec/lookpath.go new file mode 100644 index 0000000..e649ca7 --- /dev/null +++ b/vendor/github.com/cli/safeexec/lookpath.go @@ -0,0 +1,17 @@ +//go:build !windows && go1.19 +// +build !windows,go1.19 + +package safeexec + +import ( + "errors" + "os/exec" +) + +func LookPath(file string) (string, error) { + path, err := exec.LookPath(file) + if errors.Is(err, exec.ErrDot) { + return path, nil + } + return path, err +} diff --git a/vendor/github.com/cli/safeexec/lookpath_1.18.go b/vendor/github.com/cli/safeexec/lookpath_1.18.go new file mode 100644 index 0000000..bb4a27e --- /dev/null +++ b/vendor/github.com/cli/safeexec/lookpath_1.18.go @@ -0,0 +1,10 @@ +//go:build !windows && !go1.19 +// +build !windows,!go1.19 + +package safeexec + +import "os/exec" + +func LookPath(file string) (string, error) { + return exec.LookPath(file) +} diff --git a/vendor/github.com/cli/safeexec/lookpath_windows.go b/vendor/github.com/cli/safeexec/lookpath_windows.go new file mode 100644 index 0000000..19b3e52 --- /dev/null +++ b/vendor/github.com/cli/safeexec/lookpath_windows.go @@ -0,0 +1,120 @@ +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Package safeexec provides alternatives for exec package functions to avoid +// accidentally executing binaries found in the current working directory on +// Windows. +package safeexec + +import ( + "os" + "os/exec" + "path/filepath" + "strings" +) + +func chkStat(file string) error { + d, err := os.Stat(file) + if err != nil { + return err + } + if d.IsDir() { + return os.ErrPermission + } + return nil +} + +func hasExt(file string) bool { + i := strings.LastIndex(file, ".") + if i < 0 { + return false + } + return strings.LastIndexAny(file, `:\/`) < i +} + +func findExecutable(file string, exts []string) (string, error) { + if len(exts) == 0 { + return file, chkStat(file) + } + if hasExt(file) { + if chkStat(file) == nil { + return file, nil + } + } + for _, e := range exts { + if f := file + e; chkStat(f) == nil { + return f, nil + } + } + return "", os.ErrNotExist +} + +// LookPath searches for an executable named file in the +// directories named by the PATH environment variable. +// If file contains a slash, it is tried directly and the PATH is not consulted. +// LookPath also uses PATHEXT environment variable to match +// a suitable candidate. 
+// The result may be an absolute path or a path relative to the current directory. +func LookPath(file string) (string, error) { + var exts []string + x := os.Getenv(`PATHEXT`) + if x != "" { + for _, e := range strings.Split(strings.ToLower(x), `;`) { + if e == "" { + continue + } + if e[0] != '.' { + e = "." + e + } + exts = append(exts, e) + } + } else { + exts = []string{".com", ".exe", ".bat", ".cmd"} + } + + if strings.ContainsAny(file, `:\/`) { + if f, err := findExecutable(file, exts); err == nil { + return f, nil + } else { + return "", &exec.Error{file, err} + } + } + + // https://github.com/golang/go/issues/38736 + // if f, err := findExecutable(filepath.Join(".", file), exts); err == nil { + // return f, nil + // } + + path := os.Getenv("path") + for _, dir := range filepath.SplitList(path) { + if f, err := findExecutable(filepath.Join(dir, file), exts); err == nil { + return f, nil + } + } + return "", &exec.Error{file, exec.ErrNotFound} +} diff --git a/vendor/github.com/couchbase/vellum/.travis.yml b/vendor/github.com/couchbase/vellum/.travis.yml new file mode 100644 index 0000000..229edf2 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/.travis.yml @@ -0,0 +1,22 @@ +sudo: false + +language: go + +go: + - "1.9.x" + - "1.10.x" + - "1.11.x" + +script: + - go get github.com/mattn/goveralls + - go get -u github.com/kisielk/errcheck + - go test -v $(go list ./... 
| grep -v vendor/) + - go test -race + - go vet + - errcheck + - go test -coverprofile=profile.out -covermode=count + - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then goveralls -service=travis-ci -coverprofile=profile.out -repotoken $COVERALLS; fi' + +notifications: + email: + - marty.schoch@gmail.com diff --git a/vendor/github.com/couchbase/vellum/CONTRIBUTING.md b/vendor/github.com/couchbase/vellum/CONTRIBUTING.md new file mode 100644 index 0000000..b85ec82 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/CONTRIBUTING.md @@ -0,0 +1,16 @@ +# Contributing to Vellum + +We look forward to your contributions, but ask that you first review these guidelines. + +### Sign the CLA + +As Vellum is a Couchbase project we require contributors accept the [Couchbase Contributor License Agreement](http://review.couchbase.org/static/individual_agreement.html). To sign this agreement log into the Couchbase [code review tool](http://review.couchbase.org/). The Vellum project does not use this code review tool but it is still used to track acceptance of the contributor license agreements. + +### Submitting a Pull Request + +All types of contributions are welcome, but please keep the following in mind: + +- If you're planning a large change, you should really discuss it in a github issue first. This helps avoid duplicate effort and spending time on something that may not be merged. +- Existing tests should continue to pass, new tests for the contribution are nice to have. +- All code should have gone through `go fmt` +- All code should pass `go vet` diff --git a/vendor/github.com/couchbase/vellum/LICENSE b/vendor/github.com/couchbase/vellum/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/vendor/github.com/couchbase/vellum/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/vendor/github.com/couchbase/vellum/README.md b/vendor/github.com/couchbase/vellum/README.md new file mode 100644 index 0000000..e5c4a8b --- /dev/null +++ b/vendor/github.com/couchbase/vellum/README.md @@ -0,0 +1,183 @@ +# ![vellum](docs/logo.png) vellum + +[![Tests](https://github.com/couchbase/vellum/workflows/Tests/badge.svg?branch=master&event=push)](https://github.com/couchbase/vellum/actions?query=workflow%3ATests+event%3Apush+branch%3Amaster) +[![Coverage Status](https://coveralls.io/repos/github/couchbase/vellum/badge.svg?branch=master)](https://coveralls.io/github/couchbase/vellum?branch=master) +[![GoDoc](https://godoc.org/github.com/couchbase/vellum?status.svg)](https://godoc.org/github.com/couchbase/vellum) +[![Go Report Card](https://goreportcard.com/badge/github.com/couchbase/vellum)](https://goreportcard.com/report/github.com/couchbase/vellum) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + +A Go library implementing an FST (finite state transducer) capable of: + - mapping between keys ([]byte) and a value (uint64) + - enumerating keys in lexicographic order + +Some additional goals of this implementation: + - bounded memory use while building the FST + - streaming out FST data while building + - mmap FST runtime to support very large FTSs (optional) + +## Usage + +### Building an FST + +To build an FST, create a new builder using the `New()` method. This method takes an `io.Writer` as an argument. As the FST is being built, data will be streamed to the writer as soon as possible. With this builder you **MUST** insert keys in lexicographic order. Inserting keys out of order will result in an error. After inserting the last key into the builder, you **MUST** call `Close()` on the builder. This will flush all remaining data to the underlying writer. 
+ +In memory: +```go + var buf bytes.Buffer + builder, err := vellum.New(&buf, nil) + if err != nil { + log.Fatal(err) + } +``` + +To disk: +```go + f, err := os.Create("/tmp/vellum.fst") + if err != nil { + log.Fatal(err) + } + builder, err := vellum.New(f, nil) + if err != nil { + log.Fatal(err) + } +``` + +**MUST** insert keys in lexicographic order: +```go +err = builder.Insert([]byte("cat"), 1) +if err != nil { + log.Fatal(err) +} + +err = builder.Insert([]byte("dog"), 2) +if err != nil { + log.Fatal(err) +} + +err = builder.Insert([]byte("fish"), 3) +if err != nil { + log.Fatal(err) +} + +err = builder.Close() +if err != nil { + log.Fatal(err) +} +``` + +### Using an FST + +After closing the builder, the data can be used to instantiate an FST. If the data was written to disk, you can use the `Open()` method to mmap the file. If the data is already in memory, or you wish to load/mmap the data yourself, you can instantiate the FST with the `Load()` method. + +Load in memory: +```go + fst, err := vellum.Load(buf.Bytes()) + if err != nil { + log.Fatal(err) + } +``` + +Open from disk: +```go + fst, err := vellum.Open("/tmp/vellum.fst") + if err != nil { + log.Fatal(err) + } +``` + +Get key/value: +```go + val, exists, err = fst.Get([]byte("dog")) + if err != nil { + log.Fatal(err) + } + if exists { + fmt.Printf("contains dog with val: %d\n", val) + } else { + fmt.Printf("does not contain dog") + } +``` + +Iterate key/values: +```go + itr, err := fst.Iterator(startKeyInclusive, endKeyExclusive) + for err == nil { + key, val := itr.Current() + fmt.Printf("contains key: %s val: %d", key, val) + err = itr.Next() + } + if err != nil { + log.Fatal(err) + } +``` + +### How does the FST get built? + +A full example of the implementation is beyond the scope of this README, but let's consider a small example where we want to insert 3 key/value pairs. + +First we insert "are" with the value 4. + +![step1](docs/demo1.png) + +Next, we insert "ate" with the value 2. 
+ +![step2](docs/demo2.png) + +Notice how the values associated with the transitions were adjusted so that by summing them while traversing we still get the expected value. + +At this point, we see that state 5 looks like state 3, and state 4 looks like state 2. But, we cannot yet combine them because future inserts could change this. + +Now, we insert "see" with value 3. Once it has been added, we now know that states 5 and 4 can longer change. Since they are identical to 3 and 2, we replace them. + +![step3](docs/demo3.png) + +Again, we see that states 7 and 8 appear to be identical to 2 and 3. + +Having inserted our last key, we call `Close()` on the builder. + +![step4](docs/demo4.png) + +Now, states 7 and 8 can safely be replaced with 2 and 3. + +For additional information, see the references at the bottom of this document. + +### What does the serialized format look like? + +We've broken out a separate document on the [vellum disk format v1](docs/format.md). + +### What if I want to use this on a system that doesn't have mmap? + +The mmap library itself is guarded with system/architecture build tags, but we've also added an additional build tag in vellum. If you'd like to Open() a file based representation of an FST, but not use mmap, you can build the library with the `nommap` build tag. NOTE: if you do this, the entire FST will be read into memory. + +### Can I use this with Unicode strings? + +Yes, however this implementation is only aware of the byte representation you choose. In order to find matches, you must work with some canonical byte representation of the string. In the future, some encoding-aware traversals may be possible on top of the lower-level byte transitions. + +### How did this library come to be? + +In my work on the [Bleve](https://github.com/blevesearch/bleve) project I became aware of the power of the FST for many search-related tasks. 
The obvious starting point for such a thing in Go was the [mafsa](https://github.com/smartystreets/mafsa) project. While working with mafsa I encountered some issues. First, it did not stream data to disk while building. Second, it chose to use a rune as the fundamental unit of transition in the FST, but I felt using a byte would be more powerful in the end. My hope is that higher-level encoding-aware traversals will be possible when necessary. Finally, as I reported bugs and submitted PRs I learned that the mafsa project was mainly a research project and no longer being maintained. I wanted to build something that could be used in production. As the project advanced more and more techniques from the [BurntSushi/fst](https://github.com/BurntSushi/fst) were adapted to our implementation. + +### Are there tools to work with vellum files? + +Under the cmd/vellum subdirectory, there's a command-line tool which +features subcommands that can allow you to create, inspect and query +vellum files. + +### How can I generate a state transition diagram from a vellum file? + +The vellum command-line tool has a "dot" subcommand that can emit +graphviz dot output data from an input vellum file. The dot file can +in turn be converted into an image using graphviz tools. Example... + + $ vellum dot myFile.vellum > output.dot + $ dot -Tpng output.dot -o output.png + +## Related Work + +Much credit goes to two existing projects: + - [mafsa](https://github.com/smartystreets/mafsa) + - [BurntSushi/fst](https://github.com/BurntSushi/fst) + +Most of the original implementation here started with my digging into the internals of mafsa. As the implementation progressed, I continued to borrow ideas/approaches from the BurntSushi/fst library as well. 
+ +For a great introduction to this topic, please read the blog post [Index 1,600,000,000 Keys with Automata and Rust](http://blog.burntsushi.net/transducers/) diff --git a/vendor/github.com/couchbase/vellum/automaton.go b/vendor/github.com/couchbase/vellum/automaton.go new file mode 100644 index 0000000..70398f2 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/automaton.go @@ -0,0 +1,85 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vellum + +// Automaton represents the general contract of a byte-based finite automaton +type Automaton interface { + + // Start returns the start state + Start() int + + // IsMatch returns true if and only if the state is a match + IsMatch(int) bool + + // CanMatch returns true if and only if it is possible to reach a match + // in zero or more steps + CanMatch(int) bool + + // WillAlwaysMatch returns true if and only if the current state matches + // and will always match no matter what steps are taken + WillAlwaysMatch(int) bool + + // Accept returns the next state given the input to the specified state + Accept(int, byte) int +} + +// AutomatonContains implements an generic Contains() method which works +// on any implementation of Automaton +func AutomatonContains(a Automaton, k []byte) bool { + i := 0 + curr := a.Start() + for a.CanMatch(curr) && i < len(k) { + curr = a.Accept(curr, k[i]) + if curr == noneAddr { + break + } + i++ + } + if i != len(k) { + return false + } + return a.IsMatch(curr) +} + +// AlwaysMatch is an Automaton implementation which always matches +type AlwaysMatch struct{} + +// Start returns the AlwaysMatch start state +func (m *AlwaysMatch) Start() int { + return 0 +} + +// IsMatch always returns true +func (m *AlwaysMatch) IsMatch(int) bool { + return true +} + +// CanMatch always returns true +func (m *AlwaysMatch) CanMatch(int) bool { + return true +} + +// WillAlwaysMatch always returns true +func (m *AlwaysMatch) WillAlwaysMatch(int) bool { + return true +} + +// Accept returns the next AlwaysMatch state +func (m *AlwaysMatch) Accept(int, byte) int { + return 0 +} + +// creating an alwaysMatchAutomaton to avoid unnecessary repeated allocations. 
+var alwaysMatchAutomaton = &AlwaysMatch{} diff --git a/vendor/github.com/couchbase/vellum/builder.go b/vendor/github.com/couchbase/vellum/builder.go new file mode 100644 index 0000000..f793329 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/builder.go @@ -0,0 +1,452 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vellum + +import ( + "bytes" + "io" +) + +var defaultBuilderOpts = &BuilderOpts{ + Encoder: 1, + RegistryTableSize: 10000, + RegistryMRUSize: 2, +} + +// A Builder is used to build a new FST. When possible data is +// streamed out to the underlying Writer as soon as possible. +type Builder struct { + unfinished *unfinishedNodes + registry *registry + last []byte + len int + + lastAddr int + + encoder encoder + opts *BuilderOpts + + builderNodePool *builderNodePool +} + +const noneAddr = 1 +const emptyAddr = 0 + +// NewBuilder returns a new Builder which will stream out the +// underlying representation to the provided Writer as the set is built. 
+func newBuilder(w io.Writer, opts *BuilderOpts) (*Builder, error) { + if opts == nil { + opts = defaultBuilderOpts + } + builderNodePool := &builderNodePool{} + rv := &Builder{ + unfinished: newUnfinishedNodes(builderNodePool), + registry: newRegistry(builderNodePool, opts.RegistryTableSize, opts.RegistryMRUSize), + builderNodePool: builderNodePool, + opts: opts, + lastAddr: noneAddr, + } + + var err error + rv.encoder, err = loadEncoder(opts.Encoder, w) + if err != nil { + return nil, err + } + err = rv.encoder.start() + if err != nil { + return nil, err + } + return rv, nil +} + +func (b *Builder) Reset(w io.Writer) error { + b.unfinished.Reset() + b.registry.Reset() + b.lastAddr = noneAddr + b.encoder.reset(w) + b.last = nil + b.len = 0 + + err := b.encoder.start() + if err != nil { + return err + } + return nil +} + +// Insert the provided value to the set being built. +// NOTE: values must be inserted in lexicographical order. +func (b *Builder) Insert(key []byte, val uint64) error { + // ensure items are added in lexicographic order + if bytes.Compare(key, b.last) < 0 { + return ErrOutOfOrder + } + if len(key) == 0 { + b.len = 1 + b.unfinished.setRootOutput(val) + return nil + } + + prefixLen, out := b.unfinished.findCommonPrefixAndSetOutput(key, val) + b.len++ + err := b.compileFrom(prefixLen) + if err != nil { + return err + } + b.copyLastKey(key) + b.unfinished.addSuffix(key[prefixLen:], out) + + return nil +} + +func (b *Builder) copyLastKey(key []byte) { + if b.last == nil { + b.last = make([]byte, 0, 64) + } else { + b.last = b.last[:0] + } + b.last = append(b.last, key...) +} + +// Close MUST be called after inserting all values. 
+func (b *Builder) Close() error { + err := b.compileFrom(0) + if err != nil { + return err + } + root := b.unfinished.popRoot() + rootAddr, err := b.compile(root) + if err != nil { + return err + } + return b.encoder.finish(b.len, rootAddr) +} + +func (b *Builder) compileFrom(iState int) error { + addr := noneAddr + for iState+1 < len(b.unfinished.stack) { + var node *builderNode + if addr == noneAddr { + node = b.unfinished.popEmpty() + } else { + node = b.unfinished.popFreeze(addr) + } + var err error + addr, err = b.compile(node) + if err != nil { + return nil + } + } + b.unfinished.topLastFreeze(addr) + return nil +} + +func (b *Builder) compile(node *builderNode) (int, error) { + if node.final && len(node.trans) == 0 && + node.finalOutput == 0 { + return 0, nil + } + found, addr, entry := b.registry.entry(node) + if found { + return addr, nil + } + addr, err := b.encoder.encodeState(node, b.lastAddr) + if err != nil { + return 0, err + } + + b.lastAddr = addr + entry.addr = addr + return addr, nil +} + +type unfinishedNodes struct { + stack []*builderNodeUnfinished + + // cache allocates a reasonable number of builderNodeUnfinished + // objects up front and tries to keep reusing them + // because the main data structure is a stack, we assume the + // same access pattern, and don't track items separately + // this means calls get() and pushXYZ() must be paired, + // as well as calls put() and popXYZ() + cache []builderNodeUnfinished + + builderNodePool *builderNodePool +} + +func (u *unfinishedNodes) Reset() { + u.stack = u.stack[:0] + for i := 0; i < len(u.cache); i++ { + u.cache[i] = builderNodeUnfinished{} + } + u.pushEmpty(false) +} + +func newUnfinishedNodes(p *builderNodePool) *unfinishedNodes { + rv := &unfinishedNodes{ + stack: make([]*builderNodeUnfinished, 0, 64), + cache: make([]builderNodeUnfinished, 64), + builderNodePool: p, + } + rv.pushEmpty(false) + return rv +} + +// get new builderNodeUnfinished, reusing cache if possible +func (u 
*unfinishedNodes) get() *builderNodeUnfinished { + if len(u.stack) < len(u.cache) { + return &u.cache[len(u.stack)] + } + // full now allocate a new one + return &builderNodeUnfinished{} +} + +// return builderNodeUnfinished, clearing it for reuse +func (u *unfinishedNodes) put() { + if len(u.stack) >= len(u.cache) { + return + // do nothing, not part of cache + } + u.cache[len(u.stack)] = builderNodeUnfinished{} +} + +func (u *unfinishedNodes) findCommonPrefixAndSetOutput(key []byte, + out uint64) (int, uint64) { + var i int + for i < len(key) { + if i >= len(u.stack) { + break + } + var addPrefix uint64 + if !u.stack[i].hasLastT { + break + } + if u.stack[i].lastIn == key[i] { + commonPre := outputPrefix(u.stack[i].lastOut, out) + addPrefix = outputSub(u.stack[i].lastOut, commonPre) + out = outputSub(out, commonPre) + u.stack[i].lastOut = commonPre + i++ + } else { + break + } + + if addPrefix != 0 { + u.stack[i].addOutputPrefix(addPrefix) + } + } + + return i, out +} + +func (u *unfinishedNodes) pushEmpty(final bool) { + next := u.get() + next.node = u.builderNodePool.Get() + next.node.final = final + u.stack = append(u.stack, next) +} + +func (u *unfinishedNodes) popRoot() *builderNode { + l := len(u.stack) + var unfinished *builderNodeUnfinished + u.stack, unfinished = u.stack[:l-1], u.stack[l-1] + rv := unfinished.node + u.put() + return rv +} + +func (u *unfinishedNodes) popFreeze(addr int) *builderNode { + l := len(u.stack) + var unfinished *builderNodeUnfinished + u.stack, unfinished = u.stack[:l-1], u.stack[l-1] + unfinished.lastCompiled(addr) + rv := unfinished.node + u.put() + return rv +} + +func (u *unfinishedNodes) popEmpty() *builderNode { + l := len(u.stack) + var unfinished *builderNodeUnfinished + u.stack, unfinished = u.stack[:l-1], u.stack[l-1] + rv := unfinished.node + u.put() + return rv +} + +func (u *unfinishedNodes) setRootOutput(out uint64) { + u.stack[0].node.final = true + u.stack[0].node.finalOutput = out +} + +func (u 
*unfinishedNodes) topLastFreeze(addr int) { + last := len(u.stack) - 1 + u.stack[last].lastCompiled(addr) +} + +func (u *unfinishedNodes) addSuffix(bs []byte, out uint64) { + if len(bs) == 0 { + return + } + last := len(u.stack) - 1 + u.stack[last].hasLastT = true + u.stack[last].lastIn = bs[0] + u.stack[last].lastOut = out + for _, b := range bs[1:] { + next := u.get() + next.node = u.builderNodePool.Get() + next.hasLastT = true + next.lastIn = b + next.lastOut = 0 + u.stack = append(u.stack, next) + } + u.pushEmpty(true) +} + +type builderNodeUnfinished struct { + node *builderNode + lastOut uint64 + lastIn byte + hasLastT bool +} + +func (b *builderNodeUnfinished) lastCompiled(addr int) { + if b.hasLastT { + transIn := b.lastIn + transOut := b.lastOut + b.hasLastT = false + b.lastOut = 0 + b.node.trans = append(b.node.trans, transition{ + in: transIn, + out: transOut, + addr: addr, + }) + } +} + +func (b *builderNodeUnfinished) addOutputPrefix(prefix uint64) { + if b.node.final { + b.node.finalOutput = outputCat(prefix, b.node.finalOutput) + } + for i := range b.node.trans { + b.node.trans[i].out = outputCat(prefix, b.node.trans[i].out) + } + if b.hasLastT { + b.lastOut = outputCat(prefix, b.lastOut) + } +} + +type builderNode struct { + finalOutput uint64 + trans []transition + final bool + + // intrusive linked list + next *builderNode +} + +// reset resets the receiver builderNode to a re-usable state. 
+func (n *builderNode) reset() { + n.final = false + n.finalOutput = 0 + for i := range n.trans { + n.trans[i] = emptyTransition + } + n.trans = n.trans[:0] + n.next = nil +} + +func (n *builderNode) equiv(o *builderNode) bool { + if n.final != o.final { + return false + } + if n.finalOutput != o.finalOutput { + return false + } + if len(n.trans) != len(o.trans) { + return false + } + for i, ntrans := range n.trans { + otrans := o.trans[i] + if ntrans.in != otrans.in { + return false + } + if ntrans.addr != otrans.addr { + return false + } + if ntrans.out != otrans.out { + return false + } + } + return true +} + +var emptyTransition = transition{} + +type transition struct { + out uint64 + addr int + in byte +} + +func outputPrefix(l, r uint64) uint64 { + if l < r { + return l + } + return r +} + +func outputSub(l, r uint64) uint64 { + return l - r +} + +func outputCat(l, r uint64) uint64 { + return l + r +} + +// builderNodePool pools builderNodes using a singly linked list. +// +// NB: builderNode lifecylce is described by the following interactions - +// +------------------------+ +----------------------+ +// | Unfinished Nodes | Transfer once | Registry | +// |(not frozen builderNode)|-----builderNode is ------->| (frozen builderNode) | +// +------------------------+ marked frozen +----------------------+ +// ^ | +// | | +// | Put() +// | Get() on +-------------------+ when +// +-new char--------| builderNode Pool |<-----------evicted +// +-------------------+ +type builderNodePool struct { + head *builderNode +} + +func (p *builderNodePool) Get() *builderNode { + if p.head == nil { + return &builderNode{} + } + head := p.head + p.head = p.head.next + return head +} + +func (p *builderNodePool) Put(v *builderNode) { + if v == nil { + return + } + v.reset() + v.next = p.head + p.head = v +} diff --git a/vendor/github.com/couchbase/vellum/common.go b/vendor/github.com/couchbase/vellum/common.go new file mode 100644 index 0000000..cd3e6a0 --- /dev/null +++ 
b/vendor/github.com/couchbase/vellum/common.go @@ -0,0 +1,547 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vellum + +const maxCommon = 1<<6 - 1 + +func encodeCommon(in byte) byte { + val := byte((int(commonInputs[in]) + 1) % 256) + if val > maxCommon { + return 0 + } + return val +} + +func decodeCommon(in byte) byte { + return commonInputsInv[in-1] +} + +var commonInputs = []byte{ + 84, // '\x00' + 85, // '\x01' + 86, // '\x02' + 87, // '\x03' + 88, // '\x04' + 89, // '\x05' + 90, // '\x06' + 91, // '\x07' + 92, // '\x08' + 93, // '\t' + 94, // '\n' + 95, // '\x0b' + 96, // '\x0c' + 97, // '\r' + 98, // '\x0e' + 99, // '\x0f' + 100, // '\x10' + 101, // '\x11' + 102, // '\x12' + 103, // '\x13' + 104, // '\x14' + 105, // '\x15' + 106, // '\x16' + 107, // '\x17' + 108, // '\x18' + 109, // '\x19' + 110, // '\x1a' + 111, // '\x1b' + 112, // '\x1c' + 113, // '\x1d' + 114, // '\x1e' + 115, // '\x1f' + 116, // ' ' + 80, // '!' + 117, // '"' + 118, // '#' + 79, // '$' + 39, // '%' + 30, // '&' + 81, // "'" + 75, // '(' + 74, // ')' + 82, // '*' + 57, // '+' + 66, // ',' + 16, // '-' + 12, // '.' + 2, // '/' + 19, // '0' + 20, // '1' + 21, // '2' + 27, // '3' + 32, // '4' + 29, // '5' + 35, // '6' + 36, // '7' + 37, // '8' + 34, // '9' + 24, // ':' + 73, // ';' + 119, // '<' + 23, // '=' + 120, // '>' + 40, // '?' 
+ 83, // '@' + 44, // 'A' + 48, // 'B' + 42, // 'C' + 43, // 'D' + 49, // 'E' + 46, // 'F' + 62, // 'G' + 61, // 'H' + 47, // 'I' + 69, // 'J' + 68, // 'K' + 58, // 'L' + 56, // 'M' + 55, // 'N' + 59, // 'O' + 51, // 'P' + 72, // 'Q' + 54, // 'R' + 45, // 'S' + 52, // 'T' + 64, // 'U' + 65, // 'V' + 63, // 'W' + 71, // 'X' + 67, // 'Y' + 70, // 'Z' + 77, // '[' + 121, // '\\' + 78, // ']' + 122, // '^' + 31, // '_' + 123, // '`' + 4, // 'a' + 25, // 'b' + 9, // 'c' + 17, // 'd' + 1, // 'e' + 26, // 'f' + 22, // 'g' + 13, // 'h' + 7, // 'i' + 50, // 'j' + 38, // 'k' + 14, // 'l' + 15, // 'm' + 10, // 'n' + 3, // 'o' + 8, // 'p' + 60, // 'q' + 6, // 'r' + 5, // 's' + 0, // 't' + 18, // 'u' + 33, // 'v' + 11, // 'w' + 41, // 'x' + 28, // 'y' + 53, // 'z' + 124, // '{' + 125, // '|' + 126, // '}' + 76, // '~' + 127, // '\x7f' + 128, // '\x80' + 129, // '\x81' + 130, // '\x82' + 131, // '\x83' + 132, // '\x84' + 133, // '\x85' + 134, // '\x86' + 135, // '\x87' + 136, // '\x88' + 137, // '\x89' + 138, // '\x8a' + 139, // '\x8b' + 140, // '\x8c' + 141, // '\x8d' + 142, // '\x8e' + 143, // '\x8f' + 144, // '\x90' + 145, // '\x91' + 146, // '\x92' + 147, // '\x93' + 148, // '\x94' + 149, // '\x95' + 150, // '\x96' + 151, // '\x97' + 152, // '\x98' + 153, // '\x99' + 154, // '\x9a' + 155, // '\x9b' + 156, // '\x9c' + 157, // '\x9d' + 158, // '\x9e' + 159, // '\x9f' + 160, // '\xa0' + 161, // '¡' + 162, // '¢' + 163, // '£' + 164, // '¤' + 165, // 'Â¥' + 166, // '¦' + 167, // '§' + 168, // '¨' + 169, // '©' + 170, // 'ª' + 171, // '«' + 172, // '¬' + 173, // '\xad' + 174, // '®' + 175, // '¯' + 176, // '°' + 177, // '±' + 178, // '²' + 179, // '³' + 180, // '´' + 181, // 'µ' + 182, // '¶' + 183, // '·' + 184, // '¸' + 185, // '¹' + 186, // 'º' + 187, // '»' + 188, // '¼' + 189, // '½' + 190, // '¾' + 191, // '¿' + 192, // 'À' + 193, // 'Ã' + 194, // 'Â' + 195, // 'Ã' + 196, // 'Ä' + 197, // 'Ã…' + 198, // 'Æ' + 199, // 'Ç' + 200, // 'È' + 201, // 'É' + 202, // 'Ê' + 203, // 
'Ë' + 204, // 'ÃŒ' + 205, // 'Ã' + 206, // 'ÃŽ' + 207, // 'Ã' + 208, // 'Ã' + 209, // 'Ñ' + 210, // 'Ã’' + 211, // 'Ó' + 212, // 'Ô' + 213, // 'Õ' + 214, // 'Ö' + 215, // '×' + 216, // 'Ø' + 217, // 'Ù' + 218, // 'Ú' + 219, // 'Û' + 220, // 'Ãœ' + 221, // 'Ã' + 222, // 'Þ' + 223, // 'ß' + 224, // 'à' + 225, // 'á' + 226, // 'â' + 227, // 'ã' + 228, // 'ä' + 229, // 'Ã¥' + 230, // 'æ' + 231, // 'ç' + 232, // 'è' + 233, // 'é' + 234, // 'ê' + 235, // 'ë' + 236, // 'ì' + 237, // 'í' + 238, // 'î' + 239, // 'ï' + 240, // 'ð' + 241, // 'ñ' + 242, // 'ò' + 243, // 'ó' + 244, // 'ô' + 245, // 'õ' + 246, // 'ö' + 247, // '÷' + 248, // 'ø' + 249, // 'ù' + 250, // 'ú' + 251, // 'û' + 252, // 'ü' + 253, // 'ý' + 254, // 'þ' + 255, // 'ÿ' +} + +var commonInputsInv = []byte{ + 't', + 'e', + '/', + 'o', + 'a', + 's', + 'r', + 'i', + 'p', + 'c', + 'n', + 'w', + '.', + 'h', + 'l', + 'm', + '-', + 'd', + 'u', + '0', + '1', + '2', + 'g', + '=', + ':', + 'b', + 'f', + '3', + 'y', + '5', + '&', + '_', + '4', + 'v', + '9', + '6', + '7', + '8', + 'k', + '%', + '?', + 'x', + 'C', + 'D', + 'A', + 'S', + 'F', + 'I', + 'B', + 'E', + 'j', + 'P', + 'T', + 'z', + 'R', + 'N', + 'M', + '+', + 'L', + 'O', + 'q', + 'H', + 'G', + 'W', + 'U', + 'V', + ',', + 'Y', + 'K', + 'J', + 'Z', + 'X', + 'Q', + ';', + ')', + '(', + '~', + '[', + ']', + '$', + '!', + '\'', + '*', + '@', + '\x00', + '\x01', + '\x02', + '\x03', + '\x04', + '\x05', + '\x06', + '\x07', + '\x08', + '\t', + '\n', + '\x0b', + '\x0c', + '\r', + '\x0e', + '\x0f', + '\x10', + '\x11', + '\x12', + '\x13', + '\x14', + '\x15', + '\x16', + '\x17', + '\x18', + '\x19', + '\x1a', + '\x1b', + '\x1c', + '\x1d', + '\x1e', + '\x1f', + ' ', + '"', + '#', + '<', + '>', + '\\', + '^', + '`', + '{', + '|', + '}', + '\x7f', + '\x80', + '\x81', + '\x82', + '\x83', + '\x84', + '\x85', + '\x86', + '\x87', + '\x88', + '\x89', + '\x8a', + '\x8b', + '\x8c', + '\x8d', + '\x8e', + '\x8f', + '\x90', + '\x91', + '\x92', + '\x93', + '\x94', + '\x95', + '\x96', + 
'\x97', + '\x98', + '\x99', + '\x9a', + '\x9b', + '\x9c', + '\x9d', + '\x9e', + '\x9f', + '\xa0', + '\xa1', + '\xa2', + '\xa3', + '\xa4', + '\xa5', + '\xa6', + '\xa7', + '\xa8', + '\xa9', + '\xaa', + '\xab', + '\xac', + '\xad', + '\xae', + '\xaf', + '\xb0', + '\xb1', + '\xb2', + '\xb3', + '\xb4', + '\xb5', + '\xb6', + '\xb7', + '\xb8', + '\xb9', + '\xba', + '\xbb', + '\xbc', + '\xbd', + '\xbe', + '\xbf', + '\xc0', + '\xc1', + '\xc2', + '\xc3', + '\xc4', + '\xc5', + '\xc6', + '\xc7', + '\xc8', + '\xc9', + '\xca', + '\xcb', + '\xcc', + '\xcd', + '\xce', + '\xcf', + '\xd0', + '\xd1', + '\xd2', + '\xd3', + '\xd4', + '\xd5', + '\xd6', + '\xd7', + '\xd8', + '\xd9', + '\xda', + '\xdb', + '\xdc', + '\xdd', + '\xde', + '\xdf', + '\xe0', + '\xe1', + '\xe2', + '\xe3', + '\xe4', + '\xe5', + '\xe6', + '\xe7', + '\xe8', + '\xe9', + '\xea', + '\xeb', + '\xec', + '\xed', + '\xee', + '\xef', + '\xf0', + '\xf1', + '\xf2', + '\xf3', + '\xf4', + '\xf5', + '\xf6', + '\xf7', + '\xf8', + '\xf9', + '\xfa', + '\xfb', + '\xfc', + '\xfd', + '\xfe', + '\xff', +} diff --git a/vendor/github.com/couchbase/vellum/decoder_v1.go b/vendor/github.com/couchbase/vellum/decoder_v1.go new file mode 100644 index 0000000..d56e61d --- /dev/null +++ b/vendor/github.com/couchbase/vellum/decoder_v1.go @@ -0,0 +1,314 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vellum + +import ( + "bytes" + "encoding/binary" + "fmt" + "strconv" +) + +func init() { + registerDecoder(versionV1, func(data []byte) decoder { + return newDecoderV1(data) + }) +} + +type decoderV1 struct { + data []byte +} + +func newDecoderV1(data []byte) *decoderV1 { + return &decoderV1{ + data: data, + } +} + +func (d *decoderV1) getRoot() int { + if len(d.data) < footerSizeV1 { + return noneAddr + } + footer := d.data[len(d.data)-footerSizeV1:] + root := binary.LittleEndian.Uint64(footer[8:]) + return int(root) +} + +func (d *decoderV1) getLen() int { + if len(d.data) < footerSizeV1 { + return 0 + } + footer := d.data[len(d.data)-footerSizeV1:] + dlen := binary.LittleEndian.Uint64(footer) + return int(dlen) +} + +func (d *decoderV1) stateAt(addr int, prealloc fstState) (fstState, error) { + state, ok := prealloc.(*fstStateV1) + if ok && state != nil { + *state = fstStateV1{} // clear the struct + } else { + state = &fstStateV1{} + } + err := state.at(d.data, addr) + if err != nil { + return nil, err + } + return state, nil +} + +type fstStateV1 struct { + data []byte + top int + bottom int + numTrans int + + // single trans only + singleTransChar byte + singleTransNext bool + singleTransAddr uint64 + singleTransOut uint64 + + // shared + transSize int + outSize int + + // multiple trans only + final bool + transTop int + transBottom int + destTop int + destBottom int + outTop int + outBottom int + outFinal int +} + +func (f *fstStateV1) isEncodedSingle() bool { + if f.data[f.top]>>7 > 0 { + return true + } + return false +} + +func (f *fstStateV1) at(data []byte, addr int) error { + f.data = data + if addr == emptyAddr { + return f.atZero() + } else if addr == noneAddr { + return f.atNone() + } + if addr > len(data) || addr < 16 { + return fmt.Errorf("invalid address %d/%d", addr, len(data)) + } + f.top = addr + f.bottom = addr + if f.isEncodedSingle() { + return f.atSingle(data, addr) + } + return f.atMulti(data, addr) +} + +func (f *fstStateV1) 
atZero() error { + f.top = 0 + f.bottom = 1 + f.numTrans = 0 + f.final = true + f.outFinal = 0 + return nil +} + +func (f *fstStateV1) atNone() error { + f.top = 0 + f.bottom = 1 + f.numTrans = 0 + f.final = false + f.outFinal = 0 + return nil +} + +func (f *fstStateV1) atSingle(data []byte, addr int) error { + // handle single transition case + f.numTrans = 1 + f.singleTransNext = data[f.top]&transitionNext > 0 + f.singleTransChar = data[f.top] & maxCommon + if f.singleTransChar == 0 { + f.bottom-- // extra byte for uncommon + f.singleTransChar = data[f.bottom] + } else { + f.singleTransChar = decodeCommon(f.singleTransChar) + } + if f.singleTransNext { + // now we know the bottom, can compute next addr + f.singleTransAddr = uint64(f.bottom - 1) + f.singleTransOut = 0 + } else { + f.bottom-- // extra byte with pack sizes + f.transSize, f.outSize = decodePackSize(data[f.bottom]) + f.bottom -= f.transSize // exactly one trans + f.singleTransAddr = readPackedUint(data[f.bottom : f.bottom+f.transSize]) + if f.outSize > 0 { + f.bottom -= f.outSize // exactly one out (could be length 0 though) + f.singleTransOut = readPackedUint(data[f.bottom : f.bottom+f.outSize]) + } else { + f.singleTransOut = 0 + } + // need to wait till we know bottom + if f.singleTransAddr != 0 { + f.singleTransAddr = uint64(f.bottom) - f.singleTransAddr + } + } + return nil +} + +func (f *fstStateV1) atMulti(data []byte, addr int) error { + // handle multiple transitions case + f.final = data[f.top]&stateFinal > 0 + f.numTrans = int(data[f.top] & maxNumTrans) + if f.numTrans == 0 { + f.bottom-- // extra byte for number of trans + f.numTrans = int(data[f.bottom]) + if f.numTrans == 1 { + // can't really be 1 here, this is special case that means 256 + f.numTrans = 256 + } + } + f.bottom-- // extra byte with pack sizes + f.transSize, f.outSize = decodePackSize(data[f.bottom]) + + f.transTop = f.bottom + f.bottom -= f.numTrans // one byte for each transition + f.transBottom = f.bottom + + f.destTop 
= f.bottom + f.bottom -= f.numTrans * f.transSize + f.destBottom = f.bottom + + if f.outSize > 0 { + f.outTop = f.bottom + f.bottom -= f.numTrans * f.outSize + f.outBottom = f.bottom + if f.final { + f.bottom -= f.outSize + f.outFinal = f.bottom + } + } + return nil +} + +func (f *fstStateV1) Address() int { + return f.top +} + +func (f *fstStateV1) Final() bool { + return f.final +} + +func (f *fstStateV1) FinalOutput() uint64 { + if f.final && f.outSize > 0 { + return readPackedUint(f.data[f.outFinal : f.outFinal+f.outSize]) + } + return 0 +} + +func (f *fstStateV1) NumTransitions() int { + return f.numTrans +} + +func (f *fstStateV1) TransitionAt(i int) byte { + if f.isEncodedSingle() { + return f.singleTransChar + } + transitionKeys := f.data[f.transBottom:f.transTop] + return transitionKeys[f.numTrans-i-1] +} + +func (f *fstStateV1) TransitionFor(b byte) (int, int, uint64) { + if f.isEncodedSingle() { + if f.singleTransChar == b { + return 0, int(f.singleTransAddr), f.singleTransOut + } + return -1, noneAddr, 0 + } + transitionKeys := f.data[f.transBottom:f.transTop] + pos := bytes.IndexByte(transitionKeys, b) + if pos < 0 { + return -1, noneAddr, 0 + } + transDests := f.data[f.destBottom:f.destTop] + dest := int(readPackedUint(transDests[pos*f.transSize : pos*f.transSize+f.transSize])) + if dest > 0 { + // convert delta + dest = f.bottom - dest + } + transVals := f.data[f.outBottom:f.outTop] + var out uint64 + if f.outSize > 0 { + out = readPackedUint(transVals[pos*f.outSize : pos*f.outSize+f.outSize]) + } + return f.numTrans - pos - 1, dest, out +} + +func (f *fstStateV1) String() string { + rv := "" + rv += fmt.Sprintf("State: %d (%#x)", f.top, f.top) + if f.final { + rv += " final" + fout := f.FinalOutput() + if fout != 0 { + rv += fmt.Sprintf(" (%d)", fout) + } + } + rv += "\n" + rv += fmt.Sprintf("Data: % x\n", f.data[f.bottom:f.top+1]) + + for i := 0; i < f.numTrans; i++ { + transChar := f.TransitionAt(i) + _, transDest, transOut := 
f.TransitionFor(transChar) + rv += fmt.Sprintf(" - %d (%#x) '%s' ---> %d (%#x) with output: %d", transChar, transChar, string(transChar), transDest, transDest, transOut) + rv += "\n" + } + if f.numTrans == 0 { + rv += "\n" + } + return rv +} + +func (f *fstStateV1) DotString(num int) string { + rv := "" + label := fmt.Sprintf("%d", num) + final := "" + if f.final { + final = ",peripheries=2" + } + rv += fmt.Sprintf(" %d [label=\"%s\"%s];\n", f.top, label, final) + + for i := 0; i < f.numTrans; i++ { + transChar := f.TransitionAt(i) + _, transDest, transOut := f.TransitionFor(transChar) + out := "" + if transOut != 0 { + out = fmt.Sprintf("/%d", transOut) + } + rv += fmt.Sprintf(" %d -> %d [label=\"%s%s\"];\n", f.top, transDest, escapeInput(transChar), out) + } + + return rv +} + +func escapeInput(b byte) string { + x := strconv.AppendQuoteRune(nil, rune(b)) + return string(x[1:(len(x) - 1)]) +} diff --git a/vendor/github.com/couchbase/vellum/encoder_v1.go b/vendor/github.com/couchbase/vellum/encoder_v1.go new file mode 100644 index 0000000..0651fc8 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/encoder_v1.go @@ -0,0 +1,227 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
package vellum

import (
	"encoding/binary"
	"fmt"
	"io"
)

// Version-1 encoding constants.
const versionV1 = 1
const oneTransition = 1 << 7  // edge-header bit: state has exactly one transition
const transitionNext = 1 << 6 // edge-header bit: the single transition targets the immediately preceding state
const stateFinal = 1 << 6     // edge-header bit (many-transition layout): state is final
const footerSizeV1 = 16       // footer layout: 8 bytes entry count + 8 bytes root address

func init() {
	// Make the v1 encoder discoverable through the version registry.
	registerEncoder(versionV1, func(w io.Writer) encoder {
		return newEncoderV1(w)
	})
}

// encoderV1 serializes builder states using version 1 of the vellum
// FST file format.
type encoderV1 struct {
	bw *writer
}

// newEncoderV1 returns an encoderV1 writing to w.
func newEncoderV1(w io.Writer) *encoderV1 {
	return &encoderV1{
		bw: newWriter(w),
	}
}

// reset prepares the encoder for reuse against a new underlying writer.
func (e *encoderV1) reset(w io.Writer) {
	e.bw.Reset(w)
}

// start writes the fixed-size file header: the format version followed by
// the FST type (currently always 0).
func (e *encoderV1) start() error {
	header := make([]byte, headerSize)
	binary.LittleEndian.PutUint64(header, versionV1)
	binary.LittleEndian.PutUint64(header[8:], uint64(0)) // type
	n, err := e.bw.Write(header)
	if err != nil {
		return err
	}
	if n != headerSize {
		return fmt.Errorf("short write of header %d/%d", n, headerSize)
	}
	return nil
}

// encodeState writes one builder state, choosing the most compact physical
// layout, and returns the address at which the state was encoded.
// lastAddr is the address of the most recently written state, enabling the
// transitionNext optimization (target elided when it is the previous state).
func (e *encoderV1) encodeState(s *builderNode, lastAddr int) (int, error) {
	if len(s.trans) == 0 && s.final && s.finalOutput == 0 {
		// empty final state with no output: the well-known address 0
		return 0, nil
	} else if len(s.trans) != 1 || s.final {
		return e.encodeStateMany(s)
	} else if !s.final && s.trans[0].out == 0 && s.trans[0].addr == lastAddr {
		// single transition, no output, target is the previous state:
		// most compact form, destination address elided entirely
		return e.encodeStateOneFinish(s, transitionNext)
	}
	return e.encodeStateOne(s)
}

// encodeStateOne writes a state with exactly one transition whose
// destination/output must be stored explicitly.
func (e *encoderV1) encodeStateOne(s *builderNode) (int, error) {
	start := uint64(e.bw.counter)
	outPackSize := 0
	if s.trans[0].out != 0 {
		outPackSize = packedSize(s.trans[0].out)
		err := e.bw.WritePackedUintIn(s.trans[0].out, outPackSize)
		if err != nil {
			return 0, err
		}
	}
	// destinations are stored as deltas from the current write position
	delta := deltaAddr(start, uint64(s.trans[0].addr))
	transPackSize := packedSize(delta)
	err := e.bw.WritePackedUintIn(delta, transPackSize)
	if err != nil {
		return 0, err
	}

	packSize := encodePackSize(transPackSize, outPackSize)
	err = e.bw.WriteByte(packSize)
	if err != nil {
		return 0, err
	}

	return e.encodeStateOneFinish(s, 0)
}

// encodeStateOneFinish writes the trailing transition key and edge-header
// byte shared by both single-transition layouts. next is either 0 or
// transitionNext.
func (e *encoderV1) encodeStateOneFinish(s *builderNode, next byte) (int, error) {
	enc := encodeCommon(s.trans[0].in)

	// not a common input
	if enc == 0 {
		err := e.bw.WriteByte(s.trans[0].in)
		if err != nil {
			return 0, err
		}
	}
	err := e.bw.WriteByte(oneTransition | next | enc)
	if err != nil {
		return 0, err
	}

	return e.bw.counter - 1, nil
}

// encodeStateMany writes a state in the general layout: packed outputs,
// packed destination deltas, and transition keys, all in reverse order,
// followed by the pack-size byte and edge header.
func (e *encoderV1) encodeStateMany(s *builderNode) (int, error) {
	start := uint64(e.bw.counter)
	transPackSize := 0
	outPackSize := packedSize(s.finalOutput)
	anyOutputs := s.finalOutput != 0
	// first pass: find the widest delta/output so every entry can use a
	// uniform packed width
	for i := range s.trans {
		delta := deltaAddr(start, uint64(s.trans[i].addr))
		tsize := packedSize(delta)
		if tsize > transPackSize {
			transPackSize = tsize
		}
		osize := packedSize(s.trans[i].out)
		if osize > outPackSize {
			outPackSize = osize
		}
		anyOutputs = anyOutputs || s.trans[i].out != 0
	}
	if !anyOutputs {
		outPackSize = 0
	}

	if anyOutputs {
		// output final value
		if s.final {
			err := e.bw.WritePackedUintIn(s.finalOutput, outPackSize)
			if err != nil {
				return 0, err
			}
		}
		// output transition values (in reverse)
		for j := len(s.trans) - 1; j >= 0; j-- {
			err := e.bw.WritePackedUintIn(s.trans[j].out, outPackSize)
			if err != nil {
				return 0, err
			}
		}
	}

	// output transition dests (in reverse)
	for j := len(s.trans) - 1; j >= 0; j-- {
		delta := deltaAddr(start, uint64(s.trans[j].addr))
		err := e.bw.WritePackedUintIn(delta, transPackSize)
		if err != nil {
			return 0, err
		}
	}

	// output transition keys (in reverse)
	for j := len(s.trans) - 1; j >= 0; j-- {
		err := e.bw.WriteByte(s.trans[j].in)
		if err != nil {
			return 0, err
		}
	}

	packSize := encodePackSize(transPackSize, outPackSize)
	err := e.bw.WriteByte(packSize)
	if err != nil {
		return 0, err
	}

	numTrans := encodeNumTrans(len(s.trans))

	// if number of transitions wont fit in edge header byte
	// write out separately
	if numTrans == 0 {
		if len(s.trans) == 256 {
			// this wouldn't fit in single byte, but reuse value 1
			// which would have always fit in the edge header instead
			err = e.bw.WriteByte(1)
			if err != nil {
				return 0, err
			}
		} else {
			err = e.bw.WriteByte(byte(len(s.trans)))
			if err != nil {
				return 0, err
			}
		}
	}

	// finally write edge header
	if s.final {
		numTrans |= stateFinal
	}
	err = e.bw.WriteByte(numTrans)
	if err != nil {
		return 0, err
	}

	return e.bw.counter - 1, nil
}

// finish writes the fixed-size footer (entry count, then root address) and
// flushes the buffered writer.
func (e *encoderV1) finish(count, rootAddr int) error {
	footer := make([]byte, footerSizeV1)
	binary.LittleEndian.PutUint64(footer, uint64(count))        // number of entries
	binary.LittleEndian.PutUint64(footer[8:], uint64(rootAddr)) // root addr
	n, err := e.bw.Write(footer)
	if err != nil {
		return err
	}
	if n != footerSizeV1 {
		return fmt.Errorf("short write of footer %d/%d", n, footerSizeV1)
	}
	err = e.bw.Flush()
	if err != nil {
		return err
	}
	return nil
}
package vellum

import (
	"encoding/binary"
	"fmt"
	"io"
)

// headerSize is the size in bytes of the fixed file header
// (8 bytes version + 8 bytes type).
const headerSize = 16

// encoderConstructor builds an encoder for a particular format version.
type encoderConstructor func(w io.Writer) encoder

// decoderConstructor builds a decoder for a particular format version.
type decoderConstructor func([]byte) decoder

// Version registries; populated by registerEncoder/registerDecoder from
// each version's init().
var encoders = map[int]encoderConstructor{}
var decoders = map[int]decoderConstructor{}

// encoder is the write-side contract a format version must implement.
type encoder interface {
	start() error
	encodeState(s *builderNode, addr int) (int, error)
	finish(count, rootAddr int) error
	reset(w io.Writer)
}

// loadEncoder returns a new encoder for the requested version, or an error
// when no encoder has been registered for it.
func loadEncoder(ver int, w io.Writer) (encoder, error) {
	if cons, ok := encoders[ver]; ok {
		return cons(w), nil
	}
	return nil, fmt.Errorf("no encoder for version %d registered", ver)
}

// registerEncoder records the constructor for a format version.
func registerEncoder(ver int, cons encoderConstructor) {
	encoders[ver] = cons
}

// decoder is the read-side contract a format version must implement.
type decoder interface {
	getRoot() int
	getLen() int
	stateAt(addr int, prealloc fstState) (fstState, error)
}

// loadDecoder returns a new decoder for the requested version over data,
// or an error when no decoder has been registered for it.
func loadDecoder(ver int, data []byte) (decoder, error) {
	if cons, ok := decoders[ver]; ok {
		return cons(data), nil
	}
	return nil, fmt.Errorf("no decoder for version %d registered", ver)
}

// registerDecoder records the constructor for a format version.
func registerDecoder(ver int, cons decoderConstructor) {
	decoders[ver] = cons
}

// decodeHeader extracts the format version and FST type from the fixed
// little-endian file header.
func decodeHeader(header []byte) (ver int, typ int, err error) {
	if len(header) < headerSize {
		err = fmt.Errorf("invalid header < 16 bytes")
		return
	}
	ver = int(binary.LittleEndian.Uint64(header[0:8]))
	typ = int(binary.LittleEndian.Uint64(header[8:16]))
	return
}

// fstState represents a state inside the FST runtime.
// It is the main contract between the FST impl and the decoder.
// The FST impl should work only with this interface, while only the decoder
// impl knows the physical representation.
type fstState interface {
	Address() int
	Final() bool
	FinalOutput() uint64
	NumTransitions() int
	// TransitionFor returns (position, destination address, output) for the
	// transition on byte b; destination is noneAddr when absent.
	TransitionFor(b byte) (int, int, uint64)
	TransitionAt(i int) byte
}

// ---- fst.go ----

package vellum

import (
	"io"

	"github.com/willf/bitset"
)

// FST is an in-memory representation of a finite state transducer,
// capable of returning the uint64 value associated with
// each []byte key stored, as well as enumerating all of the keys
// in order.
type FST struct {
	f       io.Closer // optional backing file/closer managed by vellum
	ver     int       // encoding version read from the header
	len     int       // number of entries
	typ     int       // FST type read from the header
	data    []byte    // raw (possibly mmap'd) encoded bytes
	decoder decoder   // version-specific decoder over data
}

// new builds an FST over the encoded data, decoding the header and loading
// the matching decoder. NOTE: this unexported constructor shadows the
// builtin new within this package.
func new(data []byte, f io.Closer) (rv *FST, err error) {
	rv = &FST{
		data: data,
		f:    f,
	}

	rv.ver, rv.typ, err = decodeHeader(data)
	if err != nil {
		return nil, err
	}

	rv.decoder, err = loadDecoder(rv.ver, rv.data)
	if err != nil {
		return nil, err
	}

	rv.len = rv.decoder.getLen()

	return rv, nil
}

// Contains returns true if this FST contains the specified key.
func (f *FST) Contains(val []byte) (bool, error) {
	_, exists, err := f.Get(val)
	return exists, err
}
NOTE: a value of zero +// does not imply the key does not exist, you must consult the second +// return value as well. +func (f *FST) Get(input []byte) (uint64, bool, error) { + return f.get(input, nil) +} + +func (f *FST) get(input []byte, prealloc fstState) (uint64, bool, error) { + var total uint64 + curr := f.decoder.getRoot() + state, err := f.decoder.stateAt(curr, prealloc) + if err != nil { + return 0, false, err + } + for _, c := range input { + _, curr, output := state.TransitionFor(c) + if curr == noneAddr { + return 0, false, nil + } + + state, err = f.decoder.stateAt(curr, state) + if err != nil { + return 0, false, err + } + + total += output + } + + if state.Final() { + total += state.FinalOutput() + return total, true, nil + } + return 0, false, nil +} + +// Version returns the encoding version used by this FST instance. +func (f *FST) Version() int { + return f.ver +} + +// Len returns the number of entries in this FST instance. +func (f *FST) Len() int { + return f.len +} + +// Type returns the type of this FST instance. +func (f *FST) Type() int { + return f.typ +} + +// Close will unmap any mmap'd data (if managed by vellum) and it will close +// the backing file (if managed by vellum). You MUST call Close() for any +// FST instance that is created. 
+func (f *FST) Close() error { + if f.f != nil { + err := f.f.Close() + if err != nil { + return err + } + } + f.data = nil + f.decoder = nil + return nil +} + +// Start returns the start state of this Automaton +func (f *FST) Start() int { + return f.decoder.getRoot() +} + +// IsMatch returns if this state is a matching state in this Automaton +func (f *FST) IsMatch(addr int) bool { + match, _ := f.IsMatchWithVal(addr) + return match +} + +// CanMatch returns if this state can ever transition to a matching state +// in this Automaton +func (f *FST) CanMatch(addr int) bool { + if addr == noneAddr { + return false + } + return true +} + +// WillAlwaysMatch returns if from this state the Automaton will always +// be in a matching state +func (f *FST) WillAlwaysMatch(int) bool { + return false +} + +// Accept returns the next state for this Automaton on input of byte b +func (f *FST) Accept(addr int, b byte) int { + next, _ := f.AcceptWithVal(addr, b) + return next +} + +// IsMatchWithVal returns if this state is a matching state in this Automaton +// and also returns the final output value for this state +func (f *FST) IsMatchWithVal(addr int) (bool, uint64) { + s, err := f.decoder.stateAt(addr, nil) + if err != nil { + return false, 0 + } + return s.Final(), s.FinalOutput() +} + +// AcceptWithVal returns the next state for this Automaton on input of byte b +// and also returns the output value for the transition +func (f *FST) AcceptWithVal(addr int, b byte) (int, uint64) { + s, err := f.decoder.stateAt(addr, nil) + if err != nil { + return noneAddr, 0 + } + _, next, output := s.TransitionFor(b) + return next, output +} + +// Iterator returns a new Iterator capable of enumerating the key/value pairs +// between the provided startKeyInclusive and endKeyExclusive. 
// Iterator returns a new Iterator capable of enumerating the key/value pairs
// between the provided startKeyInclusive and endKeyExclusive.
func (f *FST) Iterator(startKeyInclusive, endKeyExclusive []byte) (*FSTIterator, error) {
	return newIterator(f, startKeyInclusive, endKeyExclusive, nil)
}

// Search returns a new Iterator capable of enumerating the key/value pairs
// between the provided startKeyInclusive and endKeyExclusive that also
// satisfy the provided automaton.
func (f *FST) Search(aut Automaton, startKeyInclusive, endKeyExclusive []byte) (*FSTIterator, error) {
	return newIterator(f, startKeyInclusive, endKeyExclusive, aut)
}

// Debug is only intended for debug purposes, it simply asks the underlying
// decoder visit each state, and pass it to the provided callback.
// A bitset records visited addresses so shared suffixes are reported once.
func (f *FST) Debug(callback func(int, interface{}) error) error {

	addr := f.decoder.getRoot()
	set := bitset.New(uint(addr))
	stack := addrStack{addr}

	stateNumber := 0
	stack, addr = stack[:len(stack)-1], stack[len(stack)-1]
	for addr != noneAddr {
		if set.Test(uint(addr)) {
			// already visited via another path
			stack, addr = stack.Pop()
			continue
		}
		set.Set(uint(addr))
		state, err := f.decoder.stateAt(addr, nil)
		if err != nil {
			return err
		}
		err = callback(stateNumber, state)
		if err != nil {
			return err
		}
		// push all destinations for a depth-first walk
		for i := 0; i < state.NumTransitions(); i++ {
			tchar := state.TransitionAt(i)
			_, dest, _ := state.TransitionFor(tchar)
			stack = append(stack, dest)
		}
		stateNumber++
		stack, addr = stack.Pop()
	}

	return nil
}

// addrStack is a simple LIFO stack of state addresses used by Debug.
type addrStack []int

// Pop returns the shortened stack and the popped address, or noneAddr when
// the stack is empty.
func (a addrStack) Pop() (addrStack, int) {
	l := len(a)
	if l < 1 {
		return a, noneAddr
	}
	return a[:l-1], a[l-1]
}

// Reader() returns a Reader instance that a single thread may use to
// retrieve data from the FST.
func (f *FST) Reader() (*Reader, error) {
	return &Reader{f: f}, nil
}

// GetMinKey returns the lexicographically smallest key in the FST, by always
// following the first (smallest-byte) transition until a final state.
func (f *FST) GetMinKey() ([]byte, error) {
	var rv []byte

	curr := f.decoder.getRoot()
	state, err := f.decoder.stateAt(curr, nil)
	if err != nil {
		return nil, err
	}

	for !state.Final() {
		nextTrans := state.TransitionAt(0)
		_, curr, _ = state.TransitionFor(nextTrans)
		state, err = f.decoder.stateAt(curr, state)
		if err != nil {
			return nil, err
		}

		rv = append(rv, nextTrans)
	}

	return rv, nil
}

// GetMaxKey returns the lexicographically largest key in the FST, by always
// following the last (largest-byte) transition while any transitions remain.
func (f *FST) GetMaxKey() ([]byte, error) {
	var rv []byte

	curr := f.decoder.getRoot()
	state, err := f.decoder.stateAt(curr, nil)
	if err != nil {
		return nil, err
	}

	for state.NumTransitions() > 0 {
		nextTrans := state.TransitionAt(state.NumTransitions() - 1)
		_, curr, _ = state.TransitionFor(nextTrans)
		state, err = f.decoder.stateAt(curr, state)
		if err != nil {
			return nil, err
		}

		rv = append(rv, nextTrans)
	}

	return rv, nil
}

// A Reader is meant for a single threaded use; it carries a preallocated
// state so repeated Gets avoid per-call allocation.
type Reader struct {
	f        *FST
	prealloc fstStateV1
}

// Get behaves like FST.Get but reuses the Reader's preallocated state.
func (r *Reader) Get(input []byte) (uint64, bool, error) {
	return r.f.get(input, &r.prealloc)
}

// ---- fst_iterator.go ----

package vellum

import (
	"bytes"
)

// Iterator represents a means of visiting key/value pairs in order.
type Iterator interface {

	// Current() returns the key/value pair currently pointed to.
	// The []byte of the key is ONLY guaranteed to be valid until
	// another call to Next/Seek/Close. If you need it beyond that
	// point you MUST make a copy.
	Current() ([]byte, uint64)

	// Next() advances the iterator to the next key/value pair.
	// If no more key/value pairs exist, ErrIteratorDone is returned.
	Next() error

	// Seek() advances the iterator the specified key, or the next key
	// if it does not exist.
	// If no keys exist after that point, ErrIteratorDone is returned.
	Seek(key []byte) error

	// Reset resets the Iterator's internal state to allow for iterator
	// reuse (e.g. pooling).
	Reset(f *FST, startKeyInclusive, endKeyExclusive []byte, aut Automaton) error

	// Close() frees any resources held by this iterator.
	Close() error
}

// FSTIterator is a structure for iterating key/value pairs in this FST in
// lexicographic order. Iterators should be constructed with the FSTIterator
// method on the parent FST structure.
type FSTIterator struct {
	f   *FST
	aut Automaton

	startKeyInclusive []byte
	endKeyExclusive   []byte

	// parallel stacks describing the path from the root to the current key;
	// entry k corresponds to the k-th byte of keysStack
	statesStack    []fstState
	keysStack      []byte
	keysPosStack   []int
	valsStack      []uint64
	autStatesStack []int

	nextStart []byte // scratch: key at the start of the current next() call
}

// newIterator constructs an iterator and positions it on the first
// qualifying key via Reset.
func newIterator(f *FST, startKeyInclusive, endKeyExclusive []byte,
	aut Automaton) (*FSTIterator, error) {

	rv := &FSTIterator{}
	err := rv.Reset(f, startKeyInclusive, endKeyExclusive, aut)
	if err != nil {
		return nil, err
	}
	return rv, nil
}
// Reset resets the Iterator's internal state to allow for iterator
// reuse (e.g. pooling). A nil aut means "match everything".
func (i *FSTIterator) Reset(f *FST,
	startKeyInclusive, endKeyExclusive []byte, aut Automaton) error {
	if aut == nil {
		aut = alwaysMatchAutomaton
	}

	i.f = f
	i.startKeyInclusive = startKeyInclusive
	i.endKeyExclusive = endKeyExclusive
	i.aut = aut

	return i.pointTo(startKeyInclusive)
}

// pointTo attempts to point us to the specified location, rebuilding the
// path stacks from the root; when key itself does not qualify it advances
// to the next qualifying key via next().
func (i *FSTIterator) pointTo(key []byte) error {
	// tried to seek before start
	if bytes.Compare(key, i.startKeyInclusive) < 0 {
		key = i.startKeyInclusive
	}

	// tried to seek past end
	if i.endKeyExclusive != nil &&
		bytes.Compare(key, i.endKeyExclusive) > 0 {
		key = i.endKeyExclusive
	}

	// reset any state, pointTo always starts over
	i.statesStack = i.statesStack[:0]
	i.keysStack = i.keysStack[:0]
	i.keysPosStack = i.keysPosStack[:0]
	i.valsStack = i.valsStack[:0]
	i.autStatesStack = i.autStatesStack[:0]

	root, err := i.f.decoder.stateAt(i.f.decoder.getRoot(), nil)
	if err != nil {
		return err
	}

	autStart := i.aut.Start()

	maxQ := -1
	// root is always part of the path
	i.statesStack = append(i.statesStack, root)
	i.autStatesStack = append(i.autStatesStack, autStart)
	for j := 0; j < len(key); j++ {
		keyJ := key[j]
		curr := i.statesStack[len(i.statesStack)-1]
		autCurr := i.autStatesStack[len(i.autStatesStack)-1]

		pos, nextAddr, nextVal := curr.TransitionFor(keyJ)
		if nextAddr == noneAddr {
			// needed transition doesn't exist
			// find last trans before the one we needed
			for q := curr.NumTransitions() - 1; q >= 0; q-- {
				if curr.TransitionAt(q) < keyJ {
					maxQ = q
					break
				}
			}
			break
		}
		autNext := i.aut.Accept(autCurr, keyJ)

		next, err := i.f.decoder.stateAt(nextAddr, nil)
		if err != nil {
			return err
		}

		i.statesStack = append(i.statesStack, next)
		i.keysStack = append(i.keysStack, keyJ)
		i.keysPosStack = append(i.keysPosStack, pos)
		i.valsStack = append(i.valsStack, nextVal)
		i.autStatesStack = append(i.autStatesStack, autNext)

		continue
	}

	// if we didn't land exactly on a qualifying key, advance to the next one
	if !i.statesStack[len(i.statesStack)-1].Final() ||
		!i.aut.IsMatch(i.autStatesStack[len(i.autStatesStack)-1]) ||
		bytes.Compare(i.keysStack, key) < 0 {
		return i.next(maxQ)
	}

	return nil
}

// Current returns the key and value currently pointed to by the iterator.
// If the iterator is not pointing at a valid value (because Iterator/Next/Seek)
// returned an error previously, it may return nil,0.
func (i *FSTIterator) Current() ([]byte, uint64) {
	curr := i.statesStack[len(i.statesStack)-1]
	if curr.Final() {
		// value is the sum of the path outputs plus the final output
		var total uint64
		for _, v := range i.valsStack {
			total += v
		}
		total += curr.FinalOutput()
		return i.keysStack, total
	}
	return nil, 0
}

// Next advances this iterator to the next key/value pair. If there is none
// or the advancement goes beyond the configured endKeyExclusive, then
// ErrIteratorDone is returned.
func (i *FSTIterator) Next() error {
	return i.next(-1)
}

// next performs a depth-first search for the next qualifying key.
// lastOffset, when >= 0, is the transition index to resume from at the top
// of the stack (used by pointTo after an exact-path miss).
func (i *FSTIterator) next(lastOffset int) error {
	// remember where we started with keysStack in this next() call
	i.nextStart = append(i.nextStart[:0], i.keysStack...)

	nextOffset := lastOffset + 1
	allowCompare := false

OUTER:
	for true {
		curr := i.statesStack[len(i.statesStack)-1]
		autCurr := i.autStatesStack[len(i.autStatesStack)-1]

		if curr.Final() && i.aut.IsMatch(autCurr) && allowCompare {
			// check to see if new keystack might have gone too far
			if i.endKeyExclusive != nil &&
				bytes.Compare(i.keysStack, i.endKeyExclusive) >= 0 {
				return ErrIteratorDone
			}

			cmp := bytes.Compare(i.keysStack, i.nextStart)
			if cmp > 0 {
				// in final state greater than start key
				return nil
			}
		}

		numTrans := curr.NumTransitions()

	INNER:
		for nextOffset < numTrans {
			t := curr.TransitionAt(nextOffset)

			autNext := i.aut.Accept(autCurr, t)
			if !i.aut.CanMatch(autNext) {
				// TODO: potential optimization to skip nextOffset
				// forwards more directly to something that the
				// automaton likes rather than a linear scan?
				nextOffset += 1
				continue INNER
			}

			pos, nextAddr, v := curr.TransitionFor(t)

			// the next slot in the statesStack might have an
			// fstState instance that we can reuse
			var nextPrealloc fstState
			if len(i.statesStack) < cap(i.statesStack) {
				nextPrealloc = i.statesStack[0:cap(i.statesStack)][len(i.statesStack)]
			}

			// push onto stack
			next, err := i.f.decoder.stateAt(nextAddr, nextPrealloc)
			if err != nil {
				return err
			}

			i.statesStack = append(i.statesStack, next)
			i.keysStack = append(i.keysStack, t)
			i.keysPosStack = append(i.keysPosStack, pos)
			i.valsStack = append(i.valsStack, v)
			i.autStatesStack = append(i.autStatesStack, autNext)

			nextOffset = 0
			allowCompare = true

			continue OUTER
		}

		// no more transitions, so need to backtrack and stack pop
		if len(i.statesStack) <= 1 {
			// stack len is 1 (root), can't go back further, we're done
			break
		}

		// if the top of the stack represents a linear chain of states
		// (i.e., a suffix of nodes linked by single transitions),
		// then optimize by popping the suffix in one shot without
		// going back all the way to the OUTER loop
		var popNum int
		for j := len(i.statesStack) - 1; j > 0; j-- {
			if j == 1 || i.statesStack[j].NumTransitions() != 1 {
				popNum = len(i.statesStack) - 1 - j
				break
			}
		}
		if popNum < 1 { // always pop at least 1 entry from the stacks
			popNum = 1
		}

		nextOffset = i.keysPosStack[len(i.keysPosStack)-popNum] + 1
		allowCompare = false

		i.statesStack = i.statesStack[:len(i.statesStack)-popNum]
		i.keysStack = i.keysStack[:len(i.keysStack)-popNum]
		i.keysPosStack = i.keysPosStack[:len(i.keysPosStack)-popNum]
		i.valsStack = i.valsStack[:len(i.valsStack)-popNum]
		i.autStatesStack = i.autStatesStack[:len(i.autStatesStack)-popNum]
	}

	return ErrIteratorDone
}

// Seek advances this iterator to the specified key/value pair. If this key
// is not in the FST, Current() will return the next largest key. If this
// seek operation would go past the last key, or outside the configured
// startKeyInclusive/endKeyExclusive then ErrIteratorDone is returned.
func (i *FSTIterator) Seek(key []byte) error {
	return i.pointTo(key)
}

// Close will free any resources held by this iterator.
func (i *FSTIterator) Close() error {
	// at the moment we don't do anything,
	// but wanted this for API completeness
	return nil
}
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/vendor/github.com/couchbase/vellum/levenshtein/README.md b/vendor/github.com/couchbase/vellum/levenshtein/README.md new file mode 100644 index 0000000..582b69c --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/README.md @@ -0,0 +1,33 @@ +# levenshtein +levenshtein automaton + +This package makes it fast and simple to build a finite determinic automaton that computes the levenshtein distance from a given string. 
+ +# Sample usage: + +``` +// build a re-usable builder +lb := NewLevenshteinAutomatonBuilder(2, false) + +origTerm := "couchbasefts" +dfa := lb.BuildDfa("couchbases", 2) +ed := dfa.eval([]byte(origTerm)) +if ed.distance() != 2 { + log.Errorf("expected distance 2, actual: %d", ed.distance()) +} + +``` + +This implementation is inspired by [blog post](https://fulmicoton.com/posts/levenshtein/) and is intended to be +a port of original rust implementation: https://github.com/tantivy-search/levenshtein-automata + + +Micro Benchmark Results against the current vellum/levenshtein is as below. + +``` +BenchmarkNewEditDistance1-8 30000 52684 ns/op 89985 B/op 295 allocs/op +BenchmarkOlderEditDistance1-8 10000 132931 ns/op 588892 B/op 363 allocs/op + +BenchmarkNewEditDistance2-8 10000 199127 ns/op 377532 B/op 1019 allocs/op +BenchmarkOlderEditDistance2-8 2000 988109 ns/op 4236609 B/op 1898 allocs/op +``` diff --git a/vendor/github.com/couchbase/vellum/levenshtein/alphabet.go b/vendor/github.com/couchbase/vellum/levenshtein/alphabet.go new file mode 100644 index 0000000..ec28512 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/alphabet.go @@ -0,0 +1,125 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package levenshtein + +import ( + "fmt" + "sort" + "unicode/utf8" +) + +type FullCharacteristicVector []uint32 + +func (fcv FullCharacteristicVector) shiftAndMask(offset, mask uint32) uint32 { + bucketID := offset / 32 + align := offset - bucketID*32 + if align == 0 { + return fcv[bucketID] & mask + } + left := fcv[bucketID] >> align + right := fcv[bucketID+1] << (32 - align) + return (left | right) & mask +} + +type tuple struct { + char rune + fcv FullCharacteristicVector +} + +type sortRunes []rune + +func (s sortRunes) Less(i, j int) bool { + return s[i] < s[j] +} + +func (s sortRunes) Swap(i, j int) { + s[i], s[j] = s[j], s[i] +} + +func (s sortRunes) Len() int { + return len(s) +} + +func sortRune(r []rune) []rune { + sort.Sort(sortRunes(r)) + return r +} + +type Alphabet struct { + charset []tuple + index uint32 +} + +func (a *Alphabet) resetNext() { + a.index = 0 +} + +func (a *Alphabet) next() (rune, FullCharacteristicVector, error) { + if int(a.index) >= len(a.charset) { + return 0, nil, fmt.Errorf("eof") + } + + rv := a.charset[a.index] + a.index++ + return rv.char, rv.fcv, nil +} + +func dedupe(in string) string { + lookUp := make(map[rune]struct{}, len(in)) + var rv string + for len(in) > 0 { + r, size := utf8.DecodeRuneInString(in) + in = in[size:] + if _, ok := lookUp[r]; !ok { + rv += string(r) + lookUp[r] = struct{}{} + } + } + return rv +} + +func queryChars(qChars string) Alphabet { + chars := dedupe(qChars) + inChars := sortRune([]rune(chars)) + charsets := make([]tuple, 0, len(inChars)) + + for _, c := range inChars { + tempChars := qChars + var bits []uint32 + for len(tempChars) > 0 { + var chunk string + if len(tempChars) > 32 { + chunk = tempChars[0:32] + tempChars = tempChars[32:] + } else { + chunk = tempChars + tempChars = tempChars[:0] + } + + chunkBits := uint32(0) + bit := uint32(1) + for _, chr := range chunk { + if chr == c { + chunkBits |= bit + } + bit <<= 1 + } + bits = append(bits, chunkBits) + } + bits = append(bits, 0) + 
charsets = append(charsets, tuple{char: c, fcv: FullCharacteristicVector(bits)}) + } + return Alphabet{charset: charsets} +} diff --git a/vendor/github.com/couchbase/vellum/levenshtein/dfa.go b/vendor/github.com/couchbase/vellum/levenshtein/dfa.go new file mode 100644 index 0000000..d0e43ca --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/dfa.go @@ -0,0 +1,250 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package levenshtein + +import ( + "fmt" + "math" +) + +const SinkState = uint32(0) + +type DFA struct { + transitions [][256]uint32 + distances []Distance + initState int + ed uint8 +} + +/// Returns the initial state +func (d *DFA) initialState() int { + return d.initState +} + +/// Returns the Levenshtein distance associated to the +/// current state. +func (d *DFA) distance(stateId int) Distance { + return d.distances[stateId] +} + +/// Returns the number of states in the `DFA`. +func (d *DFA) numStates() int { + return len(d.transitions) +} + +/// Returns the destination state reached after consuming a given byte. 
+func (d *DFA) transition(fromState int, b uint8) int { + return int(d.transitions[fromState][b]) +} + +func (d *DFA) eval(bytes []uint8) Distance { + state := d.initialState() + + for _, b := range bytes { + state = d.transition(state, b) + } + + return d.distance(state) +} + +func (d *DFA) Start() int { + return int(d.initialState()) +} + +func (d *DFA) IsMatch(state int) bool { + if _, ok := d.distance(state).(Exact); ok { + return true + } + return false +} + +func (d *DFA) CanMatch(state int) bool { + return state > 0 && state < d.numStates() +} + +func (d *DFA) Accept(state int, b byte) int { + return int(d.transition(state, b)) +} + +// WillAlwaysMatch returns if the specified state will always end in a +// matching state. +func (d *DFA) WillAlwaysMatch(state int) bool { + return false +} + +func fill(dest []uint32, val uint32) { + for i := range dest { + dest[i] = val + } +} + +func fillTransitions(dest *[256]uint32, val uint32) { + for i := range dest { + dest[i] = val + } +} + +type Utf8DFAStateBuilder struct { + dfaBuilder *Utf8DFABuilder + stateID uint32 + defaultSuccessor []uint32 +} + +func (sb *Utf8DFAStateBuilder) addTransitionID(fromStateID uint32, b uint8, + toStateID uint32) { + sb.dfaBuilder.transitions[fromStateID][b] = toStateID +} + +func (sb *Utf8DFAStateBuilder) addTransition(in rune, toStateID uint32) { + fromStateID := sb.stateID + chars := []byte(string(in)) + lastByte := chars[len(chars)-1] + + for i, ch := range chars[:len(chars)-1] { + remNumBytes := len(chars) - i - 1 + defaultSuccessor := sb.defaultSuccessor[remNumBytes] + intermediateStateID := sb.dfaBuilder.transitions[fromStateID][ch] + + if intermediateStateID == defaultSuccessor { + intermediateStateID = sb.dfaBuilder.allocate() + fillTransitions(&sb.dfaBuilder.transitions[intermediateStateID], + sb.defaultSuccessor[remNumBytes-1]) + } + + sb.addTransitionID(fromStateID, ch, intermediateStateID) + fromStateID = intermediateStateID + } + + toStateIDDecoded := 
sb.dfaBuilder.getOrAllocate(original(toStateID)) + sb.addTransitionID(fromStateID, lastByte, toStateIDDecoded) +} + +type Utf8StateId uint32 + +func original(stateId uint32) Utf8StateId { + return predecessor(stateId, 0) +} + +func predecessor(stateId uint32, numSteps uint8) Utf8StateId { + return Utf8StateId(stateId*4 + uint32(numSteps)) +} + +// Utf8DFABuilder makes it possible to define a DFA +// that takes unicode character, and build a `DFA` +// that operates on utf-8 encoded +type Utf8DFABuilder struct { + index []uint32 + distances []Distance + transitions [][256]uint32 + initialState uint32 + numStates uint32 + maxNumStates uint32 +} + +func withMaxStates(maxStates uint32) *Utf8DFABuilder { + rv := &Utf8DFABuilder{ + index: make([]uint32, maxStates*2+100), + distances: make([]Distance, 0, maxStates), + transitions: make([][256]uint32, 0, maxStates), + maxNumStates: maxStates, + } + + for i := range rv.index { + rv.index[i] = math.MaxUint32 + } + + return rv +} + +func (dfab *Utf8DFABuilder) allocate() uint32 { + newState := dfab.numStates + dfab.numStates++ + + dfab.distances = append(dfab.distances, Atleast{d: 255}) + dfab.transitions = append(dfab.transitions, [256]uint32{}) + + return newState +} + +func (dfab *Utf8DFABuilder) getOrAllocate(state Utf8StateId) uint32 { + if int(state) >= cap(dfab.index) { + cloneIndex := make([]uint32, int(state)*2) + copy(cloneIndex, dfab.index) + dfab.index = cloneIndex + } + if dfab.index[state] != math.MaxUint32 { + return dfab.index[state] + } + + nstate := dfab.allocate() + dfab.index[state] = nstate + + return nstate +} + +func (dfab *Utf8DFABuilder) setInitialState(iState uint32) { + decodedID := dfab.getOrAllocate(original(iState)) + dfab.initialState = decodedID +} + +func (dfab *Utf8DFABuilder) build(ed uint8) *DFA { + return &DFA{ + transitions: dfab.transitions, + distances: dfab.distances, + initState: int(dfab.initialState), + ed: ed, + } +} + +func (dfab *Utf8DFABuilder) addState(state, default_suc_orig 
uint32, + distance Distance) (*Utf8DFAStateBuilder, error) { + if state > dfab.maxNumStates { + return nil, fmt.Errorf("State id is larger than maxNumStates") + } + + stateID := dfab.getOrAllocate(original(state)) + dfab.distances[stateID] = distance + + defaultSuccID := dfab.getOrAllocate(original(default_suc_orig)) + // creates a chain of states of predecessors of `default_suc_orig`. + // Accepting k-bytes (whatever the bytes are) from `predecessor_states[k-1]` + // leads to the `default_suc_orig` state. + predecessorStates := []uint32{defaultSuccID, + defaultSuccID, + defaultSuccID, + defaultSuccID} + + for numBytes := uint8(1); numBytes < 4; numBytes++ { + predecessorState := predecessor(default_suc_orig, numBytes) + predecessorStateID := dfab.getOrAllocate(predecessorState) + predecessorStates[numBytes] = predecessorStateID + succ := predecessorStates[numBytes-1] + fillTransitions(&dfab.transitions[predecessorStateID], succ) + } + + // 1-byte encoded chars. + fill(dfab.transitions[stateID][0:192], predecessorStates[0]) + // 2-bytes encoded chars. + fill(dfab.transitions[stateID][192:224], predecessorStates[1]) + // 3-bytes encoded chars. + fill(dfab.transitions[stateID][224:240], predecessorStates[2]) + // 4-bytes encoded chars. + fill(dfab.transitions[stateID][240:256], predecessorStates[3]) + + return &Utf8DFAStateBuilder{ + dfaBuilder: dfab, + stateID: stateID, + defaultSuccessor: predecessorStates}, nil +} diff --git a/vendor/github.com/couchbase/vellum/levenshtein/levenshtein.go b/vendor/github.com/couchbase/vellum/levenshtein/levenshtein.go new file mode 100644 index 0000000..aa652df --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/levenshtein.go @@ -0,0 +1,64 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package levenshtein + +import "fmt" + +// StateLimit is the maximum number of states allowed +const StateLimit = 10000 + +// ErrTooManyStates is returned if you attempt to build a Levenshtein +// automaton which requires too many states. +var ErrTooManyStates = fmt.Errorf("dfa contains more than %d states", + StateLimit) + +// LevenshteinAutomatonBuilder wraps a precomputed +// datastructure that allows to produce small (but not minimal) DFA. +type LevenshteinAutomatonBuilder struct { + pDfa *ParametricDFA +} + +// NewLevenshteinAutomatonBuilder creates a +// reusable, threadsafe Levenshtein automaton builder. +// `maxDistance` - maximum distance considered by the automaton. +// `transposition` - assign a distance of 1 for transposition +// +// Building this automaton builder is computationally intensive. +// While it takes only a few milliseconds for `d=2`, it grows +// exponentially with `d`. It is only reasonable to `d <= 5`. +func NewLevenshteinAutomatonBuilder(maxDistance uint8, + transposition bool) (*LevenshteinAutomatonBuilder, error) { + lnfa := newLevenshtein(maxDistance, transposition) + + pdfa, err := fromNfa(lnfa) + if err != nil { + return nil, err + } + + return &LevenshteinAutomatonBuilder{pDfa: pdfa}, nil +} + +// BuildDfa builds the levenshtein automaton for serving +// queries with a given edit distance. 
+func (lab *LevenshteinAutomatonBuilder) BuildDfa(query string, + fuzziness uint8) (*DFA, error) { + return lab.pDfa.buildDfa(query, fuzziness, false) +} + +// MaxDistance returns the MaxEdit distance supported by the +// LevenshteinAutomatonBuilder builder. +func (lab *LevenshteinAutomatonBuilder) MaxDistance() uint8 { + return lab.pDfa.maxDistance +} diff --git a/vendor/github.com/couchbase/vellum/levenshtein/levenshtein_nfa.go b/vendor/github.com/couchbase/vellum/levenshtein/levenshtein_nfa.go new file mode 100644 index 0000000..68db5d1 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/levenshtein_nfa.go @@ -0,0 +1,292 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package levenshtein + +import ( + "math" + "sort" +) + +/// Levenshtein Distance computed by a Levenshtein Automaton. +/// +/// Levenshtein automata can only compute the exact Levenshtein distance +/// up to a given `max_distance`. +/// +/// Over this distance, the automaton will invariably +/// return `Distance::AtLeast(max_distance + 1)`. 
+type Distance interface { + distance() uint8 +} + +type Exact struct { + d uint8 +} + +func (e Exact) distance() uint8 { + return e.d +} + +type Atleast struct { + d uint8 +} + +func (a Atleast) distance() uint8 { + return a.d +} + +func characteristicVector(query []rune, c rune) uint64 { + chi := uint64(0) + for i := 0; i < len(query); i++ { + if query[i] == c { + chi |= 1 << uint64(i) + } + } + return chi +} + +type NFAState struct { + Offset uint32 + Distance uint8 + InTranspose bool +} + +type NFAStates []NFAState + +func (ns NFAStates) Len() int { + return len(ns) +} + +func (ns NFAStates) Less(i, j int) bool { + if ns[i].Offset != ns[j].Offset { + return ns[i].Offset < ns[j].Offset + } + + if ns[i].Distance != ns[j].Distance { + return ns[i].Distance < ns[j].Distance + } + + return !ns[i].InTranspose && ns[j].InTranspose +} + +func (ns NFAStates) Swap(i, j int) { + ns[i], ns[j] = ns[j], ns[i] +} + +func (ns *NFAState) imply(other NFAState) bool { + transposeImply := ns.InTranspose + if !other.InTranspose { + transposeImply = !other.InTranspose + } + + deltaOffset := ns.Offset - other.Offset + if ns.Offset < other.Offset { + deltaOffset = other.Offset - ns.Offset + } + + if transposeImply { + return uint32(other.Distance) >= (uint32(ns.Distance) + deltaOffset) + } + + return uint32(other.Distance) > (uint32(ns.Distance) + deltaOffset) +} + +type MultiState struct { + states []NFAState +} + +func (ms *MultiState) States() []NFAState { + return ms.states +} + +func (ms *MultiState) Clear() { + ms.states = ms.states[:0] +} + +func newMultiState() *MultiState { + return &MultiState{states: make([]NFAState, 0)} +} + +func (ms *MultiState) normalize() uint32 { + minOffset := uint32(math.MaxUint32) + + for _, s := range ms.states { + if s.Offset < minOffset { + minOffset = s.Offset + } + } + if minOffset == uint32(math.MaxUint32) { + minOffset = 0 + } + + for i := 0; i < len(ms.states); i++ { + ms.states[i].Offset -= minOffset + } + + sort.Sort(NFAStates(ms.states)) 
+ + return minOffset +} + +func (ms *MultiState) addStates(nState NFAState) { + + for _, s := range ms.states { + if s.imply(nState) { + return + } + } + + i := 0 + for i < len(ms.states) { + if nState.imply(ms.states[i]) { + ms.states = append(ms.states[:i], ms.states[i+1:]...) + } else { + i++ + } + } + ms.states = append(ms.states, nState) + +} + +func extractBit(bitset uint64, pos uint8) bool { + shift := bitset >> pos + bit := shift & 1 + return bit == uint64(1) +} + +func dist(left, right uint32) uint32 { + if left > right { + return left - right + } + return right - left +} + +type LevenshteinNFA struct { + mDistance uint8 + damerau bool +} + +func newLevenshtein(maxD uint8, transposition bool) *LevenshteinNFA { + return &LevenshteinNFA{mDistance: maxD, + damerau: transposition, + } +} + +func (la *LevenshteinNFA) maxDistance() uint8 { + return la.mDistance +} + +func (la *LevenshteinNFA) msDiameter() uint8 { + return 2*la.mDistance + 1 +} + +func (la *LevenshteinNFA) initialStates() *MultiState { + ms := MultiState{} + nfaState := NFAState{} + ms.addStates(nfaState) + return &ms +} + +func (la *LevenshteinNFA) multistateDistance(ms *MultiState, + queryLen uint32) Distance { + minDistance := Atleast{d: la.mDistance + 1} + for _, s := range ms.states { + t := s.Distance + uint8(dist(queryLen, s.Offset)) + if t <= uint8(la.mDistance) { + if minDistance.distance() > t { + minDistance.d = t + } + } + } + + if minDistance.distance() == la.mDistance+1 { + return Atleast{d: la.mDistance + 1} + } + + return minDistance +} + +func (la *LevenshteinNFA) simpleTransition(state NFAState, + symbol uint64, ms *MultiState) { + + if state.Distance < la.mDistance { + // insertion + ms.addStates(NFAState{Offset: state.Offset, + Distance: state.Distance + 1, + InTranspose: false}) + + // substitution + ms.addStates(NFAState{Offset: state.Offset + 1, + Distance: state.Distance + 1, + InTranspose: false}) + + n := la.mDistance + 1 - state.Distance + for d := uint8(1); d < n; d++ 
{ + if extractBit(symbol, d) { + // for d > 0, as many deletion and character match + ms.addStates(NFAState{Offset: state.Offset + 1 + uint32(d), + Distance: state.Distance + d, + InTranspose: false}) + } + } + + if la.damerau && extractBit(symbol, 1) { + ms.addStates(NFAState{ + Offset: state.Offset, + Distance: state.Distance + 1, + InTranspose: true}) + } + + } + + if extractBit(symbol, 0) { + ms.addStates(NFAState{Offset: state.Offset + 1, + Distance: state.Distance, + InTranspose: false}) + } + + if state.InTranspose && extractBit(symbol, 0) { + ms.addStates(NFAState{Offset: state.Offset + 2, + Distance: state.Distance, + InTranspose: false}) + } + +} + +func (la *LevenshteinNFA) transition(cState *MultiState, + dState *MultiState, scv uint64) { + dState.Clear() + mask := (uint64(1) << la.msDiameter()) - uint64(1) + + for _, state := range cState.states { + cv := (scv >> state.Offset) & mask + la.simpleTransition(state, cv, dState) + } + + sort.Sort(NFAStates(dState.states)) +} + +func (la *LevenshteinNFA) computeDistance(query, other []rune) Distance { + cState := la.initialStates() + nState := newMultiState() + + for _, i := range other { + nState.Clear() + chi := characteristicVector(query, i) + la.transition(cState, nState, chi) + cState, nState = nState, cState + } + + return la.multistateDistance(cState, uint32(len(query))) +} diff --git a/vendor/github.com/couchbase/vellum/levenshtein/parametric_dfa.go b/vendor/github.com/couchbase/vellum/levenshtein/parametric_dfa.go new file mode 100644 index 0000000..d08e5da --- /dev/null +++ b/vendor/github.com/couchbase/vellum/levenshtein/parametric_dfa.go @@ -0,0 +1,349 @@ +// Copyright (c) 2018 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package levenshtein + +import ( + "crypto/md5" + "encoding/json" + "fmt" + "math" +) + +type ParametricState struct { + shapeID uint32 + offset uint32 +} + +func newParametricState() ParametricState { + return ParametricState{} +} + +func (ps *ParametricState) isDeadEnd() bool { + return ps.shapeID == 0 +} + +type Transition struct { + destShapeID uint32 + deltaOffset uint32 +} + +func (t *Transition) apply(state ParametricState) ParametricState { + ps := ParametricState{ + shapeID: t.destShapeID} + // don't need any offset if we are in the dead state, + // this ensures we have only one dead state. 
+ if t.destShapeID != 0 { + ps.offset = state.offset + t.deltaOffset + } + + return ps +} + +type ParametricStateIndex struct { + stateIndex []uint32 + stateQueue []ParametricState + numOffsets uint32 +} + +func newParametricStateIndex(queryLen, + numParamState uint32) ParametricStateIndex { + numOffsets := queryLen + 1 + if numParamState == 0 { + numParamState = numOffsets + } + maxNumStates := numParamState * numOffsets + psi := ParametricStateIndex{ + stateIndex: make([]uint32, maxNumStates), + stateQueue: make([]ParametricState, 0, 150), + numOffsets: numOffsets, + } + + for i := uint32(0); i < maxNumStates; i++ { + psi.stateIndex[i] = math.MaxUint32 + } + return psi +} + +func (psi *ParametricStateIndex) numStates() int { + return len(psi.stateQueue) +} + +func (psi *ParametricStateIndex) maxNumStates() int { + return len(psi.stateIndex) +} + +func (psi *ParametricStateIndex) get(stateID uint32) ParametricState { + return psi.stateQueue[stateID] +} + +func (psi *ParametricStateIndex) getOrAllocate(ps ParametricState) uint32 { + bucket := ps.shapeID*psi.numOffsets + ps.offset + if bucket < uint32(len(psi.stateIndex)) && + psi.stateIndex[bucket] != math.MaxUint32 { + return psi.stateIndex[bucket] + } + nState := uint32(len(psi.stateQueue)) + psi.stateQueue = append(psi.stateQueue, ps) + + psi.stateIndex[bucket] = nState + return nState +} + +type ParametricDFA struct { + distance []uint8 + transitions []Transition + maxDistance uint8 + transitionStride uint32 + diameter uint32 +} + +func (pdfa *ParametricDFA) initialState() ParametricState { + return ParametricState{shapeID: 1} +} + +// Returns true iff whatever characters come afterward, +// we will never reach a shorter distance +func (pdfa *ParametricDFA) isPrefixSink(state ParametricState, queryLen uint32) bool { + if state.isDeadEnd() { + return true + } + + remOffset := queryLen - state.offset + if remOffset < pdfa.diameter { + stateDistances := pdfa.distance[pdfa.diameter*state.shapeID:] + prefixDistance 
:= stateDistances[remOffset] + if prefixDistance > pdfa.maxDistance { + return false + } + + for _, d := range stateDistances { + if d < prefixDistance { + return false + } + } + return true + } + return false +} + +func (pdfa *ParametricDFA) numStates() int { + return len(pdfa.transitions) / int(pdfa.transitionStride) +} + +func min(x, y uint32) uint32 { + if x < y { + return x + } + return y +} + +func (pdfa *ParametricDFA) transition(state ParametricState, + chi uint32) Transition { + return pdfa.transitions[pdfa.transitionStride*state.shapeID+chi] +} + +func (pdfa *ParametricDFA) getDistance(state ParametricState, + qLen uint32) Distance { + remainingOffset := qLen - state.offset + if state.isDeadEnd() || remainingOffset >= pdfa.diameter { + return Atleast{d: pdfa.maxDistance + 1} + } + dist := pdfa.distance[int(pdfa.diameter*state.shapeID)+int(remainingOffset)] + if dist > pdfa.maxDistance { + return Atleast{d: dist} + } + return Exact{d: dist} +} + +func (pdfa *ParametricDFA) computeDistance(left, right string) Distance { + state := pdfa.initialState() + leftChars := []rune(left) + for _, chr := range []rune(right) { + start := state.offset + stop := min(start+pdfa.diameter, uint32(len(leftChars))) + chi := characteristicVector(leftChars[start:stop], chr) + transition := pdfa.transition(state, uint32(chi)) + state = transition.apply(state) + if state.isDeadEnd() { + return Atleast{d: pdfa.maxDistance + 1} + } + } + return pdfa.getDistance(state, uint32(len(left))) +} + +func (pdfa *ParametricDFA) buildDfa(query string, distance uint8, + prefix bool) (*DFA, error) { + qLen := uint32(len([]rune(query))) + alphabet := queryChars(query) + + psi := newParametricStateIndex(qLen, uint32(pdfa.numStates())) + maxNumStates := psi.maxNumStates() + deadEndStateID := psi.getOrAllocate(newParametricState()) + if deadEndStateID != 0 { + return nil, fmt.Errorf("Invalid dead end state") + } + + initialStateID := psi.getOrAllocate(pdfa.initialState()) + dfaBuilder := 
withMaxStates(uint32(maxNumStates)) + mask := uint32((1 << pdfa.diameter) - 1) + + var stateID int + for stateID = 0; stateID < StateLimit; stateID++ { + if stateID == psi.numStates() { + break + } + state := psi.get(uint32(stateID)) + if prefix && pdfa.isPrefixSink(state, qLen) { + distance := pdfa.getDistance(state, qLen) + dfaBuilder.addState(uint32(stateID), uint32(stateID), distance) + } else { + transition := pdfa.transition(state, 0) + defSuccessor := transition.apply(state) + defSuccessorID := psi.getOrAllocate(defSuccessor) + distance := pdfa.getDistance(state, qLen) + stateBuilder, err := dfaBuilder.addState(uint32(stateID), defSuccessorID, distance) + + if err != nil { + return nil, fmt.Errorf("parametric_dfa: buildDfa, err: %v", err) + } + + alphabet.resetNext() + chr, cv, err := alphabet.next() + for err == nil { + chi := cv.shiftAndMask(state.offset, mask) + + transition := pdfa.transition(state, chi) + + destState := transition.apply(state) + + destStateID := psi.getOrAllocate(destState) + + stateBuilder.addTransition(chr, destStateID) + + chr, cv, err = alphabet.next() + } + } + } + + if stateID == StateLimit { + return nil, ErrTooManyStates + } + + dfaBuilder.setInitialState(initialStateID) + return dfaBuilder.build(distance), nil +} + +func fromNfa(nfa *LevenshteinNFA) (*ParametricDFA, error) { + lookUp := newHash() + lookUp.getOrAllocate(*newMultiState()) + initialState := nfa.initialStates() + lookUp.getOrAllocate(*initialState) + + maxDistance := nfa.maxDistance() + msDiameter := nfa.msDiameter() + + numChi := 1 << msDiameter + chiValues := make([]uint64, numChi) + for i := 0; i < numChi; i++ { + chiValues[i] = uint64(i) + } + + transitions := make([]Transition, 0, numChi*int(msDiameter)) + var stateID int + for stateID = 0; stateID < StateLimit; stateID++ { + if stateID == len(lookUp.items) { + break + } + + for _, chi := range chiValues { + destMs := newMultiState() + + ms := lookUp.getFromID(stateID) + + nfa.transition(ms, destMs, chi) + + 
translation := destMs.normalize() + + destID := lookUp.getOrAllocate(*destMs) + + transitions = append(transitions, Transition{ + destShapeID: uint32(destID), + deltaOffset: translation, + }) + } + } + + if stateID == StateLimit { + return nil, ErrTooManyStates + } + + ns := len(lookUp.items) + diameter := int(msDiameter) + + distances := make([]uint8, 0, diameter*ns) + for stateID := 0; stateID < ns; stateID++ { + ms := lookUp.getFromID(stateID) + for offset := 0; offset < diameter; offset++ { + dist := nfa.multistateDistance(ms, uint32(offset)) + distances = append(distances, dist.distance()) + } + } + + return &ParametricDFA{ + diameter: uint32(msDiameter), + transitions: transitions, + maxDistance: maxDistance, + transitionStride: uint32(numChi), + distance: distances, + }, nil +} + +type hash struct { + index map[[16]byte]int + items []MultiState +} + +func newHash() *hash { + return &hash{ + index: make(map[[16]byte]int, 100), + items: make([]MultiState, 0, 100), + } +} + +func (h *hash) getOrAllocate(m MultiState) int { + size := len(h.items) + var exists bool + var pos int + md5 := getHash(&m) + if pos, exists = h.index[md5]; !exists { + h.index[md5] = size + pos = size + h.items = append(h.items, m) + } + return pos +} + +func (h *hash) getFromID(id int) *MultiState { + return &h.items[id] +} + +func getHash(ms *MultiState) [16]byte { + msBytes := []byte{} + for _, state := range ms.states { + jsonBytes, _ := json.Marshal(&state) + msBytes = append(msBytes, jsonBytes...) + } + return md5.Sum(msBytes) +} diff --git a/vendor/github.com/couchbase/vellum/merge_iterator.go b/vendor/github.com/couchbase/vellum/merge_iterator.go new file mode 100644 index 0000000..f00f778 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/merge_iterator.go @@ -0,0 +1,188 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vellum + +import ( + "bytes" +) + +// MergeFunc is used to choose the new value for a key when merging a slice +// of iterators, and the same key is observed with multiple values. +// Values presented to the MergeFunc will be in the same order as the +// original slice creating the MergeIterator. This allows some MergeFunc +// implementations to prioritize one iterator over another. +type MergeFunc func([]uint64) uint64 + +// MergeIterator implements the Iterator interface by traversing a slice +// of iterators and merging the contents of them. If the same key exists +// in mulitipe underlying iterators, a user-provided MergeFunc will be +// invoked to choose the new value. +type MergeIterator struct { + itrs []Iterator + f MergeFunc + currKs [][]byte + currVs []uint64 + + lowK []byte + lowV uint64 + lowIdxs []int + + mergeV []uint64 +} + +// NewMergeIterator creates a new MergeIterator over the provided slice of +// Iterators and with the specified MergeFunc to resolve duplicate keys. 
+func NewMergeIterator(itrs []Iterator, f MergeFunc) (*MergeIterator, error) { + rv := &MergeIterator{ + itrs: itrs, + f: f, + currKs: make([][]byte, len(itrs)), + currVs: make([]uint64, len(itrs)), + lowIdxs: make([]int, 0, len(itrs)), + mergeV: make([]uint64, 0, len(itrs)), + } + rv.init() + if rv.lowK == nil { + return rv, ErrIteratorDone + } + return rv, nil +} + +func (m *MergeIterator) init() { + for i, itr := range m.itrs { + m.currKs[i], m.currVs[i] = itr.Current() + } + m.updateMatches() +} + +func (m *MergeIterator) updateMatches() { + if len(m.itrs) < 1 { + return + } + m.lowK = m.currKs[0] + m.lowIdxs = m.lowIdxs[:0] + m.lowIdxs = append(m.lowIdxs, 0) + for i := 1; i < len(m.itrs); i++ { + if m.currKs[i] == nil { + continue + } + cmp := bytes.Compare(m.currKs[i], m.lowK) + if m.lowK == nil || cmp < 0 { + // reached a new low + m.lowK = m.currKs[i] + m.lowIdxs = m.lowIdxs[:0] + m.lowIdxs = append(m.lowIdxs, i) + } else if cmp == 0 { + m.lowIdxs = append(m.lowIdxs, i) + } + } + if len(m.lowIdxs) > 1 { + // merge multiple values + m.mergeV = m.mergeV[:0] + for _, vi := range m.lowIdxs { + m.mergeV = append(m.mergeV, m.currVs[vi]) + } + m.lowV = m.f(m.mergeV) + } else if len(m.lowIdxs) == 1 { + m.lowV = m.currVs[m.lowIdxs[0]] + } +} + +// Current returns the key and value currently pointed to by this iterator. +// If the iterator is not pointing at a valid value (because Iterator/Next/Seek) +// returned an error previously, it may return nil,0. +func (m *MergeIterator) Current() ([]byte, uint64) { + return m.lowK, m.lowV +} + +// Next advances this iterator to the next key/value pair. If there is none, +// then ErrIteratorDone is returned. 
+func (m *MergeIterator) Next() error { + // move all the current low iterators to next + for _, vi := range m.lowIdxs { + err := m.itrs[vi].Next() + if err != nil && err != ErrIteratorDone { + return err + } + m.currKs[vi], m.currVs[vi] = m.itrs[vi].Current() + } + m.updateMatches() + if m.lowK == nil { + return ErrIteratorDone + } + return nil +} + +// Seek advances this iterator to the specified key/value pair. If this key +// is not in the FST, Current() will return the next largest key. If this +// seek operation would go past the last key, then ErrIteratorDone is returned. +func (m *MergeIterator) Seek(key []byte) error { + for i := range m.itrs { + err := m.itrs[i].Seek(key) + if err != nil && err != ErrIteratorDone { + return err + } + } + m.updateMatches() + if m.lowK == nil { + return ErrIteratorDone + } + return nil +} + +// Close will attempt to close all the underlying Iterators. If any errors +// are encountered, the first will be returned. +func (m *MergeIterator) Close() error { + var rv error + for i := range m.itrs { + // close all iterators, return first error if any + err := m.itrs[i].Close() + if rv == nil { + rv = err + } + } + return rv +} + +// MergeMin chooses the minimum value +func MergeMin(vals []uint64) uint64 { + rv := vals[0] + for _, v := range vals[1:] { + if v < rv { + rv = v + } + } + return rv +} + +// MergeMax chooses the maximum value +func MergeMax(vals []uint64) uint64 { + rv := vals[0] + for _, v := range vals[1:] { + if v > rv { + rv = v + } + } + return rv +} + +// MergeSum sums the values +func MergeSum(vals []uint64) uint64 { + rv := vals[0] + for _, v := range vals[1:] { + rv += v + } + return rv +} diff --git a/vendor/github.com/couchbase/vellum/pack.go b/vendor/github.com/couchbase/vellum/pack.go new file mode 100644 index 0000000..78f3dcd --- /dev/null +++ b/vendor/github.com/couchbase/vellum/pack.go @@ -0,0 +1,55 @@ +// Copyright (c) 2017 Couchbase, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vellum + +func deltaAddr(base, trans uint64) uint64 { + // transition dest of 0 is special case + if trans == 0 { + return 0 + } + return base - trans +} + +const packOutMask = 1<<4 - 1 + +func encodePackSize(transSize, outSize int) byte { + var rv byte + rv = byte(transSize << 4) + rv |= byte(outSize) + return rv +} + +func decodePackSize(pack byte) (transSize int, packSize int) { + transSize = int(pack >> 4) + packSize = int(pack & packOutMask) + return +} + +const maxNumTrans = 1<<6 - 1 + +func encodeNumTrans(n int) byte { + if n <= maxNumTrans { + return byte(n) + } + return 0 +} + +func readPackedUint(data []byte) (rv uint64) { + for i := range data { + shifted := uint64(data[i]) << uint(i*8) + rv |= shifted + } + return +} diff --git a/vendor/github.com/couchbase/vellum/regexp/compile.go b/vendor/github.com/couchbase/vellum/regexp/compile.go new file mode 100644 index 0000000..92284d0 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/regexp/compile.go @@ -0,0 +1,343 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package regexp + +import ( + "regexp/syntax" + "unicode" + + unicode_utf8 "unicode/utf8" + + "github.com/couchbase/vellum/utf8" +) + +type compiler struct { + sizeLimit uint + insts prog + instsPool []inst + + sequences utf8.Sequences + rangeStack utf8.RangeStack + startBytes []byte + endBytes []byte +} + +func newCompiler(sizeLimit uint) *compiler { + return &compiler{ + sizeLimit: sizeLimit, + startBytes: make([]byte, unicode_utf8.UTFMax), + endBytes: make([]byte, unicode_utf8.UTFMax), + } +} + +func (c *compiler) compile(ast *syntax.Regexp) (prog, error) { + err := c.c(ast) + if err != nil { + return nil, err + } + inst := c.allocInst() + inst.op = OpMatch + c.insts = append(c.insts, inst) + return c.insts, nil +} + +func (c *compiler) c(ast *syntax.Regexp) (err error) { + if ast.Flags&syntax.NonGreedy > 1 { + return ErrNoLazy + } + + switch ast.Op { + case syntax.OpEndLine, syntax.OpBeginLine, + syntax.OpBeginText, syntax.OpEndText: + return ErrNoEmpty + case syntax.OpWordBoundary, syntax.OpNoWordBoundary: + return ErrNoWordBoundary + case syntax.OpEmptyMatch: + return nil + case syntax.OpLiteral: + for _, r := range ast.Rune { + if ast.Flags&syntax.FoldCase > 0 { + next := syntax.Regexp{ + Op: syntax.OpCharClass, + Flags: ast.Flags & syntax.FoldCase, + Rune0: [2]rune{r, r}, + } + next.Rune = next.Rune0[0:2] + // try to find more folded runes + for r1 := unicode.SimpleFold(r); r1 != r; r1 = unicode.SimpleFold(r1) { + next.Rune = append(next.Rune, r1, r1) + } + err = c.c(&next) + if err != nil { + return err + } + } else { + c.sequences, 
c.rangeStack, err = utf8.NewSequencesPrealloc( + r, r, c.sequences, c.rangeStack, c.startBytes, c.endBytes) + if err != nil { + return err + } + for _, seq := range c.sequences { + c.compileUtf8Ranges(seq) + } + } + } + case syntax.OpAnyChar: + next := syntax.Regexp{ + Op: syntax.OpCharClass, + Flags: ast.Flags & syntax.FoldCase, + Rune0: [2]rune{0, unicode.MaxRune}, + } + next.Rune = next.Rune0[:2] + return c.c(&next) + case syntax.OpAnyCharNotNL: + next := syntax.Regexp{ + Op: syntax.OpCharClass, + Flags: ast.Flags & syntax.FoldCase, + Rune: []rune{0, 0x09, 0x0B, unicode.MaxRune}, + } + return c.c(&next) + case syntax.OpCharClass: + return c.compileClass(ast) + case syntax.OpCapture: + return c.c(ast.Sub[0]) + case syntax.OpConcat: + for _, sub := range ast.Sub { + err := c.c(sub) + if err != nil { + return err + } + } + return nil + case syntax.OpAlternate: + if len(ast.Sub) == 0 { + return nil + } + jmpsToEnd := make([]uint, 0, len(ast.Sub)-1) + // does not handle last entry + for i := 0; i < len(ast.Sub)-1; i++ { + sub := ast.Sub[i] + split := c.emptySplit() + j1 := c.top() + err := c.c(sub) + if err != nil { + return err + } + jmpsToEnd = append(jmpsToEnd, c.emptyJump()) + j2 := c.top() + c.setSplit(split, j1, j2) + } + // handle last entry + err := c.c(ast.Sub[len(ast.Sub)-1]) + if err != nil { + return err + } + end := uint(len(c.insts)) + for _, jmpToEnd := range jmpsToEnd { + c.setJump(jmpToEnd, end) + } + case syntax.OpQuest: + split := c.emptySplit() + j1 := c.top() + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + j2 := c.top() + c.setSplit(split, j1, j2) + + case syntax.OpStar: + j1 := c.top() + split := c.emptySplit() + j2 := c.top() + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + jmp := c.emptyJump() + j3 := uint(len(c.insts)) + + c.setJump(jmp, j1) + c.setSplit(split, j2, j3) + + case syntax.OpPlus: + j1 := c.top() + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + split := c.emptySplit() + j2 := c.top() + 
c.setSplit(split, j1, j2) + + case syntax.OpRepeat: + if ast.Max == -1 { + for i := 0; i < ast.Min; i++ { + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + } + next := syntax.Regexp{ + Op: syntax.OpStar, + Flags: ast.Flags, + Sub: ast.Sub, + Sub0: ast.Sub0, + Rune: ast.Rune, + Rune0: ast.Rune0, + } + return c.c(&next) + } + for i := 0; i < ast.Min; i++ { + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + } + splits := make([]uint, 0, ast.Max-ast.Min) + starts := make([]uint, 0, ast.Max-ast.Min) + for i := ast.Min; i < ast.Max; i++ { + splits = append(splits, c.emptySplit()) + starts = append(starts, uint(len(c.insts))) + err := c.c(ast.Sub[0]) + if err != nil { + return err + } + } + end := uint(len(c.insts)) + for i := 0; i < len(splits); i++ { + c.setSplit(splits[i], starts[i], end) + } + + } + + return c.checkSize() +} + +func (c *compiler) checkSize() error { + if uint(len(c.insts)*instSize) > c.sizeLimit { + return ErrCompiledTooBig + } + return nil +} + +func (c *compiler) compileClass(ast *syntax.Regexp) error { + if len(ast.Rune) == 0 { + return nil + } + jmps := make([]uint, 0, len(ast.Rune)-2) + // does not do last pair + for i := 0; i < len(ast.Rune)-2; i += 2 { + rstart := ast.Rune[i] + rend := ast.Rune[i+1] + + split := c.emptySplit() + j1 := c.top() + err := c.compileClassRange(rstart, rend) + if err != nil { + return err + } + jmps = append(jmps, c.emptyJump()) + j2 := c.top() + c.setSplit(split, j1, j2) + } + // handle last pair + rstart := ast.Rune[len(ast.Rune)-2] + rend := ast.Rune[len(ast.Rune)-1] + err := c.compileClassRange(rstart, rend) + if err != nil { + return err + } + end := c.top() + for _, jmp := range jmps { + c.setJump(jmp, end) + } + return nil +} + +func (c *compiler) compileClassRange(startR, endR rune) (err error) { + c.sequences, c.rangeStack, err = utf8.NewSequencesPrealloc( + startR, endR, c.sequences, c.rangeStack, c.startBytes, c.endBytes) + if err != nil { + return err + } + jmps := make([]uint, 0, 
len(c.sequences)-1) + // does not do last entry + for i := 0; i < len(c.sequences)-1; i++ { + seq := c.sequences[i] + split := c.emptySplit() + j1 := c.top() + c.compileUtf8Ranges(seq) + jmps = append(jmps, c.emptyJump()) + j2 := c.top() + c.setSplit(split, j1, j2) + } + // handle last entry + c.compileUtf8Ranges(c.sequences[len(c.sequences)-1]) + end := c.top() + for _, jmp := range jmps { + c.setJump(jmp, end) + } + + return nil +} + +func (c *compiler) compileUtf8Ranges(seq utf8.Sequence) { + for _, r := range seq { + inst := c.allocInst() + inst.op = OpRange + inst.rangeStart = r.Start + inst.rangeEnd = r.End + c.insts = append(c.insts, inst) + } +} + +func (c *compiler) emptySplit() uint { + inst := c.allocInst() + inst.op = OpSplit + c.insts = append(c.insts, inst) + return c.top() - 1 +} + +func (c *compiler) emptyJump() uint { + inst := c.allocInst() + inst.op = OpJmp + c.insts = append(c.insts, inst) + return c.top() - 1 +} + +func (c *compiler) setSplit(i, pc1, pc2 uint) { + split := c.insts[i] + split.splitA = pc1 + split.splitB = pc2 +} + +func (c *compiler) setJump(i, pc uint) { + jmp := c.insts[i] + jmp.to = pc +} + +func (c *compiler) top() uint { + return uint(len(c.insts)) +} + +func (c *compiler) allocInst() *inst { + if len(c.instsPool) <= 0 { + c.instsPool = make([]inst, 16) + } + inst := &c.instsPool[0] + c.instsPool = c.instsPool[1:] + return inst +} diff --git a/vendor/github.com/couchbase/vellum/regexp/dfa.go b/vendor/github.com/couchbase/vellum/regexp/dfa.go new file mode 100644 index 0000000..7e6fb29 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/regexp/dfa.go @@ -0,0 +1,196 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package regexp + +import ( + "encoding/binary" + "fmt" +) + +// StateLimit is the maximum number of states allowed +const StateLimit = 10000 + +// ErrTooManyStates is returned if you attempt to build a Levenshtein +// automaton which requires too many states. +var ErrTooManyStates = fmt.Errorf("dfa contains more than %d states", + StateLimit) + +type dfaBuilder struct { + dfa *dfa + cache map[string]int + keyBuf []byte +} + +func newDfaBuilder(insts prog) *dfaBuilder { + d := &dfaBuilder{ + dfa: &dfa{ + insts: insts, + states: make([]state, 0, 16), + }, + cache: make(map[string]int, 1024), + } + // add 0 state that is invalid + d.dfa.states = append(d.dfa.states, state{ + next: make([]int, 256), + match: false, + }) + return d +} + +func (d *dfaBuilder) build() (*dfa, error) { + cur := newSparseSet(uint(len(d.dfa.insts))) + next := newSparseSet(uint(len(d.dfa.insts))) + + d.dfa.add(cur, 0) + ns, instsReuse := d.cachedState(cur, nil) + states := intStack{ns} + seen := make(map[int]struct{}) + var s int + states, s = states.Pop() + for s != 0 { + for b := 0; b < 256; b++ { + var ns int + ns, instsReuse = d.runState(cur, next, s, byte(b), instsReuse) + if ns != 0 { + if _, ok := seen[ns]; !ok { + seen[ns] = struct{}{} + states = states.Push(ns) + } + } + if len(d.dfa.states) > StateLimit { + return nil, ErrTooManyStates + } + } + states, s = states.Pop() + } + return d.dfa, nil +} + +func (d *dfaBuilder) runState(cur, next *sparseSet, state int, b byte, instsReuse []uint) ( + int, []uint) { + cur.Clear() + for _, ip := range 
d.dfa.states[state].insts { + cur.Add(ip) + } + d.dfa.run(cur, next, b) + var nextState int + nextState, instsReuse = d.cachedState(next, instsReuse) + d.dfa.states[state].next[b] = nextState + return nextState, instsReuse +} + +func instsKey(insts []uint, buf []byte) []byte { + if cap(buf) < 8*len(insts) { + buf = make([]byte, 8*len(insts)) + } else { + buf = buf[0 : 8*len(insts)] + } + for i, inst := range insts { + binary.LittleEndian.PutUint64(buf[i*8:], uint64(inst)) + } + return buf +} + +func (d *dfaBuilder) cachedState(set *sparseSet, + instsReuse []uint) (int, []uint) { + insts := instsReuse[:0] + if cap(insts) == 0 { + insts = make([]uint, 0, set.Len()) + } + var isMatch bool + for i := uint(0); i < uint(set.Len()); i++ { + ip := set.Get(i) + switch d.dfa.insts[ip].op { + case OpRange: + insts = append(insts, ip) + case OpMatch: + isMatch = true + insts = append(insts, ip) + } + } + if len(insts) == 0 { + return 0, insts + } + d.keyBuf = instsKey(insts, d.keyBuf) + v, ok := d.cache[string(d.keyBuf)] + if ok { + return v, insts + } + d.dfa.states = append(d.dfa.states, state{ + insts: insts, + next: make([]int, 256), + match: isMatch, + }) + newV := len(d.dfa.states) - 1 + d.cache[string(d.keyBuf)] = newV + return newV, nil +} + +type dfa struct { + insts prog + states []state +} + +func (d *dfa) add(set *sparseSet, ip uint) { + if set.Contains(ip) { + return + } + set.Add(ip) + switch d.insts[ip].op { + case OpJmp: + d.add(set, d.insts[ip].to) + case OpSplit: + d.add(set, d.insts[ip].splitA) + d.add(set, d.insts[ip].splitB) + } +} + +func (d *dfa) run(from, to *sparseSet, b byte) bool { + to.Clear() + var isMatch bool + for i := uint(0); i < uint(from.Len()); i++ { + ip := from.Get(i) + switch d.insts[ip].op { + case OpMatch: + isMatch = true + case OpRange: + if d.insts[ip].rangeStart <= b && + b <= d.insts[ip].rangeEnd { + d.add(to, ip+1) + } + } + } + return isMatch +} + +type state struct { + insts []uint + next []int + match bool +} + +type intStack 
[]int + +func (s intStack) Push(v int) intStack { + return append(s, v) +} + +func (s intStack) Pop() (intStack, int) { + l := len(s) + if l < 1 { + return s, 0 + } + return s[:l-1], s[l-1] +} diff --git a/vendor/github.com/couchbase/vellum/regexp/inst.go b/vendor/github.com/couchbase/vellum/regexp/inst.go new file mode 100644 index 0000000..36f2e60 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/regexp/inst.go @@ -0,0 +1,62 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package regexp + +import "fmt" + +// instOp represents a instruction operation +type instOp int + +// the enumeration of operations +const ( + OpMatch instOp = iota + OpJmp + OpSplit + OpRange +) + +// instSize is the approximate size of the an inst struct in bytes +const instSize = 40 + +type inst struct { + op instOp + to uint + splitA uint + splitB uint + rangeStart byte + rangeEnd byte +} + +func (i *inst) String() string { + switch i.op { + case OpJmp: + return fmt.Sprintf("JMP: %d", i.to) + case OpSplit: + return fmt.Sprintf("SPLIT: %d - %d", i.splitA, i.splitB) + case OpRange: + return fmt.Sprintf("RANGE: %x - %x", i.rangeStart, i.rangeEnd) + } + return "MATCH" +} + +type prog []*inst + +func (p prog) String() string { + rv := "\n" + for i, pi := range p { + rv += fmt.Sprintf("%d %v\n", i, pi) + } + return rv +} diff --git a/vendor/github.com/couchbase/vellum/regexp/regexp.go b/vendor/github.com/couchbase/vellum/regexp/regexp.go new file mode 100644 index 0000000..920ddc3 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/regexp/regexp.go @@ -0,0 +1,119 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package regexp + +import ( + "fmt" + "regexp/syntax" +) + +// ErrNoEmpty returned when "zero width assertions" are used +var ErrNoEmpty = fmt.Errorf("zero width assertions not allowed") + +// ErrNoWordBoundary returned when word boundaries are used +var ErrNoWordBoundary = fmt.Errorf("word boundaries are not allowed") + +// ErrNoBytes returned when byte literals are used +var ErrNoBytes = fmt.Errorf("byte literals are not allowed") + +// ErrNoLazy returned when lazy quantifiers are used +var ErrNoLazy = fmt.Errorf("lazy quantifiers are not allowed") + +// ErrCompiledTooBig returned when regular expression parses into +// too many instructions +var ErrCompiledTooBig = fmt.Errorf("too many instructions") + +var DefaultLimit = uint(10 * (1 << 20)) + +// Regexp implements the vellum.Automaton interface for matcing a user +// specified regular expression. +type Regexp struct { + orig string + dfa *dfa +} + +// NewRegexp creates a new Regular Expression automaton with the specified +// expression. By default it is limited to approximately 10MB for the +// compiled finite state automaton. If this size is exceeded, +// ErrCompiledTooBig will be returned. +func New(expr string) (*Regexp, error) { + return NewWithLimit(expr, DefaultLimit) +} + +// NewRegexpWithLimit creates a new Regular Expression automaton with +// the specified expression. The size of the compiled finite state +// automaton exceeds the user specified size, ErrCompiledTooBig will be +// returned. 
+func NewWithLimit(expr string, size uint) (*Regexp, error) { + parsed, err := syntax.Parse(expr, syntax.Perl) + if err != nil { + return nil, err + } + return NewParsedWithLimit(expr, parsed, size) +} + +func NewParsedWithLimit(expr string, parsed *syntax.Regexp, size uint) (*Regexp, error) { + compiler := newCompiler(size) + insts, err := compiler.compile(parsed) + if err != nil { + return nil, err + } + dfaBuilder := newDfaBuilder(insts) + dfa, err := dfaBuilder.build() + if err != nil { + return nil, err + } + return &Regexp{ + orig: expr, + dfa: dfa, + }, nil +} + +// Start returns the start state of this automaton. +func (r *Regexp) Start() int { + return 1 +} + +// IsMatch returns if the specified state is a matching state. +func (r *Regexp) IsMatch(s int) bool { + if s < len(r.dfa.states) { + return r.dfa.states[s].match + } + return false +} + +// CanMatch returns if the specified state can ever transition to a matching +// state. +func (r *Regexp) CanMatch(s int) bool { + if s < len(r.dfa.states) && s > 0 { + return true + } + return false +} + +// WillAlwaysMatch returns if the specified state will always end in a +// matching state. +func (r *Regexp) WillAlwaysMatch(int) bool { + return false +} + +// Accept returns the new state, resulting from the transition byte b +// when currently in the state s. +func (r *Regexp) Accept(s int, b byte) int { + if s < len(r.dfa.states) { + return r.dfa.states[s].next[b] + } + return 0 +} diff --git a/vendor/github.com/couchbase/vellum/regexp/sparse.go b/vendor/github.com/couchbase/vellum/regexp/sparse.go new file mode 100644 index 0000000..7afbfce --- /dev/null +++ b/vendor/github.com/couchbase/vellum/regexp/sparse.go @@ -0,0 +1,54 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package regexp + +type sparseSet struct { + dense []uint + sparse []uint + size uint +} + +func newSparseSet(size uint) *sparseSet { + return &sparseSet{ + dense: make([]uint, size), + sparse: make([]uint, size), + size: 0, + } +} + +func (s *sparseSet) Len() int { + return int(s.size) +} + +func (s *sparseSet) Add(ip uint) uint { + i := s.size + s.dense[i] = ip + s.sparse[ip] = i + s.size++ + return i +} + +func (s *sparseSet) Get(i uint) uint { + return s.dense[i] +} + +func (s *sparseSet) Contains(ip uint) bool { + i := s.sparse[ip] + return i < s.size && s.dense[i] == ip +} + +func (s *sparseSet) Clear() { + s.size = 0 +} diff --git a/vendor/github.com/couchbase/vellum/registry.go b/vendor/github.com/couchbase/vellum/registry.go new file mode 100644 index 0000000..f5b9b4d --- /dev/null +++ b/vendor/github.com/couchbase/vellum/registry.go @@ -0,0 +1,114 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vellum + +type registryCell struct { + addr int + node *builderNode +} + +type registry struct { + builderNodePool *builderNodePool + table []registryCell + tableSize uint + mruSize uint +} + +func newRegistry(p *builderNodePool, tableSize, mruSize int) *registry { + nsize := tableSize * mruSize + rv := ®istry{ + builderNodePool: p, + table: make([]registryCell, nsize), + tableSize: uint(tableSize), + mruSize: uint(mruSize), + } + return rv +} + +func (r *registry) Reset() { + var empty registryCell + for i := range r.table { + r.builderNodePool.Put(r.table[i].node) + r.table[i] = empty + } +} + +func (r *registry) entry(node *builderNode) (bool, int, *registryCell) { + if len(r.table) == 0 { + return false, 0, nil + } + bucket := r.hash(node) + start := r.mruSize * uint(bucket) + end := start + r.mruSize + rc := registryCache(r.table[start:end]) + return rc.entry(node, r.builderNodePool) +} + +const fnvPrime = 1099511628211 + +func (r *registry) hash(b *builderNode) int { + var final uint64 + if b.final { + final = 1 + } + + var h uint64 = 14695981039346656037 + h = (h ^ final) * fnvPrime + h = (h ^ b.finalOutput) * fnvPrime + for _, t := range b.trans { + h = (h ^ uint64(t.in)) * fnvPrime + h = (h ^ t.out) * fnvPrime + h = (h ^ uint64(t.addr)) * fnvPrime + } + return int(h % uint64(r.tableSize)) +} + +type registryCache []registryCell + +func (r registryCache) entry(node *builderNode, pool *builderNodePool) (bool, int, *registryCell) { + if len(r) == 1 { + if r[0].node != nil && r[0].node.equiv(node) { + return true, r[0].addr, nil + } + pool.Put(r[0].node) + r[0].node = node + return false, 0, &r[0] + } + for i := range r { + if r[i].node != nil && r[i].node.equiv(node) { + addr := r[i].addr + r.promote(i) + return true, addr, nil + } + } + // no match + last := len(r) - 1 + pool.Put(r[last].node) + r[last].node = node // discard LRU + r.promote(last) + return false, 0, &r[0] + +} + +func (r registryCache) promote(i int) { + for i > 0 { + r.swap(i-1, 
i) + i-- + } +} + +func (r registryCache) swap(i, j int) { + r[i], r[j] = r[j], r[i] +} diff --git a/vendor/github.com/couchbase/vellum/transducer.go b/vendor/github.com/couchbase/vellum/transducer.go new file mode 100644 index 0000000..753c422 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/transducer.go @@ -0,0 +1,55 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vellum + +// Transducer represents the general contract of a byte-based finite transducer +type Transducer interface { + + // all transducers are also automatons + Automaton + + // IsMatchWithValue returns true if and only if the state is a match + // additionally it returns a states final value (if any) + IsMatchWithVal(int) (bool, uint64) + + // Accept returns the next state given the input to the specified state + // additionally it returns the value associated with the transition + AcceptWithVal(int, byte) (int, uint64) +} + +// TransducerGet implements an generic Get() method which works +// on any implementation of Transducer +// The caller MUST check the boolean return value for a match. +// Zero is a valid value regardless of match status, +// and if it is NOT a match, the value collected so far is returned. 
+func TransducerGet(t Transducer, k []byte) (bool, uint64) { + var total uint64 + i := 0 + curr := t.Start() + for t.CanMatch(curr) && i < len(k) { + var transVal uint64 + curr, transVal = t.AcceptWithVal(curr, k[i]) + if curr == noneAddr { + break + } + total += transVal + i++ + } + if i != len(k) { + return false, total + } + match, finalVal := t.IsMatchWithVal(curr) + return match, total + finalVal +} diff --git a/vendor/github.com/couchbase/vellum/utf8/utf8.go b/vendor/github.com/couchbase/vellum/utf8/utf8.go new file mode 100644 index 0000000..54e23b9 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/utf8/utf8.go @@ -0,0 +1,268 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package utf8 + +import ( + "fmt" + "unicode/utf8" +) + +// Sequences is a collection of Sequence +type Sequences []Sequence + +// NewSequences constructs a collection of Sequence which describe the +// byte ranges covered between the start and end runes. 
+func NewSequences(start, end rune) (Sequences, error) { + rv, _, err := NewSequencesPrealloc(start, end, nil, nil, nil, nil) + return rv, err +} + +func NewSequencesPrealloc(start, end rune, + preallocSequences Sequences, + preallocRangeStack RangeStack, + preallocStartBytes, preallocEndBytes []byte) (Sequences, RangeStack, error) { + rv := preallocSequences[:0] + + startBytes := preallocStartBytes + if cap(startBytes) < utf8.UTFMax { + startBytes = make([]byte, utf8.UTFMax) + } + startBytes = startBytes[:utf8.UTFMax] + + endBytes := preallocEndBytes + if cap(endBytes) < utf8.UTFMax { + endBytes = make([]byte, utf8.UTFMax) + } + endBytes = endBytes[:utf8.UTFMax] + + rangeStack := preallocRangeStack[:0] + rangeStack = rangeStack.Push(scalarRange{start, end}) + + rangeStack, r := rangeStack.Pop() +TOP: + for r != nilScalarRange { + INNER: + for { + r1, r2 := r.split() + if r1 != nilScalarRange { + rangeStack = rangeStack.Push(scalarRange{r2.start, r2.end}) + r.start = r1.start + r.end = r1.end + continue INNER + } + if !r.valid() { + rangeStack, r = rangeStack.Pop() + continue TOP + } + for i := 1; i < utf8.UTFMax; i++ { + max := maxScalarValue(i) + if r.start <= max && max < r.end { + rangeStack = rangeStack.Push(scalarRange{max + 1, r.end}) + r.end = max + continue INNER + } + } + asciiRange := r.ascii() + if asciiRange != nilRange { + rv = append(rv, Sequence{ + asciiRange, + }) + rangeStack, r = rangeStack.Pop() + continue TOP + } + for i := uint(1); i < utf8.UTFMax; i++ { + m := rune((1 << (6 * i)) - 1) + if (r.start & ^m) != (r.end & ^m) { + if (r.start & m) != 0 { + rangeStack = rangeStack.Push(scalarRange{(r.start | m) + 1, r.end}) + r.end = r.start | m + continue INNER + } + if (r.end & m) != m { + rangeStack = rangeStack.Push(scalarRange{r.end & ^m, r.end}) + r.end = (r.end & ^m) - 1 + continue INNER + } + } + } + n, m := r.encode(startBytes, endBytes) + seq, err := SequenceFromEncodedRange(startBytes[0:n], endBytes[0:m]) + if err != nil { + return nil, 
nil, err + } + rv = append(rv, seq) + rangeStack, r = rangeStack.Pop() + continue TOP + } + } + + return rv, rangeStack, nil +} + +// Sequence is a collection of Range +type Sequence []Range + +// SequenceFromEncodedRange creates sequence from the encoded bytes +func SequenceFromEncodedRange(start, end []byte) (Sequence, error) { + if len(start) != len(end) { + return nil, fmt.Errorf("byte slices must be the same length") + } + switch len(start) { + case 2: + return Sequence{ + Range{start[0], end[0]}, + Range{start[1], end[1]}, + }, nil + case 3: + return Sequence{ + Range{start[0], end[0]}, + Range{start[1], end[1]}, + Range{start[2], end[2]}, + }, nil + case 4: + return Sequence{ + Range{start[0], end[0]}, + Range{start[1], end[1]}, + Range{start[2], end[2]}, + Range{start[3], end[3]}, + }, nil + } + + return nil, fmt.Errorf("invalid encoded byte length") +} + +// Matches checks to see if the provided byte slice matches the Sequence +func (u Sequence) Matches(bytes []byte) bool { + if len(bytes) < len(u) { + return false + } + for i := 0; i < len(u); i++ { + if !u[i].matches(bytes[i]) { + return false + } + } + return true +} + +func (u Sequence) String() string { + switch len(u) { + case 1: + return fmt.Sprintf("%v", u[0]) + case 2: + return fmt.Sprintf("%v%v", u[0], u[1]) + case 3: + return fmt.Sprintf("%v%v%v", u[0], u[1], u[2]) + case 4: + return fmt.Sprintf("%v%v%v%v", u[0], u[1], u[2], u[3]) + default: + return fmt.Sprintf("invalid utf8 sequence") + } +} + +// Range describes a single range of byte values +type Range struct { + Start byte + End byte +} + +var nilRange = Range{0xff, 0} + +func (u Range) matches(b byte) bool { + if u.Start <= b && b <= u.End { + return true + } + return false +} + +func (u Range) String() string { + if u.Start == u.End { + return fmt.Sprintf("[%X]", u.Start) + } + return fmt.Sprintf("[%X-%X]", u.Start, u.End) +} + +type scalarRange struct { + start rune + end rune +} + +var nilScalarRange = scalarRange{0xffff, 0} + +func (s 
*scalarRange) String() string { + return fmt.Sprintf("ScalarRange(%d,%d)", s.start, s.end) +} + +// split this scalar range if it overlaps with a surrogate codepoint +func (s *scalarRange) split() (scalarRange, scalarRange) { + if s.start < 0xe000 && s.end > 0xd7ff { + return scalarRange{ + start: s.start, + end: 0xd7ff, + }, + scalarRange{ + start: 0xe000, + end: s.end, + } + } + return nilScalarRange, nilScalarRange +} + +func (s *scalarRange) valid() bool { + return s.start <= s.end +} + +func (s *scalarRange) ascii() Range { + if s.valid() && s.end <= 0x7f { + return Range{ + Start: byte(s.start), + End: byte(s.end), + } + } + return nilRange +} + +// start and end MUST have capacity for utf8.UTFMax bytes +func (s *scalarRange) encode(start, end []byte) (int, int) { + n := utf8.EncodeRune(start, s.start) + m := utf8.EncodeRune(end, s.end) + return n, m +} + +type RangeStack []scalarRange + +func (s RangeStack) Push(v scalarRange) RangeStack { + return append(s, v) +} + +func (s RangeStack) Pop() (RangeStack, scalarRange) { + l := len(s) + if l < 1 { + return s, nilScalarRange + } + return s[:l-1], s[l-1] +} + +func maxScalarValue(nbytes int) rune { + switch nbytes { + case 1: + return 0x007f + case 2: + return 0x07FF + case 3: + return 0xFFFF + default: + return 0x10FFFF + } +} diff --git a/vendor/github.com/couchbase/vellum/vellum.go b/vendor/github.com/couchbase/vellum/vellum.go new file mode 100644 index 0000000..b2537b3 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/vellum.go @@ -0,0 +1,111 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package vellum is a library for building, serializing and executing an FST (finite +state transducer). + +There are two distinct phases, building an FST and using it. + +When building an FST, you insert keys ([]byte) and their associated value +(uint64). Insert operations MUST be done in lexicographic order. While +building the FST, data is streamed to an underlying Writer. At the conclusion +of building, you MUST call Close() on the builder. + +After completion of the build phase, you can either Open() the FST if you +serialized it to disk. Alternatively, if you already have the bytes in +memory, you can use Load(). By default, Open() will use mmap to avoid loading +the entire file into memory. + +Once the FST is ready, you can use the Contains() method to see if a keys is +in the FST. You can use the Get() method to see if a key is in the FST and +retrieve it's associated value. And, you can use the Iterator method to +enumerate key/value pairs within a specified range. + +*/ +package vellum + +import ( + "errors" + "io" +) + +// ErrOutOfOrder is returned when values are not inserted in +// lexicographic order. +var ErrOutOfOrder = errors.New("values not inserted in lexicographic order") + +// ErrIteratorDone is returned by Iterator/Next/Seek methods when the +// Current() value pointed to by the iterator is greater than the last +// key in this FST, or outside the configured startKeyInclusive/endKeyExclusive +// range of the Iterator. 
+var ErrIteratorDone = errors.New("iterator-done") + +// BuilderOpts is a structure to let advanced users customize the behavior +// of the builder and some aspects of the generated FST. +type BuilderOpts struct { + Encoder int + RegistryTableSize int + RegistryMRUSize int +} + +// New returns a new Builder which will stream out the +// underlying representation to the provided Writer as the set is built. +func New(w io.Writer, opts *BuilderOpts) (*Builder, error) { + return newBuilder(w, opts) +} + +// Open loads the FST stored in the provided path +func Open(path string) (*FST, error) { + return open(path) +} + +// Load will return the FST represented by the provided byte slice. +func Load(data []byte) (*FST, error) { + return new(data, nil) +} + +// Merge will iterate through the provided Iterators, merge duplicate keys +// with the provided MergeFunc, and build a new FST to the provided Writer. +func Merge(w io.Writer, opts *BuilderOpts, itrs []Iterator, f MergeFunc) error { + builder, err := New(w, opts) + if err != nil { + return err + } + + itr, err := NewMergeIterator(itrs, f) + for err == nil { + k, v := itr.Current() + err = builder.Insert(k, v) + if err != nil { + return err + } + err = itr.Next() + } + + if err != nil && err != ErrIteratorDone { + return err + } + + err = itr.Close() + if err != nil { + return err + } + + err = builder.Close() + if err != nil { + return err + } + + return nil +} diff --git a/vendor/github.com/couchbase/vellum/vellum_mmap.go b/vendor/github.com/couchbase/vellum/vellum_mmap.go new file mode 100644 index 0000000..81ea165 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/vellum_mmap.go @@ -0,0 +1,60 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build !nommap + +package vellum + +import ( + "os" + + mmap "github.com/blevesearch/mmap-go" +) + +type mmapWrapper struct { + f *os.File + mm mmap.MMap +} + +func (m *mmapWrapper) Close() (err error) { + if m.mm != nil { + err = m.mm.Unmap() + } + // try to close file even if unmap failed + if m.f != nil { + err2 := m.f.Close() + if err == nil { + // try to return first error + err = err2 + } + } + return +} + +func open(path string) (*FST, error) { + f, err := os.Open(path) + if err != nil { + return nil, err + } + mm, err := mmap.Map(f, mmap.RDONLY, 0) + if err != nil { + // mmap failed, try to close the file + _ = f.Close() + return nil, err + } + return new(mm, &mmapWrapper{ + f: f, + mm: mm, + }) +} diff --git a/vendor/github.com/couchbase/vellum/vellum_nommap.go b/vendor/github.com/couchbase/vellum/vellum_nommap.go new file mode 100644 index 0000000..e985272 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/vellum_nommap.go @@ -0,0 +1,27 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// +build nommap + +package vellum + +import "io/ioutil" + +func open(path string) (*FST, error) { + data, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return new(data, nil) +} diff --git a/vendor/github.com/couchbase/vellum/writer.go b/vendor/github.com/couchbase/vellum/writer.go new file mode 100644 index 0000000..d655d47 --- /dev/null +++ b/vendor/github.com/couchbase/vellum/writer.go @@ -0,0 +1,92 @@ +// Copyright (c) 2017 Couchbase, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+type writer struct { + w *bufio.Writer + counter int +} + +func newWriter(w io.Writer) *writer { + return &writer{ + w: bufio.NewWriter(w), + } +} + +func (w *writer) Reset(newWriter io.Writer) { + w.w.Reset(newWriter) + w.counter = 0 +} + +func (w *writer) WriteByte(c byte) error { + err := w.w.WriteByte(c) + if err != nil { + return err + } + w.counter++ + return nil +} + +func (w *writer) Write(p []byte) (int, error) { + n, err := w.w.Write(p) + w.counter += n + return n, err +} + +func (w *writer) Flush() error { + return w.w.Flush() +} + +func (w *writer) WritePackedUintIn(v uint64, n int) error { + for shift := uint(0); shift < uint(n*8); shift += 8 { + err := w.WriteByte(byte(v >> shift)) + if err != nil { + return err + } + } + + return nil +} + +func (w *writer) WritePackedUint(v uint64) error { + n := packedSize(v) + return w.WritePackedUintIn(v, n) +} + +func packedSize(n uint64) int { + if n < 1<<8 { + return 1 + } else if n < 1<<16 { + return 2 + } else if n < 1<<24 { + return 3 + } else if n < 1<<32 { + return 4 + } else if n < 1<<40 { + return 5 + } else if n < 1<<48 { + return 6 + } else if n < 1<<56 { + return 7 + } + return 8 +} diff --git a/vendor/github.com/dgrijalva/jwt-go/.gitignore b/vendor/github.com/dgrijalva/jwt-go/.gitignore new file mode 100644 index 0000000..80bed65 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +bin + + diff --git a/vendor/github.com/dgrijalva/jwt-go/.travis.yml b/vendor/github.com/dgrijalva/jwt-go/.travis.yml new file mode 100644 index 0000000..1027f56 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/.travis.yml @@ -0,0 +1,13 @@ +language: go + +script: + - go vet ./... + - go test -v ./... 
+ +go: + - 1.3 + - 1.4 + - 1.5 + - 1.6 + - 1.7 + - tip diff --git a/vendor/github.com/dgrijalva/jwt-go/LICENSE b/vendor/github.com/dgrijalva/jwt-go/LICENSE new file mode 100644 index 0000000..df83a9c --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/LICENSE @@ -0,0 +1,8 @@ +Copyright (c) 2012 Dave Grijalva + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md b/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md new file mode 100644 index 0000000..7fc1f79 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/MIGRATION_GUIDE.md @@ -0,0 +1,97 @@ +## Migration Guide from v2 -> v3 + +Version 3 adds several new, frequently requested features. To do so, it introduces a few breaking changes. We've worked to keep these as minimal as possible. This guide explains the breaking changes and how you can quickly update your code. 
+ +### `Token.Claims` is now an interface type + +The most requested feature from the 2.0 verison of this library was the ability to provide a custom type to the JSON parser for claims. This was implemented by introducing a new interface, `Claims`, to replace `map[string]interface{}`. We also included two concrete implementations of `Claims`: `MapClaims` and `StandardClaims`. + +`MapClaims` is an alias for `map[string]interface{}` with built in validation behavior. It is the default claims type when using `Parse`. The usage is unchanged except you must type cast the claims property. + +The old example for parsing a token looked like this.. + +```go + if token, err := jwt.Parse(tokenString, keyLookupFunc); err == nil { + fmt.Printf("Token for user %v expires %v", token.Claims["user"], token.Claims["exp"]) + } +``` + +is now directly mapped to... + +```go + if token, err := jwt.Parse(tokenString, keyLookupFunc); err == nil { + claims := token.Claims.(jwt.MapClaims) + fmt.Printf("Token for user %v expires %v", claims["user"], claims["exp"]) + } +``` + +`StandardClaims` is designed to be embedded in your custom type. You can supply a custom claims type with the new `ParseWithClaims` function. Here's an example of using a custom claims type. + +```go + type MyCustomClaims struct { + User string + *StandardClaims + } + + if token, err := jwt.ParseWithClaims(tokenString, &MyCustomClaims{}, keyLookupFunc); err == nil { + claims := token.Claims.(*MyCustomClaims) + fmt.Printf("Token for user %v expires %v", claims.User, claims.StandardClaims.ExpiresAt) + } +``` + +### `ParseFromRequest` has been moved + +To keep this library focused on the tokens without becoming overburdened with complex request processing logic, `ParseFromRequest` and its new companion `ParseFromRequestWithClaims` have been moved to a subpackage, `request`. The method signatues have also been augmented to receive a new argument: `Extractor`. 
+ +`Extractors` do the work of picking the token string out of a request. The interface is simple and composable. + +This simple parsing example: + +```go + if token, err := jwt.ParseFromRequest(tokenString, req, keyLookupFunc); err == nil { + fmt.Printf("Token for user %v expires %v", token.Claims["user"], token.Claims["exp"]) + } +``` + +is directly mapped to: + +```go + if token, err := request.ParseFromRequest(req, request.OAuth2Extractor, keyLookupFunc); err == nil { + claims := token.Claims.(jwt.MapClaims) + fmt.Printf("Token for user %v expires %v", claims["user"], claims["exp"]) + } +``` + +There are several concrete `Extractor` types provided for your convenience: + +* `HeaderExtractor` will search a list of headers until one contains content. +* `ArgumentExtractor` will search a list of keys in request query and form arguments until one contains content. +* `MultiExtractor` will try a list of `Extractors` in order until one returns content. +* `AuthorizationHeaderExtractor` will look in the `Authorization` header for a `Bearer` token. +* `OAuth2Extractor` searches the places an OAuth2 token would be specified (per the spec): `Authorization` header and `access_token` argument +* `PostExtractionFilter` wraps an `Extractor`, allowing you to process the content before it's parsed. A simple example is stripping the `Bearer ` text from a header + + +### RSA signing methods no longer accept `[]byte` keys + +Due to a [critical vulnerability](https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/), we've decided the convenience of accepting `[]byte` instead of `rsa.PublicKey` or `rsa.PrivateKey` isn't worth the risk of misuse. + +To replace this behavior, we've added two helper methods: `ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error)` and `ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error)`. These are just simple helpers for unpacking PEM encoded PKCS1 and PKCS8 keys. 
If your keys are encoded any other way, all you need to do is convert them to the `crypto/rsa` package's types. + +```go + func keyLookupFunc(*Token) (interface{}, error) { + // Don't forget to validate the alg is what you expect: + if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { + return nil, fmt.Errorf("Unexpected signing method: %v", token.Header["alg"]) + } + + // Look up key + key, err := lookupPublicKey(token.Header["kid"]) + if err != nil { + return nil, err + } + + // Unpack key from PEM encoded PKCS8 + return jwt.ParseRSAPublicKeyFromPEM(key) + } +``` diff --git a/vendor/github.com/dgrijalva/jwt-go/README.md b/vendor/github.com/dgrijalva/jwt-go/README.md new file mode 100644 index 0000000..d358d88 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/README.md @@ -0,0 +1,100 @@ +# jwt-go + +[![Build Status](https://travis-ci.org/dgrijalva/jwt-go.svg?branch=master)](https://travis-ci.org/dgrijalva/jwt-go) +[![GoDoc](https://godoc.org/github.com/dgrijalva/jwt-go?status.svg)](https://godoc.org/github.com/dgrijalva/jwt-go) + +A [go](http://www.golang.org) (or 'golang' for search engine friendliness) implementation of [JSON Web Tokens](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) + +**NEW VERSION COMING:** There have been a lot of improvements suggested since the version 3.0.0 released in 2016. I'm working now on cutting two different releases: 3.2.0 will contain any non-breaking changes or enhancements. 4.0.0 will follow shortly which will include breaking changes. See the 4.0.0 milestone to get an idea of what's coming. If you have other ideas, or would like to participate in 4.0.0, now's the time. If you depend on this library and don't want to be interrupted, I recommend you use your dependency mangement tool to pin to version 3. + +**SECURITY NOTICE:** Some older versions of Go have a security issue in the cryotp/elliptic. Recommendation is to upgrade to at least 1.8.3. See issue #216 for more detail. 
+ +**SECURITY NOTICE:** It's important that you [validate the `alg` presented is what you expect](https://auth0.com/blog/2015/03/31/critical-vulnerabilities-in-json-web-token-libraries/). This library attempts to make it easy to do the right thing by requiring key types match the expected alg, but you should take the extra step to verify it in your usage. See the examples provided. + +## What the heck is a JWT? + +JWT.io has [a great introduction](https://jwt.io/introduction) to JSON Web Tokens. + +In short, it's a signed JSON object that does something useful (for example, authentication). It's commonly used for `Bearer` tokens in Oauth 2. A token is made of three parts, separated by `.`'s. The first two parts are JSON objects, that have been [base64url](http://tools.ietf.org/html/rfc4648) encoded. The last part is the signature, encoded the same way. + +The first part is called the header. It contains the necessary information for verifying the last part, the signature. For example, which encryption method was used for signing and what key was used. + +The part in the middle is the interesting bit. It's called the Claims and contains the actual stuff you care about. Refer to [the RFC](http://self-issued.info/docs/draft-jones-json-web-token.html) for information about reserved keys and the proper way to add your own. + +## What's in the box? + +This library supports the parsing and verification as well as the generation and signing of JWTs. Current supported signing algorithms are HMAC SHA, RSA, RSA-PSS, and ECDSA, though hooks are present for adding your own. 
+ +## Examples + +See [the project documentation](https://godoc.org/github.com/dgrijalva/jwt-go) for examples of usage: + +* [Simple example of parsing and validating a token](https://godoc.org/github.com/dgrijalva/jwt-go#example-Parse--Hmac) +* [Simple example of building and signing a token](https://godoc.org/github.com/dgrijalva/jwt-go#example-New--Hmac) +* [Directory of Examples](https://godoc.org/github.com/dgrijalva/jwt-go#pkg-examples) + +## Extensions + +This library publishes all the necessary components for adding your own signing methods. Simply implement the `SigningMethod` interface and register a factory method using `RegisterSigningMethod`. + +Here's an example of an extension that integrates with the Google App Engine signing tools: https://github.com/someone1/gcp-jwt-go + +## Compliance + +This library was last reviewed to comply with [RTF 7519](http://www.rfc-editor.org/info/rfc7519) dated May 2015 with a few notable differences: + +* In order to protect against accidental use of [Unsecured JWTs](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html#UnsecuredJWT), tokens using `alg=none` will only be accepted if the constant `jwt.UnsafeAllowNoneSignatureType` is provided as the key. + +## Project Status & Versioning + +This library is considered production ready. Feedback and feature requests are appreciated. The API should be considered stable. There should be very few backwards-incompatible changes outside of major version updates (and only with good reason). + +This project uses [Semantic Versioning 2.0.0](http://semver.org). Accepted pull requests will land on `master`. Periodically, versions will be tagged from `master`. You can find all the releases on [the project releases page](https://github.com/dgrijalva/jwt-go/releases). + +While we try to make it obvious when we make breaking changes, there isn't a great mechanism for pushing announcements out to users. 
You may want to use this alternative package include: `gopkg.in/dgrijalva/jwt-go.v3`. It will do the right thing WRT semantic versioning. + +**BREAKING CHANGES:*** +* Version 3.0.0 includes _a lot_ of changes from the 2.x line, including a few that break the API. We've tried to break as few things as possible, so there should just be a few type signature changes. A full list of breaking changes is available in `VERSION_HISTORY.md`. See `MIGRATION_GUIDE.md` for more information on updating your code. + +## Usage Tips + +### Signing vs Encryption + +A token is simply a JSON object that is signed by its author. this tells you exactly two things about the data: + +* The author of the token was in the possession of the signing secret +* The data has not been modified since it was signed + +It's important to know that JWT does not provide encryption, which means anyone who has access to the token can read its contents. If you need to protect (encrypt) the data, there is a companion spec, `JWE`, that provides this functionality. JWE is currently outside the scope of this library. + +### Choosing a Signing Method + +There are several signing methods available, and you should probably take the time to learn about the various options before choosing one. The principal design decision is most likely going to be symmetric vs asymmetric. + +Symmetric signing methods, such as HSA, use only a single secret. This is probably the simplest signing method to use since any `[]byte` can be used as a valid secret. They are also slightly computationally faster to use, though this rarely is enough to matter. Symmetric signing methods work the best when both producers and consumers of tokens are trusted, or even the same system. Since the same secret is used to both sign and validate tokens, you can't easily distribute the key for validation. + +Asymmetric signing methods, such as RSA, use different keys for signing and verifying tokens. 
This makes it possible to produce tokens with a private key, and allow any consumer to access the public key for verification. + +### Signing Methods and Key Types + +Each signing method expects a different object type for its signing keys. See the package documentation for details. Here are the most common ones: + +* The [HMAC signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodHMAC) (`HS256`,`HS384`,`HS512`) expect `[]byte` values for signing and validation +* The [RSA signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodRSA) (`RS256`,`RS384`,`RS512`) expect `*rsa.PrivateKey` for signing and `*rsa.PublicKey` for validation +* The [ECDSA signing method](https://godoc.org/github.com/dgrijalva/jwt-go#SigningMethodECDSA) (`ES256`,`ES384`,`ES512`) expect `*ecdsa.PrivateKey` for signing and `*ecdsa.PublicKey` for validation + +### JWT and OAuth + +It's worth mentioning that OAuth and JWT are not the same thing. A JWT token is simply a signed JSON object. It can be used anywhere such a thing is useful. There is some confusion, though, as JWT is the most common type of bearer token used in OAuth2 authentication. + +Without going too far down the rabbit hole, here's a description of the interaction of these technologies: + +* OAuth is a protocol for allowing an identity provider to be separate from the service a user is logging in to. For example, whenever you use Facebook to log into a different service (Yelp, Spotify, etc), you are using OAuth. +* OAuth defines several options for passing around authentication data. One popular method is called a "bearer token". A bearer token is simply a string that _should_ only be held by an authenticated user. Thus, simply presenting this token proves your identity. You can probably derive from here why a JWT might make a good bearer token. +* Because bearer tokens are used for authentication, it's important they're kept secret. 
This is why transactions that use bearer tokens typically happen over SSL. + +## More + +Documentation can be found [on godoc.org](http://godoc.org/github.com/dgrijalva/jwt-go). + +The command line utility included in this project (cmd/jwt) provides a straightforward example of token creation and parsing as well as a useful tool for debugging your own integration. You'll also find several implementation examples in the documentation. diff --git a/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md b/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md new file mode 100644 index 0000000..6370298 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/VERSION_HISTORY.md @@ -0,0 +1,118 @@ +## `jwt-go` Version History + +#### 3.2.0 + +* Added method `ParseUnverified` to allow users to split up the tasks of parsing and validation +* HMAC signing method returns `ErrInvalidKeyType` instead of `ErrInvalidKey` where appropriate +* Added options to `request.ParseFromRequest`, which allows for an arbitrary list of modifiers to parsing behavior. Initial set include `WithClaims` and `WithParser`. Existing usage of this function will continue to work as before. +* Deprecated `ParseFromRequestWithClaims` to simplify API in the future. + +#### 3.1.0 + +* Improvements to `jwt` command line tool +* Added `SkipClaimsValidation` option to `Parser` +* Documentation updates + +#### 3.0.0 + +* **Compatibility Breaking Changes**: See MIGRATION_GUIDE.md for tips on updating your code + * Dropped support for `[]byte` keys when using RSA signing methods. This convenience feature could contribute to security vulnerabilities involving mismatched key types with signing methods. + * `ParseFromRequest` has been moved to `request` subpackage and usage has changed + * The `Claims` property on `Token` is now type `Claims` instead of `map[string]interface{}`. The default value is type `MapClaims`, which is an alias to `map[string]interface{}`. 
This makes it possible to use a custom type when decoding claims. +* Other Additions and Changes + * Added `Claims` interface type to allow users to decode the claims into a custom type + * Added `ParseWithClaims`, which takes a third argument of type `Claims`. Use this function instead of `Parse` if you have a custom type you'd like to decode into. + * Dramatically improved the functionality and flexibility of `ParseFromRequest`, which is now in the `request` subpackage + * Added `ParseFromRequestWithClaims` which is the `FromRequest` equivalent of `ParseWithClaims` + * Added new interface type `Extractor`, which is used for extracting JWT strings from http requests. Used with `ParseFromRequest` and `ParseFromRequestWithClaims`. + * Added several new, more specific, validation errors to error type bitmask + * Moved examples from README to executable example files + * Signing method registry is now thread safe + * Added new property to `ValidationError`, which contains the raw error returned by calls made by parse/verify (such as those returned by keyfunc or json parser) + +#### 2.7.0 + +This will likely be the last backwards compatible release before 3.0.0, excluding essential bug fixes. + +* Added new option `-show` to the `jwt` command that will just output the decoded token without verifying +* Error text for expired tokens includes how long it's been expired +* Fixed incorrect error returned from `ParseRSAPublicKeyFromPEM` +* Documentation updates + +#### 2.6.0 + +* Exposed inner error within ValidationError +* Fixed validation errors when using UseJSONNumber flag +* Added several unit tests + +#### 2.5.0 + +* Added support for signing method none. You shouldn't use this. The API tries to make this clear. 
+* Updated/fixed some documentation +* Added more helpful error message when trying to parse tokens that begin with `BEARER ` + +#### 2.4.0 + +* Added new type, Parser, to allow for configuration of various parsing parameters + * You can now specify a list of valid signing methods. Anything outside this set will be rejected. + * You can now opt to use the `json.Number` type instead of `float64` when parsing token JSON +* Added support for [Travis CI](https://travis-ci.org/dgrijalva/jwt-go) +* Fixed some bugs with ECDSA parsing + +#### 2.3.0 + +* Added support for ECDSA signing methods +* Added support for RSA PSS signing methods (requires go v1.4) + +#### 2.2.0 + +* Gracefully handle a `nil` `Keyfunc` being passed to `Parse`. Result will now be the parsed token and an error, instead of a panic. + +#### 2.1.0 + +Backwards compatible API change that was missed in 2.0.0. + +* The `SignedString` method on `Token` now takes `interface{}` instead of `[]byte` + +#### 2.0.0 + +There were two major reasons for breaking backwards compatibility with this update. The first was a refactor required to expand the width of the RSA and HMAC-SHA signing implementations. There will likely be no required code changes to support this change. + +The second update, while unfortunately requiring a small change in integration, is required to open up this library to other signing methods. Not all keys used for all signing methods have a single standard on-disk representation. Requiring `[]byte` as the type for all keys proved too limiting. Additionally, this implementation allows for pre-parsed tokens to be reused, which might matter in an application that parses a high volume of tokens with a small set of keys. Backwards compatibilty has been maintained for passing `[]byte` to the RSA signing methods, but they will also accept `*rsa.PublicKey` and `*rsa.PrivateKey`. 
+ +It is likely the only integration change required here will be to change `func(t *jwt.Token) ([]byte, error)` to `func(t *jwt.Token) (interface{}, error)` when calling `Parse`. + +* **Compatibility Breaking Changes** + * `SigningMethodHS256` is now `*SigningMethodHMAC` instead of `type struct` + * `SigningMethodRS256` is now `*SigningMethodRSA` instead of `type struct` + * `KeyFunc` now returns `interface{}` instead of `[]byte` + * `SigningMethod.Sign` now takes `interface{}` instead of `[]byte` for the key + * `SigningMethod.Verify` now takes `interface{}` instead of `[]byte` for the key +* Renamed type `SigningMethodHS256` to `SigningMethodHMAC`. Specific sizes are now just instances of this type. + * Added public package global `SigningMethodHS256` + * Added public package global `SigningMethodHS384` + * Added public package global `SigningMethodHS512` +* Renamed type `SigningMethodRS256` to `SigningMethodRSA`. Specific sizes are now just instances of this type. + * Added public package global `SigningMethodRS256` + * Added public package global `SigningMethodRS384` + * Added public package global `SigningMethodRS512` +* Moved sample private key for HMAC tests from an inline value to a file on disk. Value is unchanged. +* Refactored the RSA implementation to be easier to read +* Exposed helper methods `ParseRSAPrivateKeyFromPEM` and `ParseRSAPublicKeyFromPEM` + +#### 1.0.2 + +* Fixed bug in parsing public keys from certificates +* Added more tests around the parsing of keys for RS256 +* Code refactoring in RS256 implementation. 
No functional changes + +#### 1.0.1 + +* Fixed panic if RS256 signing method was passed an invalid key + +#### 1.0.0 + +* First versioned release +* API stabilized +* Supports creating, signing, parsing, and validating JWT tokens +* Supports RS256 and HS256 signing methods \ No newline at end of file diff --git a/vendor/github.com/dgrijalva/jwt-go/claims.go b/vendor/github.com/dgrijalva/jwt-go/claims.go new file mode 100644 index 0000000..f0228f0 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/claims.go @@ -0,0 +1,134 @@ +package jwt + +import ( + "crypto/subtle" + "fmt" + "time" +) + +// For a type to be a Claims object, it must just have a Valid method that determines +// if the token is invalid for any supported reason +type Claims interface { + Valid() error +} + +// Structured version of Claims Section, as referenced at +// https://tools.ietf.org/html/rfc7519#section-4.1 +// See examples for how to use this with your own claim types +type StandardClaims struct { + Audience string `json:"aud,omitempty"` + ExpiresAt int64 `json:"exp,omitempty"` + Id string `json:"jti,omitempty"` + IssuedAt int64 `json:"iat,omitempty"` + Issuer string `json:"iss,omitempty"` + NotBefore int64 `json:"nbf,omitempty"` + Subject string `json:"sub,omitempty"` +} + +// Validates time based claims "exp, iat, nbf". +// There is no accounting for clock skew. +// As well, if any of the above claims are not in the token, it will still +// be considered a valid claim. +func (c StandardClaims) Valid() error { + vErr := new(ValidationError) + now := TimeFunc().Unix() + + // The claims below are optional, by default, so if they are set to the + // default value in Go, let's not fail the verification for them. 
+ if c.VerifyExpiresAt(now, false) == false { + delta := time.Unix(now, 0).Sub(time.Unix(c.ExpiresAt, 0)) + vErr.Inner = fmt.Errorf("token is expired by %v", delta) + vErr.Errors |= ValidationErrorExpired + } + + if c.VerifyIssuedAt(now, false) == false { + vErr.Inner = fmt.Errorf("Token used before issued") + vErr.Errors |= ValidationErrorIssuedAt + } + + if c.VerifyNotBefore(now, false) == false { + vErr.Inner = fmt.Errorf("token is not valid yet") + vErr.Errors |= ValidationErrorNotValidYet + } + + if vErr.valid() { + return nil + } + + return vErr +} + +// Compares the aud claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (c *StandardClaims) VerifyAudience(cmp string, req bool) bool { + return verifyAud(c.Audience, cmp, req) +} + +// Compares the exp claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (c *StandardClaims) VerifyExpiresAt(cmp int64, req bool) bool { + return verifyExp(c.ExpiresAt, cmp, req) +} + +// Compares the iat claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (c *StandardClaims) VerifyIssuedAt(cmp int64, req bool) bool { + return verifyIat(c.IssuedAt, cmp, req) +} + +// Compares the iss claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (c *StandardClaims) VerifyIssuer(cmp string, req bool) bool { + return verifyIss(c.Issuer, cmp, req) +} + +// Compares the nbf claim against cmp. 
+// If required is false, this method will return true if the value matches or is unset +func (c *StandardClaims) VerifyNotBefore(cmp int64, req bool) bool { + return verifyNbf(c.NotBefore, cmp, req) +} + +// ----- helpers + +func verifyAud(aud string, cmp string, required bool) bool { + if aud == "" { + return !required + } + if subtle.ConstantTimeCompare([]byte(aud), []byte(cmp)) != 0 { + return true + } else { + return false + } +} + +func verifyExp(exp int64, now int64, required bool) bool { + if exp == 0 { + return !required + } + return now <= exp +} + +func verifyIat(iat int64, now int64, required bool) bool { + if iat == 0 { + return !required + } + return now >= iat +} + +func verifyIss(iss string, cmp string, required bool) bool { + if iss == "" { + return !required + } + if subtle.ConstantTimeCompare([]byte(iss), []byte(cmp)) != 0 { + return true + } else { + return false + } +} + +func verifyNbf(nbf int64, now int64, required bool) bool { + if nbf == 0 { + return !required + } + return now >= nbf +} diff --git a/vendor/github.com/dgrijalva/jwt-go/doc.go b/vendor/github.com/dgrijalva/jwt-go/doc.go new file mode 100644 index 0000000..a86dc1a --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/doc.go @@ -0,0 +1,4 @@ +// Package jwt is a Go implementation of JSON Web Tokens: http://self-issued.info/docs/draft-jones-json-web-token.html +// +// See README.md for more info. 
+package jwt diff --git a/vendor/github.com/dgrijalva/jwt-go/ecdsa.go b/vendor/github.com/dgrijalva/jwt-go/ecdsa.go new file mode 100644 index 0000000..f977381 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/ecdsa.go @@ -0,0 +1,148 @@ +package jwt + +import ( + "crypto" + "crypto/ecdsa" + "crypto/rand" + "errors" + "math/big" +) + +var ( + // Sadly this is missing from crypto/ecdsa compared to crypto/rsa + ErrECDSAVerification = errors.New("crypto/ecdsa: verification error") +) + +// Implements the ECDSA family of signing methods signing methods +// Expects *ecdsa.PrivateKey for signing and *ecdsa.PublicKey for verification +type SigningMethodECDSA struct { + Name string + Hash crypto.Hash + KeySize int + CurveBits int +} + +// Specific instances for EC256 and company +var ( + SigningMethodES256 *SigningMethodECDSA + SigningMethodES384 *SigningMethodECDSA + SigningMethodES512 *SigningMethodECDSA +) + +func init() { + // ES256 + SigningMethodES256 = &SigningMethodECDSA{"ES256", crypto.SHA256, 32, 256} + RegisterSigningMethod(SigningMethodES256.Alg(), func() SigningMethod { + return SigningMethodES256 + }) + + // ES384 + SigningMethodES384 = &SigningMethodECDSA{"ES384", crypto.SHA384, 48, 384} + RegisterSigningMethod(SigningMethodES384.Alg(), func() SigningMethod { + return SigningMethodES384 + }) + + // ES512 + SigningMethodES512 = &SigningMethodECDSA{"ES512", crypto.SHA512, 66, 521} + RegisterSigningMethod(SigningMethodES512.Alg(), func() SigningMethod { + return SigningMethodES512 + }) +} + +func (m *SigningMethodECDSA) Alg() string { + return m.Name +} + +// Implements the Verify method from SigningMethod +// For this verify method, key must be an ecdsa.PublicKey struct +func (m *SigningMethodECDSA) Verify(signingString, signature string, key interface{}) error { + var err error + + // Decode the signature + var sig []byte + if sig, err = DecodeSegment(signature); err != nil { + return err + } + + // Get the key + var ecdsaKey *ecdsa.PublicKey + switch k 
:= key.(type) { + case *ecdsa.PublicKey: + ecdsaKey = k + default: + return ErrInvalidKeyType + } + + if len(sig) != 2*m.KeySize { + return ErrECDSAVerification + } + + r := big.NewInt(0).SetBytes(sig[:m.KeySize]) + s := big.NewInt(0).SetBytes(sig[m.KeySize:]) + + // Create hasher + if !m.Hash.Available() { + return ErrHashUnavailable + } + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + // Verify the signature + if verifystatus := ecdsa.Verify(ecdsaKey, hasher.Sum(nil), r, s); verifystatus == true { + return nil + } else { + return ErrECDSAVerification + } +} + +// Implements the Sign method from SigningMethod +// For this signing method, key must be an ecdsa.PrivateKey struct +func (m *SigningMethodECDSA) Sign(signingString string, key interface{}) (string, error) { + // Get the key + var ecdsaKey *ecdsa.PrivateKey + switch k := key.(type) { + case *ecdsa.PrivateKey: + ecdsaKey = k + default: + return "", ErrInvalidKeyType + } + + // Create the hasher + if !m.Hash.Available() { + return "", ErrHashUnavailable + } + + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + // Sign the string and return r, s + if r, s, err := ecdsa.Sign(rand.Reader, ecdsaKey, hasher.Sum(nil)); err == nil { + curveBits := ecdsaKey.Curve.Params().BitSize + + if m.CurveBits != curveBits { + return "", ErrInvalidKey + } + + keyBytes := curveBits / 8 + if curveBits%8 > 0 { + keyBytes += 1 + } + + // We serialize the outpus (r and s) into big-endian byte arrays and pad + // them with zeros on the left to make sure the sizes work out. Both arrays + // must be keyBytes long, and the output must be 2*keyBytes long. + rBytes := r.Bytes() + rBytesPadded := make([]byte, keyBytes) + copy(rBytesPadded[keyBytes-len(rBytes):], rBytes) + + sBytes := s.Bytes() + sBytesPadded := make([]byte, keyBytes) + copy(sBytesPadded[keyBytes-len(sBytes):], sBytes) + + out := append(rBytesPadded, sBytesPadded...) 
+ + return EncodeSegment(out), nil + } else { + return "", err + } +} diff --git a/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go b/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go new file mode 100644 index 0000000..d19624b --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/ecdsa_utils.go @@ -0,0 +1,67 @@ +package jwt + +import ( + "crypto/ecdsa" + "crypto/x509" + "encoding/pem" + "errors" +) + +var ( + ErrNotECPublicKey = errors.New("Key is not a valid ECDSA public key") + ErrNotECPrivateKey = errors.New("Key is not a valid ECDSA private key") +) + +// Parse PEM encoded Elliptic Curve Private Key Structure +func ParseECPrivateKeyFromPEM(key []byte) (*ecdsa.PrivateKey, error) { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + return nil, ErrKeyMustBePEMEncoded + } + + // Parse the key + var parsedKey interface{} + if parsedKey, err = x509.ParseECPrivateKey(block.Bytes); err != nil { + return nil, err + } + + var pkey *ecdsa.PrivateKey + var ok bool + if pkey, ok = parsedKey.(*ecdsa.PrivateKey); !ok { + return nil, ErrNotECPrivateKey + } + + return pkey, nil +} + +// Parse PEM encoded PKCS1 or PKCS8 public key +func ParseECPublicKeyFromPEM(key []byte) (*ecdsa.PublicKey, error) { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + return nil, ErrKeyMustBePEMEncoded + } + + // Parse the key + var parsedKey interface{} + if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { + if cert, err := x509.ParseCertificate(block.Bytes); err == nil { + parsedKey = cert.PublicKey + } else { + return nil, err + } + } + + var pkey *ecdsa.PublicKey + var ok bool + if pkey, ok = parsedKey.(*ecdsa.PublicKey); !ok { + return nil, ErrNotECPublicKey + } + + return pkey, nil +} diff --git a/vendor/github.com/dgrijalva/jwt-go/errors.go b/vendor/github.com/dgrijalva/jwt-go/errors.go new file mode 100644 index 0000000..1c93024 --- /dev/null +++ 
b/vendor/github.com/dgrijalva/jwt-go/errors.go @@ -0,0 +1,59 @@ +package jwt + +import ( + "errors" +) + +// Error constants +var ( + ErrInvalidKey = errors.New("key is invalid") + ErrInvalidKeyType = errors.New("key is of invalid type") + ErrHashUnavailable = errors.New("the requested hash function is unavailable") +) + +// The errors that might occur when parsing and validating a token +const ( + ValidationErrorMalformed uint32 = 1 << iota // Token is malformed + ValidationErrorUnverifiable // Token could not be verified because of signing problems + ValidationErrorSignatureInvalid // Signature validation failed + + // Standard Claim validation errors + ValidationErrorAudience // AUD validation failed + ValidationErrorExpired // EXP validation failed + ValidationErrorIssuedAt // IAT validation failed + ValidationErrorIssuer // ISS validation failed + ValidationErrorNotValidYet // NBF validation failed + ValidationErrorId // JTI validation failed + ValidationErrorClaimsInvalid // Generic claims validation error +) + +// Helper for constructing a ValidationError with a string error message +func NewValidationError(errorText string, errorFlags uint32) *ValidationError { + return &ValidationError{ + text: errorText, + Errors: errorFlags, + } +} + +// The error from Parse if token is not valid +type ValidationError struct { + Inner error // stores the error returned by external dependencies, i.e.: KeyFunc + Errors uint32 // bitfield. see ValidationError... 
constants + text string // errors that do not have a valid error just have text +} + +// Validation error is an error type +func (e ValidationError) Error() string { + if e.Inner != nil { + return e.Inner.Error() + } else if e.text != "" { + return e.text + } else { + return "token is invalid" + } +} + +// No errors +func (e *ValidationError) valid() bool { + return e.Errors == 0 +} diff --git a/vendor/github.com/dgrijalva/jwt-go/hmac.go b/vendor/github.com/dgrijalva/jwt-go/hmac.go new file mode 100644 index 0000000..addbe5d --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/hmac.go @@ -0,0 +1,95 @@ +package jwt + +import ( + "crypto" + "crypto/hmac" + "errors" +) + +// Implements the HMAC-SHA family of signing methods signing methods +// Expects key type of []byte for both signing and validation +type SigningMethodHMAC struct { + Name string + Hash crypto.Hash +} + +// Specific instances for HS256 and company +var ( + SigningMethodHS256 *SigningMethodHMAC + SigningMethodHS384 *SigningMethodHMAC + SigningMethodHS512 *SigningMethodHMAC + ErrSignatureInvalid = errors.New("signature is invalid") +) + +func init() { + // HS256 + SigningMethodHS256 = &SigningMethodHMAC{"HS256", crypto.SHA256} + RegisterSigningMethod(SigningMethodHS256.Alg(), func() SigningMethod { + return SigningMethodHS256 + }) + + // HS384 + SigningMethodHS384 = &SigningMethodHMAC{"HS384", crypto.SHA384} + RegisterSigningMethod(SigningMethodHS384.Alg(), func() SigningMethod { + return SigningMethodHS384 + }) + + // HS512 + SigningMethodHS512 = &SigningMethodHMAC{"HS512", crypto.SHA512} + RegisterSigningMethod(SigningMethodHS512.Alg(), func() SigningMethod { + return SigningMethodHS512 + }) +} + +func (m *SigningMethodHMAC) Alg() string { + return m.Name +} + +// Verify the signature of HSXXX tokens. Returns nil if the signature is valid. 
+func (m *SigningMethodHMAC) Verify(signingString, signature string, key interface{}) error { + // Verify the key is the right type + keyBytes, ok := key.([]byte) + if !ok { + return ErrInvalidKeyType + } + + // Decode signature, for comparison + sig, err := DecodeSegment(signature) + if err != nil { + return err + } + + // Can we use the specified hashing method? + if !m.Hash.Available() { + return ErrHashUnavailable + } + + // This signing method is symmetric, so we validate the signature + // by reproducing the signature from the signing string and key, then + // comparing that against the provided signature. + hasher := hmac.New(m.Hash.New, keyBytes) + hasher.Write([]byte(signingString)) + if !hmac.Equal(sig, hasher.Sum(nil)) { + return ErrSignatureInvalid + } + + // No validation errors. Signature is good. + return nil +} + +// Implements the Sign method from SigningMethod for this signing method. +// Key must be []byte +func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) (string, error) { + if keyBytes, ok := key.([]byte); ok { + if !m.Hash.Available() { + return "", ErrHashUnavailable + } + + hasher := hmac.New(m.Hash.New, keyBytes) + hasher.Write([]byte(signingString)) + + return EncodeSegment(hasher.Sum(nil)), nil + } + + return "", ErrInvalidKeyType +} diff --git a/vendor/github.com/dgrijalva/jwt-go/map_claims.go b/vendor/github.com/dgrijalva/jwt-go/map_claims.go new file mode 100644 index 0000000..291213c --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/map_claims.go @@ -0,0 +1,94 @@ +package jwt + +import ( + "encoding/json" + "errors" + // "fmt" +) + +// Claims type that uses the map[string]interface{} for JSON decoding +// This is the default claims type if you don't supply one +type MapClaims map[string]interface{} + +// Compares the aud claim against cmp. 
+// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyAudience(cmp string, req bool) bool { + aud, _ := m["aud"].(string) + return verifyAud(aud, cmp, req) +} + +// Compares the exp claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyExpiresAt(cmp int64, req bool) bool { + switch exp := m["exp"].(type) { + case float64: + return verifyExp(int64(exp), cmp, req) + case json.Number: + v, _ := exp.Int64() + return verifyExp(v, cmp, req) + } + return req == false +} + +// Compares the iat claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyIssuedAt(cmp int64, req bool) bool { + switch iat := m["iat"].(type) { + case float64: + return verifyIat(int64(iat), cmp, req) + case json.Number: + v, _ := iat.Int64() + return verifyIat(v, cmp, req) + } + return req == false +} + +// Compares the iss claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyIssuer(cmp string, req bool) bool { + iss, _ := m["iss"].(string) + return verifyIss(iss, cmp, req) +} + +// Compares the nbf claim against cmp. +// If required is false, this method will return true if the value matches or is unset +func (m MapClaims) VerifyNotBefore(cmp int64, req bool) bool { + switch nbf := m["nbf"].(type) { + case float64: + return verifyNbf(int64(nbf), cmp, req) + case json.Number: + v, _ := nbf.Int64() + return verifyNbf(v, cmp, req) + } + return req == false +} + +// Validates time based claims "exp, iat, nbf". +// There is no accounting for clock skew. +// As well, if any of the above claims are not in the token, it will still +// be considered a valid claim. 
+func (m MapClaims) Valid() error { + vErr := new(ValidationError) + now := TimeFunc().Unix() + + if m.VerifyExpiresAt(now, false) == false { + vErr.Inner = errors.New("Token is expired") + vErr.Errors |= ValidationErrorExpired + } + + if m.VerifyIssuedAt(now, false) == false { + vErr.Inner = errors.New("Token used before issued") + vErr.Errors |= ValidationErrorIssuedAt + } + + if m.VerifyNotBefore(now, false) == false { + vErr.Inner = errors.New("Token is not valid yet") + vErr.Errors |= ValidationErrorNotValidYet + } + + if vErr.valid() { + return nil + } + + return vErr +} diff --git a/vendor/github.com/dgrijalva/jwt-go/none.go b/vendor/github.com/dgrijalva/jwt-go/none.go new file mode 100644 index 0000000..f04d189 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/none.go @@ -0,0 +1,52 @@ +package jwt + +// Implements the none signing method. This is required by the spec +// but you probably should never use it. +var SigningMethodNone *signingMethodNone + +const UnsafeAllowNoneSignatureType unsafeNoneMagicConstant = "none signing method allowed" + +var NoneSignatureTypeDisallowedError error + +type signingMethodNone struct{} +type unsafeNoneMagicConstant string + +func init() { + SigningMethodNone = &signingMethodNone{} + NoneSignatureTypeDisallowedError = NewValidationError("'none' signature type is not allowed", ValidationErrorSignatureInvalid) + + RegisterSigningMethod(SigningMethodNone.Alg(), func() SigningMethod { + return SigningMethodNone + }) +} + +func (m *signingMethodNone) Alg() string { + return "none" +} + +// Only allow 'none' alg type if UnsafeAllowNoneSignatureType is specified as the key +func (m *signingMethodNone) Verify(signingString, signature string, key interface{}) (err error) { + // Key must be UnsafeAllowNoneSignatureType to prevent accidentally + // accepting 'none' signing method + if _, ok := key.(unsafeNoneMagicConstant); !ok { + return NoneSignatureTypeDisallowedError + } + // If signing method is none, signature must be an 
empty string + if signature != "" { + return NewValidationError( + "'none' signing method with non-empty signature", + ValidationErrorSignatureInvalid, + ) + } + + // Accept 'none' signing method. + return nil +} + +// Only allow 'none' signing if UnsafeAllowNoneSignatureType is specified as the key +func (m *signingMethodNone) Sign(signingString string, key interface{}) (string, error) { + if _, ok := key.(unsafeNoneMagicConstant); ok { + return "", nil + } + return "", NoneSignatureTypeDisallowedError +} diff --git a/vendor/github.com/dgrijalva/jwt-go/parser.go b/vendor/github.com/dgrijalva/jwt-go/parser.go new file mode 100644 index 0000000..d6901d9 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/parser.go @@ -0,0 +1,148 @@ +package jwt + +import ( + "bytes" + "encoding/json" + "fmt" + "strings" +) + +type Parser struct { + ValidMethods []string // If populated, only these methods will be considered valid + UseJSONNumber bool // Use JSON Number format in JSON decoder + SkipClaimsValidation bool // Skip claims validation during token parsing +} + +// Parse, validate, and return a token. +// keyFunc will receive the parsed token and should return the key for validating. 
+// If everything is kosher, err will be nil +func (p *Parser) Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { + return p.ParseWithClaims(tokenString, MapClaims{}, keyFunc) +} + +func (p *Parser) ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc) (*Token, error) { + token, parts, err := p.ParseUnverified(tokenString, claims) + if err != nil { + return token, err + } + + // Verify signing method is in the required set + if p.ValidMethods != nil { + var signingMethodValid = false + var alg = token.Method.Alg() + for _, m := range p.ValidMethods { + if m == alg { + signingMethodValid = true + break + } + } + if !signingMethodValid { + // signing method is not in the listed set + return token, NewValidationError(fmt.Sprintf("signing method %v is invalid", alg), ValidationErrorSignatureInvalid) + } + } + + // Lookup key + var key interface{} + if keyFunc == nil { + // keyFunc was not provided. short circuiting validation + return token, NewValidationError("no Keyfunc was provided.", ValidationErrorUnverifiable) + } + if key, err = keyFunc(token); err != nil { + // keyFunc returned an error + if ve, ok := err.(*ValidationError); ok { + return token, ve + } + return token, &ValidationError{Inner: err, Errors: ValidationErrorUnverifiable} + } + + vErr := &ValidationError{} + + // Validate Claims + if !p.SkipClaimsValidation { + if err := token.Claims.Valid(); err != nil { + + // If the Claims Valid returned an error, check if it is a validation error, + // If it was another error type, create a ValidationError with a generic ClaimsInvalid flag set + if e, ok := err.(*ValidationError); !ok { + vErr = &ValidationError{Inner: err, Errors: ValidationErrorClaimsInvalid} + } else { + vErr = e + } + } + } + + // Perform validation + token.Signature = parts[2] + if err = token.Method.Verify(strings.Join(parts[0:2], "."), token.Signature, key); err != nil { + vErr.Inner = err + vErr.Errors |= ValidationErrorSignatureInvalid + } + + if vErr.valid() { + 
token.Valid = true + return token, nil + } + + return token, vErr +} + +// WARNING: Don't use this method unless you know what you're doing +// +// This method parses the token but doesn't validate the signature. It's only +// ever useful in cases where you know the signature is valid (because it has +// been checked previously in the stack) and you want to extract values from +// it. +func (p *Parser) ParseUnverified(tokenString string, claims Claims) (token *Token, parts []string, err error) { + parts = strings.Split(tokenString, ".") + if len(parts) != 3 { + return nil, parts, NewValidationError("token contains an invalid number of segments", ValidationErrorMalformed) + } + + token = &Token{Raw: tokenString} + + // parse Header + var headerBytes []byte + if headerBytes, err = DecodeSegment(parts[0]); err != nil { + if strings.HasPrefix(strings.ToLower(tokenString), "bearer ") { + return token, parts, NewValidationError("tokenstring should not contain 'bearer '", ValidationErrorMalformed) + } + return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} + } + if err = json.Unmarshal(headerBytes, &token.Header); err != nil { + return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} + } + + // parse Claims + var claimBytes []byte + token.Claims = claims + + if claimBytes, err = DecodeSegment(parts[1]); err != nil { + return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} + } + dec := json.NewDecoder(bytes.NewBuffer(claimBytes)) + if p.UseJSONNumber { + dec.UseNumber() + } + // JSON Decode. 
Special case for map type to avoid weird pointer behavior + if c, ok := token.Claims.(MapClaims); ok { + err = dec.Decode(&c) + } else { + err = dec.Decode(&claims) + } + // Handle decode error + if err != nil { + return token, parts, &ValidationError{Inner: err, Errors: ValidationErrorMalformed} + } + + // Lookup signature method + if method, ok := token.Header["alg"].(string); ok { + if token.Method = GetSigningMethod(method); token.Method == nil { + return token, parts, NewValidationError("signing method (alg) is unavailable.", ValidationErrorUnverifiable) + } + } else { + return token, parts, NewValidationError("signing method (alg) is unspecified.", ValidationErrorUnverifiable) + } + + return token, parts, nil +} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa.go b/vendor/github.com/dgrijalva/jwt-go/rsa.go new file mode 100644 index 0000000..e4caf1c --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/rsa.go @@ -0,0 +1,101 @@ +package jwt + +import ( + "crypto" + "crypto/rand" + "crypto/rsa" +) + +// Implements the RSA family of signing methods signing methods +// Expects *rsa.PrivateKey for signing and *rsa.PublicKey for validation +type SigningMethodRSA struct { + Name string + Hash crypto.Hash +} + +// Specific instances for RS256 and company +var ( + SigningMethodRS256 *SigningMethodRSA + SigningMethodRS384 *SigningMethodRSA + SigningMethodRS512 *SigningMethodRSA +) + +func init() { + // RS256 + SigningMethodRS256 = &SigningMethodRSA{"RS256", crypto.SHA256} + RegisterSigningMethod(SigningMethodRS256.Alg(), func() SigningMethod { + return SigningMethodRS256 + }) + + // RS384 + SigningMethodRS384 = &SigningMethodRSA{"RS384", crypto.SHA384} + RegisterSigningMethod(SigningMethodRS384.Alg(), func() SigningMethod { + return SigningMethodRS384 + }) + + // RS512 + SigningMethodRS512 = &SigningMethodRSA{"RS512", crypto.SHA512} + RegisterSigningMethod(SigningMethodRS512.Alg(), func() SigningMethod { + return SigningMethodRS512 + }) +} + +func (m 
*SigningMethodRSA) Alg() string { + return m.Name +} + +// Implements the Verify method from SigningMethod +// For this signing method, must be an *rsa.PublicKey structure. +func (m *SigningMethodRSA) Verify(signingString, signature string, key interface{}) error { + var err error + + // Decode the signature + var sig []byte + if sig, err = DecodeSegment(signature); err != nil { + return err + } + + var rsaKey *rsa.PublicKey + var ok bool + + if rsaKey, ok = key.(*rsa.PublicKey); !ok { + return ErrInvalidKeyType + } + + // Create hasher + if !m.Hash.Available() { + return ErrHashUnavailable + } + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + // Verify the signature + return rsa.VerifyPKCS1v15(rsaKey, m.Hash, hasher.Sum(nil), sig) +} + +// Implements the Sign method from SigningMethod +// For this signing method, must be an *rsa.PrivateKey structure. +func (m *SigningMethodRSA) Sign(signingString string, key interface{}) (string, error) { + var rsaKey *rsa.PrivateKey + var ok bool + + // Validate type of key + if rsaKey, ok = key.(*rsa.PrivateKey); !ok { + return "", ErrInvalidKey + } + + // Create the hasher + if !m.Hash.Available() { + return "", ErrHashUnavailable + } + + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + // Sign the string and return the encoded bytes + if sigBytes, err := rsa.SignPKCS1v15(rand.Reader, rsaKey, m.Hash, hasher.Sum(nil)); err == nil { + return EncodeSegment(sigBytes), nil + } else { + return "", err + } +} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go b/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go new file mode 100644 index 0000000..10ee9db --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/rsa_pss.go @@ -0,0 +1,126 @@ +// +build go1.4 + +package jwt + +import ( + "crypto" + "crypto/rand" + "crypto/rsa" +) + +// Implements the RSAPSS family of signing methods signing methods +type SigningMethodRSAPSS struct { + *SigningMethodRSA + Options *rsa.PSSOptions +} + +// Specific instances 
for RS/PS and company +var ( + SigningMethodPS256 *SigningMethodRSAPSS + SigningMethodPS384 *SigningMethodRSAPSS + SigningMethodPS512 *SigningMethodRSAPSS +) + +func init() { + // PS256 + SigningMethodPS256 = &SigningMethodRSAPSS{ + &SigningMethodRSA{ + Name: "PS256", + Hash: crypto.SHA256, + }, + &rsa.PSSOptions{ + SaltLength: rsa.PSSSaltLengthAuto, + Hash: crypto.SHA256, + }, + } + RegisterSigningMethod(SigningMethodPS256.Alg(), func() SigningMethod { + return SigningMethodPS256 + }) + + // PS384 + SigningMethodPS384 = &SigningMethodRSAPSS{ + &SigningMethodRSA{ + Name: "PS384", + Hash: crypto.SHA384, + }, + &rsa.PSSOptions{ + SaltLength: rsa.PSSSaltLengthAuto, + Hash: crypto.SHA384, + }, + } + RegisterSigningMethod(SigningMethodPS384.Alg(), func() SigningMethod { + return SigningMethodPS384 + }) + + // PS512 + SigningMethodPS512 = &SigningMethodRSAPSS{ + &SigningMethodRSA{ + Name: "PS512", + Hash: crypto.SHA512, + }, + &rsa.PSSOptions{ + SaltLength: rsa.PSSSaltLengthAuto, + Hash: crypto.SHA512, + }, + } + RegisterSigningMethod(SigningMethodPS512.Alg(), func() SigningMethod { + return SigningMethodPS512 + }) +} + +// Implements the Verify method from SigningMethod +// For this verify method, key must be an rsa.PublicKey struct +func (m *SigningMethodRSAPSS) Verify(signingString, signature string, key interface{}) error { + var err error + + // Decode the signature + var sig []byte + if sig, err = DecodeSegment(signature); err != nil { + return err + } + + var rsaKey *rsa.PublicKey + switch k := key.(type) { + case *rsa.PublicKey: + rsaKey = k + default: + return ErrInvalidKey + } + + // Create hasher + if !m.Hash.Available() { + return ErrHashUnavailable + } + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + return rsa.VerifyPSS(rsaKey, m.Hash, hasher.Sum(nil), sig, m.Options) +} + +// Implements the Sign method from SigningMethod +// For this signing method, key must be an rsa.PrivateKey struct +func (m *SigningMethodRSAPSS) Sign(signingString 
string, key interface{}) (string, error) { + var rsaKey *rsa.PrivateKey + + switch k := key.(type) { + case *rsa.PrivateKey: + rsaKey = k + default: + return "", ErrInvalidKeyType + } + + // Create the hasher + if !m.Hash.Available() { + return "", ErrHashUnavailable + } + + hasher := m.Hash.New() + hasher.Write([]byte(signingString)) + + // Sign the string and return the encoded bytes + if sigBytes, err := rsa.SignPSS(rand.Reader, rsaKey, m.Hash, hasher.Sum(nil), m.Options); err == nil { + return EncodeSegment(sigBytes), nil + } else { + return "", err + } +} diff --git a/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go b/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go new file mode 100644 index 0000000..a5ababf --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/rsa_utils.go @@ -0,0 +1,101 @@ +package jwt + +import ( + "crypto/rsa" + "crypto/x509" + "encoding/pem" + "errors" +) + +var ( + ErrKeyMustBePEMEncoded = errors.New("Invalid Key: Key must be PEM encoded PKCS1 or PKCS8 private key") + ErrNotRSAPrivateKey = errors.New("Key is not a valid RSA private key") + ErrNotRSAPublicKey = errors.New("Key is not a valid RSA public key") +) + +// Parse PEM encoded PKCS1 or PKCS8 private key +func ParseRSAPrivateKeyFromPEM(key []byte) (*rsa.PrivateKey, error) { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + return nil, ErrKeyMustBePEMEncoded + } + + var parsedKey interface{} + if parsedKey, err = x509.ParsePKCS1PrivateKey(block.Bytes); err != nil { + if parsedKey, err = x509.ParsePKCS8PrivateKey(block.Bytes); err != nil { + return nil, err + } + } + + var pkey *rsa.PrivateKey + var ok bool + if pkey, ok = parsedKey.(*rsa.PrivateKey); !ok { + return nil, ErrNotRSAPrivateKey + } + + return pkey, nil +} + +// Parse PEM encoded PKCS1 or PKCS8 private key protected with password +func ParseRSAPrivateKeyFromPEMWithPassword(key []byte, password string) (*rsa.PrivateKey, error) { + var err error + + // Parse PEM 
block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + return nil, ErrKeyMustBePEMEncoded + } + + var parsedKey interface{} + + var blockDecrypted []byte + if blockDecrypted, err = x509.DecryptPEMBlock(block, []byte(password)); err != nil { + return nil, err + } + + if parsedKey, err = x509.ParsePKCS1PrivateKey(blockDecrypted); err != nil { + if parsedKey, err = x509.ParsePKCS8PrivateKey(blockDecrypted); err != nil { + return nil, err + } + } + + var pkey *rsa.PrivateKey + var ok bool + if pkey, ok = parsedKey.(*rsa.PrivateKey); !ok { + return nil, ErrNotRSAPrivateKey + } + + return pkey, nil +} + +// Parse PEM encoded PKCS1 or PKCS8 public key +func ParseRSAPublicKeyFromPEM(key []byte) (*rsa.PublicKey, error) { + var err error + + // Parse PEM block + var block *pem.Block + if block, _ = pem.Decode(key); block == nil { + return nil, ErrKeyMustBePEMEncoded + } + + // Parse the key + var parsedKey interface{} + if parsedKey, err = x509.ParsePKIXPublicKey(block.Bytes); err != nil { + if cert, err := x509.ParseCertificate(block.Bytes); err == nil { + parsedKey = cert.PublicKey + } else { + return nil, err + } + } + + var pkey *rsa.PublicKey + var ok bool + if pkey, ok = parsedKey.(*rsa.PublicKey); !ok { + return nil, ErrNotRSAPublicKey + } + + return pkey, nil +} diff --git a/vendor/github.com/dgrijalva/jwt-go/signing_method.go b/vendor/github.com/dgrijalva/jwt-go/signing_method.go new file mode 100644 index 0000000..ed1f212 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/signing_method.go @@ -0,0 +1,35 @@ +package jwt + +import ( + "sync" +) + +var signingMethods = map[string]func() SigningMethod{} +var signingMethodLock = new(sync.RWMutex) + +// Implement SigningMethod to add new methods for signing or verifying tokens. 
+type SigningMethod interface { + Verify(signingString, signature string, key interface{}) error // Returns nil if signature is valid + Sign(signingString string, key interface{}) (string, error) // Returns encoded signature or error + Alg() string // returns the alg identifier for this method (example: 'HS256') +} + +// Register the "alg" name and a factory function for signing method. +// This is typically done during init() in the method's implementation +func RegisterSigningMethod(alg string, f func() SigningMethod) { + signingMethodLock.Lock() + defer signingMethodLock.Unlock() + + signingMethods[alg] = f +} + +// Get a signing method from an "alg" string +func GetSigningMethod(alg string) (method SigningMethod) { + signingMethodLock.RLock() + defer signingMethodLock.RUnlock() + + if methodF, ok := signingMethods[alg]; ok { + method = methodF() + } + return +} diff --git a/vendor/github.com/dgrijalva/jwt-go/token.go b/vendor/github.com/dgrijalva/jwt-go/token.go new file mode 100644 index 0000000..d637e08 --- /dev/null +++ b/vendor/github.com/dgrijalva/jwt-go/token.go @@ -0,0 +1,108 @@ +package jwt + +import ( + "encoding/base64" + "encoding/json" + "strings" + "time" +) + +// TimeFunc provides the current time when parsing token to validate "exp" claim (expiration time). +// You can override it to use another time value. This is useful for testing or if your +// server uses a different time zone than your tokens. +var TimeFunc = time.Now + +// Parse methods use this callback function to supply +// the key for verification. The function receives the parsed, +// but unverified Token. This allows you to use properties in the +// Header of the token (such as `kid`) to identify which key to use. +type Keyfunc func(*Token) (interface{}, error) + +// A JWT Token. Different fields will be used depending on whether you're +// creating or parsing/verifying a token. +type Token struct { + Raw string // The raw token. 
Populated when you Parse a token + Method SigningMethod // The signing method used or to be used + Header map[string]interface{} // The first segment of the token + Claims Claims // The second segment of the token + Signature string // The third segment of the token. Populated when you Parse a token + Valid bool // Is the token valid? Populated when you Parse/Verify a token +} + +// Create a new Token. Takes a signing method +func New(method SigningMethod) *Token { + return NewWithClaims(method, MapClaims{}) +} + +func NewWithClaims(method SigningMethod, claims Claims) *Token { + return &Token{ + Header: map[string]interface{}{ + "typ": "JWT", + "alg": method.Alg(), + }, + Claims: claims, + Method: method, + } +} + +// Get the complete, signed token +func (t *Token) SignedString(key interface{}) (string, error) { + var sig, sstr string + var err error + if sstr, err = t.SigningString(); err != nil { + return "", err + } + if sig, err = t.Method.Sign(sstr, key); err != nil { + return "", err + } + return strings.Join([]string{sstr, sig}, "."), nil +} + +// Generate the signing string. This is the +// most expensive part of the whole deal. Unless you +// need this for something special, just go straight for +// the SignedString. +func (t *Token) SigningString() (string, error) { + var err error + parts := make([]string, 2) + for i, _ := range parts { + var jsonValue []byte + if i == 0 { + if jsonValue, err = json.Marshal(t.Header); err != nil { + return "", err + } + } else { + if jsonValue, err = json.Marshal(t.Claims); err != nil { + return "", err + } + } + + parts[i] = EncodeSegment(jsonValue) + } + return strings.Join(parts, "."), nil +} + +// Parse, validate, and return a token. +// keyFunc will receive the parsed token and should return the key for validating. 
+// If everything is kosher, err will be nil +func Parse(tokenString string, keyFunc Keyfunc) (*Token, error) { + return new(Parser).Parse(tokenString, keyFunc) +} + +func ParseWithClaims(tokenString string, claims Claims, keyFunc Keyfunc) (*Token, error) { + return new(Parser).ParseWithClaims(tokenString, claims, keyFunc) +} + +// Encode JWT specific base64url encoding with padding stripped +func EncodeSegment(seg []byte) string { + return strings.TrimRight(base64.URLEncoding.EncodeToString(seg), "=") +} + +// Decode JWT specific base64url encoding with padding stripped +func DecodeSegment(seg string) ([]byte, error) { + if l := len(seg) % 4; l > 0 { + seg += strings.Repeat("=", 4-l) + } + + return base64.URLEncoding.DecodeString(seg) +} diff --git a/vendor/github.com/disintegration/gift/.travis.yml b/vendor/github.com/disintegration/gift/.travis.yml new file mode 100644 index 0000000..50c1f10 --- /dev/null +++ b/vendor/github.com/disintegration/gift/.travis.yml @@ -0,0 +1,13 @@ +language: go + +go: + - 1.10.x + - 1.11.x + - 1.12.x + +before_install: + - go get github.com/mattn/goveralls + +script: + - go test -v -race -cover + - $GOPATH/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/disintegration/gift/LICENSE b/vendor/github.com/disintegration/gift/LICENSE new file mode 100644 index 0000000..d9b9c2b --- /dev/null +++ b/vendor/github.com/disintegration/gift/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014-2018 Grigory Dryapak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be 
included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/disintegration/gift/README.md b/vendor/github.com/disintegration/gift/README.md new file mode 100644 index 0000000..300b1df --- /dev/null +++ b/vendor/github.com/disintegration/gift/README.md @@ -0,0 +1,255 @@ +# GO IMAGE FILTERING TOOLKIT (GIFT) + +[![GoDoc](https://godoc.org/github.com/disintegration/gift?status.svg)](https://godoc.org/github.com/disintegration/gift) +[![Build Status](https://travis-ci.org/disintegration/gift.svg?branch=master)](https://travis-ci.org/disintegration/gift) +[![Coverage Status](https://coveralls.io/repos/github/disintegration/gift/badge.svg?branch=master)](https://coveralls.io/github/disintegration/gift?branch=master) +[![Go Report Card](https://goreportcard.com/badge/github.com/disintegration/gift)](https://goreportcard.com/report/github.com/disintegration/gift) + + +*Package gift provides a set of useful image processing filters.* + +Pure Go. No external dependencies outside of the Go standard library. + + +### INSTALLATION / UPDATING + + go get -u github.com/disintegration/gift + + +### DOCUMENTATION + +http://godoc.org/github.com/disintegration/gift + + +### QUICK START + +```go +// 1. Create a new filter list and add some filters. +g := gift.New( + gift.Resize(800, 0, gift.LanczosResampling), + gift.UnsharpMask(1, 1, 0), +) + +// 2. Create a new image of the corresponding size. +// dst is a new target image, src is the original image. 
+dst := image.NewRGBA(g.Bounds(src.Bounds())) + +// 3. Use the Draw func to apply the filters to src and store the result in dst. +g.Draw(dst, src) +``` + +### USAGE + +To create a sequence of filters, the `New` function is used: +```go +g := gift.New( + gift.Grayscale(), + gift.Contrast(10), +) +``` +Filters also can be added using the `Add` method: +```go +g.Add(GaussianBlur(2)) +``` + +The `Bounds` method takes the bounds of the source image and returns appropriate bounds for the destination image to fit the result (for example, after using `Resize` or `Rotate` filters). + +```go +dst := image.NewRGBA(g.Bounds(src.Bounds())) +``` + +There are two methods available to apply these filters to an image: + +- `Draw` applies all the added filters to the src image and outputs the result to the dst image starting from the top-left corner (Min point). +```go +g.Draw(dst, src) +``` + +- `DrawAt` provides more control. It outputs the filtered src image to the dst image at the specified position using the specified image composition operator. This example is equivalent to the previous: +```go +g.DrawAt(dst, src, dst.Bounds().Min, gift.CopyOperator) +``` + +Two image composition operators are supported by now: +- `CopyOperator` - Replaces pixels of the dst image with pixels of the filtered src image. This mode is used by the Draw method. +- `OverOperator` - Places the filtered src image on top of the dst image. This mode makes sence if the filtered src image has transparent areas. + +Empty filter list can be used to create a copy of an image or to paste one image to another. For example: +```go +// Create a new image with dimensions of the bgImage. +dstImage := image.NewRGBA(bgImage.Bounds()) +// Copy the bgImage to the dstImage. +gift.New().Draw(dstImage, bgImage) +// Draw the fgImage over the dstImage at the (100, 100) position. 
+gift.New().DrawAt(dstImage, fgImage, image.Pt(100, 100), gift.OverOperator) +``` + + +### SUPPORTED FILTERS + ++ Transformations + + - Crop(rect image.Rectangle) + - CropToSize(width, height int, anchor Anchor) + - FlipHorizontal() + - FlipVertical() + - Resize(width, height int, resampling Resampling) + - ResizeToFill(width, height int, resampling Resampling, anchor Anchor) + - ResizeToFit(width, height int, resampling Resampling) + - Rotate(angle float32, backgroundColor color.Color, interpolation Interpolation) + - Rotate180() + - Rotate270() + - Rotate90() + - Transpose() + - Transverse() + ++ Adjustments & effects + + - Brightness(percentage float32) + - ColorBalance(percentageRed, percentageGreen, percentageBlue float32) + - ColorFunc(fn func(r0, g0, b0, a0 float32) (r, g, b, a float32)) + - Colorize(hue, saturation, percentage float32) + - ColorspaceLinearToSRGB() + - ColorspaceSRGBToLinear() + - Contrast(percentage float32) + - Convolution(kernel []float32, normalize, alpha, abs bool, delta float32) + - Gamma(gamma float32) + - GaussianBlur(sigma float32) + - Grayscale() + - Hue(shift float32) + - Invert() + - Maximum(ksize int, disk bool) + - Mean(ksize int, disk bool) + - Median(ksize int, disk bool) + - Minimum(ksize int, disk bool) + - Pixelate(size int) + - Saturation(percentage float32) + - Sepia(percentage float32) + - Sigmoid(midpoint, factor float32) + - Sobel() + - Threshold(percentage float32) + - UnsharpMask(sigma, amount, threshold float32) + + +### FILTER EXAMPLES + +The original image: + +![](testdata/src.png) + +Resulting images after applying some of the filters: + + name / result | name / result | name / result | name / result +--------------------------------------------|--------------------------------------------|--------------------------------------------|-------------------------------------------- +resize | crop_to_size | rotate_180 | rotate_30 +![](testdata/dst_resize.png) | ![](testdata/dst_crop_to_size.png) | 
![](testdata/dst_rotate_180.png) | ![](testdata/dst_rotate_30.png) +brightness_increase | brightness_decrease | contrast_increase | contrast_decrease +![](testdata/dst_brightness_increase.png) | ![](testdata/dst_brightness_decrease.png) | ![](testdata/dst_contrast_increase.png) | ![](testdata/dst_contrast_decrease.png) +saturation_increase | saturation_decrease | gamma_1.5 | gamma_0.5 +![](testdata/dst_saturation_increase.png) | ![](testdata/dst_saturation_decrease.png) | ![](testdata/dst_gamma_1.5.png) | ![](testdata/dst_gamma_0.5.png) +gaussian_blur | unsharp_mask | sigmoid | pixelate +![](testdata/dst_gaussian_blur.png) | ![](testdata/dst_unsharp_mask.png) | ![](testdata/dst_sigmoid.png) | ![](testdata/dst_pixelate.png) +colorize | grayscale | sepia | invert +![](testdata/dst_colorize.png) | ![](testdata/dst_grayscale.png) | ![](testdata/dst_sepia.png) | ![](testdata/dst_invert.png) +mean | median | minimum | maximum +![](testdata/dst_mean.png) | ![](testdata/dst_median.png) | ![](testdata/dst_minimum.png) | ![](testdata/dst_maximum.png) +hue_rotate | color_balance | color_func | convolution_emboss +![](testdata/dst_hue_rotate.png) | ![](testdata/dst_color_balance.png) | ![](testdata/dst_color_func.png) | ![](testdata/dst_convolution_emboss.png) + +Here's the code that produces the above images: + +```go +package main + +import ( + "image" + "image/color" + "image/png" + "log" + "os" + + "github.com/disintegration/gift" +) + +func main() { + src := loadImage("testdata/src.png") + + filters := map[string]gift.Filter{ + "resize": gift.Resize(100, 0, gift.LanczosResampling), + "crop_to_size": gift.CropToSize(100, 100, gift.LeftAnchor), + "rotate_180": gift.Rotate180(), + "rotate_30": gift.Rotate(30, color.Transparent, gift.CubicInterpolation), + "brightness_increase": gift.Brightness(30), + "brightness_decrease": gift.Brightness(-30), + "contrast_increase": gift.Contrast(30), + "contrast_decrease": gift.Contrast(-30), + "saturation_increase": gift.Saturation(50), + 
"saturation_decrease": gift.Saturation(-50), + "gamma_1.5": gift.Gamma(1.5), + "gamma_0.5": gift.Gamma(0.5), + "gaussian_blur": gift.GaussianBlur(1), + "unsharp_mask": gift.UnsharpMask(1, 1, 0), + "sigmoid": gift.Sigmoid(0.5, 7), + "pixelate": gift.Pixelate(5), + "colorize": gift.Colorize(240, 50, 100), + "grayscale": gift.Grayscale(), + "sepia": gift.Sepia(100), + "invert": gift.Invert(), + "mean": gift.Mean(5, true), + "median": gift.Median(5, true), + "minimum": gift.Minimum(5, true), + "maximum": gift.Maximum(5, true), + "hue_rotate": gift.Hue(45), + "color_balance": gift.ColorBalance(10, -10, -10), + "color_func": gift.ColorFunc( + func(r0, g0, b0, a0 float32) (r, g, b, a float32) { + r = 1 - r0 // invert the red channel + g = g0 + 0.1 // shift the green channel by 0.1 + b = 0 // set the blue channel to 0 + a = a0 // preserve the alpha channel + return r, g, b, a + }, + ), + "convolution_emboss": gift.Convolution( + []float32{ + -1, -1, 0, + -1, 1, 1, + 0, 1, 1, + }, + false, false, false, 0.0, + ), + } + + for name, filter := range filters { + g := gift.New(filter) + dst := image.NewNRGBA(g.Bounds(src.Bounds())) + g.Draw(dst, src) + saveImage("testdata/dst_"+name+".png", dst) + } +} + +func loadImage(filename string) image.Image { + f, err := os.Open(filename) + if err != nil { + log.Fatalf("os.Open failed: %v", err) + } + defer f.Close() + img, _, err := image.Decode(f) + if err != nil { + log.Fatalf("image.Decode failed: %v", err) + } + return img +} + +func saveImage(filename string, img image.Image) { + f, err := os.Create(filename) + if err != nil { + log.Fatalf("os.Create failed: %v", err) + } + defer f.Close() + err = png.Encode(f, img) + if err != nil { + log.Fatalf("png.Encode failed: %v", err) + } +} +``` diff --git a/vendor/github.com/disintegration/gift/colors.go b/vendor/github.com/disintegration/gift/colors.go new file mode 100644 index 0000000..c107619 --- /dev/null +++ b/vendor/github.com/disintegration/gift/colors.go @@ -0,0 +1,511 @@ 
+package gift + +import ( + "image" + "image/draw" + "math" +) + +func prepareLut(lutSize int, fn func(float32) float32) []float32 { + lut := make([]float32, lutSize) + q := 1 / float32(lutSize-1) + for v := 0; v < lutSize; v++ { + u := float32(v) * q + lut[v] = fn(u) + } + return lut +} + +func getFromLut(lut []float32, u float32) float32 { + v := int(u*float32(len(lut)-1) + 0.5) + return lut[v] +} + +type colorchanFilter struct { + fn func(float32) float32 + lut bool +} + +func (p *colorchanFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *colorchanFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + var useLut bool + var lut []float32 + + useLut = false + if p.lut { + var lutSize int + + it := pixGetter.it + if it == itNRGBA || it == itRGBA || it == itGray || it == itYCbCr { + lutSize = 0xff + 1 + } else { + lutSize = 0xffff + 1 + } + + numCalculations := srcb.Dx() * srcb.Dy() * 3 + if numCalculations > lutSize*2 { + useLut = true + lut = prepareLut(lutSize, p.fn) + } + } + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := srcb.Min.X; x < srcb.Max.X; x++ { + px := pixGetter.getPixel(x, y) + if useLut { + px.r = getFromLut(lut, px.r) + px.g = getFromLut(lut, px.g) + px.b = getFromLut(lut, px.b) + } else { + px.r = p.fn(px.r) + px.g = p.fn(px.g) + px.b = p.fn(px.b) + } + pixSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, px) + } + } + }) +} + +// Invert creates a filter that negates the colors of an image. 
+func Invert() Filter { + return &colorchanFilter{ + fn: func(x float32) float32 { + return 1 - x + }, + lut: false, + } +} + +// ColorspaceSRGBToLinear creates a filter that converts the colors of an image from sRGB to linear RGB. +func ColorspaceSRGBToLinear() Filter { + return &colorchanFilter{ + fn: func(x float32) float32 { + if x <= 0.04045 { + return x / 12.92 + } + return float32(math.Pow(float64((x+0.055)/1.055), 2.4)) + }, + lut: true, + } +} + +// ColorspaceLinearToSRGB creates a filter that converts the colors of an image from linear RGB to sRGB. +func ColorspaceLinearToSRGB() Filter { + return &colorchanFilter{ + fn: func(x float32) float32 { + if x <= 0.0031308 { + return x * 12.92 + } + return float32(1.055*math.Pow(float64(x), 1/2.4) - 0.055) + }, + lut: true, + } +} + +// Gamma creates a filter that performs a gamma correction on an image. +// The gamma parameter must be positive. Gamma = 1 gives the original image. +// Gamma less than 1 darkens the image and gamma greater than 1 lightens it. +func Gamma(gamma float32) Filter { + e := 1 / maxf32(gamma, 1.0e-5) + return &colorchanFilter{ + fn: func(x float32) float32 { + return powf32(x, e) + }, + lut: true, + } +} + +func sigmoid(a, b, x float32) float32 { + return 1 / (1 + expf32(b*(a-x))) +} + +// Sigmoid creates a filter that changes the contrast of an image using a sigmoidal function and returns the adjusted image. +// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail. +// The midpoint parameter is the midpoint of contrast that must be between 0 and 1, typically 0.5. +// The factor parameter indicates how much to increase or decrease the contrast, typically in range (-10, 10). +// If the factor parameter is positive the image contrast is increased otherwise the contrast is decreased. 
+// +// Example: +// +// g := gift.New( +// gift.Sigmoid(0.5, 5), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Sigmoid(midpoint, factor float32) Filter { + a := minf32(maxf32(midpoint, 0), 1) + b := absf32(factor) + sig0 := sigmoid(a, b, 0) + sig1 := sigmoid(a, b, 1) + e := float32(1.0e-5) + + return &colorchanFilter{ + fn: func(x float32) float32 { + if factor == 0 { + return x + } else if factor > 0 { + sig := sigmoid(a, b, x) + return (sig - sig0) / (sig1 - sig0) + } else { + arg := minf32(maxf32((sig1-sig0)*x+sig0, e), 1-e) + return a - logf32(1/arg-1)/b + } + }, + lut: true, + } +} + +// Contrast creates a filter that changes the contrast of an image. +// The percentage parameter must be in range (-100, 100). The percentage = 0 gives the original image. +// The percentage = -100 gives solid grey image. The percentage = 100 gives an overcontrasted image. +func Contrast(percentage float32) Filter { + if percentage == 0 { + return ©imageFilter{} + } + + p := 1 + minf32(maxf32(percentage, -100), 100)/100 + + return &colorchanFilter{ + fn: func(x float32) float32 { + if 0 <= p && p <= 1 { + return 0.5 + (x-0.5)*p + } else if 1 < p && p < 2 { + return 0.5 + (x-0.5)*(1/(2.0-p)) + } else { + if x < 0.5 { + return 0 + } + return 1 + } + }, + lut: false, + } +} + +// Brightness creates a filter that changes the brightness of an image. +// The percentage parameter must be in range (-100, 100). The percentage = 0 gives the original image. +// The percentage = -100 gives solid black image. The percentage = 100 gives solid white image. 
+func Brightness(percentage float32) Filter { + if percentage == 0 { + return ©imageFilter{} + } + + shift := minf32(maxf32(percentage, -100), 100) / 100 + + return &colorchanFilter{ + fn: func(x float32) float32 { + return x + shift + }, + lut: false, + } +} + +type colorFilter struct { + fn func(pixel) pixel +} + +func (p *colorFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *colorFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := srcb.Min.X; x < srcb.Max.X; x++ { + px := pixGetter.getPixel(x, y) + pixSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, p.fn(px)) + } + } + }) +} + +// Grayscale creates a filter that produces a grayscale version of an image. +func Grayscale() Filter { + return &colorFilter{ + fn: func(px pixel) pixel { + y := 0.299*px.r + 0.587*px.g + 0.114*px.b + return pixel{y, y, y, px.a} + }, + } +} + +// Sepia creates a filter that produces a sepia-toned version of an image. +// The percentage parameter specifies how much the image should be adjusted. 
It must be in the range (0, 100) +// +// Example: +// +// g := gift.New( +// gift.Sepia(100), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Sepia(percentage float32) Filter { + adjustAmount := minf32(maxf32(percentage, 0), 100) / 100 + rr := 1 - 0.607*adjustAmount + rg := 0.769 * adjustAmount + rb := 0.189 * adjustAmount + gr := 0.349 * adjustAmount + gg := 1 - 0.314*adjustAmount + gb := 0.168 * adjustAmount + br := 0.272 * adjustAmount + bg := 0.534 * adjustAmount + bb := 1 - 0.869*adjustAmount + return &colorFilter{ + fn: func(px pixel) pixel { + r := px.r*rr + px.g*rg + px.b*rb + g := px.r*gr + px.g*gg + px.b*gb + b := px.r*br + px.g*bg + px.b*bb + return pixel{r, g, b, px.a} + }, + } +} + +func convertHSLToRGB(h, s, l float32) (float32, float32, float32) { + if s == 0 { + return l, l, l + } + + hueToRGB := func(p, q, t float32) float32 { + if t < 0 { + t++ + } + if t > 1 { + t-- + } + if t < 1/6.0 { + return p + (q-p)*6*t + } + if t < 1/2.0 { + return q + } + if t < 2/3.0 { + return p + (q-p)*(2/3.0-t)*6 + } + return p + } + + var p, q float32 + if l < 0.5 { + q = l * (1 + s) + } else { + q = l + s - l*s + } + p = 2*l - q + + r := hueToRGB(p, q, h+1/3.0) + g := hueToRGB(p, q, h) + b := hueToRGB(p, q, h-1/3.0) + + return r, g, b +} + +func convertRGBToHSL(r, g, b float32) (float32, float32, float32) { + max := maxf32(r, maxf32(g, b)) + min := minf32(r, minf32(g, b)) + + l := (max + min) / 2 + + if max == min { + return 0, 0, l + } + + var h, s float32 + d := max - min + if l > 0.5 { + s = d / (2 - max - min) + } else { + s = d / (max + min) + } + + if r == max { + h = (g - b) / d + if g < b { + h += 6 + } + } else if g == max { + h = (b-r)/d + 2 + } else { + h = (r-g)/d + 4 + } + h /= 6 + + return h, s, l +} + +func normalizeHue(hue float32) float32 { + hue = hue - float32(int(hue)) + if hue < 0 { + hue++ + } + return hue +} + +// Hue creates a filter that rotates the hue of an image. 
+// The shift parameter is the hue angle shift, typically in range (-180, 180). +// The shift = 0 gives the original image. +func Hue(shift float32) Filter { + p := normalizeHue(shift / 360) + if p == 0 { + return ©imageFilter{} + } + + return &colorFilter{ + fn: func(px pixel) pixel { + h, s, l := convertRGBToHSL(px.r, px.g, px.b) + h = normalizeHue(h + p) + r, g, b := convertHSLToRGB(h, s, l) + return pixel{r, g, b, px.a} + }, + } +} + +// Saturation creates a filter that changes the saturation of an image. +// The percentage parameter must be in range (-100, 500). The percentage = 0 gives the original image. +func Saturation(percentage float32) Filter { + p := 1 + minf32(maxf32(percentage, -100), 500)/100 + if p == 1 { + return ©imageFilter{} + } + + return &colorFilter{ + fn: func(px pixel) pixel { + h, s, l := convertRGBToHSL(px.r, px.g, px.b) + s *= p + if s > 1 { + s = 1 + } + r, g, b := convertHSLToRGB(h, s, l) + return pixel{r, g, b, px.a} + }, + } +} + +// Colorize creates a filter that produces a colorized version of an image. +// The hue parameter is the angle on the color wheel, typically in range (0, 360). +// The saturation parameter must be in range (0, 100). +// The percentage parameter specifies the strength of the effect, it must be in range (0, 100). 
+// +// Example: +// +// g := gift.New( +// gift.Colorize(240, 50, 100), // blue colorization, 50% saturation +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Colorize(hue, saturation, percentage float32) Filter { + h := normalizeHue(hue / 360) + s := minf32(maxf32(saturation, 0), 100) / 100 + p := minf32(maxf32(percentage, 0), 100) / 100 + if p == 0 { + return ©imageFilter{} + } + + return &colorFilter{ + fn: func(px pixel) pixel { + _, _, l := convertRGBToHSL(px.r, px.g, px.b) + r, g, b := convertHSLToRGB(h, s, l) + px.r += (r - px.r) * p + px.g += (g - px.g) * p + px.b += (b - px.b) * p + return px + }, + } +} + +// ColorBalance creates a filter that changes the color balance of an image. +// The percentage parameters for each color channel (red, green, blue) must be in range (-100, 500). +// +// Example: +// +// g := gift.New( +// gift.ColorBalance(20, -20, 0), // +20% red, -20% green +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func ColorBalance(percentageRed, percentageGreen, percentageBlue float32) Filter { + pr := 1 + minf32(maxf32(percentageRed, -100), 500)/100 + pg := 1 + minf32(maxf32(percentageGreen, -100), 500)/100 + pb := 1 + minf32(maxf32(percentageBlue, -100), 500)/100 + + return &colorFilter{ + fn: func(px pixel) pixel { + px.r *= pr + px.g *= pg + px.b *= pb + return px + }, + } +} + +// Threshold creates a filter that applies black/white thresholding to the image. +// The percentage parameter must be in range (0, 100). +func Threshold(percentage float32) Filter { + p := minf32(maxf32(percentage, 0), 100) / 100 + return &colorFilter{ + fn: func(px pixel) pixel { + y := 0.299*px.r + 0.587*px.g + 0.114*px.b + if y > p { + return pixel{1, 1, 1, px.a} + } + return pixel{0, 0, 0, px.a} + }, + } +} + +// ColorFunc creates a filter that changes the colors of an image using custom function. 
+// The fn parameter specifies a function that takes red, green, blue and alpha channels of a pixel +// as float32 values in range (0, 1) and returns the modified channel values. +// +// Example: +// +// g := gift.New( +// gift.ColorFunc( +// func(r0, g0, b0, a0 float32) (r, g, b, a float32) { +// r = 1 - r0 // invert the red channel +// g = g0 + 0.1 // shift the green channel by 0.1 +// b = 0 // set the blue channel to 0 +// a = a0 // preserve the alpha channel +// return r, g, b, a +// }, +// ), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func ColorFunc(fn func(r0, g0, b0, a0 float32) (r, g, b, a float32)) Filter { + return &colorFilter{ + fn: func(px pixel) pixel { + r, g, b, a := fn(px.r, px.g, px.b, px.a) + return pixel{r, g, b, a} + }, + } +} diff --git a/vendor/github.com/disintegration/gift/convolution.go b/vendor/github.com/disintegration/gift/convolution.go new file mode 100644 index 0000000..a03f730 --- /dev/null +++ b/vendor/github.com/disintegration/gift/convolution.go @@ -0,0 +1,579 @@ +package gift + +import ( + "image" + "image/draw" + "math" +) + +type uweight struct { + u int + weight float32 +} + +type uvweight struct { + u int + v int + weight float32 +} + +func prepareConvolutionWeights(kernel []float32, normalize bool) (int, []uvweight) { + size := int(math.Sqrt(float64(len(kernel)))) + if size%2 == 0 { + size-- + } + if size < 1 { + return 0, []uvweight{} + } + center := size / 2 + + weights := []uvweight{} + for i := 0; i < size; i++ { + for j := 0; j < size; j++ { + k := j*size + i + w := float32(0) + if k < len(kernel) { + w = kernel[k] + } + if w != 0 { + weights = append(weights, uvweight{u: i - center, v: j - center, weight: w}) + } + } + } + + if !normalize { + return size, weights + } + + var sum, sumpositive float32 + for _, w := range weights { + sum += w.weight + if w.weight > 0 { + sumpositive += w.weight + } + } + + var div float32 + if sum != 0 { + div = sum + } else if sumpositive != 0 { + 
div = sumpositive + } else { + return size, weights + } + + for i := 0; i < len(weights); i++ { + weights[i].weight /= div + } + + return size, weights +} + +type convolutionFilter struct { + kernel []float32 + normalize bool + alpha bool + abs bool + delta float32 +} + +func (p *convolutionFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *convolutionFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + ksize, weights := prepareConvolutionWeights(p.kernel, p.normalize) + kcenter := ksize / 2 + + if ksize < 1 { + copyimage(dst, src, options) + return + } + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + // Init temporary rows. + starty := start + rows := make([][]pixel, ksize) + for i := 0; i < ksize; i++ { + rowy := starty + i - kcenter + if rowy < srcb.Min.Y { + rowy = srcb.Min.Y + } else if rowy > srcb.Max.Y-1 { + rowy = srcb.Max.Y - 1 + } + row := make([]pixel, srcb.Dx()) + pixGetter.getPixelRow(rowy, &row) + rows[i] = row + } + + for y := start; y < stop; y++ { + // Calculate dst row. 
+ for x := srcb.Min.X; x < srcb.Max.X; x++ { + var r, g, b, a float32 + for _, w := range weights { + wx := x + w.u + if wx < srcb.Min.X { + wx = srcb.Min.X + } else if wx > srcb.Max.X-1 { + wx = srcb.Max.X - 1 + } + rowsx := wx - srcb.Min.X + rowsy := kcenter + w.v + + px := rows[rowsy][rowsx] + r += px.r * w.weight + g += px.g * w.weight + b += px.b * w.weight + if p.alpha { + a += px.a * w.weight + } + } + if p.abs { + r = absf32(r) + g = absf32(g) + b = absf32(b) + if p.alpha { + a = absf32(a) + } + } + if p.delta != 0 { + r += p.delta + g += p.delta + b += p.delta + if p.alpha { + a += p.delta + } + } + if !p.alpha { + a = rows[kcenter][x-srcb.Min.X].a + } + pixSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, pixel{r, g, b, a}) + } + + // Rotate temporary rows. + if y < stop-1 { + tmprow := rows[0] + for i := 0; i < ksize-1; i++ { + rows[i] = rows[i+1] + } + nextrowy := y + ksize/2 + 1 + if nextrowy > srcb.Max.Y-1 { + nextrowy = srcb.Max.Y - 1 + } + pixGetter.getPixelRow(nextrowy, &tmprow) + rows[ksize-1] = tmprow + } + } + }) +} + +// Convolution creates a filter that applies a square convolution kernel to an image. +// The length of the kernel slice must be the square of an odd kernel size (e.g. 9 for 3x3 kernel, 25 for 5x5 kernel). +// Excessive slice members will be ignored. +// If normalize parameter is true, the kernel will be normalized before applying the filter. +// If alpha parameter is true, the alpha component of color will be filtered too. +// If abs parameter is true, absolute values of color components will be taken after doing calculations. +// If delta parameter is not zero, this value will be added to the filtered pixels. +// +// Example: +// +// // Apply the emboss filter to an image. 
+// g := gift.New( +// gift.Convolution( +// []float32{ +// -1, -1, 0, +// -1, 1, 1, +// 0, 1, 1, +// }, +// false, false, false, 0, +// ), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Convolution(kernel []float32, normalize, alpha, abs bool, delta float32) Filter { + return &convolutionFilter{ + kernel: kernel, + normalize: normalize, + alpha: alpha, + abs: abs, + delta: delta, + } +} + +// prepareConvolutionWeights1d prepares pixel weights using a convolution kernel. +// Weights equal to 0 are excluded. +func prepareConvolutionWeights1d(kernel []float32) (int, []uweight) { + size := len(kernel) + if size%2 == 0 { + size-- + } + if size < 1 { + return 0, []uweight{} + } + center := size / 2 + weights := []uweight{} + for i := 0; i < size; i++ { + w := float32(0) + if i < len(kernel) { + w = kernel[i] + } + if w != 0 { + weights = append(weights, uweight{i - center, w}) + } + } + return size, weights +} + +// convolveLine convolves a single line of pixels according to the given weights. +func convolveLine(dstBuf []pixel, srcBuf []pixel, weights []uweight) { + max := len(srcBuf) - 1 + if max < 0 { + return + } + for dstu := 0; dstu < len(srcBuf); dstu++ { + var r, g, b, a float32 + for _, w := range weights { + k := dstu + w.u + if k < 0 { + k = 0 + } else if k > max { + k = max + } + c := srcBuf[k] + wa := c.a * w.weight + r += c.r * wa + g += c.g * wa + b += c.b * wa + a += wa + } + if a != 0 { + r /= a + g /= a + b /= a + } + dstBuf[dstu] = pixel{r, g, b, a} + } +} + +// convolve1dv performs a fast vertical 1d convolution. 
+func convolve1dv(dst draw.Image, src image.Image, kernel []float32, options *Options) { + srcb := src.Bounds() + dstb := dst.Bounds() + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + if kernel == nil || len(kernel) < 1 { + copyimage(dst, src, options) + return + } + _, weights := prepareConvolutionWeights1d(kernel) + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + parallelize(options.Parallelization, srcb.Min.X, srcb.Max.X, func(start, stop int) { + srcBuf := make([]pixel, srcb.Dy()) + dstBuf := make([]pixel, srcb.Dy()) + for x := start; x < stop; x++ { + pixGetter.getPixelColumn(x, &srcBuf) + convolveLine(dstBuf, srcBuf, weights) + pixSetter.setPixelColumn(dstb.Min.X+x-srcb.Min.X, dstBuf) + } + }) +} + +// convolve1dh performs afast horizontal 1d convolution. +func convolve1dh(dst draw.Image, src image.Image, kernel []float32, options *Options) { + srcb := src.Bounds() + dstb := dst.Bounds() + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + if kernel == nil || len(kernel) < 1 { + copyimage(dst, src, options) + return + } + _, weights := prepareConvolutionWeights1d(kernel) + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + srcBuf := make([]pixel, srcb.Dx()) + dstBuf := make([]pixel, srcb.Dx()) + for y := start; y < stop; y++ { + pixGetter.getPixelRow(y, &srcBuf) + convolveLine(dstBuf, srcBuf, weights) + pixSetter.setPixelRow(dstb.Min.Y+y-srcb.Min.Y, dstBuf) + } + }) +} + +func gaussianBlurKernel(x, sigma float32) float32 { + return float32(math.Exp(-float64(x*x)/float64(2*sigma*sigma)) / (float64(sigma) * math.Sqrt(2*math.Pi))) +} + +type gausssianBlurFilter struct { + sigma float32 +} + +func (p *gausssianBlurFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *gausssianBlurFilter) Draw(dst draw.Image, src image.Image, 
options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + if p.sigma <= 0 { + copyimage(dst, src, options) + return + } + + radius := int(math.Ceil(float64(p.sigma * 3))) + size := 2*radius + 1 + center := radius + kernel := make([]float32, size) + + kernel[center] = gaussianBlurKernel(0, p.sigma) + sum := kernel[center] + + for i := 1; i <= radius; i++ { + f := gaussianBlurKernel(float32(i), p.sigma) + kernel[center-i] = f + kernel[center+i] = f + sum += 2 * f + } + + for i := 0; i < len(kernel); i++ { + kernel[i] /= sum + } + + tmp := createTempImage(srcb) + convolve1dh(tmp, src, kernel, options) + convolve1dv(dst, tmp, kernel, options) +} + +// GaussianBlur creates a filter that applies a gaussian blur to an image. +// The sigma parameter must be positive and indicates how much the image will be blurred. +// Blur affected radius roughly equals 3 * sigma. +// +// Example: +// +// g := gift.New( +// gift.GaussianBlur(1.5), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func GaussianBlur(sigma float32) Filter { + return &gausssianBlurFilter{ + sigma: sigma, + } +} + +type unsharpMaskFilter struct { + sigma float32 + amount float32 + threshold float32 +} + +func (p *unsharpMaskFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func unsharp(orig, blurred, amount, threshold float32) float32 { + dif := (orig - blurred) * amount + if absf32(dif) > absf32(threshold) { + return orig + dif + } + return orig +} + +func (p *unsharpMaskFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + blurred := createTempImage(srcb) + blur := GaussianBlur(p.sigma) + blur.Draw(blurred, src, 
options) + + pixGetterOrig := newPixelGetter(src) + pixGetterBlur := newPixelGetter(blurred) + pixelSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := srcb.Min.X; x < srcb.Max.X; x++ { + pxOrig := pixGetterOrig.getPixel(x, y) + pxBlur := pixGetterBlur.getPixel(x, y) + + r := unsharp(pxOrig.r, pxBlur.r, p.amount, p.threshold) + g := unsharp(pxOrig.g, pxBlur.g, p.amount, p.threshold) + b := unsharp(pxOrig.b, pxBlur.b, p.amount, p.threshold) + a := unsharp(pxOrig.a, pxBlur.a, p.amount, p.threshold) + + pixelSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, pixel{r, g, b, a}) + } + } + }) +} + +// UnsharpMask creates a filter that sharpens an image. +// The sigma parameter is used in a gaussian function and affects the radius of effect. +// Sigma must be positive. Sharpen radius roughly equals 3 * sigma. +// The amount parameter controls how much darker and how much lighter the edge borders become. Typically between 0.5 and 1.5. +// The threshold parameter controls the minimum brightness change that will be sharpened. Typically between 0 and 0.05. 
+// +// Example: +// +// g := gift.New( +// gift.UnsharpMask(1, 1, 0), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func UnsharpMask(sigma, amount, threshold float32) Filter { + return &unsharpMaskFilter{ + sigma: sigma, + amount: amount, + threshold: threshold, + } +} + +type meanFilter struct { + ksize int + disk bool +} + +func (p *meanFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *meanFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + ksize := p.ksize + if ksize%2 == 0 { + ksize-- + } + + if ksize <= 1 { + copyimage(dst, src, options) + return + } + + if p.disk { + diskKernel := genDisk(p.ksize) + f := Convolution(diskKernel, true, true, false, 0) + f.Draw(dst, src, options) + } else { + kernel := make([]float32, ksize*ksize) + for i := range kernel { + kernel[i] = 1 + } + f := Convolution(kernel, true, true, false, 0) + f.Draw(dst, src, options) + } +} + +// Mean creates a local mean image filter. +// Takes an average across a neighborhood for each pixel. +// The ksize parameter is the kernel size. It must be an odd positive integer (for example: 3, 5, 7). +// If the disk parameter is true, a disk-shaped neighborhood will be used instead of a square neighborhood. 
+func Mean(ksize int, disk bool) Filter { + return &meanFilter{ + ksize: ksize, + disk: disk, + } +} + +type hvConvolutionFilter struct { + hkernel, vkernel []float32 +} + +func (p *hvConvolutionFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *hvConvolutionFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + tmph := createTempImage(srcb) + Convolution(p.hkernel, false, false, true, 0).Draw(tmph, src, options) + pixGetterH := newPixelGetter(tmph) + + tmpv := createTempImage(srcb) + Convolution(p.vkernel, false, false, true, 0).Draw(tmpv, src, options) + pixGetterV := newPixelGetter(tmpv) + + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := srcb.Min.X; x < srcb.Max.X; x++ { + pxh := pixGetterH.getPixel(x, y) + pxv := pixGetterV.getPixel(x, y) + r := sqrtf32(pxh.r*pxh.r + pxv.r*pxv.r) + g := sqrtf32(pxh.g*pxh.g + pxv.g*pxv.g) + b := sqrtf32(pxh.b*pxh.b + pxv.b*pxv.b) + pixSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, pixel{r, g, b, pxh.a}) + } + } + }) + +} + +// Sobel creates a filter that applies a sobel operator to an image. 
+func Sobel() Filter { + return &hvConvolutionFilter{ + hkernel: []float32{-1, 0, 1, -2, 0, 2, -1, 0, 1}, + vkernel: []float32{-1, -2, -1, 0, 0, 0, 1, 2, 1}, + } +} diff --git a/vendor/github.com/disintegration/gift/effects.go b/vendor/github.com/disintegration/gift/effects.go new file mode 100644 index 0000000..00dd070 --- /dev/null +++ b/vendor/github.com/disintegration/gift/effects.go @@ -0,0 +1,87 @@ +package gift + +import ( + "image" + "image/draw" +) + +type pixelateFilter struct { + size int +} + +func (p *pixelateFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *pixelateFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + blockSize := p.size + if blockSize <= 1 { + copyimage(dst, src, options) + return + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + numBlocksX := srcb.Dx() / blockSize + if srcb.Dx()%blockSize > 0 { + numBlocksX++ + } + numBlocksY := srcb.Dy() / blockSize + if srcb.Dy()%blockSize > 0 { + numBlocksY++ + } + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, 0, numBlocksY, func(start, stop int) { + for by := start; by < stop; by++ { + for bx := 0; bx < numBlocksX; bx++ { + // Calculate the block bounds. + bb := image.Rect(bx*blockSize, by*blockSize, (bx+1)*blockSize, (by+1)*blockSize) + bbSrc := bb.Add(srcb.Min).Intersect(srcb) + bbDst := bbSrc.Sub(srcb.Min).Add(dstb.Min).Intersect(dstb) + + // Calculate the average color of the block. + var r, g, b, a float32 + var cnt float32 + for y := bbSrc.Min.Y; y < bbSrc.Max.Y; y++ { + for x := bbSrc.Min.X; x < bbSrc.Max.X; x++ { + px := pixGetter.getPixel(x, y) + r += px.r + g += px.g + b += px.b + a += px.a + cnt++ + } + } + if cnt > 0 { + r /= cnt + g /= cnt + b /= cnt + a /= cnt + } + + // Set the calculated color for all pixels in the block. 
+ for y := bbDst.Min.Y; y < bbDst.Max.Y; y++ { + for x := bbDst.Min.X; x < bbDst.Max.X; x++ { + pixSetter.setPixel(x, y, pixel{r, g, b, a}) + } + } + } + } + }) +} + +// Pixelate creates a filter that applies a pixelation effect to an image. +func Pixelate(size int) Filter { + return &pixelateFilter{ + size: size, + } +} diff --git a/vendor/github.com/disintegration/gift/gift.go b/vendor/github.com/disintegration/gift/gift.go new file mode 100644 index 0000000..938bb40 --- /dev/null +++ b/vendor/github.com/disintegration/gift/gift.go @@ -0,0 +1,215 @@ +/* +Package gift provides a set of useful image processing filters. + +Basic usage: + + // 1. Create a new filter list and add some filters. + g := gift.New( + gift.Resize(800, 0, gift.LanczosResampling), + gift.UnsharpMask(1, 1, 0), + ) + + // 2. Create a new image of the corresponding size. + // dst is a new target image, src is the original image. + dst := image.NewRGBA(g.Bounds(src.Bounds())) + + // 3. Use the Draw func to apply the filters to src and store the result in dst. + g.Draw(dst, src) + +*/ +package gift + +import ( + "image" + "image/draw" +) + +// Filter is an image processing filter. +type Filter interface { + // Draw applies the filter to the src image and outputs the result to the dst image. + Draw(dst draw.Image, src image.Image, options *Options) + // Bounds calculates the appropriate bounds of an image after applying the filter. + Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) +} + +// Options is the parameters passed to image processing filters. +type Options struct { + Parallelization bool +} + +var defaultOptions = Options{ + Parallelization: true, +} + +// GIFT is a list of image processing filters. +type GIFT struct { + Filters []Filter + Options Options +} + +// New creates a new filter list and initializes it with the given slice of filters. 
+func New(filters ...Filter) *GIFT { + return &GIFT{ + Filters: filters, + Options: defaultOptions, + } +} + +// SetParallelization enables or disables the image processing parallelization. +// Parallelization is enabled by default. +func (g *GIFT) SetParallelization(isEnabled bool) { + g.Options.Parallelization = isEnabled +} + +// Parallelization returns the current state of parallelization option. +func (g *GIFT) Parallelization() bool { + return g.Options.Parallelization +} + +// Add appends the given filters to the list of filters. +func (g *GIFT) Add(filters ...Filter) { + g.Filters = append(g.Filters, filters...) +} + +// Empty removes all the filters from the list. +func (g *GIFT) Empty() { + g.Filters = []Filter{} +} + +// Bounds calculates the appropriate bounds for the result image after applying all the added filters. +// Parameter srcBounds is the bounds of the source image. +// +// Example: +// +// src := image.NewRGBA(image.Rect(0, 0, 100, 200)) +// g := gift.New(gift.Rotate90()) +// +// // calculate image bounds after applying rotation and create a new image of that size. +// dst := image.NewRGBA(g.Bounds(src.Bounds())) // dst bounds: (0, 0, 200, 100) +// +// +func (g *GIFT) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + b := srcBounds + for _, f := range g.Filters { + b = f.Bounds(b) + } + dstBounds = b + return +} + +// Draw applies all the added filters to the src image and outputs the result to the dst image. +func (g *GIFT) Draw(dst draw.Image, src image.Image) { + if len(g.Filters) == 0 { + copyimage(dst, src, &g.Options) + return + } + + first, last := 0, len(g.Filters)-1 + var tmpIn image.Image + var tmpOut draw.Image + + for i, f := range g.Filters { + if i == first { + tmpIn = src + } else { + tmpIn = tmpOut + } + + if i == last { + tmpOut = dst + } else { + tmpOut = createTempImage(f.Bounds(tmpIn.Bounds())) + } + + f.Draw(tmpOut, tmpIn, &g.Options) + } +} + +// Operator is an image composition operator. 
+type Operator int + +// Composition operators. +const ( + CopyOperator Operator = iota + OverOperator +) + +// DrawAt applies all the added filters to the src image and outputs the result to the dst image +// at the specified position pt using the specified composition operator op. +func (g *GIFT) DrawAt(dst draw.Image, src image.Image, pt image.Point, op Operator) { + switch op { + case OverOperator: + tb := g.Bounds(src.Bounds()) + tb = tb.Sub(tb.Min).Add(pt) + tmp := createTempImage(tb) + g.Draw(tmp, src) + pixGetterDst := newPixelGetter(dst) + pixGetterTmp := newPixelGetter(tmp) + pixSetterDst := newPixelSetter(dst) + ib := tb.Intersect(dst.Bounds()) + parallelize(g.Options.Parallelization, ib.Min.Y, ib.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := ib.Min.X; x < ib.Max.X; x++ { + px0 := pixGetterDst.getPixel(x, y) + px1 := pixGetterTmp.getPixel(x, y) + c1 := px1.a + c0 := (1 - c1) * px0.a + cs := c0 + c1 + c0 /= cs + c1 /= cs + r := px0.r*c0 + px1.r*c1 + g := px0.g*c0 + px1.g*c1 + b := px0.b*c0 + px1.b*c1 + a := px0.a + px1.a*(1-px0.a) + pixSetterDst.setPixel(x, y, pixel{r, g, b, a}) + } + } + }) + + default: + if pt.Eq(dst.Bounds().Min) { + g.Draw(dst, src) + return + } + if subimg, ok := getSubImage(dst, pt); ok { + g.Draw(subimg, src) + return + } + tb := g.Bounds(src.Bounds()) + tb = tb.Sub(tb.Min).Add(pt) + tmp := createTempImage(tb) + g.Draw(tmp, src) + pixGetter := newPixelGetter(tmp) + pixSetter := newPixelSetter(dst) + ib := tb.Intersect(dst.Bounds()) + parallelize(g.Options.Parallelization, ib.Min.Y, ib.Max.Y, func(start, stop int) { + for y := start; y < stop; y++ { + for x := ib.Min.X; x < ib.Max.X; x++ { + pixSetter.setPixel(x, y, pixGetter.getPixel(x, y)) + } + } + }) + } +} + +func getSubImage(img draw.Image, pt image.Point) (draw.Image, bool) { + if !pt.In(img.Bounds()) { + return nil, false + } + switch img := img.(type) { + case *image.Gray: + return img.SubImage(image.Rectangle{pt, 
img.Bounds().Max}).(draw.Image), true + case *image.Gray16: + return img.SubImage(image.Rectangle{pt, img.Bounds().Max}).(draw.Image), true + case *image.RGBA: + return img.SubImage(image.Rectangle{pt, img.Bounds().Max}).(draw.Image), true + case *image.RGBA64: + return img.SubImage(image.Rectangle{pt, img.Bounds().Max}).(draw.Image), true + case *image.NRGBA: + return img.SubImage(image.Rectangle{pt, img.Bounds().Max}).(draw.Image), true + case *image.NRGBA64: + return img.SubImage(image.Rectangle{pt, img.Bounds().Max}).(draw.Image), true + default: + return nil, false + } +} diff --git a/vendor/github.com/disintegration/gift/pixels.go b/vendor/github.com/disintegration/gift/pixels.go new file mode 100644 index 0000000..cd468f9 --- /dev/null +++ b/vendor/github.com/disintegration/gift/pixels.go @@ -0,0 +1,493 @@ +package gift + +import ( + "image" + "image/color" + "image/draw" +) + +type pixel struct { + r, g, b, a float32 +} + +type imageType int + +const ( + itGeneric imageType = iota + itNRGBA + itNRGBA64 + itRGBA + itRGBA64 + itYCbCr + itGray + itGray16 + itPaletted +) + +type pixelGetter struct { + it imageType + bounds image.Rectangle + image image.Image + nrgba *image.NRGBA + nrgba64 *image.NRGBA64 + rgba *image.RGBA + rgba64 *image.RGBA64 + gray *image.Gray + gray16 *image.Gray16 + ycbcr *image.YCbCr + paletted *image.Paletted + palette []pixel +} + +func newPixelGetter(img image.Image) *pixelGetter { + switch img := img.(type) { + case *image.NRGBA: + return &pixelGetter{ + it: itNRGBA, + bounds: img.Bounds(), + nrgba: img, + } + + case *image.NRGBA64: + return &pixelGetter{ + it: itNRGBA64, + bounds: img.Bounds(), + nrgba64: img, + } + + case *image.RGBA: + return &pixelGetter{ + it: itRGBA, + bounds: img.Bounds(), + rgba: img, + } + + case *image.RGBA64: + return &pixelGetter{ + it: itRGBA64, + bounds: img.Bounds(), + rgba64: img, + } + + case *image.Gray: + return &pixelGetter{ + it: itGray, + bounds: img.Bounds(), + gray: img, + } + + case 
*image.Gray16: + return &pixelGetter{ + it: itGray16, + bounds: img.Bounds(), + gray16: img, + } + + case *image.YCbCr: + return &pixelGetter{ + it: itYCbCr, + bounds: img.Bounds(), + ycbcr: img, + } + + case *image.Paletted: + return &pixelGetter{ + it: itPaletted, + bounds: img.Bounds(), + paletted: img, + palette: convertPalette(img.Palette), + } + + default: + return &pixelGetter{ + it: itGeneric, + bounds: img.Bounds(), + image: img, + } + } +} + +const ( + qf8 = 1.0 / 0xff + qf16 = 1.0 / 0xffff + epal = qf16 * qf16 / 2 +) + +func pixelFromColor(c color.Color) (px pixel) { + r16, g16, b16, a16 := c.RGBA() + switch a16 { + case 0: + px = pixel{0, 0, 0, 0} + case 0xffff: + r := float32(r16) * qf16 + g := float32(g16) * qf16 + b := float32(b16) * qf16 + px = pixel{r, g, b, 1} + default: + q := float32(1) / float32(a16) + r := float32(r16) * q + g := float32(g16) * q + b := float32(b16) * q + a := float32(a16) * qf16 + px = pixel{r, g, b, a} + } + return px +} + +func convertPalette(p []color.Color) []pixel { + pal := make([]pixel, len(p)) + for i := 0; i < len(p); i++ { + pal[i] = pixelFromColor(p[i]) + } + return pal +} + +func getPaletteIndex(pal []pixel, px pixel) int { + var k int + var dmin float32 = 4 + for i, palpx := range pal { + d := px.r - palpx.r + dcur := d * d + d = px.g - palpx.g + dcur += d * d + d = px.b - palpx.b + dcur += d * d + d = px.a - palpx.a + dcur += d * d + if dcur < epal { + return i + } + if dcur < dmin { + dmin = dcur + k = i + } + } + return k +} + +func (p *pixelGetter) getPixel(x, y int) pixel { + switch p.it { + case itNRGBA: + i := p.nrgba.PixOffset(x, y) + r := float32(p.nrgba.Pix[i+0]) * qf8 + g := float32(p.nrgba.Pix[i+1]) * qf8 + b := float32(p.nrgba.Pix[i+2]) * qf8 + a := float32(p.nrgba.Pix[i+3]) * qf8 + return pixel{r, g, b, a} + + case itNRGBA64: + i := p.nrgba64.PixOffset(x, y) + r := float32(uint16(p.nrgba64.Pix[i+0])<<8|uint16(p.nrgba64.Pix[i+1])) * qf16 + g := 
float32(uint16(p.nrgba64.Pix[i+2])<<8|uint16(p.nrgba64.Pix[i+3])) * qf16 + b := float32(uint16(p.nrgba64.Pix[i+4])<<8|uint16(p.nrgba64.Pix[i+5])) * qf16 + a := float32(uint16(p.nrgba64.Pix[i+6])<<8|uint16(p.nrgba64.Pix[i+7])) * qf16 + return pixel{r, g, b, a} + + case itRGBA: + i := p.rgba.PixOffset(x, y) + a8 := p.rgba.Pix[i+3] + switch a8 { + case 0xff: + r := float32(p.rgba.Pix[i+0]) * qf8 + g := float32(p.rgba.Pix[i+1]) * qf8 + b := float32(p.rgba.Pix[i+2]) * qf8 + return pixel{r, g, b, 1} + case 0: + return pixel{0, 0, 0, 0} + default: + q := float32(1) / float32(a8) + r := float32(p.rgba.Pix[i+0]) * q + g := float32(p.rgba.Pix[i+1]) * q + b := float32(p.rgba.Pix[i+2]) * q + a := float32(a8) * qf8 + return pixel{r, g, b, a} + } + + case itRGBA64: + i := p.rgba64.PixOffset(x, y) + a16 := uint16(p.rgba64.Pix[i+6])<<8 | uint16(p.rgba64.Pix[i+7]) + switch a16 { + case 0xffff: + r := float32(uint16(p.rgba64.Pix[i+0])<<8|uint16(p.rgba64.Pix[i+1])) * qf16 + g := float32(uint16(p.rgba64.Pix[i+2])<<8|uint16(p.rgba64.Pix[i+3])) * qf16 + b := float32(uint16(p.rgba64.Pix[i+4])<<8|uint16(p.rgba64.Pix[i+5])) * qf16 + return pixel{r, g, b, 1} + case 0: + return pixel{0, 0, 0, 0} + default: + q := float32(1) / float32(a16) + r := float32(uint16(p.rgba64.Pix[i+0])<<8|uint16(p.rgba64.Pix[i+1])) * q + g := float32(uint16(p.rgba64.Pix[i+2])<<8|uint16(p.rgba64.Pix[i+3])) * q + b := float32(uint16(p.rgba64.Pix[i+4])<<8|uint16(p.rgba64.Pix[i+5])) * q + a := float32(a16) * qf16 + return pixel{r, g, b, a} + } + + case itGray: + i := p.gray.PixOffset(x, y) + v := float32(p.gray.Pix[i]) * qf8 + return pixel{v, v, v, 1} + + case itGray16: + i := p.gray16.PixOffset(x, y) + v := float32(uint16(p.gray16.Pix[i+0])<<8|uint16(p.gray16.Pix[i+1])) * qf16 + return pixel{v, v, v, 1} + + case itYCbCr: + iy := (y-p.ycbcr.Rect.Min.Y)*p.ycbcr.YStride + (x - p.ycbcr.Rect.Min.X) + + var ic int + switch p.ycbcr.SubsampleRatio { + case image.YCbCrSubsampleRatio444: + ic = 
(y-p.ycbcr.Rect.Min.Y)*p.ycbcr.CStride + (x - p.ycbcr.Rect.Min.X) + case image.YCbCrSubsampleRatio422: + ic = (y-p.ycbcr.Rect.Min.Y)*p.ycbcr.CStride + (x/2 - p.ycbcr.Rect.Min.X/2) + case image.YCbCrSubsampleRatio420: + ic = (y/2-p.ycbcr.Rect.Min.Y/2)*p.ycbcr.CStride + (x/2 - p.ycbcr.Rect.Min.X/2) + case image.YCbCrSubsampleRatio440: + ic = (y/2-p.ycbcr.Rect.Min.Y/2)*p.ycbcr.CStride + (x - p.ycbcr.Rect.Min.X) + default: + ic = p.ycbcr.COffset(x, y) + } + + const ( + max = 255 * 1e5 + inv = 1.0 / max + ) + + y1 := int32(p.ycbcr.Y[iy]) * 1e5 + cb1 := int32(p.ycbcr.Cb[ic]) - 128 + cr1 := int32(p.ycbcr.Cr[ic]) - 128 + + r1 := y1 + 140200*cr1 + g1 := y1 - 34414*cb1 - 71414*cr1 + b1 := y1 + 177200*cb1 + + r := float32(clampi32(r1, 0, max)) * inv + g := float32(clampi32(g1, 0, max)) * inv + b := float32(clampi32(b1, 0, max)) * inv + + return pixel{r, g, b, 1} + + case itPaletted: + i := p.paletted.PixOffset(x, y) + k := p.paletted.Pix[i] + return p.palette[k] + } + + return pixelFromColor(p.image.At(x, y)) +} + +func (p *pixelGetter) getPixelRow(y int, buf *[]pixel) { + *buf = (*buf)[:0] + for x := p.bounds.Min.X; x != p.bounds.Max.X; x++ { + *buf = append(*buf, p.getPixel(x, y)) + } +} + +func (p *pixelGetter) getPixelColumn(x int, buf *[]pixel) { + *buf = (*buf)[:0] + for y := p.bounds.Min.Y; y != p.bounds.Max.Y; y++ { + *buf = append(*buf, p.getPixel(x, y)) + } +} + +func f32u8(val float32) uint8 { + x := int64(val + 0.5) + if x > 0xff { + return 0xff + } + if x > 0 { + return uint8(x) + } + return 0 +} + +func f32u16(val float32) uint16 { + x := int64(val + 0.5) + if x > 0xffff { + return 0xffff + } + if x > 0 { + return uint16(x) + } + return 0 +} + +func clampi32(val, min, max int32) int32 { + if val > max { + return max + } + if val > min { + return val + } + return 0 +} + +type pixelSetter struct { + it imageType + bounds image.Rectangle + image draw.Image + nrgba *image.NRGBA + nrgba64 *image.NRGBA64 + rgba *image.RGBA + rgba64 *image.RGBA64 + gray *image.Gray + 
gray16 *image.Gray16 + paletted *image.Paletted + palette []pixel +} + +func newPixelSetter(img draw.Image) *pixelSetter { + switch img := img.(type) { + case *image.NRGBA: + return &pixelSetter{ + it: itNRGBA, + bounds: img.Bounds(), + nrgba: img, + } + + case *image.NRGBA64: + return &pixelSetter{ + it: itNRGBA64, + bounds: img.Bounds(), + nrgba64: img, + } + + case *image.RGBA: + return &pixelSetter{ + it: itRGBA, + bounds: img.Bounds(), + rgba: img, + } + + case *image.RGBA64: + return &pixelSetter{ + it: itRGBA64, + bounds: img.Bounds(), + rgba64: img, + } + + case *image.Gray: + return &pixelSetter{ + it: itGray, + bounds: img.Bounds(), + gray: img, + } + + case *image.Gray16: + return &pixelSetter{ + it: itGray16, + bounds: img.Bounds(), + gray16: img, + } + + case *image.Paletted: + return &pixelSetter{ + it: itPaletted, + bounds: img.Bounds(), + paletted: img, + palette: convertPalette(img.Palette), + } + + default: + return &pixelSetter{ + it: itGeneric, + bounds: img.Bounds(), + image: img, + } + } +} + +func (p *pixelSetter) setPixel(x, y int, px pixel) { + if !image.Pt(x, y).In(p.bounds) { + return + } + switch p.it { + case itNRGBA: + i := p.nrgba.PixOffset(x, y) + p.nrgba.Pix[i+0] = f32u8(px.r * 0xff) + p.nrgba.Pix[i+1] = f32u8(px.g * 0xff) + p.nrgba.Pix[i+2] = f32u8(px.b * 0xff) + p.nrgba.Pix[i+3] = f32u8(px.a * 0xff) + + case itNRGBA64: + r16 := f32u16(px.r * 0xffff) + g16 := f32u16(px.g * 0xffff) + b16 := f32u16(px.b * 0xffff) + a16 := f32u16(px.a * 0xffff) + i := p.nrgba64.PixOffset(x, y) + p.nrgba64.Pix[i+0] = uint8(r16 >> 8) + p.nrgba64.Pix[i+1] = uint8(r16 & 0xff) + p.nrgba64.Pix[i+2] = uint8(g16 >> 8) + p.nrgba64.Pix[i+3] = uint8(g16 & 0xff) + p.nrgba64.Pix[i+4] = uint8(b16 >> 8) + p.nrgba64.Pix[i+5] = uint8(b16 & 0xff) + p.nrgba64.Pix[i+6] = uint8(a16 >> 8) + p.nrgba64.Pix[i+7] = uint8(a16 & 0xff) + + case itRGBA: + fa := px.a * 0xff + i := p.rgba.PixOffset(x, y) + p.rgba.Pix[i+0] = f32u8(px.r * fa) + p.rgba.Pix[i+1] = f32u8(px.g * fa) + 
p.rgba.Pix[i+2] = f32u8(px.b * fa) + p.rgba.Pix[i+3] = f32u8(fa) + + case itRGBA64: + fa := px.a * 0xffff + r16 := f32u16(px.r * fa) + g16 := f32u16(px.g * fa) + b16 := f32u16(px.b * fa) + a16 := f32u16(fa) + i := p.rgba64.PixOffset(x, y) + p.rgba64.Pix[i+0] = uint8(r16 >> 8) + p.rgba64.Pix[i+1] = uint8(r16 & 0xff) + p.rgba64.Pix[i+2] = uint8(g16 >> 8) + p.rgba64.Pix[i+3] = uint8(g16 & 0xff) + p.rgba64.Pix[i+4] = uint8(b16 >> 8) + p.rgba64.Pix[i+5] = uint8(b16 & 0xff) + p.rgba64.Pix[i+6] = uint8(a16 >> 8) + p.rgba64.Pix[i+7] = uint8(a16 & 0xff) + + case itGray: + i := p.gray.PixOffset(x, y) + p.gray.Pix[i] = f32u8((0.299*px.r + 0.587*px.g + 0.114*px.b) * px.a * 0xff) + + case itGray16: + i := p.gray16.PixOffset(x, y) + y16 := f32u16((0.299*px.r + 0.587*px.g + 0.114*px.b) * px.a * 0xffff) + p.gray16.Pix[i+0] = uint8(y16 >> 8) + p.gray16.Pix[i+1] = uint8(y16 & 0xff) + + case itPaletted: + px1 := pixel{ + minf32(maxf32(px.r, 0), 1), + minf32(maxf32(px.g, 0), 1), + minf32(maxf32(px.b, 0), 1), + minf32(maxf32(px.a, 0), 1), + } + i := p.paletted.PixOffset(x, y) + k := getPaletteIndex(p.palette, px1) + p.paletted.Pix[i] = uint8(k) + + case itGeneric: + r16 := f32u16(px.r * 0xffff) + g16 := f32u16(px.g * 0xffff) + b16 := f32u16(px.b * 0xffff) + a16 := f32u16(px.a * 0xffff) + p.image.Set(x, y, color.NRGBA64{r16, g16, b16, a16}) + } +} + +func (p *pixelSetter) setPixelRow(y int, buf []pixel) { + for i, x := 0, p.bounds.Min.X; i < len(buf); i, x = i+1, x+1 { + p.setPixel(x, y, buf[i]) + } +} + +func (p *pixelSetter) setPixelColumn(x int, buf []pixel) { + for i, y := 0, p.bounds.Min.Y; i < len(buf); i, y = i+1, y+1 { + p.setPixel(x, y, buf[i]) + } +} diff --git a/vendor/github.com/disintegration/gift/rank.go b/vendor/github.com/disintegration/gift/rank.go new file mode 100644 index 0000000..8d0b651 --- /dev/null +++ b/vendor/github.com/disintegration/gift/rank.go @@ -0,0 +1,223 @@ +package gift + +import ( + "image" + "image/draw" +) + +type rankMode int + +const ( + 
rankMedian rankMode = iota + rankMin + rankMax +) + +type rankFilter struct { + ksize int + disk bool + mode rankMode +} + +func (p *rankFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *rankFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + if srcb.Dx() <= 0 || srcb.Dy() <= 0 { + return + } + + ksize := p.ksize + if ksize%2 == 0 { + ksize-- + } + + if ksize <= 1 { + copyimage(dst, src, options) + return + } + kradius := ksize / 2 + + opaque := isOpaque(src) + + var disk []float32 + if p.disk { + disk = genDisk(ksize) + } + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + pxbuf := []pixel{} + + var rbuf, gbuf, bbuf, abuf []float32 + if p.mode == rankMedian { + rbuf = make([]float32, 0, ksize*ksize) + gbuf = make([]float32, 0, ksize*ksize) + bbuf = make([]float32, 0, ksize*ksize) + if !opaque { + abuf = make([]float32, 0, ksize*ksize) + } + } + + for y := start; y < stop; y++ { + // Init buffer. 
+ pxbuf = pxbuf[:0] + for i := srcb.Min.X - kradius; i <= srcb.Min.X+kradius; i++ { + for j := y - kradius; j <= y+kradius; j++ { + kx, ky := i, j + if kx < srcb.Min.X { + kx = srcb.Min.X + } else if kx > srcb.Max.X-1 { + kx = srcb.Max.X - 1 + } + if ky < srcb.Min.Y { + ky = srcb.Min.Y + } else if ky > srcb.Max.Y-1 { + ky = srcb.Max.Y - 1 + } + pxbuf = append(pxbuf, pixGetter.getPixel(kx, ky)) + } + } + + for x := srcb.Min.X; x < srcb.Max.X; x++ { + var r, g, b, a float32 + if p.mode == rankMedian { + rbuf = rbuf[:0] + gbuf = gbuf[:0] + bbuf = bbuf[:0] + if !opaque { + abuf = abuf[:0] + } + } else if p.mode == rankMin { + r, g, b, a = 1, 1, 1, 1 + } else if p.mode == rankMax { + r, g, b, a = 0, 0, 0, 0 + } + + sz := 0 + for i := 0; i < ksize; i++ { + for j := 0; j < ksize; j++ { + + if p.disk { + if disk[i*ksize+j] == 0 { + continue + } + } + + px := pxbuf[i*ksize+j] + if p.mode == rankMedian { + rbuf = append(rbuf, px.r) + gbuf = append(gbuf, px.g) + bbuf = append(bbuf, px.b) + if !opaque { + abuf = append(abuf, px.a) + } + } else if p.mode == rankMin { + r = minf32(r, px.r) + g = minf32(g, px.g) + b = minf32(b, px.b) + if !opaque { + a = minf32(a, px.a) + } + } else if p.mode == rankMax { + r = maxf32(r, px.r) + g = maxf32(g, px.g) + b = maxf32(b, px.b) + if !opaque { + a = maxf32(a, px.a) + } + } + sz++ + } + } + + if p.mode == rankMedian { + sort(rbuf) + sort(gbuf) + sort(bbuf) + if !opaque { + sort(abuf) + } + + idx := sz / 2 + r, g, b = rbuf[idx], gbuf[idx], bbuf[idx] + if !opaque { + a = abuf[idx] + } + } + + if opaque { + a = 1 + } + + pixSetter.setPixel(dstb.Min.X+x-srcb.Min.X, dstb.Min.Y+y-srcb.Min.Y, pixel{r, g, b, a}) + + // Rotate buffer columns. 
+ if x < srcb.Max.X-1 { + copy(pxbuf[0:], pxbuf[ksize:]) + pxbuf = pxbuf[0 : ksize*(ksize-1)] + kx := x + 1 + kradius + if kx > srcb.Max.X-1 { + kx = srcb.Max.X - 1 + } + for j := y - kradius; j <= y+kradius; j++ { + ky := j + if ky < srcb.Min.Y { + ky = srcb.Min.Y + } else if ky > srcb.Max.Y-1 { + ky = srcb.Max.Y - 1 + } + pxbuf = append(pxbuf, pixGetter.getPixel(kx, ky)) + } + } + } + } + }) +} + +// Median creates a median image filter. +// Picks a median value per channel in neighborhood for each pixel. +// The ksize parameter is the kernel size. It must be an odd positive integer (for example: 3, 5, 7). +// If the disk parameter is true, a disk-shaped neighborhood will be used instead of a square neighborhood. +func Median(ksize int, disk bool) Filter { + return &rankFilter{ + ksize: ksize, + disk: disk, + mode: rankMedian, + } +} + +// Minimum creates a local minimum image filter. +// Picks a minimum value per channel in neighborhood for each pixel. +// The ksize parameter is the kernel size. It must be an odd positive integer (for example: 3, 5, 7). +// If the disk parameter is true, a disk-shaped neighborhood will be used instead of a square neighborhood. +func Minimum(ksize int, disk bool) Filter { + return &rankFilter{ + ksize: ksize, + disk: disk, + mode: rankMin, + } +} + +// Maximum creates a local maximum image filter. +// Picks a maximum value per channel in neighborhood for each pixel. +// The ksize parameter is the kernel size. It must be an odd positive integer (for example: 3, 5, 7). +// If the disk parameter is true, a disk-shaped neighborhood will be used instead of a square neighborhood. 
+func Maximum(ksize int, disk bool) Filter { + return &rankFilter{ + ksize: ksize, + disk: disk, + mode: rankMax, + } +} diff --git a/vendor/github.com/disintegration/gift/resize.go b/vendor/github.com/disintegration/gift/resize.go new file mode 100644 index 0000000..a23102a --- /dev/null +++ b/vendor/github.com/disintegration/gift/resize.go @@ -0,0 +1,462 @@ +package gift + +import ( + "image" + "image/draw" + "math" +) + +// Resampling is an interpolation algorithm used for image resizing. +type Resampling interface { + Support() float32 + Kernel(float32) float32 +} + +func bcspline(x, b, c float32) float32 { + if x < 0 { + x = -x + } + if x < 1 { + return ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6 + } + if x < 2 { + return ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6 + } + return 0 +} + +func sinc(x float32) float32 { + if x == 0 { + return 1 + } + return float32(math.Sin(math.Pi*float64(x)) / (math.Pi * float64(x))) +} + +type resamp struct { + name string + support float32 + kernel func(float32) float32 +} + +func (r resamp) String() string { + return r.name +} + +func (r resamp) Support() float32 { + return r.support +} + +func (r resamp) Kernel(x float32) float32 { + return r.kernel(x) +} + +// NearestNeighborResampling is a nearest neighbor resampling filter. +var NearestNeighborResampling Resampling + +// BoxResampling is a box resampling filter (average of surrounding pixels). +var BoxResampling Resampling + +// LinearResampling is a bilinear resampling filter. +var LinearResampling Resampling + +// CubicResampling is a bicubic resampling filter (Catmull-Rom). +var CubicResampling Resampling + +// LanczosResampling is a Lanczos resampling filter (3 lobes). 
+var LanczosResampling Resampling + +type resampWeight struct { + index int + weight float32 +} + +func prepareResampWeights(dstSize, srcSize int, resampling Resampling) [][]resampWeight { + delta := float32(srcSize) / float32(dstSize) + scale := delta + if scale < 1 { + scale = 1 + } + radius := float32(math.Ceil(float64(scale * resampling.Support()))) + + result := make([][]resampWeight, dstSize) + tmp := make([]resampWeight, 0, dstSize*int(radius+2)*2) + + for i := 0; i < dstSize; i++ { + center := (float32(i)+0.5)*delta - 0.5 + + left := int(math.Ceil(float64(center - radius))) + if left < 0 { + left = 0 + } + right := int(math.Floor(float64(center + radius))) + if right > srcSize-1 { + right = srcSize - 1 + } + + var sum float32 + for j := left; j <= right; j++ { + weight := resampling.Kernel((float32(j) - center) / scale) + if weight == 0 { + continue + } + tmp = append(tmp, resampWeight{ + index: j, + weight: weight, + }) + sum += weight + } + + for j := range tmp { + tmp[j].weight /= sum + } + + result[i] = tmp + tmp = tmp[len(tmp):] + } + + return result +} + +func resizeLine(dst []pixel, src []pixel, weights [][]resampWeight) { + for i := 0; i < len(dst); i++ { + var r, g, b, a float32 + for _, w := range weights[i] { + c := src[w.index] + wa := c.a * w.weight + r += c.r * wa + g += c.g * wa + b += c.b * wa + a += wa + } + if a != 0 { + r /= a + g /= a + b /= a + } + dst[i] = pixel{r, g, b, a} + } +} + +func resizeHorizontal(dst draw.Image, src image.Image, w int, resampling Resampling, options *Options) { + srcb := src.Bounds() + dstb := dst.Bounds() + + weights := prepareResampWeights(w, srcb.Dx(), resampling) + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + srcBuf := make([]pixel, srcb.Dx()) + dstBuf := make([]pixel, w) + for srcy := start; srcy < stop; srcy++ { + pixGetter.getPixelRow(srcy, &srcBuf) + resizeLine(dstBuf, srcBuf, weights) + 
pixSetter.setPixelRow(dstb.Min.Y+srcy-srcb.Min.Y, dstBuf) + } + }) +} + +func resizeVertical(dst draw.Image, src image.Image, h int, resampling Resampling, options *Options) { + srcb := src.Bounds() + dstb := dst.Bounds() + + weights := prepareResampWeights(h, srcb.Dy(), resampling) + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.X, srcb.Max.X, func(start, stop int) { + srcBuf := make([]pixel, srcb.Dy()) + dstBuf := make([]pixel, h) + for srcx := start; srcx < stop; srcx++ { + pixGetter.getPixelColumn(srcx, &srcBuf) + resizeLine(dstBuf, srcBuf, weights) + pixSetter.setPixelColumn(dstb.Min.X+srcx-srcb.Min.X, dstBuf) + } + }) +} + +func resizeNearest(dst draw.Image, src image.Image, w, h int, options *Options) { + srcb := src.Bounds() + dstb := dst.Bounds() + dx := float64(srcb.Dx()) / float64(w) + dy := float64(srcb.Dy()) / float64(h) + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, dstb.Min.Y, dstb.Min.Y+h, func(start, stop int) { + for dsty := start; dsty < stop; dsty++ { + for dstx := dstb.Min.X; dstx < dstb.Min.X+w; dstx++ { + fx := math.Floor((float64(dstx-dstb.Min.X) + 0.5) * dx) + fy := math.Floor((float64(dsty-dstb.Min.Y) + 0.5) * dy) + srcx := srcb.Min.X + int(fx) + srcy := srcb.Min.Y + int(fy) + px := pixGetter.getPixel(srcx, srcy) + pixSetter.setPixel(dstx, dsty, px) + } + } + }) +} + +type resizeFilter struct { + width int + height int + resampling Resampling +} + +func (p *resizeFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + w, h := p.width, p.height + srcw, srch := srcBounds.Dx(), srcBounds.Dy() + + if (w == 0 && h == 0) || w < 0 || h < 0 || srcw <= 0 || srch <= 0 { + dstBounds = image.Rect(0, 0, 0, 0) + } else if w == 0 { + fw := float64(h) * float64(srcw) / float64(srch) + dstw := int(math.Max(1, math.Floor(fw+0.5))) + dstBounds = image.Rect(0, 0, dstw, h) + } else if h == 0 { + fh := 
float64(w) * float64(srch) / float64(srcw) + dsth := int(math.Max(1, math.Floor(fh+0.5))) + dstBounds = image.Rect(0, 0, w, dsth) + } else { + dstBounds = image.Rect(0, 0, w, h) + } + + return +} + +func (p *resizeFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + b := p.Bounds(src.Bounds()) + w, h := b.Dx(), b.Dy() + + if w <= 0 || h <= 0 { + return + } + + if src.Bounds().Dx() == w && src.Bounds().Dy() == h { + copyimage(dst, src, options) + return + } + + if p.resampling.Support() <= 0 { + resizeNearest(dst, src, w, h, options) + return + } + + if src.Bounds().Dx() == w { + resizeVertical(dst, src, h, p.resampling, options) + return + } + + if src.Bounds().Dy() == h { + resizeHorizontal(dst, src, w, p.resampling, options) + return + } + + tmp := createTempImage(image.Rect(0, 0, w, src.Bounds().Dy())) + resizeHorizontal(tmp, src, w, p.resampling, options) + resizeVertical(dst, tmp, h, p.resampling, options) +} + +// Resize creates a filter that resizes an image to the specified width and height using the specified resampling. +// If one of width or height is 0, the image aspect ratio is preserved. +// Supported resampling parameters: NearestNeighborResampling, BoxResampling, LinearResampling, CubicResampling, LanczosResampling. +// +// Example: +// +// // Resize the src image to width=300 preserving the aspect ratio. 
+// g := gift.New( +// gift.Resize(300, 0, gift.LanczosResampling), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Resize(width, height int, resampling Resampling) Filter { + return &resizeFilter{ + width: width, + height: height, + resampling: resampling, + } +} + +type resizeToFitFilter struct { + width int + height int + resampling Resampling +} + +func (p *resizeToFitFilter) Bounds(srcBounds image.Rectangle) image.Rectangle { + w, h := p.width, p.height + srcw, srch := srcBounds.Dx(), srcBounds.Dy() + + if w <= 0 || h <= 0 || srcw <= 0 || srch <= 0 { + return image.Rect(0, 0, 0, 0) + } + + if srcw <= w && srch <= h { + return image.Rect(0, 0, srcw, srch) + } + + wratio := float64(srcw) / float64(w) + hratio := float64(srch) / float64(h) + + var dstw, dsth int + if wratio > hratio { + dstw = w + dsth = minint(int(float64(srch)/wratio+0.5), h) + } else { + dsth = h + dstw = minint(int(float64(srcw)/hratio+0.5), w) + } + + return image.Rect(0, 0, dstw, dsth) +} + +func (p *resizeToFitFilter) Draw(dst draw.Image, src image.Image, options *Options) { + b := p.Bounds(src.Bounds()) + Resize(b.Dx(), b.Dy(), p.resampling).Draw(dst, src, options) +} + +// ResizeToFit creates a filter that resizes an image to fit within the specified dimensions while preserving the aspect ratio. +// Supported resampling parameters: NearestNeighborResampling, BoxResampling, LinearResampling, CubicResampling, LanczosResampling. 
+func ResizeToFit(width, height int, resampling Resampling) Filter { + return &resizeToFitFilter{ + width: width, + height: height, + resampling: resampling, + } +} + +type resizeToFillFilter struct { + width int + height int + anchor Anchor + resampling Resampling +} + +func (p *resizeToFillFilter) Bounds(srcBounds image.Rectangle) image.Rectangle { + w, h := p.width, p.height + srcw, srch := srcBounds.Dx(), srcBounds.Dy() + + if w <= 0 || h <= 0 || srcw <= 0 || srch <= 0 { + return image.Rect(0, 0, 0, 0) + } + + return image.Rect(0, 0, w, h) +} + +func (p *resizeToFillFilter) Draw(dst draw.Image, src image.Image, options *Options) { + b := p.Bounds(src.Bounds()) + w, h := b.Dx(), b.Dy() + + if w <= 0 || h <= 0 { + return + } + + srcw, srch := src.Bounds().Dx(), src.Bounds().Dy() + + wratio := float64(srcw) / float64(w) + hratio := float64(srch) / float64(h) + + var tmpw, tmph int + if wratio < hratio { + tmpw = w + tmph = maxint(int(float64(srch)/wratio+0.5), h) + } else { + tmph = h + tmpw = maxint(int(float64(srcw)/hratio+0.5), w) + } + + tmp := createTempImage(image.Rect(0, 0, tmpw, tmph)) + Resize(tmpw, tmph, p.resampling).Draw(tmp, src, options) + CropToSize(w, h, p.anchor).Draw(dst, tmp, options) +} + +// ResizeToFill creates a filter that resizes an image to the smallest possible size that will cover the specified dimensions, +// then crops the resized image to the specified dimensions using the specified anchor point. +// Supported resampling parameters: NearestNeighborResampling, BoxResampling, LinearResampling, CubicResampling, LanczosResampling. +func ResizeToFill(width, height int, resampling Resampling, anchor Anchor) Filter { + return &resizeToFillFilter{ + width: width, + height: height, + anchor: anchor, + resampling: resampling, + } +} + +func init() { + // Nearest neighbor resampling filter. 
+ NearestNeighborResampling = resamp{ + name: "NearestNeighborResampling", + support: 0, + kernel: func(x float32) float32 { + return 0 + }, + } + + // Box resampling filter. + BoxResampling = resamp{ + name: "BoxResampling", + support: 0.5, + kernel: func(x float32) float32 { + if x < 0 { + x = -x + } + if x <= 0.5 { + return 1 + } + return 0 + }, + } + + // Linear resampling filter. + LinearResampling = resamp{ + name: "LinearResampling", + support: 1, + kernel: func(x float32) float32 { + if x < 0 { + x = -x + } + if x < 1 { + return 1 - x + } + return 0 + }, + } + + // Cubic resampling filter (Catmull-Rom). + CubicResampling = resamp{ + name: "CubicResampling", + support: 2, + kernel: func(x float32) float32 { + if x < 0 { + x = -x + } + if x < 2 { + return bcspline(x, 0, 0.5) + } + return 0 + }, + } + + // Lanczos resampling filter (3 lobes). + LanczosResampling = resamp{ + name: "LanczosResampling", + support: 3, + kernel: func(x float32) float32 { + if x < 0 { + x = -x + } + if x < 3 { + return sinc(x) * sinc(x/3) + } + return 0 + }, + } +} diff --git a/vendor/github.com/disintegration/gift/transform.go b/vendor/github.com/disintegration/gift/transform.go new file mode 100644 index 0000000..a9df8f9 --- /dev/null +++ b/vendor/github.com/disintegration/gift/transform.go @@ -0,0 +1,498 @@ +package gift + +import ( + "image" + "image/color" + "image/draw" +) + +type transformType int + +const ( + ttRotate90 transformType = iota + ttRotate180 + ttRotate270 + ttFlipHorizontal + ttFlipVertical + ttTranspose + ttTransverse +) + +type transformFilter struct { + tt transformType +} + +func (p *transformFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + if p.tt == ttRotate90 || p.tt == ttRotate270 || p.tt == ttTranspose || p.tt == ttTransverse { + dstBounds = image.Rect(0, 0, srcBounds.Dy(), srcBounds.Dx()) + } else { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + } + return +} + +func (p *transformFilter) Draw(dst draw.Image, 
src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for srcy := start; srcy < stop; srcy++ { + for srcx := srcb.Min.X; srcx < srcb.Max.X; srcx++ { + var dstx, dsty int + switch p.tt { + case ttRotate90: + dstx = dstb.Min.X + srcy - srcb.Min.Y + dsty = dstb.Min.Y + srcb.Max.X - srcx - 1 + case ttRotate180: + dstx = dstb.Min.X + srcb.Max.X - srcx - 1 + dsty = dstb.Min.Y + srcb.Max.Y - srcy - 1 + case ttRotate270: + dstx = dstb.Min.X + srcb.Max.Y - srcy - 1 + dsty = dstb.Min.Y + srcx - srcb.Min.X + case ttFlipHorizontal: + dstx = dstb.Min.X + srcb.Max.X - srcx - 1 + dsty = dstb.Min.Y + srcy - srcb.Min.Y + case ttFlipVertical: + dstx = dstb.Min.X + srcx - srcb.Min.X + dsty = dstb.Min.Y + srcb.Max.Y - srcy - 1 + case ttTranspose: + dstx = dstb.Min.X + srcy - srcb.Min.Y + dsty = dstb.Min.Y + srcx - srcb.Min.X + case ttTransverse: + dstx = dstb.Min.Y + srcb.Max.Y - srcy - 1 + dsty = dstb.Min.X + srcb.Max.X - srcx - 1 + } + pixSetter.setPixel(dstx, dsty, pixGetter.getPixel(srcx, srcy)) + } + } + }) +} + +// Rotate90 creates a filter that rotates an image 90 degrees counter-clockwise. +func Rotate90() Filter { + return &transformFilter{ + tt: ttRotate90, + } +} + +// Rotate180 creates a filter that rotates an image 180 degrees counter-clockwise. +func Rotate180() Filter { + return &transformFilter{ + tt: ttRotate180, + } +} + +// Rotate270 creates a filter that rotates an image 270 degrees counter-clockwise. +func Rotate270() Filter { + return &transformFilter{ + tt: ttRotate270, + } +} + +// FlipHorizontal creates a filter that flips an image horizontally. +func FlipHorizontal() Filter { + return &transformFilter{ + tt: ttFlipHorizontal, + } +} + +// FlipVertical creates a filter that flips an image vertically. 
+func FlipVertical() Filter { + return &transformFilter{ + tt: ttFlipVertical, + } +} + +// Transpose creates a filter that flips an image horizontally and rotates 90 degrees counter-clockwise. +func Transpose() Filter { + return &transformFilter{ + tt: ttTranspose, + } +} + +// Transverse creates a filter that flips an image vertically and rotates 90 degrees counter-clockwise. +func Transverse() Filter { + return &transformFilter{ + tt: ttTransverse, + } +} + +// Interpolation is an interpolation algorithm used for image transformation. +type Interpolation int + +const ( + // NearestNeighborInterpolation is a nearest-neighbor interpolation algorithm. + NearestNeighborInterpolation Interpolation = iota + // LinearInterpolation is a bilinear interpolation algorithm. + LinearInterpolation + // CubicInterpolation is a bicubic interpolation algorithm. + CubicInterpolation +) + +func rotatePoint(x, y, asin, acos float32) (float32, float32) { + newx := x*acos - y*asin + newy := x*asin + y*acos + return newx, newy +} + +func calcRotatedSize(w, h int, angle float32) (int, int) { + if w <= 0 || h <= 0 { + return 0, 0 + } + + xoff := float32(w)/2 - 0.5 + yoff := float32(h)/2 - 0.5 + + asin, acos := sincosf32(angle) + x1, y1 := rotatePoint(0-xoff, 0-yoff, asin, acos) + x2, y2 := rotatePoint(float32(w-1)-xoff, 0-yoff, asin, acos) + x3, y3 := rotatePoint(float32(w-1)-xoff, float32(h-1)-yoff, asin, acos) + x4, y4 := rotatePoint(0-xoff, float32(h-1)-yoff, asin, acos) + + minx := minf32(x1, minf32(x2, minf32(x3, x4))) + maxx := maxf32(x1, maxf32(x2, maxf32(x3, x4))) + miny := minf32(y1, minf32(y2, minf32(y3, y4))) + maxy := maxf32(y1, maxf32(y2, maxf32(y3, y4))) + + neww := maxx - minx + 1 + if neww-floorf32(neww) > 0.01 { + neww += 2 + } + newh := maxy - miny + 1 + if newh-floorf32(newh) > 0.01 { + newh += 2 + } + return int(neww), int(newh) +} + +type rotateFilter struct { + angle float32 + bgcolor color.Color + interpolation Interpolation +} + +func (p *rotateFilter) 
Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + w, h := calcRotatedSize(srcBounds.Dx(), srcBounds.Dy(), p.angle) + dstBounds = image.Rect(0, 0, w, h) + return +} + +func (p *rotateFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + + w, h := calcRotatedSize(srcb.Dx(), srcb.Dy(), p.angle) + if w <= 0 || h <= 0 { + return + } + + srcxoff := float32(srcb.Dx())/2 - 0.5 + srcyoff := float32(srcb.Dy())/2 - 0.5 + dstxoff := float32(w)/2 - 0.5 + dstyoff := float32(h)/2 - 0.5 + + bgpx := pixelFromColor(p.bgcolor) + asin, acos := sincosf32(p.angle) + + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, 0, h, func(start, stop int) { + for y := start; y < stop; y++ { + for x := 0; x < w; x++ { + + xf, yf := rotatePoint(float32(x)-dstxoff, float32(y)-dstyoff, asin, acos) + xf, yf = float32(srcb.Min.X)+xf+srcxoff, float32(srcb.Min.Y)+yf+srcyoff + var px pixel + + switch p.interpolation { + case CubicInterpolation: + px = interpolateCubic(xf, yf, srcb, pixGetter, bgpx) + case LinearInterpolation: + px = interpolateLinear(xf, yf, srcb, pixGetter, bgpx) + default: + px = interpolateNearest(xf, yf, srcb, pixGetter, bgpx) + } + + pixSetter.setPixel(dstb.Min.X+x, dstb.Min.Y+y, px) + } + } + }) +} + +func interpolateCubic(xf, yf float32, bounds image.Rectangle, pixGetter *pixelGetter, bgpx pixel) pixel { + var pxs [16]pixel + var cfs [16]float32 + var px pixel + + x0, y0 := int(floorf32(xf)), int(floorf32(yf)) + if !image.Pt(x0, y0).In(image.Rect(bounds.Min.X-1, bounds.Min.Y-1, bounds.Max.X, bounds.Max.Y)) { + return bgpx + } + xq, yq := xf-float32(x0), yf-float32(y0) + + for i := 0; i < 4; i++ { + for j := 0; j < 4; j++ { + pt := image.Pt(x0+j-1, y0+i-1) + if pt.In(bounds) { + pxs[i*4+j] = pixGetter.getPixel(pt.X, pt.Y) + } else { + pxs[i*4+j] = bgpx + } + } + } + + const ( + k04 = 1 / 4.0 + 
k12 = 1 / 12.0 + k36 = 1 / 36.0 + ) + + cfs[0] = k36 * xq * yq * (xq - 1) * (xq - 2) * (yq - 1) * (yq - 2) + cfs[1] = -k12 * yq * (xq - 1) * (xq - 2) * (xq + 1) * (yq - 1) * (yq - 2) + cfs[2] = k12 * xq * yq * (xq + 1) * (xq - 2) * (yq - 1) * (yq - 2) + cfs[3] = -k36 * xq * yq * (xq - 1) * (xq + 1) * (yq - 1) * (yq - 2) + cfs[4] = -k12 * xq * (xq - 1) * (xq - 2) * (yq - 1) * (yq - 2) * (yq + 1) + cfs[5] = k04 * (xq - 1) * (xq - 2) * (xq + 1) * (yq - 1) * (yq - 2) * (yq + 1) + cfs[6] = -k04 * xq * (xq + 1) * (xq - 2) * (yq - 1) * (yq - 2) * (yq + 1) + cfs[7] = k12 * xq * (xq - 1) * (xq + 1) * (yq - 1) * (yq - 2) * (yq + 1) + cfs[8] = k12 * xq * yq * (xq - 1) * (xq - 2) * (yq + 1) * (yq - 2) + cfs[9] = -k04 * yq * (xq - 1) * (xq - 2) * (xq + 1) * (yq + 1) * (yq - 2) + cfs[10] = k04 * xq * yq * (xq + 1) * (xq - 2) * (yq + 1) * (yq - 2) + cfs[11] = -k12 * xq * yq * (xq - 1) * (xq + 1) * (yq + 1) * (yq - 2) + cfs[12] = -k36 * xq * yq * (xq - 1) * (xq - 2) * (yq - 1) * (yq + 1) + cfs[13] = k12 * yq * (xq - 1) * (xq - 2) * (xq + 1) * (yq - 1) * (yq + 1) + cfs[14] = -k12 * xq * yq * (xq + 1) * (xq - 2) * (yq - 1) * (yq + 1) + cfs[15] = k36 * xq * yq * (xq - 1) * (xq + 1) * (yq - 1) * (yq + 1) + + for i := range pxs { + wa := pxs[i].a * cfs[i] + px.r += pxs[i].r * wa + px.g += pxs[i].g * wa + px.b += pxs[i].b * wa + px.a += wa + } + + if px.a != 0 { + px.r /= px.a + px.g /= px.a + px.b /= px.a + } + + return px +} + +func interpolateLinear(xf, yf float32, bounds image.Rectangle, pixGetter *pixelGetter, bgpx pixel) pixel { + var pxs [4]pixel + var cfs [4]float32 + var px pixel + + x0, y0 := int(floorf32(xf)), int(floorf32(yf)) + if !image.Pt(x0, y0).In(image.Rect(bounds.Min.X-1, bounds.Min.Y-1, bounds.Max.X, bounds.Max.Y)) { + return bgpx + } + xq, yq := xf-float32(x0), yf-float32(y0) + + for i := 0; i < 2; i++ { + for j := 0; j < 2; j++ { + pt := image.Pt(x0+j, y0+i) + if pt.In(bounds) { + pxs[i*2+j] = pixGetter.getPixel(pt.X, pt.Y) + } else { + pxs[i*2+j] = bgpx + } + } + 
} + + cfs[0] = (1 - xq) * (1 - yq) + cfs[1] = xq * (1 - yq) + cfs[2] = (1 - xq) * yq + cfs[3] = xq * yq + + for i := range pxs { + wa := pxs[i].a * cfs[i] + px.r += pxs[i].r * wa + px.g += pxs[i].g * wa + px.b += pxs[i].b * wa + px.a += wa + } + + if px.a != 0 { + px.r /= px.a + px.g /= px.a + px.b /= px.a + } + + return px +} + +func interpolateNearest(xf, yf float32, bounds image.Rectangle, pixGetter *pixelGetter, bgpx pixel) pixel { + x0, y0 := int(floorf32(xf+0.5)), int(floorf32(yf+0.5)) + if image.Pt(x0, y0).In(bounds) { + return pixGetter.getPixel(x0, y0) + } + return bgpx +} + +// Rotate creates a filter that rotates an image by the given angle counter-clockwise. +// The angle parameter is the rotation angle in degrees. +// The backgroundColor parameter specifies the color of the uncovered zone after the rotation. +// The interpolation parameter specifies the interpolation method. +// Supported interpolation methods: NearestNeighborInterpolation, LinearInterpolation, CubicInterpolation. 
+// +// Example: +// +// g := gift.New( +// gift.Rotate(45, color.Black, gift.LinearInterpolation), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Rotate(angle float32, backgroundColor color.Color, interpolation Interpolation) Filter { + return &rotateFilter{ + angle: angle, + bgcolor: backgroundColor, + interpolation: interpolation, + } +} + +type cropFilter struct { + rect image.Rectangle +} + +func (p *cropFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + b := srcBounds.Intersect(p.rect) + return b.Sub(b.Min) +} + +func (p *cropFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds().Intersect(p.rect) + dstb := dst.Bounds() + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for srcy := start; srcy < stop; srcy++ { + for srcx := srcb.Min.X; srcx < srcb.Max.X; srcx++ { + dstx := dstb.Min.X + srcx - srcb.Min.X + dsty := dstb.Min.Y + srcy - srcb.Min.Y + pixSetter.setPixel(dstx, dsty, pixGetter.getPixel(srcx, srcy)) + } + } + }) +} + +// Crop creates a filter that crops the specified rectangular region from an image. +// +// Example: +// +// g := gift.New( +// gift.Crop(image.Rect(100, 100, 200, 200)), +// ) +// dst := image.NewRGBA(g.Bounds(src.Bounds())) +// g.Draw(dst, src) +// +func Crop(rect image.Rectangle) Filter { + return &cropFilter{ + rect: rect, + } +} + +// Anchor is the anchor point for image cropping. +type Anchor int + +// Anchor point positions. 
+const ( + CenterAnchor Anchor = iota + TopLeftAnchor + TopAnchor + TopRightAnchor + LeftAnchor + RightAnchor + BottomLeftAnchor + BottomAnchor + BottomRightAnchor +) + +func anchorPt(b image.Rectangle, w, h int, anchor Anchor) image.Point { + var x, y int + switch anchor { + case TopLeftAnchor: + x = b.Min.X + y = b.Min.Y + case TopAnchor: + x = b.Min.X + (b.Dx()-w)/2 + y = b.Min.Y + case TopRightAnchor: + x = b.Max.X - w + y = b.Min.Y + case LeftAnchor: + x = b.Min.X + y = b.Min.Y + (b.Dy()-h)/2 + case RightAnchor: + x = b.Max.X - w + y = b.Min.Y + (b.Dy()-h)/2 + case BottomLeftAnchor: + x = b.Min.X + y = b.Max.Y - h + case BottomAnchor: + x = b.Min.X + (b.Dx()-w)/2 + y = b.Max.Y - h + case BottomRightAnchor: + x = b.Max.X - w + y = b.Max.Y - h + default: + x = b.Min.X + (b.Dx()-w)/2 + y = b.Min.Y + (b.Dy()-h)/2 + } + return image.Pt(x, y) +} + +type cropToSizeFilter struct { + w, h int + anchor Anchor +} + +func (p *cropToSizeFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + if p.w <= 0 || p.h <= 0 { + return image.Rect(0, 0, 0, 0) + } + pt := anchorPt(srcBounds, p.w, p.h, p.anchor) + r := image.Rect(0, 0, p.w, p.h).Add(pt) + b := srcBounds.Intersect(r) + return b.Sub(b.Min) +} + +func (p *cropToSizeFilter) Draw(dst draw.Image, src image.Image, options *Options) { + if p.w <= 0 || p.h <= 0 { + return + } + pt := anchorPt(src.Bounds(), p.w, p.h, p.anchor) + r := image.Rect(0, 0, p.w, p.h).Add(pt) + b := src.Bounds().Intersect(r) + Crop(b).Draw(dst, src, options) +} + +// CropToSize creates a filter that crops an image to the specified size using the specified anchor point. 
+func CropToSize(width, height int, anchor Anchor) Filter { + return &cropToSizeFilter{ + w: width, + h: height, + anchor: anchor, + } +} diff --git a/vendor/github.com/disintegration/gift/utils.go b/vendor/github.com/disintegration/gift/utils.go new file mode 100644 index 0000000..61e9270 --- /dev/null +++ b/vendor/github.com/disintegration/gift/utils.go @@ -0,0 +1,226 @@ +package gift + +import ( + "image" + "image/draw" + "math" + "runtime" + "sync" +) + +// parallelize parallelizes the data processing. +func parallelize(enabled bool, start, stop int, fn func(start, stop int)) { + procs := 1 + if enabled { + procs = runtime.GOMAXPROCS(0) + } + var wg sync.WaitGroup + splitRange(start, stop, procs, func(pstart, pstop int) { + wg.Add(1) + go func() { + defer wg.Done() + fn(pstart, pstop) + }() + }) + wg.Wait() +} + +// splitRange splits a range into n parts and calls a function for each of them. +func splitRange(start, stop, n int, fn func(pstart, pstop int)) { + count := stop - start + if count < 1 { + return + } + + if n < 1 { + n = 1 + } + if n > count { + n = count + } + + div := count / n + mod := count % n + + for i := 0; i < n; i++ { + fn( + start+i*div+minint(i, mod), + start+(i+1)*div+minint(i+1, mod), + ) + } +} + +func absf32(x float32) float32 { + if x < 0 { + return -x + } + return x +} + +func minf32(x, y float32) float32 { + if x < y { + return x + } + return y +} + +func maxf32(x, y float32) float32 { + if x > y { + return x + } + return y +} + +func powf32(x, y float32) float32 { + return float32(math.Pow(float64(x), float64(y))) +} + +func logf32(x float32) float32 { + return float32(math.Log(float64(x))) +} + +func expf32(x float32) float32 { + return float32(math.Exp(float64(x))) +} + +func sincosf32(a float32) (float32, float32) { + sin, cos := math.Sincos(math.Pi * float64(a) / 180) + return float32(sin), float32(cos) +} + +func floorf32(x float32) float32 { + return float32(math.Floor(float64(x))) +} + +func sqrtf32(x float32) float32 { + 
return float32(math.Sqrt(float64(x))) +} + +func minint(x, y int) int { + if x < y { + return x + } + return y +} + +func maxint(x, y int) int { + if x > y { + return x + } + return y +} + +func sort(data []float32) { + n := len(data) + + if n < 2 { + return + } + + if n <= 20 { + for i := 1; i < n; i++ { + x := data[i] + j := i - 1 + for ; j >= 0 && data[j] > x; j-- { + data[j+1] = data[j] + } + data[j+1] = x + } + return + } + + i := 0 + j := n - 1 + x := data[n/2] + for i <= j { + for data[i] < x { + i++ + } + for data[j] > x { + j-- + } + if i <= j { + data[i], data[j] = data[j], data[i] + i++ + j-- + } + } + if j > 0 { + sort(data[:j+1]) + } + if i < n-1 { + sort(data[i:]) + } +} + +// createTempImage creates a temporary image. +func createTempImage(r image.Rectangle) draw.Image { + return image.NewNRGBA64(r) +} + +// isOpaque checks if the given image is opaque. +func isOpaque(img image.Image) bool { + type opaquer interface { + Opaque() bool + } + if o, ok := img.(opaquer); ok { + return o.Opaque() + } + return false +} + +// genDisk generates a disk-shaped kernel. +func genDisk(ksize int) []float32 { + if ksize%2 == 0 { + ksize-- + } + if ksize < 1 { + return []float32{} + } + disk := make([]float32, ksize*ksize) + kcenter := ksize / 2 + for i := 0; i < ksize; i++ { + for j := 0; j < ksize; j++ { + x := kcenter - i + y := kcenter - j + r := math.Sqrt(float64(x*x + y*y)) + if r <= float64(ksize/2) { + disk[j*ksize+i] = 1 + } + } + } + return disk +} + +// copyimage copies an image from src to dst. 
+func copyimage(dst draw.Image, src image.Image, options *Options) { + if options == nil { + options = &defaultOptions + } + + srcb := src.Bounds() + dstb := dst.Bounds() + pixGetter := newPixelGetter(src) + pixSetter := newPixelSetter(dst) + + parallelize(options.Parallelization, srcb.Min.Y, srcb.Max.Y, func(start, stop int) { + for srcy := start; srcy < stop; srcy++ { + for srcx := srcb.Min.X; srcx < srcb.Max.X; srcx++ { + dstx := dstb.Min.X + srcx - srcb.Min.X + dsty := dstb.Min.Y + srcy - srcb.Min.Y + pixSetter.setPixel(dstx, dsty, pixGetter.getPixel(srcx, srcy)) + } + } + }) +} + +type copyimageFilter struct{} + +func (p *copyimageFilter) Bounds(srcBounds image.Rectangle) (dstBounds image.Rectangle) { + dstBounds = image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy()) + return +} + +func (p *copyimageFilter) Draw(dst draw.Image, src image.Image, options *Options) { + copyimage(dst, src, options) +} diff --git a/vendor/github.com/dlclark/regexp2/.gitignore b/vendor/github.com/dlclark/regexp2/.gitignore new file mode 100644 index 0000000..fb844c3 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.gitignore @@ -0,0 +1,27 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof +*.out + +.DS_Store diff --git a/vendor/github.com/dlclark/regexp2/.travis.yml b/vendor/github.com/dlclark/regexp2/.travis.yml new file mode 100644 index 0000000..a2da6be --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.travis.yml @@ -0,0 +1,7 @@ +language: go +arch: + - AMD64 + - ppc64le +go: + - 1.9 + - tip diff --git a/vendor/github.com/dlclark/regexp2/ATTRIB b/vendor/github.com/dlclark/regexp2/ATTRIB new file mode 100644 index 0000000..cdf4560 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/ATTRIB @@ -0,0 +1,133 @@ +============ +These pieces 
of code were ported from dotnet/corefx: + +syntax/charclass.go (from RegexCharClass.cs): ported to use the built-in Go unicode classes. Canonicalize is + a direct port, but most of the other code required large changes because the C# implementation + used a string to represent the CharSet data structure and I cleaned that up in my implementation. + +syntax/code.go (from RegexCode.cs): ported literally with various cleanups and layout to make it more Go-ish. + +syntax/escape.go (from RegexParser.cs): ported Escape method and added some optimizations. Unescape is inspired by + the C# implementation but couldn't be directly ported because of the lack of do-while syntax in Go. + +syntax/parser.go (from RegexpParser.cs and RegexOptions.cs): ported parser struct and associated methods as + literally as possible. Several language differences required changes. E.g. lack pre/post-fix increments as + expressions, lack of do-while loops, lack of overloads, etc. + +syntax/prefix.go (from RegexFCD.cs and RegexBoyerMoore.cs): ported as literally as possible and added support + for unicode chars that are longer than the 16-bit char in C# for the 32-bit rune in Go. + +syntax/replacerdata.go (from RegexReplacement.cs): conceptually ported and re-organized to handle differences + in charclass implementation, and fix odd code layout between RegexParser.cs, Regex.cs, and RegexReplacement.cs. + +syntax/tree.go (from RegexTree.cs and RegexNode.cs): ported literally as possible. + +syntax/writer.go (from RegexWriter.cs): ported literally with minor changes to make it more Go-ish. + +match.go (from RegexMatch.cs): ported, simplified, and changed to handle Go's lack of inheritence. + +regexp.go (from Regex.cs and RegexOptions.cs): conceptually serves the same "starting point", but is simplified + and changed to handle differences in C# strings and Go strings/runes. 
+ +replace.go (from RegexReplacement.cs): ported closely and then cleaned up to combine the MatchEvaluator and + simple string replace implementations. + +runner.go (from RegexRunner.cs): ported literally as possible. + +regexp_test.go (from CaptureTests.cs and GroupNamesAndNumbers.cs): conceptually ported, but the code was + manually structured like Go tests. + +replace_test.go (from RegexReplaceStringTest0.cs): conceptually ported + +rtl_test.go (from RightToLeft.cs): conceptually ported +--- +dotnet/corefx was released under this license: + +The MIT License (MIT) + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +============ +These pieces of code are copied from the Go framework: + +- The overall directory structure of regexp2 was inspired by the Go runtime regexp package. 
+- The optimization in the escape method of syntax/escape.go is from the Go runtime QuoteMeta() func in regexp/regexp.go +- The method signatures in regexp.go are designed to match the Go framework regexp methods closely +- func regexp2.MustCompile and func quote are almost identifical to the regexp package versions +- BenchmarkMatch* and TestProgramTooLong* funcs in regexp_performance_test.go were copied from the framework + regexp/exec_test.go +--- +The Go framework was released under this license: + +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +============ +Some test data were gathered from the Mono project. + +regexp_mono_test.go: ported from https://github.com/mono/mono/blob/master/mcs/class/System/Test/System.Text.RegularExpressions/PerlTrials.cs +--- +Mono tests released under this license: + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/vendor/github.com/dlclark/regexp2/LICENSE b/vendor/github.com/dlclark/regexp2/LICENSE new file mode 100644 index 0000000..fe83dfd --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Doug Clark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/dlclark/regexp2/README.md b/vendor/github.com/dlclark/regexp2/README.md new file mode 100644 index 0000000..f3d1bd9 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/README.md @@ -0,0 +1,167 @@ +# regexp2 - full featured regular expressions for Go +Regexp2 is a feature-rich RegExp engine for Go. It doesn't have constant time guarantees like the built-in `regexp` package, but it allows backtracking and is compatible with Perl5 and .NET. You'll likely be better off with the RE2 engine from the `regexp` package and should only use this if you need to write very complex patterns or require compatibility with .NET. 
+ +## Basis of the engine +The engine is ported from the .NET framework's System.Text.RegularExpressions.Regex engine. That engine was open sourced in 2015 under the MIT license. There are some fundamental differences between .NET strings and Go strings that required a bit of borrowing from the Go framework regex engine as well. I cleaned up a couple of the dirtier bits during the port (regexcharclass.cs was terrible), but the parse tree, code emmitted, and therefore patterns matched should be identical. + +## Installing +This is a go-gettable library, so install is easy: + + go get github.com/dlclark/regexp2/... + +## Usage +Usage is similar to the Go `regexp` package. Just like in `regexp`, you start by converting a regex into a state machine via the `Compile` or `MustCompile` methods. They ultimately do the same thing, but `MustCompile` will panic if the regex is invalid. You can then use the provided `Regexp` struct to find matches repeatedly. A `Regexp` struct is safe to use across goroutines. + +```go +re := regexp2.MustCompile(`Your pattern`, 0) +if isMatch, _ := re.MatchString(`Something to match`); isMatch { + //do something +} +``` + +The only error that the `*Match*` methods *should* return is a Timeout if you set the `re.MatchTimeout` field. Any other error is a bug in the `regexp2` package. If you need more details about capture groups in a match then use the `FindStringMatch` method, like so: + +```go +if m, _ := re.FindStringMatch(`Something to match`); m != nil { + // the whole match is always group 0 + fmt.Printf("Group 0: %v\n", m.String()) + + // you can get all the groups too + gps := m.Groups() + + // a group can be captured multiple times, so each cap is separately addressable + fmt.Printf("Group 1, first capture", gps[1].Captures[0].String()) + fmt.Printf("Group 1, second capture", gps[1].Captures[1].String()) +} +``` + +Group 0 is embedded in the Match. Group 0 is an automatically-assigned group that encompasses the whole pattern. 
This means that `m.String()` is the same as `m.Group.String()` and `m.Groups()[0].String()` + +The __last__ capture is embedded in each group, so `g.String()` will return the same thing as `g.Capture.String()` and `g.Captures[len(g.Captures)-1].String()`. + +If you want to find multiple matches from a single input string you should use the `FindNextMatch` method. For example, to implement a function similar to `regexp.FindAllString`: + +```go +func regexp2FindAllString(re *regexp2.Regexp, s string) []string { + var matches []string + m, _ := re.FindStringMatch(s) + for m != nil { + matches = append(matches, m.String()) + m, _ = re.FindNextMatch(m) + } + return matches +} +``` + +`FindNextMatch` is optmized so that it re-uses the underlying string/rune slice. + +The internals of `regexp2` always operate on `[]rune` so `Index` and `Length` data in a `Match` always reference a position in `rune`s rather than `byte`s (even if the input was given as a string). This is a dramatic difference between `regexp` and `regexp2`. It's advisable to use the provided `String()` methods to avoid having to work with indices. 
+ +## Compare `regexp` and `regexp2` +| Category | regexp | regexp2 | +| --- | --- | --- | +| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field | +| Python-style capture groups `(?Pre)` | yes | no (yes in RE2 compat mode) | +| .NET-style capture groups `(?re)` or `(?'name're)` | no | yes | +| comments `(?#comment)` | no | yes | +| branch numbering reset `(?\|a\|b)` | no | no | +| possessive match `(?>re)` | no | yes | +| positive lookahead `(?=re)` | no | yes | +| negative lookahead `(?!re)` | no | yes | +| positive lookbehind `(?<=re)` | no | yes | +| negative lookbehind `(?re)`) +* change singleline behavior for `$` to only match end of string (like RE2) (see [#24](https://github.com/dlclark/regexp2/issues/24)) +* change the character classes `\d` `\s` and `\w` to match the same characters as RE2. NOTE: if you also use the `ECMAScript` option then this will change the `\s` character class to match ECMAScript instead of RE2. ECMAScript allows more whitespace characters in `\s` than RE2 (but still fewer than the the default behavior). +* allow character escape sequences to have defaults. For example, by default `\_` isn't a known character escape and will fail to compile, but in RE2 mode it will match the literal character `_` + +```go +re := regexp2.MustCompile(`Your RE2-compatible pattern`, regexp2.RE2) +if isMatch, _ := re.MatchString(`Something to match`); isMatch { + //do something +} +``` + +This feature is a work in progress and I'm open to ideas for more things to put here (maybe more relaxed character escaping rules?). + +## Catastrophic Backtracking and Timeouts + +`regexp2` supports features that can lead to catastrophic backtracking. +`Regexp.MatchTimeout` can be set to to limit the impact of such behavior; the +match will fail with an error after approximately MatchTimeout. No timeout +checks are done by default. + +Timeout checking is not free. 
The current timeout checking implementation starts +a background worker that updates a clock value approximately once every 100 +milliseconds. The matching code compares this value against the precomputed +deadline for the match. The performance impact is as follows. + +1. A match with a timeout runs almost as fast as a match without a timeout. +2. If any live matches have a timeout, there will be a background CPU load + (`~0.15%` currently on a modern machine). This load will remain constant + regardless of the number of matches done including matches done in parallel. +3. If no live matches are using a timeout, the background load will remain + until the longest deadline (match timeout + the time when the match started) + is reached. E.g., if you set a timeout of one minute the load will persist + for approximately a minute even if the match finishes quickly. + +See [PR #58](https://github.com/dlclark/regexp2/pull/58) for more details and +alternatives considered. + +## Goroutine leak error +If you're using a library during unit tests (e.g. https://github.com/uber-go/goleak) that validates all goroutines are exited then you'll likely get an error if you or any of your dependencies use regex's with a MatchTimeout. +To remedy the problem you'll need to tell the unit test to wait until the backgroup timeout goroutine is exited. + +```go +func TestSomething(t *testing.T) { + defer goleak.VerifyNone(t) + defer regexp2.StopTimeoutClock() + + // ... test +} + +//or + +func TestMain(m *testing.M) { + // setup + // ... + + // run + m.Run() + + //tear down + regexp2.StopTimeoutClock() + goleak.VerifyNone(t) +} +``` + +This will add ~100ms runtime to each test (or TestMain). If that's too much time you can set the clock cycle rate of the timeout goroutine in an init function in a test file. `regexp2.SetTimeoutCheckPeriod` isn't threadsafe so it must be setup before starting any regex's with Timeouts. 
+ +```go +func init() { + //speed up testing by making the timeout clock 1ms + regexp2.SetTimeoutCheckPeriod(time.Millisecond) +} +``` + +## ECMAScript compatibility mode +In this mode the engine provides compatibility with the [regex engine](https://tc39.es/ecma262/multipage/text-processing.html#sec-regexp-regular-expression-objects) described in the ECMAScript specification. + +Additionally a Unicode mode is provided which allows parsing of `\u{CodePoint}` syntax that is only when both are provided. + +## Library features that I'm still working on +- Regex split + +## Potential bugs +I've run a battery of tests against regexp2 from various sources and found the debug output matches the .NET engine, but .NET and Go handle strings very differently. I've attempted to handle these differences, but most of my testing deals with basic ASCII with a little bit of multi-byte Unicode. There's a chance that there are bugs in the string handling related to character sets with supplementary Unicode chars. Right-to-Left support is coded, but not well tested either. + +## Find a bug? +I'm open to new issues and pull requests with tests if you find something odd! diff --git a/vendor/github.com/dlclark/regexp2/fastclock.go b/vendor/github.com/dlclark/regexp2/fastclock.go new file mode 100644 index 0000000..caf2c9d --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/fastclock.go @@ -0,0 +1,129 @@ +package regexp2 + +import ( + "sync" + "sync/atomic" + "time" +) + +// fasttime holds a time value (ticks since clock initialization) +type fasttime int64 + +// fastclock provides a fast clock implementation. +// +// A background goroutine periodically stores the current time +// into an atomic variable. +// +// A deadline can be quickly checked for expiration by comparing +// its value to the clock stored in the atomic variable. +// +// The goroutine automatically stops once clockEnd is reached. +// (clockEnd covers the largest deadline seen so far + some +// extra time). 
This ensures that if regexp2 with timeouts +// stops being used we will stop background work. +type fastclock struct { + // instances of atomicTime must be at the start of the struct (or at least 64-bit aligned) + // otherwise 32-bit architectures will panic + + current atomicTime // Current time (approximate) + clockEnd atomicTime // When clock updater is supposed to stop (>= any existing deadline) + + // current and clockEnd can be read via atomic loads. + // Reads and writes of other fields require mu to be held. + mu sync.Mutex + start time.Time // Time corresponding to fasttime(0) + running bool // Is a clock updater running? +} + +var fast fastclock + +// reached returns true if current time is at or past t. +func (t fasttime) reached() bool { + return fast.current.read() >= t +} + +// makeDeadline returns a time that is approximately time.Now().Add(d) +func makeDeadline(d time.Duration) fasttime { + // Increase the deadline since the clock we are reading may be + // just about to tick forwards. + end := fast.current.read() + durationToTicks(d+clockPeriod) + + // Start or extend clock if necessary. + if end > fast.clockEnd.read() { + extendClock(end) + } + return end +} + +// extendClock ensures that clock is live and will run until at least end. +func extendClock(end fasttime) { + fast.mu.Lock() + defer fast.mu.Unlock() + + if fast.start.IsZero() { + fast.start = time.Now() + } + + // Extend the running time to cover end as well as a bit of slop. 
+ if shutdown := end + durationToTicks(time.Second); shutdown > fast.clockEnd.read() { + fast.clockEnd.write(shutdown) + } + + // Start clock if necessary + if !fast.running { + fast.running = true + go runClock() + } +} + +// stop the timeout clock in the background +// should only used for unit tests to abandon the background goroutine +func stopClock() { + fast.mu.Lock() + if fast.running { + fast.clockEnd.write(fasttime(0)) + } + fast.mu.Unlock() + + // pause until not running + // get and release the lock + isRunning := true + for isRunning { + time.Sleep(clockPeriod / 2) + fast.mu.Lock() + isRunning = fast.running + fast.mu.Unlock() + } +} + +func durationToTicks(d time.Duration) fasttime { + // Downscale nanoseconds to approximately a millisecond so that we can avoid + // overflow even if the caller passes in math.MaxInt64. + return fasttime(d) >> 20 +} + +const DefaultClockPeriod = 100 * time.Millisecond + +// clockPeriod is the approximate interval between updates of approximateClock. +var clockPeriod = DefaultClockPeriod + +func runClock() { + fast.mu.Lock() + defer fast.mu.Unlock() + + for fast.current.read() <= fast.clockEnd.read() { + // Unlock while sleeping. 
+ fast.mu.Unlock() + time.Sleep(clockPeriod) + fast.mu.Lock() + + newTime := durationToTicks(time.Since(fast.start)) + fast.current.write(newTime) + } + fast.running = false +} + +type atomicTime struct{ v int64 } // Should change to atomic.Int64 when we can use go 1.19 + +func (t *atomicTime) read() fasttime { return fasttime(atomic.LoadInt64(&t.v)) } +func (t *atomicTime) write(v fasttime) { atomic.StoreInt64(&t.v, int64(v)) } diff --git a/vendor/github.com/dlclark/regexp2/match.go b/vendor/github.com/dlclark/regexp2/match.go new file mode 100644 index 0000000..1871cff --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/match.go @@ -0,0 +1,347 @@ +package regexp2 + +import ( + "bytes" + "fmt" +) + +// Match is a single regex result match that contains groups and repeated captures +// -Groups +// -Capture +type Match struct { + Group //embeded group 0 + + regex *Regexp + otherGroups []Group + + // input to the match + textpos int + textstart int + + capcount int + caps []int + sparseCaps map[int]int + + // output from the match + matches [][]int + matchcount []int + + // whether we've done any balancing with this match. If we + // have done balancing, we'll need to do extra work in Tidy(). + balancing bool +} + +// Group is an explicit or implit (group 0) matched group within the pattern +type Group struct { + Capture // the last capture of this group is embeded for ease of use + + Name string // group name + Captures []Capture // captures of this group +} + +// Capture is a single capture of text within the larger original string +type Capture struct { + // the original string + text []rune + // the position in the original string where the first character of + // captured substring was found. + Index int + // the length of the captured substring. 
+ Length int +} + +// String returns the captured text as a String +func (c *Capture) String() string { + return string(c.text[c.Index : c.Index+c.Length]) +} + +// Runes returns the captured text as a rune slice +func (c *Capture) Runes() []rune { + return c.text[c.Index : c.Index+c.Length] +} + +func newMatch(regex *Regexp, capcount int, text []rune, startpos int) *Match { + m := Match{ + regex: regex, + matchcount: make([]int, capcount), + matches: make([][]int, capcount), + textstart: startpos, + balancing: false, + } + m.Name = "0" + m.text = text + m.matches[0] = make([]int, 2) + return &m +} + +func newMatchSparse(regex *Regexp, caps map[int]int, capcount int, text []rune, startpos int) *Match { + m := newMatch(regex, capcount, text, startpos) + m.sparseCaps = caps + return m +} + +func (m *Match) reset(text []rune, textstart int) { + m.text = text + m.textstart = textstart + for i := 0; i < len(m.matchcount); i++ { + m.matchcount[i] = 0 + } + m.balancing = false +} + +func (m *Match) tidy(textpos int) { + + interval := m.matches[0] + m.Index = interval[0] + m.Length = interval[1] + m.textpos = textpos + m.capcount = m.matchcount[0] + //copy our root capture to the list + m.Group.Captures = []Capture{m.Group.Capture} + + if m.balancing { + // The idea here is that we want to compact all of our unbalanced captures. To do that we + // use j basically as a count of how many unbalanced captures we have at any given time + // (really j is an index, but j/2 is the count). First we skip past all of the real captures + // until we find a balance captures. Then we check each subsequent entry. If it's a balance + // capture (it's negative), we decrement j. If it's a real capture, we increment j and copy + // it down to the last free position. 
+ for cap := 0; cap < len(m.matchcount); cap++ { + limit := m.matchcount[cap] * 2 + matcharray := m.matches[cap] + + var i, j int + + for i = 0; i < limit; i++ { + if matcharray[i] < 0 { + break + } + } + + for j = i; i < limit; i++ { + if matcharray[i] < 0 { + // skip negative values + j-- + } else { + // but if we find something positive (an actual capture), copy it back to the last + // unbalanced position. + if i != j { + matcharray[j] = matcharray[i] + } + j++ + } + } + + m.matchcount[cap] = j / 2 + } + + m.balancing = false + } +} + +// isMatched tells if a group was matched by capnum +func (m *Match) isMatched(cap int) bool { + return cap < len(m.matchcount) && m.matchcount[cap] > 0 && m.matches[cap][m.matchcount[cap]*2-1] != (-3+1) +} + +// matchIndex returns the index of the last specified matched group by capnum +func (m *Match) matchIndex(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-2] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// matchLength returns the length of the last specified matched group by capnum +func (m *Match) matchLength(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-1] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// Nonpublic builder: add a capture to the group specified by "c" +func (m *Match) addMatch(c, start, l int) { + + if m.matches[c] == nil { + m.matches[c] = make([]int, 2) + } + + capcount := m.matchcount[c] + + if capcount*2+2 > len(m.matches[c]) { + oldmatches := m.matches[c] + newmatches := make([]int, capcount*8) + copy(newmatches, oldmatches[:capcount*2]) + m.matches[c] = newmatches + } + + m.matches[c][capcount*2] = start + m.matches[c][capcount*2+1] = l + m.matchcount[c] = capcount + 1 + //log.Printf("addMatch: c=%v, i=%v, l=%v ... matches: %v", c, start, l, m.matches) +} + +// Nonpublic builder: Add a capture to balance the specified group. This is used by the +// balanced match construct. (?...) 
+// +// If there were no such thing as backtracking, this would be as simple as calling RemoveMatch(c). +// However, since we have backtracking, we need to keep track of everything. +func (m *Match) balanceMatch(c int) { + m.balancing = true + + // we'll look at the last capture first + capcount := m.matchcount[c] + target := capcount*2 - 2 + + // first see if it is negative, and therefore is a reference to the next available + // capture group for balancing. If it is, we'll reset target to point to that capture. + if m.matches[c][target] < 0 { + target = -3 - m.matches[c][target] + } + + // move back to the previous capture + target -= 2 + + // if the previous capture is a reference, just copy that reference to the end. Otherwise, point to it. + if target >= 0 && m.matches[c][target] < 0 { + m.addMatch(c, m.matches[c][target], m.matches[c][target+1]) + } else { + m.addMatch(c, -3-target, -4-target /* == -3 - (target + 1) */) + } +} + +// Nonpublic builder: removes a group match by capnum +func (m *Match) removeMatch(c int) { + m.matchcount[c]-- +} + +// GroupCount returns the number of groups this match has matched +func (m *Match) GroupCount() int { + return len(m.matchcount) +} + +// GroupByName returns a group based on the name of the group, or nil if the group name does not exist +func (m *Match) GroupByName(name string) *Group { + num := m.regex.GroupNumberFromName(name) + if num < 0 { + return nil + } + return m.GroupByNumber(num) +} + +// GroupByNumber returns a group based on the number of the group, or nil if the group number does not exist +func (m *Match) GroupByNumber(num int) *Group { + // check our sparse map + if m.sparseCaps != nil { + if newNum, ok := m.sparseCaps[num]; ok { + num = newNum + } + } + if num >= len(m.matchcount) || num < 0 { + return nil + } + + if num == 0 { + return &m.Group + } + + m.populateOtherGroups() + + return &m.otherGroups[num-1] +} + +// Groups returns all the capture groups, starting with group 0 (the full match) +func 
(m *Match) Groups() []Group { + m.populateOtherGroups() + g := make([]Group, len(m.otherGroups)+1) + g[0] = m.Group + copy(g[1:], m.otherGroups) + return g +} + +func (m *Match) populateOtherGroups() { + // Construct all the Group objects first time called + if m.otherGroups == nil { + m.otherGroups = make([]Group, len(m.matchcount)-1) + for i := 0; i < len(m.otherGroups); i++ { + m.otherGroups[i] = newGroup(m.regex.GroupNameFromNumber(i+1), m.text, m.matches[i+1], m.matchcount[i+1]) + } + } +} + +func (m *Match) groupValueAppendToBuf(groupnum int, buf *bytes.Buffer) { + c := m.matchcount[groupnum] + if c == 0 { + return + } + + matches := m.matches[groupnum] + + index := matches[(c-1)*2] + last := index + matches[(c*2)-1] + + for ; index < last; index++ { + buf.WriteRune(m.text[index]) + } +} + +func newGroup(name string, text []rune, caps []int, capcount int) Group { + g := Group{} + g.text = text + if capcount > 0 { + g.Index = caps[(capcount-1)*2] + g.Length = caps[(capcount*2)-1] + } + g.Name = name + g.Captures = make([]Capture, capcount) + for i := 0; i < capcount; i++ { + g.Captures[i] = Capture{ + text: text, + Index: caps[i*2], + Length: caps[i*2+1], + } + } + //log.Printf("newGroup! 
capcount %v, %+v", capcount, g) + + return g +} + +func (m *Match) dump() string { + buf := &bytes.Buffer{} + buf.WriteRune('\n') + if len(m.sparseCaps) > 0 { + for k, v := range m.sparseCaps { + fmt.Fprintf(buf, "Slot %v -> %v\n", k, v) + } + } + + for i, g := range m.Groups() { + fmt.Fprintf(buf, "Group %v (%v), %v caps:\n", i, g.Name, len(g.Captures)) + + for _, c := range g.Captures { + fmt.Fprintf(buf, " (%v, %v) %v\n", c.Index, c.Length, c.String()) + } + } + /* + for i := 0; i < len(m.matchcount); i++ { + fmt.Fprintf(buf, "\nGroup %v (%v):\n", i, m.regex.GroupNameFromNumber(i)) + + for j := 0; j < m.matchcount[i]; j++ { + text := "" + + if m.matches[i][j*2] >= 0 { + start := m.matches[i][j*2] + text = m.text[start : start+m.matches[i][j*2+1]] + } + + fmt.Fprintf(buf, " (%v, %v) %v\n", m.matches[i][j*2], m.matches[i][j*2+1], text) + } + } + */ + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/regexp.go b/vendor/github.com/dlclark/regexp2/regexp.go new file mode 100644 index 0000000..a7ddbaf --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/regexp.go @@ -0,0 +1,395 @@ +/* +Package regexp2 is a regexp package that has an interface similar to Go's framework regexp engine but uses a +more feature full regex engine behind the scenes. + +It doesn't have constant time guarantees, but it allows backtracking and is compatible with Perl5 and .NET. +You'll likely be better off with the RE2 engine from the regexp package and should only use this if you +need to write very complex patterns or require compatibility with .NET. 
+*/ +package regexp2 + +import ( + "errors" + "math" + "strconv" + "sync" + "time" + + "github.com/dlclark/regexp2/syntax" +) + +var ( + // DefaultMatchTimeout used when running regexp matches -- "forever" + DefaultMatchTimeout = time.Duration(math.MaxInt64) + // DefaultUnmarshalOptions used when unmarshaling a regex from text + DefaultUnmarshalOptions = None +) + +// Regexp is the representation of a compiled regular expression. +// A Regexp is safe for concurrent use by multiple goroutines. +type Regexp struct { + // A match will time out if it takes (approximately) more than + // MatchTimeout. This is a safety check in case the match + // encounters catastrophic backtracking. The default value + // (DefaultMatchTimeout) causes all time out checking to be + // suppressed. + MatchTimeout time.Duration + + // read-only after Compile + pattern string // as passed to Compile + options RegexOptions // options + + caps map[int]int // capnum->index + capnames map[string]int //capture group name -> index + capslist []string //sorted list of capture group names + capsize int // size of the capture array + + code *syntax.Code // compiled program + + // cache of machines for running regexp + muRun *sync.Mutex + runner []*runner +} + +// Compile parses a regular expression and returns, if successful, +// a Regexp object that can be used to match against text. +func Compile(expr string, opt RegexOptions) (*Regexp, error) { + // parse it + tree, err := syntax.Parse(expr, syntax.RegexOptions(opt)) + if err != nil { + return nil, err + } + + // translate it to code + code, err := syntax.Write(tree) + if err != nil { + return nil, err + } + + // return it + return &Regexp{ + pattern: expr, + options: opt, + caps: code.Caps, + capnames: tree.Capnames, + capslist: tree.Caplist, + capsize: code.Capsize, + code: code, + MatchTimeout: DefaultMatchTimeout, + muRun: &sync.Mutex{}, + }, nil +} + +// MustCompile is like Compile but panics if the expression cannot be parsed. 
+// It simplifies safe initialization of global variables holding compiled regular +// expressions. +func MustCompile(str string, opt RegexOptions) *Regexp { + regexp, error := Compile(str, opt) + if error != nil { + panic(`regexp2: Compile(` + quote(str) + `): ` + error.Error()) + } + return regexp +} + +// Escape adds backslashes to any special characters in the input string +func Escape(input string) string { + return syntax.Escape(input) +} + +// Unescape removes any backslashes from previously-escaped special characters in the input string +func Unescape(input string) (string, error) { + return syntax.Unescape(input) +} + +// SetTimeoutPeriod is a debug function that sets the frequency of the timeout goroutine's sleep cycle. +// Defaults to 100ms. The only benefit of setting this lower is that the 1 background goroutine that manages +// timeouts may exit slightly sooner after all the timeouts have expired. See Github issue #63 +func SetTimeoutCheckPeriod(d time.Duration) { + clockPeriod = d +} + +// StopTimeoutClock should only be used in unit tests to prevent the timeout clock goroutine +// from appearing like a leaking goroutine +func StopTimeoutClock() { + stopClock() +} + +// String returns the source text used to compile the regular expression. +func (re *Regexp) String() string { + return re.pattern +} + +func quote(s string) string { + if strconv.CanBackquote(s) { + return "`" + s + "`" + } + return strconv.Quote(s) +} + +// RegexOptions impact the runtime and parsing behavior +// for each specific regex. They are setable in code as well +// as in the regex pattern itself. 
+type RegexOptions int32 + +const ( + None RegexOptions = 0x0 + IgnoreCase = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" + RE2 = 0x0200 // RE2 (regexp package) compatibility mode + Unicode = 0x0400 // "u" +) + +func (re *Regexp) RightToLeft() bool { + return re.options&RightToLeft != 0 +} + +func (re *Regexp) Debug() bool { + return re.options&Debug != 0 +} + +// Replace searches the input string and replaces each match found with the replacement text. +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string +func (re *Regexp) Replace(input, replacement string, startAt, count int) (string, error) { + data, err := syntax.NewReplacerData(replacement, re.caps, re.capsize, re.capnames, syntax.RegexOptions(re.options)) + if err != nil { + return "", err + } + //TODO: cache ReplacerData + + return replace(re, data, nil, input, startAt, count) +} + +// ReplaceFunc searches the input string and replaces each match found using the string from the evaluator +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string. 
+func (re *Regexp) ReplaceFunc(input string, evaluator MatchEvaluator, startAt, count int) (string, error) { + return replace(re, nil, evaluator, input, startAt, count) +} + +// FindStringMatch searches the input string for a Regexp match +func (re *Regexp) FindStringMatch(s string) (*Match, error) { + // convert string to runes + return re.run(false, -1, getRunes(s)) +} + +// FindRunesMatch searches the input rune slice for a Regexp match +func (re *Regexp) FindRunesMatch(r []rune) (*Match, error) { + return re.run(false, -1, r) +} + +// FindStringMatchStartingAt searches the input string for a Regexp match starting at the startAt index +func (re *Regexp) FindStringMatchStartingAt(s string, startAt int) (*Match, error) { + if startAt > len(s) { + return nil, errors.New("startAt must be less than the length of the input string") + } + r, startAt := re.getRunesAndStart(s, startAt) + if startAt == -1 { + // we didn't find our start index in the string -- that's a problem + return nil, errors.New("startAt must align to the start of a valid rune in the input string") + } + + return re.run(false, startAt, r) +} + +// FindRunesMatchStartingAt searches the input rune slice for a Regexp match starting at the startAt index +func (re *Regexp) FindRunesMatchStartingAt(r []rune, startAt int) (*Match, error) { + return re.run(false, startAt, r) +} + +// FindNextMatch returns the next match in the same input string as the match parameter. +// Will return nil if there is no next match or if given a nil match. 
+func (re *Regexp) FindNextMatch(m *Match) (*Match, error) { + if m == nil { + return nil, nil + } + + // If previous match was empty, advance by one before matching to prevent + // infinite loop + startAt := m.textpos + if m.Length == 0 { + if m.textpos == len(m.text) { + return nil, nil + } + + if re.RightToLeft() { + startAt-- + } else { + startAt++ + } + } + return re.run(false, startAt, m.text) +} + +// MatchString return true if the string matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchString(s string) (bool, error) { + m, err := re.run(true, -1, getRunes(s)) + if err != nil { + return false, err + } + return m != nil, nil +} + +func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) { + if startAt < 0 { + if re.RightToLeft() { + r := getRunes(s) + return r, len(r) + } + return getRunes(s), 0 + } + ret := make([]rune, len(s)) + i := 0 + runeIdx := -1 + for strIdx, r := range s { + if strIdx == startAt { + runeIdx = i + } + ret[i] = r + i++ + } + if startAt == len(s) { + runeIdx = i + } + return ret[:i], runeIdx +} + +func getRunes(s string) []rune { + return []rune(s) +} + +// MatchRunes return true if the runes matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchRunes(r []rune) (bool, error) { + m, err := re.run(true, -1, r) + if err != nil { + return false, err + } + return m != nil, nil +} + +// GetGroupNames Returns the set of strings used to name capturing groups in the expression. +func (re *Regexp) GetGroupNames() []string { + var result []string + + if re.capslist == nil { + result = make([]string, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = strconv.Itoa(i) + } + } else { + result = make([]string, len(re.capslist)) + copy(result, re.capslist) + } + + return result +} + +// GetGroupNumbers returns the integer group numbers corresponding to a group name. 
+func (re *Regexp) GetGroupNumbers() []int { + var result []int + + if re.caps == nil { + result = make([]int, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = i + } + } else { + result = make([]int, len(re.caps)) + + for k, v := range re.caps { + result[v] = k + } + } + + return result +} + +// GroupNameFromNumber retrieves a group name that corresponds to a group number. +// It will return "" for and unknown group number. Unnamed groups automatically +// receive a name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNameFromNumber(i int) string { + if re.capslist == nil { + if i >= 0 && i < re.capsize { + return strconv.Itoa(i) + } + + return "" + } + + if re.caps != nil { + var ok bool + if i, ok = re.caps[i]; !ok { + return "" + } + } + + if i >= 0 && i < len(re.capslist) { + return re.capslist[i] + } + + return "" +} + +// GroupNumberFromName returns a group number that corresponds to a group name. +// Returns -1 if the name is not a recognized group name. Numbered groups +// automatically get a group name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNumberFromName(name string) int { + // look up name if we have a hashtable of names + if re.capnames != nil { + if k, ok := re.capnames[name]; ok { + return k + } + + return -1 + } + + // convert to an int if it looks like a number + result := 0 + for i := 0; i < len(name); i++ { + ch := name[i] + + if ch > '9' || ch < '0' { + return -1 + } + + result *= 10 + result += int(ch - '0') + } + + // return int if it's in range + if result >= 0 && result < re.capsize { + return result + } + + return -1 +} + +// MarshalText implements [encoding.TextMarshaler]. The output +// matches that of calling the [Regexp.String] method. +func (re *Regexp) MarshalText() ([]byte, error) { + return []byte(re.String()), nil +} + +// UnmarshalText implements [encoding.TextUnmarshaler] by calling +// [Compile] on the encoded value. 
+func (re *Regexp) UnmarshalText(text []byte) error { + newRE, err := Compile(string(text), DefaultUnmarshalOptions) + if err != nil { + return err + } + *re = *newRE + return nil +} diff --git a/vendor/github.com/dlclark/regexp2/replace.go b/vendor/github.com/dlclark/regexp2/replace.go new file mode 100644 index 0000000..0376bd9 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/replace.go @@ -0,0 +1,177 @@ +package regexp2 + +import ( + "bytes" + "errors" + + "github.com/dlclark/regexp2/syntax" +) + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +// MatchEvaluator is a function that takes a match and returns a replacement string to be used +type MatchEvaluator func(Match) string + +// Three very similar algorithms appear below: replace (pattern), +// replace (evaluator), and split. + +// Replace Replaces all occurrences of the regex in the string with the +// replacement pattern. +// +// Note that the special case of no matches is handled on its own: +// with no matches, the input string is returned unchanged. +// The right-to-left case is split out because StringBuilder +// doesn't handle right-to-left string building directly very well. 
+func replace(regex *Regexp, data *syntax.ReplacerData, evaluator MatchEvaluator, input string, startAt, count int) (string, error) { + if count < -1 { + return "", errors.New("Count too small") + } + if count == 0 { + return "", nil + } + + m, err := regex.FindStringMatchStartingAt(input, startAt) + + if err != nil { + return "", err + } + if m == nil { + return input, nil + } + + buf := &bytes.Buffer{} + text := m.text + + if !regex.RightToLeft() { + prevat := 0 + for m != nil { + if m.Index != prevat { + buf.WriteString(string(text[prevat:m.Index])) + } + prevat = m.Index + m.Length + if evaluator == nil { + replacementImpl(data, buf, m) + } else { + buf.WriteString(evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat < len(text) { + buf.WriteString(string(text[prevat:])) + } + } else { + prevat := len(text) + var al []string + + for m != nil { + if m.Index+m.Length != prevat { + al = append(al, string(text[m.Index+m.Length:prevat])) + } + prevat = m.Index + if evaluator == nil { + replacementImplRTL(data, &al, m) + } else { + al = append(al, evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat > 0 { + buf.WriteString(string(text[:prevat])) + } + + for i := len(al) - 1; i >= 0; i-- { + buf.WriteString(al[i]) + } + } + + return buf.String(), nil +} + +// Given a Match, emits into the StringBuilder the evaluated +// substitution pattern. 
+func replacementImpl(data *syntax.ReplacerData, buf *bytes.Buffer, m *Match) { + for _, r := range data.Rules { + + if r >= 0 { // string lookup + buf.WriteString(data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + } + } +} + +func replacementImplRTL(data *syntax.ReplacerData, al *[]string, m *Match) { + l := *al + buf := &bytes.Buffer{} + + for _, r := range data.Rules { + buf.Reset() + if r >= 0 { // string lookup + l = append(l, data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + l = append(l, buf.String()) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + l = append(l, buf.String()) + } + } + + *al = l +} diff --git a/vendor/github.com/dlclark/regexp2/runner.go b/vendor/github.com/dlclark/regexp2/runner.go new file mode 100644 index 0000000..494dcef --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/runner.go @@ -0,0 +1,1609 @@ +package regexp2 + +import ( + "bytes" + "errors" + "fmt" + "math" + "strconv" + "strings" + "time" + "unicode" + + 
"github.com/dlclark/regexp2/syntax" +) + +type runner struct { + re *Regexp + code *syntax.Code + + runtextstart int // starting point for search + + runtext []rune // text to search + runtextpos int // current position in text + runtextend int + + // The backtracking stack. Opcodes use this to store data regarding + // what they have matched and where to backtrack to. Each "frame" on + // the stack takes the form of [CodePosition Data1 Data2...], where + // CodePosition is the position of the current opcode and + // the data values are all optional. The CodePosition can be negative, and + // these values (also called "back2") are used by the BranchMark family of opcodes + // to indicate whether they are backtracking after a successful or failed + // match. + // When we backtrack, we pop the CodePosition off the stack, set the current + // instruction pointer to that code position, and mark the opcode + // with a backtracking flag ("Back"). Each opcode then knows how to + // handle its own data. + runtrack []int + runtrackpos int + + // This stack is used to track text positions across different opcodes. + // For example, in /(a*b)+/, the parentheses result in a SetMark/CaptureMark + // pair. SetMark records the text position before we match a*b. Then + // CaptureMark uses that position to figure out where the capture starts. + // Opcodes which push onto this stack are always paired with other opcodes + // which will pop the value from it later. A successful match should mean + // that this stack is empty. + runstack []int + runstackpos int + + // The crawl stack is used to keep track of captures. Every time a group + // has a capture, we push its group number onto the runcrawl stack. In + // the case of a balanced match, we push BOTH groups onto the stack. 
+ runcrawl []int + runcrawlpos int + + runtrackcount int // count of states that may do backtracking + + runmatch *Match // result object + + ignoreTimeout bool + timeout time.Duration // timeout in milliseconds (needed for actual) + deadline fasttime + + operator syntax.InstOp + codepos int + rightToLeft bool + caseInsensitive bool +} + +// run searches for matches and can continue from the previous match +// +// quick is usually false, but can be true to not return matches, just put it in caches +// textstart is -1 to start at the "beginning" (depending on Right-To-Left), otherwise an index in input +// input is the string to search for our regex pattern +func (re *Regexp) run(quick bool, textstart int, input []rune) (*Match, error) { + + // get a cached runner + runner := re.getRunner() + defer re.putRunner(runner) + + if textstart < 0 { + if re.RightToLeft() { + textstart = len(input) + } else { + textstart = 0 + } + } + + return runner.scan(input, textstart, quick, re.MatchTimeout) +} + +// Scans the string to find the first match. Uses the Match object +// both to feed text in and as a place to store matches that come out. +// +// All the action is in the Go() method. Our +// responsibility is to load up the class members before +// calling Go. +// +// The optimizer can compute a set of candidate starting characters, +// and we could use a separate method Skip() that will quickly scan past +// any characters that we know can't match. 
+func (r *runner) scan(rt []rune, textstart int, quick bool, timeout time.Duration) (*Match, error) { + r.timeout = timeout + r.ignoreTimeout = (time.Duration(math.MaxInt64) == timeout) + r.runtextstart = textstart + r.runtext = rt + r.runtextend = len(rt) + + stoppos := r.runtextend + bump := 1 + + if r.re.RightToLeft() { + bump = -1 + stoppos = 0 + } + + r.runtextpos = textstart + initted := false + + r.startTimeoutWatch() + for { + if r.re.Debug() { + //fmt.Printf("\nSearch content: %v\n", string(r.runtext)) + fmt.Printf("\nSearch range: from 0 to %v\n", r.runtextend) + fmt.Printf("Firstchar search starting at %v stopping at %v\n", r.runtextpos, stoppos) + } + + if r.findFirstChar() { + if err := r.checkTimeout(); err != nil { + return nil, err + } + + if !initted { + r.initMatch() + initted = true + } + + if r.re.Debug() { + fmt.Printf("Executing engine starting at %v\n\n", r.runtextpos) + } + + if err := r.execute(); err != nil { + return nil, err + } + + if r.runmatch.matchcount[0] > 0 { + // We'll return a match even if it touches a previous empty match + return r.tidyMatch(quick), nil + } + + // reset state for another go + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + } + + // failure! 
+ + if r.runtextpos == stoppos { + r.tidyMatch(true) + return nil, nil + } + + // Recognize leading []* and various anchors, and bump on failure accordingly + + // r.bump by one and start again + + r.runtextpos += bump + } + // We never get here +} + +func (r *runner) execute() error { + + r.goTo(0) + + for { + + if r.re.Debug() { + r.dumpState() + } + + if err := r.checkTimeout(); err != nil { + return err + } + + switch r.operator { + case syntax.Stop: + return nil + + case syntax.Nothing: + break + + case syntax.Goto: + r.goTo(r.operand(0)) + continue + + case syntax.Testref: + if !r.runmatch.isMatched(r.operand(0)) { + break + } + r.advance(1) + continue + + case syntax.Lazybranch: + r.trackPush1(r.textPos()) + r.advance(1) + continue + + case syntax.Lazybranch | syntax.Back: + r.trackPop() + r.textto(r.trackPeek()) + r.goTo(r.operand(0)) + continue + + case syntax.Setmark: + r.stackPush(r.textPos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Nullmark: + r.stackPush(-1) + r.trackPush() + r.advance(0) + continue + + case syntax.Setmark | syntax.Back, syntax.Nullmark | syntax.Back: + r.stackPop() + break + + case syntax.Getmark: + r.stackPop() + r.trackPush1(r.stackPeek()) + r.textto(r.stackPeek()) + r.advance(0) + continue + + case syntax.Getmark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + break + + case syntax.Capturemark: + if r.operand(1) != -1 && !r.runmatch.isMatched(r.operand(1)) { + break + } + r.stackPop() + if r.operand(1) != -1 { + r.transferCapture(r.operand(0), r.operand(1), r.stackPeek(), r.textPos()) + } else { + r.capture(r.operand(0), r.stackPeek(), r.textPos()) + } + r.trackPush1(r.stackPeek()) + + r.advance(2) + + continue + + case syntax.Capturemark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + r.uncapture() + if r.operand(0) != -1 && r.operand(1) != -1 { + r.uncapture() + } + + break + + case syntax.Branchmark: + r.stackPop() + + matched := r.textPos() - r.stackPeek() + + if matched != 0 { // 
Nonempty match -> loop now + r.trackPush2(r.stackPeek(), r.textPos()) // Save old mark, textpos + r.stackPush(r.textPos()) // Make new mark + r.goTo(r.operand(0)) // Loop + } else { // Empty match -> straight now + r.trackPushNeg1(r.stackPeek()) // Save old mark + r.advance(1) // Straight + } + continue + + case syntax.Branchmark | syntax.Back: + r.trackPopN(2) + r.stackPop() + r.textto(r.trackPeekN(1)) // Recall position + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.advance(1) // Straight + continue + + case syntax.Branchmark | syntax.Back2: + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break // Backtrack + + case syntax.Lazybranchmark: + { + // We hit this the first time through a lazy loop and after each + // successful match of the inner expression. It simply continues + // on and doesn't loop. + r.stackPop() + + oldMarkPos := r.stackPeek() + + if r.textPos() != oldMarkPos { // Nonempty match -> try to loop again by going to 'back' state + if oldMarkPos != -1 { + r.trackPush2(oldMarkPos, r.textPos()) // Save old mark, textpos + } else { + r.trackPush2(r.textPos(), r.textPos()) + } + } else { + // The inner expression found an empty match, so we'll go directly to 'back2' if we + // backtrack. In this case, we need to push something on the stack, since back2 pops. + // However, in the case of ()+? or similar, this empty match may be legitimate, so push the text + // position associated with that empty match. + r.stackPush(oldMarkPos) + + r.trackPushNeg1(r.stackPeek()) // Save old mark + } + r.advance(1) + continue + } + + case syntax.Lazybranchmark | syntax.Back: + + // After the first time, Lazybranchmark | syntax.Back occurs + // with each iteration of the loop, and therefore with every attempted + // match of the inner expression. We'll try to match the inner expression, + // then go back to Lazybranchmark if successful. 
If the inner expression + // fails, we go to Lazybranchmark | syntax.Back2 + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.stackPush(pos) // Make new mark + r.textto(pos) // Recall position + r.goTo(r.operand(0)) // Loop + continue + + case syntax.Lazybranchmark | syntax.Back2: + // The lazy loop has failed. We'll do a true backtrack and + // start over before the lazy loop. + r.stackPop() + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break + + case syntax.Setcount: + r.stackPush2(r.textPos(), r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Nullcount: + r.stackPush2(-1, r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Setcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Nullcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Branchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + matched := r.textPos() - mark + + if count >= r.operand(1) || (matched == 0 && count >= 0) { // Max loops or empty match -> straight now + r.trackPushNeg2(mark, count) // Save old mark, count + r.advance(2) // Straight + } else { // Nonempty match -> count+loop now + r.trackPush1(mark) // remember mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } + continue + + case syntax.Branchcount | syntax.Back: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (= current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + if r.stackPeekN(1) > 0 { // Positive -> can go straight + r.textto(r.stackPeek()) // Zap to mark + r.trackPushNeg2(r.trackPeek(), r.stackPeekN(1)-1) // Save old mark, old count + r.advance(2) // Straight + continue + } + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // recall old mark, old count + break + + case syntax.Branchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + 
// 1: Previous count + r.trackPopN(2) + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, old count + break // Backtrack + + case syntax.Lazybranchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + + if count < 0 { // Negative count -> loop now + r.trackPushNeg1(mark) // Save old mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } else { // Nonneg count -> straight now + r.trackPush3(mark, count, r.textPos()) // Save mark, count, position + r.advance(2) // Straight + } + continue + + case syntax.Lazybranchcount | syntax.Back: + // r.trackPush: + // 0: Mark + // 1: Count + // 2: r.textPos + + r.trackPopN(3) + mark := r.trackPeek() + textpos := r.trackPeekN(2) + + if r.trackPeekN(1) < r.operand(1) && textpos != mark { // Under limit and not empty match -> loop + r.textto(textpos) // Recall position + r.stackPush2(textpos, r.trackPeekN(1)+1) // Make new mark, incr count + r.trackPushNeg1(mark) // Save old mark + r.goTo(r.operand(0)) // Loop + continue + } else { // Max loops or empty match -> backtrack + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, count + break // backtrack + } + + case syntax.Lazybranchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (== current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // Recall old mark, count + break // Backtrack + + case syntax.Setjump: + r.stackPush2(r.trackpos(), r.crawlpos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Setjump | syntax.Back: + r.stackPopN(2) + break + + case syntax.Backjump: + // r.stackPush: + // 0: Saved trackpos + // 1: r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + + for r.crawlpos() != r.stackPeekN(1) { + r.uncapture() + } + + break + + case syntax.Forejump: + // r.stackPush: + // 0: Saved trackpos + // 1: 
r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + r.trackPush1(r.stackPeekN(1)) + r.advance(0) + continue + + case syntax.Forejump | syntax.Back: + // r.trackPush: + // 0: r.crawlpos + r.trackPop() + + for r.crawlpos() != r.trackPeek() { + r.uncapture() + } + + break + + case syntax.Bol: + if r.leftchars() > 0 && r.charAt(r.textPos()-1) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Eol: + if r.rightchars() > 0 && r.charAt(r.textPos()) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Boundary: + if !r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Nonboundary: + if r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.ECMABoundary: + if !r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.NonECMABoundary: + if r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Beginning: + if r.leftchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.Start: + if r.textPos() != r.textstart() { + break + } + r.advance(0) + continue + + case syntax.EndZ: + rchars := r.rightchars() + if rchars > 1 { + break + } + // RE2 and EcmaScript define $ as "asserts position at the end of the string" + // PCRE/.NET adds "or before the line terminator right at the end of the string (if any)" + if (r.re.options & (RE2 | ECMAScript)) != 0 { + // RE2/Ecmascript mode + if rchars > 0 { + break + } + } else if rchars == 1 && r.charAt(r.textPos()) != '\n' { + // "regular" mode + break + } + + r.advance(0) + continue + + case syntax.End: + if r.rightchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.One: + if r.forwardchars() < 1 || r.forwardcharnext() != rune(r.operand(0)) { + break + } + + r.advance(1) + continue + + case syntax.Notone: + if r.forwardchars() < 1 || r.forwardcharnext() == rune(r.operand(0)) { + break + } + + r.advance(1) + 
continue + + case syntax.Set: + + if r.forwardchars() < 1 || !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + r.advance(1) + continue + + case syntax.Multi: + if !r.runematch(r.code.Strings[r.operand(0)]) { + break + } + + r.advance(1) + continue + + case syntax.Ref: + + capnum := r.operand(0) + + if r.runmatch.isMatched(capnum) { + if !r.refmatch(r.runmatch.matchIndex(capnum), r.runmatch.matchLength(capnum)) { + break + } + } else { + if (r.re.options & ECMAScript) == 0 { + break + } + } + + r.advance(1) + continue + + case syntax.Onerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() != ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Notonerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() == ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Setrep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + set := r.code.Sets[r.operand(0)] + + for c > 0 { + if !set.CharIn(r.forwardcharnext()) { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Oneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() != ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Notoneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() == ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Setloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() 
+ } + + set := r.code.Sets[r.operand(0)] + i := c + + for ; i > 0; i-- { + if !set.CharIn(r.forwardcharnext()) { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Oneloop | syntax.Back, syntax.Notoneloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Setloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Onelazy, syntax.Notonelazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Setlazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Onelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() != rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Notonelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() == rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Setlazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + default: + return errors.New("unknown state in regex runner") + } + + BreakBackward: + ; + + // "break Backward" 
comes here: + r.backtrack() + } +} + +// increase the size of stack and track storage +func (r *runner) ensureStorage() { + if r.runstackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runstack, &r.runstackpos) + } + if r.runtrackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runtrack, &r.runtrackpos) + } +} + +func doubleIntSlice(s *[]int, pos *int) { + oldLen := len(*s) + newS := make([]int, oldLen*2) + + copy(newS[oldLen:], *s) + *pos += oldLen + *s = newS +} + +// Save a number on the longjump unrolling stack +func (r *runner) crawl(i int) { + if r.runcrawlpos == 0 { + doubleIntSlice(&r.runcrawl, &r.runcrawlpos) + } + r.runcrawlpos-- + r.runcrawl[r.runcrawlpos] = i +} + +// Remove a number from the longjump unrolling stack +func (r *runner) popcrawl() int { + val := r.runcrawl[r.runcrawlpos] + r.runcrawlpos++ + return val +} + +// Get the height of the stack +func (r *runner) crawlpos() int { + return len(r.runcrawl) - r.runcrawlpos +} + +func (r *runner) advance(i int) { + r.codepos += (i + 1) + r.setOperator(r.code.Codes[r.codepos]) +} + +func (r *runner) goTo(newpos int) { + // when branching backward or in place, ensure storage + if newpos <= r.codepos { + r.ensureStorage() + } + + r.setOperator(r.code.Codes[newpos]) + r.codepos = newpos +} + +func (r *runner) textto(newpos int) { + r.runtextpos = newpos +} + +func (r *runner) trackto(newpos int) { + r.runtrackpos = len(r.runtrack) - newpos +} + +func (r *runner) textstart() int { + return r.runtextstart +} + +func (r *runner) textPos() int { + return r.runtextpos +} + +// push onto the backtracking stack +func (r *runner) trackpos() int { + return len(r.runtrack) - r.runtrackpos +} + +func (r *runner) trackPush() { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush2(I1, I2 int) { + r.runtrackpos-- + 
r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush3(I1, I2, I3 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I3 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPushNeg1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) trackPushNeg2(I1, I2 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) backtrack() { + newpos := r.runtrack[r.runtrackpos] + r.runtrackpos++ + + if r.re.Debug() { + if newpos < 0 { + fmt.Printf(" Backtracking (back2) to code position %v\n", -newpos) + } else { + fmt.Printf(" Backtracking to code position %v\n", newpos) + } + } + + if newpos < 0 { + newpos = -newpos + r.setOperator(r.code.Codes[newpos] | syntax.Back2) + } else { + r.setOperator(r.code.Codes[newpos] | syntax.Back) + } + + // When branching backward, ensure storage + if newpos < r.codepos { + r.ensureStorage() + } + + r.codepos = newpos +} + +func (r *runner) setOperator(op int) { + r.caseInsensitive = (0 != (op & syntax.Ci)) + r.rightToLeft = (0 != (op & syntax.Rtl)) + r.operator = syntax.InstOp(op & ^(syntax.Rtl | syntax.Ci)) +} + +func (r *runner) trackPop() { + r.runtrackpos++ +} + +// pop framesize items from the backtracking stack +func (r *runner) trackPopN(framesize int) { + r.runtrackpos += framesize +} + +// Technically we are actually peeking at items already popped. 
So if you want to +// get and pop the top item from the stack, you do +// r.trackPop(); +// r.trackPeek(); +func (r *runner) trackPeek() int { + return r.runtrack[r.runtrackpos-1] +} + +// get the ith element down on the backtracking stack +func (r *runner) trackPeekN(i int) int { + return r.runtrack[r.runtrackpos-i-1] +} + +// Push onto the grouping stack +func (r *runner) stackPush(I1 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 +} + +func (r *runner) stackPush2(I1, I2 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 + r.runstackpos-- + r.runstack[r.runstackpos] = I2 +} + +func (r *runner) stackPop() { + r.runstackpos++ +} + +// pop framesize items from the grouping stack +func (r *runner) stackPopN(framesize int) { + r.runstackpos += framesize +} + +// Technically we are actually peeking at items already popped. So if you want to +// get and pop the top item from the stack, you do +// r.stackPop(); +// r.stackPeek(); +func (r *runner) stackPeek() int { + return r.runstack[r.runstackpos-1] +} + +// get the ith element down on the grouping stack +func (r *runner) stackPeekN(i int) int { + return r.runstack[r.runstackpos-i-1] +} + +func (r *runner) operand(i int) int { + return r.code.Codes[r.codepos+i+1] +} + +func (r *runner) leftchars() int { + return r.runtextpos +} + +func (r *runner) rightchars() int { + return r.runtextend - r.runtextpos +} + +func (r *runner) bump() int { + if r.rightToLeft { + return -1 + } + return 1 +} + +func (r *runner) forwardchars() int { + if r.rightToLeft { + return r.runtextpos + } + return r.runtextend - r.runtextpos +} + +func (r *runner) forwardcharnext() rune { + var ch rune + if r.rightToLeft { + r.runtextpos-- + ch = r.runtext[r.runtextpos] + } else { + ch = r.runtext[r.runtextpos] + r.runtextpos++ + } + + if r.caseInsensitive { + return unicode.ToLower(ch) + } + return ch +} + +func (r *runner) runematch(str []rune) bool { + var pos int + + c := len(str) + if !r.rightToLeft { + if 
r.runtextend-r.runtextpos < c { + return false + } + + pos = r.runtextpos + c + } else { + if r.runtextpos-0 < c { + return false + } + + pos = r.runtextpos + } + + if !r.caseInsensitive { + for c != 0 { + c-- + pos-- + if str[c] != r.runtext[pos] { + return false + } + } + } else { + for c != 0 { + c-- + pos-- + if str[c] != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len(str) + } + + r.runtextpos = pos + + return true +} + +func (r *runner) refmatch(index, len int) bool { + var c, pos, cmpos int + + if !r.rightToLeft { + if r.runtextend-r.runtextpos < len { + return false + } + + pos = r.runtextpos + len + } else { + if r.runtextpos-0 < len { + return false + } + + pos = r.runtextpos + } + cmpos = index + len + + c = len + + if !r.caseInsensitive { + for c != 0 { + c-- + cmpos-- + pos-- + if r.runtext[cmpos] != r.runtext[pos] { + return false + } + + } + } else { + for c != 0 { + c-- + cmpos-- + pos-- + + if unicode.ToLower(r.runtext[cmpos]) != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len + } + + r.runtextpos = pos + + return true +} + +func (r *runner) backwardnext() { + if r.rightToLeft { + r.runtextpos++ + } else { + r.runtextpos-- + } +} + +func (r *runner) charAt(j int) rune { + return r.runtext[j] +} + +func (r *runner) findFirstChar() bool { + + if 0 != (r.code.Anchors & (syntax.AnchorBeginning | syntax.AnchorStart | syntax.AnchorEndZ | syntax.AnchorEnd)) { + if !r.code.RightToLeft { + if (0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos > r.runtextstart) { + r.runtextpos = r.runtextend + return false + } + if 0 != (r.code.Anchors&syntax.AnchorEndZ) && r.runtextpos < r.runtextend-1 { + r.runtextpos = r.runtextend - 1 + } else if 0 != (r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend { + r.runtextpos = r.runtextend + } + } else { + if (0 != 
(r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend) || + (0 != (r.code.Anchors&syntax.AnchorEndZ) && (r.runtextpos < r.runtextend-1 || + (r.runtextpos == r.runtextend-1 && r.charAt(r.runtextpos) != '\n'))) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos < r.runtextstart) { + r.runtextpos = 0 + return false + } + if 0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0 { + r.runtextpos = 0 + } + } + + if r.code.BmPrefix != nil { + return r.code.BmPrefix.IsMatch(r.runtext, r.runtextpos, 0, r.runtextend) + } + + return true // found a valid start or end anchor + } else if r.code.BmPrefix != nil { + r.runtextpos = r.code.BmPrefix.Scan(r.runtext, r.runtextpos, 0, r.runtextend) + + if r.runtextpos == -1 { + if r.code.RightToLeft { + r.runtextpos = 0 + } else { + r.runtextpos = r.runtextend + } + return false + } + + return true + } else if r.code.FcPrefix == nil { + return true + } + + r.rightToLeft = r.code.RightToLeft + r.caseInsensitive = r.code.FcPrefix.CaseInsensitive + + set := r.code.FcPrefix.PrefixSet + if set.IsSingleton() { + ch := set.SingletonChar() + for i := r.forwardchars(); i > 0; i-- { + if ch == r.forwardcharnext() { + r.backwardnext() + return true + } + } + } else { + for i := r.forwardchars(); i > 0; i-- { + n := r.forwardcharnext() + //fmt.Printf("%v in %v: %v\n", string(n), set.String(), set.CharIn(n)) + if set.CharIn(n) { + r.backwardnext() + return true + } + } + } + + return false +} + +func (r *runner) initMatch() { + // Use a hashtable'ed Match object if the capture numbers are sparse + + if r.runmatch == nil { + if r.re.caps != nil { + r.runmatch = newMatchSparse(r.re, r.re.caps, r.re.capsize, r.runtext, r.runtextstart) + } else { + r.runmatch = newMatch(r.re, r.re.capsize, r.runtext, r.runtextstart) + } + } else { + r.runmatch.reset(r.runtext, r.runtextstart) + } + + // note we test runcrawl, because it is the last one to be allocated + // If there is an alloc failure in the middle of the three 
allocations, + // we may still return to reuse this instance, and we want to behave + // as if the allocations didn't occur. (we used to test _trackcount != 0) + + if r.runcrawl != nil { + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + return + } + + r.initTrackCount() + + tracksize := r.runtrackcount * 8 + stacksize := r.runtrackcount * 8 + + if tracksize < 32 { + tracksize = 32 + } + if stacksize < 16 { + stacksize = 16 + } + + r.runtrack = make([]int, tracksize) + r.runtrackpos = tracksize + + r.runstack = make([]int, stacksize) + r.runstackpos = stacksize + + r.runcrawl = make([]int, 32) + r.runcrawlpos = 32 +} + +func (r *runner) tidyMatch(quick bool) *Match { + if !quick { + match := r.runmatch + + r.runmatch = nil + + match.tidy(r.runtextpos) + return match + } else { + // send back our match -- it's not leaving the package, so it's safe to not clean it up + // this reduces allocs for frequent calls to the "IsMatch" bool-only functions + return r.runmatch + } +} + +// capture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). +func (r *runner) capture(capnum, start, end int) { + if end < start { + T := end + end = start + start = T + } + + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) +} + +// transferCapture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). 
+func (r *runner) transferCapture(capnum, uncapnum, start, end int) { + var start2, end2 int + + // these are the two intervals that are cancelling each other + + if end < start { + T := end + end = start + start = T + } + + start2 = r.runmatch.matchIndex(uncapnum) + end2 = start2 + r.runmatch.matchLength(uncapnum) + + // The new capture gets the innermost defined interval + + if start >= end2 { + end = start + start = end2 + } else if end <= start2 { + start = start2 + } else { + if end > end2 { + end = end2 + } + if start2 > start { + start = start2 + } + } + + r.crawl(uncapnum) + r.runmatch.balanceMatch(uncapnum) + + if capnum != -1 { + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) + } +} + +// revert the last capture +func (r *runner) uncapture() { + capnum := r.popcrawl() + r.runmatch.removeMatch(capnum) +} + +//debug + +func (r *runner) dumpState() { + back := "" + if r.operator&syntax.Back != 0 { + back = " Back" + } + if r.operator&syntax.Back2 != 0 { + back += " Back2" + } + fmt.Printf("Text: %v\nTrack: %v\nStack: %v\n %s%s\n\n", + r.textposDescription(), + r.stackDescription(r.runtrack, r.runtrackpos), + r.stackDescription(r.runstack, r.runstackpos), + r.code.OpcodeDescription(r.codepos), + back) +} + +func (r *runner) stackDescription(a []int, index int) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%v/%v", len(a)-index, len(a)) + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + buf.WriteRune('(') + for i := index; i < len(a); i++ { + if i > index { + buf.WriteRune(' ') + } + + buf.WriteString(strconv.Itoa(a[i])) + } + + buf.WriteRune(')') + + return buf.String() +} + +func (r *runner) textposDescription() string { + buf := &bytes.Buffer{} + + buf.WriteString(strconv.Itoa(r.runtextpos)) + + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + if r.runtextpos > 0 { + buf.WriteString(syntax.CharDescription(r.runtext[r.runtextpos-1])) + } else { + buf.WriteRune('^') + } + + 
buf.WriteRune('>') + + for i := r.runtextpos; i < r.runtextend; i++ { + buf.WriteString(syntax.CharDescription(r.runtext[i])) + } + if buf.Len() >= 64 { + buf.Truncate(61) + buf.WriteString("...") + } else { + buf.WriteRune('$') + } + + return buf.String() +} + +// decide whether the pos +// at the specified index is a boundary or not. It's just not worth +// emitting inline code for this logic. +func (r *runner) isBoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsWordChar(r.runtext[index])) +} + +func (r *runner) isECMABoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsECMAWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsECMAWordChar(r.runtext[index])) +} + +func (r *runner) startTimeoutWatch() { + if r.ignoreTimeout { + return + } + r.deadline = makeDeadline(r.timeout) +} + +func (r *runner) checkTimeout() error { + if r.ignoreTimeout || !r.deadline.reached() { + return nil + } + + if r.re.Debug() { + //Debug.WriteLine("") + //Debug.WriteLine("RegEx match timeout occurred!") + //Debug.WriteLine("Specified timeout: " + TimeSpan.FromMilliseconds(_timeout).ToString()) + //Debug.WriteLine("Timeout check frequency: " + TimeoutCheckFrequency) + //Debug.WriteLine("Search pattern: " + _runregex._pattern) + //Debug.WriteLine("Input: " + r.runtext) + //Debug.WriteLine("About to throw RegexMatchTimeoutException.") + } + + return fmt.Errorf("match timeout after %v on input `%v`", r.timeout, string(r.runtext)) +} + +func (r *runner) initTrackCount() { + r.runtrackcount = r.code.TrackCount +} + +// getRunner returns a run to use for matching re. +// It uses the re's runner cache if possible, to avoid +// unnecessary allocation. 
+func (re *Regexp) getRunner() *runner { + re.muRun.Lock() + if n := len(re.runner); n > 0 { + z := re.runner[n-1] + re.runner = re.runner[:n-1] + re.muRun.Unlock() + return z + } + re.muRun.Unlock() + z := &runner{ + re: re, + code: re.code, + } + return z +} + +// putRunner returns a runner to the re's cache. +// There is no attempt to limit the size of the cache, so it will +// grow to the maximum number of simultaneous matches +// run using re. (The cache empties when re gets garbage collected.) +func (re *Regexp) putRunner(r *runner) { + re.muRun.Lock() + re.runner = append(re.runner, r) + re.muRun.Unlock() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/charclass.go b/vendor/github.com/dlclark/regexp2/syntax/charclass.go new file mode 100644 index 0000000..6881a0e --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/charclass.go @@ -0,0 +1,865 @@ +package syntax + +import ( + "bytes" + "encoding/binary" + "fmt" + "sort" + "unicode" + "unicode/utf8" +) + +// CharSet combines start-end rune ranges and unicode categories representing a set of characters +type CharSet struct { + ranges []singleRange + categories []category + sub *CharSet //optional subtractor + negate bool + anything bool +} + +type category struct { + negate bool + cat string +} + +type singleRange struct { + first rune + last rune +} + +const ( + spaceCategoryText = " " + wordCategoryText = "W" +) + +var ( + ecmaSpace = []rune{0x0009, 0x000e, 0x0020, 0x0021, 0x00a0, 0x00a1, 0x1680, 0x1681, 0x2000, 0x200b, 0x2028, 0x202a, 0x202f, 0x2030, 0x205f, 0x2060, 0x3000, 0x3001, 0xfeff, 0xff00} + ecmaWord = []rune{0x0030, 0x003a, 0x0041, 0x005b, 0x005f, 0x0060, 0x0061, 0x007b} + ecmaDigit = []rune{0x0030, 0x003a} + + re2Space = []rune{0x0009, 0x000b, 0x000c, 0x000e, 0x0020, 0x0021} +) + +var ( + AnyClass = getCharSetFromOldString([]rune{0}, false) + ECMAAnyClass = getCharSetFromOldString([]rune{0, 0x000a, 0x000b, 0x000d, 0x000e}, false) + NoneClass = getCharSetFromOldString(nil, false) + 
ECMAWordClass = getCharSetFromOldString(ecmaWord, false) + NotECMAWordClass = getCharSetFromOldString(ecmaWord, true) + ECMASpaceClass = getCharSetFromOldString(ecmaSpace, false) + NotECMASpaceClass = getCharSetFromOldString(ecmaSpace, true) + ECMADigitClass = getCharSetFromOldString(ecmaDigit, false) + NotECMADigitClass = getCharSetFromOldString(ecmaDigit, true) + + WordClass = getCharSetFromCategoryString(false, false, wordCategoryText) + NotWordClass = getCharSetFromCategoryString(true, false, wordCategoryText) + SpaceClass = getCharSetFromCategoryString(false, false, spaceCategoryText) + NotSpaceClass = getCharSetFromCategoryString(true, false, spaceCategoryText) + DigitClass = getCharSetFromCategoryString(false, false, "Nd") + NotDigitClass = getCharSetFromCategoryString(false, true, "Nd") + + RE2SpaceClass = getCharSetFromOldString(re2Space, false) + NotRE2SpaceClass = getCharSetFromOldString(re2Space, true) +) + +var unicodeCategories = func() map[string]*unicode.RangeTable { + retVal := make(map[string]*unicode.RangeTable) + for k, v := range unicode.Scripts { + retVal[k] = v + } + for k, v := range unicode.Categories { + retVal[k] = v + } + for k, v := range unicode.Properties { + retVal[k] = v + } + return retVal +}() + +func getCharSetFromCategoryString(negateSet bool, negateCat bool, cats ...string) func() *CharSet { + if negateCat && negateSet { + panic("BUG! 
You should only negate the set OR the category in a constant setup, but not both") + } + + c := CharSet{negate: negateSet} + + c.categories = make([]category, len(cats)) + for i, cat := range cats { + c.categories[i] = category{cat: cat, negate: negateCat} + } + return func() *CharSet { + //make a copy each time + local := c + //return that address + return &local + } +} + +func getCharSetFromOldString(setText []rune, negate bool) func() *CharSet { + c := CharSet{} + if len(setText) > 0 { + fillFirst := false + l := len(setText) + if negate { + if setText[0] == 0 { + setText = setText[1:] + } else { + l++ + fillFirst = true + } + } + + if l%2 == 0 { + c.ranges = make([]singleRange, l/2) + } else { + c.ranges = make([]singleRange, l/2+1) + } + + first := true + if fillFirst { + c.ranges[0] = singleRange{first: 0} + first = false + } + + i := 0 + for _, r := range setText { + if first { + // lower bound in a new range + c.ranges[i] = singleRange{first: r} + first = false + } else { + c.ranges[i].last = r - 1 + i++ + first = true + } + } + if !first { + c.ranges[i].last = utf8.MaxRune + } + } + + return func() *CharSet { + local := c + return &local + } +} + +// Copy makes a deep copy to prevent accidental mutation of a set +func (c CharSet) Copy() CharSet { + ret := CharSet{ + anything: c.anything, + negate: c.negate, + } + + ret.ranges = append(ret.ranges, c.ranges...) + ret.categories = append(ret.categories, c.categories...) 
+ + if c.sub != nil { + sub := c.sub.Copy() + ret.sub = &sub + } + + return ret +} + +// gets a human-readable description for a set string +func (c CharSet) String() string { + buf := &bytes.Buffer{} + buf.WriteRune('[') + + if c.IsNegated() { + buf.WriteRune('^') + } + + for _, r := range c.ranges { + + buf.WriteString(CharDescription(r.first)) + if r.first != r.last { + if r.last-r.first != 1 { + //groups that are 1 char apart skip the dash + buf.WriteRune('-') + } + buf.WriteString(CharDescription(r.last)) + } + } + + for _, c := range c.categories { + buf.WriteString(c.String()) + } + + if c.sub != nil { + buf.WriteRune('-') + buf.WriteString(c.sub.String()) + } + + buf.WriteRune(']') + + return buf.String() +} + +// mapHashFill converts a charset into a buffer for use in maps +func (c CharSet) mapHashFill(buf *bytes.Buffer) { + if c.negate { + buf.WriteByte(0) + } else { + buf.WriteByte(1) + } + + binary.Write(buf, binary.LittleEndian, len(c.ranges)) + binary.Write(buf, binary.LittleEndian, len(c.categories)) + for _, r := range c.ranges { + buf.WriteRune(r.first) + buf.WriteRune(r.last) + } + for _, ct := range c.categories { + buf.WriteString(ct.cat) + if ct.negate { + buf.WriteByte(1) + } else { + buf.WriteByte(0) + } + } + + if c.sub != nil { + c.sub.mapHashFill(buf) + } +} + +// CharIn returns true if the rune is in our character set (either ranges or categories). +// It handles negations and subtracted sub-charsets. 
+func (c CharSet) CharIn(ch rune) bool { + val := false + // in s && !s.subtracted + + //check ranges + for _, r := range c.ranges { + if ch < r.first { + continue + } + if ch <= r.last { + val = true + break + } + } + + //check categories if we haven't already found a range + if !val && len(c.categories) > 0 { + for _, ct := range c.categories { + // special categories...then unicode + if ct.cat == spaceCategoryText { + if unicode.IsSpace(ch) { + // we found a space so we're done + // negate means this is a "bad" thing + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if ct.cat == wordCategoryText { + if IsWordChar(ch) { + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if unicode.Is(unicodeCategories[ct.cat], ch) { + // if we're in this unicode category then we're done + // if negate=true on this category then we "failed" our test + // otherwise we're good that we found it + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } + } + + // negate the whole char set + if c.negate { + val = !val + } + + // get subtracted recurse + if val && c.sub != nil { + val = !c.sub.CharIn(ch) + } + + //log.Printf("Char '%v' in %v == %v", string(ch), c.String(), val) + return val +} + +func (c category) String() string { + switch c.cat { + case spaceCategoryText: + if c.negate { + return "\\S" + } + return "\\s" + case wordCategoryText: + if c.negate { + return "\\W" + } + return "\\w" + } + if _, ok := unicodeCategories[c.cat]; ok { + + if c.negate { + return "\\P{" + c.cat + "}" + } + return "\\p{" + c.cat + "}" + } + return "Unknown category: " + c.cat +} + +// CharDescription Produces a human-readable description for a single character. 
+func CharDescription(ch rune) string { + /*if ch == '\\' { + return "\\\\" + } + + if ch > ' ' && ch <= '~' { + return string(ch) + } else if ch == '\n' { + return "\\n" + } else if ch == ' ' { + return "\\ " + }*/ + + b := &bytes.Buffer{} + escape(b, ch, false) //fmt.Sprintf("%U", ch) + return b.String() +} + +// According to UTS#18 Unicode Regular Expressions (http://www.unicode.org/reports/tr18/) +// RL 1.4 Simple Word Boundaries The class of includes all Alphabetic +// values from the Unicode character database, from UnicodeData.txt [UData], plus the U+200C +// ZERO WIDTH NON-JOINER and U+200D ZERO WIDTH JOINER. +func IsWordChar(r rune) bool { + //"L", "Mn", "Nd", "Pc" + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) || r == '\u200D' || r == '\u200C' + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +func IsECMAWordChar(r rune) bool { + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) + + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +// SingletonChar will return the char from the first range without validation. 
+// It assumes you have checked for IsSingleton or IsSingletonInverse and will panic given bad input +func (c CharSet) SingletonChar() rune { + return c.ranges[0].first +} + +func (c CharSet) IsSingleton() bool { + return !c.negate && //negated is multiple chars + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsSingletonInverse() bool { + return c.negate && //same as above, but requires negated + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsMergeable() bool { + return !c.IsNegated() && !c.HasSubtraction() +} + +func (c CharSet) IsNegated() bool { + return c.negate +} + +func (c CharSet) HasSubtraction() bool { + return c.sub != nil +} + +func (c CharSet) IsEmpty() bool { + return len(c.ranges) == 0 && len(c.categories) == 0 && c.sub == nil +} + +func (c *CharSet) addDigit(ecma, negate bool, pattern string) { + if ecma { + if negate { + c.addRanges(NotECMADigitClass().ranges) + } else { + c.addRanges(ECMADigitClass().ranges) + } + } else { + c.addCategories(category{cat: "Nd", negate: negate}) + } +} + +func (c *CharSet) addChar(ch rune) { + c.addRange(ch, ch) +} + +func (c *CharSet) addSpace(ecma, re2, negate bool) { + if ecma { + if negate { + c.addRanges(NotECMASpaceClass().ranges) + } else { + c.addRanges(ECMASpaceClass().ranges) + } + } else if re2 { + if negate { + c.addRanges(NotRE2SpaceClass().ranges) + } else { + c.addRanges(RE2SpaceClass().ranges) + } + } else { + c.addCategories(category{cat: spaceCategoryText, negate: negate}) + } +} + +func (c *CharSet) addWord(ecma, negate 
bool) { + if ecma { + if negate { + c.addRanges(NotECMAWordClass().ranges) + } else { + c.addRanges(ECMAWordClass().ranges) + } + } else { + c.addCategories(category{cat: wordCategoryText, negate: negate}) + } +} + +// Add set ranges and categories into ours -- no deduping or anything +func (c *CharSet) addSet(set CharSet) { + if c.anything { + return + } + if set.anything { + c.makeAnything() + return + } + // just append here to prevent double-canon + c.ranges = append(c.ranges, set.ranges...) + c.addCategories(set.categories...) + c.canonicalize() +} + +func (c *CharSet) makeAnything() { + c.anything = true + c.categories = []category{} + c.ranges = AnyClass().ranges +} + +func (c *CharSet) addCategories(cats ...category) { + // don't add dupes and remove positive+negative + if c.anything { + // if we've had a previous positive+negative group then + // just return, we're as broad as we can get + return + } + + for _, ct := range cats { + found := false + for _, ct2 := range c.categories { + if ct.cat == ct2.cat { + if ct.negate != ct2.negate { + // oposite negations...this mean we just + // take us as anything and move on + c.makeAnything() + return + } + found = true + break + } + } + + if !found { + c.categories = append(c.categories, ct) + } + } +} + +// Merges new ranges to our own +func (c *CharSet) addRanges(ranges []singleRange) { + if c.anything { + return + } + c.ranges = append(c.ranges, ranges...) 
+ c.canonicalize() +} + +// Merges everything but the new ranges into our own +func (c *CharSet) addNegativeRanges(ranges []singleRange) { + if c.anything { + return + } + + var hi rune + + // convert incoming ranges into opposites, assume they are in order + for _, r := range ranges { + if hi < r.first { + c.ranges = append(c.ranges, singleRange{hi, r.first - 1}) + } + hi = r.last + 1 + } + + if hi < utf8.MaxRune { + c.ranges = append(c.ranges, singleRange{hi, utf8.MaxRune}) + } + + c.canonicalize() +} + +func isValidUnicodeCat(catName string) bool { + _, ok := unicodeCategories[catName] + return ok +} + +func (c *CharSet) addCategory(categoryName string, negate, caseInsensitive bool, pattern string) { + if !isValidUnicodeCat(categoryName) { + // unknown unicode category, script, or property "blah" + panic(fmt.Errorf("Unknown unicode category, script, or property '%v'", categoryName)) + + } + + if caseInsensitive && (categoryName == "Ll" || categoryName == "Lu" || categoryName == "Lt") { + // when RegexOptions.IgnoreCase is specified then {Ll} {Lu} and {Lt} cases should all match + c.addCategories( + category{cat: "Ll", negate: negate}, + category{cat: "Lu", negate: negate}, + category{cat: "Lt", negate: negate}) + } + c.addCategories(category{cat: categoryName, negate: negate}) +} + +func (c *CharSet) addSubtraction(sub *CharSet) { + c.sub = sub +} + +func (c *CharSet) addRange(chMin, chMax rune) { + c.ranges = append(c.ranges, singleRange{first: chMin, last: chMax}) + c.canonicalize() +} + +func (c *CharSet) addNamedASCII(name string, negate bool) bool { + var rs []singleRange + + switch name { + case "alnum": + rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'Z'}, singleRange{'a', 'z'}} + case "alpha": + rs = []singleRange{singleRange{'A', 'Z'}, singleRange{'a', 'z'}} + case "ascii": + rs = []singleRange{singleRange{0, 0x7f}} + case "blank": + rs = []singleRange{singleRange{'\t', '\t'}, singleRange{' ', ' '}} + case "cntrl": + rs = 
[]singleRange{singleRange{0, 0x1f}, singleRange{0x7f, 0x7f}} + case "digit": + c.addDigit(false, negate, "") + case "graph": + rs = []singleRange{singleRange{'!', '~'}} + case "lower": + rs = []singleRange{singleRange{'a', 'z'}} + case "print": + rs = []singleRange{singleRange{' ', '~'}} + case "punct": //[!-/:-@[-`{-~] + rs = []singleRange{singleRange{'!', '/'}, singleRange{':', '@'}, singleRange{'[', '`'}, singleRange{'{', '~'}} + case "space": + c.addSpace(true, false, negate) + case "upper": + rs = []singleRange{singleRange{'A', 'Z'}} + case "word": + c.addWord(true, negate) + case "xdigit": + rs = []singleRange{singleRange{'0', '9'}, singleRange{'A', 'F'}, singleRange{'a', 'f'}} + default: + return false + } + + if len(rs) > 0 { + if negate { + c.addNegativeRanges(rs) + } else { + c.addRanges(rs) + } + } + + return true +} + +type singleRangeSorter []singleRange + +func (p singleRangeSorter) Len() int { return len(p) } +func (p singleRangeSorter) Less(i, j int) bool { return p[i].first < p[j].first } +func (p singleRangeSorter) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// Logic to reduce a character class to a unique, sorted form. +func (c *CharSet) canonicalize() { + var i, j int + var last rune + + // + // Find and eliminate overlapping or abutting ranges + // + + if len(c.ranges) > 1 { + sort.Sort(singleRangeSorter(c.ranges)) + + done := false + + for i, j = 1, 0; ; i++ { + for last = c.ranges[j].last; ; i++ { + if i == len(c.ranges) || last == utf8.MaxRune { + done = true + break + } + + CurrentRange := c.ranges[i] + if CurrentRange.first > last+1 { + break + } + + if last < CurrentRange.last { + last = CurrentRange.last + } + } + + c.ranges[j] = singleRange{first: c.ranges[j].first, last: last} + + j++ + + if done { + break + } + + if j < i { + c.ranges[j] = c.ranges[i] + } + } + + c.ranges = append(c.ranges[:j], c.ranges[len(c.ranges):]...) + } +} + +// Adds to the class any lowercase versions of characters already +// in the class. 
Used for case-insensitivity. +func (c *CharSet) addLowercase() { + if c.anything { + return + } + toAdd := []singleRange{} + for i := 0; i < len(c.ranges); i++ { + r := c.ranges[i] + if r.first == r.last { + lower := unicode.ToLower(r.first) + c.ranges[i] = singleRange{first: lower, last: lower} + } else { + toAdd = append(toAdd, r) + } + } + + for _, r := range toAdd { + c.addLowercaseRange(r.first, r.last) + } + c.canonicalize() +} + +/************************************************************************** + Let U be the set of Unicode character values and let L be the lowercase + function, mapping from U to U. To perform case insensitive matching of + character sets, we need to be able to map an interval I in U, say + + I = [chMin, chMax] = { ch : chMin <= ch <= chMax } + + to a set A such that A contains L(I) and A is contained in the union of + I and L(I). + + The table below partitions U into intervals on which L is non-decreasing. + Thus, for any interval J = [a, b] contained in one of these intervals, + L(J) is contained in [L(a), L(b)]. + + It is also true that for any such J, [L(a), L(b)] is contained in the + union of J and L(J). This does not follow from L being non-decreasing on + these intervals. It follows from the nature of the L on each interval. + On each interval, L has one of the following forms: + + (1) L(ch) = constant (LowercaseSet) + (2) L(ch) = ch + offset (LowercaseAdd) + (3) L(ch) = ch | 1 (LowercaseBor) + (4) L(ch) = ch + (ch & 1) (LowercaseBad) + + It is easy to verify that for any of these forms [L(a), L(b)] is + contained in the union of [a, b] and L([a, b]). +***************************************************************************/ + +const ( + LowercaseSet = 0 // Set to arg. + LowercaseAdd = 1 // Add arg. + LowercaseBor = 2 // Bitwise or with 1. + LowercaseBad = 3 // Bitwise and with 1 and add original. 
+) + +type lcMap struct { + chMin, chMax rune + op, data int32 +} + +var lcTable = []lcMap{ + lcMap{'\u0041', '\u005A', LowercaseAdd, 32}, + lcMap{'\u00C0', '\u00DE', LowercaseAdd, 32}, + lcMap{'\u0100', '\u012E', LowercaseBor, 0}, + lcMap{'\u0130', '\u0130', LowercaseSet, 0x0069}, + lcMap{'\u0132', '\u0136', LowercaseBor, 0}, + lcMap{'\u0139', '\u0147', LowercaseBad, 0}, + lcMap{'\u014A', '\u0176', LowercaseBor, 0}, + lcMap{'\u0178', '\u0178', LowercaseSet, 0x00FF}, + lcMap{'\u0179', '\u017D', LowercaseBad, 0}, + lcMap{'\u0181', '\u0181', LowercaseSet, 0x0253}, + lcMap{'\u0182', '\u0184', LowercaseBor, 0}, + lcMap{'\u0186', '\u0186', LowercaseSet, 0x0254}, + lcMap{'\u0187', '\u0187', LowercaseSet, 0x0188}, + lcMap{'\u0189', '\u018A', LowercaseAdd, 205}, + lcMap{'\u018B', '\u018B', LowercaseSet, 0x018C}, + lcMap{'\u018E', '\u018E', LowercaseSet, 0x01DD}, + lcMap{'\u018F', '\u018F', LowercaseSet, 0x0259}, + lcMap{'\u0190', '\u0190', LowercaseSet, 0x025B}, + lcMap{'\u0191', '\u0191', LowercaseSet, 0x0192}, + lcMap{'\u0193', '\u0193', LowercaseSet, 0x0260}, + lcMap{'\u0194', '\u0194', LowercaseSet, 0x0263}, + lcMap{'\u0196', '\u0196', LowercaseSet, 0x0269}, + lcMap{'\u0197', '\u0197', LowercaseSet, 0x0268}, + lcMap{'\u0198', '\u0198', LowercaseSet, 0x0199}, + lcMap{'\u019C', '\u019C', LowercaseSet, 0x026F}, + lcMap{'\u019D', '\u019D', LowercaseSet, 0x0272}, + lcMap{'\u019F', '\u019F', LowercaseSet, 0x0275}, + lcMap{'\u01A0', '\u01A4', LowercaseBor, 0}, + lcMap{'\u01A7', '\u01A7', LowercaseSet, 0x01A8}, + lcMap{'\u01A9', '\u01A9', LowercaseSet, 0x0283}, + lcMap{'\u01AC', '\u01AC', LowercaseSet, 0x01AD}, + lcMap{'\u01AE', '\u01AE', LowercaseSet, 0x0288}, + lcMap{'\u01AF', '\u01AF', LowercaseSet, 0x01B0}, + lcMap{'\u01B1', '\u01B2', LowercaseAdd, 217}, + lcMap{'\u01B3', '\u01B5', LowercaseBad, 0}, + lcMap{'\u01B7', '\u01B7', LowercaseSet, 0x0292}, + lcMap{'\u01B8', '\u01B8', LowercaseSet, 0x01B9}, + lcMap{'\u01BC', '\u01BC', LowercaseSet, 0x01BD}, + lcMap{'\u01C4', 
'\u01C5', LowercaseSet, 0x01C6}, + lcMap{'\u01C7', '\u01C8', LowercaseSet, 0x01C9}, + lcMap{'\u01CA', '\u01CB', LowercaseSet, 0x01CC}, + lcMap{'\u01CD', '\u01DB', LowercaseBad, 0}, + lcMap{'\u01DE', '\u01EE', LowercaseBor, 0}, + lcMap{'\u01F1', '\u01F2', LowercaseSet, 0x01F3}, + lcMap{'\u01F4', '\u01F4', LowercaseSet, 0x01F5}, + lcMap{'\u01FA', '\u0216', LowercaseBor, 0}, + lcMap{'\u0386', '\u0386', LowercaseSet, 0x03AC}, + lcMap{'\u0388', '\u038A', LowercaseAdd, 37}, + lcMap{'\u038C', '\u038C', LowercaseSet, 0x03CC}, + lcMap{'\u038E', '\u038F', LowercaseAdd, 63}, + lcMap{'\u0391', '\u03AB', LowercaseAdd, 32}, + lcMap{'\u03E2', '\u03EE', LowercaseBor, 0}, + lcMap{'\u0401', '\u040F', LowercaseAdd, 80}, + lcMap{'\u0410', '\u042F', LowercaseAdd, 32}, + lcMap{'\u0460', '\u0480', LowercaseBor, 0}, + lcMap{'\u0490', '\u04BE', LowercaseBor, 0}, + lcMap{'\u04C1', '\u04C3', LowercaseBad, 0}, + lcMap{'\u04C7', '\u04C7', LowercaseSet, 0x04C8}, + lcMap{'\u04CB', '\u04CB', LowercaseSet, 0x04CC}, + lcMap{'\u04D0', '\u04EA', LowercaseBor, 0}, + lcMap{'\u04EE', '\u04F4', LowercaseBor, 0}, + lcMap{'\u04F8', '\u04F8', LowercaseSet, 0x04F9}, + lcMap{'\u0531', '\u0556', LowercaseAdd, 48}, + lcMap{'\u10A0', '\u10C5', LowercaseAdd, 48}, + lcMap{'\u1E00', '\u1EF8', LowercaseBor, 0}, + lcMap{'\u1F08', '\u1F0F', LowercaseAdd, -8}, + lcMap{'\u1F18', '\u1F1F', LowercaseAdd, -8}, + lcMap{'\u1F28', '\u1F2F', LowercaseAdd, -8}, + lcMap{'\u1F38', '\u1F3F', LowercaseAdd, -8}, + lcMap{'\u1F48', '\u1F4D', LowercaseAdd, -8}, + lcMap{'\u1F59', '\u1F59', LowercaseSet, 0x1F51}, + lcMap{'\u1F5B', '\u1F5B', LowercaseSet, 0x1F53}, + lcMap{'\u1F5D', '\u1F5D', LowercaseSet, 0x1F55}, + lcMap{'\u1F5F', '\u1F5F', LowercaseSet, 0x1F57}, + lcMap{'\u1F68', '\u1F6F', LowercaseAdd, -8}, + lcMap{'\u1F88', '\u1F8F', LowercaseAdd, -8}, + lcMap{'\u1F98', '\u1F9F', LowercaseAdd, -8}, + lcMap{'\u1FA8', '\u1FAF', LowercaseAdd, -8}, + lcMap{'\u1FB8', '\u1FB9', LowercaseAdd, -8}, + lcMap{'\u1FBA', '\u1FBB', LowercaseAdd, 
-74}, + lcMap{'\u1FBC', '\u1FBC', LowercaseSet, 0x1FB3}, + lcMap{'\u1FC8', '\u1FCB', LowercaseAdd, -86}, + lcMap{'\u1FCC', '\u1FCC', LowercaseSet, 0x1FC3}, + lcMap{'\u1FD8', '\u1FD9', LowercaseAdd, -8}, + lcMap{'\u1FDA', '\u1FDB', LowercaseAdd, -100}, + lcMap{'\u1FE8', '\u1FE9', LowercaseAdd, -8}, + lcMap{'\u1FEA', '\u1FEB', LowercaseAdd, -112}, + lcMap{'\u1FEC', '\u1FEC', LowercaseSet, 0x1FE5}, + lcMap{'\u1FF8', '\u1FF9', LowercaseAdd, -128}, + lcMap{'\u1FFA', '\u1FFB', LowercaseAdd, -126}, + lcMap{'\u1FFC', '\u1FFC', LowercaseSet, 0x1FF3}, + lcMap{'\u2160', '\u216F', LowercaseAdd, 16}, + lcMap{'\u24B6', '\u24D0', LowercaseAdd, 26}, + lcMap{'\uFF21', '\uFF3A', LowercaseAdd, 32}, +} + +func (c *CharSet) addLowercaseRange(chMin, chMax rune) { + var i, iMax, iMid int + var chMinT, chMaxT rune + var lc lcMap + + for i, iMax = 0, len(lcTable); i < iMax; { + iMid = (i + iMax) / 2 + if lcTable[iMid].chMax < chMin { + i = iMid + 1 + } else { + iMax = iMid + } + } + + for ; i < len(lcTable); i++ { + lc = lcTable[i] + if lc.chMin > chMax { + return + } + chMinT = lc.chMin + if chMinT < chMin { + chMinT = chMin + } + + chMaxT = lc.chMax + if chMaxT > chMax { + chMaxT = chMax + } + + switch lc.op { + case LowercaseSet: + chMinT = rune(lc.data) + chMaxT = rune(lc.data) + break + case LowercaseAdd: + chMinT += lc.data + chMaxT += lc.data + break + case LowercaseBor: + chMinT |= 1 + chMaxT |= 1 + break + case LowercaseBad: + chMinT += (chMinT & 1) + chMaxT += (chMaxT & 1) + break + } + + if chMinT < chMin || chMaxT > chMax { + c.addRange(chMinT, chMaxT) + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/code.go b/vendor/github.com/dlclark/regexp2/syntax/code.go new file mode 100644 index 0000000..686e822 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/code.go @@ -0,0 +1,274 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" +) + +// similar to prog.go in the go regex package...also with comment 'may not belong in this package' + +// File 
provides operator constants for use by the Builder and the Machine. + +// Implementation notes: +// +// Regexps are built into RegexCodes, which contain an operation array, +// a string table, and some constants. +// +// Each operation is one of the codes below, followed by the integer +// operands specified for each op. +// +// Strings and sets are indices into a string table. + +type InstOp int + +const ( + // lef/back operands description + + Onerep InstOp = 0 // lef,back char,min,max a {n} + Notonerep = 1 // lef,back char,min,max .{n} + Setrep = 2 // lef,back set,min,max [\d]{n} + + Oneloop = 3 // lef,back char,min,max a {,n} + Notoneloop = 4 // lef,back char,min,max .{,n} + Setloop = 5 // lef,back set,min,max [\d]{,n} + + Onelazy = 6 // lef,back char,min,max a {,n}? + Notonelazy = 7 // lef,back char,min,max .{,n}? + Setlazy = 8 // lef,back set,min,max [\d]{,n}? + + One = 9 // lef char a + Notone = 10 // lef char [^a] + Set = 11 // lef set [a-z\s] \w \s \d + + Multi = 12 // lef string abcd + Ref = 13 // lef group \# + + Bol = 14 // ^ + Eol = 15 // $ + Boundary = 16 // \b + Nonboundary = 17 // \B + Beginning = 18 // \A + Start = 19 // \G + EndZ = 20 // \Z + End = 21 // \Z + + Nothing = 22 // Reject! 
+ + // Primitive control structures + + Lazybranch = 23 // back jump straight first + Branchmark = 24 // back jump branch first for loop + Lazybranchmark = 25 // back jump straight first for loop + Nullcount = 26 // back val set counter, null mark + Setcount = 27 // back val set counter, make mark + Branchcount = 28 // back jump,limit branch++ if zero<=c impl group slots + Capsize int // number of impl group slots + FcPrefix *Prefix // the set of candidate first characters (may be null) + BmPrefix *BmPrefix // the fixed prefix string as a Boyer-Moore machine (may be null) + Anchors AnchorLoc // the set of zero-length start anchors (RegexFCD.Bol, etc) + RightToLeft bool // true if right to left +} + +func opcodeBacktracks(op InstOp) bool { + op &= Mask + + switch op { + case Oneloop, Notoneloop, Setloop, Onelazy, Notonelazy, Setlazy, Lazybranch, Branchmark, Lazybranchmark, + Nullcount, Setcount, Branchcount, Lazybranchcount, Setmark, Capturemark, Getmark, Setjump, Backjump, + Forejump, Goto: + return true + + default: + return false + } +} + +func opcodeSize(op InstOp) int { + op &= Mask + + switch op { + case Nothing, Bol, Eol, Boundary, Nonboundary, ECMABoundary, NonECMABoundary, Beginning, Start, EndZ, + End, Nullmark, Setmark, Getmark, Setjump, Backjump, Forejump, Stop: + return 1 + + case One, Notone, Multi, Ref, Testref, Goto, Nullcount, Setcount, Lazybranch, Branchmark, Lazybranchmark, + Prune, Set: + return 2 + + case Capturemark, Branchcount, Lazybranchcount, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, + Setlazy, Setrep, Setloop: + return 3 + + default: + panic(fmt.Errorf("Unexpected op code: %v", op)) + } +} + +var codeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", "Beginning", "Start", "EndZ", "End", + "Nothing", + "Lazybranch", "Branchmark", "Lazybranchmark", + "Nullcount", 
"Setcount", "Branchcount", "Lazybranchcount", + "Nullmark", "Setmark", "Capturemark", "Getmark", + "Setjump", "Backjump", "Forejump", "Testref", "Goto", + "Prune", "Stop", + "ECMABoundary", "NonECMABoundary", +} + +func operatorDescription(op InstOp) string { + desc := codeStr[op&Mask] + if (op & Ci) != 0 { + desc += "-Ci" + } + if (op & Rtl) != 0 { + desc += "-Rtl" + } + if (op & Back) != 0 { + desc += "-Back" + } + if (op & Back2) != 0 { + desc += "-Back2" + } + + return desc +} + +// OpcodeDescription is a humman readable string of the specific offset +func (c *Code) OpcodeDescription(offset int) string { + buf := &bytes.Buffer{} + + op := InstOp(c.Codes[offset]) + fmt.Fprintf(buf, "%06d ", offset) + + if opcodeBacktracks(op & Mask) { + buf.WriteString("*") + } else { + buf.WriteString(" ") + } + buf.WriteString(operatorDescription(op)) + buf.WriteString("(") + op &= Mask + + switch op { + case One, Notone, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy: + buf.WriteString("Ch = ") + buf.WriteString(CharDescription(rune(c.Codes[offset+1]))) + + case Set, Setrep, Setloop, Setlazy: + buf.WriteString("Set = ") + buf.WriteString(c.Sets[c.Codes[offset+1]].String()) + + case Multi: + fmt.Fprintf(buf, "String = %s", string(c.Strings[c.Codes[offset+1]])) + + case Ref, Testref: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + + case Capturemark: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + if c.Codes[offset+2] != -1 { + fmt.Fprintf(buf, ", Unindex = %d", c.Codes[offset+2]) + } + + case Nullcount, Setcount: + fmt.Fprintf(buf, "Value = %d", c.Codes[offset+1]) + + case Goto, Lazybranch, Branchmark, Lazybranchmark, Branchcount, Lazybranchcount: + fmt.Fprintf(buf, "Addr = %d", c.Codes[offset+1]) + } + + switch op { + case Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, Setrep, Setloop, Setlazy: + buf.WriteString(", Rep = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", 
c.Codes[offset+2]) + } + + case Branchcount, Lazybranchcount: + buf.WriteString(", Limit = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", c.Codes[offset+2]) + } + + } + + buf.WriteString(")") + + return buf.String() +} + +func (c *Code) Dump() string { + buf := &bytes.Buffer{} + + if c.RightToLeft { + fmt.Fprintln(buf, "Direction: right-to-left") + } else { + fmt.Fprintln(buf, "Direction: left-to-right") + } + if c.FcPrefix == nil { + fmt.Fprintln(buf, "Firstchars: n/a") + } else { + fmt.Fprintf(buf, "Firstchars: %v\n", c.FcPrefix.PrefixSet.String()) + } + + if c.BmPrefix == nil { + fmt.Fprintln(buf, "Prefix: n/a") + } else { + fmt.Fprintf(buf, "Prefix: %v\n", Escape(c.BmPrefix.String())) + } + + fmt.Fprintf(buf, "Anchors: %v\n", c.Anchors) + fmt.Fprintln(buf) + + if c.BmPrefix != nil { + fmt.Fprintln(buf, "BoyerMoore:") + fmt.Fprintln(buf, c.BmPrefix.Dump(" ")) + } + for i := 0; i < len(c.Codes); i += opcodeSize(InstOp(c.Codes[i])) { + fmt.Fprintln(buf, c.OpcodeDescription(i)) + } + + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/escape.go b/vendor/github.com/dlclark/regexp2/syntax/escape.go new file mode 100644 index 0000000..609df10 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/escape.go @@ -0,0 +1,94 @@ +package syntax + +import ( + "bytes" + "strconv" + "strings" + "unicode" +) + +func Escape(input string) string { + b := &bytes.Buffer{} + for _, r := range input { + escape(b, r, false) + } + return b.String() +} + +const meta = `\.+*?()|[]{}^$# ` + +func escape(b *bytes.Buffer, r rune, force bool) { + if unicode.IsPrint(r) { + if strings.IndexRune(meta, r) >= 0 || force { + b.WriteRune('\\') + } + b.WriteRune(r) + return + } + + switch r { + case '\a': + b.WriteString(`\a`) + case '\f': + b.WriteString(`\f`) + case '\n': + b.WriteString(`\n`) + case '\r': + b.WriteString(`\r`) + case '\t': + b.WriteString(`\t`) + case '\v': + b.WriteString(`\v`) + default: 
+ if r < 0x100 { + b.WriteString(`\x`) + s := strconv.FormatInt(int64(r), 16) + if len(s) == 1 { + b.WriteRune('0') + } + b.WriteString(s) + break + } + b.WriteString(`\u`) + b.WriteString(strconv.FormatInt(int64(r), 16)) + } +} + +func Unescape(input string) (string, error) { + idx := strings.IndexRune(input, '\\') + // no slashes means no unescape needed + if idx == -1 { + return input, nil + } + + buf := bytes.NewBufferString(input[:idx]) + // get the runes for the rest of the string -- we're going full parser scan on this + + p := parser{} + p.setPattern(input[idx+1:]) + for { + if p.rightMost() { + return "", p.getErr(ErrIllegalEndEscape) + } + r, err := p.scanCharEscape() + if err != nil { + return "", err + } + buf.WriteRune(r) + // are we done? + if p.rightMost() { + return buf.String(), nil + } + + r = p.moveRightGetChar() + for r != '\\' { + buf.WriteRune(r) + if p.rightMost() { + // we're done, no more slashes + return buf.String(), nil + } + // keep scanning until we get another slash + r = p.moveRightGetChar() + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/fuzz.go b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go new file mode 100644 index 0000000..ee86386 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go @@ -0,0 +1,20 @@ +// +build gofuzz + +package syntax + +// Fuzz is the input point for go-fuzz +func Fuzz(data []byte) int { + sdata := string(data) + tree, err := Parse(sdata, RegexOptions(0)) + if err != nil { + return 0 + } + + // translate it to code + _, err = Write(tree) + if err != nil { + panic(err) + } + + return 1 +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/parser.go b/vendor/github.com/dlclark/regexp2/syntax/parser.go new file mode 100644 index 0000000..b6c3670 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/parser.go @@ -0,0 +1,2262 @@ +package syntax + +import ( + "fmt" + "math" + "os" + "sort" + "strconv" + "unicode" +) + +type RegexOptions int32 + +const ( + IgnoreCase 
RegexOptions = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" + RE2 = 0x0200 // RE2 compat mode + Unicode = 0x0400 // "u" +) + +func optionFromCode(ch rune) RegexOptions { + // case-insensitive + switch ch { + case 'i', 'I': + return IgnoreCase + case 'r', 'R': + return RightToLeft + case 'm', 'M': + return Multiline + case 'n', 'N': + return ExplicitCapture + case 's', 'S': + return Singleline + case 'x', 'X': + return IgnorePatternWhitespace + case 'd', 'D': + return Debug + case 'e', 'E': + return ECMAScript + case 'u', 'U': + return Unicode + default: + return 0 + } +} + +// An Error describes a failure to parse a regular expression +// and gives the offending expression. +type Error struct { + Code ErrorCode + Expr string + Args []interface{} +} + +func (e *Error) Error() string { + if len(e.Args) == 0 { + return "error parsing regexp: " + e.Code.String() + " in `" + e.Expr + "`" + } + return "error parsing regexp: " + fmt.Sprintf(e.Code.String(), e.Args...) + " in `" + e.Expr + "`" +} + +// An ErrorCode describes a failure to parse a regular expression. 
+type ErrorCode string + +const ( + // internal issue + ErrInternalError ErrorCode = "regexp/syntax: internal error" + // Parser errors + ErrUnterminatedComment = "unterminated comment" + ErrInvalidCharRange = "invalid character class range" + ErrInvalidRepeatSize = "invalid repeat count" + ErrInvalidUTF8 = "invalid UTF-8" + ErrCaptureGroupOutOfRange = "capture group number out of range" + ErrUnexpectedParen = "unexpected )" + ErrMissingParen = "missing closing )" + ErrMissingBrace = "missing closing }" + ErrInvalidRepeatOp = "invalid nested repetition operator" + ErrMissingRepeatArgument = "missing argument to repetition operator" + ErrConditionalExpression = "illegal conditional (?(...)) expression" + ErrTooManyAlternates = "too many | in (?()|)" + ErrUnrecognizedGrouping = "unrecognized grouping construct: (%v" + ErrInvalidGroupName = "invalid group name: group names must begin with a word character and have a matching terminator" + ErrCapNumNotZero = "capture number cannot be zero" + ErrUndefinedBackRef = "reference to undefined group number %v" + ErrUndefinedNameRef = "reference to undefined group name %v" + ErrAlternationCantCapture = "alternation conditions do not capture and cannot be named" + ErrAlternationCantHaveComment = "alternation conditions cannot be comments" + ErrMalformedReference = "(?(%v) ) malformed" + ErrUndefinedReference = "(?(%v) ) reference to undefined group" + ErrIllegalEndEscape = "illegal \\ at end of pattern" + ErrMalformedSlashP = "malformed \\p{X} character escape" + ErrIncompleteSlashP = "incomplete \\p{X} character escape" + ErrUnknownSlashP = "unknown unicode category, script, or property '%v'" + ErrUnrecognizedEscape = "unrecognized escape sequence \\%v" + ErrMissingControl = "missing control character" + ErrUnrecognizedControl = "unrecognized control character" + ErrTooFewHex = "insufficient hexadecimal digits" + ErrInvalidHex = "hex values may not be larger than 0x10FFFF" + ErrMalformedNameRef = "malformed \\k<...> named back 
reference" + ErrBadClassInCharRange = "cannot include class \\%v in character range" + ErrUnterminatedBracket = "unterminated [] set" + ErrSubtractionMustBeLast = "a subtraction must be the last element in a character class" + ErrReversedCharRange = "[%c-%c] range in reverse order" +) + +func (e ErrorCode) String() string { + return string(e) +} + +type parser struct { + stack *regexNode + group *regexNode + alternation *regexNode + concatenation *regexNode + unit *regexNode + + patternRaw string + pattern []rune + + currentPos int + specialCase *unicode.SpecialCase + + autocap int + capcount int + captop int + capsize int + + caps map[int]int + capnames map[string]int + + capnumlist []int + capnamelist []string + + options RegexOptions + optionsStack []RegexOptions + ignoreNextParen bool +} + +const ( + maxValueDiv10 int = math.MaxInt32 / 10 + maxValueMod10 = math.MaxInt32 % 10 +) + +// Parse converts a regex string into a parse tree +func Parse(re string, op RegexOptions) (*RegexTree, error) { + p := parser{ + options: op, + caps: make(map[int]int), + } + p.setPattern(re) + + if err := p.countCaptures(); err != nil { + return nil, err + } + + p.reset(op) + root, err := p.scanRegex() + + if err != nil { + return nil, err + } + tree := &RegexTree{ + root: root, + caps: p.caps, + capnumlist: p.capnumlist, + captop: p.captop, + Capnames: p.capnames, + Caplist: p.capnamelist, + options: op, + } + + if tree.options&Debug > 0 { + os.Stdout.WriteString(tree.Dump()) + } + + return tree, nil +} + +func (p *parser) setPattern(pattern string) { + p.patternRaw = pattern + p.pattern = make([]rune, 0, len(pattern)) + + //populate our rune array to handle utf8 encoding + for _, r := range pattern { + p.pattern = append(p.pattern, r) + } +} +func (p *parser) getErr(code ErrorCode, args ...interface{}) error { + return &Error{Code: code, Expr: p.patternRaw, Args: args} +} + +func (p *parser) noteCaptureSlot(i, pos int) { + if _, ok := p.caps[i]; !ok { + // the rhs of the hashtable 
isn't used in the parser + p.caps[i] = pos + p.capcount++ + + if p.captop <= i { + if i == math.MaxInt32 { + p.captop = i + } else { + p.captop = i + 1 + } + } + } +} + +func (p *parser) noteCaptureName(name string, pos int) { + if p.capnames == nil { + p.capnames = make(map[string]int) + } + + if _, ok := p.capnames[name]; !ok { + p.capnames[name] = pos + p.capnamelist = append(p.capnamelist, name) + } +} + +func (p *parser) assignNameSlots() { + if p.capnames != nil { + for _, name := range p.capnamelist { + for p.isCaptureSlot(p.autocap) { + p.autocap++ + } + pos := p.capnames[name] + p.capnames[name] = p.autocap + p.noteCaptureSlot(p.autocap, pos) + + p.autocap++ + } + } + + // if the caps array has at least one gap, construct the list of used slots + if p.capcount < p.captop { + p.capnumlist = make([]int, p.capcount) + i := 0 + + for k := range p.caps { + p.capnumlist[i] = k + i++ + } + + sort.Ints(p.capnumlist) + } + + // merge capsnumlist into capnamelist + if p.capnames != nil || p.capnumlist != nil { + var oldcapnamelist []string + var next int + var k int + + if p.capnames == nil { + oldcapnamelist = nil + p.capnames = make(map[string]int) + p.capnamelist = []string{} + next = -1 + } else { + oldcapnamelist = p.capnamelist + p.capnamelist = []string{} + next = p.capnames[oldcapnamelist[0]] + } + + for i := 0; i < p.capcount; i++ { + j := i + if p.capnumlist != nil { + j = p.capnumlist[i] + } + + if next == j { + p.capnamelist = append(p.capnamelist, oldcapnamelist[k]) + k++ + + if k == len(oldcapnamelist) { + next = -1 + } else { + next = p.capnames[oldcapnamelist[k]] + } + + } else { + //feature: culture? + str := strconv.Itoa(j) + p.capnamelist = append(p.capnamelist, str) + p.capnames[str] = j + } + } + } +} + +func (p *parser) consumeAutocap() int { + r := p.autocap + p.autocap++ + return r +} + +// CountCaptures is a prescanner for deducing the slots used for +// captures by doing a partial tokenization of the pattern. 
+func (p *parser) countCaptures() error { + var ch rune + + p.noteCaptureSlot(0, 0) + + p.autocap = 1 + + for p.charsRight() > 0 { + pos := p.textpos() + ch = p.moveRightGetChar() + switch ch { + case '\\': + if p.charsRight() > 0 { + p.scanBackslash(true) + } + + case '#': + if p.useOptionX() { + p.moveLeft() + p.scanBlank() + } + + case '[': + p.scanCharSet(false, true) + + case ')': + if !p.emptyOptionsStack() { + p.popOptions() + } + + case '(': + if p.charsRight() >= 2 && p.rightChar(1) == '#' && p.rightChar(0) == '?' { + p.moveLeft() + p.scanBlank() + } else { + p.pushOptions() + if p.charsRight() > 0 && p.rightChar(0) == '?' { + // we have (?... + p.moveRight(1) + + if p.charsRight() > 1 && (p.rightChar(0) == '<' || p.rightChar(0) == '\'') { + // named group: (?<... or (?'... + + p.moveRight(1) + ch = p.rightChar(0) + + if ch != '0' && IsWordChar(ch) { + if ch >= '1' && ch <= '9' { + dec, err := p.scanDecimal() + if err != nil { + return err + } + p.noteCaptureSlot(dec, pos) + } else { + p.noteCaptureName(p.scanCapname(), pos) + } + } + } else if p.useRE2() && p.charsRight() > 2 && (p.rightChar(0) == 'P' && p.rightChar(1) == '<') { + // RE2-compat (?P<) + p.moveRight(2) + ch = p.rightChar(0) + if IsWordChar(ch) { + p.noteCaptureName(p.scanCapname(), pos) + } + + } else { + // (?... + + // get the options if it's an option construct (?cimsx-cimsx...) 
+ p.scanOptions() + + if p.charsRight() > 0 { + if p.rightChar(0) == ')' { + // (?cimsx-cimsx) + p.moveRight(1) + p.popKeepOptions() + } else if p.rightChar(0) == '(' { + // alternation construct: (?(foo)yes|no) + // ignore the next paren so we don't capture the condition + p.ignoreNextParen = true + + // break from here so we don't reset ignoreNextParen + continue + } + } + } + } else { + if !p.useOptionN() && !p.ignoreNextParen { + p.noteCaptureSlot(p.consumeAutocap(), pos) + } + } + } + + p.ignoreNextParen = false + + } + } + + p.assignNameSlots() + return nil +} + +func (p *parser) reset(topopts RegexOptions) { + p.currentPos = 0 + p.autocap = 1 + p.ignoreNextParen = false + + if len(p.optionsStack) > 0 { + p.optionsStack = p.optionsStack[:0] + } + + p.options = topopts + p.stack = nil +} + +func (p *parser) scanRegex() (*regexNode, error) { + ch := '@' // nonspecial ch, means at beginning + isQuant := false + + p.startGroup(newRegexNodeMN(ntCapture, p.options, 0, -1)) + + for p.charsRight() > 0 { + wasPrevQuantifier := isQuant + isQuant = false + + if err := p.scanBlank(); err != nil { + return nil, err + } + + startpos := p.textpos() + + // move past all of the normal characters. We'll stop when we hit some kind of control character, + // or if IgnorePatternWhiteSpace is on, we'll stop when we see some whitespace. + if p.useOptionX() { + for p.charsRight() > 0 { + ch = p.rightChar(0) + //UGLY: clean up, this is ugly + if !(!isStopperX(ch) || (ch == '{' && !p.isTrueQuantifier())) { + break + } + p.moveRight(1) + } + } else { + for p.charsRight() > 0 { + ch = p.rightChar(0) + if !(!isSpecial(ch) || ch == '{' && !p.isTrueQuantifier()) { + break + } + p.moveRight(1) + } + } + + endpos := p.textpos() + + p.scanBlank() + + if p.charsRight() == 0 { + ch = '!' 
// nonspecial, means at end + } else if ch = p.rightChar(0); isSpecial(ch) { + isQuant = isQuantifier(ch) + p.moveRight(1) + } else { + ch = ' ' // nonspecial, means at ordinary char + } + + if startpos < endpos { + cchUnquantified := endpos - startpos + if isQuant { + cchUnquantified-- + } + wasPrevQuantifier = false + + if cchUnquantified > 0 { + p.addToConcatenate(startpos, cchUnquantified, false) + } + + if isQuant { + p.addUnitOne(p.charAt(endpos - 1)) + } + } + + switch ch { + case '!': + goto BreakOuterScan + + case ' ': + goto ContinueOuterScan + + case '[': + cc, err := p.scanCharSet(p.useOptionI(), false) + if err != nil { + return nil, err + } + p.addUnitSet(cc) + + case '(': + p.pushOptions() + + if grouper, err := p.scanGroupOpen(); err != nil { + return nil, err + } else if grouper == nil { + p.popKeepOptions() + } else { + p.pushGroup() + p.startGroup(grouper) + } + + continue + + case '|': + p.addAlternate() + goto ContinueOuterScan + + case ')': + if p.emptyStack() { + return nil, p.getErr(ErrUnexpectedParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + if err := p.popGroup(); err != nil { + return nil, err + } + p.popOptions() + + if p.unit == nil { + goto ContinueOuterScan + } + + case '\\': + n, err := p.scanBackslash(false) + if err != nil { + return nil, err + } + p.addUnitNode(n) + + case '^': + if p.useOptionM() { + p.addUnitType(ntBol) + } else { + p.addUnitType(ntBeginning) + } + + case '$': + if p.useOptionM() { + p.addUnitType(ntEol) + } else { + p.addUnitType(ntEndZ) + } + + case '.': + if p.useOptionE() { + p.addUnitSet(ECMAAnyClass()) + } else if p.useOptionS() { + p.addUnitSet(AnyClass()) + } else { + p.addUnitNotone('\n') + } + + case '{', '*', '+', '?': + if p.unit == nil { + if wasPrevQuantifier { + return nil, p.getErr(ErrInvalidRepeatOp) + } else { + return nil, p.getErr(ErrMissingRepeatArgument) + } + } + p.moveLeft() + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); 
err != nil { + return nil, err + } + + if p.charsRight() > 0 { + isQuant = p.isTrueQuantifier() + } + if p.charsRight() == 0 || !isQuant { + //maintain odd C# assignment order -- not sure if required, could clean up? + p.addConcatenate() + goto ContinueOuterScan + } + + ch = p.moveRightGetChar() + + // Handle quantifiers + for p.unit != nil { + var min, max int + var lazy bool + + switch ch { + case '*': + min = 0 + max = math.MaxInt32 + + case '?': + min = 0 + max = 1 + + case '+': + min = 1 + max = math.MaxInt32 + + case '{': + { + var err error + startpos = p.textpos() + if min, err = p.scanDecimal(); err != nil { + return nil, err + } + max = min + if startpos < p.textpos() { + if p.charsRight() > 0 && p.rightChar(0) == ',' { + p.moveRight(1) + if p.charsRight() == 0 || p.rightChar(0) == '}' { + max = math.MaxInt32 + } else { + if max, err = p.scanDecimal(); err != nil { + return nil, err + } + } + } + } + + if startpos == p.textpos() || p.charsRight() == 0 || p.moveRightGetChar() != '}' { + p.addConcatenate() + p.textto(startpos - 1) + goto ContinueOuterScan + } + } + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); err != nil { + return nil, err + } + + if p.charsRight() == 0 || p.rightChar(0) != '?' 
{ + lazy = false + } else { + p.moveRight(1) + lazy = true + } + + if min > max { + return nil, p.getErr(ErrInvalidRepeatSize) + } + + p.addConcatenate3(lazy, min, max) + } + + ContinueOuterScan: + } + +BreakOuterScan: + ; + + if !p.emptyStack() { + return nil, p.getErr(ErrMissingParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + + return p.unit, nil + +} + +/* + * Simple parsing for replacement patterns + */ +func (p *parser) scanReplacement() (*regexNode, error) { + var c, startpos int + + p.concatenation = newRegexNode(ntConcatenate, p.options) + + for { + c = p.charsRight() + if c == 0 { + break + } + + startpos = p.textpos() + + for c > 0 && p.rightChar(0) != '$' { + p.moveRight(1) + c-- + } + + p.addToConcatenate(startpos, p.textpos()-startpos, true) + + if c > 0 { + if p.moveRightGetChar() == '$' { + n, err := p.scanDollar() + if err != nil { + return nil, err + } + p.addUnitNode(n) + } + p.addConcatenate() + } + } + + return p.concatenation, nil +} + +/* + * Scans $ patterns recognized within replacement patterns + */ +func (p *parser) scanDollar() (*regexNode, error) { + if p.charsRight() == 0 { + return newRegexNodeCh(ntOne, p.options, '$'), nil + } + + ch := p.rightChar(0) + angled := false + backpos := p.textpos() + lastEndPos := backpos + + // Note angle + + if ch == '{' && p.charsRight() > 1 { + angled = true + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \1 or \{1} or \{cap} + + if ch >= '0' && ch <= '9' { + if !angled && p.useOptionE() { + capnum := -1 + newcapnum := int(ch - '0') + p.moveRight(1) + if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + + for p.charsRight() > 0 { + ch = p.rightChar(0) + if ch < '0' || ch > '9' { + break + } + digit := int(ch - '0') + if newcapnum > maxValueDiv10 || (newcapnum == maxValueDiv10 && digit > maxValueMod10) { + return nil, p.getErr(ErrCaptureGroupOutOfRange) + } + + newcapnum = newcapnum*10 + digit + + p.moveRight(1) + 
if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + } + p.textto(lastEndPos) + if capnum >= 0 { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } else { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + if !angled || p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + } + } else if angled && IsWordChar(ch) { + capname := p.scanCapname() + + if p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureName(capname) { + return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + } + } else if !angled { + capnum := 1 + + switch ch { + case '$': + p.moveRight(1) + return newRegexNodeCh(ntOne, p.options, '$'), nil + case '&': + capnum = 0 + case '`': + capnum = replaceLeftPortion + case '\'': + capnum = replaceRightPortion + case '+': + capnum = replaceLastGroup + case '_': + capnum = replaceWholeString + } + + if capnum != 1 { + p.moveRight(1) + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + + // unrecognized $: literalize + + p.textto(backpos) + return newRegexNodeCh(ntOne, p.options, '$'), nil +} + +// scanGroupOpen scans chars following a '(' (not counting the '('), and returns +// a RegexNode for the type of group scanned, or nil if the group +// simply changed options (?cimsx-cimsx) or was a comment (#...). +func (p *parser) scanGroupOpen() (*regexNode, error) { + var ch rune + var nt nodeType + var err error + close := '>' + start := p.textpos() + + // just return a RegexNode if we have: + // 1. "(" followed by nothing + // 2. "(x" where x != ? + // 3. "(?)" + if p.charsRight() == 0 || p.rightChar(0) != '?' || (p.rightChar(0) == '?' 
&& (p.charsRight() > 1 && p.rightChar(1) == ')')) { + if p.useOptionN() || p.ignoreNextParen { + p.ignoreNextParen = false + return newRegexNode(ntGroup, p.options), nil + } + return newRegexNodeMN(ntCapture, p.options, p.consumeAutocap(), -1), nil + } + + p.moveRight(1) + + for { + if p.charsRight() == 0 { + break + } + + switch ch = p.moveRightGetChar(); ch { + case ':': + nt = ntGroup + + case '=': + p.options &= ^RightToLeft + nt = ntRequire + + case '!': + p.options &= ^RightToLeft + nt = ntPrevent + + case '>': + nt = ntGreedy + + case '\'': + close = '\'' + fallthrough + + case '<': + if p.charsRight() == 0 { + goto BreakRecognize + } + + switch ch = p.moveRightGetChar(); ch { + case '=': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntRequire + + case '!': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntPrevent + + default: + p.moveLeft() + capnum := -1 + uncapnum := -1 + proceed := false + + // grab part before - + + if ch >= '0' && ch <= '9' { + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(capnum) { + capnum = -1 + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + if capnum == 0 { + return nil, p.getErr(ErrCapNumNotZero) + } + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) { + capnum = p.captureSlotFromName(capname) + } + + // check if we have bogus character after the name + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if ch == '-' { + proceed = true + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + + // grab part after - if any + + if (capnum != -1 || proceed == 
true) && p.charsRight() > 0 && p.rightChar(0) == '-' { + p.moveRight(1) + + //no more chars left, no closing char, etc + if p.charsRight() == 0 { + return nil, p.getErr(ErrInvalidGroupName) + } + + ch = p.rightChar(0) + if ch >= '0' && ch <= '9' { + if uncapnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(uncapnum) { + return nil, p.getErr(ErrUndefinedBackRef, uncapnum) + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if IsWordChar(ch) { + uncapname := p.scanCapname() + + if !p.isCaptureName(uncapname) { + return nil, p.getErr(ErrUndefinedNameRef, uncapname) + } + uncapnum = p.captureSlotFromName(uncapname) + + // check if we have bogus character after the name + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + } + + // actually make the node + + if (capnum != -1 || uncapnum != -1) && p.charsRight() > 0 && p.moveRightGetChar() == close { + return newRegexNodeMN(ntCapture, p.options, capnum, uncapnum), nil + } + goto BreakRecognize + } + + case '(': + // alternation construct (?(...) 
| ) + + parenPos := p.textpos() + if p.charsRight() > 0 { + ch = p.rightChar(0) + + // check if the alternation condition is a backref + if ch >= '0' && ch <= '9' { + var capnum int + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + if p.charsRight() > 0 && p.moveRightGetChar() == ')' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntTestref, p.options, capnum), nil + } + return nil, p.getErr(ErrUndefinedReference, capnum) + } + + return nil, p.getErr(ErrMalformedReference, capnum) + + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) && p.charsRight() > 0 && p.moveRightGetChar() == ')' { + return newRegexNodeM(ntTestref, p.options, p.captureSlotFromName(capname)), nil + } + } + } + // not a backref + nt = ntTestgroup + p.textto(parenPos - 1) // jump to the start of the parentheses + p.ignoreNextParen = true // but make sure we don't try to capture the insides + + charsRight := p.charsRight() + if charsRight >= 3 && p.rightChar(1) == '?' { + rightchar2 := p.rightChar(2) + // disallow comments in the condition + if rightchar2 == '#' { + return nil, p.getErr(ErrAlternationCantHaveComment) + } + + // disallow named capture group (?<..>..) in the condition + if rightchar2 == '\'' { + return nil, p.getErr(ErrAlternationCantCapture) + } + + if charsRight >= 4 && (rightchar2 == '<' && p.rightChar(3) != '!' 
&& p.rightChar(3) != '=') { + return nil, p.getErr(ErrAlternationCantCapture) + } + } + + case 'P': + if p.useRE2() { + // support for P syntax + if p.charsRight() < 3 { + goto BreakRecognize + } + + ch = p.moveRightGetChar() + if ch != '<' { + goto BreakRecognize + } + + ch = p.moveRightGetChar() + p.moveLeft() + + if IsWordChar(ch) { + capnum := -1 + capname := p.scanCapname() + + if p.isCaptureName(capname) { + capnum = p.captureSlotFromName(capname) + } + + // check if we have bogus character after the name + if p.charsRight() > 0 && p.rightChar(0) != '>' { + return nil, p.getErr(ErrInvalidGroupName) + } + + // actually make the node + + if capnum != -1 && p.charsRight() > 0 && p.moveRightGetChar() == '>' { + return newRegexNodeMN(ntCapture, p.options, capnum, -1), nil + } + goto BreakRecognize + + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + } + // if we're not using RE2 compat mode then + // we just behave like normal + fallthrough + + default: + p.moveLeft() + + nt = ntGroup + // disallow options in the children of a testgroup node + if p.group.t != ntTestgroup { + p.scanOptions() + } + if p.charsRight() == 0 { + goto BreakRecognize + } + + if ch = p.moveRightGetChar(); ch == ')' { + return nil, nil + } + + if ch != ':' { + goto BreakRecognize + } + + } + + return newRegexNode(nt, p.options), nil + } + +BreakRecognize: + + // break Recognize comes here + + return nil, p.getErr(ErrUnrecognizedGrouping, string(p.pattern[start:p.textpos()])) +} + +// scans backslash specials and basics +func (p *parser) scanBackslash(scanOnly bool) (*regexNode, error) { + + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + + switch ch := p.rightChar(0); ch { + case 'b', 'B', 'A', 'G', 'Z', 'z': + p.moveRight(1) + return newRegexNode(p.typeFromCode(ch), p.options), nil + + case 'w': + p.moveRight(1) + if p.useOptionE() || p.useRE2() { + return 
newRegexNodeSet(ntSet, p.options, ECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, WordClass()), nil + + case 'W': + p.moveRight(1) + if p.useOptionE() || p.useRE2() { + return newRegexNodeSet(ntSet, p.options, NotECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotWordClass()), nil + + case 's': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMASpaceClass()), nil + } else if p.useRE2() { + return newRegexNodeSet(ntSet, p.options, RE2SpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, SpaceClass()), nil + + case 'S': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMASpaceClass()), nil + } else if p.useRE2() { + return newRegexNodeSet(ntSet, p.options, NotRE2SpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotSpaceClass()), nil + + case 'd': + p.moveRight(1) + if p.useOptionE() || p.useRE2() { + return newRegexNodeSet(ntSet, p.options, ECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, p.options, DigitClass()), nil + + case 'D': + p.moveRight(1) + if p.useOptionE() || p.useRE2() { + return newRegexNodeSet(ntSet, p.options, NotECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotDigitClass()), nil + + case 'p', 'P': + p.moveRight(1) + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc := &CharSet{} + cc.addCategory(prop, (ch != 'p'), p.useOptionI(), p.patternRaw) + if p.useOptionI() { + cc.addLowercase() + } + + return newRegexNodeSet(ntSet, p.options, cc), nil + + default: + return p.scanBasicBackslash(scanOnly) + } +} + +// Scans \-style backreferences and character escapes +func (p *parser) scanBasicBackslash(scanOnly bool) (*regexNode, error) { + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + angled := false + k := false + close := '\x00' + + backpos := p.textpos() + ch := p.rightChar(0) + + // Allow \k instead of \, which is now 
deprecated. + + // According to ECMAScript specification, \k is only parsed as a named group reference if + // there is at least one group name in the regexp. + // See https://www.ecma-international.org/ecma-262/#sec-isvalidregularexpressionliteral, step 7. + // Note, during the first (scanOnly) run we may not have all group names scanned, but that's ok. + if ch == 'k' && (!p.useOptionE() || len(p.capnames) > 0) { + if p.charsRight() >= 2 { + p.moveRight(1) + ch = p.moveRightGetChar() + + if ch == '<' || (!p.useOptionE() && ch == '\'') { // No support for \k'name' in ECMAScript + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + } + } + + if !angled || p.charsRight() <= 0 { + return nil, p.getErr(ErrMalformedNameRef) + } + + ch = p.rightChar(0) + k = true + + } else if !p.useOptionE() && (ch == '<' || ch == '\'') && p.charsRight() > 1 { // Note angle without \g + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \<1> or \ + + if angled && ch >= '0' && ch <= '9' { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + + if p.charsRight() > 0 && p.moveRightGetChar() == close { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + } else if !angled && ch >= '1' && ch <= '9' { // Try to parse backreference or octal: \1 + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + + if scanOnly { + return nil, nil + } + + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + if capnum <= 9 && !p.useOptionE() { + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + + } else if angled { + capname := p.scanCapname() + + if capname != "" && p.charsRight() > 0 && p.moveRightGetChar() == close { + + if scanOnly { + return nil, nil + } + + if p.isCaptureName(capname) { + return 
newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + return nil, p.getErr(ErrUndefinedNameRef, capname) + } else { + if k { + return nil, p.getErr(ErrMalformedNameRef) + } + } + } + + // Not backreference: must be char code + + p.textto(backpos) + ch, err := p.scanCharEscape() + if err != nil { + return nil, err + } + + if scanOnly { + return nil, nil + } + + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + return newRegexNodeCh(ntOne, p.options, ch), nil +} + +// Scans X for \p{X} or \P{X} +func (p *parser) parseProperty() (string, error) { + // RE2 and PCRE supports \pX syntax (no {} and only 1 letter unicode cats supported) + // since this is purely additive syntax it's not behind a flag + if p.charsRight() >= 1 && p.rightChar(0) != '{' { + ch := string(p.moveRightGetChar()) + // check if it's a valid cat + if !isValidUnicodeCat(ch) { + return "", p.getErr(ErrUnknownSlashP, ch) + } + return ch, nil + } + + if p.charsRight() < 3 { + return "", p.getErr(ErrIncompleteSlashP) + } + ch := p.moveRightGetChar() + if ch != '{' { + return "", p.getErr(ErrMalformedSlashP) + } + + startpos := p.textpos() + for p.charsRight() > 0 { + ch = p.moveRightGetChar() + if !(IsWordChar(ch) || ch == '-') { + p.moveLeft() + break + } + } + capname := string(p.pattern[startpos:p.textpos()]) + + if p.charsRight() == 0 || p.moveRightGetChar() != '}' { + return "", p.getErr(ErrIncompleteSlashP) + } + + if !isValidUnicodeCat(capname) { + return "", p.getErr(ErrUnknownSlashP, capname) + } + + return capname, nil +} + +// Returns ReNode type for zero-length assertions with a \ code. 
+func (p *parser) typeFromCode(ch rune) nodeType { + switch ch { + case 'b': + if p.useOptionE() { + return ntECMABoundary + } + return ntBoundary + case 'B': + if p.useOptionE() { + return ntNonECMABoundary + } + return ntNonboundary + case 'A': + return ntBeginning + case 'G': + return ntStart + case 'Z': + return ntEndZ + case 'z': + return ntEnd + default: + return ntNothing + } +} + +// Scans whitespace or x-mode comments. +func (p *parser) scanBlank() error { + if p.useOptionX() { + for { + for p.charsRight() > 0 && isSpace(p.rightChar(0)) { + p.moveRight(1) + } + + if p.charsRight() == 0 { + break + } + + if p.rightChar(0) == '#' { + for p.charsRight() > 0 && p.rightChar(0) != '\n' { + p.moveRight(1) + } + } else if p.charsRight() >= 3 && p.rightChar(2) == '#' && + p.rightChar(1) == '?' && p.rightChar(0) == '(' { + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } else { + break + } + } + } else { + for { + if p.charsRight() < 3 || p.rightChar(2) != '#' || + p.rightChar(1) != '?' || p.rightChar(0) != '(' { + return nil + } + + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } + } + return nil +} + +func (p *parser) scanCapname() string { + startpos := p.textpos() + + for p.charsRight() > 0 { + if !IsWordChar(p.moveRightGetChar()) { + p.moveLeft() + break + } + } + + return string(p.pattern[startpos:p.textpos()]) +} + +// Scans contents of [] (not including []'s), and converts to a set. 
+func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) { + ch := '\x00' + chPrev := '\x00' + inRange := false + firstChar := true + closed := false + + var cc *CharSet + if !scanOnly { + cc = &CharSet{} + } + + if p.charsRight() > 0 && p.rightChar(0) == '^' { + p.moveRight(1) + if !scanOnly { + cc.negate = true + } + } + + for ; p.charsRight() > 0; firstChar = false { + fTranslatedChar := false + ch = p.moveRightGetChar() + if ch == ']' { + if !firstChar { + closed = true + break + } else if p.useOptionE() { + if !scanOnly { + cc.addRanges(NoneClass().ranges) + } + closed = true + break + } + + } else if ch == '\\' && p.charsRight() > 0 { + switch ch = p.moveRightGetChar(); ch { + case 'D', 'd': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addDigit(p.useOptionE() || p.useRE2(), ch == 'D', p.patternRaw) + } + continue + + case 'S', 's': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addSpace(p.useOptionE(), p.useRE2(), ch == 'S') + } + continue + + case 'W', 'w': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + + cc.addWord(p.useOptionE() || p.useRE2(), ch == 'W') + } + continue + + case 'p', 'P': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc.addCategory(prop, (ch != 'p'), caseInsensitive, p.patternRaw) + } else { + p.parseProperty() + } + + continue + + case '-': + if !scanOnly { + cc.addRange(ch, ch) + } + continue + + default: + p.moveLeft() + var err error + ch, err = p.scanCharEscape() // non-literal character + if err != nil { + return nil, err + } + fTranslatedChar = true + break // this break will only break out of the switch + } + } else if ch == '[' { + // This is code for Posix style properties - [:Ll:] or [:IsTibetan:]. 
+ // It currently doesn't do anything other than skip the whole thing! + if p.charsRight() > 0 && p.rightChar(0) == ':' && !inRange { + savePos := p.textpos() + + p.moveRight(1) + negate := false + if p.charsRight() > 1 && p.rightChar(0) == '^' { + negate = true + p.moveRight(1) + } + + nm := p.scanCapname() // snag the name + if !scanOnly && p.useRE2() { + // look up the name since these are valid for RE2 + // add the group based on the name + if ok := cc.addNamedASCII(nm, negate); !ok { + return nil, p.getErr(ErrInvalidCharRange) + } + } + if p.charsRight() < 2 || p.moveRightGetChar() != ':' || p.moveRightGetChar() != ']' { + p.textto(savePos) + } else if p.useRE2() { + // move on + continue + } + } + } + + if inRange { + inRange = false + if !scanOnly { + if ch == '[' && !fTranslatedChar && !firstChar { + // We thought we were in a range, but we're actually starting a subtraction. + // In that case, we'll add chPrev to our char class, skip the opening [, and + // scan the new character class recursively. + cc.addChar(chPrev) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + // a regular range, like a-z + if chPrev > ch { + return nil, p.getErr(ErrReversedCharRange, chPrev, ch) + } + cc.addRange(chPrev, ch) + } + } + } else if p.charsRight() >= 2 && p.rightChar(0) == '-' && p.rightChar(1) != ']' { + // this could be the start of a range + chPrev = ch + inRange = true + p.moveRight(1) + } else if p.charsRight() >= 1 && ch == '-' && !fTranslatedChar && p.rightChar(0) == '[' && !firstChar { + // we aren't in a range, and now there is a subtraction. 
Usually this happens + // only when a subtraction follows a range, like [a-z-[b]] + if !scanOnly { + p.moveRight(1) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + p.moveRight(1) + p.scanCharSet(caseInsensitive, true) + } + } else { + if !scanOnly { + cc.addRange(ch, ch) + } + } + } + + if !closed { + return nil, p.getErr(ErrUnterminatedBracket) + } + + if !scanOnly && caseInsensitive { + cc.addLowercase() + } + + return cc, nil +} + +// Scans any number of decimal digits (pegs value at 2^31-1 if too large) +func (p *parser) scanDecimal() (int, error) { + i := 0 + var d int + + for p.charsRight() > 0 { + d = int(p.rightChar(0) - '0') + if d < 0 || d > 9 { + break + } + p.moveRight(1) + + if i > maxValueDiv10 || (i == maxValueDiv10 && d > maxValueMod10) { + return 0, p.getErr(ErrCaptureGroupOutOfRange) + } + + i *= 10 + i += d + } + + return int(i), nil +} + +// Returns true for options allowed only at the top level +func isOnlyTopOption(option RegexOptions) bool { + return option == RightToLeft || option == ECMAScript || option == RE2 +} + +// Scans cimsx-cimsx option string, stops at the first unrecognized char. +func (p *parser) scanOptions() { + + for off := false; p.charsRight() > 0; p.moveRight(1) { + ch := p.rightChar(0) + + if ch == '-' { + off = true + } else if ch == '+' { + off = false + } else { + option := optionFromCode(ch) + if option == 0 || isOnlyTopOption(option) { + return + } + + if off { + p.options &= ^option + } else { + p.options |= option + } + } + } +} + +// Scans \ code for escape codes that map to single unicode chars. 
+func (p *parser) scanCharEscape() (r rune, err error) { + + ch := p.moveRightGetChar() + + if ch >= '0' && ch <= '7' { + p.moveLeft() + return p.scanOctal(), nil + } + + pos := p.textpos() + + switch ch { + case 'x': + // support for \x{HEX} syntax from Perl and PCRE + if p.charsRight() > 0 && p.rightChar(0) == '{' { + if p.useOptionE() { + return ch, nil + } + p.moveRight(1) + return p.scanHexUntilBrace() + } else { + r, err = p.scanHex(2) + } + case 'u': + // ECMAscript suppot \u{HEX} only if `u` is also set + if p.useOptionE() && p.useOptionU() && p.charsRight() > 0 && p.rightChar(0) == '{' { + p.moveRight(1) + return p.scanHexUntilBrace() + } else { + r, err = p.scanHex(4) + } + case 'a': + return '\u0007', nil + case 'b': + return '\b', nil + case 'e': + return '\u001B', nil + case 'f': + return '\f', nil + case 'n': + return '\n', nil + case 'r': + return '\r', nil + case 't': + return '\t', nil + case 'v': + return '\u000B', nil + case 'c': + r, err = p.scanControl() + default: + if !p.useOptionE() && !p.useRE2() && IsWordChar(ch) { + return 0, p.getErr(ErrUnrecognizedEscape, string(ch)) + } + return ch, nil + } + if err != nil && p.useOptionE() { + p.textto(pos) + return ch, nil + } + return +} + +// Grabs and converts an ascii control character +func (p *parser) scanControl() (rune, error) { + if p.charsRight() <= 0 { + return 0, p.getErr(ErrMissingControl) + } + + ch := p.moveRightGetChar() + + // \ca interpreted as \cA + + if ch >= 'a' && ch <= 'z' { + ch = (ch - ('a' - 'A')) + } + ch = (ch - '@') + if ch >= 0 && ch < ' ' { + return ch, nil + } + + return 0, p.getErr(ErrUnrecognizedControl) + +} + +// Scan hex digits until we hit a closing brace. 
+// Non-hex digits, hex value too large for UTF-8, or running out of chars are errors +func (p *parser) scanHexUntilBrace() (rune, error) { + // PCRE spec reads like unlimited hex digits are allowed, but unicode has a limit + // so we can enforce that + i := 0 + hasContent := false + + for p.charsRight() > 0 { + ch := p.moveRightGetChar() + if ch == '}' { + // hit our close brace, we're done here + // prevent \x{} + if !hasContent { + return 0, p.getErr(ErrTooFewHex) + } + return rune(i), nil + } + hasContent = true + // no brace needs to be hex digit + d := hexDigit(ch) + if d < 0 { + return 0, p.getErr(ErrMissingBrace) + } + + i *= 0x10 + i += d + + if i > unicode.MaxRune { + return 0, p.getErr(ErrInvalidHex) + } + } + + // we only make it here if we run out of digits without finding the brace + return 0, p.getErr(ErrMissingBrace) +} + +// Scans exactly c hex digits (c=2 for \xFF, c=4 for \uFFFF) +func (p *parser) scanHex(c int) (rune, error) { + + i := 0 + + if p.charsRight() >= c { + for c > 0 { + d := hexDigit(p.moveRightGetChar()) + if d < 0 { + break + } + i *= 0x10 + i += d + c-- + } + } + + if c > 0 { + return 0, p.getErr(ErrTooFewHex) + } + + return rune(i), nil +} + +// Returns n <= 0xF for a hex digit. +func hexDigit(ch rune) int { + + if d := uint(ch - '0'); d <= 9 { + return int(d) + } + + if d := uint(ch - 'a'); d <= 5 { + return int(d + 0xa) + } + + if d := uint(ch - 'A'); d <= 5 { + return int(d + 0xa) + } + + return -1 +} + +// Scans up to three octal digits (stops before exceeding 0377). 
+func (p *parser) scanOctal() rune { + // Consume octal chars only up to 3 digits and value 0377 + + c := 3 + + if c > p.charsRight() { + c = p.charsRight() + } + + //we know the first char is good because the caller had to check + i := 0 + d := int(p.rightChar(0) - '0') + for c > 0 && d <= 7 && d >= 0 { + if i >= 0x20 && p.useOptionE() { + break + } + i *= 8 + i += d + c-- + + p.moveRight(1) + if !p.rightMost() { + d = int(p.rightChar(0) - '0') + } + } + + // Octal codes only go up to 255. Any larger and the behavior that Perl follows + // is simply to truncate the high bits. + i &= 0xFF + + return rune(i) +} + +// Returns the current parsing position. +func (p *parser) textpos() int { + return p.currentPos +} + +// Zaps to a specific parsing position. +func (p *parser) textto(pos int) { + p.currentPos = pos +} + +// Returns the char at the right of the current parsing position and advances to the right. +func (p *parser) moveRightGetChar() rune { + ch := p.pattern[p.currentPos] + p.currentPos++ + return ch +} + +// Moves the current position to the right. +func (p *parser) moveRight(i int) { + // default would be 1 + p.currentPos += i +} + +// Moves the current parsing position one to the left. +func (p *parser) moveLeft() { + p.currentPos-- +} + +// Returns the char left of the current parsing position. +func (p *parser) charAt(i int) rune { + return p.pattern[i] +} + +// Returns the char i chars right of the current parsing position. +func (p *parser) rightChar(i int) rune { + // default would be 0 + return p.pattern[p.currentPos+i] +} + +// Number of characters to the right of the current parsing position. 
+func (p *parser) charsRight() int { + return len(p.pattern) - p.currentPos +} + +func (p *parser) rightMost() bool { + return p.currentPos == len(p.pattern) +} + +// Looks up the slot number for a given name +func (p *parser) captureSlotFromName(capname string) int { + return p.capnames[capname] +} + +// True if the capture slot was noted +func (p *parser) isCaptureSlot(i int) bool { + if p.caps != nil { + _, ok := p.caps[i] + return ok + } + + return (i >= 0 && i < p.capsize) +} + +// Looks up the slot number for a given name +func (p *parser) isCaptureName(capname string) bool { + if p.capnames == nil { + return false + } + + _, ok := p.capnames[capname] + return ok +} + +// option shortcuts + +// True if N option disabling '(' autocapture is on. +func (p *parser) useOptionN() bool { + return (p.options & ExplicitCapture) != 0 +} + +// True if I option enabling case-insensitivity is on. +func (p *parser) useOptionI() bool { + return (p.options & IgnoreCase) != 0 +} + +// True if M option altering meaning of $ and ^ is on. +func (p *parser) useOptionM() bool { + return (p.options & Multiline) != 0 +} + +// True if S option altering meaning of . is on. +func (p *parser) useOptionS() bool { + return (p.options & Singleline) != 0 +} + +// True if X option enabling whitespace/comment mode is on. +func (p *parser) useOptionX() bool { + return (p.options & IgnorePatternWhitespace) != 0 +} + +// True if E option enabling ECMAScript behavior on. +func (p *parser) useOptionE() bool { + return (p.options & ECMAScript) != 0 +} + +// true to use RE2 compatibility parsing behavior. +func (p *parser) useRE2() bool { + return (p.options & RE2) != 0 +} + +// True if U option enabling ECMAScript's Unicode behavior on. +func (p *parser) useOptionU() bool { + return (p.options & Unicode) != 0 +} + +// True if options stack is empty. 
+func (p *parser) emptyOptionsStack() bool { + return len(p.optionsStack) == 0 +} + +// Finish the current quantifiable (when a quantifier is not found or is not possible) +func (p *parser) addConcatenate() { + // The first (| inside a Testgroup group goes directly to the group + p.concatenation.addChild(p.unit) + p.unit = nil +} + +// Finish the current quantifiable (when a quantifier is found) +func (p *parser) addConcatenate3(lazy bool, min, max int) { + p.concatenation.addChild(p.unit.makeQuantifier(lazy, min, max)) + p.unit = nil +} + +// Sets the current unit to a single char node +func (p *parser) addUnitOne(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntOne, p.options, ch) +} + +// Sets the current unit to a single inverse-char node +func (p *parser) addUnitNotone(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntNotone, p.options, ch) +} + +// Sets the current unit to a single set node +func (p *parser) addUnitSet(set *CharSet) { + p.unit = newRegexNodeSet(ntSet, p.options, set) +} + +// Sets the current unit to a subtree +func (p *parser) addUnitNode(node *regexNode) { + p.unit = node +} + +// Sets the current unit to an assertion of the specified type +func (p *parser) addUnitType(t nodeType) { + p.unit = newRegexNode(t, p.options) +} + +// Finish the current group (in response to a ')' or end) +func (p *parser) addGroup() error { + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + if (p.group.t == ntTestref && len(p.group.children) > 2) || len(p.group.children) > 3 { + return p.getErr(ErrTooManyAlternates) + } + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + p.group.addChild(p.alternation) + } + + p.unit = p.group + return nil +} + +// Pops the option stack, but keeps the current options unchanged. 
+func (p *parser) popKeepOptions() { + lastIdx := len(p.optionsStack) - 1 + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Recalls options from the stack. +func (p *parser) popOptions() { + lastIdx := len(p.optionsStack) - 1 + // get the last item on the stack and then remove it by reslicing + p.options = p.optionsStack[lastIdx] + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Saves options on a stack. +func (p *parser) pushOptions() { + p.optionsStack = append(p.optionsStack, p.options) +} + +// Add a string to the last concatenate. +func (p *parser) addToConcatenate(pos, cch int, isReplacement bool) { + var node *regexNode + + if cch == 0 { + return + } + + if cch > 1 { + str := make([]rune, cch) + copy(str, p.pattern[pos:pos+cch]) + + if p.useOptionI() && !isReplacement { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. 
+ for i := 0; i < len(str); i++ { + str[i] = unicode.ToLower(str[i]) + } + } + + node = newRegexNodeStr(ntMulti, p.options, str) + } else { + ch := p.charAt(pos) + + if p.useOptionI() && !isReplacement { + ch = unicode.ToLower(ch) + } + + node = newRegexNodeCh(ntOne, p.options, ch) + } + + p.concatenation.addChild(node) +} + +// Push the parser state (in response to an open paren) +func (p *parser) pushGroup() { + p.group.next = p.stack + p.alternation.next = p.group + p.concatenation.next = p.alternation + p.stack = p.concatenation +} + +// Remember the pushed state (in response to a ')') +func (p *parser) popGroup() error { + p.concatenation = p.stack + p.alternation = p.concatenation.next + p.group = p.alternation.next + p.stack = p.group.next + + // The first () inside a Testgroup group goes directly to the group + if p.group.t == ntTestgroup && len(p.group.children) == 0 { + if p.unit == nil { + return p.getErr(ErrConditionalExpression) + } + + p.group.addChild(p.unit) + p.unit = nil + } + return nil +} + +// True if the group stack is empty. +func (p *parser) emptyStack() bool { + return p.stack == nil +} + +// Start a new round for the parser state (in response to an open paren or string start) +func (p *parser) startGroup(openGroup *regexNode) { + p.group = openGroup + p.alternation = newRegexNode(ntAlternate, p.options) + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// Finish the current concatenation (in response to a |) +func (p *parser) addAlternate() { + // The | parts inside a Testgroup group go directly to the group + + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + } + + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// For categorizing ascii characters. 
+ +const ( + Q byte = 5 // quantifier + S = 4 // ordinary stopper + Z = 3 // ScanBlank stopper + X = 2 // whitespace + E = 1 // should be escaped +) + +var _category = []byte{ + //01 2 3 4 5 6 7 8 9 A B C D E F 0 1 2 3 4 5 6 7 8 9 A B C D E F + 0, 0, 0, 0, 0, 0, 0, 0, 0, X, X, X, X, X, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // ! " # $ % & ' ( ) * + , - . / 0 1 2 3 4 5 6 7 8 9 : ; < = > ? + X, 0, 0, Z, S, 0, 0, 0, S, S, Q, Q, 0, 0, S, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, + //@A B C D E F G H I J K L M N O P Q R S T U V W X Y Z [ \ ] ^ _ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, S, S, 0, S, 0, + //'a b c d e f g h i j k l m n o p q r s t u v w x y z { | } ~ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, S, 0, 0, 0, +} + +func isSpace(ch rune) bool { + return (ch <= ' ' && _category[ch] == X) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isSpecial(ch rune) bool { + return (ch <= '|' && _category[ch] >= S) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isStopperX(ch rune) bool { + return (ch <= '|' && _category[ch] >= X) +} + +// Returns true for those characters that begin a quantifier. 
+func isQuantifier(ch rune) bool { + return (ch <= '{' && _category[ch] >= Q) +} + +func (p *parser) isTrueQuantifier() bool { + nChars := p.charsRight() + if nChars == 0 { + return false + } + + startpos := p.textpos() + ch := p.charAt(startpos) + if ch != '{' { + return ch <= '{' && _category[ch] >= Q + } + + //UGLY: this is ugly -- the original code was ugly too + pos := startpos + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + if nChars == 0 || pos-startpos == 1 { + return false + } + if ch == '}' { + return true + } + if ch != ',' { + return false + } + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + return nChars > 0 && ch == '}' +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/prefix.go b/vendor/github.com/dlclark/regexp2/syntax/prefix.go new file mode 100644 index 0000000..f671688 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/prefix.go @@ -0,0 +1,896 @@ +package syntax + +import ( + "bytes" + "fmt" + "strconv" + "unicode" + "unicode/utf8" +) + +type Prefix struct { + PrefixStr []rune + PrefixSet CharSet + CaseInsensitive bool +} + +// It takes a RegexTree and computes the set of chars that can start it. +func getFirstCharsPrefix(tree *RegexTree) *Prefix { + s := regexFcd{ + fcStack: make([]regexFc, 32), + intStack: make([]int, 32), + } + fc := s.regexFCFromRegexTree(tree) + + if fc == nil || fc.nullable || fc.cc.IsEmpty() { + return nil + } + fcSet := fc.getFirstChars() + return &Prefix{PrefixSet: fcSet, CaseInsensitive: fc.caseInsensitive} +} + +type regexFcd struct { + intStack []int + intDepth int + fcStack []regexFc + fcDepth int + skipAllChildren bool // don't process any more children at the current level + skipchild bool // don't process the current child. + failed bool +} + +/* + * The main FC computation. 
It does a shortcutted depth-first walk + * through the tree and calls CalculateFC to emits code before + * and after each child of an interior node, and at each leaf. + */ +func (s *regexFcd) regexFCFromRegexTree(tree *RegexTree) *regexFc { + curNode := tree.root + curChild := 0 + + for { + if len(curNode.children) == 0 { + // This is a leaf node + s.calculateFC(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) && !s.skipAllChildren { + // This is an interior node, and we have more children to analyze + s.calculateFC(curNode.t|beforeChild, curNode, curChild) + + if !s.skipchild { + curNode = curNode.children[curChild] + // this stack is how we get a depth first walk of the tree. + s.pushInt(curChild) + curChild = 0 + } else { + curChild++ + s.skipchild = false + } + continue + } + + // This is an interior node where we've finished analyzing all the children, or + // the end of a leaf node. + s.skipAllChildren = false + + if s.intIsEmpty() { + break + } + + curChild = s.popInt() + curNode = curNode.next + + s.calculateFC(curNode.t|afterChild, curNode, curChild) + if s.failed { + return nil + } + + curChild++ + } + + if s.fcIsEmpty() { + return nil + } + + return s.popFC() +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (s *regexFcd) pushInt(I int) { + if s.intDepth >= len(s.intStack) { + expanded := make([]int, s.intDepth*2) + copy(expanded, s.intStack) + s.intStack = expanded + } + + s.intStack[s.intDepth] = I + s.intDepth++ +} + +// True if the stack is empty. +func (s *regexFcd) intIsEmpty() bool { + return s.intDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popInt() int { + s.intDepth-- + return s.intStack[s.intDepth] +} + +// We also use a stack of RegexFC objects. +// This is the push. 
+func (s *regexFcd) pushFC(fc regexFc) { + if s.fcDepth >= len(s.fcStack) { + expanded := make([]regexFc, s.fcDepth*2) + copy(expanded, s.fcStack) + s.fcStack = expanded + } + + s.fcStack[s.fcDepth] = fc + s.fcDepth++ +} + +// True if the stack is empty. +func (s *regexFcd) fcIsEmpty() bool { + return s.fcDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popFC() *regexFc { + s.fcDepth-- + return &s.fcStack[s.fcDepth] +} + +// This is the top. +func (s *regexFcd) topFC() *regexFc { + return &s.fcStack[s.fcDepth-1] +} + +// Called in Beforechild to prevent further processing of the current child +func (s *regexFcd) skipChild() { + s.skipchild = true +} + +// FC computation and shortcut cases for each node type +func (s *regexFcd) calculateFC(nt nodeType, node *regexNode, CurIndex int) { + //fmt.Printf("NodeType: %v, CurIndex: %v, Desc: %v\n", nt, CurIndex, node.description()) + ci := false + rtl := false + + if nt <= ntRef { + if (node.options & IgnoreCase) != 0 { + ci = true + } + if (node.options & RightToLeft) != 0 { + rtl = true + } + } + + switch nt { + case ntConcatenate | beforeChild, ntAlternate | beforeChild, ntTestref | beforeChild, ntLoop | beforeChild, ntLazyloop | beforeChild: + break + + case ntTestgroup | beforeChild: + if CurIndex == 0 { + s.skipChild() + } + break + + case ntEmpty: + s.pushFC(regexFc{nullable: true}) + break + + case ntConcatenate | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, true) + } + + fc := s.topFC() + if !fc.nullable { + s.skipAllChildren = true + } + break + + case ntTestgroup | afterChild: + if CurIndex > 1 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntAlternate | afterChild, ntTestref | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntLoop | afterChild, ntLazyloop | afterChild: + if node.m == 0 { + 
fc := s.topFC() + fc.nullable = true + } + break + + case ntGroup | beforeChild, ntGroup | afterChild, ntCapture | beforeChild, ntCapture | afterChild, ntGreedy | beforeChild, ntGreedy | afterChild: + break + + case ntRequire | beforeChild, ntPrevent | beforeChild: + s.skipChild() + s.pushFC(regexFc{nullable: true}) + break + + case ntRequire | afterChild, ntPrevent | afterChild: + break + + case ntOne, ntNotone: + s.pushFC(newRegexFc(node.ch, nt == ntNotone, false, ci)) + break + + case ntOneloop, ntOnelazy: + s.pushFC(newRegexFc(node.ch, false, node.m == 0, ci)) + break + + case ntNotoneloop, ntNotonelazy: + s.pushFC(newRegexFc(node.ch, true, node.m == 0, ci)) + break + + case ntMulti: + if len(node.str) == 0 { + s.pushFC(regexFc{nullable: true}) + } else if !rtl { + s.pushFC(newRegexFc(node.str[0], false, false, ci)) + } else { + s.pushFC(newRegexFc(node.str[len(node.str)-1], false, false, ci)) + } + break + + case ntSet: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: false, caseInsensitive: ci}) + break + + case ntSetloop, ntSetlazy: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: node.m == 0, caseInsensitive: ci}) + break + + case ntRef: + s.pushFC(regexFc{cc: *AnyClass(), nullable: true, caseInsensitive: false}) + break + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + s.pushFC(regexFc{nullable: true}) + break + + default: + panic(fmt.Sprintf("unexpected op code: %v", nt)) + } +} + +type regexFc struct { + cc CharSet + nullable bool + caseInsensitive bool +} + +func newRegexFc(ch rune, not, nullable, caseInsensitive bool) regexFc { + r := regexFc{ + caseInsensitive: caseInsensitive, + nullable: nullable, + } + if not { + if ch > 0 { + r.cc.addRange('\x00', ch-1) + } + if ch < 0xFFFF { + r.cc.addRange(ch+1, utf8.MaxRune) + } + } else { + r.cc.addRange(ch, ch) + } + return r +} + +func (r *regexFc) getFirstChars() CharSet { + if r.caseInsensitive { + r.cc.addLowercase() + 
} + + return r.cc +} + +func (r *regexFc) addFC(fc regexFc, concatenate bool) bool { + if !r.cc.IsMergeable() || !fc.cc.IsMergeable() { + return false + } + + if concatenate { + if !r.nullable { + return true + } + + if !fc.nullable { + r.nullable = false + } + } else { + if fc.nullable { + r.nullable = true + } + } + + r.caseInsensitive = r.caseInsensitive || fc.caseInsensitive + r.cc.addSet(fc.cc) + + return true +} + +// This is a related computation: it takes a RegexTree and computes the +// leading substring if it sees one. It's quite trivial and gives up easily. +func getPrefix(tree *RegexTree) *Prefix { + var concatNode *regexNode + nextChild := 0 + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntOneloop, ntOnelazy: + if curNode.m > 0 { + return &Prefix{ + PrefixStr: repeat(curNode.ch, curNode.m), + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + } + return nil + + case ntOne: + return &Prefix{ + PrefixStr: []rune{curNode.ch}, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntMulti: + return &Prefix{ + PrefixStr: curNode.str, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntBol, ntEol, ntBoundary, ntECMABoundary, ntBeginning, ntStart, + ntEndZ, ntEnd, ntEmpty, ntRequire, ntPrevent: + + default: + return nil + } + + if concatNode == nil || nextChild >= len(concatNode.children) { + return nil + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +// repeat the rune r, c times... 
up to the max of MaxPrefixSize +func repeat(r rune, c int) []rune { + if c > MaxPrefixSize { + c = MaxPrefixSize + } + + ret := make([]rune, c) + + // binary growth using copy for speed + ret[0] = r + bp := 1 + for bp < len(ret) { + copy(ret[bp:], ret[:bp]) + bp *= 2 + } + + return ret +} + +// BmPrefix precomputes the Boyer-Moore +// tables for fast string scanning. These tables allow +// you to scan for the first occurrence of a string within +// a large body of text without examining every character. +// The performance of the heuristic depends on the actual +// string and the text being searched, but usually, the longer +// the string that is being searched for, the fewer characters +// need to be examined. +type BmPrefix struct { + positive []int + negativeASCII []int + negativeUnicode [][]int + pattern []rune + lowASCII rune + highASCII rune + rightToLeft bool + caseInsensitive bool +} + +func newBmPrefix(pattern []rune, caseInsensitive, rightToLeft bool) *BmPrefix { + + b := &BmPrefix{ + rightToLeft: rightToLeft, + caseInsensitive: caseInsensitive, + pattern: pattern, + } + + if caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. + + b.pattern[i] = unicode.ToLower(b.pattern[i]) + } + } + + var beforefirst, last, bump int + var scan, match int + + if !rightToLeft { + beforefirst = -1 + last = len(b.pattern) - 1 + bump = 1 + } else { + beforefirst = len(b.pattern) + last = 0 + bump = -1 + } + + // PART I - the good-suffix shift table + // + // compute the positive requirement: + // if char "i" is the first one from the right that doesn't match, + // then we know the matcher can advance by _positive[i]. 
+ // + // This algorithm is a simplified variant of the standard + // Boyer-Moore good suffix calculation. + + b.positive = make([]int, len(b.pattern)) + + examine := last + ch := b.pattern[examine] + b.positive[examine] = bump + examine -= bump + +Outerloop: + for { + // find an internal char (examine) that matches the tail + + for { + if examine == beforefirst { + break Outerloop + } + if b.pattern[examine] == ch { + break + } + examine -= bump + } + + match = last + scan = examine + + // find the length of the match + for { + if scan == beforefirst || b.pattern[match] != b.pattern[scan] { + // at the end of the match, note the difference in _positive + // this is not the length of the match, but the distance from the internal match + // to the tail suffix. + if b.positive[match] == 0 { + b.positive[match] = match - scan + } + + // System.Diagnostics.Debug.WriteLine("Set positive[" + match + "] to " + (match - scan)); + + break + } + + scan -= bump + match -= bump + } + + examine -= bump + } + + match = last - bump + + // scan for the chars for which there are no shifts that yield a different candidate + + // The inside of the if statement used to say + // "_positive[match] = last - beforefirst;" + // This is slightly less aggressive in how much we skip, but at worst it + // should mean a little more work rather than skipping a potential match. + for match != beforefirst { + if b.positive[match] == 0 { + b.positive[match] = bump + } + + match -= bump + } + + // PART II - the bad-character shift table + // + // compute the negative requirement: + // if char "ch" is the reject character when testing position "i", + // we can slide up by _negative[ch]; + // (_negative[ch] = str.Length - 1 - str.LastIndexOf(ch)) + // + // the lookup table is divided into ASCII and Unicode portions; + // only those parts of the Unicode 16-bit code set that actually + // appear in the string are in the table. (Maximum size with + // Unicode is 65K; ASCII only case is 512 bytes.) 
+ + b.negativeASCII = make([]int, 128) + + for i := 0; i < len(b.negativeASCII); i++ { + b.negativeASCII[i] = last - beforefirst + } + + b.lowASCII = 127 + b.highASCII = 0 + + for examine = last; examine != beforefirst; examine -= bump { + ch = b.pattern[examine] + + switch { + case ch < 128: + if b.lowASCII > ch { + b.lowASCII = ch + } + + if b.highASCII < ch { + b.highASCII = ch + } + + if b.negativeASCII[ch] == last-beforefirst { + b.negativeASCII[ch] = last - examine + } + case ch <= 0xffff: + i, j := ch>>8, ch&0xFF + + if b.negativeUnicode == nil { + b.negativeUnicode = make([][]int, 256) + } + + if b.negativeUnicode[i] == nil { + newarray := make([]int, 256) + + for k := 0; k < len(newarray); k++ { + newarray[k] = last - beforefirst + } + + if i == 0 { + copy(newarray, b.negativeASCII) + //TODO: this line needed? + b.negativeASCII = newarray + } + + b.negativeUnicode[i] = newarray + } + + if b.negativeUnicode[i][j] == last-beforefirst { + b.negativeUnicode[i][j] = last - examine + } + default: + // we can't do the filter because this algo doesn't support + // unicode chars >0xffff + return nil + } + } + + return b +} + +func (b *BmPrefix) String() string { + return string(b.pattern) +} + +// Dump returns the contents of the filter as a human readable string +func (b *BmPrefix) Dump(indent string) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%sBM Pattern: %s\n%sPositive: ", indent, string(b.pattern), indent) + for i := 0; i < len(b.positive); i++ { + buf.WriteString(strconv.Itoa(b.positive[i])) + buf.WriteRune(' ') + } + buf.WriteRune('\n') + + if b.negativeASCII != nil { + buf.WriteString(indent) + buf.WriteString("Negative table\n") + for i := 0; i < len(b.negativeASCII); i++ { + if b.negativeASCII[i] != len(b.pattern) { + fmt.Fprintf(buf, "%s %s %s\n", indent, Escape(string(rune(i))), strconv.Itoa(b.negativeASCII[i])) + } + } + } + + return buf.String() +} + +// Scan uses the Boyer-Moore algorithm to find the first occurrence +// of the specified 
string within text, beginning at index, and +// constrained within beglimit and endlimit. +// +// The direction and case-sensitivity of the match is determined +// by the arguments to the RegexBoyerMoore constructor. +func (b *BmPrefix) Scan(text []rune, index, beglimit, endlimit int) int { + var ( + defadv, test, test2 int + match, startmatch, endmatch int + bump, advance int + chTest rune + unicodeLookup []int + ) + + if !b.rightToLeft { + defadv = len(b.pattern) + startmatch = len(b.pattern) - 1 + endmatch = 0 + test = index + defadv - 1 + bump = 1 + } else { + defadv = -len(b.pattern) + startmatch = 0 + endmatch = -defadv - 1 + test = index + defadv + bump = -1 + } + + chMatch := b.pattern[startmatch] + + for { + if test >= endlimit || test < beglimit { + return -1 + } + + chTest = text[test] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != chMatch { + if chTest < 128 { + advance = b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + advance = unicodeLookup[chTest&0xFF] + } else { + advance = defadv + } + } else { + advance = defadv + } + + test += advance + } else { // if (chTest == chMatch) + test2 = test + match = startmatch + + for { + if match == endmatch { + if b.rightToLeft { + return test2 + 1 + } else { + return test2 + } + } + + match -= bump + test2 -= bump + + chTest = text[test2] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != b.pattern[match] { + advance = b.positive[match] + if chTest < 128 { + test2 = (match - startmatch) + b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + test2 = (match - startmatch) + unicodeLookup[chTest&0xFF] + } else { + test += advance + break + } + } else { + test += advance + break + } + + if b.rightToLeft { + if test2 < advance { + 
advance = test2 + } + } else if test2 > advance { + advance = test2 + } + + test += advance + break + } + } + } + } +} + +// When a regex is anchored, we can do a quick IsMatch test instead of a Scan +func (b *BmPrefix) IsMatch(text []rune, index, beglimit, endlimit int) bool { + if !b.rightToLeft { + if index < beglimit || endlimit-index < len(b.pattern) { + return false + } + + return b.matchPattern(text, index) + } else { + if index > endlimit || index-beglimit < len(b.pattern) { + return false + } + + return b.matchPattern(text, index-len(b.pattern)) + } +} + +func (b *BmPrefix) matchPattern(text []rune, index int) bool { + if len(text)-index < len(b.pattern) { + return false + } + + if b.caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + //Debug.Assert(textinfo.ToLower(_pattern[i]) == _pattern[i], "pattern should be converted to lower case in constructor!"); + if unicode.ToLower(text[index+i]) != b.pattern[i] { + return false + } + } + return true + } else { + for i := 0; i < len(b.pattern); i++ { + if text[index+i] != b.pattern[i] { + return false + } + } + return true + } +} + +type AnchorLoc int16 + +// where the regex can be pegged +const ( + AnchorBeginning AnchorLoc = 0x0001 + AnchorBol = 0x0002 + AnchorStart = 0x0004 + AnchorEol = 0x0008 + AnchorEndZ = 0x0010 + AnchorEnd = 0x0020 + AnchorBoundary = 0x0040 + AnchorECMABoundary = 0x0080 +) + +func getAnchors(tree *RegexTree) AnchorLoc { + + var concatNode *regexNode + nextChild, result := 0, AnchorLoc(0) + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntBol, ntEol, ntBoundary, ntECMABoundary, ntBeginning, + ntStart, ntEndZ, ntEnd: + return result | anchorFromType(curNode.t) + + case ntEmpty, ntRequire, ntPrevent: + + default: + return result + } + + if concatNode == nil || nextChild >= 
len(concatNode.children) { + return result + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +func anchorFromType(t nodeType) AnchorLoc { + switch t { + case ntBol: + return AnchorBol + case ntEol: + return AnchorEol + case ntBoundary: + return AnchorBoundary + case ntECMABoundary: + return AnchorECMABoundary + case ntBeginning: + return AnchorBeginning + case ntStart: + return AnchorStart + case ntEndZ: + return AnchorEndZ + case ntEnd: + return AnchorEnd + default: + return 0 + } +} + +// anchorDescription returns a human-readable description of the anchors +func (anchors AnchorLoc) String() string { + buf := &bytes.Buffer{} + + if 0 != (anchors & AnchorBeginning) { + buf.WriteString(", Beginning") + } + if 0 != (anchors & AnchorStart) { + buf.WriteString(", Start") + } + if 0 != (anchors & AnchorBol) { + buf.WriteString(", Bol") + } + if 0 != (anchors & AnchorBoundary) { + buf.WriteString(", Boundary") + } + if 0 != (anchors & AnchorECMABoundary) { + buf.WriteString(", ECMABoundary") + } + if 0 != (anchors & AnchorEol) { + buf.WriteString(", Eol") + } + if 0 != (anchors & AnchorEnd) { + buf.WriteString(", End") + } + if 0 != (anchors & AnchorEndZ) { + buf.WriteString(", EndZ") + } + + // trim off comma + if buf.Len() >= 2 { + return buf.String()[2:] + } + return "None" +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go new file mode 100644 index 0000000..bcf4d3f --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go @@ -0,0 +1,87 @@ +package syntax + +import ( + "bytes" + "errors" +) + +type ReplacerData struct { + Rep string + Strings []string + Rules []int +} + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +//ErrReplacementError is a general error during parsing the replacement text +var ErrReplacementError = errors.New("Replacement pattern 
error.") + +// NewReplacerData will populate a reusable replacer data struct based on the given replacement string +// and the capture group data from a regexp +func NewReplacerData(rep string, caps map[int]int, capsize int, capnames map[string]int, op RegexOptions) (*ReplacerData, error) { + p := parser{ + options: op, + caps: caps, + capsize: capsize, + capnames: capnames, + } + p.setPattern(rep) + concat, err := p.scanReplacement() + if err != nil { + return nil, err + } + + if concat.t != ntConcatenate { + panic(ErrReplacementError) + } + + sb := &bytes.Buffer{} + var ( + strings []string + rules []int + ) + + for _, child := range concat.children { + switch child.t { + case ntMulti: + child.writeStrToBuf(sb) + + case ntOne: + sb.WriteRune(child.ch) + + case ntRef: + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + sb.Reset() + } + slot := child.m + + if len(caps) > 0 && slot >= 0 { + slot = caps[slot] + } + + rules = append(rules, -replaceSpecials-1-slot) + + default: + panic(ErrReplacementError) + } + } + + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + } + + return &ReplacerData{ + Rep: rep, + Strings: strings, + Rules: rules, + }, nil +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/tree.go b/vendor/github.com/dlclark/regexp2/syntax/tree.go new file mode 100644 index 0000000..ea28829 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/tree.go @@ -0,0 +1,654 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "strconv" +) + +type RegexTree struct { + root *regexNode + caps map[int]int + capnumlist []int + captop int + Capnames map[string]int + Caplist []string + options RegexOptions +} + +// It is built into a parsed tree for a regular expression. 
+ +// Implementation notes: +// +// Since the node tree is a temporary data structure only used +// during compilation of the regexp to integer codes, it's +// designed for clarity and convenience rather than +// space efficiency. +// +// RegexNodes are built into a tree, linked by the n.children list. +// Each node also has a n.parent and n.ichild member indicating +// its parent and which child # it is in its parent's list. +// +// RegexNodes come in as many types as there are constructs in +// a regular expression, for example, "concatenate", "alternate", +// "one", "rept", "group". There are also node types for basic +// peephole optimizations, e.g., "onerep", "notsetrep", etc. +// +// Because perl 5 allows "lookback" groups that scan backwards, +// each node also gets a "direction". Normally the value of +// boolean n.backward = false. +// +// During parsing, top-level nodes are also stacked onto a parse +// stack (a stack of trees). For this purpose we have a n.next +// pointer. [Note that to save a few bytes, we could overload the +// n.parent pointer instead.] +// +// On the parse stack, each tree has a "role" - basically, the +// nonterminal in the grammar that the parser has currently +// assigned to the tree. That code is stored in n.role. +// +// Finally, some of the different kinds of nodes have data. 
+// Two integers (for the looping constructs) are stored in +// n.operands, an an object (either a string or a set) +// is stored in n.data +type regexNode struct { + t nodeType + children []*regexNode + str []rune + set *CharSet + ch rune + m int + n int + options RegexOptions + next *regexNode +} + +type nodeType int32 + +const ( + // The following are leaves, and correspond to primitive operations + + ntOnerep nodeType = 0 // lef,back char,min,max a {n} + ntNotonerep = 1 // lef,back char,min,max .{n} + ntSetrep = 2 // lef,back set,min,max [\d]{n} + ntOneloop = 3 // lef,back char,min,max a {,n} + ntNotoneloop = 4 // lef,back char,min,max .{,n} + ntSetloop = 5 // lef,back set,min,max [\d]{,n} + ntOnelazy = 6 // lef,back char,min,max a {,n}? + ntNotonelazy = 7 // lef,back char,min,max .{,n}? + ntSetlazy = 8 // lef,back set,min,max [\d]{,n}? + ntOne = 9 // lef char a + ntNotone = 10 // lef char [^a] + ntSet = 11 // lef set [a-z\s] \w \s \d + ntMulti = 12 // lef string abcd + ntRef = 13 // lef group \# + ntBol = 14 // ^ + ntEol = 15 // $ + ntBoundary = 16 // \b + ntNonboundary = 17 // \B + ntBeginning = 18 // \A + ntStart = 19 // \G + ntEndZ = 20 // \Z + ntEnd = 21 // \Z + + // Interior nodes do not correspond to primitive operations, but + // control structures compositing other operations + + // Concat and alternate take n children, and can run forward or backwards + + ntNothing = 22 // [] + ntEmpty = 23 // () + ntAlternate = 24 // a|b + ntConcatenate = 25 // ab + ntLoop = 26 // m,x * + ? {,} + ntLazyloop = 27 // m,x *? +? ?? {,}? + ntCapture = 28 // n () + ntGroup = 29 // (?:) + ntRequire = 30 // (?=) (?<=) + ntPrevent = 31 // (?!) (?) (?<) + ntTestref = 33 // (?(n) | ) + ntTestgroup = 34 // (?(...) 
| ) + + ntECMABoundary = 41 // \b + ntNonECMABoundary = 42 // \B +) + +func newRegexNode(t nodeType, opt RegexOptions) *regexNode { + return ®exNode{ + t: t, + options: opt, + } +} + +func newRegexNodeCh(t nodeType, opt RegexOptions, ch rune) *regexNode { + return ®exNode{ + t: t, + options: opt, + ch: ch, + } +} + +func newRegexNodeStr(t nodeType, opt RegexOptions, str []rune) *regexNode { + return ®exNode{ + t: t, + options: opt, + str: str, + } +} + +func newRegexNodeSet(t nodeType, opt RegexOptions, set *CharSet) *regexNode { + return ®exNode{ + t: t, + options: opt, + set: set, + } +} + +func newRegexNodeM(t nodeType, opt RegexOptions, m int) *regexNode { + return ®exNode{ + t: t, + options: opt, + m: m, + } +} +func newRegexNodeMN(t nodeType, opt RegexOptions, m, n int) *regexNode { + return ®exNode{ + t: t, + options: opt, + m: m, + n: n, + } +} + +func (n *regexNode) writeStrToBuf(buf *bytes.Buffer) { + for i := 0; i < len(n.str); i++ { + buf.WriteRune(n.str[i]) + } +} + +func (n *regexNode) addChild(child *regexNode) { + reduced := child.reduce() + n.children = append(n.children, reduced) + reduced.next = n +} + +func (n *regexNode) insertChildren(afterIndex int, nodes []*regexNode) { + newChildren := make([]*regexNode, 0, len(n.children)+len(nodes)) + n.children = append(append(append(newChildren, n.children[:afterIndex]...), nodes...), n.children[afterIndex:]...) +} + +// removes children including the start but not the end index +func (n *regexNode) removeChildren(startIndex, endIndex int) { + n.children = append(n.children[:startIndex], n.children[endIndex:]...) 
+} + +// Pass type as OneLazy or OneLoop +func (n *regexNode) makeRep(t nodeType, min, max int) { + n.t += (t - ntOne) + n.m = min + n.n = max +} + +func (n *regexNode) reduce() *regexNode { + switch n.t { + case ntAlternate: + return n.reduceAlternation() + + case ntConcatenate: + return n.reduceConcatenation() + + case ntLoop, ntLazyloop: + return n.reduceRep() + + case ntGroup: + return n.reduceGroup() + + case ntSet, ntSetloop: + return n.reduceSet() + + default: + return n + } +} + +// Basic optimization. Single-letter alternations can be replaced +// by faster set specifications, and nested alternations with no +// intervening operators can be flattened: +// +// a|b|c|def|g|h -> [a-c]|def|[gh] +// apple|(?:orange|pear)|grape -> apple|orange|pear|grape +func (n *regexNode) reduceAlternation() *regexNode { + if len(n.children) == 0 { + return newRegexNode(ntNothing, n.options) + } + + wasLastSet := false + lastNodeCannotMerge := false + var optionsLast RegexOptions + var i, j int + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + at := n.children[i] + + if j < i { + n.children[j] = at + } + + for { + if at.t == ntAlternate { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntSet || at.t == ntOne { + // Cannot merge sets if L or I options differ, or if either are negated. + optionsAt := at.options & (RightToLeft | IgnoreCase) + + if at.t == ntSet { + if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge || !at.set.IsMergeable() { + wasLastSet = true + lastNodeCannotMerge = !at.set.IsMergeable() + optionsLast = optionsAt + break + } + } else if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge { + wasLastSet = true + lastNodeCannotMerge = false + optionsLast = optionsAt + break + } + + // The last node was a Set or a One, we're a Set or One and our options are the same. + // Merge the two nodes. 
+ j-- + prev := n.children[j] + + var prevCharClass *CharSet + if prev.t == ntOne { + prevCharClass = &CharSet{} + prevCharClass.addChar(prev.ch) + } else { + prevCharClass = prev.set + } + + if at.t == ntOne { + prevCharClass.addChar(at.ch) + } else { + prevCharClass.addSet(*at.set) + } + + prev.t = ntSet + prev.set = prevCharClass + } else if at.t == ntNothing { + j-- + } else { + wasLastSet = false + lastNodeCannotMerge = false + } + break + } + } + + if j < i { + n.removeChildren(j, i) + } + + return n.stripEnation(ntNothing) +} + +// Basic optimization. Adjacent strings can be concatenated. +// +// (?:abc)(?:def) -> abcdef +func (n *regexNode) reduceConcatenation() *regexNode { + // Eliminate empties and concat adjacent strings/chars + + var optionsLast RegexOptions + var optionsAt RegexOptions + var i, j int + + if len(n.children) == 0 { + return newRegexNode(ntEmpty, n.options) + } + + wasLastString := false + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + var at, prev *regexNode + + at = n.children[i] + + if j < i { + n.children[j] = at + } + + if at.t == ntConcatenate && + ((at.options & RightToLeft) == (n.options & RightToLeft)) { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + + //insert at.children at i+1 index in n.children + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntMulti || at.t == ntOne { + // Cannot merge strings if L or I options differ + optionsAt = at.options & (RightToLeft | IgnoreCase) + + if !wasLastString || optionsLast != optionsAt { + wasLastString = true + optionsLast = optionsAt + continue + } + + j-- + prev = n.children[j] + + if prev.t == ntOne { + prev.t = ntMulti + prev.str = []rune{prev.ch} + } + + if (optionsAt & RightToLeft) == 0 { + if at.t == ntOne { + prev.str = append(prev.str, at.ch) + } else { + prev.str = append(prev.str, at.str...) 
+ } + } else { + if at.t == ntOne { + // insert at the front by expanding our slice, copying the data over, and then setting the value + prev.str = append(prev.str, 0) + copy(prev.str[1:], prev.str) + prev.str[0] = at.ch + } else { + //insert at the front...this one we'll make a new slice and copy both into it + merge := make([]rune, len(prev.str)+len(at.str)) + copy(merge, at.str) + copy(merge[len(at.str):], prev.str) + prev.str = merge + } + } + } else if at.t == ntEmpty { + j-- + } else { + wasLastString = false + } + } + + if j < i { + // remove indices j through i from the children + n.removeChildren(j, i) + } + + return n.stripEnation(ntEmpty) +} + +// Nested repeaters just get multiplied with each other if they're not +// too lumpy +func (n *regexNode) reduceRep() *regexNode { + + u := n + t := n.t + min := n.m + max := n.n + + for { + if len(u.children) == 0 { + break + } + + child := u.children[0] + + // multiply reps of the same type only + if child.t != t { + childType := child.t + + if !(childType >= ntOneloop && childType <= ntSetloop && t == ntLoop || + childType >= ntOnelazy && childType <= ntSetlazy && t == ntLazyloop) { + break + } + } + + // child can be too lumpy to blur, e.g., (a {100,105}) {3} or (a {2,})? + // [but things like (a {2,})+ are not too lumpy...] + if u.m == 0 && child.m > 1 || child.n < child.m*2 { + break + } + + u = child + if u.m > 0 { + if (math.MaxInt32-1)/u.m < min { + u.m = math.MaxInt32 + } else { + u.m = u.m * min + } + } + if u.n > 0 { + if (math.MaxInt32-1)/u.n < max { + u.n = math.MaxInt32 + } else { + u.n = u.n * max + } + } + } + + if math.MaxInt32 == min { + return newRegexNode(ntNothing, n.options) + } + return u + +} + +// Simple optimization. If a concatenation or alternation has only +// one child strip out the intermediate node. If it has zero children, +// turn it into an empty. 
+func (n *regexNode) stripEnation(emptyType nodeType) *regexNode { + switch len(n.children) { + case 0: + return newRegexNode(emptyType, n.options) + case 1: + return n.children[0] + default: + return n + } +} + +func (n *regexNode) reduceGroup() *regexNode { + u := n + + for u.t == ntGroup { + u = u.children[0] + } + + return u +} + +// Simple optimization. If a set is a singleton, an inverse singleton, +// or empty, it's transformed accordingly. +func (n *regexNode) reduceSet() *regexNode { + // Extract empty-set, one and not-one case as special + + if n.set == nil { + n.t = ntNothing + } else if n.set.IsSingleton() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntOne - ntSet) + } else if n.set.IsSingletonInverse() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntNotone - ntSet) + } + + return n +} + +func (n *regexNode) reverseLeft() *regexNode { + if n.options&RightToLeft != 0 && n.t == ntConcatenate && len(n.children) > 0 { + //reverse children order + for left, right := 0, len(n.children)-1; left < right; left, right = left+1, right-1 { + n.children[left], n.children[right] = n.children[right], n.children[left] + } + } + + return n +} + +func (n *regexNode) makeQuantifier(lazy bool, min, max int) *regexNode { + if min == 0 && max == 0 { + return newRegexNode(ntEmpty, n.options) + } + + if min == 1 && max == 1 { + return n + } + + switch n.t { + case ntOne, ntNotone, ntSet: + if lazy { + n.makeRep(Onelazy, min, max) + } else { + n.makeRep(Oneloop, min, max) + } + return n + + default: + var t nodeType + if lazy { + t = ntLazyloop + } else { + t = ntLoop + } + result := newRegexNodeMN(t, n.options, min, max) + result.addChild(n) + return result + } +} + +// debug functions + +var typeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", + "Beginning", "Start", "EndZ", "End", + 
"Nothing", "Empty", + "Alternate", "Concatenate", + "Loop", "Lazyloop", + "Capture", "Group", "Require", "Prevent", "Greedy", + "Testref", "Testgroup", + "Unknown", "Unknown", "Unknown", + "Unknown", "Unknown", "Unknown", + "ECMABoundary", "NonECMABoundary", +} + +func (n *regexNode) description() string { + buf := &bytes.Buffer{} + + buf.WriteString(typeStr[n.t]) + + if (n.options & ExplicitCapture) != 0 { + buf.WriteString("-C") + } + if (n.options & IgnoreCase) != 0 { + buf.WriteString("-I") + } + if (n.options & RightToLeft) != 0 { + buf.WriteString("-L") + } + if (n.options & Multiline) != 0 { + buf.WriteString("-M") + } + if (n.options & Singleline) != 0 { + buf.WriteString("-S") + } + if (n.options & IgnorePatternWhitespace) != 0 { + buf.WriteString("-X") + } + if (n.options & ECMAScript) != 0 { + buf.WriteString("-E") + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntOne, ntNotone: + buf.WriteString("(Ch = " + CharDescription(n.ch) + ")") + break + case ntCapture: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ", unindex = " + strconv.Itoa(n.n) + ")") + break + case ntRef, ntTestref: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ")") + break + case ntMulti: + fmt.Fprintf(buf, "(String = %s)", string(n.str)) + break + case ntSet, ntSetloop, ntSetlazy: + buf.WriteString("(Set = " + n.set.String() + ")") + break + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntSetloop, ntSetlazy, ntLoop, ntLazyloop: + buf.WriteString("(Min = ") + buf.WriteString(strconv.Itoa(n.m)) + buf.WriteString(", Max = ") + if n.n == math.MaxInt32 { + buf.WriteString("inf") + } else { + buf.WriteString(strconv.Itoa(n.n)) + } + buf.WriteString(")") + + break + } + + return buf.String() +} + +var padSpace = []byte(" ") + +func (t *RegexTree) Dump() string { + return t.root.dump() +} + +func (n *regexNode) dump() string { + var stack []int + CurNode := n + CurChild := 0 + + buf := 
bytes.NewBufferString(CurNode.description()) + buf.WriteRune('\n') + + for { + if CurNode.children != nil && CurChild < len(CurNode.children) { + stack = append(stack, CurChild+1) + CurNode = CurNode.children[CurChild] + CurChild = 0 + + Depth := len(stack) + if Depth > 32 { + Depth = 32 + } + buf.Write(padSpace[:Depth]) + buf.WriteString(CurNode.description()) + buf.WriteRune('\n') + } else { + if len(stack) == 0 { + break + } + + CurChild = stack[len(stack)-1] + stack = stack[:len(stack)-1] + CurNode = CurNode.next + } + } + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/writer.go b/vendor/github.com/dlclark/regexp2/syntax/writer.go new file mode 100644 index 0000000..a5aa11c --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/writer.go @@ -0,0 +1,500 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "os" +) + +func Write(tree *RegexTree) (*Code, error) { + w := writer{ + intStack: make([]int, 0, 32), + emitted: make([]int, 2), + stringhash: make(map[string]int), + sethash: make(map[string]int), + } + + code, err := w.codeFromTree(tree) + + if tree.options&Debug > 0 && code != nil { + os.Stdout.WriteString(code.Dump()) + os.Stdout.WriteString("\n") + } + + return code, err +} + +type writer struct { + emitted []int + + intStack []int + curpos int + stringhash map[string]int + stringtable [][]rune + sethash map[string]int + settable []*CharSet + counting bool + count int + trackcount int + caps map[int]int +} + +const ( + beforeChild nodeType = 64 + afterChild = 128 + //MaxPrefixSize is the largest number of runes we'll use for a BoyerMoyer prefix + MaxPrefixSize = 50 +) + +// The top level RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. +// +// It runs two passes, first to count the size of the generated +// code, and second to generate the code. 
+// +// We should time it against the alternative, which is +// to just generate the code and grow the array as we go. +func (w *writer) codeFromTree(tree *RegexTree) (*Code, error) { + var ( + curNode *regexNode + curChild int + capsize int + ) + // construct sparse capnum mapping if some numbers are unused + + if tree.capnumlist == nil || tree.captop == len(tree.capnumlist) { + capsize = tree.captop + w.caps = nil + } else { + capsize = len(tree.capnumlist) + w.caps = tree.caps + for i := 0; i < len(tree.capnumlist); i++ { + w.caps[tree.capnumlist[i]] = i + } + } + + w.counting = true + + for { + if !w.counting { + w.emitted = make([]int, w.count) + } + + curNode = tree.root + curChild = 0 + + w.emit1(Lazybranch, 0) + + for { + if len(curNode.children) == 0 { + w.emitFragment(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) { + w.emitFragment(curNode.t|beforeChild, curNode, curChild) + + curNode = curNode.children[curChild] + + w.pushInt(curChild) + curChild = 0 + continue + } + + if w.emptyStack() { + break + } + + curChild = w.popInt() + curNode = curNode.next + + w.emitFragment(curNode.t|afterChild, curNode, curChild) + curChild++ + } + + w.patchJump(0, w.curPos()) + w.emit(Stop) + + if !w.counting { + break + } + + w.counting = false + } + + fcPrefix := getFirstCharsPrefix(tree) + prefix := getPrefix(tree) + rtl := (tree.options & RightToLeft) != 0 + + var bmPrefix *BmPrefix + //TODO: benchmark string prefixes + if prefix != nil && len(prefix.PrefixStr) > 0 && MaxPrefixSize > 0 { + if len(prefix.PrefixStr) > MaxPrefixSize { + // limit prefix changes to 10k + prefix.PrefixStr = prefix.PrefixStr[:MaxPrefixSize] + } + bmPrefix = newBmPrefix(prefix.PrefixStr, prefix.CaseInsensitive, rtl) + } else { + bmPrefix = nil + } + + return &Code{ + Codes: w.emitted, + Strings: w.stringtable, + Sets: w.settable, + TrackCount: w.trackcount, + Caps: w.caps, + Capsize: capsize, + FcPrefix: fcPrefix, + BmPrefix: bmPrefix, + Anchors: getAnchors(tree), + 
RightToLeft: rtl, + }, nil +} + +// The main RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. +func (w *writer) emitFragment(nodetype nodeType, node *regexNode, curIndex int) error { + bits := InstOp(0) + + if nodetype <= ntRef { + if (node.options & RightToLeft) != 0 { + bits |= Rtl + } + if (node.options & IgnoreCase) != 0 { + bits |= Ci + } + } + ntBits := nodeType(bits) + + switch nodetype { + case ntConcatenate | beforeChild, ntConcatenate | afterChild, ntEmpty: + break + + case ntAlternate | beforeChild: + if curIndex < len(node.children)-1 { + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntAlternate | afterChild: + if curIndex < len(node.children)-1 { + lbPos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(lbPos, w.curPos()) + } else { + for i := 0; i < curIndex; i++ { + w.patchJump(w.popInt(), w.curPos()) + } + } + break + + case ntTestref | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + w.emit1(Testref, w.mapCapnum(node.m)) + w.emit(Forejump) + } + + case ntTestref | afterChild: + if curIndex == 0 { + branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(branchpos, w.curPos()) + w.emit(Forejump) + if len(node.children) <= 1 { + w.patchJump(w.popInt(), w.curPos()) + } + } else if curIndex == 1 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntTestgroup | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.emit(Setmark) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntTestgroup | afterChild: + if curIndex == 0 { + w.emit(Getmark) + w.emit(Forejump) + } else if curIndex == 1 { + Branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(Branchpos, w.curPos()) + w.emit(Getmark) + w.emit(Forejump) + if len(node.children) <= 2 { + w.patchJump(w.popInt(), w.curPos()) + } + 
} else if curIndex == 2 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntLoop | beforeChild, ntLazyloop | beforeChild: + + if node.n < math.MaxInt32 || node.m > 1 { + if node.m == 0 { + w.emit1(Nullcount, 0) + } else { + w.emit1(Setcount, 1-node.m) + } + } else if node.m == 0 { + w.emit(Nullmark) + } else { + w.emit(Setmark) + } + + if node.m == 0 { + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + } + w.pushInt(w.curPos()) + + case ntLoop | afterChild, ntLazyloop | afterChild: + + startJumpPos := w.curPos() + lazy := (nodetype - (ntLoop | afterChild)) + + if node.n < math.MaxInt32 || node.m > 1 { + if node.n == math.MaxInt32 { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), math.MaxInt32) + } else { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), node.n-node.m) + } + } else { + w.emit1(InstOp(Branchmark+lazy), w.popInt()) + } + + if node.m == 0 { + w.patchJump(w.popInt(), startJumpPos) + } + + case ntGroup | beforeChild, ntGroup | afterChild: + + case ntCapture | beforeChild: + w.emit(Setmark) + + case ntCapture | afterChild: + w.emit2(Capturemark, w.mapCapnum(node.m), w.mapCapnum(node.n)) + + case ntRequire | beforeChild: + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. It can be commented out with (*) + w.emit(Setjump) + + w.emit(Setmark) + + case ntRequire | afterChild: + w.emit(Getmark) + + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. 
It can be commented out with (*) + w.emit(Forejump) + + case ntPrevent | beforeChild: + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + + case ntPrevent | afterChild: + w.emit(Backjump) + w.patchJump(w.popInt(), w.curPos()) + w.emit(Forejump) + + case ntGreedy | beforeChild: + w.emit(Setjump) + + case ntGreedy | afterChild: + w.emit(Forejump) + + case ntOne, ntNotone: + w.emit1(InstOp(node.t|ntBits), int(node.ch)) + + case ntNotoneloop, ntNotonelazy, ntOneloop, ntOnelazy: + if node.m > 0 { + if node.t == ntOneloop || node.t == ntOnelazy { + w.emit2(Onerep|bits, int(node.ch), node.m) + } else { + w.emit2(Notonerep|bits, int(node.ch), node.m) + } + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), int(node.ch), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), int(node.ch), node.n-node.m) + } + } + + case ntSetloop, ntSetlazy: + if node.m > 0 { + w.emit2(Setrep|bits, w.setCode(node.set), node.m) + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), node.n-node.m) + } + } + + case ntMulti: + w.emit1(InstOp(node.t|ntBits), w.stringCode(node.str)) + + case ntSet: + w.emit1(InstOp(node.t|ntBits), w.setCode(node.set)) + + case ntRef: + w.emit1(InstOp(node.t|ntBits), w.mapCapnum(node.m)) + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + w.emit(InstOp(node.t)) + + default: + return fmt.Errorf("unexpected opcode in regular expression generation: %v", nodetype) + } + + return nil +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (w *writer) pushInt(i int) { + w.intStack = append(w.intStack, i) +} + +// Returns true if the stack is empty. +func (w *writer) emptyStack() bool { + return len(w.intStack) == 0 +} + +// This is the pop. 
+func (w *writer) popInt() int { + //get our item + idx := len(w.intStack) - 1 + i := w.intStack[idx] + //trim our slice + w.intStack = w.intStack[:idx] + return i +} + +// Returns the current position in the emitted code. +func (w *writer) curPos() int { + return w.curpos +} + +// Fixes up a jump instruction at the specified offset +// so that it jumps to the specified jumpDest. +func (w *writer) patchJump(offset, jumpDest int) { + w.emitted[offset+1] = jumpDest +} + +// Returns an index in the set table for a charset +// uses a map to eliminate duplicates. +func (w *writer) setCode(set *CharSet) int { + if w.counting { + return 0 + } + + buf := &bytes.Buffer{} + + set.mapHashFill(buf) + hash := buf.String() + i, ok := w.sethash[hash] + if !ok { + i = len(w.sethash) + w.sethash[hash] = i + w.settable = append(w.settable, set) + } + return i +} + +// Returns an index in the string table for a string. +// uses a map to eliminate duplicates. +func (w *writer) stringCode(str []rune) int { + if w.counting { + return 0 + } + + hash := string(str) + i, ok := w.stringhash[hash] + if !ok { + i = len(w.stringhash) + w.stringhash[hash] = i + w.stringtable = append(w.stringtable, str) + } + + return i +} + +// When generating code on a regex that uses a sparse set +// of capture slots, we hash them to a dense set of indices +// for an array of capture slots. Instead of doing the hash +// at match time, it's done at compile time, here. +func (w *writer) mapCapnum(capnum int) int { + if capnum == -1 { + return -1 + } + + if w.caps != nil { + return w.caps[capnum] + } + + return capnum +} + +// Emits a zero-argument operation. Note that the emit +// functions all run in two modes: they can emit code, or +// they can just count the size of the code. +func (w *writer) emit(op InstOp) { + if w.counting { + w.count++ + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ +} + +// Emits a one-argument operation. 
+func (w *writer) emit1(op InstOp, opd1 int) { + if w.counting { + w.count += 2 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ +} + +// Emits a two-argument operation. +func (w *writer) emit2(op InstOp, opd1, opd2 int) { + if w.counting { + w.count += 3 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ + w.emitted[w.curpos] = opd2 + w.curpos++ +} diff --git a/vendor/github.com/dlclark/regexp2/testoutput1 b/vendor/github.com/dlclark/regexp2/testoutput1 new file mode 100644 index 0000000..fbf63fd --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/testoutput1 @@ -0,0 +1,7061 @@ +# This set of tests is for features that are compatible with all versions of +# Perl >= 5.10, in non-UTF mode. It should run clean for the 8-bit, 16-bit, and +# 32-bit PCRE libraries, and also using the perltest.pl script. + +#forbid_utf +#newline_default lf any anycrlf +#perltest + +/the quick brown fox/ + the quick brown fox + 0: the quick brown fox + What do you know about the quick brown fox? + 0: the quick brown fox +\= Expect no match + The quick brown FOX +No match + What do you know about THE QUICK BROWN FOX? +No match + +/The quick brown fox/i + the quick brown fox + 0: the quick brown fox + The quick brown FOX + 0: The quick brown FOX + What do you know about the quick brown fox? + 0: the quick brown fox + What do you know about THE QUICK BROWN FOX? 
+ 0: THE QUICK BROWN FOX + +/abcd\t\n\r\f\a\e\071\x3b\$\\\?caxyz/ + abcd\t\n\r\f\a\e9;\$\\?caxyz + 0: abcd\x09\x0a\x0d\x0c\x07\x1b9;$\?caxyz + +/a*abc?xyz+pqr{3}ab{2,}xy{4,5}pq{0,6}AB{0,}zz/ + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + aabxyzpqrrrabbxyyyypqAzz + 0: aabxyzpqrrrabbxyyyypqAzz + aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + aaaabxyzpqrrrabbxyyyypqAzz + 0: aaaabxyzpqrrrabbxyyyypqAzz + abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz + aabcxyzpqrrrabbxyyyypqAzz + 0: aabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypAzz + 0: aaabcxyzpqrrrabbxyyyypAzz + aaabcxyzpqrrrabbxyyyypqAzz + 0: aaabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqAzz + aaabcxyzpqrrrabbxyyyypqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqqAzz + aaaabcxyzpqrrrabbxyyyypqAzz + 0: aaaabcxyzpqrrrabbxyyyypqAzz + abxyzzpqrrrabbxyyyypqAzz + 0: abxyzzpqrrrabbxyyyypqAzz + aabxyzzzpqrrrabbxyyyypqAzz + 0: aabxyzzzpqrrrabbxyyyypqAzz + aaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaabxyzzzzpqrrrabbxyyyypqAzz + aaaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabxyzzzzpqrrrabbxyyyypqAzz + abcxyzzpqrrrabbxyyyypqAzz + 0: abcxyzzpqrrrabbxyyyypqAzz + aabcxyzzzpqrrrabbxyyyypqAzz + 0: aabcxyzzzpqrrrabbxyyyypqAzz + aaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + aaabcxyzpqrrrabbxyyyypABzz + 0: aaabcxyzpqrrrabbxyyyypABzz + aaabcxyzpqrrrabbxyyyypABBzz + 0: aaabcxyzpqrrrabbxyyyypABBzz + >>>aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + >aaaabxyzpqrrrabbxyyyypqAzz + 0: 
aaaabxyzpqrrrabbxyyyypqAzz + >>>>abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz +\= Expect no match + abxyzpqrrabbxyyyypqAzz +No match + abxyzpqrrrrabbxyyyypqAzz +No match + abxyzpqrrrabxyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyypqAzz +No match + aaabcxyzpqrrrabbxyyyypqqqqqqqAzz +No match + +/^(abc){1,2}zz/ + abczz + 0: abczz + 1: abc + abcabczz + 0: abcabczz + 1: abc +\= Expect no match + zz +No match + abcabcabczz +No match + >>abczz +No match + +/^(b+?|a){1,2}?c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: b + bbbc + 0: bbbc + 1: bb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: bb + bbbc + 0: bbbc + 1: bbb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}?bc/ + bbc + 0: bbc + 1: b + +/^(b*|ba){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +/^(ba|b*){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +#/^\ca\cA\c[;\c:/ +# \x01\x01\e;z +# 0: \x01\x01\x1b;z + +/^[ab\]cde]/ + athing + 0: a + bthing + 0: b + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + fthing +No match + [thing +No match + \\thing +No match + +/^[]cde]/ + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + athing +No match + fthing +No match + +/^[^ab\]cde]/ + fthing + 0: f + [thing + 0: [ + \\thing + 0: \ +\= Expect no match + athing +No match + 
bthing +No match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +/^[^]cde]/ + athing + 0: a + fthing + 0: f +\= Expect no match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +# DLC - I don't get this one +#/^\Â/ +#  +# 0: \x81 + +#updated to handle 16-bits utf8 +/^ÿ/ + ÿ + 0: \xc3\xbf + +/^[0-9]+$/ + 0 + 0: 0 + 1 + 0: 1 + 2 + 0: 2 + 3 + 0: 3 + 4 + 0: 4 + 5 + 0: 5 + 6 + 0: 6 + 7 + 0: 7 + 8 + 0: 8 + 9 + 0: 9 + 10 + 0: 10 + 100 + 0: 100 +\= Expect no match + abc +No match + +/^.*nter/ + enter + 0: enter + inter + 0: inter + uponter + 0: uponter + +/^xxx[0-9]+$/ + xxx0 + 0: xxx0 + xxx1234 + 0: xxx1234 +\= Expect no match + xxx +No match + +/^.+[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^.+?[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^([^!]+)!(.+)=apquxz\.ixr\.zzz\.ac\.uk$/ + abc!pqr=apquxz.ixr.zzz.ac.uk + 0: abc!pqr=apquxz.ixr.zzz.ac.uk + 1: abc + 2: pqr +\= Expect no match + !pqr=apquxz.ixr.zzz.ac.uk +No match + abc!=apquxz.ixr.zzz.ac.uk +No match + abc!pqr=apquxz:ixr.zzz.ac.uk +No match + abc!pqr=apquxz.ixr.zzz.ac.ukk +No match + +/:/ + Well, we need a colon: somewhere + 0: : +\= Expect no match + Fail without a colon +No match + +/([\da-f:]+)$/i + 0abc + 0: 0abc + 1: 0abc + abc + 0: abc + 1: abc + fed + 0: fed + 1: fed + E + 0: E + 1: E + :: + 0: :: + 1: :: + 5f03:12C0::932e + 0: 5f03:12C0::932e + 1: 5f03:12C0::932e + fed def + 0: def + 1: def + Any old stuff + 0: ff + 1: ff +\= Expect no match + 0zzz +No match + gzzz +No match + fed\x20 +No match + Any old rubbish +No match + +/^.*\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/ + .1.2.3 + 0: .1.2.3 + 1: 1 + 2: 2 + 3: 3 + A.12.123.0 + 0: A.12.123.0 + 1: 12 + 2: 123 + 3: 0 +\= Expect no match + .1.2.3333 +No match + 1.2.3 +No match + 1234.2.3 +No match + 
+/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + 1 IN SOA non-sp1 non-sp2 ( + 0: 1 IN SOA non-sp1 non-sp2 ( + 1: 1 + 2: non-sp1 + 3: non-sp2 +\= Expect no match + 1IN SOA non-sp1 non-sp2( +No match + +/^[a-zA-Z\d][a-zA-Z\d\-]*(\.[a-zA-Z\d][a-zA-z\d\-]*)*\.$/ + a. + 0: a. + Z. + 0: Z. + 2. + 0: 2. + ab-c.pq-r. + 0: ab-c.pq-r. + 1: .pq-r + sxk.zzz.ac.uk. + 0: sxk.zzz.ac.uk. + 1: .uk + x-.y-. + 0: x-.y-. + 1: .y- +\= Expect no match + -abc.peq. +No match + +/^\*\.[a-z]([a-z\-\d]*[a-z\d]+)?(\.[a-z]([a-z\-\d]*[a-z\d]+)?)*$/ + *.a + 0: *.a + *.b0-a + 0: *.b0-a + 1: 0-a + *.c3-b.c + 0: *.c3-b.c + 1: 3-b + 2: .c + *.c-a.b-c + 0: *.c-a.b-c + 1: -a + 2: .b-c + 3: -c +\= Expect no match + *.0 +No match + *.a- +No match + *.a-b.c- +No match + *.c-a.0-c +No match + +/^(?=ab(de))(abd)(e)/ + abde + 0: abde + 1: de + 2: abd + 3: e + +/^(?!(ab)de|x)(abd)(f)/ + abdf + 0: abdf + 1: + 2: abd + 3: f + +/^(?=(ab(cd)))(ab)/ + abcd + 0: ab + 1: abcd + 2: cd + 3: ab + +/^[\da-f](\.[\da-f])*$/i + a.b.c.d + 0: a.b.c.d + 1: .d + A.B.C.D + 0: A.B.C.D + 1: .D + a.b.c.1.2.3.C + 0: a.b.c.1.2.3.C + 1: .C + +/^\".*\"\s*(;.*)?$/ + \"1234\" + 0: "1234" + \"abcd\" ; + 0: "abcd" ; + 1: ; + \"\" ; rhubarb + 0: "" ; rhubarb + 1: ; rhubarb +\= Expect no match + \"1234\" : things +No match + +/^$/ + \ + 0: +\= Expect no match + A non-empty line +No match + +/ ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/x + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/(?x) ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/ + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/^ a\ b[c ]d $/x + a bcd + 0: a bcd + a b d + 0: a b d +\= Expect no match + abcd +No match + ab d +No match + +/^(a(b(c)))(d(e(f)))(h(i(j)))(k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: abc + 2: bc + 3: c + 4: def + 5: ef + 6: f + 7: hij + 8: ij + 9: j +10: klm +11: lm +12: m + 
+/^(?:a(b(c)))(?:d(e(f)))(?:h(i(j)))(?:k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: bc + 2: c + 3: ef + 4: f + 5: ij + 6: j + 7: lm + 8: m + +#/^[\w][\W][\s][\S][\d][\D][\b][\n][\c]][\022]/ +# a+ Z0+\x08\n\x1d\x12 +# 0: a+ Z0+\x08\x0a\x1d\x12 + +/^[.^$|()*+?{,}]+/ + .^\$(*+)|{?,?} + 0: .^$(*+)|{?,?} + +/^a*\w/ + z + 0: z + az + 0: az + aaaz + 0: aaaz + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + a+ + 0: a + aa+ + 0: aa + +/^a*?\w/ + z + 0: z + az + 0: a + aaaz + 0: a + a + 0: a + aa + 0: a + aaaa + 0: a + a+ + 0: a + aa+ + 0: a + +/^a+\w/ + az + 0: az + aaaz + 0: aaaz + aa + 0: aa + aaaa + 0: aaaa + aa+ + 0: aa + +/^a+?\w/ + az + 0: az + aaaz + 0: aa + aa + 0: aa + aaaa + 0: aa + aa+ + 0: aa + +/^\d{8}\w{2,}/ + 1234567890 + 0: 1234567890 + 12345678ab + 0: 12345678ab + 12345678__ + 0: 12345678__ +\= Expect no match + 1234567 +No match + +/^[aeiou\d]{4,5}$/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 12345 + aaaaa + 0: aaaaa +\= Expect no match + 123456 +No match + +/^[aeiou\d]{4,5}?/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 1234 + aaaaa + 0: aaaa + 123456 + 0: 1234 + +/\A(abc|def)=(\1){2,3}\Z/ + abc=abcabc + 0: abc=abcabc + 1: abc + 2: abc + def=defdefdef + 0: def=defdefdef + 1: def + 2: def +\= Expect no match + abc=defdef +No match + +/^(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)\11*(\3\4)\1(?#)2$/ + abcdefghijkcda2 + 0: abcdefghijkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + abcdefghijkkkkcda2 + 0: abcdefghijkkkkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + +/(cat(a(ract|tonic)|erpillar)) \1()2(3)/ + cataract cataract23 + 0: cataract cataract23 + 1: cataract + 2: aract + 3: ract + 4: + 5: 3 + catatonic catatonic23 + 0: catatonic catatonic23 + 1: catatonic + 2: atonic + 3: tonic + 4: + 5: 3 + caterpillar caterpillar23 + 0: caterpillar caterpillar23 + 1: caterpillar + 2: erpillar + 3: + 4: + 5: 3 + + +/^From +([^ ]+) +[a-zA-Z][a-zA-Z][a-zA-Z] +[a-zA-Z][a-zA-Z][a-zA-Z] 
+[0-9]?[0-9] +[0-9][0-9]:[0-9][0-9]/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 1: abcd + +/^From\s+\S+\s+([a-zA-Z]{3}\s+){2}\d{1,2}\s+\d\d:\d\d/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 1: Sep + From abcd Mon Sep 1 12:33:02 1997 + 0: From abcd Mon Sep 1 12:33 + 1: Sep +\= Expect no match + From abcd Sep 01 12:33:02 1997 +No match + +/^12.34/s + 12\n34 + 0: 12\x0a34 + 12\r34 + 0: 12\x0d34 + +/\w+(?=\t)/ + the quick brown\t fox + 0: brown + +/foo(?!bar)(.*)/ + foobar is foolish see? + 0: foolish see? + 1: lish see? + +/(?:(?!foo)...|^.{0,2})bar(.*)/ + foobar crowbar etc + 0: rowbar etc + 1: etc + barrel + 0: barrel + 1: rel + 2barrel + 0: 2barrel + 1: rel + A barrel + 0: A barrel + 1: rel + +/^(\D*)(?=\d)(?!123)/ + abc456 + 0: abc + 1: abc +\= Expect no match + abc123 +No match + +/^1234(?# test newlines + inside)/ + 1234 + 0: 1234 + +/^1234 #comment in extended re + /x + 1234 + 0: 1234 + +/#rhubarb + abcd/x + abcd + 0: abcd + +/^abcd#rhubarb/x + abcd + 0: abcd + +/^(a)\1{2,3}(.)/ + aaab + 0: aaab + 1: a + 2: b + aaaab + 0: aaaab + 1: a + 2: b + aaaaab + 0: aaaaa + 1: a + 2: a + aaaaaab + 0: aaaaa + 1: a + 2: a + +/(?!^)abc/ + the abc + 0: abc +\= Expect no match + abc +No match + +/(?=^)abc/ + abc + 0: abc +\= Expect no match + the abc +No match + +/^[ab]{1,3}(ab*|b)/ + aabbbbb + 0: aabb + 1: b + +/^[ab]{1,3}?(ab*|b)/ + aabbbbb + 0: aabbbbb + 1: abbbbb + +/^[ab]{1,3}?(ab*?|b)/ + aabbbbb + 0: aa + 1: a + +/^[ab]{1,3}(ab*?|b)/ + aabbbbb + 0: aabb + 1: b + +/ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional leading comment +(?: (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... 
+[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # one word, optionally followed by.... +(?: +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] | # atom and space parts, or... +\( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) | # comments, or... + +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +# quoted strings +)* +< (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # leading < +(?: @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... 
+(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* + +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* , (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +)* # further okay, if led by comma +: # closing colon +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* )? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address spec +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* > # trailing > +# name and address +) (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional trailing comment +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" (a comment) + A. Other (a comment) + 0: Other (a comment) + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle @,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. 
+(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +# leading word +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # "normal" atoms and or spaces +(?: +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +| +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +) # "special" comment or quoted string +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # more "normal" +)* +< +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# < +(?: +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +(?: , +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +)* # additional domains +: +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +# address spec +> # > +# name and address +) +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" + A. 
Other (a comment) + 0: Other + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle ?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f + +/P[^*]TAIRE[^*]{1,6}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/P[^*]TAIRE[^*]{1,}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/(\.\d\d[1-9]?)\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 + 1.235 + 0: .235 + 1: .23 + +/(\.\d\d((?=0)|\d(?=\d)))/ + 1.230003938 + 0: .23 + 1: .23 + 2: + 1.875000282 + 0: .875 + 1: .875 + 2: 5 +\= Expect no match + 1.235 +No match + +/\b(foo)\s+(\w+)/i + Food is on the foo table + 0: foo table + 1: foo + 2: table + +/foo(.*)bar/ + The food is under the bar in the barn. + 0: food is under the bar in the bar + 1: d is under the bar in the + +/foo(.*?)bar/ + The food is under the bar in the barn. + 0: food is under the bar + 1: d is under the + +/(.*)(\d*)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 53147 + 2: + +/(.*)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d*)/ + I have 2 numbers: 53147 + 0: + 1: + 2: + +/(.*?)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 + 1: I have + 2: 2 + +/(.*)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*)\b(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*\D)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/^\D*(?!123)/ + ABC123 + 0: AB + +/^(\D*)(?=\d)(?!123)/ + ABC445 + 0: ABC + 1: ABC +\= Expect no match + ABC123 +No match + +/^[W-]46]/ + W46]789 + 0: W46] + -46]789 + 0: -46] +\= Expect no match + Wall +No match + Zebra +No match + 
42 +No match + [abcd] +No match + ]abcd[ +No match + +/^[W-\]46]/ + W46]789 + 0: W + Wall + 0: W + Zebra + 0: Z + Xylophone + 0: X + 42 + 0: 4 + [abcd] + 0: [ + ]abcd[ + 0: ] + \\backslash + 0: \ +\= Expect no match + -46]789 +No match + well +No match + +/\d\d\/\d\d\/\d\d\d\d/ + 01/01/2000 + 0: 01/01/2000 + +/word (?:[a-zA-Z0-9]+ ){0,10}otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?:[a-zA-Z0-9]+ ){0,300}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/^(a){0,0}/ + bcd + 0: + abc + 0: + aab + 0: + +/^(a){0,1}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: a + 1: a + +/^(a){0,2}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + +/^(a){0,3}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + +/^(a){0,}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a + +/^(a){1,1}/ + abc + 0: a + 1: a + aab + 0: a + 1: a +\= Expect no match + bcd +No match + +/^(a){1,2}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,3}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a +\= Expect no match + bcd +No match + +/.*\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.{0,}\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/m + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*\.gif/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*$/ + borfle\nbib.gif\nno + 0: no + +/.*$/m + 
borfle\nbib.gif\nno + 0: borfle + +/.*$/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ + borfle\nbib.gif\nno\n + 0: no + +/.*$/m + borfle\nbib.gif\nno\n + 0: borfle + +/.*$/s + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/.*$/ms + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/(.*X|^B)/ + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/m + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(.*X|^B)/s + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/ms + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(?s)(.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(?s:.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + BarFoo + 0: B +\= Expect no match + abcde\nBar +No match + +/^.*B/ +\= Expect no match + abc\nB +No match + +/(?s)^.*B/ + abc\nB + 0: abc\x0aB + +/(?m)^.*B/ + abc\nB + 0: B + +/(?ms)^.*B/ + abc\nB + 0: abc\x0aB + +/(?ms)^B/ + abc\nB + 0: B + +/(?s)B$/ + B\n + 0: B + +/^[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/ + 123456654321 + 0: 123456654321 + +/^\d\d\d\d\d\d\d\d\d\d\d\d/ + 123456654321 + 0: 123456654321 + +/^[\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d]/ + 123456654321 + 0: 123456654321 + +/^[abc]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^[a-c]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^(a|b|c){12}/ + abcabcabcabc + 0: abcabcabcabc + 1: c + +/^[abcdefghijklmnopqrstuvwxy0123456789]/ + n + 0: n +\= Expect no match + z +No match + +/abcde{0,0}/ + abcd + 0: abcd +\= Expect no match + abce +No match + +/ab[cd]{0,0}e/ + abe + 0: abe +\= Expect no match + abcde +No match + +/ab(c){0,0}d/ + abd + 0: abd +\= 
Expect no match + abcd +No match + +/a(b*)/ + a + 0: a + 1: + ab + 0: ab + 1: b + abbbb + 0: abbbb + 1: bbbb +\= Expect no match + bbbbb +No match + +/ab\d{0}e/ + abe + 0: abe +\= Expect no match + ab1e +No match + +/"([^\\"]+|\\.)*"/ + the \"quick\" brown fox + 0: "quick" + 1: quick + \"the \\\"quick\\\" brown fox\" + 0: "the \"quick\" brown fox" + 1: brown fox + +/]{0,})>]{0,})>([\d]{0,}\.)(.*)((
([\w\W\s\d][^<>]{0,})|[\s]{0,}))<\/a><\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD><\/TR>/is + 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 0: 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 1: BGCOLOR='#DBE9E9' + 2: align=left valign=top + 3: 43. + 4: Word Processor
(N-1286) + 5: + 6: + 7: + 8: align=left valign=top + 9: Lega lstaff.com +10: align=left valign=top +11: CA - Statewide + +/a[^a]b/ + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/ + acb + 0: acb +\= Expect no match + a\nb +No match + +/a[^a]b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/^(b+?|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/^(b+|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/(?!\A)x/m + a\bx\n + 0: x + a\nx\n + 0: x +\= Expect no match + x\nb\n +No match + +/(A|B)*?CD/ + CD + 0: CD + +/(A|B)*CD/ + CD + 0: CD + +/(AB)*?\1/ + ABABAB + 0: ABAB + 1: AB + +/(AB)*\1/ + ABABAB + 0: ABABAB + 1: AB + +/(?.*/)foo" + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo + 0: /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo +\= Expect no match + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/it/you/see/ +No match + +/(?>(\.\d\d[1-9]?))\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 +\= Expect no match + 1.235 +No match + +/^((?>\w+)|(?>\s+))*$/ + now is the time for all good men to come to the aid of the party + 0: now is the time for all good men to come to the aid of the party + 1: party +\= Expect no match + this is not a line with only words and spaces! 
+No match + +/(\d+)(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a + 12345+ + 0: 12345 + 1: 1234 + 2: 5 + +/((?>\d+))(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a +\= Expect no match + 12345+ +No match + +/(?>a+)b/ + aaab + 0: aaab + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/(?>b)+/ + aaabbbccc + 0: bbb + +/(?>a+|b+|c+)*c/ + aaabbbbccccd + 0: aaabbbbc + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/\(((?>[^()]+)|\([^()]+\))+\)/ + (abc) + 0: (abc) + 1: abc + (abc(def)xyz) + 0: (abc(def)xyz) + 1: xyz +\= Expect no match + ((()aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/a(?-i)b/i + ab + 0: ab + Ab + 0: Ab +\= Expect no match + aB +No match + AB +No match + +/(a (?x)b c)d e/ + a bcd e + 0: a bcd e + 1: a bc +\= Expect no match + a b cd e +No match + abcd e +No match + a bcde +No match + +/(a b(?x)c d (?-x)e f)/ + a bcde f + 0: a bcde f + 1: a bcde f +\= Expect no match + abcdef +No match + +/(a(?i)b)c/ + abc + 0: abc + 1: ab + aBc + 0: aBc + 1: aB +\= Expect no match + abC +No match + aBC +No match + Abc +No match + ABc +No match + ABC +No match + AbC +No match + +/a(?i:b)c/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + ABC +No match + abC +No match + aBC +No match + +/a(?i:b)*c/ + aBc + 0: aBc + aBBc + 0: aBBc +\= Expect no match + aBC +No match + aBBC +No match + +/a(?=b(?i)c)\w\wd/ + abcd + 0: abcd + abCd + 0: abCd +\= Expect no match + aBCd +No match + abcD +No match + +/(?s-i:more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No match + +/(?:(?s-i)more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No 
match + +/(?>a(?i)b+)+c/ + abc + 0: abc + aBbc + 0: aBbc + aBBc + 0: aBBc +\= Expect no match + Abc +No match + abAb +No match + abbC +No match + +/(?=a(?i)b)\w\wc/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + Ab +No match + abC +No match + aBC +No match + +/(?<=a(?i)b)(\w\w)c/ + abxxc + 0: xxc + 1: xx + aBxxc + 0: xxc + 1: xx +\= Expect no match + Abxxc +No match + ABxxc +No match + abxxC +No match + +/(?:(a)|b)(?(1)A|B)/ + aA + 0: aA + 1: a + bB + 0: bB +\= Expect no match + aB +No match + bA +No match + +/^(a)?(?(1)a|b)+$/ + aa + 0: aa + 1: a + b + 0: b + bb + 0: bb +\= Expect no match + ab +No match + +# Perl gets this next one wrong if the pattern ends with $; in that case it +# fails to match "12". + +/^(?(?=abc)\w{3}:|\d\d)/ + abc: + 0: abc: + 12 + 0: 12 + 123 + 0: 12 +\= Expect no match + xyz +No match + +/^(?(?!abc)\d\d|\w{3}:)$/ + abc: + 0: abc: + 12 + 0: 12 +\= Expect no match + 123 +No match + xyz +No match + +/(?(?<=foo)bar|cat)/ + foobar + 0: bar + cat + 0: cat + fcat + 0: cat + focat + 0: cat +\= Expect no match + foocat +No match + +/(?(?a*)*/ + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + +/(abc|)+/ + abc + 0: abc + 1: + abcabc + 0: abcabc + 1: + abcabcabc + 0: abcabcabc + 1: + xyz + 0: + 1: + +/([a]*)*/ + a + 0: a + 1: + aaaaa + 0: aaaaa + 1: + +/([ab]*)*/ + a + 0: a + 1: + b + 0: b + 1: + ababab + 0: ababab + 1: + aaaabcde + 0: aaaab + 1: + bbbb + 0: bbbb + 1: + +/([^a]*)*/ + b + 0: b + 1: + bbbb + 0: bbbb + 1: + aaa + 0: + 1: + +/([^ab]*)*/ + cccc + 0: cccc + 1: + abab + 0: + 1: + +/([a]*?)*/ + a + 0: + 1: + aaaa + 0: + 1: + +/([ab]*?)*/ + a + 0: + 1: + b + 0: + 1: + abab + 0: + 1: + baba + 0: + 1: + +/([^a]*?)*/ + b + 0: + 1: + bbbb + 0: + 1: + aaa + 0: + 1: + +/([^ab]*?)*/ + c + 0: + 1: + cccc + 0: + 1: + baba + 0: + 1: + +/(?>a*)*/ + a + 0: a + aaabcde + 0: aaa + +/((?>a*))*/ + aaaaa + 0: aaaaa + 1: + aabbaa + 0: aa + 1: + +/((?>a*?))*/ + aaaaa + 0: + 1: + aabbaa + 0: + 1: + +/(?(?=[^a-z]+[a-z]) \d{2}-[a-z]{3}-\d{2} | \d{2}-\d{2}-\d{2} ) 
/x + 12-sep-98 + 0: 12-sep-98 + 12-09-98 + 0: 12-09-98 +\= Expect no match + sep-12-98 +No match + +/(?<=(foo))bar\1/ + foobarfoo + 0: barfoo + 1: foo + foobarfootling + 0: barfoo + 1: foo +\= Expect no match + foobar +No match + barfoo +No match + +/(?i:saturday|sunday)/ + saturday + 0: saturday + sunday + 0: sunday + Saturday + 0: Saturday + Sunday + 0: Sunday + SATURDAY + 0: SATURDAY + SUNDAY + 0: SUNDAY + SunDay + 0: SunDay + +/(a(?i)bc|BB)x/ + abcx + 0: abcx + 1: abc + aBCx + 0: aBCx + 1: aBC + bbx + 0: bbx + 1: bb + BBx + 0: BBx + 1: BB +\= Expect no match + abcX +No match + aBCX +No match + bbX +No match + BBX +No match + +/^([ab](?i)[cd]|[ef])/ + ac + 0: ac + 1: ac + aC + 0: aC + 1: aC + bD + 0: bD + 1: bD + elephant + 0: e + 1: e + Europe + 0: E + 1: E + frog + 0: f + 1: f + France + 0: F + 1: F +\= Expect no match + Africa +No match + +/^(ab|a(?i)[b-c](?m-i)d|x(?i)y|z)/ + ab + 0: ab + 1: ab + aBd + 0: aBd + 1: aBd + xy + 0: xy + 1: xy + xY + 0: xY + 1: xY + zebra + 0: z + 1: z + Zambesi + 0: Z + 1: Z +\= Expect no match + aCD +No match + XY +No match + +/(?<=foo\n)^bar/m + foo\nbar + 0: bar +\= Expect no match + bar +No match + baz\nbar +No match + +/(?<=(?]&/ + <&OUT + 0: <& + +/^(a\1?){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + AB +No match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/^(a(?(1)\1)){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/(?:(f)(o)(o)|(b)(a)(r))*/ + foobar + 0: foobar + 1: f + 2: o + 3: o + 4: b + 5: a + 6: r + +/(?<=a)b/ + ab + 0: b +\= Expect no match + cb +No match + b +No match + +/(? 
+ 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(a*)b+/ + caab + 0: aab + 1: aa + +/([\w:]+::)?(\w+)$/ + abcd + 0: abcd + 1: + 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd +\= Expect no match + abcd: +No match + abcd: +No match + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(>a+)ab/ + +/(?>a+)b/ + aaab + 0: aaab + +/([[:]+)/ + a:[b]: + 0: :[ + 1: :[ + +/([[=]+)/ + a=[b]= + 0: =[ + 1: =[ + +/([[.]+)/ + a.[b]. + 0: .[ + 1: .[ + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/a\Z/ +\= Expect no match + aaab +No match + a\nb\n +No match + +/b\Z/ + a\nb\n + 0: b + +/b\z/ + +/b\Z/ + a\nb + 0: b + +/b\z/ + a\nb + 0: b + +/^(?>(?(1)\.|())[^\W_](?>[a-z0-9-]*[^\W_])?)+$/ + a + 0: a + 1: + abc + 0: abc + 1: + a-b + 0: a-b + 1: + 0-9 + 0: 0-9 + 1: + a.b + 0: a.b + 1: + 5.6.7 + 0: 5.6.7 + 1: + the.quick.brown.fox + 0: the.quick.brown.fox + 1: + a100.b200.300c + 0: a100.b200.300c + 1: + 12-ab.1245 + 0: 12-ab.1245 + 1: +\= Expect no match + \ +No match + .a +No match + -a +No match + a- +No match + a. +No match + a_b +No match + a.- +No match + a.. +No match + ab..bc +No match + the.quick.brown.fox- +No match + the.quick.brown.fox. 
+No match + the.quick.brown.fox_ +No match + the.quick.brown.fox+ +No match + +/(?>.*)(?<=(abcd|wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: wxyz +\= Expect no match + a rather long string that doesn't end with one of them +No match + +/word (?>(?:(?!otherword)[a-zA-Z0-9]+ ){0,30})otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?>[a-zA-Z0-9]+ ){0,30}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/(?<=\d{3}(?!999))foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=(?!...999)\d{3})foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=\d{3}(?!999)...)foo/ + 123abcfoo + 0: foo + 123456foo + 0: foo +\= Expect no match + 123999foo +No match + +/(?<=\d{3}...)(? + 2: + 3: abcd +
+ 2: + 3: abcd + \s*)=(?>\s*) # find + 2: + 3: abcd + Z)+|A)*/ + ZABCDEFG + 0: ZA + 1: A + +/((?>)+|A)*/ + ZABCDEFG + 0: + 1: + +/^[\d-a]/ + abcde + 0: a + -things + 0: - + 0digit + 0: 0 +\= Expect no match + bcdef +No match + +/[\s]+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/\s+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/a b/x + ab + 0: ab + +/(?!\A)x/m + a\nxb\n + 0: x + +/(?!^)x/m +\= Expect no match + a\nxb\n +No match + +#/abc\Qabc\Eabc/ +# abcabcabc +# 0: abcabcabc + +#/abc\Q(*+|\Eabc/ +# abc(*+|abc +# 0: abc(*+|abc + +#/ abc\Q abc\Eabc/x +# abc abcabc +# 0: abc abcabc +#\= Expect no match +# abcabcabc +#No match + +#/abc#comment +# \Q#not comment +# literal\E/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment +# /x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/\Qabc\$xyz\E/ +# abc\\\$xyz +# 0: abc\$xyz + +#/\Qabc\E\$\Qxyz\E/ +# abc\$xyz +# 0: abc$xyz + +/\Gabc/ + abc + 0: abc +\= Expect no match + xyzabc +No match + +/a(?x: b c )d/ + XabcdY + 0: abcd +\= Expect no match + Xa b c d Y +No match + +/((?x)x y z | a b c)/ + XabcY + 0: abc + 1: abc + AxyzB + 0: xyz + 1: xyz + +/(?i)AB(?-i)C/ + XabCY + 0: abC +\= Expect no match + XabcY +No match + +/((?i)AB(?-i)C|D)E/ + abCE + 0: abCE + 1: abC + DE + 0: DE + 1: D +\= Expect no match + abcE +No match + abCe +No match + dE +No match + De +No match + +/(.*)\d+\1/ + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/(.*)\d+\1/s + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/((.*))\d+\1/ + abc123abc + 0: abc123abc + 1: abc + 2: abc + abc123bc + 0: bc123bc + 1: bc + 2: bc + +# This tests for an IPv6 address in the form 
where it can have up to +# eight components, one and only one of which is empty. This must be +# an internal component. + +/^(?!:) # colon disallowed at start + (?: # start of item + (?: [0-9a-f]{1,4} | # 1-4 hex digits or + (?(1)0 | () ) ) # if null previously matched, fail; else null + : # followed by colon + ){1,7} # end item; 1-7 of them required + [0-9a-f]{1,4} $ # final hex number at end of string + (?(1)|.) # check that there was an empty component + /ix + a123::a123 + 0: a123::a123 + 1: + a123:b342::abcd + 0: a123:b342::abcd + 1: + a123:b342::324e:abcd + 0: a123:b342::324e:abcd + 1: + a123:ddde:b342::324e:abcd + 0: a123:ddde:b342::324e:abcd + 1: + a123:ddde:b342::324e:dcba:abcd + 0: a123:ddde:b342::324e:dcba:abcd + 1: + a123:ddde:9999:b342::324e:dcba:abcd + 0: a123:ddde:9999:b342::324e:dcba:abcd + 1: +\= Expect no match + 1:2:3:4:5:6:7:8 +No match + a123:bce:ddde:9999:b342::324e:dcba:abcd +No match + a123::9999:b342::324e:dcba:abcd +No match + abcde:2:3:4:5:6:7:8 +No match + ::1 +No match + abcd:fee0:123:: +No match + :1 +No match + 1: +No match + +#/[z\Qa-d]\E]/ +# z +# 0: z +# a +# 0: a +# - +# 0: - +# d +# 0: d +# ] +# 0: ] +#\= Expect no match +# b +#No match + +#TODO: PCRE has an optimization to make this workable, .NET does not +#/(a+)*b/ +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +# All these had to be updated because we understand unicode +# and this looks like it's expecting single byte matches + +# .NET generates \xe4...not sure what's up, might just be different code pages +/(?i)reg(?:ul(?:[aä]|ae)r|ex)/ + REGular + 0: REGular + regulaer + 0: regulaer + Regex + 0: Regex + regulär + 0: regul\xc3\xa4r + +#/Åæåä[à-ÿÀ-ß]+/ +# Åæåäà +# 0: \xc5\xe6\xe5\xe4\xe0 +# Åæåäÿ +# 0: \xc5\xe6\xe5\xe4\xff +# ÅæåäÀ +# 0: \xc5\xe6\xe5\xe4\xc0 +# Åæåäß +# 0: \xc5\xe6\xe5\xe4\xdf + +/(?<=Z)X./ + \x84XAZXB + 0: XB + +/ab cd (?x) de fg/ + ab cd defg + 0: ab cd defg + +/ab cd(?x) de fg/ + ab cddefg + 0: 
ab cddefg +\= Expect no match + abcddefg +No match + +/(? + 2: + D + 0: D + 1: + 2: + +# this is really long with debug -- removing for now +#/(a|)*\d/ +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 1: +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +/(?>a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/(?:a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/^(?s)(?>.*)(? + 2: a + +/(?>(a))b|(a)c/ + ac + 0: ac + 1: + 2: a + +/(?=(a))ab|(a)c/ + ac + 0: ac + 1: + 2: a + +/((?>(a))b|(a)c)/ + ac + 0: ac + 1: ac + 2: + 3: a + +/(?=(?>(a))b|(a)c)(..)/ + ac + 0: ac + 1: + 2: a + 3: ac + +/(?>(?>(a))b|(a)c)/ + ac + 0: ac + 1: + 2: a + +/((?>(a+)b)+(aabab))/ + aaaabaaabaabab + 0: aaaabaaabaabab + 1: aaaabaaabaabab + 2: aaa + 3: aabab + +/(?>a+|ab)+?c/ +\= Expect no match + aabc +No match + +/(?>a+|ab)+c/ +\= Expect no match + aabc +No match + +/(?:a+|ab)+c/ + aabc + 0: aabc + +/^(?:a|ab)+c/ + aaaabc + 0: aaaabc + +/(?=abc){0}xyz/ + xyz + 0: xyz + +/(?=abc){1}xyz/ +\= Expect no match + xyz +No match + +/(?=(a))?./ + ab + 0: a + 1: a + bc + 0: b + +/(?=(a))??./ + ab + 0: a + bc + 0: b + +/^(?!a){0}\w+/ + aaaaa + 0: aaaaa + +/(?<=(abc))?xyz/ + abcxyz + 0: xyz + 1: abc + pqrxyz + 0: xyz + +/^[g]+/ + ggg<<>> + 0: ggg<<>> +\= Expect no match + \\ga +No match + +/^[ga]+/ + gggagagaxyz + 0: gggagaga + +/[:a]xxx[b:]/ + :xxx: + 0: :xxx: + +/(?<=a{2})b/i + xaabc + 0: b +\= Expect no match + xabc +No match + +/(? 
+# 4: +# 5: c +# 6: d +# 7: Y + +#/^X(?7)(a)(?|(b|(?|(r)|(t))(s))|(q))(c)(d)(Y)/ +# XYabcdY +# 0: XYabcdY +# 1: a +# 2: b +# 3: +# 4: +# 5: c +# 6: d +# 7: Y + +/(?'abc'\w+):\k{2}/ + a:aaxyz + 0: a:aa + 1: a + ab:ababxyz + 0: ab:abab + 1: ab +\= Expect no match + a:axyz +No match + ab:abxyz +No match + +/^(?a)? (?(ab)b|c) (?(ab)d|e)/x + abd + 0: abd + 1: a + ce + 0: ce + +# .NET has more consistent grouping numbers with these dupe groups for the two options +/(?:a(? (?')|(?")) |b(? (?')|(?")) ) (?(quote)[a-z]+|[0-9]+)/x,dupnames + a\"aaaaa + 0: a"aaaaa + 1: " + 2: + 3: " + b\"aaaaa + 0: b"aaaaa + 1: " + 2: + 3: " +\= Expect no match + b\"11111 +No match + +#/(?P(?P0)(?P>L1)|(?P>L2))/ +# 0 +# 0: 0 +# 1: 0 +# 00 +# 0: 00 +# 1: 00 +# 2: 0 +# 0000 +# 0: 0000 +# 1: 0000 +# 2: 0 + +#/(?P(?P0)|(?P>L2)(?P>L1))/ +# 0 +# 0: 0 +# 1: 0 +# 2: 0 +# 00 +# 0: 0 +# 1: 0 +# 2: 0 +# 0000 +# 0: 0 +# 1: 0 +# 2: 0 + +# Check the use of names for failure + +# Check opening parens in comment when seeking forward reference. 
+ +#/(?P(?P=abn)xxx|)+/ +# xxx +# 0: +# 1: + +#Posses +/^(a)?(\w)/ + aaaaX + 0: aa + 1: a + 2: a + YZ + 0: Y + 1: + 2: Y + +#Posses +/^(?:a)?(\w)/ + aaaaX + 0: aa + 1: a + YZ + 0: Y + 1: Y + +/\A.*?(a|bc)/ + ba + 0: ba + 1: a + +/\A.*?(?:a|bc|d)/ + ba + 0: ba + +# -------------------------- + +/(another)?(\1?)test/ + hello world test + 0: test + 1: + 2: + +/(another)?(\1+)test/ +\= Expect no match + hello world test +No match + +/((?:a?)*)*c/ + aac + 0: aac + 1: + +/((?>a?)*)*c/ + aac + 0: aac + 1: + +/(?>.*?a)(?<=ba)/ + aba + 0: ba + +/(?:.*?a)(?<=ba)/ + aba + 0: aba + +/(?>.*?a)b/s + aab + 0: ab + +/(?>.*?a)b/ + aab + 0: ab + +/(?>^a)b/s +\= Expect no match + aab +No match + +/(?>.*?)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: + 1: abcd + endingwxyz + 0: + 1: + 2: wxyz + +/(?>.*)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: + 2: wxyz + +"(?>.*)foo" +\= Expect no match + abcdfooxyz +No match + +"(?>.*?)foo" + abcdfooxyz + 0: foo + +# Tests that try to figure out how Perl works. My hypothesis is that the first +# verb that is backtracked onto is the one that acts. This seems to be the case +# almost all the time, but there is one exception that is perhaps a bug. + +/a(?=bc).|abd/ + abd + 0: abd + abc + 0: ab + +/a(?>bc)d|abd/ + abceabd + 0: abd + +# These tests were formerly in test 2, but changes in PCRE and Perl have +# made them compatible. + +/^(a)?(?(1)a|b)+$/ +\= Expect no match + a +No match + +# ---- + +/^\d*\w{4}/ + 1234 + 0: 1234 +\= Expect no match + 123 +No match + +/^[^b]*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^[^b]*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/(?:(?foo)|(?bar))\k/dupnames + foofoo + 0: foofoo + 1: foo + barbar + 0: barbar + 1: bar + +# A notable difference between PCRE and .NET. 
According to +# the PCRE docs: +# If you make a subroutine call to a non-unique named +# subpattern, the one that corresponds to the first +# occurrence of the name is used. In the absence of +# duplicate numbers (see the previous section) this is +# the one with the lowest number. +# .NET takes the most recently captured number according to MSDN: +# A backreference refers to the most recent definition of +# a group (the definition most immediately to the left, +# when matching left to right). When a group makes multiple +# captures, a backreference refers to the most recent capture. + +#/(?A)(?:(?foo)|(?bar))\k/dupnames +# AfooA +# 0: AfooA +# 1: A +# 2: foo +# AbarA +# 0: AbarA +# 1: A +# 2: +# 3: bar +#\= Expect no match +# Afoofoo +#No match +# Abarbar +#No match + +/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + +# TODO: .NET's group number ordering here in the second example is a bit odd +/^ (?:(?A)|(?'B'B)(?A)) (?(A)x) (?(B)y)$/x,dupnames + Ax + 0: Ax + 1: A + BAxy + 0: BAxy + 1: A + 2: B + +/ ^ a + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ (?> a + ) b $ /x + aaaab + 0: aaaab + +/ ^ ( a + ) + \w $ /x + aaaab + 0: aaaab + 1: aaaa + +/(?:x|(?:(xx|yy)+|x|x|x|x|x)|a|a|a)bc/ +\= Expect no match + acb +No match + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]*|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")+\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A([^\"1]+|[\"2]([^\"3]*|[\"4][\"5])*[\"6])+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER +# 1: AFTER +# 2: + 
+/^\w+(?>\s*)(?<=\w)/ + test test + 0: tes + +#/(?Pa)?(?Pb)?(?()c|d)*l/ +# acl +# 0: acl +# 1: a +# bdl +# 0: bdl +# 1: +# 2: b +# adl +# 0: dl +# bcl +# 0: l + +/\sabc/ + \x0babc + 0: \x0babc + +#/[\Qa]\E]+/ +# aa]] +# 0: aa]] + +#/[\Q]a\E]+/ +# aa]] +# 0: aa]] + +/A((((((((a))))))))\8B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + +/A(((((((((a)))))))))\9B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + 9: a + +/(|ab)*?d/ + abd + 0: abd + 1: ab + xyd + 0: d + +/(\2|a)(\1)/ + aaa + 0: aa + 1: a + 2: a + +/(\2)(\1)/ + +"Z*(|d*){216}" + +/((((((((((((x))))))))))))\12/ + xx + 0: xx + 1: x + 2: x + 3: x + 4: x + 5: x + 6: x + 7: x + 8: x + 9: x +10: x +11: x +12: x + +#"(?|(\k'Pm')|(?'Pm'))" +# abcd +# 0: +# 1: + +#/(?|(aaa)|(b))\g{1}/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(aaa)|(b))(?1)/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# baaa +# 0: baaa +# 1: b +#\= Expect no match +# bb +#No match + +#/(?|(aaa)|(b))/ +# xaaa +# 0: aaa +# 1: aaa +# xbc +# 0: b +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))\k'a'/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))(?'a'cccc)\k'a'/dupnames +# aaaccccaaa +# 0: aaaccccaaa +# 1: aaa +# 2: cccc +# bccccb +# 0: bccccb +# 1: b +# 2: cccc + +# End of testinput1 diff --git a/vendor/github.com/fatih/color/LICENSE.md b/vendor/github.com/fatih/color/LICENSE.md new file mode 100644 index 0000000..25fdaf6 --- /dev/null +++ b/vendor/github.com/fatih/color/LICENSE.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Fatih Arslan + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to 
the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md new file mode 100644 index 0000000..be82827 --- /dev/null +++ b/vendor/github.com/fatih/color/README.md @@ -0,0 +1,176 @@ +# color [![](https://github.com/fatih/color/workflows/build/badge.svg)](https://github.com/fatih/color/actions) [![PkgGoDev](https://pkg.go.dev/badge/github.com/fatih/color)](https://pkg.go.dev/github.com/fatih/color) + +Color lets you use colorized outputs in terms of [ANSI Escape +Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It +has support for Windows too! The API can be used in several ways, pick one that +suits you. 
+ +![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) + +## Install + +```bash +go get github.com/fatih/color +``` + +## Examples + +### Standard colors + +```go +// Print with default helper functions +color.Cyan("Prints text in cyan.") + +// A newline will be appended automatically +color.Blue("Prints %s in blue.", "text") + +// These are using the default foreground colors +color.Red("We have red") +color.Magenta("And many others ..") + +``` + +### Mix and reuse colors + +```go +// Create a new color object +c := color.New(color.FgCyan).Add(color.Underline) +c.Println("Prints cyan text with an underline.") + +// Or just add them to New() +d := color.New(color.FgCyan, color.Bold) +d.Printf("This prints bold cyan %s\n", "too!.") + +// Mix up foreground and background colors, create new mixes! +red := color.New(color.FgRed) + +boldRed := red.Add(color.Bold) +boldRed.Println("This will print text in bold red.") + +whiteBackground := red.Add(color.BgWhite) +whiteBackground.Println("Red text with white background.") +``` + +### Use your own output (io.Writer) + +```go +// Use your own io.Writer output +color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + +blue := color.New(color.FgBlue) +blue.Fprint(writer, "This will print text in blue.") +``` + +### Custom print functions (PrintFunc) + +```go +// Create a custom print function for convenience +red := color.New(color.FgRed).PrintfFunc() +red("Warning") +red("Error: %s", err) + +// Mix up multiple attributes +notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() +notice("Don't forget this...") +``` + +### Custom fprint functions (FprintFunc) + +```go +blue := color.New(color.FgBlue).FprintfFunc() +blue(myWriter, "important notice: %s", stars) + +// Mix up with multiple attributes +success := color.New(color.Bold, color.FgGreen).FprintlnFunc() +success(myWriter, "Don't forget this...") +``` + +### Insert into noncolor strings (SprintFunc) + +```go 
+// Create SprintXxx functions to mix strings with other non-colorized strings: +yellow := color.New(color.FgYellow).SprintFunc() +red := color.New(color.FgRed).SprintFunc() +fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error")) + +info := color.New(color.FgWhite, color.BgGreen).SprintFunc() +fmt.Printf("This %s rocks!\n", info("package")) + +// Use helper functions +fmt.Println("This", color.RedString("warning"), "should be not neglected.") +fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.") + +// Windows supported too! Just don't forget to change the output to color.Output +fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) +``` + +### Plug into existing code + +```go +// Use handy standard colors +color.Set(color.FgYellow) + +fmt.Println("Existing text will now be in yellow") +fmt.Printf("This one %s\n", "too") + +color.Unset() // Don't forget to unset + +// You can mix up parameters +color.Set(color.FgMagenta, color.Bold) +defer color.Unset() // Use it in your function + +fmt.Println("All text will now be bold magenta.") +``` + +### Disable/Enable color + +There might be a case where you want to explicitly disable/enable color output. the +`go-isatty` package will automatically disable color output for non-tty output streams +(for example if the output were piped directly to `less`). + +The `color` package also disables color output if the [`NO_COLOR`](https://no-color.org) environment +variable is set to a non-empty string. + +`Color` has support to disable/enable colors programmatically both globally and +for single color definitions. For example suppose you have a CLI app and a +`-no-color` bool flag. You can easily disable the color output with: + +```go +var flagNoColor = flag.Bool("no-color", false, "Disable color output") + +if *flagNoColor { + color.NoColor = true // disables colorized output +} +``` + +It also has support for single color definitions (local). 
You can +disable/enable color output on the fly: + +```go +c := color.New(color.FgCyan) +c.Println("Prints cyan text") + +c.DisableColor() +c.Println("This is printed without any color") + +c.EnableColor() +c.Println("This prints again cyan...") +``` + +## GitHub Actions + +To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams. + +## Todo + +* Save/Return previous values +* Evaluate fmt.Formatter interface + +## Credits + +* [Fatih Arslan](https://github.com/fatih) +* Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) + +## License + +The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go new file mode 100644 index 0000000..c423428 --- /dev/null +++ b/vendor/github.com/fatih/color/color.go @@ -0,0 +1,650 @@ +package color + +import ( + "fmt" + "io" + "os" + "strconv" + "strings" + "sync" + + "github.com/mattn/go-colorable" + "github.com/mattn/go-isatty" +) + +var ( + // NoColor defines if the output is colorized or not. It's dynamically set to + // false or true based on the stdout's file descriptor referring to a terminal + // or not. It's also set to true if the NO_COLOR environment variable is + // set (regardless of its value). This is a global option and affects all + // colors. For more control over each color block use the methods + // DisableColor() individually. + NoColor = noColorIsSet() || os.Getenv("TERM") == "dumb" || + (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) + + // Output defines the standard output of the print functions. By default, + // os.Stdout is used. + Output = colorable.NewColorableStdout() + + // Error defines a color supporting writer for os.Stderr. 
+ Error = colorable.NewColorableStderr() + + // colorsCache is used to reduce the count of created Color objects and + // allows to reuse already created objects with required Attribute. + colorsCache = make(map[Attribute]*Color) + colorsCacheMu sync.Mutex // protects colorsCache +) + +// noColorIsSet returns true if the environment variable NO_COLOR is set to a non-empty string. +func noColorIsSet() bool { + return os.Getenv("NO_COLOR") != "" +} + +// Color defines a custom color object which is defined by SGR parameters. +type Color struct { + params []Attribute + noColor *bool +} + +// Attribute defines a single SGR Code +type Attribute int + +const escape = "\x1b" + +// Base attributes +const ( + Reset Attribute = iota + Bold + Faint + Italic + Underline + BlinkSlow + BlinkRapid + ReverseVideo + Concealed + CrossedOut +) + +const ( + ResetBold Attribute = iota + 22 + ResetItalic + ResetUnderline + ResetBlinking + _ + ResetReversed + ResetConcealed + ResetCrossedOut +) + +var mapResetAttributes map[Attribute]Attribute = map[Attribute]Attribute{ + Bold: ResetBold, + Faint: ResetBold, + Italic: ResetItalic, + Underline: ResetUnderline, + BlinkSlow: ResetBlinking, + BlinkRapid: ResetBlinking, + ReverseVideo: ResetReversed, + Concealed: ResetConcealed, + CrossedOut: ResetCrossedOut, +} + +// Foreground text colors +const ( + FgBlack Attribute = iota + 30 + FgRed + FgGreen + FgYellow + FgBlue + FgMagenta + FgCyan + FgWhite +) + +// Foreground Hi-Intensity text colors +const ( + FgHiBlack Attribute = iota + 90 + FgHiRed + FgHiGreen + FgHiYellow + FgHiBlue + FgHiMagenta + FgHiCyan + FgHiWhite +) + +// Background text colors +const ( + BgBlack Attribute = iota + 40 + BgRed + BgGreen + BgYellow + BgBlue + BgMagenta + BgCyan + BgWhite +) + +// Background Hi-Intensity text colors +const ( + BgHiBlack Attribute = iota + 100 + BgHiRed + BgHiGreen + BgHiYellow + BgHiBlue + BgHiMagenta + BgHiCyan + BgHiWhite +) + +// New returns a newly created color object. 
+func New(value ...Attribute) *Color { + c := &Color{ + params: make([]Attribute, 0), + } + + if noColorIsSet() { + c.noColor = boolPtr(true) + } + + c.Add(value...) + return c +} + +// Set sets the given parameters immediately. It will change the color of +// output with the given SGR parameters until color.Unset() is called. +func Set(p ...Attribute) *Color { + c := New(p...) + c.Set() + return c +} + +// Unset resets all escape attributes and clears the output. Usually should +// be called after Set(). +func Unset() { + if NoColor { + return + } + + fmt.Fprintf(Output, "%s[%dm", escape, Reset) +} + +// Set sets the SGR sequence. +func (c *Color) Set() *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprint(Output, c.format()) + return c +} + +func (c *Color) unset() { + if c.isNoColorSet() { + return + } + + Unset() +} + +// SetWriter is used to set the SGR sequence with the given io.Writer. This is +// a low-level function, and users should use the higher-level functions, such +// as color.Fprint, color.Print, etc. +func (c *Color) SetWriter(w io.Writer) *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprint(w, c.format()) + return c +} + +// UnsetWriter resets all escape attributes and clears the output with the give +// io.Writer. Usually should be called after SetWriter(). +func (c *Color) UnsetWriter(w io.Writer) { + if c.isNoColorSet() { + return + } + + if NoColor { + return + } + + fmt.Fprintf(w, "%s[%dm", escape, Reset) +} + +// Add is used to chain SGR parameters. Use as many as parameters to combine +// and create custom color objects. Example: Add(color.FgRed, color.Underline). +func (c *Color) Add(value ...Attribute) *Color { + c.params = append(c.params, value...) + return c +} + +// Fprint formats using the default formats for its operands and writes to w. +// Spaces are added between operands when neither is a string. +// It returns the number of bytes written and any write error encountered. 
+// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) { + c.SetWriter(w) + defer c.UnsetWriter(w) + + return fmt.Fprint(w, a...) +} + +// Print formats using the default formats for its operands and writes to +// standard output. Spaces are added between operands when neither is a +// string. It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Print(a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprint(Output, a...) +} + +// Fprintf formats according to a format specifier and writes to w. +// It returns the number of bytes written and any write error encountered. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { + c.SetWriter(w) + defer c.UnsetWriter(w) + + return fmt.Fprintf(w, format, a...) +} + +// Printf formats according to a format specifier and writes to standard output. +// It returns the number of bytes written and any write error encountered. +// This is the standard fmt.Printf() method wrapped with the given color. +func (c *Color) Printf(format string, a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprintf(Output, format, a...) +} + +// Fprintln formats using the default formats for its operands and writes to w. +// Spaces are always added between operands and a newline is appended. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { + return fmt.Fprintln(w, c.wrap(fmt.Sprint(a...))) +} + +// Println formats using the default formats for its operands and writes to +// standard output. 
Spaces are always added between operands and a newline is +// appended. It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Println(a ...interface{}) (n int, err error) { + return fmt.Fprintln(Output, c.wrap(fmt.Sprint(a...))) +} + +// Sprint is just like Print, but returns a string instead of printing it. +func (c *Color) Sprint(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) +} + +// Sprintln is just like Println, but returns a string instead of printing it. +func (c *Color) Sprintln(a ...interface{}) string { + return fmt.Sprintln(c.Sprint(a...)) +} + +// Sprintf is just like Printf, but returns a string instead of printing it. +func (c *Color) Sprintf(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) +} + +// FprintFunc returns a new function that prints the passed arguments as +// colorized with color.Fprint(). +func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprint(w, a...) + } +} + +// PrintFunc returns a new function that prints the passed arguments as +// colorized with color.Print(). +func (c *Color) PrintFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Print(a...) + } +} + +// FprintfFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintf(). +func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) { + return func(w io.Writer, format string, a ...interface{}) { + c.Fprintf(w, format, a...) + } +} + +// PrintfFunc returns a new function that prints the passed arguments as +// colorized with color.Printf(). +func (c *Color) PrintfFunc() func(format string, a ...interface{}) { + return func(format string, a ...interface{}) { + c.Printf(format, a...) 
+ } +} + +// FprintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintln(). +func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprintln(w, a...) + } +} + +// PrintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Println(). +func (c *Color) PrintlnFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Println(a...) + } +} + +// SprintFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprint(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output, example: +// +// put := New(FgYellow).SprintFunc() +// fmt.Fprintf(color.Output, "This is a %s", put("warning")) +func (c *Color) SprintFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) + } +} + +// SprintfFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintf(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. +func (c *Color) SprintfFunc() func(format string, a ...interface{}) string { + return func(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) + } +} + +// SprintlnFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintln(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. 
+func (c *Color) SprintlnFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return fmt.Sprintln(c.Sprint(a...)) + } +} + +// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m" +// an example output might be: "1;36" -> bold cyan +func (c *Color) sequence() string { + format := make([]string, len(c.params)) + for i, v := range c.params { + format[i] = strconv.Itoa(int(v)) + } + + return strings.Join(format, ";") +} + +// wrap wraps the s string with the colors attributes. The string is ready to +// be printed. +func (c *Color) wrap(s string) string { + if c.isNoColorSet() { + return s + } + + return c.format() + s + c.unformat() +} + +func (c *Color) format() string { + return fmt.Sprintf("%s[%sm", escape, c.sequence()) +} + +func (c *Color) unformat() string { + //return fmt.Sprintf("%s[%dm", escape, Reset) + //for each element in sequence let's use the speficic reset escape, ou the generic one if not found + format := make([]string, len(c.params)) + for i, v := range c.params { + format[i] = strconv.Itoa(int(Reset)) + ra, ok := mapResetAttributes[v] + if ok { + format[i] = strconv.Itoa(int(ra)) + } + } + + return fmt.Sprintf("%s[%sm", escape, strings.Join(format, ";")) +} + +// DisableColor disables the color output. Useful to not change any existing +// code and still being able to output. Can be used for flags like +// "--no-color". To enable back use EnableColor() method. +func (c *Color) DisableColor() { + c.noColor = boolPtr(true) +} + +// EnableColor enables the color output. Use it in conjunction with +// DisableColor(). Otherwise, this method has no side effects. 
+func (c *Color) EnableColor() { + c.noColor = boolPtr(false) +} + +func (c *Color) isNoColorSet() bool { + // check first if we have user set action + if c.noColor != nil { + return *c.noColor + } + + // if not return the global option, which is disabled by default + return NoColor +} + +// Equals returns a boolean value indicating whether two colors are equal. +func (c *Color) Equals(c2 *Color) bool { + if c == nil && c2 == nil { + return true + } + if c == nil || c2 == nil { + return false + } + if len(c.params) != len(c2.params) { + return false + } + + for _, attr := range c.params { + if !c2.attrExists(attr) { + return false + } + } + + return true +} + +func (c *Color) attrExists(a Attribute) bool { + for _, attr := range c.params { + if attr == a { + return true + } + } + + return false +} + +func boolPtr(v bool) *bool { + return &v +} + +func getCachedColor(p Attribute) *Color { + colorsCacheMu.Lock() + defer colorsCacheMu.Unlock() + + c, ok := colorsCache[p] + if !ok { + c = New(p) + colorsCache[p] = c + } + + return c +} + +func colorPrint(format string, p Attribute, a ...interface{}) { + c := getCachedColor(p) + + if !strings.HasSuffix(format, "\n") { + format += "\n" + } + + if len(a) == 0 { + c.Print(format) + } else { + c.Printf(format, a...) + } +} + +func colorString(format string, p Attribute, a ...interface{}) string { + c := getCachedColor(p) + + if len(a) == 0 { + return c.SprintFunc()(format) + } + + return c.SprintfFunc()(format, a...) +} + +// Black is a convenient helper function to print with black foreground. A +// newline is appended to format by default. +func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) } + +// Red is a convenient helper function to print with red foreground. A +// newline is appended to format by default. +func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) } + +// Green is a convenient helper function to print with green foreground. 
A +// newline is appended to format by default. +func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) } + +// Yellow is a convenient helper function to print with yellow foreground. +// A newline is appended to format by default. +func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) } + +// Blue is a convenient helper function to print with blue foreground. A +// newline is appended to format by default. +func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) } + +// Magenta is a convenient helper function to print with magenta foreground. +// A newline is appended to format by default. +func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) } + +// Cyan is a convenient helper function to print with cyan foreground. A +// newline is appended to format by default. +func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) } + +// White is a convenient helper function to print with white foreground. A +// newline is appended to format by default. +func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) } + +// BlackString is a convenient helper function to return a string with black +// foreground. +func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) } + +// RedString is a convenient helper function to return a string with red +// foreground. +func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) } + +// GreenString is a convenient helper function to return a string with green +// foreground. +func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) } + +// YellowString is a convenient helper function to return a string with yellow +// foreground. +func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) 
} + +// BlueString is a convenient helper function to return a string with blue +// foreground. +func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) } + +// MagentaString is a convenient helper function to return a string with magenta +// foreground. +func MagentaString(format string, a ...interface{}) string { + return colorString(format, FgMagenta, a...) +} + +// CyanString is a convenient helper function to return a string with cyan +// foreground. +func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) } + +// WhiteString is a convenient helper function to return a string with white +// foreground. +func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) } + +// HiBlack is a convenient helper function to print with hi-intensity black foreground. A +// newline is appended to format by default. +func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) } + +// HiRed is a convenient helper function to print with hi-intensity red foreground. A +// newline is appended to format by default. +func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) } + +// HiGreen is a convenient helper function to print with hi-intensity green foreground. A +// newline is appended to format by default. +func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) } + +// HiYellow is a convenient helper function to print with hi-intensity yellow foreground. +// A newline is appended to format by default. +func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) } + +// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A +// newline is appended to format by default. +func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) 
} + +// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground. +// A newline is appended to format by default. +func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) } + +// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A +// newline is appended to format by default. +func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) } + +// HiWhite is a convenient helper function to print with hi-intensity white foreground. A +// newline is appended to format by default. +func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) } + +// HiBlackString is a convenient helper function to return a string with hi-intensity black +// foreground. +func HiBlackString(format string, a ...interface{}) string { + return colorString(format, FgHiBlack, a...) +} + +// HiRedString is a convenient helper function to return a string with hi-intensity red +// foreground. +func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) } + +// HiGreenString is a convenient helper function to return a string with hi-intensity green +// foreground. +func HiGreenString(format string, a ...interface{}) string { + return colorString(format, FgHiGreen, a...) +} + +// HiYellowString is a convenient helper function to return a string with hi-intensity yellow +// foreground. +func HiYellowString(format string, a ...interface{}) string { + return colorString(format, FgHiYellow, a...) +} + +// HiBlueString is a convenient helper function to return a string with hi-intensity blue +// foreground. +func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) } + +// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta +// foreground. 
+func HiMagentaString(format string, a ...interface{}) string { + return colorString(format, FgHiMagenta, a...) +} + +// HiCyanString is a convenient helper function to return a string with hi-intensity cyan +// foreground. +func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) } + +// HiWhiteString is a convenient helper function to return a string with hi-intensity white +// foreground. +func HiWhiteString(format string, a ...interface{}) string { + return colorString(format, FgHiWhite, a...) +} diff --git a/vendor/github.com/fatih/color/color_windows.go b/vendor/github.com/fatih/color/color_windows.go new file mode 100644 index 0000000..be01c55 --- /dev/null +++ b/vendor/github.com/fatih/color/color_windows.go @@ -0,0 +1,19 @@ +package color + +import ( + "os" + + "golang.org/x/sys/windows" +) + +func init() { + // Opt-in for ansi color support for current process. + // https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences#output-sequences + var outMode uint32 + out := windows.Handle(os.Stdout.Fd()) + if err := windows.GetConsoleMode(out, &outMode); err != nil { + return + } + outMode |= windows.ENABLE_PROCESSED_OUTPUT | windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING + _ = windows.SetConsoleMode(out, outMode) +} diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go new file mode 100644 index 0000000..9491ad5 --- /dev/null +++ b/vendor/github.com/fatih/color/doc.go @@ -0,0 +1,134 @@ +/* +Package color is an ANSI color package to output colorized or SGR defined +output to the standard output. The API can be used in several way, pick one +that suits you. + +Use simple and default helper functions with predefined foreground colors: + + color.Cyan("Prints text in cyan.") + + // a newline will be appended automatically + color.Blue("Prints %s in blue.", "text") + + // More default foreground colors.. 
+ color.Red("We have red") + color.Yellow("Yellow color too!") + color.Magenta("And many others ..") + + // Hi-intensity colors + color.HiGreen("Bright green color.") + color.HiBlack("Bright black means gray..") + color.HiWhite("Shiny white color!") + +However, there are times when custom color mixes are required. Below are some +examples to create custom color objects and use the print functions of each +separate color object. + + // Create a new color object + c := color.New(color.FgCyan).Add(color.Underline) + c.Println("Prints cyan text with an underline.") + + // Or just add them to New() + d := color.New(color.FgCyan, color.Bold) + d.Printf("This prints bold cyan %s\n", "too!.") + + + // Mix up foreground and background colors, create new mixes! + red := color.New(color.FgRed) + + boldRed := red.Add(color.Bold) + boldRed.Println("This will print text in bold red.") + + whiteBackground := red.Add(color.BgWhite) + whiteBackground.Println("Red text with White background.") + + // Use your own io.Writer output + color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + + blue := color.New(color.FgBlue) + blue.Fprint(myWriter, "This will print text in blue.") + +You can create PrintXxx functions to simplify even more: + + // Create a custom print function for convenient + red := color.New(color.FgRed).PrintfFunc() + red("warning") + red("error: %s", err) + + // Mix up multiple attributes + notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() + notice("don't forget this...") + +You can also FprintXxx functions to pass your own io.Writer: + + blue := color.New(FgBlue).FprintfFunc() + blue(myWriter, "important notice: %s", stars) + + // Mix up with multiple attributes + success := color.New(color.Bold, color.FgGreen).FprintlnFunc() + success(myWriter, don't forget this...") + +Or create SprintXxx functions to mix strings with other non-colorized strings: + + yellow := New(FgYellow).SprintFunc() + red := New(FgRed).SprintFunc() + + fmt.Printf("this is a %s 
and this is %s.\n", yellow("warning"), red("error")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Printf("this %s rocks!\n", info("package")) + +Windows support is enabled by default. All Print functions work as intended. +However, only for color.SprintXXX functions, user should use fmt.FprintXXX and +set the output to color.Output: + + fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) + +Using with existing code is possible. Just use the Set() method to set the +standard output to the given parameters. That way a rewrite of an existing +code is not required. + + // Use handy standard colors. + color.Set(color.FgYellow) + + fmt.Println("Existing text will be now in Yellow") + fmt.Printf("This one %s\n", "too") + + color.Unset() // don't forget to unset + + // You can mix up parameters + color.Set(color.FgMagenta, color.Bold) + defer color.Unset() // use it in your function + + fmt.Println("All text will be now bold magenta.") + +There might be a case where you want to disable color output (for example to +pipe the standard output of your app to somewhere else). `Color` has support to +disable colors both globally and for single color definition. For example +suppose you have a CLI app and a `--no-color` bool flag. You can easily disable +the color output with: + + var flagNoColor = flag.Bool("no-color", false, "Disable color output") + + if *flagNoColor { + color.NoColor = true // disables colorized output + } + +You can also disable the color by setting the NO_COLOR environment variable to any value. + +It also has support for single color definitions (local). 
You can +disable/enable color output on the fly: + + c := color.New(color.FgCyan) + c.Println("Prints cyan text") + + c.DisableColor() + c.Println("This is printed without any color") + + c.EnableColor() + c.Println("This prints again cyan...") +*/ +package color diff --git a/vendor/github.com/frankban/quicktest/.gitignore b/vendor/github.com/frankban/quicktest/.gitignore new file mode 100644 index 0000000..722d5e7 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/.gitignore @@ -0,0 +1 @@ +.vscode diff --git a/vendor/github.com/frankban/quicktest/.godocdown.template b/vendor/github.com/frankban/quicktest/.godocdown.template new file mode 100644 index 0000000..70c8981 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/.godocdown.template @@ -0,0 +1,13 @@ +[![Go Reference](https://pkg.go.dev/badge/github.com/frankban/quicktest.svg)](https://pkg.go.dev/github.com/frankban/quicktest#section-documentation) +[![Build Status](https://github.com/frankban/quicktest/actions/workflows/ci.yaml/badge.svg)](https://github.com/frankban/quicktest/actions/workflows/ci.yaml) + +[//]: # (Generated with: godocdown -template=.godocdown.template -o README.md && sed -i= 's/^# /### /' README.md ) + +# quicktest + +`go get github.com/frankban/quicktest@latest` + +{{ .EmitSynopsis }} + +For a complete API reference, see the +[package documentation](https://pkg.go.dev/github.com/frankban/quicktest#section-documentation). diff --git a/vendor/github.com/frankban/quicktest/LICENSE b/vendor/github.com/frankban/quicktest/LICENSE new file mode 100644 index 0000000..23a294c --- /dev/null +++ b/vendor/github.com/frankban/quicktest/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Canonical Ltd. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/frankban/quicktest/README.md b/vendor/github.com/frankban/quicktest/README.md new file mode 100644 index 0000000..a3017fb --- /dev/null +++ b/vendor/github.com/frankban/quicktest/README.md @@ -0,0 +1,347 @@ +[![Go Reference](https://pkg.go.dev/badge/github.com/frankban/quicktest.svg)](https://pkg.go.dev/github.com/frankban/quicktest#section-documentation) +[![Build Status](https://github.com/frankban/quicktest/actions/workflows/ci.yaml/badge.svg)](https://github.com/frankban/quicktest/actions/workflows/ci.yaml) + +[//]: # (Generated with: godocdown -template=.godocdown.template -o README.md) + +### quicktest + +`go get github.com/frankban/quicktest@latest` + +Package quicktest provides a collection of Go helpers for writing tests. 
+ +Quicktest helpers can be easily integrated inside regular Go tests, for +instance: + + import qt "github.com/frankban/quicktest" + + func TestFoo(t *testing.T) { + t.Run("numbers", func(t *testing.T) { + c := qt.New(t) + numbers, err := somepackage.Numbers() + c.Assert(err, qt.IsNil) + c.Assert(numbers, qt.DeepEquals, []int{42, 47}) + }) + t.Run("bad wolf error", func(t *testing.T) { + c := qt.New(t) + numbers, err := somepackage.Numbers() + c.Assert(err, qt.ErrorMatches, "bad wolf") + }) + t.Run("nil", func(t *testing.T) { + c := qt.New(t) + got := somepackage.MaybeNil() + c.Assert(got, qt.IsNil, qt.Commentf("value: %v", somepackage.Value)) + }) + } + +### Assertions + +An assertion looks like this, where qt.Equals could be replaced by any available +checker. If the assertion fails, the underlying Fatal method is called to +describe the error and abort the test. + + c := qt.New(t) + c.Assert(someValue, qt.Equals, wantValue) + +If you don’t want to abort on failure, use Check instead, which calls Error +instead of Fatal: + + c.Check(someValue, qt.Equals, wantValue) + +For really short tests, the extra line for instantiating *qt.C can be avoided: + + qt.Assert(t, someValue, qt.Equals, wantValue) + qt.Check(t, someValue, qt.Equals, wantValue) + +The library provides some base checkers like Equals, DeepEquals, Matches, +ErrorMatches, IsNil and others. More can be added by implementing the Checker +interface. Below, we list the checkers implemented by the package in +alphabetical order. + +### All + +All returns a Checker that uses the given checker to check elements of slice or +array or the values of a map. It succeeds if all elements pass the check. On +failure it prints the error from the first index that failed. + +For example: + + c.Assert([]int{3, 5, 8}, qt.All(qt.Not(qt.Equals)), 0) + c.Assert([][]string{{"a", "b"}, {"a", "b"}}, qt.All(qt.DeepEquals), []string{"c", "d"}) + +See also Any and Contains. 
+ +### Any + +Any returns a Checker that uses the given checker to check elements of a slice +or array or the values from a map. It succeeds if any element passes the check. + +For example: + + c.Assert([]int{3,5,7,99}, qt.Any(qt.Equals), 7) + c.Assert([][]string{{"a", "b"}, {"c", "d"}}, qt.Any(qt.DeepEquals), []string{"c", "d"}) + +See also All and Contains. + +### CmpEquals + +CmpEquals checks equality of two arbitrary values according to the provided +compare options. DeepEquals is more commonly used when no compare options are +required. + +Example calls: + + c.Assert(list, qt.CmpEquals(cmpopts.SortSlices), []int{42, 47}) + c.Assert(got, qt.CmpEquals(), []int{42, 47}) // Same as qt.DeepEquals. + +### CodecEquals + +CodecEquals returns a checker that checks for codec value equivalence. + + func CodecEquals( + marshal func(interface{}) ([]byte, error), + unmarshal func([]byte, interface{}) error, + opts ...cmp.Option, + ) Checker + +It expects two arguments: a byte slice or a string containing some +codec-marshaled data, and a Go value. + +It uses unmarshal to unmarshal the data into an interface{} value. It marshals +the Go value using marshal, then unmarshals the result into an interface{} +value. + +It then checks that the two interface{} values are deep-equal to one another, +using CmpEquals(opts) to perform the check. + +See JSONEquals for an example of this in use. + +### Contains + +Contains checks that a map, slice, array or string contains a value. It's the +same as using Any(Equals), except that it has a special case for strings - if +the first argument is a string, the second argument must also be a string and +strings.Contains will be used. + +For example: + + c.Assert("hello world", qt.Contains, "world") + c.Assert([]int{3,5,7,99}, qt.Contains, 7) + +### ContentEquals + +ContentEquals is is like DeepEquals but any slices in the compared values will +be sorted before being compared. 
+ +For example: + + c.Assert([]string{"c", "a", "b"}, qt.ContentEquals, []string{"a", "b", "c"}) + +### DeepEquals + +DeepEquals checks that two arbitrary values are deeply equal. The comparison is +done using the github.com/google/go-cmp/cmp package. When comparing structs, by +default no exported fields are allowed. If a more sophisticated comparison is +required, use CmpEquals (see below). + +Example call: + + c.Assert(got, qt.DeepEquals, []int{42, 47}) + +### Equals + +Equals checks that two values are equal, as compared with Go's == operator. + +For instance: + + c.Assert(answer, qt.Equals, 42) + +Note that the following will fail: + + c.Assert((*sometype)(nil), qt.Equals, nil) + +Use the IsNil checker below for this kind of nil check. + +### ErrorAs + +ErrorAs checks that the error is or wraps a specific error type. If so, it +assigns it to the provided pointer. This is analogous to calling errors.As. + +For instance: + + // Checking for a specific error type + c.Assert(err, qt.ErrorAs, new(*os.PathError)) + + // Checking fields on a specific error type + var pathError *os.PathError + if c.Check(err, qt.ErrorAs, &pathError) { + c.Assert(pathError.Path, Equals, "some_path") + } + +### ErrorIs + +ErrorIs checks that the error is or wraps a specific error value. This is +analogous to calling errors.Is. + +For instance: + + c.Assert(err, qt.ErrorIs, os.ErrNotExist) + +### ErrorMatches + +ErrorMatches checks that the provided value is an error whose message matches +the provided regular expression. + +For instance: + + c.Assert(err, qt.ErrorMatches, `bad wolf .*`) + +### HasLen + +HasLen checks that the provided value has the given length. + +For instance: + + c.Assert([]int{42, 47}, qt.HasLen, 2) + c.Assert(myMap, qt.HasLen, 42) + +### Implements + +Implements checks that the provided value implements an interface. The interface +is specified with a pointer to an interface variable. 
+ +For instance: + + var rc io.ReadCloser + c.Assert(myReader, qt.Implements, &rc) + +### IsFalse + +IsFalse checks that the provided value is false. The value must have a boolean +underlying type. + +For instance: + + c.Assert(false, qt.IsFalse) + c.Assert(IsValid(), qt.IsFalse) + +### IsNil + +IsNil checks that the provided value is nil. + +For instance: + + c.Assert(got, qt.IsNil) + +As a special case, if the value is nil but implements the error interface, it is +still considered to be non-nil. This means that IsNil will fail on an error +value that happens to have an underlying nil value, because that's invariably a +mistake. See https://golang.org/doc/faq#nil_error. + +So it's just fine to check an error like this: + + c.Assert(err, qt.IsNil) + +### IsNotNil + +IsNotNil is a Checker checking that the provided value is not nil. IsNotNil is +the equivalent of qt.Not(qt.IsNil) + +For instance: + + c.Assert(got, qt.IsNotNil) + +### IsTrue + +IsTrue checks that the provided value is true. The value must have a boolean +underlying type. + +For instance: + + c.Assert(true, qt.IsTrue) + c.Assert(myBoolean(false), qt.IsTrue) + +### JSONEquals + +JSONEquals checks whether a byte slice or string is JSON-equivalent to a Go +value. See CodecEquals for more information. + +It uses DeepEquals to do the comparison. If a more sophisticated comparison is +required, use CodecEquals directly. + +For instance: + + c.Assert(`{"First": 47.11}`, qt.JSONEquals, &MyStruct{First: 47.11}) + +### Matches + +Matches checks that a string or result of calling the String method (if the +value implements fmt.Stringer) matches the provided regular expression. + +For instance: + + c.Assert("these are the voyages", qt.Matches, `these are .*`) + c.Assert(net.ParseIP("1.2.3.4"), qt.Matches, `1.*`) + +### Not + +Not returns a Checker negating the given Checker. 
+ +For instance: + + c.Assert(got, qt.Not(qt.IsNil)) + c.Assert(answer, qt.Not(qt.Equals), 42) + +### PanicMatches + +PanicMatches checks that the provided function panics with a message matching +the provided regular expression. + +For instance: + + c.Assert(func() {panic("bad wolf ...")}, qt.PanicMatches, `bad wolf .*`) + +### Satisfies + +Satisfies checks that the provided value, when used as argument of the provided +predicate function, causes the function to return true. The function must be of +type func(T) bool, having got assignable to T. + +For instance: + + // Check that an error from os.Open satisfies os.IsNotExist. + c.Assert(err, qt.Satisfies, os.IsNotExist) + + // Check that a floating point number is a not-a-number. + c.Assert(f, qt.Satisfies, math.IsNaN) + +### Deferred Execution + +The testing.TB.Cleanup helper provides the ability to defer the execution of +functions that will be run when the test completes. This is often useful for +creating OS-level resources such as temporary directories (see c.Mkdir). + +When targeting Go versions that don't have Cleanup (< 1.14), the same can be +achieved using c.Defer. In this case, to trigger the deferred behavior, calling +c.Done is required. For instance, if you create a *C instance at the top level, +you’ll have to add a defer to trigger the cleanups at the end of the test: + + defer c.Done() + +However, if you use quicktest to create a subtest, Done will be called +automatically at the end of that subtest. For example: + + func TestFoo(t *testing.T) { + c := qt.New(t) + c.Run("subtest", func(c *qt.C) { + c.Setenv("HOME", c.Mkdir()) + // Here $HOME is set the path to a newly created directory. + // At the end of the test the directory will be removed + // and HOME set back to its original value. + }) + } + +The c.Patch, c.Setenv, c.Unsetenv and c.Mkdir helpers use t.Cleanup for cleaning +up resources when available, and fall back to Defer otherwise. 
+ +For a complete API reference, see the +[package documentation](https://pkg.go.dev/github.com/frankban/quicktest#section-documentation). diff --git a/vendor/github.com/frankban/quicktest/checker.go b/vendor/github.com/frankban/quicktest/checker.go new file mode 100644 index 0000000..401437a --- /dev/null +++ b/vendor/github.com/frankban/quicktest/checker.go @@ -0,0 +1,799 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "regexp" + "strings" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/kr/pretty" +) + +// Checker is implemented by types used as part of Check/Assert invocations. +type Checker interface { + // Check checks that the obtained value (got) is correct with respect to + // the checker's arguments (args). On failure, the returned error is + // printed along with the checker arguments and any key-value pairs added + // by calling the note function. Values are pretty-printed unless they are + // of type Unquoted. + // + // When the check arguments are invalid, Check may return a BadCheck error, + // which suppresses printing of the checker arguments. Values added with + // note are still printed. + // + // If Check returns ErrSilent, neither the checker arguments nor the error + // are printed. Again, values added with note are still printed. + Check(got interface{}, args []interface{}, note func(key string, value interface{})) error + + // ArgNames returns the names of all required arguments, including the + // mandatory got argument and any additional args. + ArgNames() []string +} + +// Equals is a Checker checking equality of two comparable values. +// +// For instance: +// +// c.Assert(answer, qt.Equals, 42) +// +// Note that the following will fail: +// +// c.Assert((*sometype)(nil), qt.Equals, nil) +// +// Use the IsNil checker below for this kind of nil check. 
+var Equals Checker = &equalsChecker{ + argNames: []string{"got", "want"}, +} + +type equalsChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got == args[0]. +func (c *equalsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + defer func() { + // A panic is raised when the provided values are not comparable. + if r := recover(); r != nil { + err = fmt.Errorf("%s", r) + } + }() + want := args[0] + if got == want { + return nil + } + + // Customize error message for non-nil errors. + if _, ok := got.(error); ok && want == nil { + return errors.New("got non-nil error") + } + + // Show error types when comparing errors with different types. + if got, ok := got.(error); ok { + if want, ok := want.(error); ok { + gotType := reflect.TypeOf(got) + wantType := reflect.TypeOf(want) + if gotType != wantType { + note("got type", Unquoted(gotType.String())) + note("want type", Unquoted(wantType.String())) + } + } + return errors.New("values are not equal") + } + + // Show line diff when comparing different multi-line strings. + if got, ok := got.(string); ok { + if want, ok := want.(string); ok { + isMultiLine := func(s string) bool { + i := strings.Index(s, "\n") + return i != -1 && i < len(s)-1 + } + if isMultiLine(got) || isMultiLine(want) { + diff := cmp.Diff(strings.SplitAfter(got, "\n"), strings.SplitAfter(want, "\n")) + note("line diff (-got +want)", Unquoted(diff)) + } + } + } + + return errors.New("values are not equal") +} + +// CmpEquals returns a Checker checking equality of two arbitrary values +// according to the provided compare options. See DeepEquals as an example of +// such a checker, commonly used when no compare options are required. +// +// Example calls: +// +// c.Assert(list, qt.CmpEquals(cmpopts.SortSlices), []int{42, 47}) +// c.Assert(got, qt.CmpEquals(), []int{42, 47}) // Same as qt.DeepEquals. 
+func CmpEquals(opts ...cmp.Option) Checker { + return &cmpEqualsChecker{ + argNames: []string{"got", "want"}, + opts: opts, + } +} + +type cmpEqualsChecker struct { + argNames + opts cmp.Options +} + +// Check implements Checker.Check by checking that got == args[0] according to +// the compare options stored in the checker. +func (c *cmpEqualsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + defer func() { + // A panic is raised in some cases, for instance when trying to compare + // structs with unexported fields and neither AllowUnexported nor + // cmpopts.IgnoreUnexported are provided. + if r := recover(); r != nil { + err = BadCheckf("%s", r) + } + }() + want := args[0] + if diff := cmp.Diff(got, want, c.opts...); diff != "" { + // Only output values when the verbose flag is set. + note("error", Unquoted("values are not deep equal")) + note("diff (-got +want)", Unquoted(diff)) + note("got", SuppressedIfLong{got}) + note("want", SuppressedIfLong{want}) + return ErrSilent + } + return nil +} + +// DeepEquals is a Checker deeply checking equality of two arbitrary values. +// The comparison is done using the github.com/google/go-cmp/cmp package. +// When comparing structs, by default no exported fields are allowed. CmpEquals +// can be used when more customized compare options are required. +// +// Example call: +// +// c.Assert(got, qt.DeepEquals, []int{42, 47}) +var DeepEquals = CmpEquals() + +// ContentEquals is like DeepEquals but any slices in the compared values will +// be sorted before being compared. +var ContentEquals = CmpEquals(cmpopts.SortSlices(func(x, y interface{}) bool { + // TODO frankban: implement a proper sort function. + return pretty.Sprint(x) < pretty.Sprint(y) +})) + +// Matches is a Checker checking that the provided string or fmt.Stringer +// matches the provided regular expression pattern. 
+// +// For instance: +// +// c.Assert("these are the voyages", qt.Matches, "these are .*") +// c.Assert(net.ParseIP("1.2.3.4"), qt.Matches, "1.*") +// c.Assert("my multi-line\nnumber", qt.Matches, regexp.MustCompile(`my multi-line\n(string|number)`)) +var Matches Checker = &matchesChecker{ + argNames: []string{"got value", "regexp"}, +} + +type matchesChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is a string or a +// fmt.Stringer and that it matches args[0]. +func (c *matchesChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + pattern := args[0] + switch v := got.(type) { + case string: + return match(v, pattern, "value does not match regexp", note) + case fmt.Stringer: + return match(v.String(), pattern, "value.String() does not match regexp", note) + } + note("value", got) + return BadCheckf("value is not a string or a fmt.Stringer") +} + +func checkFirstArgIsError(got interface{}, note func(key string, value interface{})) error { + if got == nil { + return errors.New("got nil error but want non-nil") + } + _, ok := got.(error) + if !ok { + note("got", got) + return BadCheckf("first argument is not an error") + } + return nil +} + +// ErrorMatches is a Checker checking that the provided value is an error whose +// message matches the provided regular expression pattern. +// +// For instance: +// +// c.Assert(err, qt.ErrorMatches, "bad wolf .*") +// c.Assert(err, qt.ErrorMatches, regexp.MustCompile("bad wolf .*")) +var ErrorMatches Checker = &errorMatchesChecker{ + argNames: []string{"got error", "regexp"}, +} + +type errorMatchesChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is an error whose +// Error() matches args[0]. 
+func (c *errorMatchesChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + if err := checkFirstArgIsError(got, note); err != nil { + return err + } + + gotErr := got.(error) + return match(gotErr.Error(), args[0], "error does not match regexp", note) +} + +// PanicMatches is a Checker checking that the provided function panics with a +// message matching the provided regular expression pattern. +// +// For instance: +// +// c.Assert(func() {panic("bad wolf ...")}, qt.PanicMatches, "bad wolf .*") +// c.Assert(func() {panic("bad wolf ...")}, qt.PanicMatches, regexp.MustCompile(`bad wolf .*`)) +var PanicMatches Checker = &panicMatchesChecker{ + argNames: []string{"function", "regexp"}, +} + +type panicMatchesChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is a func() that panics +// with a message matching args[0]. +func (c *panicMatchesChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + f := reflect.ValueOf(got) + if f.Kind() != reflect.Func { + note("got", got) + return BadCheckf("first argument is not a function") + } + ftype := f.Type() + if ftype.NumIn() != 0 { + note("function", got) + return BadCheckf("cannot use a function receiving arguments") + } + + defer func() { + r := recover() + if r == nil { + err = errors.New("function did not panic") + return + } + msg := fmt.Sprint(r) + note("panic value", msg) + err = match(msg, args[0], "panic value does not match regexp", note) + }() + + f.Call(nil) + return nil +} + +// IsNil is a Checker checking that the provided value is nil. +// +// For instance: +// +// c.Assert(got, qt.IsNil) +// +// As a special case, if the value is nil but implements the +// error interface, it is still considered to be non-nil. +// This means that IsNil will fail on an error value that happens +// to have an underlying nil value, because that's +// invariably a mistake. 
+// See https://golang.org/doc/faq#nil_error. +var IsNil Checker = &isNilChecker{ + argNames: []string{"got"}, +} + +type isNilChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is nil. +func (c *isNilChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + if got == nil { + return nil + } + value := reflect.ValueOf(got) + _, isError := got.(error) + if canBeNil(value.Kind()) && value.IsNil() { + if isError { + // It's an error with an underlying nil value. + return fmt.Errorf("error containing nil value of type %T. See https://golang.org/doc/faq#nil_error", got) + } + return nil + } + if isError { + return errors.New("got non-nil error") + } + return errors.New("got non-nil value") +} + +// IsNotNil is a Checker checking that the provided value is not nil. +// IsNotNil is the equivalent of qt.Not(qt.IsNil) +// +// For instance: +// +// c.Assert(got, qt.IsNotNil) +var IsNotNil Checker = ¬Checker{ + Checker: IsNil, +} + +// HasLen is a Checker checking that the provided value has the given length. +// +// For instance: +// +// c.Assert([]int{42, 47}, qt.HasLen, 2) +// c.Assert(myMap, qt.HasLen, 42) +var HasLen Checker = &hasLenChecker{ + argNames: []string{"got", "want length"}, +} + +type hasLenChecker struct { + argNames +} + +// Check implements Checker.Check by checking that len(got) == args[0]. 
+func (c *hasLenChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + v := reflect.ValueOf(got) + switch v.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String: + default: + note("got", got) + return BadCheckf("first argument has no length") + } + want, ok := args[0].(int) + if !ok { + note("length", args[0]) + return BadCheckf("length is not an int") + } + length := v.Len() + note("len(got)", length) + if length != want { + return fmt.Errorf("unexpected length") + } + return nil +} + +// Implements checks that the provided value implements an interface. The +// interface is specified with a pointer to an interface variable. +// +// For instance: +// +// var rc io.ReadCloser +// c.Assert(myReader, qt.Implements, &rc) +var Implements Checker = &implementsChecker{ + argNames: []string{"got", "want interface pointer"}, +} + +type implementsChecker struct { + argNames +} + +var emptyInterface = reflect.TypeOf((*interface{})(nil)).Elem() + +// Check implements Checker.Check by checking that got implements the +// interface pointed to by args[0]. 
+func (c *implementsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + if got == nil { + note("error", Unquoted("got nil value but want non-nil")) + note("got", got) + return ErrSilent + } + + if args[0] == nil { + return BadCheckf("want a pointer to an interface variable but nil was provided") + } + wantType := reflect.TypeOf(args[0]) + if wantType.Kind() != reflect.Ptr { + note("want", Unquoted(wantType.String())) + return BadCheckf("want a pointer to an interface variable but a non-pointer value was provided") + } else if wantType.Elem().Kind() != reflect.Interface { + note("want pointer type", Unquoted(wantType.Elem().String())) + return BadCheckf("want a pointer to an interface variable but a pointer to a concrete type was provided") + } else if wantType.Elem() == emptyInterface { + note("want pointer type", Unquoted(wantType.Elem().String())) + return BadCheckf("all types implement the empty interface, want a pointer to a variable that isn't the empty interface") + } + + gotType := reflect.TypeOf(got) + if !gotType.Implements(wantType.Elem()) { + note("error", Unquoted("got value does not implement wanted interface")) + note("got", got) + note("want interface", Unquoted(wantType.Elem().String())) + return ErrSilent + } + + return nil +} + +// Satisfies is a Checker checking that the provided value, when used as +// argument of the provided predicate function, causes the function to return +// true. The function must be of type func(T) bool, having got assignable to T. +// +// For instance: +// +// // Check that an error from os.Open satisfies os.IsNotExist. +// c.Assert(err, qt.Satisfies, os.IsNotExist) +// +// // Check that a floating point number is a not-a-number. 
+// c.Assert(f, qt.Satisfies, math.IsNaN) +var Satisfies Checker = &satisfiesChecker{ + argNames: []string{"arg", "predicate function"}, +} + +type satisfiesChecker struct { + argNames +} + +// Check implements Checker.Check by checking that args[0](got) == true. +func (c *satisfiesChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + // Original code at + // . + predicate := args[0] + f := reflect.ValueOf(predicate) + ftype := f.Type() + if ftype.Kind() != reflect.Func || ftype.NumIn() != 1 || ftype.NumOut() != 1 || ftype.Out(0).Kind() != reflect.Bool { + note("predicate function", predicate) + return BadCheckf("predicate function is not a func(T) bool") + } + v, t := reflect.ValueOf(got), ftype.In(0) + if !v.IsValid() { + if !canBeNil(t.Kind()) { + note("predicate function", predicate) + return BadCheckf("cannot use nil as type %v in argument to predicate function", t) + } + v = reflect.Zero(t) + } else if !v.Type().AssignableTo(t) { + note("arg", got) + note("predicate function", predicate) + return BadCheckf("cannot use value of type %v as type %v in argument to predicate function", v.Type(), t) + } + if f.Call([]reflect.Value{v})[0].Interface().(bool) { + return nil + } + return fmt.Errorf("value does not satisfy predicate function") +} + +// IsTrue is a Checker checking that the provided value is true. +// The value must have a boolean underlying type. +// +// For instance: +// +// c.Assert(true, qt.IsTrue) +// c.Assert(myBoolean(false), qt.IsTrue) +var IsTrue Checker = &boolChecker{ + want: true, +} + +// IsFalse is a Checker checking that the provided value is false. +// The value must have a boolean underlying type. +// +// For instance: +// +// c.Assert(false, qt.IsFalse) +// c.Assert(IsValid(), qt.IsFalse) +var IsFalse Checker = &boolChecker{ + want: false, +} + +type boolChecker struct { + want bool +} + +// Check implements Checker.Check by checking that got == c.want. 
+func (c *boolChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + v := reflect.ValueOf(got) + if v.IsValid() && v.Kind() == reflect.Bool { + if v.Bool() != c.want { + return fmt.Errorf("value is not %v", c.want) + } + return nil + } + note("value", got) + return BadCheckf("value does not have a bool underlying type") +} + +// ArgNames implements Checker.ArgNames. +func (c *boolChecker) ArgNames() []string { + return []string{"got"} +} + +// Not returns a Checker negating the given Checker. +// +// For instance: +// +// c.Assert(got, qt.Not(qt.IsNil)) +// c.Assert(answer, qt.Not(qt.Equals), 42) +func Not(checker Checker) Checker { + return ¬Checker{ + Checker: checker, + } +} + +type notChecker struct { + Checker +} + +// Check implements Checker.Check by checking that the stored checker fails. +func (c *notChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + if nc, ok := c.Checker.(*notChecker); ok { + return nc.Checker.Check(got, args, note) + } + err = c.Checker.Check(got, args, note) + if IsBadCheck(err) { + return err + } + if err != nil { + return nil + } + if c.Checker == IsNil { + return errors.New("got nil value but want non-nil") + } + return errors.New("unexpected success") +} + +// Contains is a checker that checks that a map, slice, array +// or string contains a value. It's the same as using +// Any(Equals), except that it has a special case +// for strings - if the first argument is a string, +// the second argument must also be a string +// and strings.Contains will be used. +// +// For example: +// +// c.Assert("hello world", qt.Contains, "world") +// c.Assert([]int{3,5,7,99}, qt.Contains, 7) +var Contains Checker = &containsChecker{ + argNames: []string{"container", "want"}, +} + +type containsChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got contains args[0]. 
+func (c *containsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + if got, ok := got.(string); ok { + want, ok := args[0].(string) + if !ok { + return BadCheckf("strings can only contain strings, not %T", args[0]) + } + if strings.Contains(got, want) { + return nil + } + return errors.New("no substring match found") + } + return Any(Equals).Check(got, args, note) +} + +// Any returns a Checker that uses the given checker to check elements +// of a slice or array or the values from a map. It succeeds if any element +// passes the check. +// +// For example: +// +// c.Assert([]int{3,5,7,99}, qt.Any(qt.Equals), 7) +// c.Assert([][]string{{"a", "b"}, {"c", "d"}}, qt.Any(qt.DeepEquals), []string{"c", "d"}) +// +// See also All and Contains. +func Any(c Checker) Checker { + return &anyChecker{ + argNames: append([]string{"container"}, c.ArgNames()[1:]...), + elemChecker: c, + } +} + +type anyChecker struct { + argNames + elemChecker Checker +} + +// Check implements Checker.Check by checking that one of the elements of +// got passes the c.elemChecker check. +func (c *anyChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + iter, err := newIter(got) + if err != nil { + return BadCheckf("%v", err) + } + for iter.next() { + // For the time being, discard the notes added by the sub-checker, + // because it's not clear what a good behaviour would be. + // Should we print all the failed check for all elements? If there's only + // one element in the container, the answer is probably yes, + // but let's leave it for now. 
+ err := c.elemChecker.Check( + iter.value().Interface(), + args, + func(key string, value interface{}) {}, + ) + if err == nil { + return nil + } + if IsBadCheck(err) { + return BadCheckf("at %s: %v", iter.key(), err) + } + } + return errors.New("no matching element found") +} + +// All returns a Checker that uses the given checker to check elements +// of slice or array or the values of a map. It succeeds if all elements +// pass the check. +// On failure it prints the error from the first index that failed. +// +// For example: +// +// c.Assert([]int{3, 5, 8}, qt.All(qt.Not(qt.Equals)), 0) +// c.Assert([][]string{{"a", "b"}, {"a", "b"}}, qt.All(qt.DeepEquals), []string{"c", "d"}) +// +// See also Any and Contains. +func All(c Checker) Checker { + return &allChecker{ + argNames: append([]string{"container"}, c.ArgNames()[1:]...), + elemChecker: c, + } +} + +type allChecker struct { + argNames + elemChecker Checker +} + +// Check implement Checker.Check by checking that all the elements of got +// pass the c.elemChecker check. +func (c *allChecker) Check(got interface{}, args []interface{}, notef func(key string, value interface{})) error { + iter, err := newIter(got) + if err != nil { + return BadCheckf("%v", err) + } + for iter.next() { + // Store any notes added by the checker so + // we can add our own note at the start + // to say which element failed. + var notes []note + err := c.elemChecker.Check( + iter.value().Interface(), + args, + func(key string, val interface{}) { + notes = append(notes, note{key, val}) + }, + ) + if err == nil { + continue + } + if IsBadCheck(err) { + return BadCheckf("at %s: %v", iter.key(), err) + } + notef("error", Unquoted("mismatch at "+iter.key())) + if err != ErrSilent { + // If the error's not silent, the checker is expecting + // the caller to print the error and the value that failed. 
+ notef("error", Unquoted(err.Error())) + notef("first mismatched element", iter.value().Interface()) + } + for _, n := range notes { + notef(n.key, n.value) + } + return ErrSilent + } + return nil +} + +// JSONEquals is a checker that checks whether a byte slice +// or string is JSON-equivalent to a Go value. See CodecEquals for +// more information. +// +// It uses DeepEquals to do the comparison. If a more sophisticated +// comparison is required, use CodecEquals directly. +// +// For instance: +// +// c.Assert(`{"First": 47.11}`, qt.JSONEquals, &MyStruct{First: 47.11}) +var JSONEquals = CodecEquals(json.Marshal, json.Unmarshal) + +type codecEqualChecker struct { + argNames + marshal func(interface{}) ([]byte, error) + unmarshal func([]byte, interface{}) error + deepEquals Checker +} + +// CodecEquals returns a checker that checks for codec value equivalence. +// +// It expects two arguments: a byte slice or a string containing some +// codec-marshaled data, and a Go value. +// +// It uses unmarshal to unmarshal the data into an interface{} value. +// It marshals the Go value using marshal, then unmarshals the result into +// an interface{} value. +// +// It then checks that the two interface{} values are deep-equal to one +// another, using CmpEquals(opts) to perform the check. +// +// See JSONEquals for an example of this in use. 
+func CodecEquals( + marshal func(interface{}) ([]byte, error), + unmarshal func([]byte, interface{}) error, + opts ...cmp.Option, +) Checker { + return &codecEqualChecker{ + argNames: argNames{"got", "want"}, + marshal: marshal, + unmarshal: unmarshal, + deepEquals: CmpEquals(opts...), + } +} + +func (c *codecEqualChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + var gotContent []byte + switch got := got.(type) { + case string: + gotContent = []byte(got) + case []byte: + gotContent = got + default: + return BadCheckf("expected string or byte, got %T", got) + } + wantContent := args[0] + wantContentBytes, err := c.marshal(wantContent) + if err != nil { + return BadCheckf("cannot marshal expected contents: %v", err) + } + var wantContentVal interface{} + if err := c.unmarshal(wantContentBytes, &wantContentVal); err != nil { + return BadCheckf("cannot unmarshal expected contents: %v", err) + } + var gotContentVal interface{} + if err := c.unmarshal([]byte(gotContent), &gotContentVal); err != nil { + return fmt.Errorf("cannot unmarshal obtained contents: %v; %q", err, gotContent) + } + return c.deepEquals.Check(gotContentVal, []interface{}{wantContentVal}, note) +} + +// argNames helps implementing Checker.ArgNames. +type argNames []string + +// ArgNames implements Checker.ArgNames by returning the argument names. +func (a argNames) ArgNames() []string { + return a +} + +// match checks that the given error message matches the given pattern. 
+func match(got string, pattern interface{}, msg string, note func(key string, value interface{})) error { + if actualRegex, ok := pattern.(*regexp.Regexp); ok { + if actualRegex.MatchString(got) { + return nil + } + return errors.New(msg) + } + regex, ok := pattern.(string) + if !ok { + note("regexp", pattern) + return BadCheckf("regexp is not a string") + } + matches, err := regexp.MatchString("^("+regex+")$", got) + if err != nil { + note("regexp", regex) + return BadCheckf("cannot compile regexp: %s", err) + } + if matches { + return nil + } + return errors.New(msg) +} + +// canBeNil reports whether a value or type of the given kind can be nil. +func canBeNil(k reflect.Kind) bool { + switch k { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return true + } + return false +} diff --git a/vendor/github.com/frankban/quicktest/checker_err.go b/vendor/github.com/frankban/quicktest/checker_err.go new file mode 100644 index 0000000..5033cef --- /dev/null +++ b/vendor/github.com/frankban/quicktest/checker_err.go @@ -0,0 +1,92 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "errors" + "fmt" +) + +// ErrorAs checks that the error is or wraps a specific error type. If so, it +// assigns it to the provided pointer. This is analogous to calling errors.As. +// +// For instance: +// +// // Checking for a specific error type +// c.Assert(err, qt.ErrorAs, new(*os.PathError)) +// +// // Checking fields on a specific error type +// var pathError *os.PathError +// if c.Check(err, qt.ErrorAs, &pathError) { +// c.Assert(pathError.Path, qt.Equals, "some_path") +// } +var ErrorAs Checker = &errorAsChecker{ + argNames: []string{"got", "as"}, +} + +type errorAsChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is an error whose error +// chain matches args[0] and assigning it to args[0]. 
+func (c *errorAsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) { + if err := checkFirstArgIsError(got, note); err != nil { + return err + } + + gotErr := got.(error) + defer func() { + // A panic is raised when the target is not a pointer to an interface + // or error. + if r := recover(); r != nil { + err = BadCheckf("%s", r) + } + }() + as := args[0] + if errors.As(gotErr, as) { + return nil + } + + note("error", Unquoted("wanted type is not found in error chain")) + note("got", gotErr) + note("as", Unquoted(fmt.Sprintf("%T", as))) + return ErrSilent +} + +// ErrorIs checks that the error is or wraps a specific error value. This is +// analogous to calling errors.Is. +// +// For instance: +// +// c.Assert(err, qt.ErrorIs, os.ErrNotExist) +var ErrorIs Checker = &errorIsChecker{ + argNames: []string{"got", "want"}, +} + +type errorIsChecker struct { + argNames +} + +// Check implements Checker.Check by checking that got is an error whose error +// chain matches args[0]. +func (c *errorIsChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) error { + if got == nil && args[0] == nil { + return nil + } + if err := checkFirstArgIsError(got, note); err != nil { + return err + } + + gotErr := got.(error) + wantErr, ok := args[0].(error) + if !ok && args[0] != nil { + note("want", args[0]) + return BadCheckf("second argument is not an error") + } + + if !errors.Is(gotErr, wantErr) { + return errors.New("wanted error is not found in error chain") + } + return nil +} diff --git a/vendor/github.com/frankban/quicktest/comment.go b/vendor/github.com/frankban/quicktest/comment.go new file mode 100644 index 0000000..e9d7230 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/comment.go @@ -0,0 +1,31 @@ +// Licensed under the MIT license, see LICENSE file for details. 
+ +package quicktest + +import "fmt" + +// Commentf returns a test comment whose output is formatted according to +// the given format specifier and args. It may be provided as the last argument +// to any check or assertion and will be displayed if the check or assertion +// fails. For instance: +// +// c.Assert(a, qt.Equals, 42, qt.Commentf("answer is not %d", 42)) +func Commentf(format string, args ...interface{}) Comment { + return Comment{ + format: format, + args: args, + } +} + +// Comment represents additional information on a check or an assertion which is +// displayed when the check or assertion fails. +type Comment struct { + format string + args []interface{} +} + +// String outputs a string formatted according to the stored format specifier +// and args. +func (c Comment) String() string { + return fmt.Sprintf(c.format, c.args...) +} diff --git a/vendor/github.com/frankban/quicktest/doc.go b/vendor/github.com/frankban/quicktest/doc.go new file mode 100644 index 0000000..16d7301 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/doc.go @@ -0,0 +1,340 @@ +// Licensed under the MIT license, see LICENSE file for details. + +/* +Package quicktest provides a collection of Go helpers for writing tests. 
+ +Quicktest helpers can be easily integrated inside regular Go tests, for +instance: + + import qt "github.com/frankban/quicktest" + + func TestFoo(t *testing.T) { + t.Run("numbers", func(t *testing.T) { + c := qt.New(t) + numbers, err := somepackage.Numbers() + c.Assert(err, qt.IsNil) + c.Assert(numbers, qt.DeepEquals, []int{42, 47}) + }) + t.Run("bad wolf error", func(t *testing.T) { + c := qt.New(t) + numbers, err := somepackage.Numbers() + c.Assert(err, qt.ErrorMatches, "bad wolf") + }) + t.Run("nil", func(t *testing.T) { + c := qt.New(t) + got := somepackage.MaybeNil() + c.Assert(got, qt.IsNil, qt.Commentf("value: %v", somepackage.Value)) + }) + } + +# Assertions + +An assertion looks like this, where qt.Equals could be replaced by any +available checker. If the assertion fails, the underlying Fatal method is +called to describe the error and abort the test. + + c := qt.New(t) + c.Assert(someValue, qt.Equals, wantValue) + +If you don’t want to abort on failure, use Check instead, which calls Error +instead of Fatal: + + c.Check(someValue, qt.Equals, wantValue) + +For really short tests, the extra line for instantiating *qt.C can be avoided: + + qt.Assert(t, someValue, qt.Equals, wantValue) + qt.Check(t, someValue, qt.Equals, wantValue) + +The library provides some base checkers like Equals, DeepEquals, Matches, +ErrorMatches, IsNil and others. More can be added by implementing the Checker +interface. Below, we list the checkers implemented by the package in alphabetical +order. + +# All + +All returns a Checker that uses the given checker to check elements of slice or +array or the values of a map. It succeeds if all elements pass the check. +On failure it prints the error from the first index that failed. + +For example: + + c.Assert([]int{3, 5, 8}, qt.All(qt.Not(qt.Equals)), 0) + c.Assert([][]string{{"a", "b"}, {"a", "b"}}, qt.All(qt.DeepEquals), []string{"c", "d"}) + +See also Any and Contains. 
+ +# Any + +Any returns a Checker that uses the given checker to check elements of a slice +or array or the values from a map. It succeeds if any element passes the check. + +For example: + + c.Assert([]int{3,5,7,99}, qt.Any(qt.Equals), 7) + c.Assert([][]string{{"a", "b"}, {"c", "d"}}, qt.Any(qt.DeepEquals), []string{"c", "d"}) + +See also All and Contains. + +# CmpEquals + +CmpEquals checks equality of two arbitrary values according to the provided +compare options. DeepEquals is more commonly used when no compare options are +required. + +Example calls: + + c.Assert(list, qt.CmpEquals(cmpopts.SortSlices), []int{42, 47}) + c.Assert(got, qt.CmpEquals(), []int{42, 47}) // Same as qt.DeepEquals. + +# CodecEquals + +CodecEquals returns a checker that checks for codec value equivalence. + + func CodecEquals( + marshal func(interface{}) ([]byte, error), + unmarshal func([]byte, interface{}) error, + opts ...cmp.Option, + ) Checker + +It expects two arguments: a byte slice or a string containing some +codec-marshaled data, and a Go value. + +It uses unmarshal to unmarshal the data into an interface{} value. +It marshals the Go value using marshal, then unmarshals the result into +an interface{} value. + +It then checks that the two interface{} values are deep-equal to one another, +using CmpEquals(opts) to perform the check. + +See JSONEquals for an example of this in use. + +# Contains + +Contains checks that a map, slice, array or string contains a value. It's the +same as using Any(Equals), except that it has a special case for strings - if +the first argument is a string, the second argument must also be a string and +strings.Contains will be used. + +For example: + + c.Assert("hello world", qt.Contains, "world") + c.Assert([]int{3,5,7,99}, qt.Contains, 7) + +# ContentEquals + +ContentEquals is is like DeepEquals but any slices in the compared values will be sorted before being compared. 
+ +For example: + + c.Assert([]string{"c", "a", "b"}, qt.ContentEquals, []string{"a", "b", "c"}) + +# DeepEquals + +DeepEquals checks that two arbitrary values are deeply equal. +The comparison is done using the github.com/google/go-cmp/cmp package. +When comparing structs, by default no exported fields are allowed. +If a more sophisticated comparison is required, use CmpEquals (see below). + +Example call: + + c.Assert(got, qt.DeepEquals, []int{42, 47}) + +# Equals + +Equals checks that two values are equal, as compared with Go's == operator. + +For instance: + + c.Assert(answer, qt.Equals, 42) + +Note that the following will fail: + + c.Assert((*sometype)(nil), qt.Equals, nil) + +Use the IsNil checker below for this kind of nil check. + +# ErrorAs + +ErrorAs checks that the error is or wraps a specific error type. If so, it +assigns it to the provided pointer. This is analogous to calling errors.As. + +For instance: + + // Checking for a specific error type + c.Assert(err, qt.ErrorAs, new(*os.PathError)) + + // Checking fields on a specific error type + var pathError *os.PathError + if c.Check(err, qt.ErrorAs, &pathError) { + c.Assert(pathError.Path, Equals, "some_path") + } + +# ErrorIs + +ErrorIs checks that the error is or wraps a specific error value. This is +analogous to calling errors.Is. + +For instance: + + c.Assert(err, qt.ErrorIs, os.ErrNotExist) + +# ErrorMatches + +ErrorMatches checks that the provided value is an error whose message matches +the provided regular expression. + +For instance: + + c.Assert(err, qt.ErrorMatches, `bad wolf .*`) + +# HasLen + +HasLen checks that the provided value has the given length. + +For instance: + + c.Assert([]int{42, 47}, qt.HasLen, 2) + c.Assert(myMap, qt.HasLen, 42) + +# Implements + +Implements checks that the provided value implements an interface. The +interface is specified with a pointer to an interface variable. 
+ +For instance: + + var rc io.ReadCloser + c.Assert(myReader, qt.Implements, &rc) + +# IsFalse + +IsFalse checks that the provided value is false. +The value must have a boolean underlying type. + +For instance: + + c.Assert(false, qt.IsFalse) + c.Assert(IsValid(), qt.IsFalse) + +# IsNil + +IsNil checks that the provided value is nil. + +For instance: + + c.Assert(got, qt.IsNil) + +As a special case, if the value is nil but implements the +error interface, it is still considered to be non-nil. +This means that IsNil will fail on an error value that happens +to have an underlying nil value, because that's +invariably a mistake. See https://golang.org/doc/faq#nil_error. + +So it's just fine to check an error like this: + + c.Assert(err, qt.IsNil) + +# IsNotNil + +IsNotNil is a Checker checking that the provided value is not nil. +IsNotNil is the equivalent of qt.Not(qt.IsNil) + +For instance: + + c.Assert(got, qt.IsNotNil) + +# IsTrue + +IsTrue checks that the provided value is true. +The value must have a boolean underlying type. + +For instance: + + c.Assert(true, qt.IsTrue) + c.Assert(myBoolean(false), qt.IsTrue) + +# JSONEquals + +JSONEquals checks whether a byte slice or string is JSON-equivalent to a Go +value. See CodecEquals for more information. + +It uses DeepEquals to do the comparison. If a more sophisticated comparison is +required, use CodecEquals directly. + +For instance: + + c.Assert(`{"First": 47.11}`, qt.JSONEquals, &MyStruct{First: 47.11}) + +# Matches + +Matches checks that a string or result of calling the String method +(if the value implements fmt.Stringer) matches the provided regular expression. + +For instance: + + c.Assert("these are the voyages", qt.Matches, `these are .*`) + c.Assert(net.ParseIP("1.2.3.4"), qt.Matches, `1.*`) + +# Not + +Not returns a Checker negating the given Checker. 
+ +For instance: + + c.Assert(got, qt.Not(qt.IsNil)) + c.Assert(answer, qt.Not(qt.Equals), 42) + +# PanicMatches + +PanicMatches checks that the provided function panics with a message matching +the provided regular expression. + +For instance: + + c.Assert(func() {panic("bad wolf ...")}, qt.PanicMatches, `bad wolf .*`) + +# Satisfies + +Satisfies checks that the provided value, when used as argument of the provided +predicate function, causes the function to return true. The function must be of +type func(T) bool, having got assignable to T. + +For instance: + + // Check that an error from os.Open satisfies os.IsNotExist. + c.Assert(err, qt.Satisfies, os.IsNotExist) + + // Check that a floating point number is a not-a-number. + c.Assert(f, qt.Satisfies, math.IsNaN) + +# Deferred Execution + +The testing.TB.Cleanup helper provides the ability to defer the execution of +functions that will be run when the test completes. This is often useful for +creating OS-level resources such as temporary directories (see c.Mkdir). + +When targeting Go versions that don't have Cleanup (< 1.14), the same can be +achieved using c.Defer. In this case, to trigger the deferred behavior, calling +c.Done is required. For instance, if you create a *C instance at the top level, +you’ll have to add a defer to trigger the cleanups at the end of the test: + + defer c.Done() + +However, if you use quicktest to create a subtest, Done will be called +automatically at the end of that subtest. For example: + + func TestFoo(t *testing.T) { + c := qt.New(t) + c.Run("subtest", func(c *qt.C) { + c.Setenv("HOME", c.Mkdir()) + // Here $HOME is set the path to a newly created directory. + // At the end of the test the directory will be removed + // and HOME set back to its original value. + }) + } + +The c.Patch, c.Setenv, c.Unsetenv and c.Mkdir helpers use t.Cleanup for +cleaning up resources when available, and fall back to Defer otherwise. 
+*/ +package quicktest diff --git a/vendor/github.com/frankban/quicktest/error.go b/vendor/github.com/frankban/quicktest/error.go new file mode 100644 index 0000000..7b44a3e --- /dev/null +++ b/vendor/github.com/frankban/quicktest/error.go @@ -0,0 +1,35 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "fmt" +) + +// BadCheckf returns an error used to report a problem with the checker +// invocation or testing execution itself (like wrong number or type of +// arguments) rather than a real Check or Assert failure. +// This helper can be used when implementing checkers. +func BadCheckf(format string, a ...interface{}) error { + e := badCheck(fmt.Sprintf(format, a...)) + return &e +} + +// IsBadCheck reports whether the given error has been created by BadCheckf. +// This helper can be used when implementing checkers. +func IsBadCheck(err error) bool { + _, ok := err.(*badCheck) + return ok +} + +type badCheck string + +// Error implements the error interface. +func (e *badCheck) Error() string { + return "bad check: " + string(*e) +} + +// ErrSilent is the error used when there is no need to include in the failure +// output the "error" and "check" keys and all the keys automatically +// added for args. This helper can be used when implementing checkers. +var ErrSilent = fmt.Errorf("silent failure") diff --git a/vendor/github.com/frankban/quicktest/format.go b/vendor/github.com/frankban/quicktest/format.go new file mode 100644 index 0000000..a7c88b0 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/format.go @@ -0,0 +1,91 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "fmt" + "reflect" + "strconv" + "strings" + "unicode/utf8" + + "github.com/kr/pretty" +) + +// Format formats the given value as a string. It is used to print values in +// test failures unless that's changed by calling C.SetFormat. 
+func Format(v interface{}) string { + switch v := v.(type) { + case error: + s, ok := checkStringCall(v, v.Error) + if !ok { + return "e" + } + if msg := fmt.Sprintf("%+v", v); msg != s { + // The error has formatted itself with additional information. + // Leave that as is. + return msg + } + return "e" + quoteString(s) + case fmt.Stringer: + s, ok := checkStringCall(v, v.String) + if !ok { + return "s" + } + return "s" + quoteString(s) + case string: + return quoteString(v) + case uintptr, uint, uint8, uint16, uint32, uint64: + // Use decimal base (rather than hexadecimal) for representing uint types. + return fmt.Sprintf("%T(%d)", v, v) + } + if bytes, ok := byteSlice(v); ok && bytes != nil && utf8.Valid(bytes) { + // It's a top level slice of bytes that's also valid UTF-8. + // Ideally, this would happen at deeper levels too, + // but this is sufficient for some significant cases + // (json.RawMessage for example). + return fmt.Sprintf("%T(%s)", v, quoteString(string(bytes))) + } + // The pretty.Sprint equivalent does not quote string values. + return fmt.Sprintf("%# v", pretty.Formatter(v)) +} + +func byteSlice(x interface{}) ([]byte, bool) { + v := reflect.ValueOf(x) + if !v.IsValid() { + return nil, false + } + t := v.Type() + if t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 { + return v.Bytes(), true + } + return nil, false +} + +func quoteString(s string) string { + // TODO think more about what to do about multi-line strings. + if strings.Contains(s, `"`) && !strings.Contains(s, "\n") && strconv.CanBackquote(s) { + return "`" + s + "`" + } + return strconv.Quote(s) +} + +// checkStringCall calls f and returns its result, and reports if the call +// succeeded without panicking due to a nil pointer. +// If f panics and v is a nil pointer, it returns false. 
+func checkStringCall(v interface{}, f func() string) (s string, ok bool) { + defer func() { + err := recover() + if err == nil { + return + } + if val := reflect.ValueOf(v); val.Kind() == reflect.Ptr && val.IsNil() { + ok = false + return + } + panic(err) + }() + return f(), true +} + +type formatFunc func(interface{}) string diff --git a/vendor/github.com/frankban/quicktest/iter.go b/vendor/github.com/frankban/quicktest/iter.go new file mode 100644 index 0000000..28abd8a --- /dev/null +++ b/vendor/github.com/frankban/quicktest/iter.go @@ -0,0 +1,55 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "fmt" + "reflect" +) + +// containerIter provides an interface for iterating over a container +// (map, slice or array). +type containerIter interface { + // next advances to the next item in the container. + next() bool + // key returns the current key as a string. + key() string + // value returns the current value. + value() reflect.Value +} + +// newIter returns an iterator over x which must be a map, slice +// or array. +func newIter(x interface{}) (containerIter, error) { + v := reflect.ValueOf(x) + switch v.Kind() { + case reflect.Map: + return newMapIter(v), nil + case reflect.Slice, reflect.Array: + return &sliceIter{ + index: -1, + v: v, + }, nil + default: + return nil, fmt.Errorf("map, slice or array required") + } +} + +// sliceIter implements containerIter for slices and arrays. 
+type sliceIter struct { + v reflect.Value + index int +} + +func (i *sliceIter) next() bool { + i.index++ + return i.index < i.v.Len() +} + +func (i *sliceIter) value() reflect.Value { + return i.v.Index(i.index) +} + +func (i *sliceIter) key() string { + return fmt.Sprintf("index %d", i.index) +} diff --git a/vendor/github.com/frankban/quicktest/mapiter.go b/vendor/github.com/frankban/quicktest/mapiter.go new file mode 100644 index 0000000..b0e2b6f --- /dev/null +++ b/vendor/github.com/frankban/quicktest/mapiter.go @@ -0,0 +1,29 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "fmt" + "reflect" +) + +func newMapIter(v reflect.Value) containerIter { + return mapIter{v.MapRange()} +} + +// mapIter implements containerIter for maps. +type mapIter struct { + iter *reflect.MapIter +} + +func (i mapIter) next() bool { + return i.iter.Next() +} + +func (i mapIter) key() string { + return fmt.Sprintf("key %#v", i.iter.Key()) +} + +func (i mapIter) value() reflect.Value { + return i.iter.Value() +} diff --git a/vendor/github.com/frankban/quicktest/patch.go b/vendor/github.com/frankban/quicktest/patch.go new file mode 100644 index 0000000..282894b --- /dev/null +++ b/vendor/github.com/frankban/quicktest/patch.go @@ -0,0 +1,72 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "io/ioutil" + "os" + "reflect" +) + +// Patch sets a variable to a temporary value for the duration of the test. +// +// It sets the value pointed to by the given destination to the given +// value, which must be assignable to the element type of the destination. +// +// At the end of the test (see "Deferred execution" in the package docs), the +// destination is set back to its original value. 
+func (c *C) Patch(dest, value interface{}) { + destv := reflect.ValueOf(dest).Elem() + oldv := reflect.New(destv.Type()).Elem() + oldv.Set(destv) + valuev := reflect.ValueOf(value) + if !valuev.IsValid() { + // This isn't quite right when the destination type is not + // nilable, but it's better than the complex alternative. + valuev = reflect.Zero(destv.Type()) + } + destv.Set(valuev) + c.cleanup(func() { + destv.Set(oldv) + }) +} + +// Unsetenv unsets an environment variable for the duration of a test. +func (c *C) Unsetenv(name string) { + c.Setenv(name, "") + os.Unsetenv(name) +} + +// Mkdir makes a temporary directory and returns its name. +// +// At the end of the test (see "Deferred execution" in the package docs), the +// directory and its contents are removed. +// +// Deprecated: in Go >= 1.15 use testing.TB.TempDir instead. +func (c *C) Mkdir() string { + td, ok := c.TB.(interface { + TempDir() string + }) + if ok { + return td.TempDir() + } + name, err := ioutil.TempDir("", "quicktest-") + c.Assert(err, Equals, nil) + c.cleanup(func() { + if err := os.RemoveAll(name); err != nil { + // Don't call c.Check because the stack traverse logic won't + // print the source location, so just log instead. + c.Errorf("quicktest cannot remove temporary testing directory: %v", err) + } + }) + return name +} + +// cleanup uses Cleanup when it can, falling back to using Defer. +func (c *C) cleanup(f func()) { + if tb, ok := c.TB.(cleaner); ok { + tb.Cleanup(f) + } else { + c.Defer(f) + } +} diff --git a/vendor/github.com/frankban/quicktest/patch_go1.14.go b/vendor/github.com/frankban/quicktest/patch_go1.14.go new file mode 100644 index 0000000..8c74432 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/patch_go1.14.go @@ -0,0 +1,42 @@ +// Licensed under the MIT license, see LICENSE file for details. + +//go:build go1.14 +// +build go1.14 + +package quicktest + +import "testing" + +// Patch sets a variable to a temporary value for the duration of the test. 
+// +// It sets the value pointed to by the given destination to the given value, +// which must be assignable to the element type of the destination. +// +// At the end of the test the destination is set back to its original value +// using t.Cleanup. +// +// The top level Patch function is only available on Go >= 1.14. Use (*C).Patch +// when on prior versions. +func Patch(t testing.TB, dest, value interface{}) { + New(t).Patch(dest, value) +} + +// Setenv sets an environment variable to a temporary value for the duration of +// the test. +// +// At the end of the test the environment variable is returned to its original +// value using t.Cleanup. +// +// The top level Setenv function is only available on Go >= 1.14. Use +// (*C).Setenv when on prior versions. +func Setenv(t testing.TB, name, val string) { + New(t).Setenv(name, val) +} + +// Unsetenv unsets an environment variable for the duration of a test. +// +// The top level Unsetenv function is only available on Go >= 1.14. Use +// (*C).Unsetenv when on prior versions. +func Unsetenv(t testing.TB, name string) { + New(t).Unsetenv(name) +} diff --git a/vendor/github.com/frankban/quicktest/patch_go1.17.go b/vendor/github.com/frankban/quicktest/patch_go1.17.go new file mode 100644 index 0000000..b43469e --- /dev/null +++ b/vendor/github.com/frankban/quicktest/patch_go1.17.go @@ -0,0 +1,27 @@ +// Licensed under the MIT license, see LICENSE file for details. + +//go:build !go1.17 +// +build !go1.17 + +package quicktest + +import "os" + +// Setenv sets an environment variable to a temporary value for the +// duration of the test. +// +// At the end of the test (see "Deferred execution" in the package docs), the +// environment variable is returned to its original value. +// +// This is the equivalent of testing.T.Setenv introduced in Go 1.17. 
+func (c *C) Setenv(name, val string) { + oldVal, oldOK := os.LookupEnv(name) + os.Setenv(name, val) + c.cleanup(func() { + if oldOK { + os.Setenv(name, oldVal) + } else { + os.Unsetenv(name) + } + }) +} diff --git a/vendor/github.com/frankban/quicktest/quicktest.go b/vendor/github.com/frankban/quicktest/quicktest.go new file mode 100644 index 0000000..65243cf --- /dev/null +++ b/vendor/github.com/frankban/quicktest/quicktest.go @@ -0,0 +1,370 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "fmt" + "reflect" + "strings" + "sync" + "testing" +) + +// Check runs the given check using the provided t and continues execution in +// case of failure. For instance: +// +// qt.Check(t, answer, qt.Equals, 42) +// qt.Check(t, got, qt.IsNil, qt.Commentf("iteration %d", i)) +// +// Additional args (not consumed by the checker), when provided, are included as +// comments in the failure output when the check fails. +func Check(t testing.TB, got interface{}, checker Checker, args ...interface{}) bool { + t.Helper() + return New(t).Check(got, checker, args...) +} + +// Assert runs the given check using the provided t and stops execution in case +// of failure. For instance: +// +// qt.Assert(t, got, qt.DeepEquals, []int{42, 47}) +// qt.Assert(t, got, qt.ErrorMatches, "bad wolf .*", qt.Commentf("a comment")) +// +// Additional args (not consumed by the checker), when provided, are included as +// comments in the failure output when the check fails. +func Assert(t testing.TB, got interface{}, checker Checker, args ...interface{}) bool { + t.Helper() + return New(t).Assert(got, checker, args...) +} + +// New returns a new checker instance that uses t to fail the test when checks +// fail. It only ever calls the Fatal, Error and (when available) Run methods +// of t. For instance. 
+// +// func TestFoo(t *testing.T) { +// t.Run("A=42", func(t *testing.T) { +// c := qt.New(t) +// c.Assert(a, qt.Equals, 42) +// }) +// } +// +// The library already provides some base checkers, and more can be added by +// implementing the Checker interface. +// +// If there is a likelihood that Defer will be called, then +// a call to Done should be deferred after calling New. +// For example: +// +// func TestFoo(t *testing.T) { +// c := qt.New(t) +// defer c.Done() +// c.Setenv("HOME", "/non-existent") +// c.Assert(os.Getenv("HOME"), qt.Equals, "/non-existent") +// }) +// +// A value of C that's has a non-nil TB field but is otherwise zero is valid. +// So: +// +// c := &qt.C{TB: t} +// +// is valid a way to create a C value; it's exactly the same as: +// +// c := qt.New(t) +// +// Methods on C may be called concurrently, assuming the underlying +// `testing.TB` implementation also allows that. +func New(t testing.TB) *C { + return &C{ + TB: t, + } +} + +// C is a quicktest checker. It embeds a testing.TB value and provides +// additional checking functionality. If an Assert or Check operation fails, it +// uses the wrapped TB value to fail the test appropriately. +type C struct { + testing.TB + + mu sync.Mutex + doneNeeded bool + deferred func() + format formatFunc +} + +// cleaner is implemented by testing.TB on Go 1.14 and later. +type cleaner interface { + Cleanup(func()) +} + +// Defer registers a function to be called when c.Done is +// called. Deferred functions will be called in last added, first called +// order. If c.Done is not called by the end of the test, the test +// may panic. Note that if Cleanup is called, there is no +// need to call Done. +// +// Deprecated: in Go >= 1.14 use testing.TB.Cleanup instead. 
+func (c *C) Defer(f func()) { + c.mu.Lock() + defer c.mu.Unlock() + if cleaner, ok := c.TB.(cleaner); ok { + // Use TB.Cleanup when available, but add a check + // that Done has been called so that we don't run + // into unexpected Go version incompatibilities. + if !c.doneNeeded { + c.doneNeeded = true + cleaner.Cleanup(func() { + c.mu.Lock() + defer c.mu.Unlock() + if c.doneNeeded { + panic("Done not called after Defer") + } + }) + } + + cleaner.Cleanup(f) + + return + } + + oldDeferred := c.deferred + if oldDeferred != nil { + c.deferred = func() { + defer oldDeferred() + f() + } + } else { + c.deferred = f + } +} + +// Done calls all the functions registered by Defer in reverse +// registration order. After it's called, the functions are +// unregistered, so calling Done twice will only call them once. +// +// When a test function is called by Run, Done will be called +// automatically on the C value passed into it. +// +// Deprecated: in Go >= 1.14 this is no longer needed if using +// testing.TB.Cleanup. +func (c *C) Done() { + c.mu.Lock() + deferred := c.deferred + c.deferred = nil + c.doneNeeded = false + c.mu.Unlock() + + if deferred != nil { + deferred() + } +} + +// SetFormat sets the function used to print values in test failures. +// By default Format is used. +// Any subsequent subtests invoked with c.Run will also use this function by +// default. +func (c *C) SetFormat(format func(interface{}) string) { + c.mu.Lock() + c.format = format + c.mu.Unlock() +} + +// getFormat returns the format function +// safely acquired under lock. +func (c *C) getFormat() func(interface{}) string { + c.mu.Lock() + defer c.mu.Unlock() + return c.format +} + +// Check runs the given check and continues execution in case of failure. 
+// For instance: +// +// c.Check(answer, qt.Equals, 42) +// c.Check(got, qt.IsNil, qt.Commentf("iteration %d", i)) +// +// Additional args (not consumed by the checker), when provided, are included +// as comments in the failure output when the check fails. +func (c *C) Check(got interface{}, checker Checker, args ...interface{}) bool { + c.TB.Helper() + return check(c, checkParams{ + fail: c.TB.Error, + checker: checker, + got: got, + args: args, + }) +} + +// Assert runs the given check and stops execution in case of failure. +// For instance: +// +// c.Assert(got, qt.DeepEquals, []int{42, 47}) +// c.Assert(got, qt.ErrorMatches, "bad wolf .*", qt.Commentf("a comment")) +// +// Additional args (not consumed by the checker), when provided, are included +// as comments in the failure output when the check fails. +func (c *C) Assert(got interface{}, checker Checker, args ...interface{}) bool { + c.TB.Helper() + return check(c, checkParams{ + fail: c.TB.Fatal, + checker: checker, + got: got, + args: args, + }) +} + +var ( + stringType = reflect.TypeOf("") + boolType = reflect.TypeOf(true) + tbType = reflect.TypeOf(new(testing.TB)).Elem() +) + +// Run runs f as a subtest of t called name. It's a wrapper around +// the Run method of c.TB that provides the quicktest checker to f. When +// the function completes, c.Done will be called to run any +// functions registered with c.Defer. +// +// c.TB must implement a Run method of the following form: +// +// Run(string, func(T)) bool +// +// where T is any type that is assignable to testing.TB. +// Implementations include *testing.T, *testing.B and *C itself. +// +// The TB field in the subtest will hold the value passed +// by Run to its argument function. +// +// func TestFoo(t *testing.T) { +// c := qt.New(t) +// c.Run("A=42", func(c *qt.C) { +// // This assertion only stops the current subtest. 
+// c.Assert(a, qt.Equals, 42) +// }) +// } +// +// A panic is raised when Run is called and the embedded concrete type does not +// implement a Run method with a correct signature. +func (c *C) Run(name string, f func(c *C)) bool { + badType := func(m string) { + panic(fmt.Sprintf("cannot execute Run with underlying concrete type %T (%s)", c.TB, m)) + } + m := reflect.ValueOf(c.TB).MethodByName("Run") + if !m.IsValid() { + // c.TB doesn't implement a Run method. + badType("no Run method") + } + mt := m.Type() + if mt.NumIn() != 2 || + mt.In(0) != stringType || + mt.NumOut() != 1 || + mt.Out(0) != boolType { + // The Run method doesn't have the right argument counts and types. + badType("wrong argument count for Run method") + } + farg := mt.In(1) + if farg.Kind() != reflect.Func || + farg.NumIn() != 1 || + farg.NumOut() != 0 || + !farg.In(0).AssignableTo(tbType) { + // The first argument to the Run function arg isn't right. + badType("bad first argument type for Run method") + } + cFormat := c.getFormat() + fv := reflect.MakeFunc(farg, func(args []reflect.Value) []reflect.Value { + c2 := New(args[0].Interface().(testing.TB)) + defer c2.Done() + c2.SetFormat(cFormat) + f(c2) + return nil + }) + return m.Call([]reflect.Value{reflect.ValueOf(name), fv})[0].Interface().(bool) +} + +// Parallel signals that this test is to be run in parallel with (and only with) other parallel tests. +// It's a wrapper around *testing.T.Parallel. +// +// A panic is raised when Parallel is called and the embedded concrete type does not +// implement Parallel, for instance if TB's concrete type is a benchmark. +func (c *C) Parallel() { + p, ok := c.TB.(interface { + Parallel() + }) + if !ok { + panic(fmt.Sprintf("cannot execute Parallel with underlying concrete type %T", c.TB)) + } + p.Parallel() +} + +// check performs the actual check with the provided params. +// In case of failure p.fail is called. In the fail report values are formatted +// using p.format. 
+func check(c *C, p checkParams) bool { + c.TB.Helper() + rp := reportParams{ + got: p.got, + args: p.args, + format: c.getFormat(), + } + if rp.format == nil { + // No format set; use the default: Format. + rp.format = Format + } + + // Allow checkers to annotate messages. + note := func(key string, value interface{}) { + rp.notes = append(rp.notes, note{ + key: key, + value: value, + }) + } + + // Ensure that we have a checker. + if p.checker == nil { + p.fail(report(BadCheckf("nil checker provided"), rp)) + return false + } + + // Extract comments if provided. + for len(p.args) > 0 { + comment, ok := p.args[len(p.args)-1].(Comment) + if !ok { + break + } + rp.comments = append([]Comment{comment}, rp.comments...) + p.args = p.args[:len(p.args)-1] + } + rp.args = p.args + + // Validate that we have the correct number of arguments. + rp.argNames = p.checker.ArgNames() + wantNumArgs := len(rp.argNames) - 1 + if gotNumArgs := len(rp.args); gotNumArgs != wantNumArgs { + if gotNumArgs > 0 { + note("got args", rp.args) + } + if wantNumArgs > 0 { + note("want args", Unquoted(strings.Join(rp.argNames[1:], ", "))) + } + var prefix string + if gotNumArgs > wantNumArgs { + prefix = "too many arguments provided to checker" + } else { + prefix = "not enough arguments provided to checker" + } + p.fail(report(BadCheckf("%s: got %d, want %d", prefix, gotNumArgs, wantNumArgs), rp)) + return false + } + + // Execute the check and report the failure if necessary. + if err := p.checker.Check(p.got, p.args, note); err != nil { + p.fail(report(err, rp)) + return false + } + return true +} + +// checkParams holds parameters for executing a check. 
+type checkParams struct { + fail func(...interface{}) + checker Checker + got interface{} + args []interface{} +} diff --git a/vendor/github.com/frankban/quicktest/report.go b/vendor/github.com/frankban/quicktest/report.go new file mode 100644 index 0000000..af944d2 --- /dev/null +++ b/vendor/github.com/frankban/quicktest/report.go @@ -0,0 +1,248 @@ +// Licensed under the MIT license, see LICENSE file for details. + +package quicktest + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "io" + "reflect" + "runtime" + "strings" + "testing" +) + +// reportParams holds parameters for reporting a test error. +type reportParams struct { + // argNames holds the names for the arguments passed to the checker. + argNames []string + // got holds the value that was checked. + got interface{} + // args holds all other arguments (if any) provided to the checker. + args []interface{} + // comment optionally holds the comment passed when performing the check. + comments []Comment + // notes holds notes added while doing the check. + notes []note + // format holds the format function that must be used when outputting + // values. + format formatFunc +} + +// Unquoted indicates that the string must not be pretty printed in the failure +// output. This is useful when a checker calls note and does not want the +// provided value to be quoted. +type Unquoted string + +// SuppressedIfLong indicates that the value must be suppressed if verbose +// testing is off and the pretty printed version of the value is long. This is +// useful when a checker calls note and does not want the provided value to be +// printed in non-verbose test runs if the value is too long. +type SuppressedIfLong struct { + // Value holds the original annotated value. + Value interface{} +} + +// longValueLines holds the number of lines after which a value is long. 
+const longValueLines = 10 + +// report generates a failure report for the given error, optionally including +// in the output the checker arguments, comment and notes included in the +// provided report parameters. +func report(err error, p reportParams) string { + var buf bytes.Buffer + buf.WriteByte('\n') + writeError(&buf, err, p) + writeStack(&buf) + return buf.String() +} + +// writeError writes a pretty formatted output of the given error using the +// provided report parameters. +func writeError(w io.Writer, err error, p reportParams) { + ptrs := make(map[string]interface{}) + values := make(map[string]string) + + printPair := func(key string, value interface{}) { + fmt.Fprintln(w, key+":") + var v string + + if u, ok := value.(Unquoted); ok { + // Output the raw string without quotes. + v = string(u) + } else if s, ok := value.(SuppressedIfLong); ok { + // Check whether the output is too long and must be suppressed. + v = p.format(s.Value) + if !testingVerbose() { + if n := strings.Count(v, "\n"); n > longValueLines { + fmt.Fprint(w, prefixf(prefix, "", n)) + return + } + } + } else { + // Check whether the output has been already seen. + v = p.format(value) + isPtr := reflect.ValueOf(value).Kind() == reflect.Ptr + if k := values[v]; k != "" { + if previousValue, ok := ptrs[k]; ok && isPtr && previousValue != value { + fmt.Fprint(w, prefixf(prefix, "", k)) + return + } + fmt.Fprint(w, prefixf(prefix, "", k)) + return + } + if isPtr { + ptrs[key] = value + } + } + + values[v] = key + fmt.Fprint(w, prefixf(prefix, "%s", v)) + } + + // Write the checker error. + if err != ErrSilent { + printPair("error", Unquoted(err.Error())) + } + + // Write comments if provided. + for _, c := range p.comments { + if comment := c.String(); comment != "" { + printPair("comment", Unquoted(comment)) + } + } + + // Write notes if present. 
+ for _, n := range p.notes { + printPair(n.key, n.value) + } + if IsBadCheck(err) || err == ErrSilent { + // For errors in the checker invocation or for silent errors, do not + // show output from args. + return + } + + // Write provided args. + for i, arg := range append([]interface{}{p.got}, p.args...) { + printPair(p.argNames[i], arg) + } +} + +// testingVerbose is defined as a variable for testing. +var testingVerbose = func() bool { + return testing.Verbose() +} + +// writeStack writes the traceback information for the current failure into the +// provided writer. +func writeStack(w io.Writer) { + fmt.Fprintln(w, "stack:") + pc := make([]uintptr, 8) + sg := &stmtGetter{ + fset: token.NewFileSet(), + files: make(map[string]*ast.File, 8), + config: &printer.Config{ + Mode: printer.UseSpaces, + Tabwidth: 4, + }, + } + runtime.Callers(5, pc) + frames := runtime.CallersFrames(pc) + thisPackage := reflect.TypeOf(C{}).PkgPath() + "." + for { + frame, more := frames.Next() + if strings.HasPrefix(frame.Function, "testing.") { + // Stop before getting back to stdlib test runner calls. + break + } + if fname := strings.TrimPrefix(frame.Function, thisPackage); fname != frame.Function { + if ast.IsExported(fname) { + // Continue without printing frames for quicktest exported API. + continue + } + // Stop when entering quicktest internal calls. + // This is useful for instance when using qtsuite. + break + } + fmt.Fprint(w, prefixf(prefix, "%s:%d", frame.File, frame.Line)) + if strings.HasSuffix(frame.File, ".go") { + stmt, err := sg.Get(frame.File, frame.Line) + if err != nil { + fmt.Fprint(w, prefixf(prefix+prefix, "<%s>", err)) + } else { + fmt.Fprint(w, prefixf(prefix+prefix, "%s", stmt)) + } + } + if !more { + // There are no more callers. + break + } + } +} + +type stmtGetter struct { + fset *token.FileSet + files map[string]*ast.File + config *printer.Config +} + +// Get returns the lines of code of the statement at the given file and line. 
+func (sg *stmtGetter) Get(file string, line int) (string, error) { + f := sg.files[file] + if f == nil { + var err error + f, err = parser.ParseFile(sg.fset, file, nil, parser.ParseComments) + if err != nil { + return "", fmt.Errorf("cannot parse source file: %s", err) + } + sg.files[file] = f + } + var stmt string + ast.Inspect(f, func(n ast.Node) bool { + if n == nil || stmt != "" { + return false + } + pos := sg.fset.Position(n.Pos()).Line + end := sg.fset.Position(n.End()).Line + // Go < v1.9 reports the line where the statements ends, not the line + // where it begins. + if line == pos || line == end { + var buf bytes.Buffer + // TODO: include possible comment after the statement. + sg.config.Fprint(&buf, sg.fset, &printer.CommentedNode{ + Node: n, + Comments: f.Comments, + }) + stmt = buf.String() + return false + } + return pos < line && line <= end + }) + return stmt, nil +} + +// prefixf formats the given string with the given args. It also inserts the +// final newline if needed and indentation with the given prefix. +func prefixf(prefix, format string, args ...interface{}) string { + var buf []byte + s := strings.TrimSuffix(fmt.Sprintf(format, args...), "\n") + for _, line := range strings.Split(s, "\n") { + buf = append(buf, prefix...) + buf = append(buf, line...) + buf = append(buf, '\n') + } + return string(buf) +} + +// note holds a key/value annotation. +type note struct { + key string + value interface{} +} + +// prefix is the string used to indent blocks of output. 
+const prefix = " " diff --git a/vendor/github.com/glycerine/go-unsnap-stream/.gitignore b/vendor/github.com/glycerine/go-unsnap-stream/.gitignore new file mode 100644 index 0000000..0026861 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/.gitignore @@ -0,0 +1,22 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe diff --git a/vendor/github.com/glycerine/go-unsnap-stream/LICENSE b/vendor/github.com/glycerine/go-unsnap-stream/LICENSE new file mode 100644 index 0000000..a441b99 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/LICENSE @@ -0,0 +1,21 @@ +The MIT license. + +Copyright (c) 2014 the go-unsnap-stream authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/vendor/github.com/glycerine/go-unsnap-stream/README.md b/vendor/github.com/glycerine/go-unsnap-stream/README.md new file mode 100644 index 0000000..b1b8c74 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/README.md @@ -0,0 +1,20 @@ +go-unsnap-stream +================ + +This is a small golang library for decoding and encoding the snappy *streaming* format, specified here: https://github.com/google/snappy/blob/master/framing_format.txt + +Note that the *streaming or framing format* for snappy is different from snappy itself. Think of it as a train of boxcars: the streaming format breaks your data in chunks, applies snappy to each chunk alone, then puts a thin wrapper around the chunk, and sends it along in turn. You can begin decoding before receiving everything. And memory requirements for decoding are sane. + +Strangely, though the streaming format was first proposed in Go[1][2], it was never updated, and I could not locate any other library for Go that would handle the streaming/framed snappy format. Hence this implementation of the spec. There is a command line tool[3] that has a C implementation, but this is the only Go implementation that I am aware of. The reference for the framing/streaming spec seems to be the python implementation[4]. + +For binary compatibility with the python implementation, one could use the C-snappy compressor/decompressor code directly; using github.com/dgryski/go-csnappy. In fact we did this for a while to verify byte-for-byte compatibility, as the native Go implementation produces slightly different binary compression (still conformant with the standard of course), which made test-diffs harder, and some have complained about it being slower than the C. + +However, while the c-snappy was useful for checking compatibility, it introduced dependencies on external C libraries (both the c-snappy library and the C standard library).
Our go binary executable that used the go-unsnap-stream library was no longer standalone, and deployment was painful if not impossible if the target had a different C standard library. So we've gone back to using the snappy-go implementation (entirely in Go) for ease of deployment. See the comments at the top of unsnap.go if you wish to use c-snappy instead. + +[1] https://groups.google.com/forum/#!msg/snappy-compression/qvLNe2cSH9s/R19oBC-p7g4J + +[2] https://codereview.appspot.com/5167058 + +[3] https://github.com/kubo/snzip + +[4] https://pypi.python.org/pypi/python-snappy \ No newline at end of file diff --git a/vendor/github.com/glycerine/go-unsnap-stream/binary.dat b/vendor/github.com/glycerine/go-unsnap-stream/binary.dat new file mode 100644 index 0000000000000000000000000000000000000000..f31eee2e24025b5f68a8b4c0fdad4af60b6db461 GIT binary patch literal 5592 zcmV;}6({N+iwFSa!qHLy1MOUUciP6b@4x0#OzBDjjv|cXJPs$$DZx15)&?KIO>$nK)qMB>Upt2QA1exc(KSUItT1Wl zfon@Yk}YxA8=E|NKL|xAXEKz&Ee+xNwwE|E0#HW^D;DuwifItX!9oNxzM8onzCzrf ztL0@p4}5~;E|#8LNI$k>7t3O_BLT(rPkA7ryS&6iYeBbWLAbDDQ;ej7*dh?7X*Mk1 zvIF0-8m6lE$ zP&#fDhwe0C<`cC^1Qz%r3X;&4d}r$VR(Q#*j|_3)#&ZGb)88P80b~Jb&43_*FoYF? 
zoibdwaSWC%!{FR?@FSjUt(XO#7hI4mNnS2Zig?mO#w|_W<_cmFsfEUfWwd}fgOu0; zrUcZQ2In#tTgD|nh+Wdpcn+z0fJlIf>-x?LQdrjZEO#M8^IBxEXn`jwG9cASfZ(5n zk5IH`u$;h7a=XFI2BZ>TL?~b)GIT93%K6V?BhW=sauWN}<(V|Xw-z$BS}TE+!w86n zrNwCsOT~@JTzmzRMlP&NF_ol7ASDpecW_sd#sHT^5KEyL6-PNGKo%;QdIzv33TE*I zDTUTD5y2Kn$uQX^&LsW>(2;@TlE?Bf38w4ZEX}7z{=4 z@aUiiaDYE-_s6~N$Pm4L=iqp+*MDgUfDrw`SRC}=DKTt3Flbr8x;jw|_Qhd$*m;G^ z_KV&@Z~TT=-0zM1wAwzFYm1}yaNO%0AGC+!=y-TE7!fccZm&1$9JG6f-8~Z+U{%rm zvD+Vu(W~~s0RaNa;&3qR_J(coq6?JTFAlm2Drnm44ZEGOL74URZz2S|0yYPR7#(#x zJ^Io8sSDKG!#4)e4^EA`|2)PZJQREF!}iPW2taBdVFKjPIUaTo2}965I({)4_r}NL zu6Q{Z>@jOc-QkbDPIvTofIAqBm_Ns(u7Op?ZC)Hez?{+F>GzA{QIFZw>yNv`;qlS9 zH|W3@C zU$pmr>=E*MIAlKRY5icah9XG|<1Ql{zN_Bfi?F za4hUl!ubeGIAO4S;Q9~X7(EI|@KlY95Ch?5;5Ol~#AXngG)o1CIb4_rr{V-@>{rRk zG7Byu@IBWDaCjMnQ;O7BP5TWuu2kYn#LpZX0Z#`ITF@5Kr#c*A2XwgZ;qTMr>CI85Yari07=Aec;YE97FA}6Z;RSYZ03xpi@jvA ztksDV&;s)aT!?42s-_xp7oPM_5h3n49D~TCLts(sXa7$0vtKpnd2ME@3FZr;u2!!b znxR*p)XNKLu6+7JYQnmOR-V*}Q)@{cLN#hj@%DsdQ5Q9z*a%65A^i(N=0>C819KW` z2p^!^j>F=_pw_N9W(6Ghav$lZ#Nwv^Cp@mrdq3EVO?7~K1f>C7Jx<*6zTmcX4oj#67_AD2G`{ajVV*)r2QIHe50 z6-D?^V7Ke>97cqcj3(dBm!P4nO)ZtL8FWp`hifc{E5)#u*!1N^t?05~aA1=1D4ct% zDolGhtq~jA*?sDK0MI;50ptv%0c9SpnsdhsA_*79EF(7PncARC{ha1&Pic|GBOX!c zAtWrWBDp+;14c^C#bX{pD8uHAm9?lQGi{0xCcTt+O5T$wOO)$ceUOkA0~TkqoFg+# zMA|(?IzZB@JU{jN!fy#bSxgnLm^THqXe8DSFPBtMDXbKWR$NO+%1N$_bXsG-S(J)V z#$)y2tgNx*7zz8wJIXbtzgHdT#x&Dhl~FJbC^>87m+<_G0^?rEci|dR+u#LZ-uOk! 
z(Jy+YSdlELnt1%^(Zk1@Lsw;HkZ57W5ec}^I6!>&Lw4~Qo+cIW{aQK?)H7&yfl4f> zc`ibfE4-MtGscj79zk1YnflId$xXB4WFm6smAXH9t$WMO9E&Om6ub}nALYuVhw=*U zYfj!vu?Cf5N?8h@YA!a4yQ4FLOuUPX(zW|-)Rytsw-`q*>afX}T0y;$2!a7B9*?g)QXxVmC_Zl5#zZ^J<~sLV{zY6Y{-_l*e+;EuWe>5!;$WeLF!OgfhYc z6ix$BME#aRQs(AHnnmvwW$a3e=H)2DxhwYtNp1bD$I_^`gIB5FscS9hke1TpEgl~#NBv9-u8?N&=%Ra>g1j?j zl?71u@8dFC_wBK57@`e?5Oi{rp z(}&IJcbC1;$p$9+B0H)`9wS;}n4)&~ECAGlGwf13U197LPRz7>Ze~ zt5{L=@hd8M8pH)i8njwR`?z=1-<+ZFHld>RLhw;2r>tGa*_LAMii=q{Qo&6F?79(Z zNgbWiXtP#3wzhKZfZth$2qbV)jul&}q|=C^SwaliUn-6u{T{_w5#p|yI#Qfli2cfh9X4SAsp)xjqhNdwc2eHyP>VS95tC7jO=DQAmPNDsX%MTQD4>zF zxkHjE`ns`Y02;*;g^ok%MZjT2za3GDVvVZ|4ZfbT4+9HXk4hXaK$};lm zwnZsxIasK-LtyAKpteRjJ2=}_qgoJI7$!^jT7Q}31Bj78Qu(m-ZqbxTh9fP*D#Ewt zP`f?Neo8je68G*z_teWB7#4=?>V5>!(4LsHVIy0Xm{YG;GIUO5ET)!yMp>zCd7e~n zM4%#|PqBh6D?r8)UYpP9yeUiON&zT1)A0-8MHpF-q1%K}szxGOx-n(;S#=e|$kK;? zmh@hjuc=63YLv09=EJp8d0=Qy*q0ZoLg1O13an(ltJz*k6E_X0Z9&CDE#>B_7tF<_ z3MnvD+t+IGRB}KZ?YIY#k_)?bXP2C>u+|HBMpnW+6;lHRFG>A3%Z?MQcgZ4U&5X&> zQV`O@L>je(0dQmJT+&;ZAUT~=$1w#K&6xmx?~!K8pjdiv?V-bOGDy3i4HyC(VGF2I zOf`Q~v0?#iInYOE(wQ*!cCBQ1#7r6mc9QSVEL^MpOuGVwrCiTVLTP4nP`bf!1u9Cv zp;ee+5O6Kvm5@==%b)#}nUUfXgM(RW%gI1#Nc?BBkuqyRug1V)A6lwIjM5aJ-C@y3 z6aI#J^O$++c1ApIjm#1cxZnXuTRKgqLLKdbx%_lMlzqz zru!~nR4K`N5C;!9*-~7<1lpEVY=XjBmfD{ebiw{OtczN?WqD)2@~Uqtzh${?V12;` z*7R-b+m6uJ*%7*ul&@>Vltli;+aEXdK0dz$$@)*f zzvxIY9uM{gt&g`V{*g_%u4FD7$+h|QXIaXp6un=So?eyms&0OMvH2xS%|A<0UqgBL zN67k%YO}2FQhWaIMJp!|3{IJ|vkk3EMJ3&n&a31850k_8PoS5x!t}vtJnXg)C*wi- z>&3yK^TTA+``<2x=JQs@^H$s5ey=;ksDDL$>uCArU)f@VL*8@T-Ba_0iel;Gq z&2&HxyzqE7OCMuqclaab9`{E-^p1{bYw(r=vCT1(85~;vsZ^Wu-|`8M2$d+T?Y;h4 zzX@DZC=I(ZW@W*fMl@5jD`}PE`tBlG(7tb5@9I%zmpUr)B)lI#vXsl5(&iabTZigo zD~SW-{H|SLujrdn@&r zUFEmTL!{C?u%@m%7E-WNKk!`MXTrT^K5Y_0&yk4HP(!s;Wt0a7+qBiG2i{XZy-W|! zq&ua`9b@v-*`~*Yy)jVZVkwg9bVB(sO}4k5)gQI0A~v0y64TL9i5D<5JL;KT)oMhf zc>Y{8AJxULg0G%E6U`^}I^4n5du#jYHr&HgwL@#OVtZB;36;`OVxi<0gtcJ$4&122 z@aSwnwOSn$pVaWoR26%i!n_9&vh?)W0+QBcK-^M0$QV+YYBKW#?!k)HI`sKM1#a

R1|b+(rkd40(SX+5gG>UbC-rT*F#m=%5oirhF;w~S1>z6lz>fx zz^m&=sU_+UNX)^Cdj#X2q4%GWs#hc)k8VF{IwPkpqIHmC#Zl)ARy2sWU9|Sv1g(U?n`P}^4g?glo1s{ zmURklQ;S@Tg8bAGzxh=t>f=PqtncjlMR05;KGj}o_dDB(N5`KE8`sPgo`1aM!7pE? z^v;Elz@1Vmj44+(yk6;w(Db@^CU#WnsNG}{PQE)^WkXq8jHVVIerC_)i#AP=pHm}l zSJaBR<8$$JWzO9+P&57CmJA@>16df3Ec!*vYt9eqIh3h0-H)r?&Sl(X9 zdo?Np=iGk9x~T!A`%}Q{nr!IVZV}^^Jo&nur?BlY#ZPze9oik7<=SxRKE3JIl2Z+BV>#)+Y?`HFM$vQ&20Nu?w`7TneRb)8t~tcUBLTb}FO zD4If1a77-!LmI2HxC(wn4X-59W!N-iO}1T>^EB7u-gN+(j)Gn|cj%%-KNzipU986JPo^m19*(=ZdLY z{>V&^v2H9b6>NS^5a|`O;*~<)>@a4vYVmZb%8vQ$IF_SeDo`y#SR|W6i65WMY>*D+vQw2d_jZqZADnTt^j8Tr+`HEo5B~T z#&1tlf$)JCYlw77j&BQQC&`-LXf064)vApo3hBsk!*OR+rrD8cngn|qWfWkdIpEr% zb`9#~T_?1q6eTI@n3&G>0h^W}@ZwG+^r2BZLbA$#nyRf_dXd0uWp6(9Nz!SgHlD1W zQ6XWE0^X^JA%KZX7IgRj5(}|(i(^!N?6j<+olI~y=%&ia*%?o$zA*81RZZ88x>;kl mzl6%(D(ru)!v57k==O8_x&7RJZa@F=&%XhQj8LQiPyhgD2T9!5@nV0Oi8bQUC+(Tzhxg#Q!kE?2BTO=Ts`c#X}|mV?LDIx#!k|vr@3dDwSti}vuD3w znn~hEzO^(@gKz%4&lWz9AMeoL=95QT#lQGGe6s!I(KpTQ?Z-P0ceZwRaKG7n(%kt* zZ2gHiWuGL9tx$+>-dVBqZy5Kn=l@SXOUpj9PNhg#4=R;LL)gLcGIUSpv8dT~vAxxN z_yAu!hWH;V3VhKuMH{RzY3PA#OFxn=ao8K1Jb6C|MJQ)7l)fzu;rh0hI5GlIM+z$z z@mz{&5XZqn1T(&xxgNek+@P!FWjqgjg5)lio?J*jwqh5{VzeUx#r02lAfdaw#6)XB zw`M`Ouwqk;q=MKY5TSH55>Dc}cU! 
z89EAkH!85k@*UwR>K$Q4SZ=fOUFAE`S!G_+5N+QH!Ggmelx}E=XBJ<7ul_95UDFO0 z&oRIJQSYPFOkc)pr*1=JV=?fYNIZKNe;^G-2iwwMhgP_#z6D(3X5>>iSlA$*hkIapA^u z0qN7uILPo-VA#$Uya49!NFd4I1=st9`5$X!`_SI@c=if?Ga{H z$!zeb-G3vxKOGIbqmdX4Mep$Fpa*b(KWz8Mz3#{my?*E5c(2!gX$XK2{lQoq^x!Em zY&l`1nhvMjXcr+Lh zFd}ZRH|iX;dxzaU6BuAs(fzU8AB)ke_Q3%G0?OiWFzoh*ZSkTDl-e&2x(X_2+UpIw zov}ff_4RKe1iS(^2Zk6Obvr%!(fz3l)Z4>12GI{rjk^Cl#vnWtd+o#a%kBt3Y9C<& zt-d$D^)+RmN>z96-RF z(ckI!i{nv`+0^TgyTjq}(YQD0*CEx{U<2@MV=75+56}jECK-Yl4Brq~;srC@5U*c# zaeqiGWG!eDBSz4M&bT-XD?&NOB}zrV`|_aovfJ-;>G^=*yzY&`=n_2unC&uzcY958xO*3P|u&jf)Ti;bq`9;jqMJ z5ScVf1&29YmHS$fHAGQR`>_ zPV}>1HRyS5W~vG13!<)8uN#`7SD)0&3u&%=`a){Lx`kGr)QMATNghHqYD@94iVt~h1|9Qbk{>8HfvtT%?Zo6^fa6-I`@ zW70B^J~6~Z0r~wf+!+J-c31eWC+^-&uiU`w!VLiTjiu$gb`AG2{I|-+G@?wE@LuIo zWy1$1l#bM}vKmJcN{98Klr_Ijit2>OsUnf7vGA2HR~RE-JQrKMwo=Udzz!EmeFd^V z;L5mQ>{>yJ-CYgf<`JNGt;Xf^Av*F0KoCYyyO<*z(CQ4gAQ#K{vMy{mnF}dSkzHuM znR;fMEYS)*SB3^FB8GX=Iwzk2JA&(Zpe^xRnxP5YGBg<6`;F<$9}wlKDh`&wojs0H zTWBAbKL!0ge%48Rpd_)uWC>+u{$gp`aX-_4hxp{z|Um9H6eP0ELBEQc$_ zu$I{LU;puJWT=Q45R^N9BO3lS%9ziI>=8ToKs3tRQiV!Bflz2+s zlPF7+>soz~kQM_LXS19mGfYIkZ2|rm(6|a~#1+-`+)($V1R8T3b z6pL0|OGwH|u8eeAW4~FHic!X6_2I0nvE&#D`^P)VHKxB;9q7h1(_EENFb*g=Yvh;k z{E7nOUdeai8dBTf1!3O!Ma$7IdZt*BEUB7!{OHlc$C^V|WoD3QVZ{*%xX?I2eD_0k z@fn^b74ZF9IuFz{Xm){0EU0-dLX<1Kn6@*v& z0Vs(Q)Szn3mMF1wPNoP+9l$EAN*P{J$jV`|KxdxCog6CWXt7ga&lCSlMD8!JPR~8{ z*<@io5w&Y78|N~d!n!%Mp{kVt12iFKCFD^Iq!zDO^GCk`o#s|LV4mOHN$+uno8AIC zMApJOMRD1Dsu^h6c90+~VJr)6sEAi0j1}O#V0S};Ok_lw4$5xk(msol1urv?<7KPS zxGt@3sQyM{8U`1UY#?*>&lp+NP3nJ&W^dq2NM-W26)Ey^EB` za?UNEnmG~MnnQg%K^}xM!U7ad15iZ$mO@hI=0=)D?-gb2N{i;@D8jib_XSC9{jJH+ z5JrgFg>@!tDw|8Km9Z=(nz`GYagd!7_As>89o+&sx4Z-a@y>VWS++Ta51-aQ4$4mA zPl1MEbv0VySAuC>Xbq#j4{OUW)cgVn1!4NO*=#j;PH5^3TvCHqso$woXc1;Z~4ClcPzW-TJ( z1G#Hulfrf2Gbr+g6?RNf!71cxwAOiylhh+`kuO(4u@fwo5KOHI#?ZyY2SUIA8e5O~ zdn^Bb@{qoFBz-?_()S~EpT8gd?YDaB-IZ^wa$Y#MoYV!zR>&Ta)r;1F`g7wHfgsRs z>WVXhkB$R7@PrnRH4PYwS*@#BQS|XEDtQ{j1xOmST1Wf1chujUq3|}LqV+=XQ7EUZ 
zUB}s$V(p5HSvOL_O#|$@5o$>toziHtRy($~a_xZMS%wHCa8iyHTdAbeh@x3S4B1~Q zjv)OW#aI#IuBMs|vxmuxi#bV47v(xqoLh+f%7h&@VF0P=d3B>;d=GX~-sw<_IP?*d zQH)JvSgV#rv-@cftDh*Kk+iu(k}3MSv1I@n#S?{&L+M4pVMV_kQHf%Ws|*dkp0W=E z3t5j!94_R&b8hZVttrYf^6IulDQh`csJBC4=rW+TMmjq<+f}1l5Lp-|OZZxUndAeA zkw8-Uu=H-xlt_jnEyF6px93p1JWO&3(8U=Qe@6aqEBGU{-NUa;(5p}2b#FF9EZFAoR&pr z?ov6TRS?e!#8d917VbV{z)r5%VyHI+FsLO$w}zt1`?B4;ezt$$%5ZMJ5V>gprQh{G z&e>}?cpbyboUKq|GqVyeZsq-mlBZ1k5ITD5qp)=vi613PdY6`#w1Sy-C#o6+Ce~Fq zqXHjE?HBlUR9+wG#+nGveWON0KT$;V>shN)w<<5@fv2~~fQtc&b?Qy3_M|PTeU01> zDhvTwX~C1V3iTcc{dO+T;LUT)+g{mQ-wl!daHupBHq&{y40Q zTDfI;W54pMZz{iKxou#5!3Ng!ZR_MK>NMRpnm)bJblY3H?Ja#3y`|fZ(AU`!x{{Qy zYs8d9{>9rLH}pO}zXZwpPrtwDNHHD{_6Dtww<`XTO}MUPE*r_U`SoX6%BK{)UzMI- zmGY`?etxm}B}>geOHyA$dH6@j`ip9_tnN~K{_jOAClCxynX|JEtx82D-ILC%t#I+unY!JH)7eMSbgN`Q~5Q zVuM58bKKoi^M#6H>Eq-6`T@;!Kn}d{cs5HPV`g{wBjz6WM?dtAj%aJ}mIATOF_IY^ zTK=h2oAclD36BVsD6H+h{#m~XTvI3wyE0~F!J9@jQ?x5-mE-#EB3aPBZ(Hx`QD&Dq zD)J<}A3w5`%be2Y8B$w^>SQa41LXX!U16{4$W?y#2K~wS6@Jn7aI9`Nw;!pUnD7KK zirh{12YYl!DOP(c^_X4dx6DJN(mb%Ht~(Y|uv0(qT;6BGy=Fdb5<<_Bh|y3( zwNz!42L{`;)u{*GQ$M{-56`4KrOF*+^3&O-$ArBxP~&1LlInCq`7lkkx1QA>wW=aE zotqNV(NT#PFf=>rnO)UtM5K8BTr?lm#jk>|o;?%IC-pkq!Pa|g`{_2^!&9|GYqMf| zRul=9(otfeJz=8H&`m~$qEMlFlT6M=B$iQ8xT|m>fvT8dBv^CB=UkdnoOXJ zZ>tkj>2Uegi3&CoW>58`qIogHa5Bir1RU^YP@_~7bZOFTfSCez`F;@^2JCZ}hBntj zRoTjN9fgKo-8@$?J42L!O@qLz>qn_2>JLcF!HRnX59+FaYKrpfud3^( zi><49QksS9BwROV_yAbmUdVejDg)=-e#N?}0i^p=!0Vc9=-F-&nq zckmtB9h~LR)OZ0jKe+G}+-jx;W>z`Ym>)a@sLQ!x<9=}5xtFpKXenkzhB+_NrG-OS-U6k`Q?Xd9- zEy#@xehpqOkYJywt?pdI|Me@SU?#cfbq}eMJ-(q?qld2V)LbcPO8^SmsT53ev7B`3 z7I1N6LJAhwuux8OGS-w7n2v<`PEJm-azn0&82ncsp8qfSsFMj3xQh5s`%fTl{{J7g z9&T-A|Nlet;r63%wwl{d9zMSP{~!JnpD+6VKNQZJmd=z0k@{}P_tK`Hq;3e!lyKSS zqI!xdAenMa1&0vEmlP^w95qe@xao6w*-(G@Tq!5rV*tnx0Ua0IMu*%*Ef$-44Rg%d zK>&)#304zd`ZkqgRZ8cIsayWYOpmc{EG`vneoheS6|>@%Lf-5!X0>YZbg9aY`SqQS zaD|olB}*x7R8E{;%9to%r~=hn@F~a3nwWg_E1$&<9@rF5hjsXjZ7oH*URm4aTseF} zgYs=fSiG(PX9uT%ME0A)7pKN=PgH^Mff#FubV-hH3uY(Dn%-zFP{`G)jU)=`$Z^AQ 
zXH=%yk!hL)dm3dFV4^wT+M#w0>g8Q0w51d!De9P*&h-JCmLTxrP9*f9Q9DAi%72=w ztz3GMz-wi1KJ`h`X{0uute#OJVUGgdsfZzfiAxrA_x} 0 bytes, it returns the number of bytes +read. It may return the (non-nil) error from the same call or +return the error (and n == 0) from a subsequent call. An instance +of this general case is that a Reader returning a non-zero number +of bytes at the end of the input stream may return +either err == EOF or err == nil. The next Read should +return 0, EOF regardless. + +Callers should always process the n > 0 bytes returned before +considering the error err. Doing so correctly handles I/O errors +that happen after reading some bytes and also both of the +allowed EOF behaviors. + +Implementations of Read are discouraged from returning a zero +byte count with a nil error, and callers should treat that +situation as a no-op. +*/ +// + +func (b *FixedSizeRingBuf) Read(p []byte) (n int, err error) { + return b.ReadAndMaybeAdvance(p, true) +} + +// if you want to Read the data and leave it in the buffer, so as +// to peek ahead for example. +func (b *FixedSizeRingBuf) ReadWithoutAdvance(p []byte) (n int, err error) { + return b.ReadAndMaybeAdvance(p, false) +} + +func (b *FixedSizeRingBuf) ReadAndMaybeAdvance(p []byte, doAdvance bool) (n int, err error) { + if len(p) == 0 { + return 0, nil + } + if b.Readable == 0 { + return 0, io.EOF + } + extent := b.Beg + b.Readable + if extent <= b.N { + n += copy(p, b.A[b.Use][b.Beg:extent]) + } else { + n += copy(p, b.A[b.Use][b.Beg:b.N]) + if n < len(p) { + n += copy(p[n:], b.A[b.Use][0:(extent%b.N)]) + } + } + if doAdvance { + b.Advance(n) + } + return +} + +// +// Write writes len(p) bytes from p to the underlying data stream. +// It returns the number of bytes written from p (0 <= n <= len(p)) +// and any error encountered that caused the write to stop early. +// Write must return a non-nil error if it returns n < len(p). 
+// +func (b *FixedSizeRingBuf) Write(p []byte) (n int, err error) { + for { + if len(p) == 0 { + // nothing (left) to copy in; notice we shorten our + // local copy p (below) as we read from it. + return + } + + writeCapacity := b.N - b.Readable + if writeCapacity <= 0 { + // we are all full up already. + return n, io.ErrShortWrite + } + if len(p) > writeCapacity { + err = io.ErrShortWrite + // leave err set and + // keep going, write what we can. + } + + writeStart := (b.Beg + b.Readable) % b.N + + upperLim := intMin(writeStart+writeCapacity, b.N) + + k := copy(b.A[b.Use][writeStart:upperLim], p) + + n += k + b.Readable += k + p = p[k:] + + // we can fill from b.A[b.Use][0:something] from + // p's remainder, so loop + } +} + +// WriteTo and ReadFrom avoid intermediate allocation and copies. + +// WriteTo writes data to w until there's no more data to write +// or when an error occurs. The return value n is the number of +// bytes written. Any error encountered during the write is also returned. +func (b *FixedSizeRingBuf) WriteTo(w io.Writer) (n int64, err error) { + + if b.Readable == 0 { + return 0, io.EOF + } + + extent := b.Beg + b.Readable + firstWriteLen := intMin(extent, b.N) - b.Beg + secondWriteLen := b.Readable - firstWriteLen + if firstWriteLen > 0 { + m, e := w.Write(b.A[b.Use][b.Beg:(b.Beg + firstWriteLen)]) + n += int64(m) + b.Advance(m) + + if e != nil { + return n, e + } + // all bytes should have been written, by definition of + // Write method in io.Writer + if m != firstWriteLen { + return n, io.ErrShortWrite + } + } + if secondWriteLen > 0 { + m, e := w.Write(b.A[b.Use][0:secondWriteLen]) + n += int64(m) + b.Advance(m) + + if e != nil { + return n, e + } + // all bytes should have been written, by definition of + // Write method in io.Writer + if m != secondWriteLen { + return n, io.ErrShortWrite + } + } + + return n, nil +} + +// ReadFrom() reads data from r until EOF or error. The return value n +// is the number of bytes read. 
Any error except io.EOF encountered +// during the read is also returned. +func (b *FixedSizeRingBuf) ReadFrom(r io.Reader) (n int64, err error) { + for { + writeCapacity := b.N - b.Readable + if writeCapacity <= 0 { + // we are all full + return n, nil + } + writeStart := (b.Beg + b.Readable) % b.N + upperLim := intMin(writeStart+writeCapacity, b.N) + + m, e := r.Read(b.A[b.Use][writeStart:upperLim]) + n += int64(m) + b.Readable += m + if e == io.EOF { + return n, nil + } + if e != nil { + return n, e + } + } +} + +func (b *FixedSizeRingBuf) Reset() { + b.Beg = 0 + b.Readable = 0 + b.Use = 0 +} + +// Advance(): non-standard, but better than Next(), +// because we don't have to unwrap our buffer and pay the cpu time +// for the copy that unwrapping may need. +// Useful in conjuction/after ReadWithoutAdvance() above. +func (b *FixedSizeRingBuf) Advance(n int) { + if n <= 0 { + return + } + if n > b.Readable { + n = b.Readable + } + b.Readable -= n + b.Beg = (b.Beg + n) % b.N +} + +// Adopt(): non-standard. +// +// For efficiency's sake, (possibly) take ownership of +// already allocated slice offered in me. +// +// If me is large we will adopt it, and we will potentially then +// write to the me buffer. +// If we already have a bigger buffer, copy me into the existing +// buffer instead. +func (b *FixedSizeRingBuf) Adopt(me []byte) { + n := len(me) + if n > b.N { + b.A[0] = me + b.OneMade = false + b.N = n + b.Use = 0 + b.Beg = 0 + b.Readable = n + } else { + // we already have a larger buffer, reuse it. + copy(b.A[0], me) + b.Use = 0 + b.Beg = 0 + b.Readable = n + } +} + +func intMax(a, b int) int { + if a > b { + return a + } else { + return b + } +} + +func intMin(a, b int) int { + if a < b { + return a + } else { + return b + } +} + +// Get the (beg, end] indices of the tailing empty buffer of bytes slice that from that is free for writing. +// Note: not guaranteed to be zeroed. At all. 
+func (b *FixedSizeRingBuf) GetEndmostWritable() (beg int, end int) { + extent := b.Beg + b.Readable + if extent < b.N { + return extent, b.N + } + + return extent % b.N, b.Beg +} + +// Note: not guaranteed to be zeroed. +func (b *FixedSizeRingBuf) GetEndmostWritableSlice() []byte { + beg, e := b.GetEndmostWritable() + return b.A[b.Use][beg:e] +} diff --git a/vendor/github.com/glycerine/go-unsnap-stream/snap.go b/vendor/github.com/glycerine/go-unsnap-stream/snap.go new file mode 100644 index 0000000..12a8d40 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/snap.go @@ -0,0 +1,100 @@ +package unsnap + +import ( + "encoding/binary" + + // no c lib dependency + snappy "github.com/golang/snappy" + // or, use the C wrapper for speed + //snappy "github.com/dgryski/go-csnappy" +) + +// add Write() method for SnappyFile (see unsnap.go) + +// reference for snappy framing/streaming format: +// http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt +// ?spec=svn68&r=71 + +// +// Write writes len(p) bytes from p to the underlying data stream. +// It returns the number of bytes written from p (0 <= n <= len(p)) and +// any error encountered that caused the write to stop early. Write +// must return a non-nil error if it returns n < len(p). +// +func (sf *SnappyFile) Write(p []byte) (n int, err error) { + + if sf.SnappyEncodeDecodeOff { + return sf.Writer.Write(p) + } + + if !sf.Writing { + panic("Writing on a read-only SnappyFile") + } + + // encoding in snappy can apparently go beyond the original size, beware. + // so our buffers must be sized 2*max snappy chunk => 2 * CHUNK_MAX(65536) + + sf.DecBuf.Reset() + sf.EncBuf.Reset() + + if !sf.HeaderChunkWritten { + sf.HeaderChunkWritten = true + _, err = sf.Writer.Write(SnappyStreamHeaderMagic) + if err != nil { + return + } + } + var chunk []byte + var chunk_type byte + var crc uint32 + + for len(p) > 0 { + + // chunk points to input p by default, unencoded input. 
+ chunk = p[:IntMin(len(p), CHUNK_MAX)] + crc = masked_crc32c(chunk) + + writeme := chunk[:] + + // first write to EncBuf, as a temp, in case we want + // to discard and send uncompressed instead. + compressed_chunk := snappy.Encode(sf.EncBuf.GetEndmostWritableSlice(), chunk) + + if len(compressed_chunk) <= int((1-_COMPRESSION_THRESHOLD)*float64(len(chunk))) { + writeme = compressed_chunk + chunk_type = _COMPRESSED_CHUNK + } else { + // keep writeme pointing at original chunk (uncompressed) + chunk_type = _UNCOMPRESSED_CHUNK + } + + const crc32Sz = 4 + var tag32 uint32 = uint32(chunk_type) + (uint32(len(writeme)+crc32Sz) << 8) + + err = binary.Write(sf.Writer, binary.LittleEndian, tag32) + if err != nil { + return + } + + err = binary.Write(sf.Writer, binary.LittleEndian, crc) + if err != nil { + return + } + + _, err = sf.Writer.Write(writeme) + if err != nil { + return + } + + n += len(chunk) + p = p[len(chunk):] + } + return n, nil +} + +func IntMin(a int, b int) int { + if a < b { + return a + } + return b +} diff --git a/vendor/github.com/glycerine/go-unsnap-stream/unenc.txt b/vendor/github.com/glycerine/go-unsnap-stream/unenc.txt new file mode 100644 index 0000000..5f50279 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/unenc.txt @@ -0,0 +1 @@ +hello_snappy diff --git a/vendor/github.com/glycerine/go-unsnap-stream/unenc.txt.snappy b/vendor/github.com/glycerine/go-unsnap-stream/unenc.txt.snappy new file mode 100644 index 0000000000000000000000000000000000000000..ba45ecd4269dd0d763893470fde8639f40203d06 GIT binary patch literal 31 mcmey*#=ubQml#kG$tcLcu=-fm#f;RPoc#FWyu^ZnN-hAVp$iuP literal 0 HcmV?d00001 diff --git a/vendor/github.com/glycerine/go-unsnap-stream/unsnap.go b/vendor/github.com/glycerine/go-unsnap-stream/unsnap.go new file mode 100644 index 0000000..8789445 --- /dev/null +++ b/vendor/github.com/glycerine/go-unsnap-stream/unsnap.go @@ -0,0 +1,513 @@ +package unsnap + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + 
"io/ioutil" + "os" + + "hash/crc32" + + snappy "github.com/golang/snappy" + // The C library can be used, but this makes the binary dependent + // lots of extraneous c-libraries; it is no longer stand-alone. Yuck. + // + // Therefore we comment out the "dgryski/go-csnappy" path and use the + // "github.com/golang/snappy/snappy" above instead. If you are + // performance limited and can deal with distributing more libraries, + // then this is easy to swap. + // + // If you swap, note that some of the tests won't pass + // because snappy-go produces slightly different (but still + // conformant) encodings on some data. Here are bindings + // to the C-snappy: + // snappy "github.com/dgryski/go-csnappy" +) + +// SnappyFile: create a drop-in-replacement/wrapper for an *os.File that handles doing the unsnappification online as more is read from it + +type SnappyFile struct { + Fname string + + Reader io.Reader + Writer io.Writer + + // allow clients to substitute us for an os.File and just switch + // off compression if they don't want it. + SnappyEncodeDecodeOff bool // if true, we bypass straight to Filep + + EncBuf FixedSizeRingBuf // holds any extra that isn't yet returned, encoded + DecBuf FixedSizeRingBuf // holds any extra that isn't yet returned, decoded + + // for writing to stream-framed snappy + HeaderChunkWritten bool + + // Sanity check: we can only read, or only write, to one SnappyFile. + // EncBuf and DecBuf are used differently in each mode. Verify + // that we are consistent with this flag. 
+ Writing bool +} + +var total int + +// for debugging, show state of buffers +func (f *SnappyFile) Dump() { + fmt.Printf("EncBuf has length %d and contents:\n%s\n", len(f.EncBuf.Bytes()), string(f.EncBuf.Bytes())) + fmt.Printf("DecBuf has length %d and contents:\n%s\n", len(f.DecBuf.Bytes()), string(f.DecBuf.Bytes())) +} + +func (f *SnappyFile) Read(p []byte) (n int, err error) { + + if f.SnappyEncodeDecodeOff { + return f.Reader.Read(p) + } + + if f.Writing { + panic("Reading on a write-only SnappyFile") + } + + // before we unencrypt more, try to drain the DecBuf first + n, _ = f.DecBuf.Read(p) + if n > 0 { + total += n + return n, nil + } + + //nEncRead, nDecAdded, err := UnsnapOneFrame(f.Filep, &f.EncBuf, &f.DecBuf, f.Fname) + _, _, err = UnsnapOneFrame(f.Reader, &f.EncBuf, &f.DecBuf, f.Fname) + if err != nil && err != io.EOF { + panic(err) + } + + n, _ = f.DecBuf.Read(p) + + if n > 0 { + total += n + return n, nil + } + if f.DecBuf.Readable == 0 { + if f.DecBuf.Readable == 0 && f.EncBuf.Readable == 0 { + // only now (when EncBuf is empty) can we give io.EOF. + // Any earlier, and we leave stuff un-decoded! 
+ return 0, io.EOF + } + } + return 0, nil +} + +func Open(name string) (file *SnappyFile, err error) { + fp, err := os.Open(name) + if err != nil { + return nil, err + } + // encoding in snappy can apparently go beyond the original size, so + // we make our buffers big enough, 2*max snappy chunk => 2 * CHUNK_MAX(65536) + + snap := NewReader(fp) + snap.Fname = name + return snap, nil +} + +func NewReader(r io.Reader) *SnappyFile { + return &SnappyFile{ + Reader: r, + EncBuf: *NewFixedSizeRingBuf(CHUNK_MAX * 2), // buffer of snappy encoded bytes + DecBuf: *NewFixedSizeRingBuf(CHUNK_MAX * 2), // buffer of snapppy decoded bytes + Writing: false, + } +} + +func NewWriter(w io.Writer) *SnappyFile { + return &SnappyFile{ + Writer: w, + EncBuf: *NewFixedSizeRingBuf(65536), // on writing: temp for testing compression + DecBuf: *NewFixedSizeRingBuf(65536 * 2), // on writing: final buffer of snappy framed and encoded bytes + Writing: true, + } +} + +func Create(name string) (file *SnappyFile, err error) { + fp, err := os.Create(name) + if err != nil { + return nil, err + } + snap := NewWriter(fp) + snap.Fname = name + return snap, nil +} + +func (f *SnappyFile) Close() error { + if f.Writing { + wc, ok := f.Writer.(io.WriteCloser) + if ok { + return wc.Close() + } + return nil + } + rc, ok := f.Reader.(io.ReadCloser) + if ok { + return rc.Close() + } + return nil +} + +func (f *SnappyFile) Sync() error { + file, ok := f.Writer.(*os.File) + if ok { + return file.Sync() + } + return nil +} + +// for an increment of a frame at a time: +// read from r into encBuf (encBuf is still encoded, thus the name), and write unsnappified frames into outDecodedBuf +// the returned n: number of bytes read from the encrypted encBuf +func UnsnapOneFrame(r io.Reader, encBuf *FixedSizeRingBuf, outDecodedBuf *FixedSizeRingBuf, fname string) (nEnc int64, nDec int64, err error) { + // b, err := ioutil.ReadAll(r) + // if err != nil { + // panic(err) + // } + + nEnc = 0 + nDec = 0 + + // read up to 
65536 bytes from r into encBuf, at least a snappy frame + nread, err := io.CopyN(encBuf, r, 65536) // returns nwrotebytes, err + nEnc += nread + if err != nil { + if err == io.EOF { + if nread == 0 { + if encBuf.Readable == 0 { + return nEnc, nDec, io.EOF + } + // else we have bytes in encBuf, so decode them! + err = nil + } else { + // continue below, processing the nread bytes + err = nil + } + } else { + panic(err) + } + } + + // flag for printing chunk size alignment messages + verbose := false + + const snappyStreamHeaderSz = 10 + const headerSz = 4 + const crc32Sz = 4 + // the magic 18 bytes accounts for the snappy streaming header and the first chunks size and checksum + // http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt + + chunk := (*encBuf).Bytes() + + // however we exit, advance as + // defer func() { (*encBuf).Next(N) }() + + // 65536 is the max size of a snappy framed chunk. See + // http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt:91 + // buf := make([]byte, 65536) + + // fmt.Printf("read from file, b is len:%d with value: %#v\n", len(b), b) + // fmt.Printf("read from file, bcut is len:%d with value: %#v\n", len(bcut), bcut) + + //fmt.Printf("raw bytes of chunksz are: %v\n", b[11:14]) + + fourbytes := make([]byte, 4) + chunkCount := 0 + + for nDec < 65536 { + if len(chunk) == 0 { + break + } + chunkCount++ + fourbytes[3] = 0 + copy(fourbytes, chunk[1:4]) + chunksz := binary.LittleEndian.Uint32(fourbytes) + chunk_type := chunk[0] + + switch true { + case chunk_type == 0xff: + { // stream identifier + + streamHeader := chunk[:snappyStreamHeaderSz] + if 0 != bytes.Compare(streamHeader, []byte{0xff, 0x06, 0x00, 0x00, 0x73, 0x4e, 0x61, 0x50, 0x70, 0x59}) { + panic("file had chunk starting with 0xff but then no magic snappy streaming protocol bytes, aborting.") + } else { + //fmt.Printf("got streaming snappy magic header just fine.\n") + } + chunk = chunk[snappyStreamHeaderSz:] + 
(*encBuf).Advance(snappyStreamHeaderSz) + nEnc += snappyStreamHeaderSz + continue + } + case chunk_type == 0x00: + { // compressed data + if verbose { + fmt.Fprintf(os.Stderr, "chunksz is %d while total bytes avail are: %d\n", int(chunksz), len(chunk)-4) + } + + crc := binary.LittleEndian.Uint32(chunk[headerSz:(headerSz + crc32Sz)]) + section := chunk[(headerSz + crc32Sz):(headerSz + chunksz)] + + dec, ok := snappy.Decode(nil, section) + if ok != nil { + // we've probably truncated a snappy frame at this point + // ok=snappy: corrupt input + // len(dec) == 0 + // + panic(fmt.Sprintf("could not decode snappy stream: '%s' and len dec=%d and ok=%v\n", fname, len(dec), ok)) + + // get back to caller with what we've got so far + return nEnc, nDec, nil + } + // fmt.Printf("ok, b is %#v , %#v\n", ok, dec) + + // spit out decoded text + // n, err := w.Write(dec) + //fmt.Printf("len(dec) = %d, outDecodedBuf.Readable=%d\n", len(dec), outDecodedBuf.Readable) + bnb := bytes.NewBuffer(dec) + n, err := io.Copy(outDecodedBuf, bnb) + if err != nil { + //fmt.Printf("got n=%d, err= %s ; when trying to io.Copy(outDecodedBuf: N=%d, Readable=%d)\n", n, err, outDecodedBuf.N, outDecodedBuf.Readable) + panic(err) + } + if n != int64(len(dec)) { + panic("could not write all bytes to outDecodedBuf") + } + nDec += n + + // verify the crc32 rotated checksum + m32 := masked_crc32c(dec) + if m32 != crc { + panic(fmt.Sprintf("crc32 masked failiure. 
expected: %v but got: %v", crc, m32)) + } else { + //fmt.Printf("\nchecksums match: %v == %v\n", crc, m32) + } + + // move to next header + inc := (headerSz + int(chunksz)) + chunk = chunk[inc:] + (*encBuf).Advance(inc) + nEnc += int64(inc) + continue + } + case chunk_type == 0x01: + { // uncompressed data + + //n, err := w.Write(chunk[(headerSz+crc32Sz):(headerSz + int(chunksz))]) + n, err := io.Copy(outDecodedBuf, bytes.NewBuffer(chunk[(headerSz+crc32Sz):(headerSz+int(chunksz))])) + if verbose { + //fmt.Printf("debug: n=%d err=%v chunksz=%d outDecodedBuf='%v'\n", n, err, chunksz, outDecodedBuf) + } + if err != nil { + panic(err) + } + if n != int64(chunksz-crc32Sz) { + panic("could not write all bytes to stdout") + } + nDec += n + + inc := (headerSz + int(chunksz)) + chunk = chunk[inc:] + (*encBuf).Advance(inc) + nEnc += int64(inc) + continue + } + case chunk_type == 0xfe: + fallthrough // padding, just skip it + case chunk_type >= 0x80 && chunk_type <= 0xfd: + { // Reserved skippable chunks + //fmt.Printf("\nin reserved skippable chunks, at nEnc=%v\n", nEnc) + inc := (headerSz + int(chunksz)) + chunk = chunk[inc:] + nEnc += int64(inc) + (*encBuf).Advance(inc) + continue + } + + default: + panic(fmt.Sprintf("unrecognized/unsupported chunk type %#v", chunk_type)) + } + + } // end for{} + + return nEnc, nDec, err + //return int64(N), nil +} + +// for whole file at once: +// +// receive on stdin a stream of bytes in the snappy-streaming framed +// format, defined here: http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt +// Grab each frame, run it through the snappy decoder, and spit out +// each frame all joined back-to-back on stdout. 
+// +func Unsnappy(r io.Reader, w io.Writer) (err error) { + b, err := ioutil.ReadAll(r) + if err != nil { + panic(err) + } + + // flag for printing chunk size alignment messages + verbose := false + + const snappyStreamHeaderSz = 10 + const headerSz = 4 + const crc32Sz = 4 + // the magic 18 bytes accounts for the snappy streaming header and the first chunks size and checksum + // http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt + + chunk := b[:] + + // 65536 is the max size of a snappy framed chunk. See + // http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt:91 + //buf := make([]byte, 65536) + + // fmt.Printf("read from file, b is len:%d with value: %#v\n", len(b), b) + // fmt.Printf("read from file, bcut is len:%d with value: %#v\n", len(bcut), bcut) + + //fmt.Printf("raw bytes of chunksz are: %v\n", b[11:14]) + + fourbytes := make([]byte, 4) + chunkCount := 0 + + for { + if len(chunk) == 0 { + break + } + chunkCount++ + fourbytes[3] = 0 + copy(fourbytes, chunk[1:4]) + chunksz := binary.LittleEndian.Uint32(fourbytes) + chunk_type := chunk[0] + + switch true { + case chunk_type == 0xff: + { // stream identifier + + streamHeader := chunk[:snappyStreamHeaderSz] + if 0 != bytes.Compare(streamHeader, []byte{0xff, 0x06, 0x00, 0x00, 0x73, 0x4e, 0x61, 0x50, 0x70, 0x59}) { + panic("file had chunk starting with 0xff but then no magic snappy streaming protocol bytes, aborting.") + } else { + //fmt.Printf("got streaming snappy magic header just fine.\n") + } + chunk = chunk[snappyStreamHeaderSz:] + continue + } + case chunk_type == 0x00: + { // compressed data + if verbose { + fmt.Fprintf(os.Stderr, "chunksz is %d while total bytes avail are: %d\n", int(chunksz), len(chunk)-4) + } + + //crc := binary.LittleEndian.Uint32(chunk[headerSz:(headerSz + crc32Sz)]) + section := chunk[(headerSz + crc32Sz):(headerSz + chunksz)] + + dec, ok := snappy.Decode(nil, section) + if ok != nil { + panic("could not decode snappy stream") + } + // 
fmt.Printf("ok, b is %#v , %#v\n", ok, dec) + + // spit out decoded text + n, err := w.Write(dec) + if err != nil { + panic(err) + } + if n != len(dec) { + panic("could not write all bytes to stdout") + } + + // TODO: verify the crc32 rotated checksum? + + // move to next header + chunk = chunk[(headerSz + int(chunksz)):] + continue + } + case chunk_type == 0x01: + { // uncompressed data + + //crc := binary.LittleEndian.Uint32(chunk[headerSz:(headerSz + crc32Sz)]) + section := chunk[(headerSz + crc32Sz):(headerSz + chunksz)] + + n, err := w.Write(section) + if err != nil { + panic(err) + } + if n != int(chunksz-crc32Sz) { + panic("could not write all bytes to stdout") + } + + chunk = chunk[(headerSz + int(chunksz)):] + continue + } + case chunk_type == 0xfe: + fallthrough // padding, just skip it + case chunk_type >= 0x80 && chunk_type <= 0xfd: + { // Reserved skippable chunks + chunk = chunk[(headerSz + int(chunksz)):] + continue + } + + default: + panic(fmt.Sprintf("unrecognized/unsupported chunk type %#v", chunk_type)) + } + + } // end for{} + + return nil +} + +// 0xff 0x06 0x00 0x00 sNaPpY +var SnappyStreamHeaderMagic = []byte{0xff, 0x06, 0x00, 0x00, 0x73, 0x4e, 0x61, 0x50, 0x70, 0x59} + +const CHUNK_MAX = 65536 +const _STREAM_TO_STREAM_BLOCK_SIZE = CHUNK_MAX +const _STREAM_IDENTIFIER = `sNaPpY` +const _COMPRESSED_CHUNK = 0x00 +const _UNCOMPRESSED_CHUNK = 0x01 +const _IDENTIFIER_CHUNK = 0xff +const _RESERVED_UNSKIPPABLE0 = 0x02 // chunk ranges are [inclusive, exclusive) +const _RESERVED_UNSKIPPABLE1 = 0x80 +const _RESERVED_SKIPPABLE0 = 0x80 +const _RESERVED_SKIPPABLE1 = 0xff + +// the minimum percent of bytes compression must save to be enabled in automatic +// mode +const _COMPRESSION_THRESHOLD = .125 + +var crctab *crc32.Table + +func init() { + crctab = crc32.MakeTable(crc32.Castagnoli) // this is correct table, matches the crc32c.c code used by python +} + +func masked_crc32c(data []byte) uint32 { + + // see the framing format specification, 
http://code.google.com/p/snappy/source/browse/trunk/framing_format.txt + var crc uint32 = crc32.Checksum(data, crctab) + return (uint32((crc>>15)|(crc<<17)) + 0xa282ead8) +} + +func ReadSnappyStreamCompressedFile(filename string) ([]byte, error) { + + snappyFile, err := Open(filename) + if err != nil { + return []byte{}, err + } + + var bb bytes.Buffer + _, err = bb.ReadFrom(snappyFile) + if err == io.EOF { + err = nil + } + if err != nil { + panic(err) + } + + return bb.Bytes(), err +} diff --git a/vendor/github.com/go-logr/logr/.golangci.yaml b/vendor/github.com/go-logr/logr/.golangci.yaml new file mode 100644 index 0000000..0cffafa --- /dev/null +++ b/vendor/github.com/go-logr/logr/.golangci.yaml @@ -0,0 +1,26 @@ +run: + timeout: 1m + tests: true + +linters: + disable-all: true + enable: + - asciicheck + - errcheck + - forcetypeassert + - gocritic + - gofmt + - goimports + - gosimple + - govet + - ineffassign + - misspell + - revive + - staticcheck + - typecheck + - unused + +issues: + exclude-use-default: false + max-issues-per-linter: 0 + max-same-issues: 10 diff --git a/vendor/github.com/go-logr/logr/CHANGELOG.md b/vendor/github.com/go-logr/logr/CHANGELOG.md new file mode 100644 index 0000000..c356960 --- /dev/null +++ b/vendor/github.com/go-logr/logr/CHANGELOG.md @@ -0,0 +1,6 @@ +# CHANGELOG + +## v1.0.0-rc1 + +This is the first logged release. Major changes (including breaking changes) +have occurred since earlier tags. diff --git a/vendor/github.com/go-logr/logr/CONTRIBUTING.md b/vendor/github.com/go-logr/logr/CONTRIBUTING.md new file mode 100644 index 0000000..5d37e29 --- /dev/null +++ b/vendor/github.com/go-logr/logr/CONTRIBUTING.md @@ -0,0 +1,17 @@ +# Contributing + +Logr is open to pull-requests, provided they fit within the intended scope of +the project. Specifically, this library aims to be VERY small and minimalist, +with no external dependencies. 
+ +## Compatibility + +This project intends to follow [semantic versioning](http://semver.org) and +is very strict about compatibility. Any proposed changes MUST follow those +rules. + +## Performance + +As a logging library, logr must be as light-weight as possible. Any proposed +code change must include results of running the [benchmark](./benchmark) +before and after the change. diff --git a/vendor/github.com/go-logr/logr/LICENSE b/vendor/github.com/go-logr/logr/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/vendor/github.com/go-logr/logr/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-logr/logr/README.md b/vendor/github.com/go-logr/logr/README.md new file mode 100644 index 0000000..8969526 --- /dev/null +++ b/vendor/github.com/go-logr/logr/README.md @@ -0,0 +1,406 @@ +# A minimal logging API for Go + +[![Go Reference](https://pkg.go.dev/badge/github.com/go-logr/logr.svg)](https://pkg.go.dev/github.com/go-logr/logr) +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/go-logr/logr/badge)](https://securityscorecards.dev/viewer/?platform=github.com&org=go-logr&repo=logr) + +logr offers an(other) opinion on how Go programs and libraries can do logging +without becoming coupled to a particular logging implementation. This is not +an implementation of logging - it is an API. In fact it is two APIs with two +different sets of users. + +The `Logger` type is intended for application and library authors. It provides +a relatively small API which can be used everywhere you want to emit logs. It +defers the actual act of writing logs (to files, to stdout, or whatever) to the +`LogSink` interface. + +The `LogSink` interface is intended for logging library implementers. It is a +pure interface which can be implemented by logging frameworks to provide the actual logging +functionality. 
+ +This decoupling allows application and library developers to write code in +terms of `logr.Logger` (which has very low dependency fan-out) while the +implementation of logging is managed "up stack" (e.g. in or near `main()`.) +Application developers can then switch out implementations as necessary. + +Many people assert that libraries should not be logging, and as such efforts +like this are pointless. Those people are welcome to convince the authors of +the tens-of-thousands of libraries that *DO* write logs that they are all +wrong. In the meantime, logr takes a more practical approach. + +## Typical usage + +Somewhere, early in an application's life, it will make a decision about which +logging library (implementation) it actually wants to use. Something like: + +``` + func main() { + // ... other setup code ... + + // Create the "root" logger. We have chosen the "logimpl" implementation, + // which takes some initial parameters and returns a logr.Logger. + logger := logimpl.New(param1, param2) + + // ... other setup code ... +``` + +Most apps will call into other libraries, create structures to govern the flow, +etc. The `logr.Logger` object can be passed to these other libraries, stored +in structs, or even used as a package-global variable, if needed. For example: + +``` + app := createTheAppObject(logger) + app.Run() +``` + +Outside of this early setup, no other packages need to know about the choice of +implementation. They write logs in terms of the `logr.Logger` that they +received: + +``` + type appObject struct { + // ... other fields ... + logger logr.Logger + // ... other fields ... + } + + func (app *appObject) Run() { + app.logger.Info("starting up", "timestamp", time.Now()) + + // ... app code ... +``` + +## Background + +If the Go standard library had defined an interface for logging, this project +probably would not be needed. Alas, here we are. 
+ +When the Go developers started developing such an interface with +[slog](https://github.com/golang/go/issues/56345), they adopted some of the +logr design but also left out some parts and changed others: + +| Feature | logr | slog | +|---------|------|------| +| High-level API | `Logger` (passed by value) | `Logger` (passed by [pointer](https://github.com/golang/go/issues/59126)) | +| Low-level API | `LogSink` | `Handler` | +| Stack unwinding | done by `LogSink` | done by `Logger` | +| Skipping helper functions | `WithCallDepth`, `WithCallStackHelper` | [not supported by Logger](https://github.com/golang/go/issues/59145) | +| Generating a value for logging on demand | `Marshaler` | `LogValuer` | +| Log levels | >= 0, higher meaning "less important" | positive and negative, with 0 for "info" and higher meaning "more important" | +| Error log entries | always logged, don't have a verbosity level | normal log entries with level >= `LevelError` | +| Passing logger via context | `NewContext`, `FromContext` | no API | +| Adding a name to a logger | `WithName` | no API | +| Modify verbosity of log entries in a call chain | `V` | no API | +| Grouping of key/value pairs | not supported | `WithGroup`, `GroupValue` | +| Pass context for extracting additional values | no API | API variants like `InfoCtx` | + +The high-level slog API is explicitly meant to be one of many different APIs +that can be layered on top of a shared `slog.Handler`. logr is one such +alternative API, with [interoperability](#slog-interoperability) provided by +some conversion functions. + +### Inspiration + +Before you consider this package, please read [this blog post by the +inimitable Dave Cheney][warning-makes-no-sense]. We really appreciate what +he has to say, and it largely aligns with our own experiences. + +### Differences from Dave's ideas + +The main differences are: + +1. Dave basically proposes doing away with the notion of a logging API in favor +of `fmt.Printf()`. 
We disagree, especially when you consider things like output +locations, timestamps, file and line decorations, and structured logging. This +package restricts the logging API to just 2 types of logs: info and error. + +Info logs are things you want to tell the user which are not errors. Error +logs are, well, errors. If your code receives an `error` from a subordinate +function call and is logging that `error` *and not returning it*, use error +logs. + +2. Verbosity-levels on info logs. This gives developers a chance to indicate +arbitrary grades of importance for info logs, without assigning names with +semantic meaning such as "warning", "trace", and "debug." Superficially this +may feel very similar, but the primary difference is the lack of semantics. +Because verbosity is a numerical value, it's safe to assume that an app running +with higher verbosity means more (and less important) logs will be generated. + +## Implementations (non-exhaustive) + +There are implementations for the following logging libraries: + +- **a function** (can bridge to non-structured libraries): [funcr](https://github.com/go-logr/logr/tree/master/funcr) +- **a testing.T** (for use in Go tests, with JSON-like output): [testr](https://github.com/go-logr/logr/tree/master/testr) +- **github.com/google/glog**: [glogr](https://github.com/go-logr/glogr) +- **k8s.io/klog** (for Kubernetes): [klogr](https://git.k8s.io/klog/klogr) +- **a testing.T** (with klog-like text output): [ktesting](https://git.k8s.io/klog/ktesting) +- **go.uber.org/zap**: [zapr](https://github.com/go-logr/zapr) +- **log** (the Go standard library logger): [stdr](https://github.com/go-logr/stdr) +- **github.com/sirupsen/logrus**: [logrusr](https://github.com/bombsimon/logrusr) +- **github.com/wojas/genericr**: [genericr](https://github.com/wojas/genericr) (makes it easy to implement your own backend) +- **logfmt** (Heroku style [logging](https://www.brandur.org/logfmt)): [logfmtr](https://github.com/iand/logfmtr) +- 
**github.com/rs/zerolog**: [zerologr](https://github.com/go-logr/zerologr) +- **github.com/go-kit/log**: [gokitlogr](https://github.com/tonglil/gokitlogr) (also compatible with github.com/go-kit/kit/log since v0.12.0) +- **bytes.Buffer** (writing to a buffer): [bufrlogr](https://github.com/tonglil/buflogr) (useful for ensuring values were logged, like during testing) + +## slog interoperability + +Interoperability goes both ways, using the `logr.Logger` API with a `slog.Handler` +and using the `slog.Logger` API with a `logr.LogSink`. `FromSlogHandler` and +`ToSlogHandler` convert between a `logr.Logger` and a `slog.Handler`. +As usual, `slog.New` can be used to wrap such a `slog.Handler` in the high-level +slog API. + +### Using a `logr.LogSink` as backend for slog + +Ideally, a logr sink implementation should support both logr and slog by +implementing both the normal logr interface(s) and `SlogSink`. Because +of a conflict in the parameters of the common `Enabled` method, it is [not +possible to implement both slog.Handler and logr.Sink in the same +type](https://github.com/golang/go/issues/59110). + +If both are supported, log calls can go from the high-level APIs to the backend +without the need to convert parameters. `FromSlogHandler` and `ToSlogHandler` can +convert back and forth without adding additional wrappers, with one exception: +when `Logger.V` was used to adjust the verbosity for a `slog.Handler`, then +`ToSlogHandler` has to use a wrapper which adjusts the verbosity for future +log calls. + +Such an implementation should also support values that implement specific +interfaces from both packages for logging (`logr.Marshaler`, `slog.LogValuer`, +`slog.GroupValue`). logr does not convert those. + +Not supporting slog has several drawbacks: +- Recording source code locations works correctly if the handler gets called + through `slog.Logger`, but may be wrong in other cases. 
That's because a + `logr.Sink` does its own stack unwinding instead of using the program counter + provided by the high-level API. +- slog levels <= 0 can be mapped to logr levels by negating the level without a + loss of information. But all slog levels > 0 (e.g. `slog.LevelWarning` as + used by `slog.Logger.Warn`) must be mapped to 0 before calling the sink + because logr does not support "more important than info" levels. +- The slog group concept is supported by prefixing each key in a key/value + pair with the group names, separated by a dot. For structured output like + JSON it would be better to group the key/value pairs inside an object. +- Special slog values and interfaces don't work as expected. +- The overhead is likely to be higher. + +These drawbacks are severe enough that applications using a mixture of slog and +logr should switch to a different backend. + +### Using a `slog.Handler` as backend for logr + +Using a plain `slog.Handler` without support for logr works better than the +other direction: +- All logr verbosity levels can be mapped 1:1 to their corresponding slog level + by negating them. +- Stack unwinding is done by the `SlogSink` and the resulting program + counter is passed to the `slog.Handler`. +- Names added via `Logger.WithName` are gathered and recorded in an additional + attribute with `logger` as key and the names separated by slash as value. +- `Logger.Error` is turned into a log record with `slog.LevelError` as level + and an additional attribute with `err` as key, if an error was provided. + +The main drawback is that `logr.Marshaler` will not be supported. Types should +ideally support both `logr.Marshaler` and `slog.Valuer`. If compatibility +with logr implementations without slog support is not important, then +`slog.Valuer` is sufficient. + +### Context support for slog + +Storing a logger in a `context.Context` is not supported by +slog. 
`NewContextWithSlogLogger` and `FromContextAsSlogLogger` can be +used to fill this gap. They store and retrieve a `slog.Logger` pointer +under the same context key that is also used by `NewContext` and +`FromContext` for `logr.Logger` value. + +When `NewContextWithSlogLogger` is followed by `FromContext`, the latter will +automatically convert the `slog.Logger` to a +`logr.Logger`. `FromContextAsSlogLogger` does the same for the other direction. + +With this approach, binaries which use either slog or logr are as efficient as +possible with no unnecessary allocations. This is also why the API stores a +`slog.Logger` pointer: when storing a `slog.Handler`, creating a `slog.Logger` +on retrieval would need to allocate one. + +The downside is that switching back and forth needs more allocations. Because +logr is the API that is already in use by different packages, in particular +Kubernetes, the recommendation is to use the `logr.Logger` API in code which +uses contextual logging. + +An alternative to adding values to a logger and storing that logger in the +context is to store the values in the context and to configure a logging +backend to extract those values when emitting log entries. This only works when +log calls are passed the context, which is not supported by the logr API. + +With the slog API, it is possible, but not +required. https://github.com/veqryn/slog-context is a package for slog which +provides additional support code for this approach. It also contains wrappers +for the context functions in logr, so developers who prefer to not use the logr +APIs directly can use those instead and the resulting code will still be +interoperable with logr. + +## FAQ + +### Conceptual + +#### Why structured logging? 
+ +- **Structured logs are more easily queryable**: Since you've got + key-value pairs, it's much easier to query your structured logs for + particular values by filtering on the contents of a particular key -- + think searching request logs for error codes, Kubernetes reconcilers for + the name and namespace of the reconciled object, etc. + +- **Structured logging makes it easier to have cross-referenceable logs**: + Similarly to searchability, if you maintain conventions around your + keys, it becomes easy to gather all log lines related to a particular + concept. + +- **Structured logs allow better dimensions of filtering**: if you have + structure to your logs, you've got more precise control over how much + information is logged -- you might choose in a particular configuration + to log certain keys but not others, only log lines where a certain key + matches a certain value, etc., instead of just having v-levels and names + to key off of. + +- **Structured logs better represent structured data**: sometimes, the + data that you want to log is inherently structured (think tuple-link + objects.) Structured logs allow you to preserve that structure when + outputting. + +#### Why V-levels? + +**V-levels give operators an easy way to control the chattiness of log +operations**. V-levels provide a way for a given package to distinguish +the relative importance or verbosity of a given log message. Then, if +a particular logger or package is logging too many messages, the user +of the package can simply change the v-levels for that library. + +#### Why not named levels, like Info/Warning/Error? + +Read [Dave Cheney's post][warning-makes-no-sense]. Then read [Differences +from Dave's ideas](#differences-from-daves-ideas). + +#### Why not allow format strings, too? + +**Format strings negate many of the benefits of structured logs**: + +- They're not easily searchable without resorting to fuzzy searching, + regular expressions, etc. 
+ +- They don't store structured data well, since contents are flattened into + a string. + +- They're not cross-referenceable. + +- They don't compress easily, since the message is not constant. + +(Unless you turn positional parameters into key-value pairs with numerical +keys, at which point you've gotten key-value logging with meaningless +keys.) + +### Practical + +#### Why key-value pairs, and not a map? + +Key-value pairs are *much* easier to optimize, especially around +allocations. Zap (a structured logger that inspired logr's interface) has +[performance measurements](https://github.com/uber-go/zap#performance) +that show this quite nicely. + +While the interface ends up being a little less obvious, you get +potentially better performance, plus avoid making users type +`map[string]string{}` every time they want to log. + +#### What if my V-levels differ between libraries? + +That's fine. Control your V-levels on a per-logger basis, and use the +`WithName` method to pass different loggers to different libraries. + +Generally, you should take care to ensure that you have relatively +consistent V-levels within a given logger, however, as this makes deciding +on what verbosity of logs to request easier. + +#### But I really want to use a format string! + +That's not actually a question. Assuming your question is "how do +I convert my mental model of logging with format strings to logging with +constant messages": + +1. Figure out what the error actually is, as you'd write in a TL;DR style, + and use that as a message. + +2. For every place you'd write a format specifier, look to the word before + it, and add that as a key value pair. 
+ +For instance, consider the following examples (all taken from spots in the +Kubernetes codebase): + +- `klog.V(4).Infof("Client is returning errors: code %v, error %v", + responseCode, err)` becomes `logger.Error(err, "client returned an + error", "code", responseCode)` + +- `klog.V(4).Infof("Got a Retry-After %ds response for attempt %d to %v", + seconds, retries, url)` becomes `logger.V(4).Info("got a retry-after + response when requesting url", "attempt", retries, "after + seconds", seconds, "url", url)` + +If you *really* must use a format string, use it in a key's value, and +call `fmt.Sprintf` yourself. For instance: `log.Printf("unable to +reflect over type %T")` becomes `logger.Info("unable to reflect over +type", "type", fmt.Sprintf("%T"))`. In general though, the cases where +this is necessary should be few and far between. + +#### How do I choose my V-levels? + +This is basically the only hard constraint: increase V-levels to denote +more verbose or more debug-y logs. + +Otherwise, you can start out with `0` as "you always want to see this", +`1` as "common logging that you might *possibly* want to turn off", and +`10` as "I would like to performance-test your log collection stack." + +Then gradually choose levels in between as you need them, working your way +down from 10 (for debug and trace style logs) and up from 1 (for chattier +info-type logs). For reference, slog pre-defines -4 for debug logs +(corresponds to 4 in logr), which matches what is +[recommended for Kubernetes](https://github.com/kubernetes/community/blob/master/contributors/devel/sig-instrumentation/logging.md#what-method-to-use). + +#### How do I choose my keys? + +Keys are fairly flexible, and can hold more or less any string +value. For best compatibility with implementations and consistency +with existing code in other projects, there are a few conventions you +should consider. + +- Make your keys human-readable. +- Constant keys are generally a good idea. 
+- Be consistent across your codebase. +- Keys should naturally match parts of the message string. +- Use lower case for simple keys and + [lowerCamelCase](https://en.wiktionary.org/wiki/lowerCamelCase) for + more complex ones. Kubernetes is one example of a project that has + [adopted that + convention](https://github.com/kubernetes/community/blob/HEAD/contributors/devel/sig-instrumentation/migration-to-structured-logging.md#name-arguments). + +While key names are mostly unrestricted (and spaces are acceptable), +it's generally a good idea to stick to printable ascii characters, or at +least match the general character set of your log lines. + +#### Why should keys be constant values? + +The point of structured logging is to make later log processing easier. Your +keys are, effectively, the schema of each log message. If you use different +keys across instances of the same log line, you will make your structured logs +much harder to use. `Sprintf()` is for values, not for keys! + +#### Why is this not a pure interface? + +The Logger type is implemented as a struct in order to allow the Go compiler to +optimize things like high-V `Info` logs that are not triggered. Not all of +these implementations are implemented yet, but this structure was suggested as +a way to ensure they *can* be implemented. All of the real work is behind the +`LogSink` interface. + +[warning-makes-no-sense]: http://dave.cheney.net/2015/11/05/lets-talk-about-logging diff --git a/vendor/github.com/go-logr/logr/SECURITY.md b/vendor/github.com/go-logr/logr/SECURITY.md new file mode 100644 index 0000000..1ca756f --- /dev/null +++ b/vendor/github.com/go-logr/logr/SECURITY.md @@ -0,0 +1,18 @@ +# Security Policy + +If you have discovered a security vulnerability in this project, please report it +privately. 
**Do not disclose it as a public issue.** This gives us time to work with you +to fix the issue before public exposure, reducing the chance that the exploit will be +used before a patch is released. + +You may submit the report in the following ways: + +- send an email to go-logr-security@googlegroups.com +- send us a [private vulnerability report](https://github.com/go-logr/logr/security/advisories/new) + +Please provide the following information in your report: + +- A description of the vulnerability and its impact +- How to reproduce the issue + +We ask that you give us 90 days to work on a fix before public exposure. diff --git a/vendor/github.com/go-logr/logr/context.go b/vendor/github.com/go-logr/logr/context.go new file mode 100644 index 0000000..de8bcc3 --- /dev/null +++ b/vendor/github.com/go-logr/logr/context.go @@ -0,0 +1,33 @@ +/* +Copyright 2023 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +// contextKey is how we find Loggers in a context.Context. With Go < 1.21, +// the value is always a Logger value. With Go >= 1.21, the value can be a +// Logger value or a slog.Logger pointer. +type contextKey struct{} + +// notFoundError exists to carry an IsNotFound method. 
+type notFoundError struct{} + +func (notFoundError) Error() string { + return "no logr.Logger was present" +} + +func (notFoundError) IsNotFound() bool { + return true +} diff --git a/vendor/github.com/go-logr/logr/context_noslog.go b/vendor/github.com/go-logr/logr/context_noslog.go new file mode 100644 index 0000000..f012f9a --- /dev/null +++ b/vendor/github.com/go-logr/logr/context_noslog.go @@ -0,0 +1,49 @@ +//go:build !go1.21 +// +build !go1.21 + +/* +Copyright 2019 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +import ( + "context" +) + +// FromContext returns a Logger from ctx or an error if no Logger is found. +func FromContext(ctx context.Context) (Logger, error) { + if v, ok := ctx.Value(contextKey{}).(Logger); ok { + return v, nil + } + + return Logger{}, notFoundError{} +} + +// FromContextOrDiscard returns a Logger from ctx. If no Logger is found, this +// returns a Logger that discards all log messages. +func FromContextOrDiscard(ctx context.Context) Logger { + if v, ok := ctx.Value(contextKey{}).(Logger); ok { + return v + } + + return Discard() +} + +// NewContext returns a new Context, derived from ctx, which carries the +// provided Logger. 
+func NewContext(ctx context.Context, logger Logger) context.Context { + return context.WithValue(ctx, contextKey{}, logger) +} diff --git a/vendor/github.com/go-logr/logr/context_slog.go b/vendor/github.com/go-logr/logr/context_slog.go new file mode 100644 index 0000000..065ef0b --- /dev/null +++ b/vendor/github.com/go-logr/logr/context_slog.go @@ -0,0 +1,83 @@ +//go:build go1.21 +// +build go1.21 + +/* +Copyright 2019 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +import ( + "context" + "fmt" + "log/slog" +) + +// FromContext returns a Logger from ctx or an error if no Logger is found. +func FromContext(ctx context.Context) (Logger, error) { + v := ctx.Value(contextKey{}) + if v == nil { + return Logger{}, notFoundError{} + } + + switch v := v.(type) { + case Logger: + return v, nil + case *slog.Logger: + return FromSlogHandler(v.Handler()), nil + default: + // Not reached. + panic(fmt.Sprintf("unexpected value type for logr context key: %T", v)) + } +} + +// FromContextAsSlogLogger returns a slog.Logger from ctx or nil if no such Logger is found. +func FromContextAsSlogLogger(ctx context.Context) *slog.Logger { + v := ctx.Value(contextKey{}) + if v == nil { + return nil + } + + switch v := v.(type) { + case Logger: + return slog.New(ToSlogHandler(v)) + case *slog.Logger: + return v + default: + // Not reached. + panic(fmt.Sprintf("unexpected value type for logr context key: %T", v)) + } +} + +// FromContextOrDiscard returns a Logger from ctx. 
If no Logger is found, this +// returns a Logger that discards all log messages. +func FromContextOrDiscard(ctx context.Context) Logger { + if logger, err := FromContext(ctx); err == nil { + return logger + } + return Discard() +} + +// NewContext returns a new Context, derived from ctx, which carries the +// provided Logger. +func NewContext(ctx context.Context, logger Logger) context.Context { + return context.WithValue(ctx, contextKey{}, logger) +} + +// NewContextWithSlogLogger returns a new Context, derived from ctx, which carries the +// provided slog.Logger. +func NewContextWithSlogLogger(ctx context.Context, logger *slog.Logger) context.Context { + return context.WithValue(ctx, contextKey{}, logger) +} diff --git a/vendor/github.com/go-logr/logr/discard.go b/vendor/github.com/go-logr/logr/discard.go new file mode 100644 index 0000000..99fe8be --- /dev/null +++ b/vendor/github.com/go-logr/logr/discard.go @@ -0,0 +1,24 @@ +/* +Copyright 2020 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +// Discard returns a Logger that discards all messages logged to it. It can be +// used whenever the caller is not interested in the logs. Logger instances +// produced by this function always compare as equal. 
+func Discard() Logger { + return New(nil) +} diff --git a/vendor/github.com/go-logr/logr/funcr/funcr.go b/vendor/github.com/go-logr/logr/funcr/funcr.go new file mode 100644 index 0000000..fb2f866 --- /dev/null +++ b/vendor/github.com/go-logr/logr/funcr/funcr.go @@ -0,0 +1,911 @@ +/* +Copyright 2021 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Package funcr implements formatting of structured log messages and +// optionally captures the call site and timestamp. +// +// The simplest way to use it is via its implementation of a +// github.com/go-logr/logr.LogSink with output through an arbitrary +// "write" function. See New and NewJSON for details. +// +// # Custom LogSinks +// +// For users who need more control, a funcr.Formatter can be embedded inside +// your own custom LogSink implementation. This is useful when the LogSink +// needs to implement additional methods, for example. +// +// # Formatting +// +// This will respect logr.Marshaler, fmt.Stringer, and error interfaces for +// values which are being logged. When rendering a struct, funcr will use Go's +// standard JSON tags (all except "string"). +package funcr + +import ( + "bytes" + "encoding" + "encoding/json" + "fmt" + "path/filepath" + "reflect" + "runtime" + "strconv" + "strings" + "time" + + "github.com/go-logr/logr" +) + +// New returns a logr.Logger which is implemented by an arbitrary function. 
+func New(fn func(prefix, args string), opts Options) logr.Logger { + return logr.New(newSink(fn, NewFormatter(opts))) +} + +// NewJSON returns a logr.Logger which is implemented by an arbitrary function +// and produces JSON output. +func NewJSON(fn func(obj string), opts Options) logr.Logger { + fnWrapper := func(_, obj string) { + fn(obj) + } + return logr.New(newSink(fnWrapper, NewFormatterJSON(opts))) +} + +// Underlier exposes access to the underlying logging function. Since +// callers only have a logr.Logger, they have to know which +// implementation is in use, so this interface is less of an +// abstraction and more of a way to test type conversion. +type Underlier interface { + GetUnderlying() func(prefix, args string) +} + +func newSink(fn func(prefix, args string), formatter Formatter) logr.LogSink { + l := &fnlogger{ + Formatter: formatter, + write: fn, + } + // For skipping fnlogger.Info and fnlogger.Error. + l.Formatter.AddCallDepth(1) + return l +} + +// Options carries parameters which influence the way logs are generated. +type Options struct { + // LogCaller tells funcr to add a "caller" key to some or all log lines. + // This has some overhead, so some users might not want it. + LogCaller MessageClass + + // LogCallerFunc tells funcr to also log the calling function name. This + // has no effect if caller logging is not enabled (see Options.LogCaller). + LogCallerFunc bool + + // LogTimestamp tells funcr to add a "ts" key to log lines. This has some + // overhead, so some users might not want it. + LogTimestamp bool + + // TimestampFormat tells funcr how to render timestamps when LogTimestamp + // is enabled. If not specified, a default format will be used. For more + // details, see docs for Go's time.Layout. + TimestampFormat string + + // LogInfoLevel tells funcr what key to use to log the info level. + // If not specified, the info level will be logged as "level". + // If this is set to "", the info level will not be logged at all. 
+ LogInfoLevel *string + + // Verbosity tells funcr which V logs to produce. Higher values enable + // more logs. Info logs at or below this level will be written, while logs + // above this level will be discarded. + Verbosity int + + // RenderBuiltinsHook allows users to mutate the list of key-value pairs + // while a log line is being rendered. The kvList argument follows logr + // conventions - each pair of slice elements is comprised of a string key + // and an arbitrary value (verified and sanitized before calling this + // hook). The value returned must follow the same conventions. This hook + // can be used to audit or modify logged data. For example, you might want + // to prefix all of funcr's built-in keys with some string. This hook is + // only called for built-in (provided by funcr itself) key-value pairs. + // Equivalent hooks are offered for key-value pairs saved via + // logr.Logger.WithValues or Formatter.AddValues (see RenderValuesHook) and + // for user-provided pairs (see RenderArgsHook). + RenderBuiltinsHook func(kvList []any) []any + + // RenderValuesHook is the same as RenderBuiltinsHook, except that it is + // only called for key-value pairs saved via logr.Logger.WithValues. See + // RenderBuiltinsHook for more details. + RenderValuesHook func(kvList []any) []any + + // RenderArgsHook is the same as RenderBuiltinsHook, except that it is only + // called for key-value pairs passed directly to Info and Error. See + // RenderBuiltinsHook for more details. + RenderArgsHook func(kvList []any) []any + + // MaxLogDepth tells funcr how many levels of nested fields (e.g. a struct + // that contains a struct, etc.) it may log. Every time it finds a struct, + // slice, array, or map the depth is increased by one. When the maximum is + // reached, the value will be converted to a string indicating that the max + // depth has been exceeded. If this field is not specified, a default + // value will be used. 
+ MaxLogDepth int +} + +// MessageClass indicates which category or categories of messages to consider. +type MessageClass int + +const ( + // None ignores all message classes. + None MessageClass = iota + // All considers all message classes. + All + // Info only considers info messages. + Info + // Error only considers error messages. + Error +) + +// fnlogger inherits some of its LogSink implementation from Formatter +// and just needs to add some glue code. +type fnlogger struct { + Formatter + write func(prefix, args string) +} + +func (l fnlogger) WithName(name string) logr.LogSink { + l.Formatter.AddName(name) + return &l +} + +func (l fnlogger) WithValues(kvList ...any) logr.LogSink { + l.Formatter.AddValues(kvList) + return &l +} + +func (l fnlogger) WithCallDepth(depth int) logr.LogSink { + l.Formatter.AddCallDepth(depth) + return &l +} + +func (l fnlogger) Info(level int, msg string, kvList ...any) { + prefix, args := l.FormatInfo(level, msg, kvList) + l.write(prefix, args) +} + +func (l fnlogger) Error(err error, msg string, kvList ...any) { + prefix, args := l.FormatError(err, msg, kvList) + l.write(prefix, args) +} + +func (l fnlogger) GetUnderlying() func(prefix, args string) { + return l.write +} + +// Assert conformance to the interfaces. +var _ logr.LogSink = &fnlogger{} +var _ logr.CallDepthLogSink = &fnlogger{} +var _ Underlier = &fnlogger{} + +// NewFormatter constructs a Formatter which emits a JSON-like key=value format. +func NewFormatter(opts Options) Formatter { + return newFormatter(opts, outputKeyValue) +} + +// NewFormatterJSON constructs a Formatter which emits strict JSON. +func NewFormatterJSON(opts Options) Formatter { + return newFormatter(opts, outputJSON) +} + +// Defaults for Options. 
+const defaultTimestampFormat = "2006-01-02 15:04:05.000000" +const defaultMaxLogDepth = 16 + +func newFormatter(opts Options, outfmt outputFormat) Formatter { + if opts.TimestampFormat == "" { + opts.TimestampFormat = defaultTimestampFormat + } + if opts.MaxLogDepth == 0 { + opts.MaxLogDepth = defaultMaxLogDepth + } + if opts.LogInfoLevel == nil { + opts.LogInfoLevel = new(string) + *opts.LogInfoLevel = "level" + } + f := Formatter{ + outputFormat: outfmt, + prefix: "", + values: nil, + depth: 0, + opts: &opts, + } + return f +} + +// Formatter is an opaque struct which can be embedded in a LogSink +// implementation. It should be constructed with NewFormatter. Some of +// its methods directly implement logr.LogSink. +type Formatter struct { + outputFormat outputFormat + prefix string + values []any + valuesStr string + parentValuesStr string + depth int + opts *Options + group string // for slog groups + groupDepth int +} + +// outputFormat indicates which outputFormat to use. +type outputFormat int + +const ( + // outputKeyValue emits a JSON-like key=value format, but not strict JSON. + outputKeyValue outputFormat = iota + // outputJSON emits strict JSON. + outputJSON +) + +// PseudoStruct is a list of key-value pairs that gets logged as a struct. +type PseudoStruct []any + +// render produces a log line, ready to use. +func (f Formatter) render(builtins, args []any) string { + // Empirically bytes.Buffer is faster than strings.Builder for this. 
+ buf := bytes.NewBuffer(make([]byte, 0, 1024)) + if f.outputFormat == outputJSON { + buf.WriteByte('{') // for the whole line + } + + vals := builtins + if hook := f.opts.RenderBuiltinsHook; hook != nil { + vals = hook(f.sanitize(vals)) + } + f.flatten(buf, vals, false, false) // keys are ours, no need to escape + continuing := len(builtins) > 0 + + if f.parentValuesStr != "" { + if continuing { + buf.WriteByte(f.comma()) + } + buf.WriteString(f.parentValuesStr) + continuing = true + } + + groupDepth := f.groupDepth + if f.group != "" { + if f.valuesStr != "" || len(args) != 0 { + if continuing { + buf.WriteByte(f.comma()) + } + buf.WriteString(f.quoted(f.group, true)) // escape user-provided keys + buf.WriteByte(f.colon()) + buf.WriteByte('{') // for the group + continuing = false + } else { + // The group was empty + groupDepth-- + } + } + + if f.valuesStr != "" { + if continuing { + buf.WriteByte(f.comma()) + } + buf.WriteString(f.valuesStr) + continuing = true + } + + vals = args + if hook := f.opts.RenderArgsHook; hook != nil { + vals = hook(f.sanitize(vals)) + } + f.flatten(buf, vals, continuing, true) // escape user-provided keys + + for i := 0; i < groupDepth; i++ { + buf.WriteByte('}') // for the groups + } + + if f.outputFormat == outputJSON { + buf.WriteByte('}') // for the whole line + } + + return buf.String() +} + +// flatten renders a list of key-value pairs into a buffer. If continuing is +// true, it assumes that the buffer has previous values and will emit a +// separator (which depends on the output format) before the first pair it +// writes. If escapeKeys is true, the keys are assumed to have +// non-JSON-compatible characters in them and must be evaluated for escapes. +// +// This function returns a potentially modified version of kvList, which +// ensures that there is a value for every key (adding a value if needed) and +// that each key is a string (substituting a key if needed). 
+func (f Formatter) flatten(buf *bytes.Buffer, kvList []any, continuing bool, escapeKeys bool) []any { + // This logic overlaps with sanitize() but saves one type-cast per key, + // which can be measurable. + if len(kvList)%2 != 0 { + kvList = append(kvList, noValue) + } + copied := false + for i := 0; i < len(kvList); i += 2 { + k, ok := kvList[i].(string) + if !ok { + if !copied { + newList := make([]any, len(kvList)) + copy(newList, kvList) + kvList = newList + copied = true + } + k = f.nonStringKey(kvList[i]) + kvList[i] = k + } + v := kvList[i+1] + + if i > 0 || continuing { + if f.outputFormat == outputJSON { + buf.WriteByte(f.comma()) + } else { + // In theory the format could be something we don't understand. In + // practice, we control it, so it won't be. + buf.WriteByte(' ') + } + } + + buf.WriteString(f.quoted(k, escapeKeys)) + buf.WriteByte(f.colon()) + buf.WriteString(f.pretty(v)) + } + return kvList +} + +func (f Formatter) quoted(str string, escape bool) string { + if escape { + return prettyString(str) + } + // this is faster + return `"` + str + `"` +} + +func (f Formatter) comma() byte { + if f.outputFormat == outputJSON { + return ',' + } + return ' ' +} + +func (f Formatter) colon() byte { + if f.outputFormat == outputJSON { + return ':' + } + return '=' +} + +func (f Formatter) pretty(value any) string { + return f.prettyWithFlags(value, 0, 0) +} + +const ( + flagRawStruct = 0x1 // do not print braces on structs +) + +// TODO: This is not fast. Most of the overhead goes here. +func (f Formatter) prettyWithFlags(value any, flags uint32, depth int) string { + if depth > f.opts.MaxLogDepth { + return `""` + } + + // Handle types that take full control of logging. + if v, ok := value.(logr.Marshaler); ok { + // Replace the value with what the type wants to get logged. + // That then gets handled below via reflection. + value = invokeMarshaler(v) + } + + // Handle types that want to format themselves. 
+ switch v := value.(type) { + case fmt.Stringer: + value = invokeStringer(v) + case error: + value = invokeError(v) + } + + // Handling the most common types without reflect is a small perf win. + switch v := value.(type) { + case bool: + return strconv.FormatBool(v) + case string: + return prettyString(v) + case int: + return strconv.FormatInt(int64(v), 10) + case int8: + return strconv.FormatInt(int64(v), 10) + case int16: + return strconv.FormatInt(int64(v), 10) + case int32: + return strconv.FormatInt(int64(v), 10) + case int64: + return strconv.FormatInt(int64(v), 10) + case uint: + return strconv.FormatUint(uint64(v), 10) + case uint8: + return strconv.FormatUint(uint64(v), 10) + case uint16: + return strconv.FormatUint(uint64(v), 10) + case uint32: + return strconv.FormatUint(uint64(v), 10) + case uint64: + return strconv.FormatUint(v, 10) + case uintptr: + return strconv.FormatUint(uint64(v), 10) + case float32: + return strconv.FormatFloat(float64(v), 'f', -1, 32) + case float64: + return strconv.FormatFloat(v, 'f', -1, 64) + case complex64: + return `"` + strconv.FormatComplex(complex128(v), 'f', -1, 64) + `"` + case complex128: + return `"` + strconv.FormatComplex(v, 'f', -1, 128) + `"` + case PseudoStruct: + buf := bytes.NewBuffer(make([]byte, 0, 1024)) + v = f.sanitize(v) + if flags&flagRawStruct == 0 { + buf.WriteByte('{') + } + for i := 0; i < len(v); i += 2 { + if i > 0 { + buf.WriteByte(f.comma()) + } + k, _ := v[i].(string) // sanitize() above means no need to check success + // arbitrary keys might need escaping + buf.WriteString(prettyString(k)) + buf.WriteByte(f.colon()) + buf.WriteString(f.prettyWithFlags(v[i+1], 0, depth+1)) + } + if flags&flagRawStruct == 0 { + buf.WriteByte('}') + } + return buf.String() + } + + buf := bytes.NewBuffer(make([]byte, 0, 256)) + t := reflect.TypeOf(value) + if t == nil { + return "null" + } + v := reflect.ValueOf(value) + switch t.Kind() { + case reflect.Bool: + return strconv.FormatBool(v.Bool()) + case 
reflect.String: + return prettyString(v.String()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return strconv.FormatInt(int64(v.Int()), 10) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return strconv.FormatUint(uint64(v.Uint()), 10) + case reflect.Float32: + return strconv.FormatFloat(float64(v.Float()), 'f', -1, 32) + case reflect.Float64: + return strconv.FormatFloat(v.Float(), 'f', -1, 64) + case reflect.Complex64: + return `"` + strconv.FormatComplex(complex128(v.Complex()), 'f', -1, 64) + `"` + case reflect.Complex128: + return `"` + strconv.FormatComplex(v.Complex(), 'f', -1, 128) + `"` + case reflect.Struct: + if flags&flagRawStruct == 0 { + buf.WriteByte('{') + } + printComma := false // testing i>0 is not enough because of JSON omitted fields + for i := 0; i < t.NumField(); i++ { + fld := t.Field(i) + if fld.PkgPath != "" { + // reflect says this field is only defined for non-exported fields. + continue + } + if !v.Field(i).CanInterface() { + // reflect isn't clear exactly what this means, but we can't use it. 
+ continue + } + name := "" + omitempty := false + if tag, found := fld.Tag.Lookup("json"); found { + if tag == "-" { + continue + } + if comma := strings.Index(tag, ","); comma != -1 { + if n := tag[:comma]; n != "" { + name = n + } + rest := tag[comma:] + if strings.Contains(rest, ",omitempty,") || strings.HasSuffix(rest, ",omitempty") { + omitempty = true + } + } else { + name = tag + } + } + if omitempty && isEmpty(v.Field(i)) { + continue + } + if printComma { + buf.WriteByte(f.comma()) + } + printComma = true // if we got here, we are rendering a field + if fld.Anonymous && fld.Type.Kind() == reflect.Struct && name == "" { + buf.WriteString(f.prettyWithFlags(v.Field(i).Interface(), flags|flagRawStruct, depth+1)) + continue + } + if name == "" { + name = fld.Name + } + // field names can't contain characters which need escaping + buf.WriteString(f.quoted(name, false)) + buf.WriteByte(f.colon()) + buf.WriteString(f.prettyWithFlags(v.Field(i).Interface(), 0, depth+1)) + } + if flags&flagRawStruct == 0 { + buf.WriteByte('}') + } + return buf.String() + case reflect.Slice, reflect.Array: + // If this is outputing as JSON make sure this isn't really a json.RawMessage. + // If so just emit "as-is" and don't pretty it as that will just print + // it as [X,Y,Z,...] which isn't terribly useful vs the string form you really want. + if f.outputFormat == outputJSON { + if rm, ok := value.(json.RawMessage); ok { + // If it's empty make sure we emit an empty value as the array style would below. + if len(rm) > 0 { + buf.Write(rm) + } else { + buf.WriteString("null") + } + return buf.String() + } + } + buf.WriteByte('[') + for i := 0; i < v.Len(); i++ { + if i > 0 { + buf.WriteByte(f.comma()) + } + e := v.Index(i) + buf.WriteString(f.prettyWithFlags(e.Interface(), 0, depth+1)) + } + buf.WriteByte(']') + return buf.String() + case reflect.Map: + buf.WriteByte('{') + // This does not sort the map keys, for best perf. 
+	it := v.MapRange()
+	i := 0
+	for it.Next() {
+		if i > 0 {
+			buf.WriteByte(f.comma())
+		}
+		// If a map key supports TextMarshaler, use it.
+		keystr := ""
+		if m, ok := it.Key().Interface().(encoding.TextMarshaler); ok {
+			txt, err := m.MarshalText()
+			if err != nil {
+				// Surface the marshaling failure in the rendered key rather
+				// than dropping the error string.
+				keystr = fmt.Sprintf("<error-MarshalText: %s>", err.Error())
+			} else {
+				keystr = string(txt)
+			}
+			keystr = prettyString(keystr)
+		} else {
+			// prettyWithFlags will produce already-escaped values
+			keystr = f.prettyWithFlags(it.Key().Interface(), 0, depth+1)
+			if t.Key().Kind() != reflect.String {
+				// JSON only does string keys. Unlike Go's standard JSON, we'll
+				// convert just about anything to a string.
+				keystr = prettyString(keystr)
+			}
+		}
+		buf.WriteString(keystr)
+		buf.WriteByte(f.colon())
+		buf.WriteString(f.prettyWithFlags(it.Value().Interface(), 0, depth+1))
+		i++
+	}
+	buf.WriteByte('}')
+	return buf.String()
+	case reflect.Ptr, reflect.Interface:
+		if v.IsNil() {
+			return "null"
+		}
+		return f.prettyWithFlags(v.Elem().Interface(), 0, depth)
+	}
+	// Unknown reflect.Kind: render a marker that names the kind instead of an
+	// empty string (the format verb was lost in this vendored copy).
+	return fmt.Sprintf(`"<unhandled-%s>"`, t.Kind().String())
+}
+
+func prettyString(s string) string {
+	// Avoid escaping (which does allocations) if we can.
+	if needsEscape(s) {
+		return strconv.Quote(s)
+	}
+	b := bytes.NewBuffer(make([]byte, 0, 1024))
+	b.WriteByte('"')
+	b.WriteString(s)
+	b.WriteByte('"')
+	return b.String()
+}
+
+// needsEscape determines whether the input string needs to be escaped or not,
+// without doing any allocations.
+func needsEscape(s string) bool { + for _, r := range s { + if !strconv.IsPrint(r) || r == '\\' || r == '"' { + return true + } + } + return false +} + +func isEmpty(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Complex64, reflect.Complex128: + return v.Complex() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} + +func invokeMarshaler(m logr.Marshaler) (ret any) { + defer func() { + if r := recover(); r != nil { + ret = fmt.Sprintf("", r) + } + }() + return m.MarshalLog() +} + +func invokeStringer(s fmt.Stringer) (ret string) { + defer func() { + if r := recover(); r != nil { + ret = fmt.Sprintf("", r) + } + }() + return s.String() +} + +func invokeError(e error) (ret string) { + defer func() { + if r := recover(); r != nil { + ret = fmt.Sprintf("", r) + } + }() + return e.Error() +} + +// Caller represents the original call site for a log line, after considering +// logr.Logger.WithCallDepth and logr.Logger.WithCallStackHelper. The File and +// Line fields will always be provided, while the Func field is optional. +// Users can set the render hook fields in Options to examine logged key-value +// pairs, one of which will be {"caller", Caller} if the Options.LogCaller +// field is enabled for the given MessageClass. +type Caller struct { + // File is the basename of the file for this call site. + File string `json:"file"` + // Line is the line number in the file for this call site. 
+ Line int `json:"line"` + // Func is the function name for this call site, or empty if + // Options.LogCallerFunc is not enabled. + Func string `json:"function,omitempty"` +} + +func (f Formatter) caller() Caller { + // +1 for this frame, +1 for Info/Error. + pc, file, line, ok := runtime.Caller(f.depth + 2) + if !ok { + return Caller{"", 0, ""} + } + fn := "" + if f.opts.LogCallerFunc { + if fp := runtime.FuncForPC(pc); fp != nil { + fn = fp.Name() + } + } + + return Caller{filepath.Base(file), line, fn} +} + +const noValue = "" + +func (f Formatter) nonStringKey(v any) string { + return fmt.Sprintf("", f.snippet(v)) +} + +// snippet produces a short snippet string of an arbitrary value. +func (f Formatter) snippet(v any) string { + const snipLen = 16 + + snip := f.pretty(v) + if len(snip) > snipLen { + snip = snip[:snipLen] + } + return snip +} + +// sanitize ensures that a list of key-value pairs has a value for every key +// (adding a value if needed) and that each key is a string (substituting a key +// if needed). +func (f Formatter) sanitize(kvList []any) []any { + if len(kvList)%2 != 0 { + kvList = append(kvList, noValue) + } + for i := 0; i < len(kvList); i += 2 { + _, ok := kvList[i].(string) + if !ok { + kvList[i] = f.nonStringKey(kvList[i]) + } + } + return kvList +} + +// startGroup opens a new group scope (basically a sub-struct), which locks all +// the current saved values and starts them anew. This is needed to satisfy +// slog. +func (f *Formatter) startGroup(group string) { + // Unnamed groups are just inlined. + if group == "" { + return + } + + // Any saved values can no longer be changed. 
+ buf := bytes.NewBuffer(make([]byte, 0, 1024)) + continuing := false + + if f.parentValuesStr != "" { + buf.WriteString(f.parentValuesStr) + continuing = true + } + + if f.group != "" && f.valuesStr != "" { + if continuing { + buf.WriteByte(f.comma()) + } + buf.WriteString(f.quoted(f.group, true)) // escape user-provided keys + buf.WriteByte(f.colon()) + buf.WriteByte('{') // for the group + continuing = false + } + + if f.valuesStr != "" { + if continuing { + buf.WriteByte(f.comma()) + } + buf.WriteString(f.valuesStr) + } + + // NOTE: We don't close the scope here - that's done later, when a log line + // is actually rendered (because we have N scopes to close). + + f.parentValuesStr = buf.String() + + // Start collecting new values. + f.group = group + f.groupDepth++ + f.valuesStr = "" + f.values = nil +} + +// Init configures this Formatter from runtime info, such as the call depth +// imposed by logr itself. +// Note that this receiver is a pointer, so depth can be saved. +func (f *Formatter) Init(info logr.RuntimeInfo) { + f.depth += info.CallDepth +} + +// Enabled checks whether an info message at the given level should be logged. +func (f Formatter) Enabled(level int) bool { + return level <= f.opts.Verbosity +} + +// GetDepth returns the current depth of this Formatter. This is useful for +// implementations which do their own caller attribution. +func (f Formatter) GetDepth() int { + return f.depth +} + +// FormatInfo renders an Info log message into strings. The prefix will be +// empty when no names were set (via AddNames), or when the output is +// configured for JSON. 
+func (f Formatter) FormatInfo(level int, msg string, kvList []any) (prefix, argsStr string) { + args := make([]any, 0, 64) // using a constant here impacts perf + prefix = f.prefix + if f.outputFormat == outputJSON { + args = append(args, "logger", prefix) + prefix = "" + } + if f.opts.LogTimestamp { + args = append(args, "ts", time.Now().Format(f.opts.TimestampFormat)) + } + if policy := f.opts.LogCaller; policy == All || policy == Info { + args = append(args, "caller", f.caller()) + } + if key := *f.opts.LogInfoLevel; key != "" { + args = append(args, key, level) + } + args = append(args, "msg", msg) + return prefix, f.render(args, kvList) +} + +// FormatError renders an Error log message into strings. The prefix will be +// empty when no names were set (via AddNames), or when the output is +// configured for JSON. +func (f Formatter) FormatError(err error, msg string, kvList []any) (prefix, argsStr string) { + args := make([]any, 0, 64) // using a constant here impacts perf + prefix = f.prefix + if f.outputFormat == outputJSON { + args = append(args, "logger", prefix) + prefix = "" + } + if f.opts.LogTimestamp { + args = append(args, "ts", time.Now().Format(f.opts.TimestampFormat)) + } + if policy := f.opts.LogCaller; policy == All || policy == Error { + args = append(args, "caller", f.caller()) + } + args = append(args, "msg", msg) + var loggableErr any + if err != nil { + loggableErr = err.Error() + } + args = append(args, "error", loggableErr) + return prefix, f.render(args, kvList) +} + +// AddName appends the specified name. funcr uses '/' characters to separate +// name elements. Callers should not pass '/' in the provided name string, but +// this library does not actually enforce that. +func (f *Formatter) AddName(name string) { + if len(f.prefix) > 0 { + f.prefix += "/" + } + f.prefix += name +} + +// AddValues adds key-value pairs to the set of saved values to be logged with +// each log line. 
+func (f *Formatter) AddValues(kvList []any) { + // Three slice args forces a copy. + n := len(f.values) + f.values = append(f.values[:n:n], kvList...) + + vals := f.values + if hook := f.opts.RenderValuesHook; hook != nil { + vals = hook(f.sanitize(vals)) + } + + // Pre-render values, so we don't have to do it on each Info/Error call. + buf := bytes.NewBuffer(make([]byte, 0, 1024)) + f.flatten(buf, vals, false, true) // escape user-provided keys + f.valuesStr = buf.String() +} + +// AddCallDepth increases the number of stack-frames to skip when attributing +// the log line to a file and line. +func (f *Formatter) AddCallDepth(depth int) { + f.depth += depth +} diff --git a/vendor/github.com/go-logr/logr/funcr/slogsink.go b/vendor/github.com/go-logr/logr/funcr/slogsink.go new file mode 100644 index 0000000..7bd8476 --- /dev/null +++ b/vendor/github.com/go-logr/logr/funcr/slogsink.go @@ -0,0 +1,105 @@ +//go:build go1.21 +// +build go1.21 + +/* +Copyright 2023 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package funcr + +import ( + "context" + "log/slog" + + "github.com/go-logr/logr" +) + +var _ logr.SlogSink = &fnlogger{} + +const extraSlogSinkDepth = 3 // 2 for slog, 1 for SlogSink + +func (l fnlogger) Handle(_ context.Context, record slog.Record) error { + kvList := make([]any, 0, 2*record.NumAttrs()) + record.Attrs(func(attr slog.Attr) bool { + kvList = attrToKVs(attr, kvList) + return true + }) + + if record.Level >= slog.LevelError { + l.WithCallDepth(extraSlogSinkDepth).Error(nil, record.Message, kvList...) + } else { + level := l.levelFromSlog(record.Level) + l.WithCallDepth(extraSlogSinkDepth).Info(level, record.Message, kvList...) + } + return nil +} + +func (l fnlogger) WithAttrs(attrs []slog.Attr) logr.SlogSink { + kvList := make([]any, 0, 2*len(attrs)) + for _, attr := range attrs { + kvList = attrToKVs(attr, kvList) + } + l.AddValues(kvList) + return &l +} + +func (l fnlogger) WithGroup(name string) logr.SlogSink { + l.startGroup(name) + return &l +} + +// attrToKVs appends a slog.Attr to a logr-style kvList. It handle slog Groups +// and other details of slog. +func attrToKVs(attr slog.Attr, kvList []any) []any { + attrVal := attr.Value.Resolve() + if attrVal.Kind() == slog.KindGroup { + groupVal := attrVal.Group() + grpKVs := make([]any, 0, 2*len(groupVal)) + for _, attr := range groupVal { + grpKVs = attrToKVs(attr, grpKVs) + } + if attr.Key == "" { + // slog says we have to inline these + kvList = append(kvList, grpKVs...) + } else { + kvList = append(kvList, attr.Key, PseudoStruct(grpKVs)) + } + } else if attr.Key != "" { + kvList = append(kvList, attr.Key, attrVal.Any()) + } + + return kvList +} + +// levelFromSlog adjusts the level by the logger's verbosity and negates it. +// It ensures that the result is >= 0. This is necessary because the result is +// passed to a LogSink and that API did not historically document whether +// levels could be negative or what that meant. 
+// +// Some example usage: +// +// logrV0 := getMyLogger() +// logrV2 := logrV0.V(2) +// slogV2 := slog.New(logr.ToSlogHandler(logrV2)) +// slogV2.Debug("msg") // =~ logrV2.V(4) =~ logrV0.V(6) +// slogV2.Info("msg") // =~ logrV2.V(0) =~ logrV0.V(2) +// slogv2.Warn("msg") // =~ logrV2.V(-4) =~ logrV0.V(0) +func (l fnlogger) levelFromSlog(level slog.Level) int { + result := -level + if result < 0 { + result = 0 // because LogSink doesn't expect negative V levels + } + return int(result) +} diff --git a/vendor/github.com/go-logr/logr/logr.go b/vendor/github.com/go-logr/logr/logr.go new file mode 100644 index 0000000..b4428e1 --- /dev/null +++ b/vendor/github.com/go-logr/logr/logr.go @@ -0,0 +1,520 @@ +/* +Copyright 2019 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// This design derives from Dave Cheney's blog: +// http://dave.cheney.net/2015/11/05/lets-talk-about-logging + +// Package logr defines a general-purpose logging API and abstract interfaces +// to back that API. Packages in the Go ecosystem can depend on this package, +// while callers can implement logging with whatever backend is appropriate. +// +// # Usage +// +// Logging is done using a Logger instance. Logger is a concrete type with +// methods, which defers the actual logging to a LogSink interface. The main +// methods of Logger are Info() and Error(). Arguments to Info() and Error() +// are key/value pairs rather than printf-style formatted strings, emphasizing +// "structured logging". 
+// +// With Go's standard log package, we might write: +// +// log.Printf("setting target value %s", targetValue) +// +// With logr's structured logging, we'd write: +// +// logger.Info("setting target", "value", targetValue) +// +// Errors are much the same. Instead of: +// +// log.Printf("failed to open the pod bay door for user %s: %v", user, err) +// +// We'd write: +// +// logger.Error(err, "failed to open the pod bay door", "user", user) +// +// Info() and Error() are very similar, but they are separate methods so that +// LogSink implementations can choose to do things like attach additional +// information (such as stack traces) on calls to Error(). Error() messages are +// always logged, regardless of the current verbosity. If there is no error +// instance available, passing nil is valid. +// +// # Verbosity +// +// Often we want to log information only when the application in "verbose +// mode". To write log lines that are more verbose, Logger has a V() method. +// The higher the V-level of a log line, the less critical it is considered. +// Log-lines with V-levels that are not enabled (as per the LogSink) will not +// be written. Level V(0) is the default, and logger.V(0).Info() has the same +// meaning as logger.Info(). Negative V-levels have the same meaning as V(0). +// Error messages do not have a verbosity level and are always logged. +// +// Where we might have written: +// +// if flVerbose >= 2 { +// log.Printf("an unusual thing happened") +// } +// +// We can write: +// +// logger.V(2).Info("an unusual thing happened") +// +// # Logger Names +// +// Logger instances can have name strings so that all messages logged through +// that instance have additional context. For example, you might want to add +// a subsystem name: +// +// logger.WithName("compactor").Info("started", "time", time.Now()) +// +// The WithName() method returns a new Logger, which can be passed to +// constructors or other functions for further use. 
Repeated use of WithName() +// will accumulate name "segments". These name segments will be joined in some +// way by the LogSink implementation. It is strongly recommended that name +// segments contain simple identifiers (letters, digits, and hyphen), and do +// not contain characters that could muddle the log output or confuse the +// joining operation (e.g. whitespace, commas, periods, slashes, brackets, +// quotes, etc). +// +// # Saved Values +// +// Logger instances can store any number of key/value pairs, which will be +// logged alongside all messages logged through that instance. For example, +// you might want to create a Logger instance per managed object: +// +// With the standard log package, we might write: +// +// log.Printf("decided to set field foo to value %q for object %s/%s", +// targetValue, object.Namespace, object.Name) +// +// With logr we'd write: +// +// // Elsewhere: set up the logger to log the object name. +// obj.logger = mainLogger.WithValues( +// "name", obj.name, "namespace", obj.namespace) +// +// // later on... +// obj.logger.Info("setting foo", "value", targetValue) +// +// # Best Practices +// +// Logger has very few hard rules, with the goal that LogSink implementations +// might have a lot of freedom to differentiate. There are, however, some +// things to consider. +// +// The log message consists of a constant message attached to the log line. +// This should generally be a simple description of what's occurring, and should +// never be a format string. Variable information can then be attached using +// named values. +// +// Keys are arbitrary strings, but should generally be constant values. Values +// may be any Go value, but how the value is formatted is determined by the +// LogSink implementation. +// +// Logger instances are meant to be passed around by value. Code that receives +// such a value can call its methods without having to check whether the +// instance is ready for use. 
+// +// The zero logger (= Logger{}) is identical to Discard() and discards all log +// entries. Code that receives a Logger by value can simply call it, the methods +// will never crash. For cases where passing a logger is optional, a pointer to Logger +// should be used. +// +// # Key Naming Conventions +// +// Keys are not strictly required to conform to any specification or regex, but +// it is recommended that they: +// - be human-readable and meaningful (not auto-generated or simple ordinals) +// - be constant (not dependent on input data) +// - contain only printable characters +// - not contain whitespace or punctuation +// - use lower case for simple keys and lowerCamelCase for more complex ones +// +// These guidelines help ensure that log data is processed properly regardless +// of the log implementation. For example, log implementations will try to +// output JSON data or will store data for later database (e.g. SQL) queries. +// +// While users are generally free to use key names of their choice, it's +// generally best to avoid using the following keys, as they're frequently used +// by implementations: +// - "caller": the calling information (file/line) of a particular log line +// - "error": the underlying error value in the `Error` method +// - "level": the log level +// - "logger": the name of the associated logger +// - "msg": the log message +// - "stacktrace": the stack trace associated with a particular log line or +// error (often from the `Error` message) +// - "ts": the timestamp for a log line +// +// Implementations are encouraged to make use of these keys to represent the +// above concepts, when necessary (for example, in a pure-JSON output form, it +// would be necessary to represent at least message and timestamp as ordinary +// named values). +// +// # Break Glass +// +// Implementations may choose to give callers access to the underlying +// logging implementation. 
The recommended pattern for this is: +// +// // Underlier exposes access to the underlying logging implementation. +// // Since callers only have a logr.Logger, they have to know which +// // implementation is in use, so this interface is less of an abstraction +// // and more of way to test type conversion. +// type Underlier interface { +// GetUnderlying() +// } +// +// Logger grants access to the sink to enable type assertions like this: +// +// func DoSomethingWithImpl(log logr.Logger) { +// if underlier, ok := log.GetSink().(impl.Underlier); ok { +// implLogger := underlier.GetUnderlying() +// ... +// } +// } +// +// Custom `With*` functions can be implemented by copying the complete +// Logger struct and replacing the sink in the copy: +// +// // WithFooBar changes the foobar parameter in the log sink and returns a +// // new logger with that modified sink. It does nothing for loggers where +// // the sink doesn't support that parameter. +// func WithFoobar(log logr.Logger, foobar int) logr.Logger { +// if foobarLogSink, ok := log.GetSink().(FoobarSink); ok { +// log = log.WithSink(foobarLogSink.WithFooBar(foobar)) +// } +// return log +// } +// +// Don't use New to construct a new Logger with a LogSink retrieved from an +// existing Logger. Source code attribution might not work correctly and +// unexported fields in Logger get lost. +// +// Beware that the same LogSink instance may be shared by different logger +// instances. Calling functions that modify the LogSink will affect all of +// those. +package logr + +// New returns a new Logger instance. This is primarily used by libraries +// implementing LogSink, rather than end users. Passing a nil sink will create +// a Logger which discards all log lines. +func New(sink LogSink) Logger { + logger := Logger{} + logger.setSink(sink) + if sink != nil { + sink.Init(runtimeInfo) + } + return logger +} + +// setSink stores the sink and updates any related fields. 
It mutates the +// logger and thus is only safe to use for loggers that are not currently being +// used concurrently. +func (l *Logger) setSink(sink LogSink) { + l.sink = sink +} + +// GetSink returns the stored sink. +func (l Logger) GetSink() LogSink { + return l.sink +} + +// WithSink returns a copy of the logger with the new sink. +func (l Logger) WithSink(sink LogSink) Logger { + l.setSink(sink) + return l +} + +// Logger is an interface to an abstract logging implementation. This is a +// concrete type for performance reasons, but all the real work is passed on to +// a LogSink. Implementations of LogSink should provide their own constructors +// that return Logger, not LogSink. +// +// The underlying sink can be accessed through GetSink and be modified through +// WithSink. This enables the implementation of custom extensions (see "Break +// Glass" in the package documentation). Normally the sink should be used only +// indirectly. +type Logger struct { + sink LogSink + level int +} + +// Enabled tests whether this Logger is enabled. For example, commandline +// flags might be used to set the logging verbosity and disable some info logs. +func (l Logger) Enabled() bool { + // Some implementations of LogSink look at the caller in Enabled (e.g. + // different verbosity levels per package or file), but we only pass one + // CallDepth in (via Init). This means that all calls from Logger to the + // LogSink's Enabled, Info, and Error methods must have the same number of + // frames. In other words, Logger methods can't call other Logger methods + // which call these LogSink methods unless we do it the same in all paths. + return l.sink != nil && l.sink.Enabled(l.level) +} + +// Info logs a non-error message with the given key/value pairs as context. +// +// The msg argument should be used to add some constant description to the log +// line. The key/value pairs can then be used to add additional variable +// information. 
The key/value pairs must alternate string keys and arbitrary +// values. +func (l Logger) Info(msg string, keysAndValues ...any) { + if l.sink == nil { + return + } + if l.sink.Enabled(l.level) { // see comment in Enabled + if withHelper, ok := l.sink.(CallStackHelperLogSink); ok { + withHelper.GetCallStackHelper()() + } + l.sink.Info(l.level, msg, keysAndValues...) + } +} + +// Error logs an error, with the given message and key/value pairs as context. +// It functions similarly to Info, but may have unique behavior, and should be +// preferred for logging errors (see the package documentations for more +// information). The log message will always be emitted, regardless of +// verbosity level. +// +// The msg argument should be used to add context to any underlying error, +// while the err argument should be used to attach the actual error that +// triggered this log line, if present. The err parameter is optional +// and nil may be passed instead of an error instance. +func (l Logger) Error(err error, msg string, keysAndValues ...any) { + if l.sink == nil { + return + } + if withHelper, ok := l.sink.(CallStackHelperLogSink); ok { + withHelper.GetCallStackHelper()() + } + l.sink.Error(err, msg, keysAndValues...) +} + +// V returns a new Logger instance for a specific verbosity level, relative to +// this Logger. In other words, V-levels are additive. A higher verbosity +// level means a log message is less important. Negative V-levels are treated +// as 0. +func (l Logger) V(level int) Logger { + if l.sink == nil { + return l + } + if level < 0 { + level = 0 + } + l.level += level + return l +} + +// GetV returns the verbosity level of the logger. If the logger's LogSink is +// nil as in the Discard logger, this will always return 0. +func (l Logger) GetV() int { + // 0 if l.sink nil because of the if check in V above. + return l.level +} + +// WithValues returns a new Logger instance with additional key/value pairs. 
+// See Info for documentation on how key/value pairs work. +func (l Logger) WithValues(keysAndValues ...any) Logger { + if l.sink == nil { + return l + } + l.setSink(l.sink.WithValues(keysAndValues...)) + return l +} + +// WithName returns a new Logger instance with the specified name element added +// to the Logger's name. Successive calls with WithName append additional +// suffixes to the Logger's name. It's strongly recommended that name segments +// contain only letters, digits, and hyphens (see the package documentation for +// more information). +func (l Logger) WithName(name string) Logger { + if l.sink == nil { + return l + } + l.setSink(l.sink.WithName(name)) + return l +} + +// WithCallDepth returns a Logger instance that offsets the call stack by the +// specified number of frames when logging call site information, if possible. +// This is useful for users who have helper functions between the "real" call +// site and the actual calls to Logger methods. If depth is 0 the attribution +// should be to the direct caller of this function. If depth is 1 the +// attribution should skip 1 call frame, and so on. Successive calls to this +// are additive. +// +// If the underlying log implementation supports a WithCallDepth(int) method, +// it will be called and the result returned. If the implementation does not +// support CallDepthLogSink, the original Logger will be returned. +// +// To skip one level, WithCallStackHelper() should be used instead of +// WithCallDepth(1) because it works with implementions that support the +// CallDepthLogSink and/or CallStackHelperLogSink interfaces. +func (l Logger) WithCallDepth(depth int) Logger { + if l.sink == nil { + return l + } + if withCallDepth, ok := l.sink.(CallDepthLogSink); ok { + l.setSink(withCallDepth.WithCallDepth(depth)) + } + return l +} + +// WithCallStackHelper returns a new Logger instance that skips the direct +// caller when logging call site information, if possible. 
This is useful for +// users who have helper functions between the "real" call site and the actual +// calls to Logger methods and want to support loggers which depend on marking +// each individual helper function, like loggers based on testing.T. +// +// In addition to using that new logger instance, callers also must call the +// returned function. +// +// If the underlying log implementation supports a WithCallDepth(int) method, +// WithCallDepth(1) will be called to produce a new logger. If it supports a +// WithCallStackHelper() method, that will be also called. If the +// implementation does not support either of these, the original Logger will be +// returned. +func (l Logger) WithCallStackHelper() (func(), Logger) { + if l.sink == nil { + return func() {}, l + } + var helper func() + if withCallDepth, ok := l.sink.(CallDepthLogSink); ok { + l.setSink(withCallDepth.WithCallDepth(1)) + } + if withHelper, ok := l.sink.(CallStackHelperLogSink); ok { + helper = withHelper.GetCallStackHelper() + } else { + helper = func() {} + } + return helper, l +} + +// IsZero returns true if this logger is an uninitialized zero value +func (l Logger) IsZero() bool { + return l.sink == nil +} + +// RuntimeInfo holds information that the logr "core" library knows which +// LogSinks might want to know. +type RuntimeInfo struct { + // CallDepth is the number of call frames the logr library adds between the + // end-user and the LogSink. LogSink implementations which choose to print + // the original logging site (e.g. file & line) should climb this many + // additional frames to find it. + CallDepth int +} + +// runtimeInfo is a static global. It must not be changed at run time. +var runtimeInfo = RuntimeInfo{ + CallDepth: 1, +} + +// LogSink represents a logging implementation. End-users will generally not +// interact with this type. +type LogSink interface { + // Init receives optional information about the logr library for LogSink + // implementations that need it. 
+ Init(info RuntimeInfo) + + // Enabled tests whether this LogSink is enabled at the specified V-level. + // For example, commandline flags might be used to set the logging + // verbosity and disable some info logs. + Enabled(level int) bool + + // Info logs a non-error message with the given key/value pairs as context. + // The level argument is provided for optional logging. This method will + // only be called when Enabled(level) is true. See Logger.Info for more + // details. + Info(level int, msg string, keysAndValues ...any) + + // Error logs an error, with the given message and key/value pairs as + // context. See Logger.Error for more details. + Error(err error, msg string, keysAndValues ...any) + + // WithValues returns a new LogSink with additional key/value pairs. See + // Logger.WithValues for more details. + WithValues(keysAndValues ...any) LogSink + + // WithName returns a new LogSink with the specified name appended. See + // Logger.WithName for more details. + WithName(name string) LogSink +} + +// CallDepthLogSink represents a LogSink that knows how to climb the call stack +// to identify the original call site and can offset the depth by a specified +// number of frames. This is useful for users who have helper functions +// between the "real" call site and the actual calls to Logger methods. +// Implementations that log information about the call site (such as file, +// function, or line) would otherwise log information about the intermediate +// helper functions. +// +// This is an optional interface and implementations are not required to +// support it. +type CallDepthLogSink interface { + // WithCallDepth returns a LogSink that will offset the call + // stack by the specified number of frames when logging call + // site information. + // + // If depth is 0, the LogSink should skip exactly the number + // of call frames defined in RuntimeInfo.CallDepth when Info + // or Error are called, i.e. 
the attribution should be to the + // direct caller of Logger.Info or Logger.Error. + // + // If depth is 1 the attribution should skip 1 call frame, and so on. + // Successive calls to this are additive. + WithCallDepth(depth int) LogSink +} + +// CallStackHelperLogSink represents a LogSink that knows how to climb +// the call stack to identify the original call site and can skip +// intermediate helper functions if they mark themselves as +// helper. Go's testing package uses that approach. +// +// This is useful for users who have helper functions between the +// "real" call site and the actual calls to Logger methods. +// Implementations that log information about the call site (such as +// file, function, or line) would otherwise log information about the +// intermediate helper functions. +// +// This is an optional interface and implementations are not required +// to support it. Implementations that choose to support this must not +// simply implement it as WithCallDepth(1), because +// Logger.WithCallStackHelper will call both methods if they are +// present. This should only be implemented for LogSinks that actually +// need it, as with testing.T. +type CallStackHelperLogSink interface { + // GetCallStackHelper returns a function that must be called + // to mark the direct caller as helper function when logging + // call site information. + GetCallStackHelper() func() +} + +// Marshaler is an optional interface that logged values may choose to +// implement. Loggers with structured output, such as JSON, should +// log the object return by the MarshalLog method instead of the +// original value. 
+type Marshaler interface { + // MarshalLog can be used to: + // - ensure that structs are not logged as strings when the original + // value has a String method: return a different type without a + // String method + // - select which fields of a complex type should get logged: + // return a simpler struct with fewer fields + // - log unexported fields: return a different struct + // with exported fields + // + // It may return any value of any type. + MarshalLog() any +} diff --git a/vendor/github.com/go-logr/logr/sloghandler.go b/vendor/github.com/go-logr/logr/sloghandler.go new file mode 100644 index 0000000..82d1ba4 --- /dev/null +++ b/vendor/github.com/go-logr/logr/sloghandler.go @@ -0,0 +1,192 @@ +//go:build go1.21 +// +build go1.21 + +/* +Copyright 2023 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +import ( + "context" + "log/slog" +) + +type slogHandler struct { + // May be nil, in which case all logs get discarded. + sink LogSink + // Non-nil if sink is non-nil and implements SlogSink. + slogSink SlogSink + + // groupPrefix collects values from WithGroup calls. It gets added as + // prefix to value keys when handling a log record. + groupPrefix string + + // levelBias can be set when constructing the handler to influence the + // slog.Level of log records. A positive levelBias reduces the + // slog.Level value. 
slog has no API to influence this value after the + // handler got created, so it can only be set indirectly through + // Logger.V. + levelBias slog.Level +} + +var _ slog.Handler = &slogHandler{} + +// groupSeparator is used to concatenate WithGroup names and attribute keys. +const groupSeparator = "." + +// GetLevel is used for black box unit testing. +func (l *slogHandler) GetLevel() slog.Level { + return l.levelBias +} + +func (l *slogHandler) Enabled(_ context.Context, level slog.Level) bool { + return l.sink != nil && (level >= slog.LevelError || l.sink.Enabled(l.levelFromSlog(level))) +} + +func (l *slogHandler) Handle(ctx context.Context, record slog.Record) error { + if l.slogSink != nil { + // Only adjust verbosity level of log entries < slog.LevelError. + if record.Level < slog.LevelError { + record.Level -= l.levelBias + } + return l.slogSink.Handle(ctx, record) + } + + // No need to check for nil sink here because Handle will only be called + // when Enabled returned true. + + kvList := make([]any, 0, 2*record.NumAttrs()) + record.Attrs(func(attr slog.Attr) bool { + kvList = attrToKVs(attr, l.groupPrefix, kvList) + return true + }) + if record.Level >= slog.LevelError { + l.sinkWithCallDepth().Error(nil, record.Message, kvList...) + } else { + level := l.levelFromSlog(record.Level) + l.sinkWithCallDepth().Info(level, record.Message, kvList...) + } + return nil +} + +// sinkWithCallDepth adjusts the stack unwinding so that when Error or Info +// are called by Handle, code in slog gets skipped. +// +// This offset currently (Go 1.21.0) works for calls through +// slog.New(ToSlogHandler(...)). There's no guarantee that the call +// chain won't change. Wrapping the handler will also break unwinding. It's +// still better than not adjusting at all.... +// +// This cannot be done when constructing the handler because FromSlogHandler needs +// access to the original sink without this adjustment. 
A second copy would +// work, but then WithAttrs would have to be called for both of them. +func (l *slogHandler) sinkWithCallDepth() LogSink { + if sink, ok := l.sink.(CallDepthLogSink); ok { + return sink.WithCallDepth(2) + } + return l.sink +} + +func (l *slogHandler) WithAttrs(attrs []slog.Attr) slog.Handler { + if l.sink == nil || len(attrs) == 0 { + return l + } + + clone := *l + if l.slogSink != nil { + clone.slogSink = l.slogSink.WithAttrs(attrs) + clone.sink = clone.slogSink + } else { + kvList := make([]any, 0, 2*len(attrs)) + for _, attr := range attrs { + kvList = attrToKVs(attr, l.groupPrefix, kvList) + } + clone.sink = l.sink.WithValues(kvList...) + } + return &clone +} + +func (l *slogHandler) WithGroup(name string) slog.Handler { + if l.sink == nil { + return l + } + if name == "" { + // slog says to inline empty groups + return l + } + clone := *l + if l.slogSink != nil { + clone.slogSink = l.slogSink.WithGroup(name) + clone.sink = clone.slogSink + } else { + clone.groupPrefix = addPrefix(clone.groupPrefix, name) + } + return &clone +} + +// attrToKVs appends a slog.Attr to a logr-style kvList. It handle slog Groups +// and other details of slog. +func attrToKVs(attr slog.Attr, groupPrefix string, kvList []any) []any { + attrVal := attr.Value.Resolve() + if attrVal.Kind() == slog.KindGroup { + groupVal := attrVal.Group() + grpKVs := make([]any, 0, 2*len(groupVal)) + prefix := groupPrefix + if attr.Key != "" { + prefix = addPrefix(groupPrefix, attr.Key) + } + for _, attr := range groupVal { + grpKVs = attrToKVs(attr, prefix, grpKVs) + } + kvList = append(kvList, grpKVs...) + } else if attr.Key != "" { + kvList = append(kvList, addPrefix(groupPrefix, attr.Key), attrVal.Any()) + } + + return kvList +} + +func addPrefix(prefix, name string) string { + if prefix == "" { + return name + } + if name == "" { + return prefix + } + return prefix + groupSeparator + name +} + +// levelFromSlog adjusts the level by the logger's verbosity and negates it. 
+// It ensures that the result is >= 0. This is necessary because the result is +// passed to a LogSink and that API did not historically document whether +// levels could be negative or what that meant. +// +// Some example usage: +// +// logrV0 := getMyLogger() +// logrV2 := logrV0.V(2) +// slogV2 := slog.New(logr.ToSlogHandler(logrV2)) +// slogV2.Debug("msg") // =~ logrV2.V(4) =~ logrV0.V(6) +// slogV2.Info("msg") // =~ logrV2.V(0) =~ logrV0.V(2) +// slogv2.Warn("msg") // =~ logrV2.V(-4) =~ logrV0.V(0) +func (l *slogHandler) levelFromSlog(level slog.Level) int { + result := -level + result += l.levelBias // in case the original Logger had a V level + if result < 0 { + result = 0 // because LogSink doesn't expect negative V levels + } + return int(result) +} diff --git a/vendor/github.com/go-logr/logr/slogr.go b/vendor/github.com/go-logr/logr/slogr.go new file mode 100644 index 0000000..28a83d0 --- /dev/null +++ b/vendor/github.com/go-logr/logr/slogr.go @@ -0,0 +1,100 @@ +//go:build go1.21 +// +build go1.21 + +/* +Copyright 2023 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package logr + +import ( + "context" + "log/slog" +) + +// FromSlogHandler returns a Logger which writes to the slog.Handler. +// +// The logr verbosity level is mapped to slog levels such that V(0) becomes +// slog.LevelInfo and V(4) becomes slog.LevelDebug. 
+func FromSlogHandler(handler slog.Handler) Logger { + if handler, ok := handler.(*slogHandler); ok { + if handler.sink == nil { + return Discard() + } + return New(handler.sink).V(int(handler.levelBias)) + } + return New(&slogSink{handler: handler}) +} + +// ToSlogHandler returns a slog.Handler which writes to the same sink as the Logger. +// +// The returned logger writes all records with level >= slog.LevelError as +// error log entries with LogSink.Error, regardless of the verbosity level of +// the Logger: +// +// logger := +// slog.New(ToSlogHandler(logger.V(10))).Error(...) -> logSink.Error(...) +// +// The level of all other records gets reduced by the verbosity +// level of the Logger and the result is negated. If it happens +// to be negative, then it gets replaced by zero because a LogSink +// is not expected to handled negative levels: +// +// slog.New(ToSlogHandler(logger)).Debug(...) -> logger.GetSink().Info(level=4, ...) +// slog.New(ToSlogHandler(logger)).Warning(...) -> logger.GetSink().Info(level=0, ...) +// slog.New(ToSlogHandler(logger)).Info(...) -> logger.GetSink().Info(level=0, ...) +// slog.New(ToSlogHandler(logger.V(4))).Info(...) -> logger.GetSink().Info(level=4, ...) +func ToSlogHandler(logger Logger) slog.Handler { + if sink, ok := logger.GetSink().(*slogSink); ok && logger.GetV() == 0 { + return sink.handler + } + + handler := &slogHandler{sink: logger.GetSink(), levelBias: slog.Level(logger.GetV())} + if slogSink, ok := handler.sink.(SlogSink); ok { + handler.slogSink = slogSink + } + return handler +} + +// SlogSink is an optional interface that a LogSink can implement to support +// logging through the slog.Logger or slog.Handler APIs better. It then should +// also support special slog values like slog.Group. 
When used as a +// slog.Handler, the advantages are: +// +// - stack unwinding gets avoided in favor of logging the pre-recorded PC, +// as intended by slog +// - proper grouping of key/value pairs via WithGroup +// - verbosity levels > slog.LevelInfo can be recorded +// - less overhead +// +// Both APIs (Logger and slog.Logger/Handler) then are supported equally +// well. Developers can pick whatever API suits them better and/or mix +// packages which use either API in the same binary with a common logging +// implementation. +// +// This interface is necessary because the type implementing the LogSink +// interface cannot also implement the slog.Handler interface due to the +// different prototype of the common Enabled method. +// +// An implementation could support both interfaces in two different types, but then +// additional interfaces would be needed to convert between those types in FromSlogHandler +// and ToSlogHandler. +type SlogSink interface { + LogSink + + Handle(ctx context.Context, record slog.Record) error + WithAttrs(attrs []slog.Attr) SlogSink + WithGroup(name string) SlogSink +} diff --git a/vendor/github.com/go-logr/logr/slogsink.go b/vendor/github.com/go-logr/logr/slogsink.go new file mode 100644 index 0000000..4060fcb --- /dev/null +++ b/vendor/github.com/go-logr/logr/slogsink.go @@ -0,0 +1,120 @@ +//go:build go1.21 +// +build go1.21 + +/* +Copyright 2023 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package logr + +import ( + "context" + "log/slog" + "runtime" + "time" +) + +var ( + _ LogSink = &slogSink{} + _ CallDepthLogSink = &slogSink{} + _ Underlier = &slogSink{} +) + +// Underlier is implemented by the LogSink returned by NewFromLogHandler. +type Underlier interface { + // GetUnderlying returns the Handler used by the LogSink. + GetUnderlying() slog.Handler +} + +const ( + // nameKey is used to log the `WithName` values as an additional attribute. + nameKey = "logger" + + // errKey is used to log the error parameter of Error as an additional attribute. + errKey = "err" +) + +type slogSink struct { + callDepth int + name string + handler slog.Handler +} + +func (l *slogSink) Init(info RuntimeInfo) { + l.callDepth = info.CallDepth +} + +func (l *slogSink) GetUnderlying() slog.Handler { + return l.handler +} + +func (l *slogSink) WithCallDepth(depth int) LogSink { + newLogger := *l + newLogger.callDepth += depth + return &newLogger +} + +func (l *slogSink) Enabled(level int) bool { + return l.handler.Enabled(context.Background(), slog.Level(-level)) +} + +func (l *slogSink) Info(level int, msg string, kvList ...interface{}) { + l.log(nil, msg, slog.Level(-level), kvList...) +} + +func (l *slogSink) Error(err error, msg string, kvList ...interface{}) { + l.log(err, msg, slog.LevelError, kvList...) +} + +func (l *slogSink) log(err error, msg string, level slog.Level, kvList ...interface{}) { + var pcs [1]uintptr + // skip runtime.Callers, this function, Info/Error, and all helper functions above that. + runtime.Callers(3+l.callDepth, pcs[:]) + + record := slog.NewRecord(time.Now(), level, msg, pcs[0]) + if l.name != "" { + record.AddAttrs(slog.String(nameKey, l.name)) + } + if err != nil { + record.AddAttrs(slog.Any(errKey, err)) + } + record.Add(kvList...) 
+ _ = l.handler.Handle(context.Background(), record) +} + +func (l slogSink) WithName(name string) LogSink { + if l.name != "" { + l.name += "/" + } + l.name += name + return &l +} + +func (l slogSink) WithValues(kvList ...interface{}) LogSink { + l.handler = l.handler.WithAttrs(kvListToAttrs(kvList...)) + return &l +} + +func kvListToAttrs(kvList ...interface{}) []slog.Attr { + // We don't need the record itself, only its Add method. + record := slog.NewRecord(time.Time{}, 0, "", 0) + record.Add(kvList...) + attrs := make([]slog.Attr, 0, record.NumAttrs()) + record.Attrs(func(attr slog.Attr) bool { + attrs = append(attrs, attr) + return true + }) + return attrs +} diff --git a/vendor/github.com/go-logr/stdr/LICENSE b/vendor/github.com/go-logr/stdr/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/go-logr/stdr/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-logr/stdr/README.md b/vendor/github.com/go-logr/stdr/README.md new file mode 100644 index 0000000..5158667 --- /dev/null +++ b/vendor/github.com/go-logr/stdr/README.md @@ -0,0 +1,6 @@ +# Minimal Go logging using logr and Go's standard library + +[![Go Reference](https://pkg.go.dev/badge/github.com/go-logr/stdr.svg)](https://pkg.go.dev/github.com/go-logr/stdr) + +This package implements the [logr interface](https://github.com/go-logr/logr) +in terms of Go's standard log package(https://pkg.go.dev/log). diff --git a/vendor/github.com/go-logr/stdr/stdr.go b/vendor/github.com/go-logr/stdr/stdr.go new file mode 100644 index 0000000..93a8aab --- /dev/null +++ b/vendor/github.com/go-logr/stdr/stdr.go @@ -0,0 +1,170 @@ +/* +Copyright 2019 The logr Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +// Package stdr implements github.com/go-logr/logr.Logger in terms of +// Go's standard log package. +package stdr + +import ( + "log" + "os" + + "github.com/go-logr/logr" + "github.com/go-logr/logr/funcr" +) + +// The global verbosity level. See SetVerbosity(). +var globalVerbosity int + +// SetVerbosity sets the global level against which all info logs will be +// compared. If this is greater than or equal to the "V" of the logger, the +// message will be logged. A higher value here means more logs will be written. +// The previous verbosity value is returned. This is not concurrent-safe - +// callers must be sure to call it from only one goroutine. +func SetVerbosity(v int) int { + old := globalVerbosity + globalVerbosity = v + return old +} + +// New returns a logr.Logger which is implemented by Go's standard log package, +// or something like it. If std is nil, this will use a default logger +// instead. +// +// Example: stdr.New(log.New(os.Stderr, "", log.LstdFlags|log.Lshortfile))) +func New(std StdLogger) logr.Logger { + return NewWithOptions(std, Options{}) +} + +// NewWithOptions returns a logr.Logger which is implemented by Go's standard +// log package, or something like it. See New for details. +func NewWithOptions(std StdLogger, opts Options) logr.Logger { + if std == nil { + // Go's log.Default() is only available in 1.16 and higher. + std = log.New(os.Stderr, "", log.LstdFlags) + } + + if opts.Depth < 0 { + opts.Depth = 0 + } + + fopts := funcr.Options{ + LogCaller: funcr.MessageClass(opts.LogCaller), + } + + sl := &logger{ + Formatter: funcr.NewFormatter(fopts), + std: std, + } + + // For skipping our own logger.Info/Error. + sl.Formatter.AddCallDepth(1 + opts.Depth) + + return logr.New(sl) +} + +// Options carries parameters which influence the way logs are generated. +type Options struct { + // Depth biases the assumed number of call frames to the "true" caller. 
+ // This is useful when the calling code calls a function which then calls + // stdr (e.g. a logging shim to another API). Values less than zero will + // be treated as zero. + Depth int + + // LogCaller tells stdr to add a "caller" key to some or all log lines. + // Go's log package has options to log this natively, too. + LogCaller MessageClass + + // TODO: add an option to log the date/time +} + +// MessageClass indicates which category or categories of messages to consider. +type MessageClass int + +const ( + // None ignores all message classes. + None MessageClass = iota + // All considers all message classes. + All + // Info only considers info messages. + Info + // Error only considers error messages. + Error +) + +// StdLogger is the subset of the Go stdlib log.Logger API that is needed for +// this adapter. +type StdLogger interface { + // Output is the same as log.Output and log.Logger.Output. + Output(calldepth int, logline string) error +} + +type logger struct { + funcr.Formatter + std StdLogger +} + +var _ logr.LogSink = &logger{} +var _ logr.CallDepthLogSink = &logger{} + +func (l logger) Enabled(level int) bool { + return globalVerbosity >= level +} + +func (l logger) Info(level int, msg string, kvList ...interface{}) { + prefix, args := l.FormatInfo(level, msg, kvList) + if prefix != "" { + args = prefix + ": " + args + } + _ = l.std.Output(l.Formatter.GetDepth()+1, args) +} + +func (l logger) Error(err error, msg string, kvList ...interface{}) { + prefix, args := l.FormatError(err, msg, kvList) + if prefix != "" { + args = prefix + ": " + args + } + _ = l.std.Output(l.Formatter.GetDepth()+1, args) +} + +func (l logger) WithName(name string) logr.LogSink { + l.Formatter.AddName(name) + return &l +} + +func (l logger) WithValues(kvList ...interface{}) logr.LogSink { + l.Formatter.AddValues(kvList) + return &l +} + +func (l logger) WithCallDepth(depth int) logr.LogSink { + l.Formatter.AddCallDepth(depth) + return &l +} + +// Underlier exposes access 
to the underlying logging implementation. Since +// callers only have a logr.Logger, they have to know which implementation is +// in use, so this interface is less of an abstraction and more of way to test +// type conversion. +type Underlier interface { + GetUnderlying() StdLogger +} + +// GetUnderlying returns the StdLogger underneath this logger. Since StdLogger +// is itself an interface, the result may or may not be a Go log.Logger. +func (l logger) GetUnderlying() StdLogger { + return l.std +} diff --git a/vendor/github.com/go-openapi/analysis/.codecov.yml b/vendor/github.com/go-openapi/analysis/.codecov.yml new file mode 100644 index 0000000..841c428 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/.codecov.yml @@ -0,0 +1,5 @@ +coverage: + status: + patch: + default: + target: 80% diff --git a/vendor/github.com/go-openapi/analysis/.gitattributes b/vendor/github.com/go-openapi/analysis/.gitattributes new file mode 100644 index 0000000..d020be8 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/.gitattributes @@ -0,0 +1,2 @@ +*.go text eol=lf + diff --git a/vendor/github.com/go-openapi/analysis/.gitignore b/vendor/github.com/go-openapi/analysis/.gitignore new file mode 100644 index 0000000..87c3bd3 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/.gitignore @@ -0,0 +1,5 @@ +secrets.yml +coverage.out +coverage.txt +*.cov +.idea diff --git a/vendor/github.com/go-openapi/analysis/.golangci.yml b/vendor/github.com/go-openapi/analysis/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + 
- whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/analysis/LICENSE b/vendor/github.com/go-openapi/analysis/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md new file mode 100644 index 0000000..e005d4b --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/README.md @@ -0,0 +1,27 @@ +# OpenAPI analysis [![Build Status](https://github.com/go-openapi/analysis/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/analysis/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/analysis.svg)](https://pkg.go.dev/github.com/go-openapi/analysis) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis) + + +A foundational library to analyze an OAI specification document for easier reasoning about the content. + +## What's inside? 
+ +* An analyzer providing methods to walk the functional content of a specification +* A spec flattener producing a self-contained document bundle, while preserving `$ref`s +* A spec merger ("mixin") to merge several spec documents into a primary spec +* A spec "fixer" ensuring that response descriptions are non empty + +[Documentation](https://pkg.go.dev/github.com/go-openapi/analysis) + +## FAQ + +* Does this library support OpenAPI 3? + +> No. +> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0). +> There is no plan to make it evolve toward supporting OpenAPI 3.x. +> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story. diff --git a/vendor/github.com/go-openapi/analysis/analyzer.go b/vendor/github.com/go-openapi/analysis/analyzer.go new file mode 100644 index 0000000..c17aee1 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/analyzer.go @@ -0,0 +1,1064 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package analysis + +import ( + "fmt" + slashpath "path" + "strconv" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +type referenceAnalysis struct { + schemas map[string]spec.Ref + responses map[string]spec.Ref + parameters map[string]spec.Ref + items map[string]spec.Ref + headerItems map[string]spec.Ref + parameterItems map[string]spec.Ref + allRefs map[string]spec.Ref + pathItems map[string]spec.Ref +} + +func (r *referenceAnalysis) addRef(key string, ref spec.Ref) { + r.allRefs["#"+key] = ref +} + +func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) { + r.items["#"+key] = items.Ref + r.addRef(key, items.Ref) + if location == "header" { + // NOTE: in swagger 2.0, headers and parameters (but not body param schemas) are simple schemas + // and $ref are not supported here. However it is possible to analyze this. + r.headerItems["#"+key] = items.Ref + } else { + r.parameterItems["#"+key] = items.Ref + } +} + +func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) { + r.schemas["#"+key] = ref.Schema.Ref + r.addRef(key, ref.Schema.Ref) +} + +func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) { + r.responses["#"+key] = resp.Ref + r.addRef(key, resp.Ref) +} + +func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) { + r.parameters["#"+key] = param.Ref + r.addRef(key, param.Ref) +} + +func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) { + r.pathItems["#"+key] = pathItem.Ref + r.addRef(key, pathItem.Ref) +} + +type patternAnalysis struct { + parameters map[string]string + headers map[string]string + items map[string]string + schemas map[string]string + allPatterns map[string]string +} + +func (p *patternAnalysis) addPattern(key, pattern string) { + p.allPatterns["#"+key] = pattern +} + +func (p *patternAnalysis) addParameterPattern(key, pattern string) { + p.parameters["#"+key] = 
pattern + p.addPattern(key, pattern) +} + +func (p *patternAnalysis) addHeaderPattern(key, pattern string) { + p.headers["#"+key] = pattern + p.addPattern(key, pattern) +} + +func (p *patternAnalysis) addItemsPattern(key, pattern string) { + p.items["#"+key] = pattern + p.addPattern(key, pattern) +} + +func (p *patternAnalysis) addSchemaPattern(key, pattern string) { + p.schemas["#"+key] = pattern + p.addPattern(key, pattern) +} + +type enumAnalysis struct { + parameters map[string][]interface{} + headers map[string][]interface{} + items map[string][]interface{} + schemas map[string][]interface{} + allEnums map[string][]interface{} +} + +func (p *enumAnalysis) addEnum(key string, enum []interface{}) { + p.allEnums["#"+key] = enum +} + +func (p *enumAnalysis) addParameterEnum(key string, enum []interface{}) { + p.parameters["#"+key] = enum + p.addEnum(key, enum) +} + +func (p *enumAnalysis) addHeaderEnum(key string, enum []interface{}) { + p.headers["#"+key] = enum + p.addEnum(key, enum) +} + +func (p *enumAnalysis) addItemsEnum(key string, enum []interface{}) { + p.items["#"+key] = enum + p.addEnum(key, enum) +} + +func (p *enumAnalysis) addSchemaEnum(key string, enum []interface{}) { + p.schemas["#"+key] = enum + p.addEnum(key, enum) +} + +// New takes a swagger spec object and returns an analyzed spec document. +// The analyzed document contains a number of indices that make it easier to +// reason about semantics of a swagger specification for use in code generation +// or validation etc. +func New(doc *spec.Swagger) *Spec { + a := &Spec{ + spec: doc, + references: referenceAnalysis{}, + patterns: patternAnalysis{}, + enums: enumAnalysis{}, + } + a.reset() + a.initialize() + + return a +} + +// Spec is an analyzed specification object. It takes a swagger spec object and turns it into a registry +// with a bunch of utility methods to act on the information in the spec. 
+type Spec struct { + spec *spec.Swagger + consumes map[string]struct{} + produces map[string]struct{} + authSchemes map[string]struct{} + operations map[string]map[string]*spec.Operation + references referenceAnalysis + patterns patternAnalysis + enums enumAnalysis + allSchemas map[string]SchemaRef + allOfs map[string]SchemaRef +} + +func (s *Spec) reset() { + s.consumes = make(map[string]struct{}, 150) + s.produces = make(map[string]struct{}, 150) + s.authSchemes = make(map[string]struct{}, 150) + s.operations = make(map[string]map[string]*spec.Operation, 150) + s.allSchemas = make(map[string]SchemaRef, 150) + s.allOfs = make(map[string]SchemaRef, 150) + s.references.schemas = make(map[string]spec.Ref, 150) + s.references.pathItems = make(map[string]spec.Ref, 150) + s.references.responses = make(map[string]spec.Ref, 150) + s.references.parameters = make(map[string]spec.Ref, 150) + s.references.items = make(map[string]spec.Ref, 150) + s.references.headerItems = make(map[string]spec.Ref, 150) + s.references.parameterItems = make(map[string]spec.Ref, 150) + s.references.allRefs = make(map[string]spec.Ref, 150) + s.patterns.parameters = make(map[string]string, 150) + s.patterns.headers = make(map[string]string, 150) + s.patterns.items = make(map[string]string, 150) + s.patterns.schemas = make(map[string]string, 150) + s.patterns.allPatterns = make(map[string]string, 150) + s.enums.parameters = make(map[string][]interface{}, 150) + s.enums.headers = make(map[string][]interface{}, 150) + s.enums.items = make(map[string][]interface{}, 150) + s.enums.schemas = make(map[string][]interface{}, 150) + s.enums.allEnums = make(map[string][]interface{}, 150) +} + +func (s *Spec) reload() { + s.reset() + s.initialize() +} + +func (s *Spec) initialize() { + for _, c := range s.spec.Consumes { + s.consumes[c] = struct{}{} + } + for _, c := range s.spec.Produces { + s.produces[c] = struct{}{} + } + for _, ss := range s.spec.Security { + for k := range ss { + s.authSchemes[k] = 
struct{}{} + } + } + for path, pathItem := range s.AllPaths() { + s.analyzeOperations(path, &pathItem) //#nosec + } + + for name, parameter := range s.spec.Parameters { + refPref := slashpath.Join("/parameters", jsonpointer.Escape(name)) + if parameter.Items != nil { + s.analyzeItems("items", parameter.Items, refPref, "parameter") + } + if parameter.In == "body" && parameter.Schema != nil { + s.analyzeSchema("schema", parameter.Schema, refPref) + } + if parameter.Pattern != "" { + s.patterns.addParameterPattern(refPref, parameter.Pattern) + } + if len(parameter.Enum) > 0 { + s.enums.addParameterEnum(refPref, parameter.Enum) + } + } + + for name, response := range s.spec.Responses { + refPref := slashpath.Join("/responses", jsonpointer.Escape(name)) + for k, v := range response.Headers { + hRefPref := slashpath.Join(refPref, "headers", k) + if v.Items != nil { + s.analyzeItems("items", v.Items, hRefPref, "header") + } + if v.Pattern != "" { + s.patterns.addHeaderPattern(hRefPref, v.Pattern) + } + if len(v.Enum) > 0 { + s.enums.addHeaderEnum(hRefPref, v.Enum) + } + } + if response.Schema != nil { + s.analyzeSchema("schema", response.Schema, refPref) + } + } + + for name := range s.spec.Definitions { + schema := s.spec.Definitions[name] + s.analyzeSchema(name, &schema, "/definitions") + } + // TODO: after analyzing all things and flattening schemas etc + // resolve all the collected references to their final representations + // best put in a separate method because this could get expensive +} + +func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) { + // TODO: resolve refs here? 
+ // Currently, operations declared via pathItem $ref are known only after expansion + op := pi + if pi.Ref.String() != "" { + key := slashpath.Join("/paths", jsonpointer.Escape(path)) + s.references.addPathItemRef(key, pi) + } + s.analyzeOperation("GET", path, op.Get) + s.analyzeOperation("PUT", path, op.Put) + s.analyzeOperation("POST", path, op.Post) + s.analyzeOperation("PATCH", path, op.Patch) + s.analyzeOperation("DELETE", path, op.Delete) + s.analyzeOperation("HEAD", path, op.Head) + s.analyzeOperation("OPTIONS", path, op.Options) + for i, param := range op.Parameters { + refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i)) + if param.Ref.String() != "" { + s.references.addParamRef(refPref, ¶m) //#nosec + } + if param.Pattern != "" { + s.patterns.addParameterPattern(refPref, param.Pattern) + } + if len(param.Enum) > 0 { + s.enums.addParameterEnum(refPref, param.Enum) + } + if param.Items != nil { + s.analyzeItems("items", param.Items, refPref, "parameter") + } + if param.Schema != nil { + s.analyzeSchema("schema", param.Schema, refPref) + } + } +} + +func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) { + if items == nil { + return + } + refPref := slashpath.Join(prefix, name) + s.analyzeItems(name, items.Items, refPref, location) + if items.Ref.String() != "" { + s.references.addItemsRef(refPref, items, location) + } + if items.Pattern != "" { + s.patterns.addItemsPattern(refPref, items.Pattern) + } + if len(items.Enum) > 0 { + s.enums.addItemsEnum(refPref, items.Enum) + } +} + +func (s *Spec) analyzeParameter(prefix string, i int, param spec.Parameter) { + refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i)) + if param.Ref.String() != "" { + s.references.addParamRef(refPref, ¶m) //#nosec + } + + if param.Pattern != "" { + s.patterns.addParameterPattern(refPref, param.Pattern) + } + + if len(param.Enum) > 0 { + s.enums.addParameterEnum(refPref, param.Enum) + } + + 
s.analyzeItems("items", param.Items, refPref, "parameter") + if param.In == "body" && param.Schema != nil { + s.analyzeSchema("schema", param.Schema, refPref) + } +} + +func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) { + if op == nil { + return + } + + for _, c := range op.Consumes { + s.consumes[c] = struct{}{} + } + + for _, c := range op.Produces { + s.produces[c] = struct{}{} + } + + for _, ss := range op.Security { + for k := range ss { + s.authSchemes[k] = struct{}{} + } + } + + if _, ok := s.operations[method]; !ok { + s.operations[method] = make(map[string]*spec.Operation) + } + + s.operations[method][path] = op + prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method)) + for i, param := range op.Parameters { + s.analyzeParameter(prefix, i, param) + } + + if op.Responses == nil { + return + } + + if op.Responses.Default != nil { + s.analyzeDefaultResponse(prefix, op.Responses.Default) + } + + for k, res := range op.Responses.StatusCodeResponses { + s.analyzeResponse(prefix, k, res) + } +} + +func (s *Spec) analyzeDefaultResponse(prefix string, res *spec.Response) { + refPref := slashpath.Join(prefix, "responses", "default") + if res.Ref.String() != "" { + s.references.addResponseRef(refPref, res) + } + + for k, v := range res.Headers { + hRefPref := slashpath.Join(refPref, "headers", k) + s.analyzeItems("items", v.Items, hRefPref, "header") + if v.Pattern != "" { + s.patterns.addHeaderPattern(hRefPref, v.Pattern) + } + } + + if res.Schema != nil { + s.analyzeSchema("schema", res.Schema, refPref) + } +} + +func (s *Spec) analyzeResponse(prefix string, k int, res spec.Response) { + refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k)) + if res.Ref.String() != "" { + s.references.addResponseRef(refPref, &res) //#nosec + } + + for k, v := range res.Headers { + hRefPref := slashpath.Join(refPref, "headers", k) + s.analyzeItems("items", v.Items, hRefPref, "header") + if v.Pattern != "" { + 
s.patterns.addHeaderPattern(hRefPref, v.Pattern) + } + + if len(v.Enum) > 0 { + s.enums.addHeaderEnum(hRefPref, v.Enum) + } + } + + if res.Schema != nil { + s.analyzeSchema("schema", res.Schema, refPref) + } +} + +func (s *Spec) analyzeSchema(name string, schema *spec.Schema, prefix string) { + refURI := slashpath.Join(prefix, jsonpointer.Escape(name)) + schRef := SchemaRef{ + Name: name, + Schema: schema, + Ref: spec.MustCreateRef("#" + refURI), + TopLevel: prefix == "/definitions", + } + + s.allSchemas["#"+refURI] = schRef + + if schema.Ref.String() != "" { + s.references.addSchemaRef(refURI, schRef) + } + + if schema.Pattern != "" { + s.patterns.addSchemaPattern(refURI, schema.Pattern) + } + + if len(schema.Enum) > 0 { + s.enums.addSchemaEnum(refURI, schema.Enum) + } + + for k, v := range schema.Definitions { + v := v + s.analyzeSchema(k, &v, slashpath.Join(refURI, "definitions")) + } + + for k, v := range schema.Properties { + v := v + s.analyzeSchema(k, &v, slashpath.Join(refURI, "properties")) + } + + for k, v := range schema.PatternProperties { + v := v + // NOTE: swagger 2.0 does not support PatternProperties. + // However it is possible to analyze this in a schema + s.analyzeSchema(k, &v, slashpath.Join(refURI, "patternProperties")) + } + + for i := range schema.AllOf { + v := &schema.AllOf[i] + s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf")) + } + + if len(schema.AllOf) > 0 { + s.allOfs["#"+refURI] = schRef + } + + for i := range schema.AnyOf { + v := &schema.AnyOf[i] + // NOTE: swagger 2.0 does not support anyOf constructs. + // However it is possible to analyze this in a schema + s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf")) + } + + for i := range schema.OneOf { + v := &schema.OneOf[i] + // NOTE: swagger 2.0 does not support oneOf constructs. 
+ // However it is possible to analyze this in a schema + s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf")) + } + + if schema.Not != nil { + // NOTE: swagger 2.0 does not support "not" constructs. + // However it is possible to analyze this in a schema + s.analyzeSchema("not", schema.Not, refURI) + } + + if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil { + s.analyzeSchema("additionalProperties", schema.AdditionalProperties.Schema, refURI) + } + + if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil { + // NOTE: swagger 2.0 does not support AdditionalItems. + // However it is possible to analyze this in a schema + s.analyzeSchema("additionalItems", schema.AdditionalItems.Schema, refURI) + } + + if schema.Items != nil { + if schema.Items.Schema != nil { + s.analyzeSchema("items", schema.Items.Schema, refURI) + } + + for i := range schema.Items.Schemas { + sch := &schema.Items.Schemas[i] + s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items")) + } + } +} + +// SecurityRequirement is a representation of a security requirement for an operation +type SecurityRequirement struct { + Name string + Scopes []string +} + +// SecurityRequirementsFor gets the security requirements for the operation +func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) [][]SecurityRequirement { + if s.spec.Security == nil && operation.Security == nil { + return nil + } + + schemes := s.spec.Security + if operation.Security != nil { + schemes = operation.Security + } + + result := [][]SecurityRequirement{} + for _, scheme := range schemes { + if len(scheme) == 0 { + // append a zero object for anonymous + result = append(result, []SecurityRequirement{{}}) + + continue + } + + var reqs []SecurityRequirement + for k, v := range scheme { + if v == nil { + v = []string{} + } + reqs = append(reqs, SecurityRequirement{Name: k, Scopes: v}) + } + + result = append(result, reqs) + } + + return result +} + 
+// SecurityDefinitionsForRequirements gets the matching security definitions for a set of requirements +func (s *Spec) SecurityDefinitionsForRequirements(requirements []SecurityRequirement) map[string]spec.SecurityScheme { + result := make(map[string]spec.SecurityScheme) + + for _, v := range requirements { + if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok { + if definition != nil { + result[v.Name] = *definition + } + } + } + + return result +} + +// SecurityDefinitionsFor gets the matching security definitions for a set of requirements +func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme { + requirements := s.SecurityRequirementsFor(operation) + if len(requirements) == 0 { + return nil + } + + result := make(map[string]spec.SecurityScheme) + for _, reqs := range requirements { + for _, v := range reqs { + if v.Name == "" { + // optional requirement + continue + } + + if _, ok := result[v.Name]; ok { + // duplicate requirement + continue + } + + if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok { + if definition != nil { + result[v.Name] = *definition + } + } + } + } + + return result +} + +// ConsumesFor gets the mediatypes for the operation +func (s *Spec) ConsumesFor(operation *spec.Operation) []string { + if len(operation.Consumes) == 0 { + cons := make(map[string]struct{}, len(s.spec.Consumes)) + for _, k := range s.spec.Consumes { + cons[k] = struct{}{} + } + + return s.structMapKeys(cons) + } + + cons := make(map[string]struct{}, len(operation.Consumes)) + for _, c := range operation.Consumes { + cons[c] = struct{}{} + } + + return s.structMapKeys(cons) +} + +// ProducesFor gets the mediatypes for the operation +func (s *Spec) ProducesFor(operation *spec.Operation) []string { + if len(operation.Produces) == 0 { + prod := make(map[string]struct{}, len(s.spec.Produces)) + for _, k := range s.spec.Produces { + prod[k] = struct{}{} + } + + return s.structMapKeys(prod) + } + + prod := 
make(map[string]struct{}, len(operation.Produces)) + for _, c := range operation.Produces { + prod[c] = struct{}{} + } + + return s.structMapKeys(prod) +} + +func mapKeyFromParam(param *spec.Parameter) string { + return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param)) +} + +func fieldNameFromParam(param *spec.Parameter) string { + // TODO: this should be x-go-name + if nm, ok := param.Extensions.GetString("go-name"); ok { + return nm + } + + return swag.ToGoName(param.Name) +} + +// ErrorOnParamFunc is a callback function to be invoked +// whenever an error is encountered while resolving references +// on parameters. +// +// This function takes as input the spec.Parameter which triggered the +// error and the error itself. +// +// If the callback function returns false, the calling function should bail. +// +// If it returns true, the calling function should continue evaluating parameters. +// A nil ErrorOnParamFunc must be evaluated as equivalent to panic(). +type ErrorOnParamFunc func(spec.Parameter, error) bool + +func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter, callmeOnError ErrorOnParamFunc) { + for _, param := range parameters { + pr := param + if pr.Ref.String() == "" { + res[mapKeyFromParam(&pr)] = pr + + continue + } + + // resolve $ref + if callmeOnError == nil { + callmeOnError = func(_ spec.Parameter, err error) bool { + panic(err) + } + } + + obj, _, err := pr.Ref.GetPointer().Get(s.spec) + if err != nil { + if callmeOnError(param, fmt.Errorf("invalid reference: %q", pr.Ref.String())) { + continue + } + + break + } + + objAsParam, ok := obj.(spec.Parameter) + if !ok { + if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) { + continue + } + + break + } + + pr = objAsParam + res[mapKeyFromParam(&pr)] = pr + } +} + +// ParametersFor the specified operation id. 
+// +// Assumes parameters properly resolve references if any and that +// such references actually resolve to a parameter object. +// Otherwise, panics. +func (s *Spec) ParametersFor(operationID string) []spec.Parameter { + return s.SafeParametersFor(operationID, nil) +} + +// SafeParametersFor the specified operation id. +// +// Does not assume parameters properly resolve references or that +// such references actually resolve to a parameter object. +// +// Upon error, invoke a ErrorOnParamFunc callback with the erroneous +// parameters. If the callback is set to nil, panics upon errors. +func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter { + gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter { + bag := make(map[string]spec.Parameter) + s.paramsAsMap(pi.Parameters, bag, callmeOnError) + s.paramsAsMap(op.Parameters, bag, callmeOnError) + + var res []spec.Parameter + for _, v := range bag { + res = append(res, v) + } + + return res + } + + for _, pi := range s.spec.Paths.Paths { + if pi.Get != nil && pi.Get.ID == operationID { + return gatherParams(&pi, pi.Get) //#nosec + } + if pi.Head != nil && pi.Head.ID == operationID { + return gatherParams(&pi, pi.Head) //#nosec + } + if pi.Options != nil && pi.Options.ID == operationID { + return gatherParams(&pi, pi.Options) //#nosec + } + if pi.Post != nil && pi.Post.ID == operationID { + return gatherParams(&pi, pi.Post) //#nosec + } + if pi.Patch != nil && pi.Patch.ID == operationID { + return gatherParams(&pi, pi.Patch) //#nosec + } + if pi.Put != nil && pi.Put.ID == operationID { + return gatherParams(&pi, pi.Put) //#nosec + } + if pi.Delete != nil && pi.Delete.ID == operationID { + return gatherParams(&pi, pi.Delete) //#nosec + } + } + + return nil +} + +// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that +// apply for the method and path. 
+// +// Assumes parameters properly resolve references if any and that +// such references actually resolve to a parameter object. +// Otherwise, panics. +func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter { + return s.SafeParamsFor(method, path, nil) +} + +// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that +// apply for the method and path. +// +// Does not assume parameters properly resolve references or that +// such references actually resolve to a parameter object. +// +// Upon error, invoke a ErrorOnParamFunc callback with the erroneous +// parameters. If the callback is set to nil, panics upon errors. +func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter { + res := make(map[string]spec.Parameter) + if pi, ok := s.spec.Paths.Paths[path]; ok { + s.paramsAsMap(pi.Parameters, res, callmeOnError) + s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError) + } + + return res +} + +// OperationForName gets the operation for the given id +func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) { + for method, pathItem := range s.operations { + for path, op := range pathItem { + if operationID == op.ID { + return method, path, op, true + } + } + } + + return "", "", nil, false +} + +// OperationFor the given method and path +func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) { + if mp, ok := s.operations[strings.ToUpper(method)]; ok { + op, fn := mp[path] + + return op, fn + } + + return nil, false +} + +// Operations gathers all the operations specified in the spec document +func (s *Spec) Operations() map[string]map[string]*spec.Operation { + return s.operations +} + +func (s *Spec) structMapKeys(mp map[string]struct{}) []string { + if len(mp) == 0 { + return nil + } + + result := make([]string, 0, len(mp)) + for k := range mp { + result = 
append(result, k) + } + + return result +} + +// AllPaths returns all the paths in the swagger spec +func (s *Spec) AllPaths() map[string]spec.PathItem { + if s.spec == nil || s.spec.Paths == nil { + return nil + } + + return s.spec.Paths.Paths +} + +// OperationIDs gets all the operation ids based on method an dpath +func (s *Spec) OperationIDs() []string { + if len(s.operations) == 0 { + return nil + } + + result := make([]string, 0, len(s.operations)) + for method, v := range s.operations { + for p, o := range v { + if o.ID != "" { + result = append(result, o.ID) + } else { + result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p)) + } + } + } + + return result +} + +// OperationMethodPaths gets all the operation ids based on method an dpath +func (s *Spec) OperationMethodPaths() []string { + if len(s.operations) == 0 { + return nil + } + + result := make([]string, 0, len(s.operations)) + for method, v := range s.operations { + for p := range v { + result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p)) + } + } + + return result +} + +// RequiredConsumes gets all the distinct consumes that are specified in the specification document +func (s *Spec) RequiredConsumes() []string { + return s.structMapKeys(s.consumes) +} + +// RequiredProduces gets all the distinct produces that are specified in the specification document +func (s *Spec) RequiredProduces() []string { + return s.structMapKeys(s.produces) +} + +// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec +func (s *Spec) RequiredSecuritySchemes() []string { + return s.structMapKeys(s.authSchemes) +} + +// SchemaRef is a reference to a schema +type SchemaRef struct { + Name string + Ref spec.Ref + Schema *spec.Schema + TopLevel bool +} + +// SchemasWithAllOf returns schema references to all schemas that are defined +// with an allOf key +func (s *Spec) SchemasWithAllOf() (result []SchemaRef) { + for _, v := range s.allOfs { + 
result = append(result, v) + } + + return +} + +// AllDefinitions returns schema references for all the definitions that were discovered +func (s *Spec) AllDefinitions() (result []SchemaRef) { + for _, v := range s.allSchemas { + result = append(result, v) + } + + return +} + +// AllDefinitionReferences returns json refs for all the discovered schemas +func (s *Spec) AllDefinitionReferences() (result []string) { + for _, v := range s.references.schemas { + result = append(result, v.String()) + } + + return +} + +// AllParameterReferences returns json refs for all the discovered parameters +func (s *Spec) AllParameterReferences() (result []string) { + for _, v := range s.references.parameters { + result = append(result, v.String()) + } + + return +} + +// AllResponseReferences returns json refs for all the discovered responses +func (s *Spec) AllResponseReferences() (result []string) { + for _, v := range s.references.responses { + result = append(result, v.String()) + } + + return +} + +// AllPathItemReferences returns the references for all the items +func (s *Spec) AllPathItemReferences() (result []string) { + for _, v := range s.references.pathItems { + result = append(result, v.String()) + } + + return +} + +// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers). +// +// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid +// Swagger 2.0 spec. 
+func (s *Spec) AllItemsReferences() (result []string) { + for _, v := range s.references.items { + result = append(result, v.String()) + } + + return +} + +// AllReferences returns all the references found in the document, with possible duplicates +func (s *Spec) AllReferences() (result []string) { + for _, v := range s.references.allRefs { + result = append(result, v.String()) + } + + return +} + +// AllRefs returns all the unique references found in the document +func (s *Spec) AllRefs() (result []spec.Ref) { + set := make(map[string]struct{}) + for _, v := range s.references.allRefs { + a := v.String() + if a == "" { + continue + } + + if _, ok := set[a]; !ok { + set[a] = struct{}{} + result = append(result, v) + } + } + + return +} + +func cloneStringMap(source map[string]string) map[string]string { + res := make(map[string]string, len(source)) + for k, v := range source { + res[k] = v + } + + return res +} + +func cloneEnumMap(source map[string][]interface{}) map[string][]interface{} { + res := make(map[string][]interface{}, len(source)) + for k, v := range source { + res[k] = v + } + + return res +} + +// ParameterPatterns returns all the patterns found in parameters +// the map is cloned to avoid accidental changes +func (s *Spec) ParameterPatterns() map[string]string { + return cloneStringMap(s.patterns.parameters) +} + +// HeaderPatterns returns all the patterns found in response headers +// the map is cloned to avoid accidental changes +func (s *Spec) HeaderPatterns() map[string]string { + return cloneStringMap(s.patterns.headers) +} + +// ItemsPatterns returns all the patterns found in simple array items +// the map is cloned to avoid accidental changes +func (s *Spec) ItemsPatterns() map[string]string { + return cloneStringMap(s.patterns.items) +} + +// SchemaPatterns returns all the patterns found in schemas +// the map is cloned to avoid accidental changes +func (s *Spec) SchemaPatterns() map[string]string { + return 
cloneStringMap(s.patterns.schemas) +} + +// AllPatterns returns all the patterns found in the spec +// the map is cloned to avoid accidental changes +func (s *Spec) AllPatterns() map[string]string { + return cloneStringMap(s.patterns.allPatterns) +} + +// ParameterEnums returns all the enums found in parameters +// the map is cloned to avoid accidental changes +func (s *Spec) ParameterEnums() map[string][]interface{} { + return cloneEnumMap(s.enums.parameters) +} + +// HeaderEnums returns all the enums found in response headers +// the map is cloned to avoid accidental changes +func (s *Spec) HeaderEnums() map[string][]interface{} { + return cloneEnumMap(s.enums.headers) +} + +// ItemsEnums returns all the enums found in simple array items +// the map is cloned to avoid accidental changes +func (s *Spec) ItemsEnums() map[string][]interface{} { + return cloneEnumMap(s.enums.items) +} + +// SchemaEnums returns all the enums found in schemas +// the map is cloned to avoid accidental changes +func (s *Spec) SchemaEnums() map[string][]interface{} { + return cloneEnumMap(s.enums.schemas) +} + +// AllEnums returns all the enums found in the spec +// the map is cloned to avoid accidental changes +func (s *Spec) AllEnums() map[string][]interface{} { + return cloneEnumMap(s.enums.allEnums) +} diff --git a/vendor/github.com/go-openapi/analysis/debug.go b/vendor/github.com/go-openapi/analysis/debug.go new file mode 100644 index 0000000..33c1570 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/debug.go @@ -0,0 +1,23 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import ( + "os" + + "github.com/go-openapi/analysis/internal/debug" +) + +var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "") diff --git a/vendor/github.com/go-openapi/analysis/doc.go b/vendor/github.com/go-openapi/analysis/doc.go new file mode 100644 index 0000000..e8d9f9b --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/doc.go @@ -0,0 +1,43 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package analysis provides methods to work with a Swagger specification document from +package go-openapi/spec. + +## Analyzing a specification + +An analysed specification object (type Spec) provides methods to work with swagger definition. + +## Flattening or expanding a specification + +Flattening a specification bundles all remote $ref in the main spec document. 
+Depending on flattening options, additional preprocessing may take place: + - full flattening: replacing all inline complex constructs by a named entry in #/definitions + - expand: replace all $ref's in the document by their expanded content + +## Merging several specifications + +Mixin several specifications merges all Swagger constructs, and warns about found conflicts. + +## Fixing a specification + +Unmarshalling a specification with golang json unmarshalling may lead to +some unwanted result on present but empty fields. + +## Analyzing a Swagger schema + +Swagger schemas are analyzed to determine their complexity and qualify their content. +*/ +package analysis diff --git a/vendor/github.com/go-openapi/analysis/fixer.go b/vendor/github.com/go-openapi/analysis/fixer.go new file mode 100644 index 0000000..7c2ca08 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/fixer.go @@ -0,0 +1,79 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import "github.com/go-openapi/spec" + +// FixEmptyResponseDescriptions replaces empty ("") response +// descriptions in the input with "(empty)" to ensure that the +// resulting Swagger is stays valid. 
The problem appears to arise +// from reading in valid specs that have a explicit response +// description of "" (valid, response.description is required), but +// due to zero values being omitted upon re-serializing (omitempty) we +// lose them unless we stick some chars in there. +func FixEmptyResponseDescriptions(s *spec.Swagger) { + for k, v := range s.Responses { + FixEmptyDesc(&v) //#nosec + s.Responses[k] = v + } + + if s.Paths == nil { + return + } + + for _, v := range s.Paths.Paths { + if v.Get != nil { + FixEmptyDescs(v.Get.Responses) + } + if v.Put != nil { + FixEmptyDescs(v.Put.Responses) + } + if v.Post != nil { + FixEmptyDescs(v.Post.Responses) + } + if v.Delete != nil { + FixEmptyDescs(v.Delete.Responses) + } + if v.Options != nil { + FixEmptyDescs(v.Options.Responses) + } + if v.Head != nil { + FixEmptyDescs(v.Head.Responses) + } + if v.Patch != nil { + FixEmptyDescs(v.Patch.Responses) + } + } +} + +// FixEmptyDescs adds "(empty)" as the description for any Response in +// the given Responses object that doesn't already have one. +func FixEmptyDescs(rs *spec.Responses) { + FixEmptyDesc(rs.Default) + for k, v := range rs.StatusCodeResponses { + FixEmptyDesc(&v) //#nosec + rs.StatusCodeResponses[k] = v + } +} + +// FixEmptyDesc adds "(empty)" as the description to the given +// Response object if it doesn't already have one and isn't a +// ref. No-op on nil input. +func FixEmptyDesc(rs *spec.Response) { + if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil { + return + } + rs.Description = "(empty)" +} diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go new file mode 100644 index 0000000..ebedcc9 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/flatten.go @@ -0,0 +1,814 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import ( + "fmt" + "log" + "path" + "sort" + "strings" + + "github.com/go-openapi/analysis/internal/flatten/normalize" + "github.com/go-openapi/analysis/internal/flatten/operations" + "github.com/go-openapi/analysis/internal/flatten/replace" + "github.com/go-openapi/analysis/internal/flatten/schutils" + "github.com/go-openapi/analysis/internal/flatten/sortref" + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/spec" +) + +const definitionsPath = "#/definitions" + +// newRef stores information about refs created during the flattening process +type newRef struct { + key string + newName string + path string + isOAIGen bool + resolved bool + schema *spec.Schema + parents []string +} + +// context stores intermediary results from flatten +type context struct { + newRefs map[string]*newRef + warnings []string + resolved map[string]string +} + +func newContext() *context { + return &context{ + newRefs: make(map[string]*newRef, 150), + warnings: make([]string, 0), + resolved: make(map[string]string, 50), + } +} + +// Flatten an analyzed spec and produce a self-contained spec bundle. +// +// There is a minimal and a full flattening mode. +// +// Minimally flattening a spec means: +// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left +// unscathed) +// - Importing external (http, file) references so they become internal to the document +// - Moving every JSON pointer to a $ref to a named definition (i.e. 
the reworked spec does not contain pointers +// like "$ref": "#/definitions/myObject/allOfs/1") +// +// A minimally flattened spec thus guarantees the following properties: +// - all $refs point to a local definition (i.e. '#/definitions/...') +// - definitions are unique +// +// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they +// represent a complex schema or express commonality in the spec. +// Otherwise, they are simply expanded. +// Self-referencing JSON pointers cannot resolve to a type and trigger an error. +// +// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger. +// +// Fully flattening a spec means: +// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion. +// +// By complex, we mean every JSON object with some properties. +// Arrays, when they do not define a tuple, +// or empty objects with or without additionalProperties, are not considered complex and remain inline. +// +// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions +// have been created. +// +// Available flattening options: +// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched +// - Expand: expand all $ref's in the document (inoperant if Minimal set to true) +// - Verbose: croaks about name conflicts detected +// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening +// +// NOTE: expansion removes all $ref save circular $ref, which remain in place +// +// TODO: additional options +// - ProgagateNameExtensions: ensure that created entries properly follow naming rules when their parent have set a +// x-go-name extension +// - LiftAllOfs: +// - limit the flattening of allOf members when simple objects +// - merge allOf with validation only +// - merge allOf with extensions only +// - ... 
+func Flatten(opts FlattenOpts) error { + debugLog("FlattenOpts: %#v", opts) + + opts.flattenContext = newContext() + + // 1. Recursively expand responses, parameters, path items and items in simple schemas. + // + // This simplifies the spec and leaves only the $ref's in schema objects. + if err := expand(&opts); err != nil { + return err + } + + // 2. Strip the current document from absolute $ref's that actually a in the root, + // so we can recognize them as proper definitions + // + // In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped + if err := normalizeRef(&opts); err != nil { + return err + } + + // 3. Optionally remove shared parameters and responses already expanded (now unused). + // + // Operation parameters (i.e. under paths) remain. + if opts.RemoveUnused { + removeUnusedShared(&opts) + } + + // 4. Import all remote references. + if err := importReferences(&opts); err != nil { + return err + } + + // 5. full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps) + if !opts.Minimal && !opts.Expand { + if err := nameInlinedSchemas(&opts); err != nil { + return err + } + } + + // 6. Rewrite JSON pointers other than $ref to named definitions + // and attempt to resolve conflicting names whenever possible. + if err := stripPointersAndOAIGen(&opts); err != nil { + return err + } + + // 7. Strip the spec from unused definitions + if opts.RemoveUnused { + removeUnused(&opts) + } + + // 8. 
Issue warning notifications, if any + opts.croak() + + // TODO: simplify known schema patterns to flat objects with properties + // examples: + // - lift simple allOf object, + // - empty allOf with validation only or extensions only + // - rework allOf arrays + // - rework allOf additionalProperties + + return nil +} + +func expand(opts *FlattenOpts) error { + if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil { + return err + } + + opts.Spec.reload() // re-analyze + + return nil +} + +// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76: +// leading absolute file in $ref is stripped +func normalizeRef(opts *FlattenOpts) error { + debugLog("normalizeRef") + + altered := false + for k, w := range opts.Spec.references.allRefs { + if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS + continue + } + + altered = true + debugLog("stripping absolute path for: %s", w.String()) + + // strip the base path from definition + if err := replace.UpdateRef(opts.Swagger(), k, + spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil { + return err + } + } + + if altered { + opts.Spec.reload() // re-analyze + } + + return nil +} + +func removeUnusedShared(opts *FlattenOpts) { + opts.Swagger().Parameters = nil + opts.Swagger().Responses = nil + + opts.Spec.reload() // re-analyze +} + +func importReferences(opts *FlattenOpts) error { + var ( + imported bool + err error + ) + + for !imported && err == nil { + // iteratively import remote references until none left. + // This inlining deals with name conflicts by introducing auto-generated names ("OAIGen") + imported, err = importExternalReferences(opts) + + opts.Spec.reload() // re-analyze + } + + return err +} + +// nameInlinedSchemas replaces every complex inline construct by a named definition. 
+func nameInlinedSchemas(opts *FlattenOpts) error { + debugLog("nameInlinedSchemas") + + namer := &InlineSchemaNamer{ + Spec: opts.Swagger(), + Operations: operations.AllOpRefsByRef(opts.Spec, nil), + flattenContext: opts.flattenContext, + opts: opts, + } + + depthFirst := sortref.DepthFirst(opts.Spec.allSchemas) + for _, key := range depthFirst { + sch := opts.Spec.allSchemas[key] + if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel { + continue + } + + asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath}) + if err != nil { + return fmt.Errorf("schema analysis [%s]: %w", key, err) + } + + if asch.isAnalyzedAsComplex() { // move complex schemas to definitions + if err := namer.Name(key, sch.Schema, asch); err != nil { + return err + } + } + } + + opts.Spec.reload() // re-analyze + + return nil +} + +func removeUnused(opts *FlattenOpts) { + for removeUnusedSinglePass(opts) { + // continue until no unused definition remains + } +} + +func removeUnusedSinglePass(opts *FlattenOpts) (hasRemoved bool) { + expected := make(map[string]struct{}) + for k := range opts.Swagger().Definitions { + expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{} + } + + for _, k := range opts.Spec.AllDefinitionReferences() { + delete(expected, k) + } + + for k := range expected { + hasRemoved = true + debugLog("removing unused definition %s", path.Base(k)) + if opts.Verbose { + log.Printf("info: removing unused definition: %s", path.Base(k)) + } + delete(opts.Swagger().Definitions, path.Base(k)) + } + + opts.Spec.reload() // re-analyze + + return hasRemoved +} + +func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error { + // rewrite ref with already resolved external ref (useful for cyclical refs): + // rewrite external refs to local ones + debugLog("resolving known ref [%s] to %s", refStr, newName) + + for _, key := range entry.Keys { + if err := 
replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil { + return err + } + } + + return nil +} + +func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error { + var ( + isOAIGen bool + newName string + ) + + debugLog("resolving schema from remote $ref [%s]", refStr) + + sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false)) + if err != nil { + return fmt.Errorf("could not resolve schema: %w", err) + } + + // at this stage only $ref analysis matters + partialAnalyzer := &Spec{ + references: referenceAnalysis{}, + patterns: patternAnalysis{}, + enums: enumAnalysis{}, + } + partialAnalyzer.reset() + partialAnalyzer.analyzeSchema("", sch, "/") + + // now rewrite those refs with rebase + for key, ref := range partialAnalyzer.references.allRefs { + if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil { + return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err) + } + } + + // generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name + newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref, opts)) + debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen) + + opts.flattenContext.resolved[refStr] = newName + + // rewrite the external refs to local ones + for _, key := range entry.Keys { + if err := replace.UpdateRef(opts.Swagger(), key, + spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil { + return err + } + + // keep track of created refs + resolved := false + if _, ok := opts.flattenContext.newRefs[key]; ok { + resolved = opts.flattenContext.newRefs[key].resolved + } + + debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved) + opts.flattenContext.newRefs[key] = &newRef{ + key: key, + newName: 
newName, + path: path.Join(definitionsPath, newName), + isOAIGen: isOAIGen, + resolved: resolved, + schema: sch, + } + } + + // add the resolved schema to the definitions + schutils.Save(opts.Swagger(), newName, sch) + + return nil +} + +// importExternalReferences iteratively digs remote references and imports them into the main schema. +// +// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported. +// +// This returns true when no more remote references can be found. +func importExternalReferences(opts *FlattenOpts) (bool, error) { + debugLog("importExternalReferences") + + groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath) + sortedRefStr := make([]string, 0, len(groupedRefs)) + if opts.flattenContext == nil { + opts.flattenContext = newContext() + } + + // sort $ref resolution to ensure deterministic name conflict resolution + for refStr := range groupedRefs { + sortedRefStr = append(sortedRefStr, refStr) + } + sort.Strings(sortedRefStr) + + complete := true + + for _, refStr := range sortedRefStr { + entry := groupedRefs[refStr] + if entry.Ref.HasFragmentOnly { + continue + } + + complete = false + + newName := opts.flattenContext.resolved[refStr] + if newName != "" { + if err := importKnownRef(entry, refStr, newName, opts); err != nil { + return false, err + } + + continue + } + + // resolve schemas + if err := importNewRef(entry, refStr, opts); err != nil { + return false, err + } + } + + // maintains ref index entries + for k := range opts.flattenContext.newRefs { + r := opts.flattenContext.newRefs[k] + + // update tracking with resolved schemas + if r.schema.Ref.String() != "" { + ref := spec.MustCreateRef(r.path) + sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false)) + if err != nil { + return false, fmt.Errorf("could not resolve schema: %w", err) + } + + r.schema = sch + } + + if r.path == k { + continue + } + + // update 
tracking with renamed keys: got a cascade of refs + renamed := *r + renamed.key = r.path + opts.flattenContext.newRefs[renamed.path] = &renamed + + // indirect ref + r.newName = path.Base(k) + r.schema = spec.RefSchema(r.path) + r.path = k + r.isOAIGen = strings.Contains(k, "OAIGen") + } + + return complete, nil +} + +// stripPointersAndOAIGen removes anonymous JSON pointers from spec and chain with name conflicts handler. +// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible. +func stripPointersAndOAIGen(opts *FlattenOpts) error { + // name all JSON pointers to anonymous documents + if err := namePointers(opts); err != nil { + return err + } + + // remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts) + hasIntroducedPointerOrInline, ers := stripOAIGen(opts) + if ers != nil { + return ers + } + + // iterate as pointer or OAIGen resolution may introduce inline schemas or pointers + for hasIntroducedPointerOrInline { + if !opts.Minimal { + opts.Spec.reload() // re-analyze + if err := nameInlinedSchemas(opts); err != nil { + return err + } + } + + if err := namePointers(opts); err != nil { + return err + } + + // restrip and re-analyze + var err error + if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil { + return err + } + } + + return nil +} + +// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions. +// +// A dedupe is deemed unnecessary whenever: +// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining) +// - there is a conflict with multiple parents: merge OAIGen in first parent, the rewrite other parents to point to +// the first parent. +// +// This function returns true whenever it re-inlined a complex schema, so the caller may chose to iterate +// pointer and name resolution again. 
+func stripOAIGen(opts *FlattenOpts) (bool, error) { + debugLog("stripOAIGen") + replacedWithComplex := false + + // figure out referers of OAIGen definitions (doing it before the ref start mutating) + for _, r := range opts.flattenContext.newRefs { + updateRefParents(opts.Spec.references.allRefs, r) + } + + for k := range opts.flattenContext.newRefs { + r := opts.flattenContext.newRefs[k] + debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s", + k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String()) + + if !r.isOAIGen || len(r.parents) == 0 { + continue + } + + hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r) + if err != nil { + return replacedWithComplex, err + } + + replacedWithComplex = replacedWithComplex || hasReplacedWithComplex + } + + debugLog("replacedWithComplex: %t", replacedWithComplex) + opts.Spec.reload() // re-analyze + + return replacedWithComplex, nil +} + +// updateRefParents updates all parents of an updated $ref +func updateRefParents(allRefs map[string]spec.Ref, r *newRef) { + if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping) + return + } + for k, v := range allRefs { + if r.path != v.String() { + continue + } + + found := false + for _, p := range r.parents { + if p == k { + found = true + + break + } + } + if !found { + r.parents = append(r.parents, k) + } + } +} + +func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) { + replacedWithComplex := false + + pr := sortref.TopmostFirst(r.parents) + + // rewrite first parent schema in hierarchical then lexicographical order + debugLog("rewrite first parent %s with schema", pr[0]) + if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil { + return false, err + } + + if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen { + // update parent in ref index entry + debugLog("update parent entry: %s", 
pr[0]) + pa.schema = r.schema + pa.resolved = false + replacedWithComplex = true + } + + // rewrite other parents to point to first parent + if len(pr) > 1 { + for _, p := range pr[1:] { + replacingRef := spec.MustCreateRef(pr[0]) + + // set complex when replacing ref is an anonymous jsonpointer: further processing may be required + replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath + debugLog("rewrite parent with ref: %s", replacingRef.String()) + + // NOTE: it is possible at this stage to introduce json pointers (to non-definitions places). + // Those are stripped later on. + if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil { + return false, err + } + + if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen { + // update parent in ref index + debugLog("update parent entry: %s", p) + pa.schema = r.schema + pa.resolved = false + replacedWithComplex = true + } + } + } + + // remove OAIGen definition + debugLog("removing definition %s", path.Base(r.path)) + delete(opts.Swagger().Definitions, path.Base(r.path)) + + // propagate changes in ref index for keys which have this one as a parent + for kk, value := range opts.flattenContext.newRefs { + if kk == k || !value.isOAIGen || value.resolved { + continue + } + + found := false + newParents := make([]string, 0, len(value.parents)) + for _, parent := range value.parents { + switch { + case parent == r.path: + found = true + parent = pr[0] + case strings.HasPrefix(parent, r.path+"/"): + found = true + parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path)) + } + + newParents = append(newParents, parent) + } + + if found { + value.parents = newParents + } + } + + // mark naming conflict as resolved + debugLog("marking naming conflict resolved for key: %s", r.key) + opts.flattenContext.newRefs[r.key].isOAIGen = false + opts.flattenContext.newRefs[r.key].resolved = true + + // determine if the previous substitution did inline a complex schema 
+ if r.schema != nil && r.schema.Ref.String() == "" { // inline schema + asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath}) + if err != nil { + return false, err + } + + debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex()) + replacedWithComplex = replacedWithComplex || !(path.Dir(pr[0]) == definitionsPath) && asch.isAnalyzedAsComplex() + } + + return replacedWithComplex, nil +} + +// namePointers replaces all JSON pointers to anonymous documents by a $ref to a new named definitions. +// +// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself. +// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used). +func namePointers(opts *FlattenOpts) error { + debugLog("name pointers") + + refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas)) + for k, ref := range opts.Spec.references.allRefs { + debugLog("name pointers: %q => %#v", k, ref) + if path.Dir(ref.String()) == definitionsPath { + // this a ref to a top-level definition: ok + continue + } + + result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref) + if err != nil { + return fmt.Errorf("at %s, %w", k, err) + } + + replacingRef := result.Ref + sch := result.Schema + if opts.flattenContext != nil { + opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...) 
+ } + + debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String()) + refsToReplace[k] = SchemaRef{ + Name: k, // caller + Ref: replacingRef, // called + Schema: sch, + TopLevel: path.Dir(replacingRef.String()) == definitionsPath, + } + } + + depthFirst := sortref.DepthFirst(refsToReplace) + namer := &InlineSchemaNamer{ + Spec: opts.Swagger(), + Operations: operations.AllOpRefsByRef(opts.Spec, nil), + flattenContext: opts.flattenContext, + opts: opts, + } + + for _, key := range depthFirst { + v := refsToReplace[key] + // update current replacement, which may have been updated by previous changes of deeper elements + result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref) + if erd != nil { + return fmt.Errorf("at %s, %w", key, erd) + } + + if opts.flattenContext != nil { + opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...) + } + + v.Ref = result.Ref + v.Schema = result.Schema + v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath + debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String()) + + if v.TopLevel { + debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String()) + + // if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref + if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil { + return err + } + + continue + } + + if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil { + return err + } + } + + opts.Spec.reload() // re-analyze + + return nil +} + +func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error { + // this is a JSON pointer to an anonymous document (internal or external): + // create a definition for this schema when: + // - it is a complex schema + // - or it is pointed by more than one $ref (i.e. 
expresses commonality) + // otherwise, expand the pointer (single reference to a simple type) + // + // The named definition for this follows the target's key, not the caller's + debugLog("namePointers at %s for %s", key, v.Ref.String()) + + // qualify the expanded schema + asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath}) + if ers != nil { + return fmt.Errorf("schema analysis [%s]: %w", key, ers) + } + callers := make([]string, 0, 64) + + debugLog("looking for callers") + + an := New(opts.Swagger()) + for k, w := range an.references.allRefs { + r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w) + if err != nil { + return fmt.Errorf("at %s, %w", key, err) + } + + if opts.flattenContext != nil { + opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...) + } + + if r.Ref.String() == v.Ref.String() { + callers = append(callers, k) + } + } + + debugLog("callers for %s: %d", v.Ref.String(), len(callers)) + if len(callers) == 0 { + // has already been updated and resolved + return nil + } + + parts := sortref.KeyParts(v.Ref.String()) + debugLog("number of callers for %s: %d", v.Ref.String(), len(callers)) + + // identifying edge case when the namer did nothing because we point to a non-schema object + // no definition is created and we expand the $ref for all callers + debugLog("decide what to do with the schema pointed to: asch.IsSimpleSchema=%t, len(callers)=%d, parts.IsSharedParam=%t, parts.IsSharedResponse=%t", + asch.IsSimpleSchema, len(callers), parts.IsSharedParam(), parts.IsSharedResponse(), + ) + + if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() { + debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String()) + if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil { + return err + } + + // regular case: we named the $ref as a definition, and we move all callers to this new $ref + for _, 
caller := range callers { + if caller == key { + continue + } + + // move $ref for next to resolve + debugLog("identified caller of %s at [%s]", v.Ref.String(), caller) + c := refsToReplace[caller] + c.Ref = v.Ref + refsToReplace[caller] = c + } + + return nil + } + + // everything that is a simple schema and not factorizable is expanded + debugLog("expand JSON pointer for key=%s", key) + + if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil { + return err + } + // NOTE: there is no other caller to update + + return nil +} diff --git a/vendor/github.com/go-openapi/analysis/flatten_name.go b/vendor/github.com/go-openapi/analysis/flatten_name.go new file mode 100644 index 0000000..c7d7938 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/flatten_name.go @@ -0,0 +1,308 @@ +package analysis + +import ( + "fmt" + "path" + "sort" + "strings" + + "github.com/go-openapi/analysis/internal/flatten/operations" + "github.com/go-openapi/analysis/internal/flatten/replace" + "github.com/go-openapi/analysis/internal/flatten/schutils" + "github.com/go-openapi/analysis/internal/flatten/sortref" + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +// InlineSchemaNamer finds a new name for an inlined type +type InlineSchemaNamer struct { + Spec *spec.Swagger + Operations map[string]operations.OpRef + flattenContext *context + opts *FlattenOpts +} + +// Name yields a new name for the inline schema +func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error { + debugLog("naming inlined schema at %s", key) + + parts := sortref.KeyParts(key) + for _, name := range namesFromKey(parts, aschema, isn.Operations) { + if name == "" { + continue + } + + // create unique name + mangle := mangler(isn.opts) + newName, isOAIGen := uniqifyName(isn.Spec.Definitions, mangle(name)) + + // clone schema + sch := schutils.Clone(schema) + + // replace values on schema + debugLog("rewriting schema to ref: key=%s with 
new name: %s", key, newName) + if err := replace.RewriteSchemaToRef(isn.Spec, key, + spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil { + return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err) + } + + // rewrite any dependent $ref pointing to this place, + // when not already pointing to a top-level definition. + // + // NOTE: this is important if such referers use arbitrary JSON pointers. + an := New(isn.Spec) + for k, v := range an.references.allRefs { + r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v) + if erd != nil { + return fmt.Errorf("at %s, %w", k, erd) + } + + if isn.opts.flattenContext != nil { + isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...) + } + + if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) { + continue + } + + debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String()) + + // rewrite $ref to the new target + if err := replace.UpdateRef(isn.Spec, k, + spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil { + return err + } + } + + // NOTE: this extension is currently not used by go-swagger (provided for information only) + sch.AddExtension("x-go-gen-location", GenLocation(parts)) + + // save cloned schema to definitions + schutils.Save(isn.Spec, newName, sch) + + // keep track of created refs + if isn.flattenContext == nil { + continue + } + + debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen) + resolved := false + + if _, ok := isn.flattenContext.newRefs[key]; ok { + resolved = isn.flattenContext.newRefs[key].resolved + } + + isn.flattenContext.newRefs[key] = &newRef{ + key: key, + newName: newName, + path: path.Join(definitionsPath, newName), + isOAIGen: isOAIGen, + resolved: resolved, + schema: sch, + } + } + + return nil +} + +// uniqifyName yields a unique name for 
a definition +func uniqifyName(definitions spec.Definitions, name string) (string, bool) { + isOAIGen := false + if name == "" { + name = "oaiGen" + isOAIGen = true + } + + if len(definitions) == 0 { + return name, isOAIGen + } + + unq := true + for k := range definitions { + if strings.EqualFold(k, name) { + unq = false + + break + } + } + + if unq { + return name, isOAIGen + } + + name += "OAIGen" + isOAIGen = true + var idx int + unique := name + _, known := definitions[unique] + + for known { + idx++ + unique = fmt.Sprintf("%s%d", name, idx) + _, known = definitions[unique] + } + + return unique, isOAIGen +} + +func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string { + var ( + baseNames [][]string + startIndex int + ) + + switch { + case parts.IsOperation(): + baseNames, startIndex = namesForOperation(parts, operations) + case parts.IsDefinition(): + baseNames, startIndex = namesForDefinition(parts) + default: + // this a non-standard pointer: build a name by concatenating its parts + baseNames = [][]string{parts} + startIndex = len(baseNames) + 1 + } + + result := make([]string, 0, len(baseNames)) + for _, segments := range baseNames { + nm := parts.BuildName(segments, startIndex, partAdder(aschema)) + if nm == "" { + continue + } + + result = append(result, nm) + } + sort.Strings(result) + + debugLog("names from parts: %v => %v", parts, result) + return result +} + +func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) { + var ( + baseNames [][]string + startIndex int + ) + + piref := parts.PathItemRef() + if piref.String() != "" && parts.IsOperationParam() { + if op, ok := operations[piref.String()]; ok { + startIndex = 5 + baseNames = append(baseNames, []string{op.ID, "params", "body"}) + } + } else if parts.IsSharedOperationParam() { + pref := parts.PathRef() + for k, v := range operations { + if strings.HasPrefix(k, pref.String()) { + startIndex = 4 
+ baseNames = append(baseNames, []string{v.ID, "params", "body"}) + } + } + } + + return baseNames, startIndex +} + +func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) { + var ( + baseNames [][]string + startIndex int + ) + + // params + if parts.IsOperationParam() || parts.IsSharedOperationParam() { + baseNames, startIndex = namesForParam(parts, operations) + } + + // responses + if parts.IsOperationResponse() { + piref := parts.PathItemRef() + if piref.String() != "" { + if op, ok := operations[piref.String()]; ok { + startIndex = 6 + baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"}) + } + } + } + + return baseNames, startIndex +} + +func namesForDefinition(parts sortref.SplitKey) ([][]string, int) { + nm := parts.DefinitionName() + if nm != "" { + return [][]string{{parts.DefinitionName()}}, 2 + } + + return [][]string{}, 0 +} + +// partAdder knows how to interpret a schema when it comes to build a name from parts +func partAdder(aschema *AnalyzedSchema) sortref.PartAdder { + return func(part string) []string { + segments := make([]string, 0, 2) + + if part == "items" || part == "additionalItems" { + if aschema.IsTuple || aschema.IsTupleWithExtra { + segments = append(segments, "tuple") + } else { + segments = append(segments, "items") + } + + if part == "additionalItems" { + segments = append(segments, part) + } + + return segments + } + + segments = append(segments, part) + + return segments + } +} + +func mangler(o *FlattenOpts) func(string) string { + if o.KeepNames { + return func(in string) string { return in } + } + + return swag.ToJSONName +} + +func nameFromRef(ref spec.Ref, o *FlattenOpts) string { + mangle := mangler(o) + + u := ref.GetURL() + if u.Fragment != "" { + return mangle(path.Base(u.Fragment)) + } + + if u.Path != "" { + bn := path.Base(u.Path) + if bn != "" && bn != "/" { + ext := path.Ext(bn) + if ext != "" { + return mangle(bn[:len(bn)-len(ext)]) + } + + 
return mangle(bn) + } + } + + return mangle(strings.ReplaceAll(u.Host, ".", " ")) +} + +// GenLocation indicates from which section of the specification (models or operations) a definition has been created. +// +// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is provided +// for information only. +func GenLocation(parts sortref.SplitKey) string { + switch { + case parts.IsOperation(): + return "operations" + case parts.IsDefinition(): + return "models" + default: + return "" + } +} diff --git a/vendor/github.com/go-openapi/analysis/flatten_options.go b/vendor/github.com/go-openapi/analysis/flatten_options.go new file mode 100644 index 0000000..c943fe1 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/flatten_options.go @@ -0,0 +1,79 @@ +package analysis + +import ( + "log" + + "github.com/go-openapi/spec" +) + +// FlattenOpts configuration for flattening a swagger specification. +// +// The BasePath parameter is used to locate remote relative $ref found in the specification. +// This path is a file: it points to the location of the root document and may be either a local +// file path or a URL. +// +// If none specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...") +// found in the spec are searched from the current working directory. 
+type FlattenOpts struct { + Spec *Spec // The analyzed spec to work with + flattenContext *context // Internal context to track flattening activity + + BasePath string // The location of the root document for this spec to resolve relative $ref + + // Flattening options + Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false) + Minimal bool // When true, do not decompose complex structures such as allOf + Verbose bool // enable some reporting on possible name conflicts detected + RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening + ContinueOnError bool // Continue when spec expansion issues are found + KeepNames bool // Do not attempt to jsonify names from references when flattening + + /* Extra keys */ + _ struct{} // require keys +} + +// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document. +func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions { + return &spec.ExpandOptions{ + RelativeBase: f.BasePath, + SkipSchemas: skipSchemas, + ContinueOnError: f.ContinueOnError, + } +} + +// Swagger gets the swagger specification for this flatten operation +func (f *FlattenOpts) Swagger() *spec.Swagger { + return f.Spec.spec +} + +// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting +// from flattening a spec +func (f *FlattenOpts) croak() { + if !f.Verbose { + return + } + + reported := make(map[string]bool, len(f.flattenContext.newRefs)) + for _, v := range f.Spec.references.allRefs { + // warns about duplicate handling + for _, r := range f.flattenContext.newRefs { + if r.isOAIGen && r.path == v.String() { + reported[r.newName] = true + } + } + } + + for k := range reported { + log.Printf("warning: duplicate flattened definition name resolved as %s", k) + } + + // warns about possible type mismatches + uniqueMsg := make(map[string]bool) + for _, msg := range 
f.flattenContext.warnings { + if _, ok := uniqueMsg[msg]; ok { + continue + } + log.Printf("warning: %s", msg) + uniqueMsg[msg] = true + } +} diff --git a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go new file mode 100644 index 0000000..39f55a9 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go @@ -0,0 +1,41 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package debug + +import ( + "fmt" + "log" + "os" + "path/filepath" + "runtime" +) + +var ( + output = os.Stdout +) + +// GetLogger provides a prefix debug logger +func GetLogger(prefix string, debug bool) func(string, ...interface{}) { + if debug { + logger := log.New(output, prefix+":", log.LstdFlags) + + return func(msg string, args ...interface{}) { + _, file1, pos1, _ := runtime.Caller(1) + logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...)) + } + } + + return func(_ string, _ ...interface{}) {} +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go new file mode 100644 index 0000000..8c9df05 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go @@ -0,0 +1,87 @@ +package normalize + +import ( + "net/url" + "path" + "path/filepath" + "strings" + + "github.com/go-openapi/spec" +) + +// RebaseRef rebases a remote ref relative to a base ref. +// +// NOTE: does not support JSONschema ID for $ref (we assume we are working with swagger specs here). +// +// NOTE(windows): +// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec) +// * "/ in paths may appear as escape sequences +func RebaseRef(baseRef string, ref string) string { + baseRef, _ = url.PathUnescape(baseRef) + ref, _ = url.PathUnescape(ref) + + if baseRef == "" || baseRef == "." 
|| strings.HasPrefix(baseRef, "#") { + return ref + } + + parts := strings.Split(ref, "#") + + baseParts := strings.Split(baseRef, "#") + baseURL, _ := url.Parse(baseParts[0]) + if strings.HasPrefix(ref, "#") { + if baseURL.Host == "" { + return strings.Join([]string{baseParts[0], parts[1]}, "#") + } + + return strings.Join([]string{baseParts[0], parts[1]}, "#") + } + + refURL, _ := url.Parse(parts[0]) + if refURL.Host != "" || filepath.IsAbs(parts[0]) { + // not rebasing an absolute path + return ref + } + + // there is a relative path + var basePath string + if baseURL.Host != "" { + // when there is a host, standard URI rules apply (with "/") + baseURL.Path = path.Dir(baseURL.Path) + baseURL.Path = path.Join(baseURL.Path, "/"+parts[0]) + + return baseURL.String() + } + + // this is a local relative path + // basePart[0] and parts[0] are local filesystem directories/files + basePath = filepath.Dir(baseParts[0]) + relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0]) + if len(parts) > 1 { + return strings.Join([]string{relPath, parts[1]}, "#") + } + + return relPath +} + +// Path renders absolute path on remote file refs +// +// NOTE(windows): +// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec) +// * "/ in paths may appear as escape sequences +func Path(ref spec.Ref, basePath string) string { + uri, _ := url.PathUnescape(ref.String()) + if ref.HasFragmentOnly || filepath.IsAbs(uri) { + return uri + } + + refURL, _ := url.Parse(uri) + if refURL.Host != "" { + return uri + } + + parts := strings.Split(uri, "#") + // BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage + parts[0] = filepath.Join(filepath.Dir(basePath), parts[0]) + + return strings.Join(parts, "#") +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go new file mode 100644 
index 0000000..7f3a2b8 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go @@ -0,0 +1,90 @@ +package operations + +import ( + "path" + "sort" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +// AllOpRefsByRef returns an index of sortable operations +func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef { + return OpRefsByRef(GatherOperations(specDoc, operationIDs)) +} + +// OpRefsByRef indexes a map of sortable operations +func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef { + result := make(map[string]OpRef, len(oprefs)) + for _, v := range oprefs { + result[v.Ref.String()] = v + } + + return result +} + +// OpRef is an indexable, sortable operation +type OpRef struct { + Method string + Path string + Key string + ID string + Op *spec.Operation + Ref spec.Ref +} + +// OpRefs is a sortable collection of operations +type OpRefs []OpRef + +func (o OpRefs) Len() int { return len(o) } +func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] } +func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key } + +// Provider knows how to collect operations from a spec +type Provider interface { + Operations() map[string]map[string]*spec.Operation +} + +// GatherOperations builds a map of sorted operations from a spec +func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef { + var oprefs OpRefs + + for method, pathItem := range specDoc.Operations() { + for pth, operation := range pathItem { + vv := *operation + oprefs = append(oprefs, OpRef{ + Key: swag.ToGoName(strings.ToLower(method) + " " + pth), + Method: method, + Path: pth, + ID: vv.ID, + Op: &vv, + Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)), + }) + } + } + + sort.Sort(oprefs) + + operations := make(map[string]OpRef) + for _, opr := range oprefs { + nm := opr.ID + if nm == "" { + nm = opr.Key + } + + 
oo, found := operations[nm] + if found && oo.Method != opr.Method && oo.Path != opr.Path { + nm = opr.Key + } + + if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) { + opr.ID = nm + opr.Op.ID = nm + operations[nm] = opr + } + } + + return operations +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go new file mode 100644 index 0000000..c0f43e7 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go @@ -0,0 +1,458 @@ +package replace + +import ( + "encoding/json" + "fmt" + "net/url" + "os" + "path" + "strconv" + + "github.com/go-openapi/analysis/internal/debug" + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/spec" +) + +const definitionsPath = "#/definitions" + +var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "") + +// RewriteSchemaToRef replaces a schema with a Ref +func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error { + debugLog("rewriting schema to ref for %s with %s", key, ref.String()) + _, value, err := getPointerFromKey(sp, key) + if err != nil { + return err + } + + switch refable := value.(type) { + case *spec.Schema: + return rewriteParentRef(sp, key, ref) + + case spec.Schema: + return rewriteParentRef(sp, key, ref) + + case *spec.SchemaOrArray: + if refable.Schema != nil { + refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + } + + case *spec.SchemaOrBool: + if refable.Schema != nil { + refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + } + case map[string]interface{}: // this happens e.g. 
if a schema points to an extension unmarshaled as map[string]interface{} + return rewriteParentRef(sp, key, ref) + default: + return fmt.Errorf("no schema with ref found at %s for %T", key, value) + } + + return nil +} + +func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error { + parent, entry, pvalue, err := getParentFromKey(sp, key) + if err != nil { + return err + } + + debugLog("rewriting holder for %T", pvalue) + switch container := pvalue.(type) { + case spec.Response: + if err := rewriteParentRef(sp, "#"+parent, ref); err != nil { + return err + } + + case *spec.Response: + container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case *spec.Responses: + statusCode, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", key[1:], err) + } + resp := container.StatusCodeResponses[statusCode] + resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + container.StatusCodeResponses[statusCode] = resp + + case map[string]spec.Response: + resp := container[entry] + resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + container[entry] = resp + + case spec.Parameter: + if err := rewriteParentRef(sp, "#"+parent, ref); err != nil { + return err + } + + case map[string]spec.Parameter: + param := container[entry] + param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + container[entry] = param + + case []spec.Parameter: + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", key[1:], err) + } + param := container[idx] + param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + container[idx] = param + + case spec.Definitions: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case map[string]spec.Schema: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case []spec.Schema: + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a 
number: %w", key[1:], err) + } + container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case *spec.SchemaOrArray: + // NOTE: this is necessarily an array - otherwise, the parent would be *Schema + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", key[1:], err) + } + container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case spec.SchemaProperties: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case *interface{}: + *container = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema + + default: + return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue) + } + + return nil +} + +// getPointerFromKey retrieves the content of the JSON pointer "key" +func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) { + switch sp.(type) { + case *spec.Schema: + case *spec.Swagger: + default: + panic("unexpected type used in getPointerFromKey") + } + if key == "#/" { + return "", sp, nil + } + // unescape chars in key, e.g. "{}" from path params + pth, _ := url.PathUnescape(key[1:]) + ptr, err := jsonpointer.New(pth) + if err != nil { + return "", nil, err + } + + value, _, err := ptr.Get(sp) + if err != nil { + debugLog("error when getting key: %s with path: %s", key, pth) + + return "", nil, err + } + + return pth, value, nil +} + +// getParentFromKey retrieves the container of the JSON pointer "key" +func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) { + switch sp.(type) { + case *spec.Schema: + case *spec.Swagger: + default: + panic("unexpected type used in getPointerFromKey") + } + // unescape chars in key, e.g. 
"{}" from path params + pth, _ := url.PathUnescape(key[1:]) + + parent, entry := path.Dir(pth), path.Base(pth) + debugLog("getting schema holder at: %s, with entry: %s", parent, entry) + + pptr, err := jsonpointer.New(parent) + if err != nil { + return "", "", nil, err + } + pvalue, _, err := pptr.Get(sp) + if err != nil { + return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err) + } + + return parent, entry, pvalue, nil +} + +// UpdateRef replaces a ref by another one +func UpdateRef(sp interface{}, key string, ref spec.Ref) error { + switch sp.(type) { + case *spec.Schema: + case *spec.Swagger: + default: + panic("unexpected type used in getPointerFromKey") + } + debugLog("updating ref for %s with %s", key, ref.String()) + pth, value, err := getPointerFromKey(sp, key) + if err != nil { + return err + } + + switch refable := value.(type) { + case *spec.Schema: + refable.Ref = ref + case *spec.SchemaOrArray: + if refable.Schema != nil { + refable.Schema.Ref = ref + } + case *spec.SchemaOrBool: + if refable.Schema != nil { + refable.Schema.Ref = ref + } + case spec.Schema: + debugLog("rewriting holder for %T", refable) + _, entry, pvalue, erp := getParentFromKey(sp, key) + if erp != nil { + return err + } + switch container := pvalue.(type) { + case spec.Definitions: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case map[string]spec.Schema: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case []spec.Schema: + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", pth, err) + } + container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + case *spec.SchemaOrArray: + // NOTE: this is necessarily an array - otherwise, the parent would be *Schema + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", pth, err) + } + container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + 
case spec.SchemaProperties: + container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}} + + // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema + + default: + return fmt.Errorf("unhandled container type at %s: %T", key, value) + } + + default: + return fmt.Errorf("no schema with ref found at %s for %T", key, value) + } + + return nil +} + +// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema) +func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error { + debugLog("updating ref for %s with schema", key) + pth, value, err := getPointerFromKey(sp, key) + if err != nil { + return err + } + + switch refable := value.(type) { + case *spec.Schema: + *refable = *sch + case spec.Schema: + _, entry, pvalue, erp := getParentFromKey(sp, key) + if erp != nil { + return err + } + switch container := pvalue.(type) { + case spec.Definitions: + container[entry] = *sch + + case map[string]spec.Schema: + container[entry] = *sch + + case []spec.Schema: + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", pth, err) + } + container[idx] = *sch + + case *spec.SchemaOrArray: + // NOTE: this is necessarily an array - otherwise, the parent would be *Schema + idx, err := strconv.Atoi(entry) + if err != nil { + return fmt.Errorf("%s not a number: %w", pth, err) + } + container.Schemas[idx] = *sch + + case spec.SchemaProperties: + container[entry] = *sch + + // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema + + default: + return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value) + } + case *spec.SchemaOrArray: + *refable.Schema = *sch + // NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema + case *spec.SchemaOrBool: + *refable.Schema = *sch + default: + return fmt.Errorf("no schema with ref found at %s for %T", key, value) + } + + return nil +} + +// DeepestRefResult holds the results from DeepestRef analysis 
+type DeepestRefResult struct { + Ref spec.Ref + Schema *spec.Schema + Warnings []string +} + +// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions. +// - if no definition is found, returns the deepest ref. +// - pointers to external files are expanded +// +// NOTE: all external $ref's are assumed to be already expanded at this stage. +func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) { + if !ref.HasFragmentOnly { + // we found an external $ref, which is odd at this stage: + // do nothing on external $refs + return &DeepestRefResult{Ref: ref}, nil + } + + currentRef := ref + visited := make(map[string]bool, 64) + warnings := make([]string, 0, 2) + +DOWNREF: + for currentRef.String() != "" { + if path.Dir(currentRef.String()) == definitionsPath { + // this is a top-level definition: stop here and return this ref + return &DeepestRefResult{Ref: currentRef}, nil + } + + if _, beenThere := visited[currentRef.String()]; beenThere { + return nil, + fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String()) + } + + visited[currentRef.String()] = true + value, _, err := currentRef.GetPointer().Get(sp) + if err != nil { + return nil, err + } + + switch refable := value.(type) { + case *spec.Schema: + if refable.Ref.String() == "" { + break DOWNREF + } + currentRef = refable.Ref + + case spec.Schema: + if refable.Ref.String() == "" { + break DOWNREF + } + currentRef = refable.Ref + + case *spec.SchemaOrArray: + if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" { + break DOWNREF + } + currentRef = refable.Schema.Ref + + case *spec.SchemaOrBool: + if refable.Schema == nil || refable.Schema != nil && refable.Schema.Ref.String() == "" { + break DOWNREF + } + currentRef = refable.Schema.Ref + + case spec.Response: + // a pointer points to a schema initially marshalled in responses section... 
+ // Attempt to convert this to a schema. If this fails, the spec is invalid + asJSON, _ := refable.MarshalJSON() + var asSchema spec.Schema + + err := asSchema.UnmarshalJSON(asJSON) + if err != nil { + return nil, + fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T (%v)", + currentRef.String(), value, err, + ) + } + warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String())) + + if asSchema.Ref.String() == "" { + break DOWNREF + } + currentRef = asSchema.Ref + + case spec.Parameter: + // a pointer points to a schema initially marshalled in parameters section... + // Attempt to convert this to a schema. If this fails, the spec is invalid + asJSON, _ := refable.MarshalJSON() + var asSchema spec.Schema + if err := asSchema.UnmarshalJSON(asJSON); err != nil { + return nil, + fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T (%v)", + currentRef.String(), value, err, + ) + } + + warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String())) + + if asSchema.Ref.String() == "" { + break DOWNREF + } + currentRef = asSchema.Ref + + default: + // fallback: attempts to resolve the pointer as a schema + if refable == nil { + break DOWNREF + } + + asJSON, _ := json.Marshal(refable) + var asSchema spec.Schema + if err := asSchema.UnmarshalJSON(asJSON); err != nil { + return nil, + fmt.Errorf("unhandled type to resolve JSON pointer %s. 
Expected a Schema, got: %T (%v)", + currentRef.String(), value, err, + ) + } + warnings = append(warnings, fmt.Sprintf("found $ref %q (%T) interpreted as schema", currentRef.String(), refable)) + + if asSchema.Ref.String() == "" { + break DOWNREF + } + currentRef = asSchema.Ref + } + } + + // assess what schema we're ending with + sch, erv := spec.ResolveRefWithBase(sp, ¤tRef, opts) + if erv != nil { + return nil, erv + } + + if sch == nil { + return nil, fmt.Errorf("no schema found at %s", currentRef.String()) + } + + return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go new file mode 100644 index 0000000..4590236 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go @@ -0,0 +1,29 @@ +// Package schutils provides tools to save or clone a schema +// when flattening a spec. 
+package schutils + +import ( + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +// Save registers a schema as an entry in spec #/definitions +func Save(sp *spec.Swagger, name string, schema *spec.Schema) { + if schema == nil { + return + } + + if sp.Definitions == nil { + sp.Definitions = make(map[string]spec.Schema, 150) + } + + sp.Definitions[name] = *schema +} + +// Clone deep-clones a schema +func Clone(schema *spec.Schema) *spec.Schema { + var sch spec.Schema + _ = swag.FromDynamicJSON(schema, &sch) + + return &sch +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go new file mode 100644 index 0000000..ac80fc2 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go @@ -0,0 +1,201 @@ +package sortref + +import ( + "net/http" + "path" + "strconv" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/spec" +) + +const ( + paths = "paths" + responses = "responses" + parameters = "parameters" + definitions = "definitions" +) + +var ( + ignoredKeys map[string]struct{} + validMethods map[string]struct{} +) + +func init() { + ignoredKeys = map[string]struct{}{ + "schema": {}, + "properties": {}, + "not": {}, + "anyOf": {}, + "oneOf": {}, + } + + validMethods = map[string]struct{}{ + "GET": {}, + "HEAD": {}, + "OPTIONS": {}, + "PATCH": {}, + "POST": {}, + "PUT": {}, + "DELETE": {}, + } +} + +// Key represent a key item constructed from /-separated segments +type Key struct { + Segments int + Key string +} + +// Keys is a sortable collable collection of Keys +type Keys []Key + +func (k Keys) Len() int { return len(k) } +func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] } +func (k Keys) Less(i, j int) bool { + return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key) +} + +// KeyParts construct a SplitKey with all its /-separated segments 
decomposed. It is sortable. +func KeyParts(key string) SplitKey { + var res []string + for _, part := range strings.Split(key[1:], "/") { + if part != "" { + res = append(res, jsonpointer.Unescape(part)) + } + } + + return res +} + +// SplitKey holds of the parts of a /-separated key, so that their location may be determined. +type SplitKey []string + +// IsDefinition is true when the split key is in the #/definitions section of a spec +func (s SplitKey) IsDefinition() bool { + return len(s) > 1 && s[0] == definitions +} + +// DefinitionName yields the name of the definition +func (s SplitKey) DefinitionName() string { + if !s.IsDefinition() { + return "" + } + + return s[1] +} + +func (s SplitKey) isKeyName(i int) bool { + if i <= 0 { + return false + } + + count := 0 + for idx := i - 1; idx > 0; idx-- { + if s[idx] != "properties" { + break + } + count++ + } + + return count%2 != 0 +} + +// PartAdder know how to construct the components of a new name +type PartAdder func(string) []string + +// BuildName builds a name from segments +func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string { + for i, part := range s[startIndex:] { + if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) { + segments = append(segments, adder(part)...) 
+ } + } + + return strings.Join(segments, " ") +} + +// IsOperation is true when the split key is in the operations section +func (s SplitKey) IsOperation() bool { + return len(s) > 1 && s[0] == paths +} + +// IsSharedOperationParam is true when the split key is in the parameters section of a path +func (s SplitKey) IsSharedOperationParam() bool { + return len(s) > 2 && s[0] == paths && s[2] == parameters +} + +// IsSharedParam is true when the split key is in the #/parameters section of a spec +func (s SplitKey) IsSharedParam() bool { + return len(s) > 1 && s[0] == parameters +} + +// IsOperationParam is true when the split key is in the parameters section of an operation +func (s SplitKey) IsOperationParam() bool { + return len(s) > 3 && s[0] == paths && s[3] == parameters +} + +// IsOperationResponse is true when the split key is in the responses section of an operation +func (s SplitKey) IsOperationResponse() bool { + return len(s) > 3 && s[0] == paths && s[3] == responses +} + +// IsSharedResponse is true when the split key is in the #/responses section of a spec +func (s SplitKey) IsSharedResponse() bool { + return len(s) > 1 && s[0] == responses +} + +// IsDefaultResponse is true when the split key is the default response for an operation +func (s SplitKey) IsDefaultResponse() bool { + return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default" +} + +// IsStatusCodeResponse is true when the split key is an operation response with a status code +func (s SplitKey) IsStatusCodeResponse() bool { + isInt := func() bool { + _, err := strconv.Atoi(s[4]) + + return err == nil + } + + return len(s) > 4 && s[0] == paths && s[3] == responses && isInt() +} + +// ResponseName yields either the status code or "Default" for a response +func (s SplitKey) ResponseName() string { + if s.IsStatusCodeResponse() { + code, _ := strconv.Atoi(s[4]) + + return http.StatusText(code) + } + + if s.IsDefaultResponse() { + return "Default" + } + + return "" +} + +// 
PathItemRef constructs a $ref object from a split key of the form /{path}/{method} +func (s SplitKey) PathItemRef() spec.Ref { + if len(s) < 3 { + return spec.Ref{} + } + + pth, method := s[1], s[2] + if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") { + return spec.Ref{} + } + + return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method))) +} + +// PathRef constructs a $ref object from a split key of the form /paths/{reference} +func (s SplitKey) PathRef() spec.Ref { + if !s.IsOperation() { + return spec.Ref{} + } + + return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1]))) +} diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go new file mode 100644 index 0000000..73243df --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go @@ -0,0 +1,141 @@ +package sortref + +import ( + "reflect" + "sort" + "strings" + + "github.com/go-openapi/analysis/internal/flatten/normalize" + "github.com/go-openapi/spec" +) + +var depthGroupOrder = []string{ + "sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition", +} + +type mapIterator struct { + len int + mapIter *reflect.MapIter +} + +func (i *mapIterator) Next() bool { + return i.mapIter.Next() +} + +func (i *mapIterator) Len() int { + return i.len +} + +func (i *mapIterator) Key() string { + return i.mapIter.Key().String() +} + +func mustMapIterator(anyMap interface{}) *mapIterator { + val := reflect.ValueOf(anyMap) + + return &mapIterator{mapIter: val.MapRange(), len: val.Len()} +} + +// DepthFirst sorts a map of anything. 
It groups keys by category +// (shared params, op param, statuscode response, default response, definitions) +// sort groups internally by number of parts in the key and lexical names +// flatten groups into a single list of keys +func DepthFirst(in interface{}) []string { + iterator := mustMapIterator(in) + sorted := make([]string, 0, iterator.Len()) + grouped := make(map[string]Keys, iterator.Len()) + + for iterator.Next() { + k := iterator.Key() + split := KeyParts(k) + var pk string + + if split.IsSharedOperationParam() { + pk = "sharedOpParam" + } + if split.IsOperationParam() { + pk = "opParam" + } + if split.IsStatusCodeResponse() { + pk = "codeResponse" + } + if split.IsDefaultResponse() { + pk = "defaultResponse" + } + if split.IsDefinition() { + pk = "definition" + } + if split.IsSharedParam() { + pk = "sharedParam" + } + if split.IsSharedResponse() { + pk = "sharedResponse" + } + grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k}) + } + + for _, pk := range depthGroupOrder { + res := grouped[pk] + sort.Sort(res) + + for _, v := range res { + sorted = append(sorted, v.Key) + } + } + + return sorted +} + +// topMostRefs is able to sort refs by hierarchical then lexicographic order, +// yielding refs ordered breadth-first. 
+type topmostRefs []string + +func (k topmostRefs) Len() int { return len(k) } +func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] } +func (k topmostRefs) Less(i, j int) bool { + li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/")) + if li == lj { + return k[i] < k[j] + } + + return li < lj +} + +// TopmostFirst sorts references by depth +func TopmostFirst(refs []string) []string { + res := topmostRefs(refs) + sort.Sort(res) + + return res +} + +// RefRevIdx is a reverse index for references +type RefRevIdx struct { + Ref spec.Ref + Keys []string +} + +// ReverseIndex builds a reverse index for references in schemas +func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx { + collected := make(map[string]RefRevIdx) + for key, schRef := range schemas { + // normalize paths before sorting, + // so we get together keys that are from the same external file + normalizedPath := normalize.Path(schRef, basePath) + + entry, ok := collected[normalizedPath] + if ok { + entry.Keys = append(entry.Keys, key) + collected[normalizedPath] = entry + + continue + } + + collected[normalizedPath] = RefRevIdx{ + Ref: schRef, + Keys: []string{key}, + } + } + + return collected +} diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go new file mode 100644 index 0000000..7785a29 --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/mixin.go @@ -0,0 +1,515 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package analysis + +import ( + "fmt" + "reflect" + + "github.com/go-openapi/spec" +) + +// Mixin modifies the primary swagger spec by adding the paths and +// definitions from the mixin specs. Top level parameters and +// responses from the mixins are also carried over. Operation id +// collisions are avoided by appending "Mixin" but only if +// needed. +// +// The following parts of primary are subject to merge, filling empty details +// - Info +// - BasePath +// - Host +// - ExternalDocs +// +// Consider calling FixEmptyResponseDescriptions() on the modified primary +// if you read them from storage and they are valid to start with. +// +// Entries in "paths", "definitions", "parameters" and "responses" are +// added to the primary in the order of the given mixins. If the entry +// already exists in primary it is skipped with a warning message. +// +// The count of skipped entries (from collisions) is returned so any +// deviation from the number expected can flag a warning in your build +// scripts. Carefully review the collisions before accepting them; +// consider renaming things if possible. +// +// No key normalization takes place (paths, type defs, +// etc). Ensure they are canonical if your downstream tools do +// key normalization of any form. +// +// Merging schemes (http, https), and consumers/producers do not account for +// collisions. +func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string { + skipped := make([]string, 0, len(mixins)) + opIDs := getOpIDs(primary) + initPrimary(primary) + + for i, m := range mixins { + skipped = append(skipped, mergeSwaggerProps(primary, m)...) + + skipped = append(skipped, mergeConsumes(primary, m)...) + + skipped = append(skipped, mergeProduces(primary, m)...) + + skipped = append(skipped, mergeTags(primary, m)...) + + skipped = append(skipped, mergeSchemes(primary, m)...) 
+ + skipped = append(skipped, mergeSecurityDefinitions(primary, m)...) + + skipped = append(skipped, mergeSecurityRequirements(primary, m)...) + + skipped = append(skipped, mergeDefinitions(primary, m)...) + + // merging paths requires a map of operationIDs to work with + skipped = append(skipped, mergePaths(primary, m, opIDs, i)...) + + skipped = append(skipped, mergeParameters(primary, m)...) + + skipped = append(skipped, mergeResponses(primary, m)...) + } + + return skipped +} + +// getOpIDs extracts all the paths..operationIds from the given +// spec and returns them as the keys in a map with 'true' values. +func getOpIDs(s *spec.Swagger) map[string]bool { + rv := make(map[string]bool) + if s.Paths == nil { + return rv + } + + for _, v := range s.Paths.Paths { + piops := pathItemOps(v) + + for _, op := range piops { + rv[op.ID] = true + } + } + + return rv +} + +func pathItemOps(p spec.PathItem) []*spec.Operation { + var rv []*spec.Operation + rv = appendOp(rv, p.Get) + rv = appendOp(rv, p.Put) + rv = appendOp(rv, p.Post) + rv = appendOp(rv, p.Delete) + rv = appendOp(rv, p.Head) + rv = appendOp(rv, p.Patch) + + return rv +} + +func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation { + if op == nil { + return ops + } + + return append(ops, op) +} + +func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for k, v := range m.SecurityDefinitions { + if _, exists := primary.SecurityDefinitions[k]; exists { + warn := fmt.Sprintf( + "SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k) + skipped = append(skipped, warn) + + continue + } + + primary.SecurityDefinitions[k] = v + } + + return +} + +func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for _, v := range m.Security { + found := false + for _, vv := range primary.Security { + if reflect.DeepEqual(v, vv) { + found = true + + break + } + } + + if found { + warn := 
fmt.Sprintf( + "Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v) + skipped = append(skipped, warn) + + continue + } + primary.Security = append(primary.Security, v) + } + + return +} + +func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for k, v := range m.Definitions { + // assume name collisions represent IDENTICAL type. careful. + if _, exists := primary.Definitions[k]; exists { + warn := fmt.Sprintf( + "definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k) + skipped = append(skipped, warn) + + continue + } + primary.Definitions[k] = v + } + + return +} + +func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIDs map[string]bool, mixIndex int) (skipped []string) { + if m.Paths != nil { + for k, v := range m.Paths.Paths { + if _, exists := primary.Paths.Paths[k]; exists { + warn := fmt.Sprintf( + "paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k) + skipped = append(skipped, warn) + + continue + } + + // Swagger requires that operationIds be + // unique within a spec. If we find a + // collision we append "Mixin0" to the + // operatoinId we are adding, where 0 is mixin + // index. We assume that operationIds with + // all the proivded specs are already unique. + piops := pathItemOps(v) + for _, piop := range piops { + if opIDs[piop.ID] { + piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex) + } + opIDs[piop.ID] = true + } + primary.Paths.Paths[k] = v + } + } + + return +} + +func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for k, v := range m.Parameters { + // could try to rename on conflict but would + // have to fix $refs in the mixin. 
Complain + // for now + if _, exists := primary.Parameters[k]; exists { + warn := fmt.Sprintf( + "top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k) + skipped = append(skipped, warn) + + continue + } + primary.Parameters[k] = v + } + + return +} + +func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for k, v := range m.Responses { + // could try to rename on conflict but would + // have to fix $refs in the mixin. Complain + // for now + if _, exists := primary.Responses[k]; exists { + warn := fmt.Sprintf( + "top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k) + skipped = append(skipped, warn) + + continue + } + primary.Responses[k] = v + } + + return skipped +} + +func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string { + for _, v := range m.Consumes { + found := false + for _, vv := range primary.Consumes { + if v == vv { + found = true + + break + } + } + + if found { + // no warning here: we just skip it + continue + } + primary.Consumes = append(primary.Consumes, v) + } + + return []string{} +} + +func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string { + for _, v := range m.Produces { + found := false + for _, vv := range primary.Produces { + if v == vv { + found = true + + break + } + } + + if found { + // no warning here: we just skip it + continue + } + primary.Produces = append(primary.Produces, v) + } + + return []string{} +} + +func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) { + for _, v := range m.Tags { + found := false + for _, vv := range primary.Tags { + if v.Name == vv.Name { + found = true + + break + } + } + + if found { + warn := fmt.Sprintf( + "top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n", + v.Name, + ) + skipped = append(skipped, warn) + + continue + } + + primary.Tags = append(primary.Tags, v) + } + + return +} + 
+func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string { + for _, v := range m.Schemes { + found := false + for _, vv := range primary.Schemes { + if v == vv { + found = true + + break + } + } + + if found { + // no warning here: we just skip it + continue + } + primary.Schemes = append(primary.Schemes, v) + } + + return []string{} +} + +func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string { + var skipped, skippedInfo, skippedDocs []string + + primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions) + + // merging details in swagger top properties + if primary.Host == "" { + primary.Host = m.Host + } + + if primary.BasePath == "" { + primary.BasePath = m.BasePath + } + + if primary.Info == nil { + primary.Info = m.Info + } else if m.Info != nil { + skippedInfo = mergeInfo(primary.Info, m.Info) + skipped = append(skipped, skippedInfo...) + } + + if primary.ExternalDocs == nil { + primary.ExternalDocs = m.ExternalDocs + } else if m != nil { + skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs) + skipped = append(skipped, skippedDocs...) + } + + return skipped +} + +//nolint:unparam +func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string { + if primary.Description == "" { + primary.Description = m.Description + } + + if primary.URL == "" { + primary.URL = m.URL + } + + return nil +} + +func mergeInfo(primary *spec.Info, m *spec.Info) []string { + var sk, skipped []string + + primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions) + skipped = append(skipped, sk...) 
+ + if primary.Description == "" { + primary.Description = m.Description + } + + if primary.Title == "" { + primary.Description = m.Description + } + + if primary.TermsOfService == "" { + primary.TermsOfService = m.TermsOfService + } + + if primary.Version == "" { + primary.Version = m.Version + } + + if primary.Contact == nil { + primary.Contact = m.Contact + } else if m.Contact != nil { + var csk []string + primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions) + skipped = append(skipped, csk...) + + if primary.Contact.Name == "" { + primary.Contact.Name = m.Contact.Name + } + + if primary.Contact.URL == "" { + primary.Contact.URL = m.Contact.URL + } + + if primary.Contact.Email == "" { + primary.Contact.Email = m.Contact.Email + } + } + + if primary.License == nil { + primary.License = m.License + } else if m.License != nil { + var lsk []string + primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions) + skipped = append(skipped, lsk...) 
+ + if primary.License.Name == "" { + primary.License.Name = m.License.Name + } + + if primary.License.URL == "" { + primary.License.URL = m.License.URL + } + } + + return skipped +} + +func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) { + if primary == nil { + result = m + + return + } + + if m == nil { + result = primary + + return + } + + result = primary + for k, v := range m { + if _, found := primary[k]; found { + skipped = append(skipped, k) + + continue + } + + primary[k] = v + } + + return +} + +func initPrimary(primary *spec.Swagger) { + if primary.SecurityDefinitions == nil { + primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme) + } + + if primary.Security == nil { + primary.Security = make([]map[string][]string, 0, 10) + } + + if primary.Produces == nil { + primary.Produces = make([]string, 0, 10) + } + + if primary.Consumes == nil { + primary.Consumes = make([]string, 0, 10) + } + + if primary.Tags == nil { + primary.Tags = make([]spec.Tag, 0, 10) + } + + if primary.Schemes == nil { + primary.Schemes = make([]string, 0, 10) + } + + if primary.Paths == nil { + primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)} + } + + if primary.Paths.Paths == nil { + primary.Paths.Paths = make(map[string]spec.PathItem) + } + + if primary.Definitions == nil { + primary.Definitions = make(spec.Definitions) + } + + if primary.Parameters == nil { + primary.Parameters = make(map[string]spec.Parameter) + } + + if primary.Responses == nil { + primary.Responses = make(map[string]spec.Response) + } +} diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go new file mode 100644 index 0000000..ab190db --- /dev/null +++ b/vendor/github.com/go-openapi/analysis/schema.go @@ -0,0 +1,256 @@ +package analysis + +import ( + "errors" + + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" +) + +// SchemaOpts configures the schema 
analyzer +type SchemaOpts struct { + Schema *spec.Schema + Root interface{} + BasePath string + _ struct{} +} + +// Schema analysis, will classify the schema according to known +// patterns. +func Schema(opts SchemaOpts) (*AnalyzedSchema, error) { + if opts.Schema == nil { + return nil, errors.New("no schema to analyze") + } + + a := &AnalyzedSchema{ + schema: opts.Schema, + root: opts.Root, + basePath: opts.BasePath, + } + + a.initializeFlags() + a.inferKnownType() + a.inferEnum() + a.inferBaseType() + + if err := a.inferMap(); err != nil { + return nil, err + } + if err := a.inferArray(); err != nil { + return nil, err + } + + a.inferTuple() + + if err := a.inferFromRef(); err != nil { + return nil, err + } + + a.inferSimpleSchema() + + return a, nil +} + +// AnalyzedSchema indicates what the schema represents +type AnalyzedSchema struct { + schema *spec.Schema + root interface{} + basePath string + + hasProps bool + hasAllOf bool + hasItems bool + hasAdditionalProps bool + hasAdditionalItems bool + hasRef bool + + IsKnownType bool + IsSimpleSchema bool + IsArray bool + IsSimpleArray bool + IsMap bool + IsSimpleMap bool + IsExtendedObject bool + IsTuple bool + IsTupleWithExtra bool + IsBaseType bool + IsEnum bool +} + +// Inherits copies value fields from other onto this schema +func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) { + if other == nil { + return + } + a.hasProps = other.hasProps + a.hasAllOf = other.hasAllOf + a.hasItems = other.hasItems + a.hasAdditionalItems = other.hasAdditionalItems + a.hasAdditionalProps = other.hasAdditionalProps + a.hasRef = other.hasRef + + a.IsKnownType = other.IsKnownType + a.IsSimpleSchema = other.IsSimpleSchema + a.IsArray = other.IsArray + a.IsSimpleArray = other.IsSimpleArray + a.IsMap = other.IsMap + a.IsSimpleMap = other.IsSimpleMap + a.IsExtendedObject = other.IsExtendedObject + a.IsTuple = other.IsTuple + a.IsTupleWithExtra = other.IsTupleWithExtra + a.IsBaseType = other.IsBaseType + a.IsEnum = other.IsEnum 
+} + +func (a *AnalyzedSchema) inferFromRef() error { + if a.hasRef { + sch := new(spec.Schema) + sch.Ref = a.schema.Ref + err := spec.ExpandSchema(sch, a.root, nil) + if err != nil { + return err + } + rsch, err := Schema(SchemaOpts{ + Schema: sch, + Root: a.root, + BasePath: a.basePath, + }) + if err != nil { + // NOTE(fredbi): currently the only cause for errors is + // unresolved ref. Since spec.ExpandSchema() expands the + // schema recursively, there is no chance to get there, + // until we add more causes for error in this schema analysis. + return err + } + a.inherits(rsch) + } + + return nil +} + +func (a *AnalyzedSchema) inferSimpleSchema() { + a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap +} + +func (a *AnalyzedSchema) inferKnownType() { + tpe := a.schema.Type + format := a.schema.Format + a.IsKnownType = tpe.Contains("boolean") || + tpe.Contains("integer") || + tpe.Contains("number") || + tpe.Contains("string") || + (format != "" && strfmt.Default.ContainsName(format)) || + (a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems) +} + +func (a *AnalyzedSchema) inferMap() error { + if !a.isObjectType() { + return nil + } + + hasExtra := a.hasProps || a.hasAllOf + a.IsMap = a.hasAdditionalProps && !hasExtra + a.IsExtendedObject = a.hasAdditionalProps && hasExtra + + if !a.IsMap { + return nil + } + + // maps + if a.schema.AdditionalProperties.Schema != nil { + msch, err := Schema(SchemaOpts{ + Schema: a.schema.AdditionalProperties.Schema, + Root: a.root, + BasePath: a.basePath, + }) + if err != nil { + return err + } + a.IsSimpleMap = msch.IsSimpleSchema + } else if a.schema.AdditionalProperties.Allows { + a.IsSimpleMap = true + } + + return nil +} + +func (a *AnalyzedSchema) inferArray() error { + // an array has Items defined as an object schema, otherwise we qualify this JSON array as a tuple + // (yes, even if the Items array contains only one element). 
+ // arrays in JSON schema may be unrestricted (i.e no Items specified). + // Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays. + // + // NOTE: the spec package misses the distinction between: + // items: [] and items: {}, so we consider both arrays here. + a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil) + if a.IsArray && a.hasItems { + if a.schema.Items.Schema != nil { + itsch, err := Schema(SchemaOpts{ + Schema: a.schema.Items.Schema, + Root: a.root, + BasePath: a.basePath, + }) + if err != nil { + return err + } + + a.IsSimpleArray = itsch.IsSimpleSchema + } + } + + if a.IsArray && !a.hasItems { + a.IsSimpleArray = true + } + + return nil +} + +func (a *AnalyzedSchema) inferTuple() { + tuple := a.hasItems && a.schema.Items.Schemas != nil + a.IsTuple = tuple && !a.hasAdditionalItems + a.IsTupleWithExtra = tuple && a.hasAdditionalItems +} + +func (a *AnalyzedSchema) inferBaseType() { + if a.isObjectType() { + a.IsBaseType = a.schema.Discriminator != "" + } +} + +func (a *AnalyzedSchema) inferEnum() { + a.IsEnum = len(a.schema.Enum) > 0 +} + +func (a *AnalyzedSchema) initializeFlags() { + a.hasProps = len(a.schema.Properties) > 0 + a.hasAllOf = len(a.schema.AllOf) > 0 + a.hasRef = a.schema.Ref.String() != "" + + a.hasItems = a.schema.Items != nil && + (a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0) + + a.hasAdditionalProps = a.schema.AdditionalProperties != nil && + (a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows) + + a.hasAdditionalItems = a.schema.AdditionalItems != nil && + (a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows) +} + +func (a *AnalyzedSchema) isObjectType() bool { + return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object")) +} + +func (a *AnalyzedSchema) isArrayType() bool { + return !a.hasRef && (a.schema.Type != nil && 
a.schema.Type.Contains("array")) +} + +// isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex"). +// +// Complex means the schema is any of: +// - a simple type (primitive) +// - an array of something (items are possibly complex ; if this is the case, items will generate a definition) +// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will +// generate a definition) +func (a *AnalyzedSchema) isAnalyzedAsComplex() bool { + return !a.IsSimpleSchema && !a.IsArray && !a.IsMap +} diff --git a/vendor/github.com/go-openapi/errors/.gitattributes b/vendor/github.com/go-openapi/errors/.gitattributes new file mode 100644 index 0000000..a0717e4 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/.gitattributes @@ -0,0 +1 @@ +*.go text eol=lf \ No newline at end of file diff --git a/vendor/github.com/go-openapi/errors/.gitignore b/vendor/github.com/go-openapi/errors/.gitignore new file mode 100644 index 0000000..dd91ed6 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/.gitignore @@ -0,0 +1,2 @@ +secrets.yml +coverage.out diff --git a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml new file mode 100644 index 0000000..cf88ead --- /dev/null +++ b/vendor/github.com/go-openapi/errors/.golangci.yml @@ -0,0 +1,62 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - errname # this repo doesn't follow the convention advised by this linter + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - 
paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/errors/LICENSE b/vendor/github.com/go-openapi/errors/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/errors/README.md b/vendor/github.com/go-openapi/errors/README.md new file mode 100644 index 0000000..6d57ea5 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/README.md @@ -0,0 +1,8 @@ +# OpenAPI errors [![Build Status](https://github.com/go-openapi/errors/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/errors/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/errors.svg)](https://pkg.go.dev/github.com/go-openapi/errors) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/errors)](https://goreportcard.com/report/github.com/go-openapi/errors) + +Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit. 
diff --git a/vendor/github.com/go-openapi/errors/api.go b/vendor/github.com/go-openapi/errors/api.go new file mode 100644 index 0000000..5320cb9 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/api.go @@ -0,0 +1,192 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package errors + +import ( + "encoding/json" + "fmt" + "net/http" + "reflect" + "strings" +) + +// DefaultHTTPCode is used when the error Code cannot be used as an HTTP code. 
+var DefaultHTTPCode = http.StatusUnprocessableEntity + +// Error represents a error interface all swagger framework errors implement +type Error interface { + error + Code() int32 +} + +type apiError struct { + code int32 + message string +} + +func (a *apiError) Error() string { + return a.message +} + +func (a *apiError) Code() int32 { + return a.code +} + +// MarshalJSON implements the JSON encoding interface +func (a apiError) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "code": a.code, + "message": a.message, + }) +} + +// New creates a new API error with a code and a message +func New(code int32, message string, args ...interface{}) Error { + if len(args) > 0 { + return &apiError{ + code: code, + message: fmt.Sprintf(message, args...), + } + } + return &apiError{ + code: code, + message: message, + } +} + +// NotFound creates a new not found error +func NotFound(message string, args ...interface{}) Error { + if message == "" { + message = "Not found" + } + return New(http.StatusNotFound, fmt.Sprintf(message, args...)) +} + +// NotImplemented creates a new not implemented error +func NotImplemented(message string) Error { + return New(http.StatusNotImplemented, message) +} + +// MethodNotAllowedError represents an error for when the path matches but the method doesn't +type MethodNotAllowedError struct { + code int32 + Allowed []string + message string +} + +func (m *MethodNotAllowedError) Error() string { + return m.message +} + +// Code the error code +func (m *MethodNotAllowedError) Code() int32 { + return m.code +} + +// MarshalJSON implements the JSON encoding interface +func (m MethodNotAllowedError) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "code": m.code, + "message": m.message, + "allowed": m.Allowed, + }) +} + +func errorAsJSON(err Error) []byte { + //nolint:errchkjson + b, _ := json.Marshal(struct { + Code int32 `json:"code"` + Message string `json:"message"` + }{err.Code(), 
err.Error()}) + return b +} + +func flattenComposite(errs *CompositeError) *CompositeError { + var res []error + for _, er := range errs.Errors { + switch e := er.(type) { + case *CompositeError: + if e != nil && len(e.Errors) > 0 { + flat := flattenComposite(e) + if len(flat.Errors) > 0 { + res = append(res, flat.Errors...) + } + } + default: + if e != nil { + res = append(res, e) + } + } + } + return CompositeValidationError(res...) +} + +// MethodNotAllowed creates a new method not allowed error +func MethodNotAllowed(requested string, allow []string) Error { + msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ",")) + return &MethodNotAllowedError{ + code: http.StatusMethodNotAllowed, + Allowed: allow, + message: msg, + } +} + +// ServeError implements the http error handler interface +func ServeError(rw http.ResponseWriter, r *http.Request, err error) { + rw.Header().Set("Content-Type", "application/json") + switch e := err.(type) { + case *CompositeError: + er := flattenComposite(e) + // strips composite errors to first element only + if len(er.Errors) > 0 { + ServeError(rw, r, er.Errors[0]) + } else { + // guard against empty CompositeError (invalid construct) + ServeError(rw, r, nil) + } + case *MethodNotAllowedError: + rw.Header().Add("Allow", strings.Join(e.Allowed, ",")) + rw.WriteHeader(asHTTPCode(int(e.Code()))) + if r == nil || r.Method != http.MethodHead { + _, _ = rw.Write(errorAsJSON(e)) + } + case Error: + value := reflect.ValueOf(e) + if value.Kind() == reflect.Ptr && value.IsNil() { + rw.WriteHeader(http.StatusInternalServerError) + _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error"))) + return + } + rw.WriteHeader(asHTTPCode(int(e.Code()))) + if r == nil || r.Method != http.MethodHead { + _, _ = rw.Write(errorAsJSON(e)) + } + case nil: + rw.WriteHeader(http.StatusInternalServerError) + _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error"))) + 
default: + rw.WriteHeader(http.StatusInternalServerError) + if r == nil || r.Method != http.MethodHead { + _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error()))) + } + } +} + +func asHTTPCode(input int) int { + if input >= 600 { + return DefaultHTTPCode + } + return input +} diff --git a/vendor/github.com/go-openapi/errors/auth.go b/vendor/github.com/go-openapi/errors/auth.go new file mode 100644 index 0000000..0545b50 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/auth.go @@ -0,0 +1,22 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package errors + +import "net/http" + +// Unauthenticated returns an unauthenticated error +func Unauthenticated(scheme string) Error { + return New(http.StatusUnauthorized, "unauthenticated for %s", scheme) +} diff --git a/vendor/github.com/go-openapi/errors/doc.go b/vendor/github.com/go-openapi/errors/doc.go new file mode 100644 index 0000000..af01190 --- /dev/null +++ b/vendor/github.com/go-openapi/errors/doc.go @@ -0,0 +1,26 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package errors provides an Error interface and several concrete types +implementing this interface to manage API errors and JSON-schema validation +errors. + +A middleware handler ServeError() is provided to serve the errors types +it defines. + +It is used throughout the various go-openapi toolkit libraries +(https://github.com/go-openapi). +*/ +package errors diff --git a/vendor/github.com/go-openapi/errors/headers.go b/vendor/github.com/go-openapi/errors/headers.go new file mode 100644 index 0000000..dfebe8f --- /dev/null +++ b/vendor/github.com/go-openapi/errors/headers.go @@ -0,0 +1,103 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package errors + +import ( + "encoding/json" + "fmt" + "net/http" +) + +// Validation represents a failure of a precondition +type Validation struct { + code int32 + Name string + In string + Value interface{} + message string + Values []interface{} +} + +func (e *Validation) Error() string { + return e.message +} + +// Code the error code +func (e *Validation) Code() int32 { + return e.code +} + +// MarshalJSON implements the JSON encoding interface +func (e Validation) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "code": e.code, + "message": e.message, + "in": e.In, + "name": e.Name, + "value": e.Value, + "values": e.Values, + }) +} + +// ValidateName sets the name for a validation or updates it for a nested property +func (e *Validation) ValidateName(name string) *Validation { + if name != "" { + if e.Name == "" { + e.Name = name + e.message = name + e.message + } else { + e.Name = name + "." + e.Name + e.message = name + "." + e.message + } + } + return e +} + +const ( + contentTypeFail = `unsupported media type %q, only %v are allowed` + responseFormatFail = `unsupported media type requested, only %v are available` +) + +// InvalidContentType error for an invalid content type +func InvalidContentType(value string, allowed []string) *Validation { + values := make([]interface{}, 0, len(allowed)) + for _, v := range allowed { + values = append(values, v) + } + return &Validation{ + code: http.StatusUnsupportedMediaType, + Name: "Content-Type", + In: "header", + Value: value, + Values: values, + message: fmt.Sprintf(contentTypeFail, value, allowed), + } +} + +// InvalidResponseFormat error for an unacceptable response format request +func InvalidResponseFormat(value string, allowed []string) *Validation { + values := make([]interface{}, 0, len(allowed)) + for _, v := range allowed { + values = append(values, v) + } + return &Validation{ + code: http.StatusNotAcceptable, + Name: "Accept", + In: "header", + Value: value, + 
Values: values, + message: fmt.Sprintf(responseFormatFail, allowed), + } +} diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go new file mode 100644 index 0000000..963472d --- /dev/null +++ b/vendor/github.com/go-openapi/errors/middleware.go @@ -0,0 +1,50 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package errors + +import ( + "bytes" + "fmt" + "strings" +) + +// APIVerificationFailed is an error that contains all the missing info for a mismatched section +// between the api registrations and the api spec +type APIVerificationFailed struct { + Section string `json:"section,omitempty"` + MissingSpecification []string `json:"missingSpecification,omitempty"` + MissingRegistration []string `json:"missingRegistration,omitempty"` +} + +func (v *APIVerificationFailed) Error() string { + buf := bytes.NewBuffer(nil) + + hasRegMissing := len(v.MissingRegistration) > 0 + hasSpecMissing := len(v.MissingSpecification) > 0 + + if hasRegMissing { + buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section)) + } + + if hasRegMissing && hasSpecMissing { + buf.WriteString("\n") + } + + if hasSpecMissing { + buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section)) + } + + return buf.String() +} diff --git 
a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go new file mode 100644 index 0000000..5096e1e --- /dev/null +++ b/vendor/github.com/go-openapi/errors/parsing.go @@ -0,0 +1,78 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package errors + +import ( + "encoding/json" + "fmt" +) + +// ParseError represents a parsing error +type ParseError struct { + code int32 + Name string + In string + Value string + Reason error + message string +} + +func (e *ParseError) Error() string { + return e.message +} + +// Code returns the http status code for this error +func (e *ParseError) Code() int32 { + return e.code +} + +// MarshalJSON implements the JSON encoding interface +func (e ParseError) MarshalJSON() ([]byte, error) { + var reason string + if e.Reason != nil { + reason = e.Reason.Error() + } + return json.Marshal(map[string]interface{}{ + "code": e.code, + "message": e.message, + "in": e.In, + "name": e.Name, + "value": e.Value, + "reason": reason, + }) +} + +const ( + parseErrorTemplContent = `parsing %s %s from %q failed, because %s` + parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s` +) + +// NewParseError creates a new parse error +func NewParseError(name, in, value string, reason error) *ParseError { + var msg string + if in == "" { + msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason) + } else { + msg = 
fmt.Sprintf(parseErrorTemplContent, name, in, value, reason) + } + return &ParseError{ + code: 400, + Name: name, + In: in, + Value: value, + Reason: reason, + message: msg, + } +} diff --git a/vendor/github.com/go-openapi/errors/schema.go b/vendor/github.com/go-openapi/errors/schema.go new file mode 100644 index 0000000..cf7ac2e --- /dev/null +++ b/vendor/github.com/go-openapi/errors/schema.go @@ -0,0 +1,615 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package errors + +import ( + "encoding/json" + "fmt" + "strings" +) + +const ( + invalidType = "%s is an invalid type name" + typeFail = "%s in %s must be of type %s" + typeFailWithData = "%s in %s must be of type %s: %q" + typeFailWithError = "%s in %s must be of type %s, because: %s" + requiredFail = "%s in %s is required" + readOnlyFail = "%s in %s is readOnly" + tooLongMessage = "%s in %s should be at most %d chars long" + tooShortMessage = "%s in %s should be at least %d chars long" + patternFail = "%s in %s should match '%s'" + enumFail = "%s in %s should be one of %v" + multipleOfFail = "%s in %s should be a multiple of %v" + maxIncFail = "%s in %s should be less than or equal to %v" + maxExcFail = "%s in %s should be less than %v" + minIncFail = "%s in %s should be greater than or equal to %v" + minExcFail = "%s in %s should be greater than %v" + uniqueFail = "%s in %s shouldn't contain duplicates" + maxItemsFail = "%s in %s should have at most %d items" + minItemsFail = "%s in %s should have at least %d items" + typeFailNoIn = "%s must be of type %s" + typeFailWithDataNoIn = "%s must be of type %s: %q" + typeFailWithErrorNoIn = "%s must be of type %s, because: %s" + requiredFailNoIn = "%s is required" + readOnlyFailNoIn = "%s is readOnly" + tooLongMessageNoIn = "%s should be at most %d chars long" + tooShortMessageNoIn = "%s should be at least %d chars long" + patternFailNoIn = "%s should match '%s'" + enumFailNoIn = "%s should be one of %v" + multipleOfFailNoIn = "%s should be a multiple of %v" + maxIncFailNoIn = "%s should be less than or equal to %v" + maxExcFailNoIn = "%s should be less than %v" + minIncFailNoIn = "%s should be greater than or equal to %v" + minExcFailNoIn = "%s should be greater than %v" + uniqueFailNoIn = "%s shouldn't contain duplicates" + maxItemsFailNoIn = "%s should have at most %d items" + minItemsFailNoIn = "%s should have at least %d items" + noAdditionalItems = "%s in %s can't have additional items" + noAdditionalItemsNoIn 
= "%s can't have additional items" + tooFewProperties = "%s in %s should have at least %d properties" + tooFewPropertiesNoIn = "%s should have at least %d properties" + tooManyProperties = "%s in %s should have at most %d properties" + tooManyPropertiesNoIn = "%s should have at most %d properties" + unallowedProperty = "%s.%s in %s is a forbidden property" + unallowedPropertyNoIn = "%s.%s is a forbidden property" + failedAllPatternProps = "%s.%s in %s failed all pattern properties" + failedAllPatternPropsNoIn = "%s.%s failed all pattern properties" + multipleOfMustBePositive = "factor MultipleOf declared for %s must be positive: %v" +) + +// All code responses can be used to differentiate errors for different handling +// by the consuming program +const ( + // CompositeErrorCode remains 422 for backwards-compatibility + // and to separate it from validation errors with cause + CompositeErrorCode = 422 + // InvalidTypeCode is used for any subclass of invalid types + InvalidTypeCode = 600 + iota + RequiredFailCode + TooLongFailCode + TooShortFailCode + PatternFailCode + EnumFailCode + MultipleOfFailCode + MaxFailCode + MinFailCode + UniqueFailCode + MaxItemsFailCode + MinItemsFailCode + NoAdditionalItemsCode + TooFewPropertiesCode + TooManyPropertiesCode + UnallowedPropertyCode + FailedAllPatternPropsCode + MultipleOfMustBePositiveCode + ReadOnlyFailCode +) + +// CompositeError is an error that groups several errors together +type CompositeError struct { + Errors []error + code int32 + message string +} + +// Code for this error +func (c *CompositeError) Code() int32 { + return c.code +} + +func (c *CompositeError) Error() string { + if len(c.Errors) > 0 { + msgs := []string{c.message + ":"} + for _, e := range c.Errors { + msgs = append(msgs, e.Error()) + } + return strings.Join(msgs, "\n") + } + return c.message +} + +func (c *CompositeError) Unwrap() []error { + return c.Errors +} + +// MarshalJSON implements the JSON encoding interface +func (c CompositeError) 
MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]interface{}{ + "code": c.code, + "message": c.message, + "errors": c.Errors, + }) +} + +// CompositeValidationError an error to wrap a bunch of other errors +func CompositeValidationError(errors ...error) *CompositeError { + return &CompositeError{ + code: CompositeErrorCode, + Errors: append(make([]error, 0, len(errors)), errors...), + message: "validation failure list", + } +} + +// ValidateName recursively sets the name for all validations or updates them for nested properties +func (c *CompositeError) ValidateName(name string) *CompositeError { + for i, e := range c.Errors { + if ve, ok := e.(*Validation); ok { + c.Errors[i] = ve.ValidateName(name) + } else if ce, ok := e.(*CompositeError); ok { + c.Errors[i] = ce.ValidateName(name) + } + } + + return c +} + +// FailedAllPatternProperties an error for when the property doesn't match a pattern +func FailedAllPatternProperties(name, in, key string) *Validation { + msg := fmt.Sprintf(failedAllPatternProps, name, key, in) + if in == "" { + msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key) + } + return &Validation{ + code: FailedAllPatternPropsCode, + Name: name, + In: in, + Value: key, + message: msg, + } +} + +// PropertyNotAllowed an error for when the property doesn't match a pattern +func PropertyNotAllowed(name, in, key string) *Validation { + msg := fmt.Sprintf(unallowedProperty, name, key, in) + if in == "" { + msg = fmt.Sprintf(unallowedPropertyNoIn, name, key) + } + return &Validation{ + code: UnallowedPropertyCode, + Name: name, + In: in, + Value: key, + message: msg, + } +} + +// TooFewProperties an error for an object with too few properties +func TooFewProperties(name, in string, n int64) *Validation { + msg := fmt.Sprintf(tooFewProperties, name, in, n) + if in == "" { + msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n) + } + return &Validation{ + code: TooFewPropertiesCode, + Name: name, + In: in, + Value: n, + message: msg, + } +} 
+ +// TooManyProperties an error for an object with too many properties +func TooManyProperties(name, in string, n int64) *Validation { + msg := fmt.Sprintf(tooManyProperties, name, in, n) + if in == "" { + msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n) + } + return &Validation{ + code: TooManyPropertiesCode, + Name: name, + In: in, + Value: n, + message: msg, + } +} + +// AdditionalItemsNotAllowed an error for invalid additional items +func AdditionalItemsNotAllowed(name, in string) *Validation { + msg := fmt.Sprintf(noAdditionalItems, name, in) + if in == "" { + msg = fmt.Sprintf(noAdditionalItemsNoIn, name) + } + return &Validation{ + code: NoAdditionalItemsCode, + Name: name, + In: in, + message: msg, + } +} + +// InvalidCollectionFormat another flavor of invalid type error +func InvalidCollectionFormat(name, in, format string) *Validation { + return &Validation{ + code: InvalidTypeCode, + Name: name, + In: in, + Value: format, + message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name), + } +} + +// InvalidTypeName an error for when the type is invalid +func InvalidTypeName(typeName string) *Validation { + return &Validation{ + code: InvalidTypeCode, + Value: typeName, + message: fmt.Sprintf(invalidType, typeName), + } +} + +// InvalidType creates an error for when the type is invalid +func InvalidType(name, in, typeName string, value interface{}) *Validation { + var message string + + if in != "" { + switch value.(type) { + case string: + message = fmt.Sprintf(typeFailWithData, name, in, typeName, value) + case error: + message = fmt.Sprintf(typeFailWithError, name, in, typeName, value) + default: + message = fmt.Sprintf(typeFail, name, in, typeName) + } + } else { + switch value.(type) { + case string: + message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value) + case error: + message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value) + default: + message = fmt.Sprintf(typeFailNoIn, name, 
typeName) + } + } + + return &Validation{ + code: InvalidTypeCode, + Name: name, + In: in, + Value: value, + message: message, + } + +} + +// DuplicateItems error for when an array contains duplicates +func DuplicateItems(name, in string) *Validation { + msg := fmt.Sprintf(uniqueFail, name, in) + if in == "" { + msg = fmt.Sprintf(uniqueFailNoIn, name) + } + return &Validation{ + code: UniqueFailCode, + Name: name, + In: in, + message: msg, + } +} + +// TooManyItems error for when an array contains too many items +func TooManyItems(name, in string, max int64, value interface{}) *Validation { + msg := fmt.Sprintf(maxItemsFail, name, in, max) + if in == "" { + msg = fmt.Sprintf(maxItemsFailNoIn, name, max) + } + + return &Validation{ + code: MaxItemsFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// TooFewItems error for when an array contains too few items +func TooFewItems(name, in string, min int64, value interface{}) *Validation { + msg := fmt.Sprintf(minItemsFail, name, in, min) + if in == "" { + msg = fmt.Sprintf(minItemsFailNoIn, name, min) + } + return &Validation{ + code: MinItemsFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// ExceedsMaximumInt error for when maximum validation fails +func ExceedsMaximumInt(name, in string, max int64, exclusive bool, value interface{}) *Validation { + var message string + if in == "" { + m := maxIncFailNoIn + if exclusive { + m = maxExcFailNoIn + } + message = fmt.Sprintf(m, name, max) + } else { + m := maxIncFail + if exclusive { + m = maxExcFail + } + message = fmt.Sprintf(m, name, in, max) + } + return &Validation{ + code: MaxFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// ExceedsMaximumUint error for when maximum validation fails +func ExceedsMaximumUint(name, in string, max uint64, exclusive bool, value interface{}) *Validation { + var message string + if in == "" { + m := maxIncFailNoIn + if exclusive { + m = maxExcFailNoIn + } 
+ message = fmt.Sprintf(m, name, max) + } else { + m := maxIncFail + if exclusive { + m = maxExcFail + } + message = fmt.Sprintf(m, name, in, max) + } + return &Validation{ + code: MaxFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// ExceedsMaximum error for when maximum validation fails +func ExceedsMaximum(name, in string, max float64, exclusive bool, value interface{}) *Validation { + var message string + if in == "" { + m := maxIncFailNoIn + if exclusive { + m = maxExcFailNoIn + } + message = fmt.Sprintf(m, name, max) + } else { + m := maxIncFail + if exclusive { + m = maxExcFail + } + message = fmt.Sprintf(m, name, in, max) + } + return &Validation{ + code: MaxFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// ExceedsMinimumInt error for when minimum validation fails +func ExceedsMinimumInt(name, in string, min int64, exclusive bool, value interface{}) *Validation { + var message string + if in == "" { + m := minIncFailNoIn + if exclusive { + m = minExcFailNoIn + } + message = fmt.Sprintf(m, name, min) + } else { + m := minIncFail + if exclusive { + m = minExcFail + } + message = fmt.Sprintf(m, name, in, min) + } + return &Validation{ + code: MinFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// ExceedsMinimumUint error for when minimum validation fails +func ExceedsMinimumUint(name, in string, min uint64, exclusive bool, value interface{}) *Validation { + var message string + if in == "" { + m := minIncFailNoIn + if exclusive { + m = minExcFailNoIn + } + message = fmt.Sprintf(m, name, min) + } else { + m := minIncFail + if exclusive { + m = minExcFail + } + message = fmt.Sprintf(m, name, in, min) + } + return &Validation{ + code: MinFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// ExceedsMinimum error for when minimum validation fails +func ExceedsMinimum(name, in string, min float64, exclusive bool, value interface{}) *Validation 
{ + var message string + if in == "" { + m := minIncFailNoIn + if exclusive { + m = minExcFailNoIn + } + message = fmt.Sprintf(m, name, min) + } else { + m := minIncFail + if exclusive { + m = minExcFail + } + message = fmt.Sprintf(m, name, in, min) + } + return &Validation{ + code: MinFailCode, + Name: name, + In: in, + Value: value, + message: message, + } +} + +// NotMultipleOf error for when multiple of validation fails +func NotMultipleOf(name, in string, multiple, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple) + } else { + msg = fmt.Sprintf(multipleOfFail, name, in, multiple) + } + return &Validation{ + code: MultipleOfFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// EnumFail error for when an enum validation fails +func EnumFail(name, in string, value interface{}, values []interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(enumFailNoIn, name, values) + } else { + msg = fmt.Sprintf(enumFail, name, in, values) + } + + return &Validation{ + code: EnumFailCode, + Name: name, + In: in, + Value: value, + Values: values, + message: msg, + } +} + +// Required error for when a value is missing +func Required(name, in string, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(requiredFailNoIn, name) + } else { + msg = fmt.Sprintf(requiredFail, name, in) + } + return &Validation{ + code: RequiredFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// ReadOnly error for when a value is present in request +func ReadOnly(name, in string, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(readOnlyFailNoIn, name) + } else { + msg = fmt.Sprintf(readOnlyFail, name, in) + } + return &Validation{ + code: ReadOnlyFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// TooLong error for when a string is too long +func TooLong(name, in 
string, max int64, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(tooLongMessageNoIn, name, max) + } else { + msg = fmt.Sprintf(tooLongMessage, name, in, max) + } + return &Validation{ + code: TooLongFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// TooShort error for when a string is too short +func TooShort(name, in string, min int64, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(tooShortMessageNoIn, name, min) + } else { + msg = fmt.Sprintf(tooShortMessage, name, in, min) + } + + return &Validation{ + code: TooShortFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// FailedPattern error for when a string fails a regex pattern match +// the pattern that is returned is the ECMA syntax version of the pattern not the golang version. +func FailedPattern(name, in, pattern string, value interface{}) *Validation { + var msg string + if in == "" { + msg = fmt.Sprintf(patternFailNoIn, name, pattern) + } else { + msg = fmt.Sprintf(patternFail, name, in, pattern) + } + + return &Validation{ + code: PatternFailCode, + Name: name, + In: in, + Value: value, + message: msg, + } +} + +// MultipleOfMustBePositive error for when a +// multipleOf factor is negative +func MultipleOfMustBePositive(name, in string, factor interface{}) *Validation { + return &Validation{ + code: MultipleOfMustBePositiveCode, + Name: name, + In: in, + Value: factor, + message: fmt.Sprintf(multipleOfMustBePositive, name, factor), + } +} diff --git a/vendor/github.com/go-openapi/jsonpointer/.editorconfig b/vendor/github.com/go-openapi/jsonpointer/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space 
+indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/jsonpointer/.gitignore b/vendor/github.com/go-openapi/jsonpointer/.gitignore new file mode 100644 index 0000000..769c244 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.gitignore @@ -0,0 +1 @@ +secrets.yml diff --git a/vendor/github.com/go-openapi/jsonpointer/.golangci.yml b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ 
b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/jsonpointer/LICENSE b/vendor/github.com/go-openapi/jsonpointer/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md new file mode 100644 index 0000000..0108f1d --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/README.md @@ -0,0 +1,19 @@ +# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer) + +An implementation of JSON Pointer - Go language + +## Status +Completed YES + +Tested YES + +## References +http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 + +### Note +The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented. diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go new file mode 100644 index 0000000..d970c7c --- /dev/null +++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go @@ -0,0 +1,531 @@ +// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// author sigu-399 +// author-github https://github.com/sigu-399 +// author-mail sigu.399@gmail.com +// +// repository-name jsonpointer +// repository-desc An implementation of JSON Pointer - Go language +// +// description Main and unique file. +// +// created 25-02-2013 + +package jsonpointer + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/go-openapi/swag" +) + +const ( + emptyPointer = `` + pointerSeparator = `/` + + invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator + notFound = `Can't find the pointer in the document` +) + +var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem() +var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem() + +// JSONPointable is an interface for structs to implement when they need to customize the +// json pointer process +type JSONPointable interface { + JSONLookup(string) (any, error) +} + +// JSONSetable is an interface for structs to implement when they need to customize the +// json pointer process +type JSONSetable interface { + JSONSet(string, any) error +} + +// New creates a new json pointer for the given string +func New(jsonPointerString string) (Pointer, error) { + + var p Pointer + err := p.parse(jsonPointerString) + return p, err + +} + +// Pointer the json pointer reprsentation +type Pointer struct { + referenceTokens []string +} + +// "Constructor", parses the given string JSON pointer +func (p *Pointer) parse(jsonPointerString string) error { + + var err error + + if jsonPointerString != 
emptyPointer { + if !strings.HasPrefix(jsonPointerString, pointerSeparator) { + err = errors.New(invalidStart) + } else { + referenceTokens := strings.Split(jsonPointerString, pointerSeparator) + p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...) + } + } + + return err +} + +// Get uses the pointer to retrieve a value from a JSON document +func (p *Pointer) Get(document any) (any, reflect.Kind, error) { + return p.get(document, swag.DefaultJSONNameProvider) +} + +// Set uses the pointer to set a value from a JSON document +func (p *Pointer) Set(document any, value any) (any, error) { + return document, p.set(document, value, swag.DefaultJSONNameProvider) +} + +// GetForToken gets a value for a json pointer token 1 level deep +func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) { + return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider) +} + +// SetForToken gets a value for a json pointer token 1 level deep +func SetForToken(document any, decodedToken string, value any) (any, error) { + return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider) +} + +func isNil(input any) bool { + if input == nil { + return true + } + + kind := reflect.TypeOf(input).Kind() + switch kind { //nolint:exhaustive + case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan: + return reflect.ValueOf(input).IsNil() + default: + return false + } +} + +func getSingleImpl(node any, decodedToken string, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { + rValue := reflect.Indirect(reflect.ValueOf(node)) + kind := rValue.Kind() + if isNil(node) { + return nil, kind, fmt.Errorf("nil value has not field %q", decodedToken) + } + + switch typed := node.(type) { + case JSONPointable: + r, err := typed.JSONLookup(decodedToken) + if err != nil { + return nil, kind, err + } + return r, kind, nil + case *any: // case of a pointer to interface, that is not resolved by reflect.Indirect + return 
getSingleImpl(*typed, decodedToken, nameProvider) + } + + switch kind { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return nil, kind, fmt.Errorf("object has no field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + return fld.Interface(), kind, nil + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + mv := rValue.MapIndex(kv) + + if mv.IsValid() { + return mv.Interface(), kind, nil + } + return nil, kind, fmt.Errorf("object has no key %q", decodedToken) + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return nil, kind, err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + return elem.Interface(), kind, nil + + default: + return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken) + } + +} + +func setSingleImpl(node, data any, decodedToken string, nameProvider *swag.NameProvider) error { + rValue := reflect.Indirect(reflect.ValueOf(node)) + + if ns, ok := node.(JSONSetable); ok { // pointer impl + return ns.JSONSet(decodedToken, data) + } + + if rValue.Type().Implements(jsonSetableType) { + return node.(JSONSetable).JSONSet(decodedToken, data) + } + + switch rValue.Kind() { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return fmt.Errorf("object has no field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + if fld.IsValid() { + fld.Set(reflect.ValueOf(data)) + } + return nil + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + rValue.SetMapIndex(kv, reflect.ValueOf(data)) + return nil + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= 
sLength { + return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + if !elem.CanSet() { + return fmt.Errorf("can't set slice index %s to %v", decodedToken, data) + } + elem.Set(reflect.ValueOf(data)) + return nil + + default: + return fmt.Errorf("invalid token reference %q", decodedToken) + } + +} + +func (p *Pointer) get(node any, nameProvider *swag.NameProvider) (any, reflect.Kind, error) { + + if nameProvider == nil { + nameProvider = swag.DefaultJSONNameProvider + } + + kind := reflect.Invalid + + // Full document when empty + if len(p.referenceTokens) == 0 { + return node, kind, nil + } + + for _, token := range p.referenceTokens { + + decodedToken := Unescape(token) + + r, knd, err := getSingleImpl(node, decodedToken, nameProvider) + if err != nil { + return nil, knd, err + } + node = r + } + + rValue := reflect.ValueOf(node) + kind = rValue.Kind() + + return node, kind, nil +} + +func (p *Pointer) set(node, data any, nameProvider *swag.NameProvider) error { + knd := reflect.ValueOf(node).Kind() + + if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array { + return errors.New("only structs, pointers, maps and slices are supported for setting values") + } + + if nameProvider == nil { + nameProvider = swag.DefaultJSONNameProvider + } + + // Full document when empty + if len(p.referenceTokens) == 0 { + return nil + } + + lastI := len(p.referenceTokens) - 1 + for i, token := range p.referenceTokens { + isLastToken := i == lastI + decodedToken := Unescape(token) + + if isLastToken { + + return setSingleImpl(node, data, decodedToken, nameProvider) + } + + rValue := reflect.Indirect(reflect.ValueOf(node)) + kind := rValue.Kind() + + if rValue.Type().Implements(jsonPointableType) { + r, err := node.(JSONPointable).JSONLookup(decodedToken) + if err != nil { + return err + } + fld := reflect.ValueOf(r) + if fld.CanAddr() && fld.Kind() != 
reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr { + node = fld.Addr().Interface() + continue + } + node = r + continue + } + + switch kind { //nolint:exhaustive + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return fmt.Errorf("object has no field %q", decodedToken) + } + fld := rValue.FieldByName(nm) + if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr { + node = fld.Addr().Interface() + continue + } + node = fld.Interface() + + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + mv := rValue.MapIndex(kv) + + if !mv.IsValid() { + return fmt.Errorf("object has no key %q", decodedToken) + } + if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr { + node = mv.Addr().Interface() + continue + } + node = mv.Interface() + + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return err + } + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex) + } + + elem := rValue.Index(tokenIndex) + if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr { + node = elem.Addr().Interface() + continue + } + node = elem.Interface() + + default: + return fmt.Errorf("invalid token reference %q", decodedToken) + } + + } + + return nil +} + +// DecodedTokens returns the decoded tokens +func (p *Pointer) DecodedTokens() []string { + result := make([]string, 0, len(p.referenceTokens)) + for _, t := range p.referenceTokens { + result = append(result, Unescape(t)) + } + return result +} + +// IsEmpty returns true if this is an empty json pointer +// this indicates that it 
points to the root document +func (p *Pointer) IsEmpty() bool { + return len(p.referenceTokens) == 0 +} + +// Pointer to string representation function +func (p *Pointer) String() string { + + if len(p.referenceTokens) == 0 { + return emptyPointer + } + + pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator) + + return pointerString +} + +func (p *Pointer) Offset(document string) (int64, error) { + dec := json.NewDecoder(strings.NewReader(document)) + var offset int64 + for _, ttk := range p.DecodedTokens() { + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + offset, err = offsetSingleObject(dec, ttk) + if err != nil { + return 0, err + } + case '[': + offset, err = offsetSingleArray(dec, ttk) + if err != nil { + return 0, err + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return offset, nil +} + +func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) { + for dec.More() { + offset := dec.InputOffset() + tk, err := dec.Token() + if err != nil { + return 0, err + } + switch tk := tk.(type) { + case json.Delim: + switch tk { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + if err = drainSingle(dec); err != nil { + return 0, err + } + } + case string: + if tk == decodedToken { + return offset, nil + } + default: + return 0, fmt.Errorf("invalid token %#v", tk) + } + } + return 0, fmt.Errorf("token reference %q not found", decodedToken) +} + +func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) { + idx, err := strconv.Atoi(decodedToken) + if err != nil { + return 0, fmt.Errorf("token reference %q is not a number: %v", decodedToken, err) + } + var i int + for i = 0; i < idx && dec.More(); i++ { + tk, err := dec.Token() + if err != nil { + return 0, err + } + + if delim, isDelim := 
tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return 0, err + } + case '[': + if err = drainSingle(dec); err != nil { + return 0, err + } + } + } + } + + if !dec.More() { + return 0, fmt.Errorf("token reference %q not found", decodedToken) + } + return dec.InputOffset(), nil +} + +// drainSingle drains a single level of object or array. +// The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed. +func drainSingle(dec *json.Decoder) error { + for dec.More() { + tk, err := dec.Token() + if err != nil { + return err + } + if delim, isDelim := tk.(json.Delim); isDelim { + switch delim { + case '{': + if err = drainSingle(dec); err != nil { + return err + } + case '[': + if err = drainSingle(dec); err != nil { + return err + } + } + } + } + + // Consumes the ending delim + if _, err := dec.Token(); err != nil { + return err + } + return nil +} + +// Specific JSON pointer encoding here +// ~0 => ~ +// ~1 => / +// ... 
and vice versa + +const ( + encRefTok0 = `~0` + encRefTok1 = `~1` + decRefTok0 = `~` + decRefTok1 = `/` +) + +// Unescape unescapes a json pointer reference token string to the original representation +func Unescape(token string) string { + step1 := strings.ReplaceAll(token, encRefTok1, decRefTok1) + step2 := strings.ReplaceAll(step1, encRefTok0, decRefTok0) + return step2 +} + +// Escape escapes a pointer reference token string +func Escape(token string) string { + step1 := strings.ReplaceAll(token, decRefTok0, encRefTok0) + step2 := strings.ReplaceAll(step1, decRefTok1, encRefTok1) + return step2 +} diff --git a/vendor/github.com/go-openapi/jsonreference/.gitignore b/vendor/github.com/go-openapi/jsonreference/.gitignore new file mode 100644 index 0000000..769c244 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/.gitignore @@ -0,0 +1 @@ +secrets.yml diff --git a/vendor/github.com/go-openapi/jsonreference/.golangci.yml b/vendor/github.com/go-openapi/jsonreference/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase 
diff --git a/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. 
+ +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. 
+ +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/jsonreference/LICENSE b/vendor/github.com/go-openapi/jsonreference/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/jsonreference/README.md b/vendor/github.com/go-openapi/jsonreference/README.md new file mode 100644 index 0000000..c7fc204 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/README.md @@ -0,0 +1,19 @@ +# gojsonreference [![Build Status](https://github.com/go-openapi/jsonreference/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonreference/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonreference.svg)](https://pkg.go.dev/github.com/go-openapi/jsonreference) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonreference)](https://goreportcard.com/report/github.com/go-openapi/jsonreference) + +An implementation of JSON Reference - Go language + +## Status +Feature complete. 
Stable API + +## Dependencies +* https://github.com/go-openapi/jsonpointer + +## References + +* http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07 +* http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03 diff --git a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go new file mode 100644 index 0000000..f0610cf --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go @@ -0,0 +1,69 @@ +package internal + +import ( + "net/url" + "regexp" + "strings" +) + +const ( + defaultHTTPPort = ":80" + defaultHTTPSPort = ":443" +) + +// Regular expressions used by the normalizations +var rxPort = regexp.MustCompile(`(:\d+)/?$`) +var rxDupSlashes = regexp.MustCompile(`/{2,}`) + +// NormalizeURL will normalize the specified URL +// This was added to replace a previous call to the no longer maintained purell library: +// The call that was used looked like the following: +// +// url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes)) +// +// To explain all that was included in the call above, purell.FlagsSafe was really just the following: +// - FlagLowercaseScheme +// - FlagLowercaseHost +// - FlagRemoveDefaultPort +// - FlagRemoveDuplicateSlashes (and this was mixed in with the |) +// +// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment. 
+func NormalizeURL(u *url.URL) { + lowercaseScheme(u) + lowercaseHost(u) + removeDefaultPort(u) + removeDuplicateSlashes(u) + + u.RawPath = "" + u.RawFragment = "" +} + +func lowercaseScheme(u *url.URL) { + if len(u.Scheme) > 0 { + u.Scheme = strings.ToLower(u.Scheme) + } +} + +func lowercaseHost(u *url.URL) { + if len(u.Host) > 0 { + u.Host = strings.ToLower(u.Host) + } +} + +func removeDefaultPort(u *url.URL) { + if len(u.Host) > 0 { + scheme := strings.ToLower(u.Scheme) + u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string { + if (scheme == "http" && val == defaultHTTPPort) || (scheme == "https" && val == defaultHTTPSPort) { + return "" + } + return val + }) + } +} + +func removeDuplicateSlashes(u *url.URL) { + if len(u.Path) > 0 { + u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/") + } +} diff --git a/vendor/github.com/go-openapi/jsonreference/reference.go b/vendor/github.com/go-openapi/jsonreference/reference.go new file mode 100644 index 0000000..cfdef03 --- /dev/null +++ b/vendor/github.com/go-openapi/jsonreference/reference.go @@ -0,0 +1,158 @@ +// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// author sigu-399 +// author-github https://github.com/sigu-399 +// author-mail sigu.399@gmail.com +// +// repository-name jsonreference +// repository-desc An implementation of JSON Reference - Go language +// +// description Main and unique file. 
+// +// created 26-02-2013 + +package jsonreference + +import ( + "errors" + "net/url" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/jsonreference/internal" +) + +const ( + fragmentRune = `#` +) + +// New creates a new reference for the given string +func New(jsonReferenceString string) (Ref, error) { + + var r Ref + err := r.parse(jsonReferenceString) + return r, err + +} + +// MustCreateRef parses the ref string and panics when it's invalid. +// Use the New method for a version that returns an error +func MustCreateRef(ref string) Ref { + r, err := New(ref) + if err != nil { + panic(err) + } + return r +} + +// Ref represents a json reference object +type Ref struct { + referenceURL *url.URL + referencePointer jsonpointer.Pointer + + HasFullURL bool + HasURLPathOnly bool + HasFragmentOnly bool + HasFileScheme bool + HasFullFilePath bool +} + +// GetURL gets the URL for this reference +func (r *Ref) GetURL() *url.URL { + return r.referenceURL +} + +// GetPointer gets the json pointer for this reference +func (r *Ref) GetPointer() *jsonpointer.Pointer { + return &r.referencePointer +} + +// String returns the best version of the url for this reference +func (r *Ref) String() string { + + if r.referenceURL != nil { + return r.referenceURL.String() + } + + if r.HasFragmentOnly { + return fragmentRune + r.referencePointer.String() + } + + return r.referencePointer.String() +} + +// IsRoot returns true if this reference is a root document +func (r *Ref) IsRoot() bool { + return r.referenceURL != nil && + !r.IsCanonical() && + !r.HasURLPathOnly && + r.referenceURL.Fragment == "" +} + +// IsCanonical returns true when this pointer starts with http(s):// or file:// +func (r *Ref) IsCanonical() bool { + return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL) +} + +// "Constructor", parses the given string JSON reference +func (r *Ref) parse(jsonReferenceString string) error { + + parsed, err := 
url.Parse(jsonReferenceString) + if err != nil { + return err + } + + internal.NormalizeURL(parsed) + + r.referenceURL = parsed + refURL := r.referenceURL + + if refURL.Scheme != "" && refURL.Host != "" { + r.HasFullURL = true + } else { + if refURL.Path != "" { + r.HasURLPathOnly = true + } else if refURL.RawQuery == "" && refURL.Fragment != "" { + r.HasFragmentOnly = true + } + } + + r.HasFileScheme = refURL.Scheme == "file" + r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/") + + // invalid json-pointer error means url has no json-pointer fragment. simply ignore error + r.referencePointer, _ = jsonpointer.New(refURL.Fragment) + + return nil +} + +// Inherits creates a new reference from a parent and a child +// If the child cannot inherit from the parent, an error is returned +func (r *Ref) Inherits(child Ref) (*Ref, error) { + childURL := child.GetURL() + parentURL := r.GetURL() + if childURL == nil { + return nil, errors.New("child url is nil") + } + if parentURL == nil { + return &child, nil + } + + ref, err := New(parentURL.ResolveReference(childURL).String()) + if err != nil { + return nil, err + } + return &ref, nil +} diff --git a/vendor/github.com/go-openapi/loads/.editorconfig b/vendor/github.com/go-openapi/loads/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git 
a/vendor/github.com/go-openapi/loads/.gitignore b/vendor/github.com/go-openapi/loads/.gitignore new file mode 100644 index 0000000..e4f15f1 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/.gitignore @@ -0,0 +1,4 @@ +secrets.yml +coverage.out +profile.cov +profile.out diff --git a/vendor/github.com/go-openapi/loads/.golangci.yml b/vendor/github.com/go-openapi/loads/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/loads/.travis.yml b/vendor/github.com/go-openapi/loads/.travis.yml new file mode 100644 index 0000000..cd4a7c3 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/.travis.yml @@ -0,0 +1,25 @@ +after_success: +- bash <(curl -s https://codecov.io/bash) +go: +- 1.16.x +- 1.x +install: +- go get gotest.tools/gotestsum +language: go +arch: +- amd64 +- ppc64le +jobs: + include: + # include linting job, but only for latest go version and amd64 arch + - go: 1.x + arch: amd64 + install: + go get github.com/golangci/golangci-lint/cmd/golangci-lint + script: + - golangci-lint run 
--new-from-rev master +notifications: + slack: + secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM= +script: +- gotestsum -f short-verbose -- -race -timeout=20m -coverprofile=coverage.txt -covermode=atomic ./... diff --git a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/loads/LICENSE b/vendor/github.com/go-openapi/loads/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/loads/README.md b/vendor/github.com/go-openapi/loads/README.md new file mode 100644 index 0000000..f8bd440 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/README.md @@ -0,0 +1,6 @@ +# Loads OAI specs [![Build Status](https://github.com/go-openapi/loads/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/loads/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) + +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/loads)](https://goreportcard.com/report/github.com/go-openapi/loads) + +Loading of OAI specification documents from local or remote locations. Supports JSON and YAML documents. 
diff --git a/vendor/github.com/go-openapi/loads/doc.go b/vendor/github.com/go-openapi/loads/doc.go new file mode 100644 index 0000000..5bcaef5 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/doc.go @@ -0,0 +1,18 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package loads provides document loading methods for swagger (OAI) specifications. +// +// It is used by other go-openapi packages to load and run analysis on local or remote spec documents. +package loads diff --git a/vendor/github.com/go-openapi/loads/loaders.go b/vendor/github.com/go-openapi/loads/loaders.go new file mode 100644 index 0000000..b2d1e03 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/loaders.go @@ -0,0 +1,133 @@ +package loads + +import ( + "encoding/json" + "errors" + "net/url" + + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +var ( + // Default chain of loaders, defined at the package level. + // + // By default this matches json and yaml documents. + // + // May be altered with AddLoader(). 
+ loaders *loader +) + +func init() { + jsonLoader := &loader{ + DocLoaderWithMatch: DocLoaderWithMatch{ + Match: func(_ string) bool { + return true + }, + Fn: JSONDoc, + }, + } + + loaders = jsonLoader.WithHead(&loader{ + DocLoaderWithMatch: DocLoaderWithMatch{ + Match: swag.YAMLMatcher, + Fn: swag.YAMLDoc, + }, + }) + + // sets the global default loader for go-openapi/spec + spec.PathLoader = loaders.Load +} + +// DocLoader represents a doc loader type +type DocLoader func(string) (json.RawMessage, error) + +// DocMatcher represents a predicate to check if a loader matches +type DocMatcher func(string) bool + +// DocLoaderWithMatch describes a loading function for a given extension match. +type DocLoaderWithMatch struct { + Fn DocLoader + Match DocMatcher +} + +// NewDocLoaderWithMatch builds a DocLoaderWithMatch to be used in load options +func NewDocLoaderWithMatch(fn DocLoader, matcher DocMatcher) DocLoaderWithMatch { + return DocLoaderWithMatch{ + Fn: fn, + Match: matcher, + } +} + +type loader struct { + DocLoaderWithMatch + Next *loader +} + +// WithHead adds a loader at the head of the current stack +func (l *loader) WithHead(head *loader) *loader { + if head == nil { + return l + } + head.Next = l + return head +} + +// WithNext adds a loader at the trail of the current stack +func (l *loader) WithNext(next *loader) *loader { + l.Next = next + return next +} + +// Load the raw document from path +func (l *loader) Load(path string) (json.RawMessage, error) { + _, erp := url.Parse(path) + if erp != nil { + return nil, erp + } + + lastErr := errors.New("no loader matched") // default error if no match was found + for ldr := l; ldr != nil; ldr = ldr.Next { + if ldr.Match != nil && !ldr.Match(path) { + continue + } + + // try then move to next one if there is an error + b, err := ldr.Fn(path) + if err == nil { + return b, nil + } + + lastErr = err + } + + return nil, lastErr +} + +// JSONDoc loads a json document from either a file or a remote url +func 
JSONDoc(path string) (json.RawMessage, error) { + data, err := swag.LoadFromFileOrHTTP(path) + if err != nil { + return nil, err + } + return json.RawMessage(data), nil +} + +// AddLoader for a document, executed before other previously set loaders. +// +// This sets the configuration at the package level. +// +// NOTE: +// - this updates the default loader used by github.com/go-openapi/spec +// - since this sets package level globals, you shouln't call this concurrently +func AddLoader(predicate DocMatcher, load DocLoader) { + loaders = loaders.WithHead(&loader{ + DocLoaderWithMatch: DocLoaderWithMatch{ + Match: predicate, + Fn: load, + }, + }) + + // sets the global default loader for go-openapi/spec + spec.PathLoader = loaders.Load +} diff --git a/vendor/github.com/go-openapi/loads/options.go b/vendor/github.com/go-openapi/loads/options.go new file mode 100644 index 0000000..f8305d5 --- /dev/null +++ b/vendor/github.com/go-openapi/loads/options.go @@ -0,0 +1,61 @@ +package loads + +type options struct { + loader *loader +} + +func defaultOptions() *options { + return &options{ + loader: loaders, + } +} + +func loaderFromOptions(options []LoaderOption) *loader { + opts := defaultOptions() + for _, apply := range options { + apply(opts) + } + + return opts.loader +} + +// LoaderOption allows to fine-tune the spec loader behavior +type LoaderOption func(*options) + +// WithDocLoader sets a custom loader for loading specs +func WithDocLoader(l DocLoader) LoaderOption { + return func(opt *options) { + if l == nil { + return + } + opt.loader = &loader{ + DocLoaderWithMatch: DocLoaderWithMatch{ + Fn: l, + }, + } + } +} + +// WithDocLoaderMatches sets a chain of custom loaders for loading specs +// for different extension matches. +// +// Loaders are executed in the order of provided DocLoaderWithMatch'es. 
+func WithDocLoaderMatches(l ...DocLoaderWithMatch) LoaderOption { + return func(opt *options) { + var final, prev *loader + for _, ldr := range l { + if ldr.Fn == nil { + continue + } + + if prev == nil { + final = &loader{DocLoaderWithMatch: ldr} + prev = final + continue + } + + prev = prev.WithNext(&loader{DocLoaderWithMatch: ldr}) + } + opt.loader = final + } +} diff --git a/vendor/github.com/go-openapi/loads/spec.go b/vendor/github.com/go-openapi/loads/spec.go new file mode 100644 index 0000000..c9039cd --- /dev/null +++ b/vendor/github.com/go-openapi/loads/spec.go @@ -0,0 +1,275 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package loads + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "fmt" + + "github.com/go-openapi/analysis" + "github.com/go-openapi/spec" + "github.com/go-openapi/swag" +) + +func init() { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) +} + +// Document represents a swagger spec document +type Document struct { + // specAnalyzer + Analyzer *analysis.Spec + spec *spec.Swagger + specFilePath string + origSpec *spec.Swagger + schema *spec.Schema + pathLoader *loader + raw json.RawMessage +} + +// JSONSpec loads a spec from a json document +func JSONSpec(path string, options ...LoaderOption) (*Document, error) { + data, err := JSONDoc(path) + if err != nil { + return nil, err + } + // convert to json + doc, err := Analyzed(data, "", options...) 
+ if err != nil { + return nil, err + } + + doc.specFilePath = path + + return doc, nil +} + +// Embedded returns a Document based on embedded specs. No analysis is required +func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, error) { + var origSpec, flatSpec spec.Swagger + if err := json.Unmarshal(orig, &origSpec); err != nil { + return nil, err + } + if err := json.Unmarshal(flat, &flatSpec); err != nil { + return nil, err + } + return &Document{ + raw: orig, + origSpec: &origSpec, + spec: &flatSpec, + pathLoader: loaderFromOptions(options), + }, nil +} + +// Spec loads a new spec document from a local or remote path +func Spec(path string, options ...LoaderOption) (*Document, error) { + ldr := loaderFromOptions(options) + + b, err := ldr.Load(path) + if err != nil { + return nil, err + } + + document, err := Analyzed(b, "", options...) + if err != nil { + return nil, err + } + + document.specFilePath = path + document.pathLoader = ldr + + return document, nil +} + +// Analyzed creates a new analyzed spec document for a root json.RawMessage. 
+func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*Document, error) { + if version == "" { + version = "2.0" + } + if version != "2.0" { + return nil, fmt.Errorf("spec version %q is not supported", version) + } + + raw, err := trimData(data) // trim blanks, then convert yaml docs into json + if err != nil { + return nil, err + } + + swspec := new(spec.Swagger) + if err = json.Unmarshal(raw, swspec); err != nil { + return nil, err + } + + origsqspec, err := cloneSpec(swspec) + if err != nil { + return nil, err + } + + d := &Document{ + Analyzer: analysis.New(swspec), // NOTE: at this moment, analysis does not follow $refs to documents outside the root doc + schema: spec.MustLoadSwagger20Schema(), + spec: swspec, + raw: raw, + origSpec: origsqspec, + pathLoader: loaderFromOptions(options), + } + + return d, nil +} + +func trimData(in json.RawMessage) (json.RawMessage, error) { + trimmed := bytes.TrimSpace(in) + if len(trimmed) == 0 { + return in, nil + } + + if trimmed[0] == '{' || trimmed[0] == '[' { + return trimmed, nil + } + + // assume yaml doc: convert it to json + yml, err := swag.BytesToYAMLDoc(trimmed) + if err != nil { + return nil, fmt.Errorf("analyzed: %v", err) + } + + d, err := swag.YAMLToJSON(yml) + if err != nil { + return nil, fmt.Errorf("analyzed: %v", err) + } + + return d, nil +} + +// Expanded expands the $ref fields in the spec document and returns a new spec document +func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) { + swspec := new(spec.Swagger) + if err := json.Unmarshal(d.raw, swspec); err != nil { + return nil, err + } + + var expandOptions *spec.ExpandOptions + if len(options) > 0 { + expandOptions = options[0] + if expandOptions.RelativeBase == "" { + expandOptions.RelativeBase = d.specFilePath + } + } else { + expandOptions = &spec.ExpandOptions{ + RelativeBase: d.specFilePath, + } + } + + if expandOptions.PathLoader == nil { + if d.pathLoader != nil { + // use loader from 
Document options + expandOptions.PathLoader = d.pathLoader.Load + } else { + // use package level loader + expandOptions.PathLoader = loaders.Load + } + } + + if err := spec.ExpandSpec(swspec, expandOptions); err != nil { + return nil, err + } + + dd := &Document{ + Analyzer: analysis.New(swspec), + spec: swspec, + specFilePath: d.specFilePath, + schema: spec.MustLoadSwagger20Schema(), + raw: d.raw, + origSpec: d.origSpec, + } + return dd, nil +} + +// BasePath the base path for the API specified by this spec +func (d *Document) BasePath() string { + return d.spec.BasePath +} + +// Version returns the version of this spec +func (d *Document) Version() string { + return d.spec.Swagger +} + +// Schema returns the swagger 2.0 schema +func (d *Document) Schema() *spec.Schema { + return d.schema +} + +// Spec returns the swagger spec object model +func (d *Document) Spec() *spec.Swagger { + return d.spec +} + +// Host returns the host for the API +func (d *Document) Host() string { + return d.spec.Host +} + +// Raw returns the raw swagger spec as json bytes +func (d *Document) Raw() json.RawMessage { + return d.raw +} + +// OrigSpec yields the original spec +func (d *Document) OrigSpec() *spec.Swagger { + return d.origSpec +} + +// ResetDefinitions gives a shallow copy with the models reset to the original spec +func (d *Document) ResetDefinitions() *Document { + defs := make(map[string]spec.Schema, len(d.origSpec.Definitions)) + for k, v := range d.origSpec.Definitions { + defs[k] = v + } + + d.spec.Definitions = defs + return d +} + +// Pristine creates a new pristine document instance based on the input data +func (d *Document) Pristine() *Document { + raw, _ := json.Marshal(d.Spec()) + dd, _ := Analyzed(raw, d.Version()) + dd.pathLoader = d.pathLoader + dd.specFilePath = d.specFilePath + + return dd +} + +// SpecFilePath returns the file path of the spec if one is defined +func (d *Document) SpecFilePath() string { + return d.specFilePath +} + +func cloneSpec(src 
*spec.Swagger) (*spec.Swagger, error) { + var b bytes.Buffer + if err := gob.NewEncoder(&b).Encode(src); err != nil { + return nil, err + } + + var dst spec.Swagger + if err := gob.NewDecoder(&b).Decode(&dst); err != nil { + return nil, err + } + return &dst, nil +} diff --git a/vendor/github.com/go-openapi/runtime/.editorconfig b/vendor/github.com/go-openapi/runtime/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/runtime/.gitattributes b/vendor/github.com/go-openapi/runtime/.gitattributes new file mode 100644 index 0000000..d207b18 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/.gitattributes @@ -0,0 +1 @@ +*.go text eol=lf diff --git a/vendor/github.com/go-openapi/runtime/.gitignore b/vendor/github.com/go-openapi/runtime/.gitignore new file mode 100644 index 0000000..fea8b84 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/.gitignore @@ -0,0 +1,5 @@ +secrets.yml +coverage.out +*.cov +*.out +playground diff --git a/vendor/github.com/go-openapi/runtime/.golangci.yml b/vendor/github.com/go-openapi/runtime/.golangci.yml new file mode 100644 index 0000000..1c75557 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/.golangci.yml @@ -0,0 +1,62 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 
45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - nilerr # nilerr crashes on this repo + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/runtime/LICENSE b/vendor/github.com/go-openapi/runtime/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/runtime/README.md b/vendor/github.com/go-openapi/runtime/README.md new file mode 100644 index 0000000..b07e0ad --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/README.md @@ -0,0 +1,10 @@ +# runtime [![Build Status](https://github.com/go-openapi/runtime/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/runtime/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/runtime.svg)](https://pkg.go.dev/github.com/go-openapi/runtime) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/runtime)](https://goreportcard.com/report/github.com/go-openapi/runtime) + +# go OpenAPI toolkit runtime + +The runtime component for use in code generation or as untyped usage. 
diff --git a/vendor/github.com/go-openapi/runtime/bytestream.go b/vendor/github.com/go-openapi/runtime/bytestream.go new file mode 100644 index 0000000..f8fb482 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/bytestream.go @@ -0,0 +1,222 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "bytes" + "encoding" + "errors" + "fmt" + "io" + "reflect" + + "github.com/go-openapi/swag" +) + +func defaultCloser() error { return nil } + +type byteStreamOpt func(opts *byteStreamOpts) + +// ClosesStream when the bytestream consumer or producer is finished +func ClosesStream(opts *byteStreamOpts) { + opts.Close = true +} + +type byteStreamOpts struct { + Close bool +} + +// ByteStreamConsumer creates a consumer for byte streams. +// +// The consumer consumes from a provided reader into the data passed by reference. 
+// +// Supported output underlying types and interfaces, prioritized in this order: +// - io.ReaderFrom (for maximum control) +// - io.Writer (performs io.Copy) +// - encoding.BinaryUnmarshaler +// - *string +// - *[]byte +func ByteStreamConsumer(opts ...byteStreamOpt) Consumer { + var vals byteStreamOpts + for _, opt := range opts { + opt(&vals) + } + + return ConsumerFunc(func(reader io.Reader, data interface{}) error { + if reader == nil { + return errors.New("ByteStreamConsumer requires a reader") // early exit + } + if data == nil { + return errors.New("nil destination for ByteStreamConsumer") + } + + closer := defaultCloser + if vals.Close { + if cl, isReaderCloser := reader.(io.Closer); isReaderCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + if readerFrom, isReaderFrom := data.(io.ReaderFrom); isReaderFrom { + _, err := readerFrom.ReadFrom(reader) + return err + } + + if writer, isDataWriter := data.(io.Writer); isDataWriter { + _, err := io.Copy(writer, reader) + return err + } + + // buffers input before writing to data + var buf bytes.Buffer + _, err := buf.ReadFrom(reader) + if err != nil { + return err + } + b := buf.Bytes() + + switch destinationPointer := data.(type) { + case encoding.BinaryUnmarshaler: + return destinationPointer.UnmarshalBinary(b) + case *any: + switch (*destinationPointer).(type) { + case string: + *destinationPointer = string(b) + + return nil + + case []byte: + *destinationPointer = b + + return nil + } + default: + // check for the underlying type to be pointer to []byte or string, + if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr { + return errors.New("destination must be a pointer") + } + + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + v.SetBytes(b) + return nil + + case t.Kind() == reflect.String: + v.SetString(string(b)) + return nil + } + } + + return fmt.Errorf("%v (%T) is not 
supported by the ByteStreamConsumer, %s", + data, data, "can be resolved by supporting Writer/BinaryUnmarshaler interface") + }) +} + +// ByteStreamProducer creates a producer for byte streams. +// +// The producer takes input data then writes to an output writer (essentially as a pipe). +// +// Supported input underlying types and interfaces, prioritized in this order: +// - io.WriterTo (for maximum control) +// - io.Reader (performs io.Copy). A ReadCloser is closed before exiting. +// - encoding.BinaryMarshaler +// - error (writes as a string) +// - []byte +// - string +// - struct, other slices: writes as JSON +func ByteStreamProducer(opts ...byteStreamOpt) Producer { + var vals byteStreamOpts + for _, opt := range opts { + opt(&vals) + } + + return ProducerFunc(func(writer io.Writer, data interface{}) error { + if writer == nil { + return errors.New("ByteStreamProducer requires a writer") // early exit + } + if data == nil { + return errors.New("nil data for ByteStreamProducer") + } + + closer := defaultCloser + if vals.Close { + if cl, isWriterCloser := writer.(io.Closer); isWriterCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + if rc, isDataCloser := data.(io.ReadCloser); isDataCloser { + defer rc.Close() + } + + switch origin := data.(type) { + case io.WriterTo: + _, err := origin.WriteTo(writer) + return err + + case io.Reader: + _, err := io.Copy(writer, origin) + return err + + case encoding.BinaryMarshaler: + bytes, err := origin.MarshalBinary() + if err != nil { + return err + } + + _, err = writer.Write(bytes) + return err + + case error: + _, err := writer.Write([]byte(origin.Error())) + return err + + default: + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + _, err := writer.Write(v.Bytes()) + return err + + case t.Kind() == reflect.String: + _, err := writer.Write([]byte(v.String())) + return err + + case t.Kind() == 
reflect.Struct || t.Kind() == reflect.Slice: + b, err := swag.WriteJSON(data) + if err != nil { + return err + } + + _, err = writer.Write(b) + return err + } + } + + return fmt.Errorf("%v (%T) is not supported by the ByteStreamProducer, %s", + data, data, "can be resolved by supporting Reader/BinaryMarshaler interface") + }) +} diff --git a/vendor/github.com/go-openapi/runtime/client/auth_info.go b/vendor/github.com/go-openapi/runtime/client/auth_info.go new file mode 100644 index 0000000..4f26e92 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/auth_info.go @@ -0,0 +1,77 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "encoding/base64" + + "github.com/go-openapi/strfmt" + + "github.com/go-openapi/runtime" +) + +// PassThroughAuth never manipulates the request +var PassThroughAuth runtime.ClientAuthInfoWriter + +func init() { + PassThroughAuth = runtime.ClientAuthInfoWriterFunc(func(_ runtime.ClientRequest, _ strfmt.Registry) error { return nil }) +} + +// BasicAuth provides a basic auth info writer +func BasicAuth(username, password string) runtime.ClientAuthInfoWriter { + return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error { + encoded := base64.StdEncoding.EncodeToString([]byte(username + ":" + password)) + return r.SetHeaderParam(runtime.HeaderAuthorization, "Basic "+encoded) + }) +} + +// APIKeyAuth provides an API key auth info writer +func APIKeyAuth(name, in, value string) runtime.ClientAuthInfoWriter { + if in == "query" { + return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error { + return r.SetQueryParam(name, value) + }) + } + + if in == "header" { + return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error { + return r.SetHeaderParam(name, value) + }) + } + return nil +} + +// BearerToken provides a header based oauth2 bearer access token auth info writer +func BearerToken(token string) runtime.ClientAuthInfoWriter { + return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error { + return r.SetHeaderParam(runtime.HeaderAuthorization, "Bearer "+token) + }) +} + +// Compose combines multiple ClientAuthInfoWriters into a single one. +// Useful when multiple auth headers are needed. 
+func Compose(auths ...runtime.ClientAuthInfoWriter) runtime.ClientAuthInfoWriter { + return runtime.ClientAuthInfoWriterFunc(func(r runtime.ClientRequest, _ strfmt.Registry) error { + for _, auth := range auths { + if auth == nil { + continue + } + if err := auth.AuthenticateRequest(r, nil); err != nil { + return err + } + } + return nil + }) +} diff --git a/vendor/github.com/go-openapi/runtime/client/keepalive.go b/vendor/github.com/go-openapi/runtime/client/keepalive.go new file mode 100644 index 0000000..7dd6b51 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/keepalive.go @@ -0,0 +1,54 @@ +package client + +import ( + "io" + "net/http" + "sync/atomic" +) + +// KeepAliveTransport drains the remaining body from a response +// so that go will reuse the TCP connections. +// This is not enabled by default because there are servers where +// the response never gets closed and that would make the code hang forever. +// So instead it's provided as a http client middleware that can be used to override +// any request. +func KeepAliveTransport(rt http.RoundTripper) http.RoundTripper { + return &keepAliveTransport{wrapped: rt} +} + +type keepAliveTransport struct { + wrapped http.RoundTripper +} + +func (k *keepAliveTransport) RoundTrip(r *http.Request) (*http.Response, error) { + resp, err := k.wrapped.RoundTrip(r) + if err != nil { + return resp, err + } + resp.Body = &drainingReadCloser{rdr: resp.Body} + return resp, nil +} + +type drainingReadCloser struct { + rdr io.ReadCloser + seenEOF uint32 +} + +func (d *drainingReadCloser) Read(p []byte) (n int, err error) { + n, err = d.rdr.Read(p) + if err == io.EOF || n == 0 { + atomic.StoreUint32(&d.seenEOF, 1) + } + return +} + +func (d *drainingReadCloser) Close() error { + // drain buffer + if atomic.LoadUint32(&d.seenEOF) != 1 { + // If the reader side (a HTTP server) is misbehaving, it still may send + // some bytes, but the closer ignores them to keep the underling + // connection open. 
+ _, _ = io.Copy(io.Discard, d.rdr) + } + return d.rdr.Close() +} diff --git a/vendor/github.com/go-openapi/runtime/client/opentelemetry.go b/vendor/github.com/go-openapi/runtime/client/opentelemetry.go new file mode 100644 index 0000000..256cd1b --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/opentelemetry.go @@ -0,0 +1,211 @@ +package client + +import ( + "fmt" + "net/http" + "strings" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/strfmt" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + "go.opentelemetry.io/otel/propagation" + semconv "go.opentelemetry.io/otel/semconv/v1.17.0" + "go.opentelemetry.io/otel/semconv/v1.17.0/httpconv" + "go.opentelemetry.io/otel/trace" +) + +const ( + instrumentationVersion = "1.0.0" + tracerName = "go-openapi" +) + +type config struct { + Tracer trace.Tracer + Propagator propagation.TextMapPropagator + SpanStartOptions []trace.SpanStartOption + SpanNameFormatter func(*runtime.ClientOperation) string + TracerProvider trace.TracerProvider +} + +type OpenTelemetryOpt interface { + apply(*config) +} + +type optionFunc func(*config) + +func (o optionFunc) apply(c *config) { + o(c) +} + +// WithTracerProvider specifies a tracer provider to use for creating a tracer. +// If none is specified, the global provider is used. +func WithTracerProvider(provider trace.TracerProvider) OpenTelemetryOpt { + return optionFunc(func(c *config) { + if provider != nil { + c.TracerProvider = provider + } + }) +} + +// WithPropagators configures specific propagators. If this +// option isn't specified, then the global TextMapPropagator is used. +func WithPropagators(ps propagation.TextMapPropagator) OpenTelemetryOpt { + return optionFunc(func(c *config) { + if ps != nil { + c.Propagator = ps + } + }) +} + +// WithSpanOptions configures an additional set of +// trace.SpanOptions, which are applied to each new span. 
+func WithSpanOptions(opts ...trace.SpanStartOption) OpenTelemetryOpt { + return optionFunc(func(c *config) { + c.SpanStartOptions = append(c.SpanStartOptions, opts...) + }) +} + +// WithSpanNameFormatter takes a function that will be called on every +// request and the returned string will become the Span Name. +func WithSpanNameFormatter(f func(op *runtime.ClientOperation) string) OpenTelemetryOpt { + return optionFunc(func(c *config) { + c.SpanNameFormatter = f + }) +} + +func defaultTransportFormatter(op *runtime.ClientOperation) string { + if op.ID != "" { + return op.ID + } + + return fmt.Sprintf("%s_%s", strings.ToLower(op.Method), op.PathPattern) +} + +type openTelemetryTransport struct { + transport runtime.ClientTransport + host string + tracer trace.Tracer + config *config +} + +func newOpenTelemetryTransport(transport runtime.ClientTransport, host string, opts []OpenTelemetryOpt) *openTelemetryTransport { + tr := &openTelemetryTransport{ + transport: transport, + host: host, + } + + defaultOpts := []OpenTelemetryOpt{ + WithSpanOptions(trace.WithSpanKind(trace.SpanKindClient)), + WithSpanNameFormatter(defaultTransportFormatter), + WithPropagators(otel.GetTextMapPropagator()), + WithTracerProvider(otel.GetTracerProvider()), + } + + c := newConfig(append(defaultOpts, opts...)...) 
+ tr.config = c + + return tr +} + +func (t *openTelemetryTransport) Submit(op *runtime.ClientOperation) (interface{}, error) { + if op.Context == nil { + return t.transport.Submit(op) + } + + params := op.Params + reader := op.Reader + + var span trace.Span + defer func() { + if span != nil { + span.End() + } + }() + + op.Params = runtime.ClientRequestWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error { + span = t.newOpenTelemetrySpan(op, req.GetHeaderParams()) + return params.WriteToRequest(req, reg) + }) + + op.Reader = runtime.ClientResponseReaderFunc(func(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + if span != nil { + statusCode := response.Code() + // NOTE: this is replaced by semconv.HTTPResponseStatusCode in semconv v1.21 + span.SetAttributes(semconv.HTTPStatusCode(statusCode)) + // NOTE: the conversion from HTTP status code to trace code is no longer available with + // semconv v1.21 + span.SetStatus(httpconv.ServerStatus(statusCode)) + } + + return reader.ReadResponse(response, consumer) + }) + + submit, err := t.transport.Submit(op) + if err != nil && span != nil { + span.RecordError(err) + span.SetStatus(codes.Error, err.Error()) + } + + return submit, err +} + +func (t *openTelemetryTransport) newOpenTelemetrySpan(op *runtime.ClientOperation, header http.Header) trace.Span { + ctx := op.Context + + tracer := t.tracer + if tracer == nil { + if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() { + tracer = newTracer(span.TracerProvider()) + } else { + tracer = newTracer(otel.GetTracerProvider()) + } + } + + ctx, span := tracer.Start(ctx, t.config.SpanNameFormatter(op), t.config.SpanStartOptions...) 
+ + var scheme string + if len(op.Schemes) > 0 { + scheme = op.Schemes[0] + } + + span.SetAttributes( + attribute.String("net.peer.name", t.host), + attribute.String(string(semconv.HTTPRouteKey), op.PathPattern), + attribute.String(string(semconv.HTTPMethodKey), op.Method), + attribute.String("span.kind", trace.SpanKindClient.String()), + attribute.String("http.scheme", scheme), + ) + + carrier := propagation.HeaderCarrier(header) + t.config.Propagator.Inject(ctx, carrier) + + return span +} + +func newTracer(tp trace.TracerProvider) trace.Tracer { + return tp.Tracer(tracerName, trace.WithInstrumentationVersion(version())) +} + +func newConfig(opts ...OpenTelemetryOpt) *config { + c := &config{ + Propagator: otel.GetTextMapPropagator(), + } + + for _, opt := range opts { + opt.apply(c) + } + + // Tracer is only initialized if manually specified. Otherwise, can be passed with the tracing context. + if c.TracerProvider != nil { + c.Tracer = newTracer(c.TracerProvider) + } + + return c +} + +// Version is the current release version of the go-runtime instrumentation. 
+func version() string { + return instrumentationVersion +} diff --git a/vendor/github.com/go-openapi/runtime/client/opentracing.go b/vendor/github.com/go-openapi/runtime/client/opentracing.go new file mode 100644 index 0000000..627286d --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/opentracing.go @@ -0,0 +1,99 @@ +package client + +import ( + "fmt" + "net/http" + + "github.com/go-openapi/strfmt" + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" + + "github.com/go-openapi/runtime" +) + +type tracingTransport struct { + transport runtime.ClientTransport + host string + opts []opentracing.StartSpanOption +} + +func newOpenTracingTransport(transport runtime.ClientTransport, host string, opts []opentracing.StartSpanOption, +) runtime.ClientTransport { + return &tracingTransport{ + transport: transport, + host: host, + opts: opts, + } +} + +func (t *tracingTransport) Submit(op *runtime.ClientOperation) (interface{}, error) { + if op.Context == nil { + return t.transport.Submit(op) + } + + params := op.Params + reader := op.Reader + + var span opentracing.Span + defer func() { + if span != nil { + span.Finish() + } + }() + + op.Params = runtime.ClientRequestWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error { + span = createClientSpan(op, req.GetHeaderParams(), t.host, t.opts) + return params.WriteToRequest(req, reg) + }) + + op.Reader = runtime.ClientResponseReaderFunc(func(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + if span != nil { + code := response.Code() + ext.HTTPStatusCode.Set(span, uint16(code)) + if code >= 400 { + ext.Error.Set(span, true) + } + } + return reader.ReadResponse(response, consumer) + }) + + submit, err := t.transport.Submit(op) + if err != nil && span != nil { + ext.Error.Set(span, true) + span.LogFields(log.Error(err)) + } + return submit, err +} + +func createClientSpan(op 
*runtime.ClientOperation, header http.Header, host string, + opts []opentracing.StartSpanOption) opentracing.Span { + ctx := op.Context + span := opentracing.SpanFromContext(ctx) + + if span != nil { + opts = append(opts, ext.SpanKindRPCClient) + span, _ = opentracing.StartSpanFromContextWithTracer( + ctx, span.Tracer(), operationName(op), opts...) + + ext.Component.Set(span, "go-openapi") + ext.PeerHostname.Set(span, host) + span.SetTag("http.path", op.PathPattern) + ext.HTTPMethod.Set(span, op.Method) + + _ = span.Tracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(header)) + + return span + } + return nil +} + +func operationName(op *runtime.ClientOperation) string { + if op.ID != "" { + return op.ID + } + return fmt.Sprintf("%s_%s", op.Method, op.PathPattern) +} diff --git a/vendor/github.com/go-openapi/runtime/client/request.go b/vendor/github.com/go-openapi/runtime/client/request.go new file mode 100644 index 0000000..c4a891d --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/request.go @@ -0,0 +1,482 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "bytes" + "context" + "fmt" + "io" + "log" + "mime/multipart" + "net/http" + "net/textproto" + "net/url" + "os" + "path" + "path/filepath" + "strings" + "time" + + "github.com/go-openapi/strfmt" + + "github.com/go-openapi/runtime" +) + +// NewRequest creates a new swagger http client request +func newRequest(method, pathPattern string, writer runtime.ClientRequestWriter) *request { + return &request{ + pathPattern: pathPattern, + method: method, + writer: writer, + header: make(http.Header), + query: make(url.Values), + timeout: DefaultTimeout, + getBody: getRequestBuffer, + } +} + +// Request represents a swagger client request. +// +// This Request struct converts to a HTTP request. +// There might be others that convert to other transports. +// There is no error checking here, it is assumed to be used after a spec has been validated. +// so impossible combinations should not arise (hopefully). +// +// The main purpose of this struct is to hide the machinery of adding params to a transport request. +// The generated code only implements what is necessary to turn a param into a valid value for these methods. 
+type request struct { + pathPattern string + method string + writer runtime.ClientRequestWriter + + pathParams map[string]string + header http.Header + query url.Values + formFields url.Values + fileFields map[string][]runtime.NamedReadCloser + payload interface{} + timeout time.Duration + buf *bytes.Buffer + + getBody func(r *request) []byte +} + +var ( + // ensure interface compliance + _ runtime.ClientRequest = new(request) +) + +func (r *request) isMultipart(mediaType string) bool { + if len(r.fileFields) > 0 { + return true + } + + return runtime.MultipartFormMime == mediaType +} + +// BuildHTTP creates a new http request based on the data from the params +func (r *request) BuildHTTP(mediaType, basePath string, producers map[string]runtime.Producer, registry strfmt.Registry) (*http.Request, error) { + return r.buildHTTP(mediaType, basePath, producers, registry, nil) +} +func escapeQuotes(s string) string { + return strings.NewReplacer("\\", "\\\\", `"`, "\\\"").Replace(s) +} + +func logClose(err error, pw *io.PipeWriter) { + log.Println(err) + closeErr := pw.CloseWithError(err) + if closeErr != nil { + log.Println(closeErr) + } +} + +func (r *request) buildHTTP(mediaType, basePath string, producers map[string]runtime.Producer, registry strfmt.Registry, auth runtime.ClientAuthInfoWriter) (*http.Request, error) { //nolint:gocyclo,maintidx + // build the data + if err := r.writer.WriteToRequest(r, registry); err != nil { + return nil, err + } + + // Our body must be an io.Reader. + // When we create the http.Request, if we pass it a + // bytes.Buffer then it will wrap it in an io.ReadCloser + // and set the content length automatically. 
+ var body io.Reader + var pr *io.PipeReader + var pw *io.PipeWriter + + r.buf = bytes.NewBuffer(nil) + if r.payload != nil || len(r.formFields) > 0 || len(r.fileFields) > 0 { + body = r.buf + if r.isMultipart(mediaType) { + pr, pw = io.Pipe() + body = pr + } + } + + // check if this is a form type request + if len(r.formFields) > 0 || len(r.fileFields) > 0 { + if !r.isMultipart(mediaType) { + r.header.Set(runtime.HeaderContentType, mediaType) + formString := r.formFields.Encode() + r.buf.WriteString(formString) + goto DoneChoosingBodySource + } + + mp := multipart.NewWriter(pw) + r.header.Set(runtime.HeaderContentType, mangleContentType(mediaType, mp.Boundary())) + + go func() { + defer func() { + mp.Close() + pw.Close() + }() + + for fn, v := range r.formFields { + for _, vi := range v { + if err := mp.WriteField(fn, vi); err != nil { + logClose(err, pw) + return + } + } + } + + defer func() { + for _, ff := range r.fileFields { + for _, ffi := range ff { + ffi.Close() + } + } + }() + for fn, f := range r.fileFields { + for _, fi := range f { + var fileContentType string + if p, ok := fi.(interface { + ContentType() string + }); ok { + fileContentType = p.ContentType() + } else { + // Need to read the data so that we can detect the content type + buf := make([]byte, 512) + size, err := fi.Read(buf) + if err != nil && err != io.EOF { + logClose(err, pw) + return + } + fileContentType = http.DetectContentType(buf) + fi = runtime.NamedReader(fi.Name(), io.MultiReader(bytes.NewReader(buf[:size]), fi)) + } + + // Create the MIME headers for the new part + h := make(textproto.MIMEHeader) + h.Set("Content-Disposition", + fmt.Sprintf(`form-data; name="%s"; filename="%s"`, + escapeQuotes(fn), escapeQuotes(filepath.Base(fi.Name())))) + h.Set("Content-Type", fileContentType) + + wrtr, err := mp.CreatePart(h) + if err != nil { + logClose(err, pw) + return + } + if _, err := io.Copy(wrtr, fi); err != nil { + logClose(err, pw) + } + } + } + }() + + goto DoneChoosingBodySource 
+ } + + // if there is payload, use the producer to write the payload, and then + // set the header to the content-type appropriate for the payload produced + if r.payload != nil { + // TODO: infer most appropriate content type based on the producer used, + // and the `consumers` section of the spec/operation + r.header.Set(runtime.HeaderContentType, mediaType) + if rdr, ok := r.payload.(io.ReadCloser); ok { + body = rdr + goto DoneChoosingBodySource + } + + if rdr, ok := r.payload.(io.Reader); ok { + body = rdr + goto DoneChoosingBodySource + } + + producer := producers[mediaType] + if err := producer.Produce(r.buf, r.payload); err != nil { + return nil, err + } + } + +DoneChoosingBodySource: + + if runtime.CanHaveBody(r.method) && body != nil && r.header.Get(runtime.HeaderContentType) == "" { + r.header.Set(runtime.HeaderContentType, mediaType) + } + + if auth != nil { + // If we're not using r.buf as our http.Request's body, + // either the payload is an io.Reader or io.ReadCloser, + // or we're doing a multipart form/file. + // + // In those cases, if the AuthenticateRequest call asks for the body, + // we must read it into a buffer and provide that, then use that buffer + // as the body of our http.Request. + // + // This is done in-line with the GetBody() request rather than ahead + // of time, because there's no way to know if the AuthenticateRequest + // will even ask for the body of the request. + // + // If for some reason the copy fails, there's no way to return that + // error to the GetBody() call, so return it afterwards. + // + // An error from the copy action is prioritized over any error + // from the AuthenticateRequest call, because the mis-read + // body may have interfered with the auth. 
+ // + var copyErr error + if buf, ok := body.(*bytes.Buffer); body != nil && (!ok || buf != r.buf) { + var copied bool + r.getBody = func(r *request) []byte { + if copied { + return getRequestBuffer(r) + } + + defer func() { + copied = true + }() + + if _, copyErr = io.Copy(r.buf, body); copyErr != nil { + return nil + } + + if closer, ok := body.(io.ReadCloser); ok { + if copyErr = closer.Close(); copyErr != nil { + return nil + } + } + + body = r.buf + return getRequestBuffer(r) + } + } + + authErr := auth.AuthenticateRequest(r, registry) + + if copyErr != nil { + return nil, fmt.Errorf("error retrieving the response body: %v", copyErr) + } + + if authErr != nil { + return nil, authErr + } + } + + // In case the basePath or the request pathPattern include static query parameters, + // parse those out before constructing the final path. The parameters themselves + // will be merged with the ones set by the client, with the priority given first to + // the ones set by the client, then the path pattern, and lastly the base path. 
+ basePathURL, err := url.Parse(basePath) + if err != nil { + return nil, err + } + staticQueryParams := basePathURL.Query() + + pathPatternURL, err := url.Parse(r.pathPattern) + if err != nil { + return nil, err + } + for name, values := range pathPatternURL.Query() { + if _, present := staticQueryParams[name]; present { + staticQueryParams.Del(name) + } + for _, value := range values { + staticQueryParams.Add(name, value) + } + } + + // create http request + var reinstateSlash bool + if pathPatternURL.Path != "" && pathPatternURL.Path != "/" && pathPatternURL.Path[len(pathPatternURL.Path)-1] == '/' { + reinstateSlash = true + } + + urlPath := path.Join(basePathURL.Path, pathPatternURL.Path) + for k, v := range r.pathParams { + urlPath = strings.ReplaceAll(urlPath, "{"+k+"}", url.PathEscape(v)) + } + if reinstateSlash { + urlPath += "/" + } + + req, err := http.NewRequestWithContext(context.Background(), r.method, urlPath, body) + if err != nil { + return nil, err + } + + originalParams := r.GetQueryParams() + + // Merge the query parameters extracted from the basePath with the ones set by + // the client in this struct. In case of conflict, the client wins. 
+ for k, v := range staticQueryParams { + _, present := originalParams[k] + if !present { + if err = r.SetQueryParam(k, v...); err != nil { + return nil, err + } + } + } + + req.URL.RawQuery = r.query.Encode() + req.Header = r.header + + return req, nil +} + +func mangleContentType(mediaType, boundary string) string { + if strings.ToLower(mediaType) == runtime.URLencodedFormMime { + return fmt.Sprintf("%s; boundary=%s", mediaType, boundary) + } + return "multipart/form-data; boundary=" + boundary +} + +func (r *request) GetMethod() string { + return r.method +} + +func (r *request) GetPath() string { + path := r.pathPattern + for k, v := range r.pathParams { + path = strings.ReplaceAll(path, "{"+k+"}", v) + } + return path +} + +func (r *request) GetBody() []byte { + return r.getBody(r) +} + +func getRequestBuffer(r *request) []byte { + if r.buf == nil { + return nil + } + return r.buf.Bytes() +} + +// SetHeaderParam adds a header param to the request +// when there is only 1 value provided for the varargs, it will set it. +// when there are several values provided for the varargs it will add it (no overriding) +func (r *request) SetHeaderParam(name string, values ...string) error { + if r.header == nil { + r.header = make(http.Header) + } + r.header[http.CanonicalHeaderKey(name)] = values + return nil +} + +// GetHeaderParams returns the all headers currently set for the request +func (r *request) GetHeaderParams() http.Header { + return r.header +} + +// SetQueryParam adds a query param to the request +// when there is only 1 value provided for the varargs, it will set it. 
+// when there are several values provided for the varargs it will add it (no overriding) +func (r *request) SetQueryParam(name string, values ...string) error { + if r.query == nil { + r.query = make(url.Values) + } + r.query[name] = values + return nil +} + +// GetQueryParams returns a copy of all query params currently set for the request +func (r *request) GetQueryParams() url.Values { + var result = make(url.Values) + for key, value := range r.query { + result[key] = append([]string{}, value...) + } + return result +} + +// SetFormParam adds a forn param to the request +// when there is only 1 value provided for the varargs, it will set it. +// when there are several values provided for the varargs it will add it (no overriding) +func (r *request) SetFormParam(name string, values ...string) error { + if r.formFields == nil { + r.formFields = make(url.Values) + } + r.formFields[name] = values + return nil +} + +// SetPathParam adds a path param to the request +func (r *request) SetPathParam(name string, value string) error { + if r.pathParams == nil { + r.pathParams = make(map[string]string) + } + + r.pathParams[name] = value + return nil +} + +// SetFileParam adds a file param to the request +func (r *request) SetFileParam(name string, files ...runtime.NamedReadCloser) error { + for _, file := range files { + if actualFile, ok := file.(*os.File); ok { + fi, err := os.Stat(actualFile.Name()) + if err != nil { + return err + } + if fi.IsDir() { + return fmt.Errorf("%q is a directory, only files are supported", file.Name()) + } + } + } + + if r.fileFields == nil { + r.fileFields = make(map[string][]runtime.NamedReadCloser) + } + if r.formFields == nil { + r.formFields = make(url.Values) + } + + r.fileFields[name] = files + return nil +} + +func (r *request) GetFileParam() map[string][]runtime.NamedReadCloser { + return r.fileFields +} + +// SetBodyParam sets a body parameter on the request. 
+// This does not yet serialze the object, this happens as late as possible. +func (r *request) SetBodyParam(payload interface{}) error { + r.payload = payload + return nil +} + +func (r *request) GetBodyParam() interface{} { + return r.payload +} + +// SetTimeout sets the timeout for a request +func (r *request) SetTimeout(timeout time.Duration) error { + r.timeout = timeout + return nil +} diff --git a/vendor/github.com/go-openapi/runtime/client/response.go b/vendor/github.com/go-openapi/runtime/client/response.go new file mode 100644 index 0000000..0bbd388 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/response.go @@ -0,0 +1,50 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "io" + "net/http" + + "github.com/go-openapi/runtime" +) + +var _ runtime.ClientResponse = response{} + +func newResponse(resp *http.Response) runtime.ClientResponse { return response{resp: resp} } + +type response struct { + resp *http.Response +} + +func (r response) Code() int { + return r.resp.StatusCode +} + +func (r response) Message() string { + return r.resp.Status +} + +func (r response) GetHeader(name string) string { + return r.resp.Header.Get(name) +} + +func (r response) GetHeaders(name string) []string { + return r.resp.Header.Values(name) +} + +func (r response) Body() io.ReadCloser { + return r.resp.Body +} diff --git a/vendor/github.com/go-openapi/runtime/client/runtime.go b/vendor/github.com/go-openapi/runtime/client/runtime.go new file mode 100644 index 0000000..5bd4d75 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client/runtime.go @@ -0,0 +1,552 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "context" + "crypto" + "crypto/ecdsa" + "crypto/rsa" + "crypto/tls" + "crypto/x509" + "encoding/pem" + "errors" + "fmt" + "mime" + "net/http" + "net/http/httputil" + "os" + "strings" + "sync" + "time" + + "github.com/go-openapi/strfmt" + "github.com/opentracing/opentracing-go" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/runtime/logger" + "github.com/go-openapi/runtime/middleware" + "github.com/go-openapi/runtime/yamlpc" +) + +const ( + schemeHTTP = "http" + schemeHTTPS = "https" +) + +// TLSClientOptions to configure client authentication with mutual TLS +type TLSClientOptions struct { + // Certificate is the path to a PEM-encoded certificate to be used for + // client authentication. If set then Key must also be set. + Certificate string + + // LoadedCertificate is the certificate to be used for client authentication. + // This field is ignored if Certificate is set. If this field is set, LoadedKey + // is also required. + LoadedCertificate *x509.Certificate + + // Key is the path to an unencrypted PEM-encoded private key for client + // authentication. This field is required if Certificate is set. + Key string + + // LoadedKey is the key for client authentication. This field is required if + // LoadedCertificate is set. + LoadedKey crypto.PrivateKey + + // CA is a path to a PEM-encoded certificate that specifies the root certificate + // to use when validating the TLS certificate presented by the server. If this field + // (and LoadedCA) is not set, the system certificate pool is used. This field is ignored if LoadedCA + // is set. + CA string + + // LoadedCA specifies the root certificate to use when validating the server's TLS certificate. + // If this field (and CA) is not set, the system certificate pool is used. + LoadedCA *x509.Certificate + + // LoadedCAPool specifies a pool of RootCAs to use when validating the server's TLS certificate. 
+ // If set, it will be combined with the other loaded certificates (see LoadedCA and CA). + // If neither LoadedCA or CA is set, the provided pool with override the system + // certificate pool. + // The caller must not use the supplied pool after calling TLSClientAuth. + LoadedCAPool *x509.CertPool + + // ServerName specifies the hostname to use when verifying the server certificate. + // If this field is set then InsecureSkipVerify will be ignored and treated as + // false. + ServerName string + + // InsecureSkipVerify controls whether the certificate chain and hostname presented + // by the server are validated. If true, any certificate is accepted. + InsecureSkipVerify bool + + // VerifyPeerCertificate, if not nil, is called after normal + // certificate verification. It receives the raw ASN.1 certificates + // provided by the peer and also any verified chains that normal processing found. + // If it returns a non-nil error, the handshake is aborted and that error results. + // + // If normal verification fails then the handshake will abort before + // considering this callback. If normal verification is disabled by + // setting InsecureSkipVerify then this callback will be considered but + // the verifiedChains argument will always be nil. + VerifyPeerCertificate func(rawCerts [][]byte, verifiedChains [][]*x509.Certificate) error + + // SessionTicketsDisabled may be set to true to disable session ticket and + // PSK (resumption) support. Note that on clients, session ticket support is + // also disabled if ClientSessionCache is nil. + SessionTicketsDisabled bool + + // ClientSessionCache is a cache of ClientSessionState entries for TLS + // session resumption. It is only used by clients. + ClientSessionCache tls.ClientSessionCache + + // Prevents callers using unkeyed fields. 
+ _ struct{} +} + +// TLSClientAuth creates a tls.Config for mutual auth +func TLSClientAuth(opts TLSClientOptions) (*tls.Config, error) { + // create client tls config + cfg := &tls.Config{ + MinVersion: tls.VersionTLS12, + } + + // load client cert if specified + if opts.Certificate != "" { + cert, err := tls.LoadX509KeyPair(opts.Certificate, opts.Key) + if err != nil { + return nil, fmt.Errorf("tls client cert: %v", err) + } + cfg.Certificates = []tls.Certificate{cert} + } else if opts.LoadedCertificate != nil { + block := pem.Block{Type: "CERTIFICATE", Bytes: opts.LoadedCertificate.Raw} + certPem := pem.EncodeToMemory(&block) + + var keyBytes []byte + switch k := opts.LoadedKey.(type) { + case *rsa.PrivateKey: + keyBytes = x509.MarshalPKCS1PrivateKey(k) + case *ecdsa.PrivateKey: + var err error + keyBytes, err = x509.MarshalECPrivateKey(k) + if err != nil { + return nil, fmt.Errorf("tls client priv key: %v", err) + } + default: + return nil, errors.New("tls client priv key: unsupported key type") + } + + block = pem.Block{Type: "PRIVATE KEY", Bytes: keyBytes} + keyPem := pem.EncodeToMemory(&block) + + cert, err := tls.X509KeyPair(certPem, keyPem) + if err != nil { + return nil, fmt.Errorf("tls client cert: %v", err) + } + cfg.Certificates = []tls.Certificate{cert} + } + + cfg.InsecureSkipVerify = opts.InsecureSkipVerify + + cfg.VerifyPeerCertificate = opts.VerifyPeerCertificate + cfg.SessionTicketsDisabled = opts.SessionTicketsDisabled + cfg.ClientSessionCache = opts.ClientSessionCache + + // When no CA certificate is provided, default to the system cert pool + // that way when a request is made to a server known by the system trust store, + // the name is still verified + switch { + case opts.LoadedCA != nil: + caCertPool := basePool(opts.LoadedCAPool) + caCertPool.AddCert(opts.LoadedCA) + cfg.RootCAs = caCertPool + case opts.CA != "": + // load ca cert + caCert, err := os.ReadFile(opts.CA) + if err != nil { + return nil, fmt.Errorf("tls client ca: %v", err) + 
} + caCertPool := basePool(opts.LoadedCAPool) + caCertPool.AppendCertsFromPEM(caCert) + cfg.RootCAs = caCertPool + case opts.LoadedCAPool != nil: + cfg.RootCAs = opts.LoadedCAPool + } + + // apply servername overrride + if opts.ServerName != "" { + cfg.InsecureSkipVerify = false + cfg.ServerName = opts.ServerName + } + + return cfg, nil +} + +func basePool(pool *x509.CertPool) *x509.CertPool { + if pool == nil { + return x509.NewCertPool() + } + return pool +} + +// TLSTransport creates a http client transport suitable for mutual tls auth +func TLSTransport(opts TLSClientOptions) (http.RoundTripper, error) { + cfg, err := TLSClientAuth(opts) + if err != nil { + return nil, err + } + + return &http.Transport{TLSClientConfig: cfg}, nil +} + +// TLSClient creates a http.Client for mutual auth +func TLSClient(opts TLSClientOptions) (*http.Client, error) { + transport, err := TLSTransport(opts) + if err != nil { + return nil, err + } + return &http.Client{Transport: transport}, nil +} + +// DefaultTimeout the default request timeout +var DefaultTimeout = 30 * time.Second + +// Runtime represents an API client that uses the transport +// to make http requests based on a swagger specification. 
+type Runtime struct { + DefaultMediaType string + DefaultAuthentication runtime.ClientAuthInfoWriter + Consumers map[string]runtime.Consumer + Producers map[string]runtime.Producer + + Transport http.RoundTripper + Jar http.CookieJar + // Spec *spec.Document + Host string + BasePath string + Formats strfmt.Registry + Context context.Context //nolint:containedctx // we precisely want this type to contain the request context + + Debug bool + logger logger.Logger + + clientOnce *sync.Once + client *http.Client + schemes []string + response ClientResponseFunc +} + +// New creates a new default runtime for a swagger api runtime.Client +func New(host, basePath string, schemes []string) *Runtime { + var rt Runtime + rt.DefaultMediaType = runtime.JSONMime + + // TODO: actually infer this stuff from the spec + rt.Consumers = map[string]runtime.Consumer{ + runtime.YAMLMime: yamlpc.YAMLConsumer(), + runtime.JSONMime: runtime.JSONConsumer(), + runtime.XMLMime: runtime.XMLConsumer(), + runtime.TextMime: runtime.TextConsumer(), + runtime.HTMLMime: runtime.TextConsumer(), + runtime.CSVMime: runtime.CSVConsumer(), + runtime.DefaultMime: runtime.ByteStreamConsumer(), + } + rt.Producers = map[string]runtime.Producer{ + runtime.YAMLMime: yamlpc.YAMLProducer(), + runtime.JSONMime: runtime.JSONProducer(), + runtime.XMLMime: runtime.XMLProducer(), + runtime.TextMime: runtime.TextProducer(), + runtime.HTMLMime: runtime.TextProducer(), + runtime.CSVMime: runtime.CSVProducer(), + runtime.DefaultMime: runtime.ByteStreamProducer(), + } + rt.Transport = http.DefaultTransport + rt.Jar = nil + rt.Host = host + rt.BasePath = basePath + rt.Context = context.Background() + rt.clientOnce = new(sync.Once) + if !strings.HasPrefix(rt.BasePath, "/") { + rt.BasePath = "/" + rt.BasePath + } + + rt.Debug = logger.DebugEnabled() + rt.logger = logger.StandardLogger{} + rt.response = newResponse + + if len(schemes) > 0 { + rt.schemes = schemes + } + return &rt +} + +// NewWithClient allows you to create a 
new transport with a configured http.Client +func NewWithClient(host, basePath string, schemes []string, client *http.Client) *Runtime { + rt := New(host, basePath, schemes) + if client != nil { + rt.clientOnce.Do(func() { + rt.client = client + }) + } + return rt +} + +// WithOpenTracing adds opentracing support to the provided runtime. +// A new client span is created for each request. +// If the context of the client operation does not contain an active span, no span is created. +// The provided opts are applied to each spans - for example to add global tags. +func (r *Runtime) WithOpenTracing(opts ...opentracing.StartSpanOption) runtime.ClientTransport { + return newOpenTracingTransport(r, r.Host, opts) +} + +// WithOpenTelemetry adds opentelemetry support to the provided runtime. +// A new client span is created for each request. +// If the context of the client operation does not contain an active span, no span is created. +// The provided opts are applied to each spans - for example to add global tags. +func (r *Runtime) WithOpenTelemetry(opts ...OpenTelemetryOpt) runtime.ClientTransport { + return newOpenTelemetryTransport(r, r.Host, opts) +} + +func (r *Runtime) pickScheme(schemes []string) string { + if v := r.selectScheme(r.schemes); v != "" { + return v + } + if v := r.selectScheme(schemes); v != "" { + return v + } + return schemeHTTP +} + +func (r *Runtime) selectScheme(schemes []string) string { + schLen := len(schemes) + if schLen == 0 { + return "" + } + + scheme := schemes[0] + // prefer https, but skip when not possible + if scheme != schemeHTTPS && schLen > 1 { + for _, sch := range schemes { + if sch == schemeHTTPS { + scheme = sch + break + } + } + } + return scheme +} + +func transportOrDefault(left, right http.RoundTripper) http.RoundTripper { + if left == nil { + return right + } + return left +} + +// EnableConnectionReuse drains the remaining body from a response +// so that go will reuse the TCP connections. 
+// +// This is not enabled by default because there are servers where +// the response never gets closed and that would make the code hang forever. +// So instead it's provided as a http client middleware that can be used to override +// any request. +func (r *Runtime) EnableConnectionReuse() { + if r.client == nil { + r.Transport = KeepAliveTransport( + transportOrDefault(r.Transport, http.DefaultTransport), + ) + return + } + + r.client.Transport = KeepAliveTransport( + transportOrDefault(r.client.Transport, + transportOrDefault(r.Transport, http.DefaultTransport), + ), + ) +} + +// takes a client operation and creates equivalent http.Request +func (r *Runtime) createHttpRequest(operation *runtime.ClientOperation) (*request, *http.Request, error) { //nolint:revive,stylecheck + params, _, auth := operation.Params, operation.Reader, operation.AuthInfo + + request := newRequest(operation.Method, operation.PathPattern, params) + + var accept []string + accept = append(accept, operation.ProducesMediaTypes...) + if err := request.SetHeaderParam(runtime.HeaderAccept, accept...); err != nil { + return nil, nil, err + } + + if auth == nil && r.DefaultAuthentication != nil { + auth = runtime.ClientAuthInfoWriterFunc(func(req runtime.ClientRequest, reg strfmt.Registry) error { + if req.GetHeaderParams().Get(runtime.HeaderAuthorization) != "" { + return nil + } + return r.DefaultAuthentication.AuthenticateRequest(req, reg) + }) + } + // if auth != nil { + // if err := auth.AuthenticateRequest(request, r.Formats); err != nil { + // return nil, err + // } + //} + + // TODO: pick appropriate media type + cmt := r.DefaultMediaType + for _, mediaType := range operation.ConsumesMediaTypes { + // Pick first non-empty media type + if mediaType != "" { + cmt = mediaType + break + } + } + + if _, ok := r.Producers[cmt]; !ok && cmt != runtime.MultipartFormMime && cmt != runtime.URLencodedFormMime { + return nil, nil, fmt.Errorf("none of producers: %v registered. 
try %s", r.Producers, cmt) + } + + req, err := request.buildHTTP(cmt, r.BasePath, r.Producers, r.Formats, auth) + if err != nil { + return nil, nil, err + } + req.URL.Scheme = r.pickScheme(operation.Schemes) + req.URL.Host = r.Host + req.Host = r.Host + return request, req, nil +} + +func (r *Runtime) CreateHttpRequest(operation *runtime.ClientOperation) (req *http.Request, err error) { //nolint:revive,stylecheck + _, req, err = r.createHttpRequest(operation) + return +} + +// Submit a request and when there is a body on success it will turn that into the result +// all other things are turned into an api error for swagger which retains the status code +func (r *Runtime) Submit(operation *runtime.ClientOperation) (interface{}, error) { + _, readResponse, _ := operation.Params, operation.Reader, operation.AuthInfo + + request, req, err := r.createHttpRequest(operation) + if err != nil { + return nil, err + } + + r.clientOnce.Do(func() { + r.client = &http.Client{ + Transport: r.Transport, + Jar: r.Jar, + } + }) + + if r.Debug { + b, err2 := httputil.DumpRequestOut(req, true) + if err2 != nil { + return nil, err2 + } + r.logger.Debugf("%s\n", string(b)) + } + + var parentCtx context.Context + switch { + case operation.Context != nil: + parentCtx = operation.Context + case r.Context != nil: + parentCtx = r.Context + default: + parentCtx = context.Background() + } + + var ( + ctx context.Context + cancel context.CancelFunc + ) + if request.timeout == 0 { + // There may be a deadline in the context passed to the operation. + // Otherwise, there is no timeout set. + ctx, cancel = context.WithCancel(parentCtx) + } else { + // Sets the timeout passed from request params (by default runtime.DefaultTimeout). + // If there is already a deadline in the parent context, the shortest will + // apply. 
+ ctx, cancel = context.WithTimeout(parentCtx, request.timeout) + } + defer cancel() + + var client *http.Client + if operation.Client != nil { + client = operation.Client + } else { + client = r.client + } + req = req.WithContext(ctx) + res, err := client.Do(req) // make requests, by default follows 10 redirects before failing + if err != nil { + return nil, err + } + defer res.Body.Close() + + ct := res.Header.Get(runtime.HeaderContentType) + if ct == "" { // this should really never occur + ct = r.DefaultMediaType + } + + if r.Debug { + printBody := true + if ct == runtime.DefaultMime { + printBody = false // Spare the terminal from a binary blob. + } + b, err2 := httputil.DumpResponse(res, printBody) + if err2 != nil { + return nil, err2 + } + r.logger.Debugf("%s\n", string(b)) + } + + mt, _, err := mime.ParseMediaType(ct) + if err != nil { + return nil, fmt.Errorf("parse content type: %s", err) + } + + cons, ok := r.Consumers[mt] + if !ok { + if cons, ok = r.Consumers["*/*"]; !ok { + // scream about not knowing what to do + return nil, fmt.Errorf("no consumer: %q", ct) + } + } + return readResponse.ReadResponse(r.response(res), cons) +} + +// SetDebug changes the debug flag. +// It ensures that client and middlewares have the set debug level. +func (r *Runtime) SetDebug(debug bool) { + r.Debug = debug + middleware.Debug = debug +} + +// SetLogger changes the logger stream. +// It ensures that client and middlewares use the same logger. +func (r *Runtime) SetLogger(logger logger.Logger) { + r.logger = logger + middleware.Logger = logger +} + +type ClientResponseFunc = func(*http.Response) runtime.ClientResponse //nolint:revive + +// SetResponseReader changes the response reader implementation. 
+func (r *Runtime) SetResponseReader(f ClientResponseFunc) { + if f == nil { + return + } + r.response = f +} diff --git a/vendor/github.com/go-openapi/runtime/client_auth_info.go b/vendor/github.com/go-openapi/runtime/client_auth_info.go new file mode 100644 index 0000000..c6c97d9 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client_auth_info.go @@ -0,0 +1,30 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import "github.com/go-openapi/strfmt" + +// A ClientAuthInfoWriterFunc converts a function to a request writer interface +type ClientAuthInfoWriterFunc func(ClientRequest, strfmt.Registry) error + +// AuthenticateRequest adds authentication data to the request +func (fn ClientAuthInfoWriterFunc) AuthenticateRequest(req ClientRequest, reg strfmt.Registry) error { + return fn(req, reg) +} + +// A ClientAuthInfoWriter implementor knows how to write authentication info to a request +type ClientAuthInfoWriter interface { + AuthenticateRequest(ClientRequest, strfmt.Registry) error +} diff --git a/vendor/github.com/go-openapi/runtime/client_operation.go b/vendor/github.com/go-openapi/runtime/client_operation.go new file mode 100644 index 0000000..5a5d635 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client_operation.go @@ -0,0 +1,41 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this 
file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "context" + "net/http" +) + +// ClientOperation represents the context for a swagger operation to be submitted to the transport +type ClientOperation struct { + ID string + Method string + PathPattern string + ProducesMediaTypes []string + ConsumesMediaTypes []string + Schemes []string + AuthInfo ClientAuthInfoWriter + Params ClientRequestWriter + Reader ClientResponseReader + Context context.Context //nolint:containedctx // we precisely want this type to contain the request context + Client *http.Client +} + +// A ClientTransport implementor knows how to submit Request objects to some destination +type ClientTransport interface { + // Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error) + Submit(*ClientOperation) (interface{}, error) +} diff --git a/vendor/github.com/go-openapi/runtime/client_request.go b/vendor/github.com/go-openapi/runtime/client_request.go new file mode 100644 index 0000000..4ebb2de --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client_request.go @@ -0,0 +1,152 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "io" + "net/http" + "net/url" + "time" + + "github.com/go-openapi/strfmt" +) + +// ClientRequestWriterFunc converts a function to a request writer interface +type ClientRequestWriterFunc func(ClientRequest, strfmt.Registry) error + +// WriteToRequest adds data to the request +func (fn ClientRequestWriterFunc) WriteToRequest(req ClientRequest, reg strfmt.Registry) error { + return fn(req, reg) +} + +// ClientRequestWriter is an interface for things that know how to write to a request +type ClientRequestWriter interface { + WriteToRequest(ClientRequest, strfmt.Registry) error +} + +// ClientRequest is an interface for things that know how to +// add information to a swagger client request. 
+type ClientRequest interface { //nolint:interfacebloat // a swagger-capable request is quite rich, hence the many getter/setters + SetHeaderParam(string, ...string) error + + GetHeaderParams() http.Header + + SetQueryParam(string, ...string) error + + SetFormParam(string, ...string) error + + SetPathParam(string, string) error + + GetQueryParams() url.Values + + SetFileParam(string, ...NamedReadCloser) error + + SetBodyParam(interface{}) error + + SetTimeout(time.Duration) error + + GetMethod() string + + GetPath() string + + GetBody() []byte + + GetBodyParam() interface{} + + GetFileParam() map[string][]NamedReadCloser +} + +// NamedReadCloser represents a named ReadCloser interface +type NamedReadCloser interface { + io.ReadCloser + Name() string +} + +// NamedReader creates a NamedReadCloser for use as file upload +func NamedReader(name string, rdr io.Reader) NamedReadCloser { + rc, ok := rdr.(io.ReadCloser) + if !ok { + rc = io.NopCloser(rdr) + } + return &namedReadCloser{ + name: name, + cr: rc, + } +} + +type namedReadCloser struct { + name string + cr io.ReadCloser +} + +func (n *namedReadCloser) Close() error { + return n.cr.Close() +} +func (n *namedReadCloser) Read(p []byte) (int, error) { + return n.cr.Read(p) +} +func (n *namedReadCloser) Name() string { + return n.name +} + +type TestClientRequest struct { + Headers http.Header + Body interface{} +} + +func (t *TestClientRequest) SetHeaderParam(name string, values ...string) error { + if t.Headers == nil { + t.Headers = make(http.Header) + } + t.Headers.Set(name, values[0]) + return nil +} + +func (t *TestClientRequest) SetQueryParam(_ string, _ ...string) error { return nil } + +func (t *TestClientRequest) SetFormParam(_ string, _ ...string) error { return nil } + +func (t *TestClientRequest) SetPathParam(_ string, _ string) error { return nil } + +func (t *TestClientRequest) SetFileParam(_ string, _ ...NamedReadCloser) error { return nil } + +func (t *TestClientRequest) SetBodyParam(body 
interface{}) error { + t.Body = body + return nil +} + +func (t *TestClientRequest) SetTimeout(time.Duration) error { + return nil +} + +func (t *TestClientRequest) GetQueryParams() url.Values { return nil } + +func (t *TestClientRequest) GetMethod() string { return "" } + +func (t *TestClientRequest) GetPath() string { return "" } + +func (t *TestClientRequest) GetBody() []byte { return nil } + +func (t *TestClientRequest) GetBodyParam() interface{} { + return t.Body +} + +func (t *TestClientRequest) GetFileParam() map[string][]NamedReadCloser { + return nil +} + +func (t *TestClientRequest) GetHeaderParams() http.Header { + return t.Headers +} diff --git a/vendor/github.com/go-openapi/runtime/client_response.go b/vendor/github.com/go-openapi/runtime/client_response.go new file mode 100644 index 0000000..0d16911 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/client_response.go @@ -0,0 +1,110 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +import ( + "encoding/json" + "fmt" + "io" +) + +// A ClientResponse represents a client response +// This bridges between responses obtained from different transports +type ClientResponse interface { + Code() int + Message() string + GetHeader(string) string + GetHeaders(string) []string + Body() io.ReadCloser +} + +// A ClientResponseReaderFunc turns a function into a ClientResponseReader interface implementation +type ClientResponseReaderFunc func(ClientResponse, Consumer) (interface{}, error) + +// ReadResponse reads the response +func (read ClientResponseReaderFunc) ReadResponse(resp ClientResponse, consumer Consumer) (interface{}, error) { + return read(resp, consumer) +} + +// A ClientResponseReader is an interface for things want to read a response. +// An application of this is to create structs from response values +type ClientResponseReader interface { + ReadResponse(ClientResponse, Consumer) (interface{}, error) +} + +// NewAPIError creates a new API error +func NewAPIError(opName string, payload interface{}, code int) *APIError { + return &APIError{ + OperationName: opName, + Response: payload, + Code: code, + } +} + +// APIError wraps an error model and captures the status code +type APIError struct { + OperationName string + Response interface{} + Code int +} + +func (o *APIError) Error() string { + var resp []byte + if err, ok := o.Response.(error); ok { + resp = []byte("'" + err.Error() + "'") + } else { + resp, _ = json.Marshal(o.Response) + } + return fmt.Sprintf("%s (status %d): %s", o.OperationName, o.Code, resp) +} + +func (o *APIError) String() string { + return o.Error() +} + +// IsSuccess returns true when this elapse o k response returns a 2xx status code +func (o *APIError) IsSuccess() bool { + return o.Code/100 == 2 +} + +// IsRedirect returns true when this elapse o k response returns a 3xx status code +func (o *APIError) IsRedirect() bool { + return o.Code/100 == 3 +} + +// IsClientError returns true when this 
elapse o k response returns a 4xx status code +func (o *APIError) IsClientError() bool { + return o.Code/100 == 4 +} + +// IsServerError returns true when this elapse o k response returns a 5xx status code +func (o *APIError) IsServerError() bool { + return o.Code/100 == 5 +} + +// IsCode returns true when this elapse o k response returns a 4xx status code +func (o *APIError) IsCode(code int) bool { + return o.Code == code +} + +// A ClientResponseStatus is a common interface implemented by all responses on the generated code +// You can use this to treat any client response based on status code +type ClientResponseStatus interface { + IsSuccess() bool + IsRedirect() bool + IsClientError() bool + IsServerError() bool + IsCode(int) bool +} diff --git a/vendor/github.com/go-openapi/runtime/constants.go b/vendor/github.com/go-openapi/runtime/constants.go new file mode 100644 index 0000000..5159692 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/constants.go @@ -0,0 +1,49 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +const ( + // HeaderContentType represents a http content-type header, it's value is supposed to be a mime type + HeaderContentType = "Content-Type" + + // HeaderTransferEncoding represents a http transfer-encoding header. 
+ HeaderTransferEncoding = "Transfer-Encoding" + + // HeaderAccept the Accept header + HeaderAccept = "Accept" + // HeaderAuthorization the Authorization header + HeaderAuthorization = "Authorization" + + charsetKey = "charset" + + // DefaultMime the default fallback mime type + DefaultMime = "application/octet-stream" + // JSONMime the json mime type + JSONMime = "application/json" + // YAMLMime the yaml mime type + YAMLMime = "application/x-yaml" + // XMLMime the xml mime type + XMLMime = "application/xml" + // TextMime the text mime type + TextMime = "text/plain" + // HTMLMime the html mime type + HTMLMime = "text/html" + // CSVMime the csv mime type + CSVMime = "text/csv" + // MultipartFormMime the multipart form mime type + MultipartFormMime = "multipart/form-data" + // URLencodedFormMime the url encoded form mime type + URLencodedFormMime = "application/x-www-form-urlencoded" +) diff --git a/vendor/github.com/go-openapi/runtime/csv.go b/vendor/github.com/go-openapi/runtime/csv.go new file mode 100644 index 0000000..c9597bc --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/csv.go @@ -0,0 +1,350 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "bytes" + "context" + "encoding" + "encoding/csv" + "errors" + "fmt" + "io" + "reflect" + + "golang.org/x/sync/errgroup" +) + +// CSVConsumer creates a new CSV consumer. 
+// +// The consumer consumes CSV records from a provided reader into the data passed by reference. +// +// CSVOpts options may be specified to alter the default CSV behavior on the reader and the writer side (e.g. separator, skip header, ...). +// The defaults are those of the standard library's csv.Reader and csv.Writer. +// +// Supported output underlying types and interfaces, prioritized in this order: +// - *csv.Writer +// - CSVWriter (writer options are ignored) +// - io.Writer (as raw bytes) +// - io.ReaderFrom (as raw bytes) +// - encoding.BinaryUnmarshaler (as raw bytes) +// - *[][]string (as a collection of records) +// - *[]byte (as raw bytes) +// - *string (a raw bytes) +// +// The consumer prioritizes situations where buffering the input is not required. +func CSVConsumer(opts ...CSVOpt) Consumer { + o := csvOptsWithDefaults(opts) + + return ConsumerFunc(func(reader io.Reader, data interface{}) error { + if reader == nil { + return errors.New("CSVConsumer requires a reader") + } + if data == nil { + return errors.New("nil destination for CSVConsumer") + } + + csvReader := csv.NewReader(reader) + o.applyToReader(csvReader) + closer := defaultCloser + if o.closeStream { + if cl, isReaderCloser := reader.(io.Closer); isReaderCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + switch destination := data.(type) { + case *csv.Writer: + csvWriter := destination + o.applyToWriter(csvWriter) + + return pipeCSV(csvWriter, csvReader, o) + + case CSVWriter: + csvWriter := destination + // no writer options available + + return pipeCSV(csvWriter, csvReader, o) + + case io.Writer: + csvWriter := csv.NewWriter(destination) + o.applyToWriter(csvWriter) + + return pipeCSV(csvWriter, csvReader, o) + + case io.ReaderFrom: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + _, err := destination.ReadFrom(&buf) + + return err + + 
case encoding.BinaryUnmarshaler: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + + return destination.UnmarshalBinary(buf.Bytes()) + + default: + // support *[][]string, *[]byte, *string + if ptr := reflect.TypeOf(data); ptr.Kind() != reflect.Ptr { + return errors.New("destination must be a pointer") + } + + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String: + csvWriter := &csvRecordsWriter{} + // writer options are ignored + if err := pipeCSV(csvWriter, csvReader, o); err != nil { + return err + } + + v.Grow(len(csvWriter.records)) + v.SetCap(len(csvWriter.records)) // in case Grow was unnessary, trim down the capacity + v.SetLen(len(csvWriter.records)) + reflect.Copy(v, reflect.ValueOf(csvWriter.records)) + + return nil + + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + v.SetBytes(buf.Bytes()) + + return nil + + case t.Kind() == reflect.String: + var buf bytes.Buffer + csvWriter := csv.NewWriter(&buf) + o.applyToWriter(csvWriter) + if err := bufferedCSV(csvWriter, csvReader, o); err != nil { + return err + } + v.SetString(buf.String()) + + return nil + + default: + return fmt.Errorf("%v (%T) is not supported by the CSVConsumer, %s", + data, data, "can be resolved by supporting CSVWriter/Writer/BinaryUnmarshaler interface", + ) + } + } + }) +} + +// CSVProducer creates a new CSV producer. +// +// The producer takes input data then writes as CSV to an output writer (essentially as a pipe). 
+// +// Supported input underlying types and interfaces, prioritized in this order: +// - *csv.Reader +// - CSVReader (reader options are ignored) +// - io.Reader +// - io.WriterTo +// - encoding.BinaryMarshaler +// - [][]string +// - []byte +// - string +// +// The producer prioritizes situations where buffering the input is not required. +func CSVProducer(opts ...CSVOpt) Producer { + o := csvOptsWithDefaults(opts) + + return ProducerFunc(func(writer io.Writer, data interface{}) error { + if writer == nil { + return errors.New("CSVProducer requires a writer") + } + if data == nil { + return errors.New("nil data for CSVProducer") + } + + csvWriter := csv.NewWriter(writer) + o.applyToWriter(csvWriter) + closer := defaultCloser + if o.closeStream { + if cl, isWriterCloser := writer.(io.Closer); isWriterCloser { + closer = cl.Close + } + } + defer func() { + _ = closer() + }() + + if rc, isDataCloser := data.(io.ReadCloser); isDataCloser { + defer rc.Close() + } + + switch origin := data.(type) { + case *csv.Reader: + csvReader := origin + o.applyToReader(csvReader) + + return pipeCSV(csvWriter, csvReader, o) + + case CSVReader: + csvReader := origin + // no reader options available + + return pipeCSV(csvWriter, csvReader, o) + + case io.Reader: + csvReader := csv.NewReader(origin) + o.applyToReader(csvReader) + + return pipeCSV(csvWriter, csvReader, o) + + case io.WriterTo: + // async piping of the writes performed by WriteTo + r, w := io.Pipe() + csvReader := csv.NewReader(r) + o.applyToReader(csvReader) + + pipe, _ := errgroup.WithContext(context.Background()) + pipe.Go(func() error { + _, err := origin.WriteTo(w) + _ = w.Close() + return err + }) + + pipe.Go(func() error { + defer func() { + _ = r.Close() + }() + + return pipeCSV(csvWriter, csvReader, o) + }) + + return pipe.Wait() + + case encoding.BinaryMarshaler: + buf, err := origin.MarshalBinary() + if err != nil { + return err + } + rdr := bytes.NewBuffer(buf) + csvReader := csv.NewReader(rdr) + + return 
bufferedCSV(csvWriter, csvReader, o) + + default: + // support [][]string, []byte, string (or pointers to those) + v := reflect.Indirect(reflect.ValueOf(data)) + t := v.Type() + + switch { + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Slice && t.Elem().Elem().Kind() == reflect.String: + csvReader := &csvRecordsWriter{ + records: make([][]string, v.Len()), + } + reflect.Copy(reflect.ValueOf(csvReader.records), v) + + return pipeCSV(csvWriter, csvReader, o) + + case t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8: + buf := bytes.NewBuffer(v.Bytes()) + csvReader := csv.NewReader(buf) + o.applyToReader(csvReader) + + return bufferedCSV(csvWriter, csvReader, o) + + case t.Kind() == reflect.String: + buf := bytes.NewBufferString(v.String()) + csvReader := csv.NewReader(buf) + o.applyToReader(csvReader) + + return bufferedCSV(csvWriter, csvReader, o) + + default: + return fmt.Errorf("%v (%T) is not supported by the CSVProducer, %s", + data, data, "can be resolved by supporting CSVReader/Reader/BinaryMarshaler interface", + ) + } + } + }) +} + +// pipeCSV copies CSV records from a CSV reader to a CSV writer +func pipeCSV(csvWriter CSVWriter, csvReader CSVReader, opts csvOpts) error { + for ; opts.skippedLines > 0; opts.skippedLines-- { + _, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + + return err + } + } + + for { + record, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + + return err + } + + if err := csvWriter.Write(record); err != nil { + return err + } + } + + csvWriter.Flush() + + return csvWriter.Error() +} + +// bufferedCSV copies CSV records from a CSV reader to a CSV writer, +// by first reading all records then writing them at once. 
+func bufferedCSV(csvWriter *csv.Writer, csvReader *csv.Reader, opts csvOpts) error { + for ; opts.skippedLines > 0; opts.skippedLines-- { + _, err := csvReader.Read() + if err != nil { + if errors.Is(err, io.EOF) { + return nil + } + + return err + } + } + + records, err := csvReader.ReadAll() + if err != nil { + return err + } + + return csvWriter.WriteAll(records) +} diff --git a/vendor/github.com/go-openapi/runtime/csv_options.go b/vendor/github.com/go-openapi/runtime/csv_options.go new file mode 100644 index 0000000..c16464c --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/csv_options.go @@ -0,0 +1,121 @@ +package runtime + +import ( + "encoding/csv" + "io" +) + +// CSVOpts alter the behavior of the CSV consumer or producer. +type CSVOpt func(*csvOpts) + +type csvOpts struct { + csvReader csv.Reader + csvWriter csv.Writer + skippedLines int + closeStream bool +} + +// WithCSVReaderOpts specifies the options to csv.Reader +// when reading CSV. +func WithCSVReaderOpts(reader csv.Reader) CSVOpt { + return func(o *csvOpts) { + o.csvReader = reader + } +} + +// WithCSVWriterOpts specifies the options to csv.Writer +// when writing CSV. +func WithCSVWriterOpts(writer csv.Writer) CSVOpt { + return func(o *csvOpts) { + o.csvWriter = writer + } +} + +// WithCSVSkipLines will skip header lines. 
+func WithCSVSkipLines(skipped int) CSVOpt { + return func(o *csvOpts) { + o.skippedLines = skipped + } +} + +func WithCSVClosesStream() CSVOpt { + return func(o *csvOpts) { + o.closeStream = true + } +} + +func (o csvOpts) applyToReader(in *csv.Reader) { + if o.csvReader.Comma != 0 { + in.Comma = o.csvReader.Comma + } + if o.csvReader.Comment != 0 { + in.Comment = o.csvReader.Comment + } + if o.csvReader.FieldsPerRecord != 0 { + in.FieldsPerRecord = o.csvReader.FieldsPerRecord + } + + in.LazyQuotes = o.csvReader.LazyQuotes + in.TrimLeadingSpace = o.csvReader.TrimLeadingSpace + in.ReuseRecord = o.csvReader.ReuseRecord +} + +func (o csvOpts) applyToWriter(in *csv.Writer) { + if o.csvWriter.Comma != 0 { + in.Comma = o.csvWriter.Comma + } + in.UseCRLF = o.csvWriter.UseCRLF +} + +func csvOptsWithDefaults(opts []CSVOpt) csvOpts { + var o csvOpts + for _, apply := range opts { + apply(&o) + } + + return o +} + +type CSVWriter interface { + Write([]string) error + Flush() + Error() error +} + +type CSVReader interface { + Read() ([]string, error) +} + +var ( + _ CSVWriter = &csvRecordsWriter{} + _ CSVReader = &csvRecordsWriter{} +) + +// csvRecordsWriter is an internal container to move CSV records back and forth +type csvRecordsWriter struct { + i int + records [][]string +} + +func (w *csvRecordsWriter) Write(record []string) error { + w.records = append(w.records, record) + + return nil +} + +func (w *csvRecordsWriter) Read() ([]string, error) { + if w.i >= len(w.records) { + return nil, io.EOF + } + defer func() { + w.i++ + }() + + return w.records[w.i], nil +} + +func (w *csvRecordsWriter) Flush() {} + +func (w *csvRecordsWriter) Error() error { + return nil +} diff --git a/vendor/github.com/go-openapi/runtime/discard.go b/vendor/github.com/go-openapi/runtime/discard.go new file mode 100644 index 0000000..0d390cf --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/discard.go @@ -0,0 +1,9 @@ +package runtime + +import "io" + +// DiscardConsumer does absolutely 
nothing, it's a black hole. +var DiscardConsumer = ConsumerFunc(func(_ io.Reader, _ interface{}) error { return nil }) + +// DiscardProducer does absolutely nothing, it's a black hole. +var DiscardProducer = ProducerFunc(func(_ io.Writer, _ interface{}) error { return nil }) diff --git a/vendor/github.com/go-openapi/runtime/file.go b/vendor/github.com/go-openapi/runtime/file.go new file mode 100644 index 0000000..397d8a4 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/file.go @@ -0,0 +1,19 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import "github.com/go-openapi/swag" + +type File = swag.File diff --git a/vendor/github.com/go-openapi/runtime/headers.go b/vendor/github.com/go-openapi/runtime/headers.go new file mode 100644 index 0000000..4d111db --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/headers.go @@ -0,0 +1,45 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "mime" + "net/http" + + "github.com/go-openapi/errors" +) + +// ContentType parses a content type header +func ContentType(headers http.Header) (string, string, error) { + ct := headers.Get(HeaderContentType) + orig := ct + if ct == "" { + ct = DefaultMime + } + if ct == "" { + return "", "", nil + } + + mt, opts, err := mime.ParseMediaType(ct) + if err != nil { + return "", "", errors.NewParseError(HeaderContentType, "header", orig, err) + } + + if cs, ok := opts[charsetKey]; ok { + return mt, cs, nil + } + + return mt, "", nil +} diff --git a/vendor/github.com/go-openapi/runtime/interfaces.go b/vendor/github.com/go-openapi/runtime/interfaces.go new file mode 100644 index 0000000..e334128 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/interfaces.go @@ -0,0 +1,112 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +import ( + "context" + "io" + "net/http" + + "github.com/go-openapi/strfmt" +) + +// OperationHandlerFunc an adapter for a function to the OperationHandler interface +type OperationHandlerFunc func(interface{}) (interface{}, error) + +// Handle implements the operation handler interface +func (s OperationHandlerFunc) Handle(data interface{}) (interface{}, error) { + return s(data) +} + +// OperationHandler a handler for a swagger operation +type OperationHandler interface { + Handle(interface{}) (interface{}, error) +} + +// ConsumerFunc represents a function that can be used as a consumer +type ConsumerFunc func(io.Reader, interface{}) error + +// Consume consumes the reader into the data parameter +func (fn ConsumerFunc) Consume(reader io.Reader, data interface{}) error { + return fn(reader, data) +} + +// Consumer implementations know how to bind the values on the provided interface to +// data provided by the request body +type Consumer interface { + // Consume performs the binding of request values + Consume(io.Reader, interface{}) error +} + +// ProducerFunc represents a function that can be used as a producer +type ProducerFunc func(io.Writer, interface{}) error + +// Produce produces the response for the provided data +func (f ProducerFunc) Produce(writer io.Writer, data interface{}) error { + return f(writer, data) +} + +// Producer implementations know how to turn the provided interface into a valid +// HTTP response +type Producer interface { + // Produce writes to the http response + Produce(io.Writer, interface{}) error +} + +// AuthenticatorFunc turns a function into an authenticator +type AuthenticatorFunc func(interface{}) (bool, interface{}, error) + +// Authenticate authenticates the request with the provided data +func (f AuthenticatorFunc) Authenticate(params interface{}) (bool, interface{}, error) { + return f(params) +} + +// Authenticator represents an authentication strategy +// implementations of Authenticator know how 
to authenticate the +// request data and translate that into a valid principal object or an error +type Authenticator interface { + Authenticate(interface{}) (bool, interface{}, error) +} + +// AuthorizerFunc turns a function into an authorizer +type AuthorizerFunc func(*http.Request, interface{}) error + +// Authorize authorizes the processing of the request for the principal +func (f AuthorizerFunc) Authorize(r *http.Request, principal interface{}) error { + return f(r, principal) +} + +// Authorizer represents an authorization strategy +// implementations of Authorizer know how to authorize the principal object +// using the request data and returns error if unauthorized +type Authorizer interface { + Authorize(*http.Request, interface{}) error +} + +// Validatable types implementing this interface allow customizing their validation +// this will be used instead of the reflective validation based on the spec document. +// the implementations are assumed to have been generated by the swagger tool so they should +// contain all the validations obtained from the spec +type Validatable interface { + Validate(strfmt.Registry) error +} + +// ContextValidatable types implementing this interface allow customizing their validation +// this will be used instead of the reflective validation based on the spec document. 
+// the implementations are assumed to have been generated by the swagger tool so they should +// contain all the context validations obtained from the spec +type ContextValidatable interface { + ContextValidate(context.Context, strfmt.Registry) error +} diff --git a/vendor/github.com/go-openapi/runtime/json.go b/vendor/github.com/go-openapi/runtime/json.go new file mode 100644 index 0000000..5a69055 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/json.go @@ -0,0 +1,38 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +import ( + "encoding/json" + "io" +) + +// JSONConsumer creates a new JSON consumer +func JSONConsumer() Consumer { + return ConsumerFunc(func(reader io.Reader, data interface{}) error { + dec := json.NewDecoder(reader) + dec.UseNumber() // preserve number formats + return dec.Decode(data) + }) +} + +// JSONProducer creates a new JSON producer +func JSONProducer() Producer { + return ProducerFunc(func(writer io.Writer, data interface{}) error { + enc := json.NewEncoder(writer) + enc.SetEscapeHTML(false) + return enc.Encode(data) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/logger/logger.go b/vendor/github.com/go-openapi/runtime/logger/logger.go new file mode 100644 index 0000000..6f4debc --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/logger/logger.go @@ -0,0 +1,20 @@ +package logger + +import "os" + +type Logger interface { + Printf(format string, args ...interface{}) + Debugf(format string, args ...interface{}) +} + +func DebugEnabled() bool { + d := os.Getenv("SWAGGER_DEBUG") + if d != "" && d != "false" && d != "0" { + return true + } + d = os.Getenv("DEBUG") + if d != "" && d != "false" && d != "0" { + return true + } + return false +} diff --git a/vendor/github.com/go-openapi/runtime/logger/standard.go b/vendor/github.com/go-openapi/runtime/logger/standard.go new file mode 100644 index 0000000..30035a7 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/logger/standard.go @@ -0,0 +1,24 @@ +package logger + +import ( + "fmt" + "os" +) + +var _ Logger = StandardLogger{} + +type StandardLogger struct{} + +func (StandardLogger) Printf(format string, args ...interface{}) { + if len(format) == 0 || format[len(format)-1] != '\n' { + format += "\n" + } + fmt.Fprintf(os.Stderr, format, args...) +} + +func (StandardLogger) Debugf(format string, args ...interface{}) { + if len(format) == 0 || format[len(format)-1] != '\n' { + format += "\n" + } + fmt.Fprintf(os.Stderr, format, args...) 
+} diff --git a/vendor/github.com/go-openapi/runtime/middleware/context.go b/vendor/github.com/go-openapi/runtime/middleware/context.go new file mode 100644 index 0000000..44cecf1 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/context.go @@ -0,0 +1,722 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package middleware + +import ( + stdContext "context" + "fmt" + "net/http" + "net/url" + "path" + "strings" + "sync" + + "github.com/go-openapi/analysis" + "github.com/go-openapi/errors" + "github.com/go-openapi/loads" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/runtime/logger" + "github.com/go-openapi/runtime/middleware/untyped" + "github.com/go-openapi/runtime/security" +) + +// Debug when true turns on verbose logging +var Debug = logger.DebugEnabled() + +// Logger is the standard libray logger used for printing debug messages +var Logger logger.Logger = logger.StandardLogger{} + +func debugLogfFunc(lg logger.Logger) func(string, ...any) { + if logger.DebugEnabled() { + if lg == nil { + return Logger.Debugf + } + + return lg.Debugf + } + + // muted logger + return func(_ string, _ ...any) {} +} + +// A Builder can create middlewares +type Builder func(http.Handler) http.Handler + +// PassthroughBuilder returns the handler, aka the builder identity function +func PassthroughBuilder(handler http.Handler) 
http.Handler { return handler } + +// RequestBinder is an interface for types to implement +// when they want to be able to bind from a request +type RequestBinder interface { + BindRequest(*http.Request, *MatchedRoute) error +} + +// Responder is an interface for types to implement +// when they want to be considered for writing HTTP responses +type Responder interface { + WriteResponse(http.ResponseWriter, runtime.Producer) +} + +// ResponderFunc wraps a func as a Responder interface +type ResponderFunc func(http.ResponseWriter, runtime.Producer) + +// WriteResponse writes to the response +func (fn ResponderFunc) WriteResponse(rw http.ResponseWriter, pr runtime.Producer) { + fn(rw, pr) +} + +// Context is a type safe wrapper around an untyped request context +// used throughout to store request context with the standard context attached +// to the http.Request +type Context struct { + spec *loads.Document + analyzer *analysis.Spec + api RoutableAPI + router Router + debugLogf func(string, ...any) // a logging function to debug context and all components using it +} + +type routableUntypedAPI struct { + api *untyped.API + hlock *sync.Mutex + handlers map[string]map[string]http.Handler + defaultConsumes string + defaultProduces string +} + +func newRoutableUntypedAPI(spec *loads.Document, api *untyped.API, context *Context) *routableUntypedAPI { + var handlers map[string]map[string]http.Handler + if spec == nil || api == nil { + return nil + } + analyzer := analysis.New(spec.Spec()) + for method, hls := range analyzer.Operations() { + um := strings.ToUpper(method) + for path, op := range hls { + schemes := analyzer.SecurityRequirementsFor(op) + + if oh, ok := api.OperationHandlerFor(method, path); ok { + if handlers == nil { + handlers = make(map[string]map[string]http.Handler) + } + if b, ok := handlers[um]; !ok || b == nil { + handlers[um] = make(map[string]http.Handler) + } + + var handler http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r 
*http.Request) { + // lookup route info in the context + route, rCtx, _ := context.RouteInfo(r) + if rCtx != nil { + r = rCtx + } + + // bind and validate the request using reflection + var bound interface{} + var validation error + bound, r, validation = context.BindAndValidate(r, route) + if validation != nil { + context.Respond(w, r, route.Produces, route, validation) + return + } + + // actually handle the request + result, err := oh.Handle(bound) + if err != nil { + // respond with failure + context.Respond(w, r, route.Produces, route, err) + return + } + + // respond with success + context.Respond(w, r, route.Produces, route, result) + }) + + if len(schemes) > 0 { + handler = newSecureAPI(context, handler) + } + handlers[um][path] = handler + } + } + } + + return &routableUntypedAPI{ + api: api, + hlock: new(sync.Mutex), + handlers: handlers, + defaultProduces: api.DefaultProduces, + defaultConsumes: api.DefaultConsumes, + } +} + +func (r *routableUntypedAPI) HandlerFor(method, path string) (http.Handler, bool) { + r.hlock.Lock() + paths, ok := r.handlers[strings.ToUpper(method)] + if !ok { + r.hlock.Unlock() + return nil, false + } + handler, ok := paths[path] + r.hlock.Unlock() + return handler, ok +} +func (r *routableUntypedAPI) ServeErrorFor(_ string) func(http.ResponseWriter, *http.Request, error) { + return r.api.ServeError +} +func (r *routableUntypedAPI) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer { + return r.api.ConsumersFor(mediaTypes) +} +func (r *routableUntypedAPI) ProducersFor(mediaTypes []string) map[string]runtime.Producer { + return r.api.ProducersFor(mediaTypes) +} +func (r *routableUntypedAPI) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator { + return r.api.AuthenticatorsFor(schemes) +} +func (r *routableUntypedAPI) Authorizer() runtime.Authorizer { + return r.api.Authorizer() +} +func (r *routableUntypedAPI) Formats() strfmt.Registry { + return r.api.Formats() +} + +func (r 
*routableUntypedAPI) DefaultProduces() string { + return r.defaultProduces +} + +func (r *routableUntypedAPI) DefaultConsumes() string { + return r.defaultConsumes +} + +// NewRoutableContext creates a new context for a routable API. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. +func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Router) *Context { + var an *analysis.Spec + if spec != nil { + an = analysis.New(spec.Spec()) + } + + return NewRoutableContextWithAnalyzedSpec(spec, an, routableAPI, routes) +} + +// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but takes as input an already analysed spec. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. +func NewRoutableContextWithAnalyzedSpec(spec *loads.Document, an *analysis.Spec, routableAPI RoutableAPI, routes Router) *Context { + // Either there are no spec doc and analysis, or both of them. + if !((spec == nil && an == nil) || (spec != nil && an != nil)) { + panic(errors.New(http.StatusInternalServerError, "routable context requires either both spec doc and analysis, or none of them")) + } + + return &Context{ + spec: spec, + api: routableAPI, + analyzer: an, + router: routes, + debugLogf: debugLogfFunc(nil), + } +} + +// NewContext creates a new context wrapper. +// +// If a nil Router is provided, the DefaultRouter (denco-based) will be used. 
+func NewContext(spec *loads.Document, api *untyped.API, routes Router) *Context { + var an *analysis.Spec + if spec != nil { + an = analysis.New(spec.Spec()) + } + ctx := &Context{ + spec: spec, + analyzer: an, + router: routes, + debugLogf: debugLogfFunc(nil), + } + ctx.api = newRoutableUntypedAPI(spec, api, ctx) + + return ctx +} + +// Serve serves the specified spec with the specified api registrations as a http.Handler +func Serve(spec *loads.Document, api *untyped.API) http.Handler { + return ServeWithBuilder(spec, api, PassthroughBuilder) +} + +// ServeWithBuilder serves the specified spec with the specified api registrations as a http.Handler that is decorated +// by the Builder +func ServeWithBuilder(spec *loads.Document, api *untyped.API, builder Builder) http.Handler { + context := NewContext(spec, api, nil) + return context.APIHandler(builder) +} + +type contextKey int8 + +const ( + _ contextKey = iota + ctxContentType + ctxResponseFormat + ctxMatchedRoute + ctxBoundParams + ctxSecurityPrincipal + ctxSecurityScopes +) + +// MatchedRouteFrom request context value. +func MatchedRouteFrom(req *http.Request) *MatchedRoute { + mr := req.Context().Value(ctxMatchedRoute) + if mr == nil { + return nil + } + if res, ok := mr.(*MatchedRoute); ok { + return res + } + return nil +} + +// SecurityPrincipalFrom request context value. +func SecurityPrincipalFrom(req *http.Request) interface{} { + return req.Context().Value(ctxSecurityPrincipal) +} + +// SecurityScopesFrom request context value. +func SecurityScopesFrom(req *http.Request) []string { + rs := req.Context().Value(ctxSecurityScopes) + if res, ok := rs.([]string); ok { + return res + } + return nil +} + +type contentTypeValue struct { + MediaType string + Charset string +} + +// BasePath returns the base path for this API +func (c *Context) BasePath() string { + return c.spec.BasePath() +} + +// SetLogger allows for injecting a logger to catch debug entries. +// +// The logger is enabled in DEBUG mode only. 
+func (c *Context) SetLogger(lg logger.Logger) { + c.debugLogf = debugLogfFunc(lg) +} + +// RequiredProduces returns the accepted content types for responses +func (c *Context) RequiredProduces() []string { + return c.analyzer.RequiredProduces() +} + +// BindValidRequest binds a params object to a request but only when the request is valid +// if the request is not valid an error will be returned +func (c *Context) BindValidRequest(request *http.Request, route *MatchedRoute, binder RequestBinder) error { + var res []error + var requestContentType string + + // check and validate content type, select consumer + if runtime.HasBody(request) { + ct, _, err := runtime.ContentType(request.Header) + if err != nil { + res = append(res, err) + } else { + c.debugLogf("validating content type for %q against [%s]", ct, strings.Join(route.Consumes, ", ")) + if err := validateContentType(route.Consumes, ct); err != nil { + res = append(res, err) + } + if len(res) == 0 { + cons, ok := route.Consumers[ct] + if !ok { + res = append(res, errors.New(500, "no consumer registered for %s", ct)) + } else { + route.Consumer = cons + requestContentType = ct + } + } + } + } + + // check and validate the response format + if len(res) == 0 { + // if the route does not provide Produces and a default contentType could not be identified + // based on a body, typical for GET and DELETE requests, then default contentType to. 
+ if len(route.Produces) == 0 && requestContentType == "" { + requestContentType = "*/*" + } + + if str := NegotiateContentType(request, route.Produces, requestContentType); str == "" { + res = append(res, errors.InvalidResponseFormat(request.Header.Get(runtime.HeaderAccept), route.Produces)) + } + } + + // now bind the request with the provided binder + // it's assumed the binder will also validate the request and return an error if the + // request is invalid + if binder != nil && len(res) == 0 { + if err := binder.BindRequest(request, route); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +// ContentType gets the parsed value of a content type +// Returns the media type, its charset and a shallow copy of the request +// when its context doesn't contain the content type value, otherwise it returns +// the same request +// Returns the error that runtime.ContentType may retunrs. +func (c *Context) ContentType(request *http.Request) (string, string, *http.Request, error) { + var rCtx = request.Context() + + if v, ok := rCtx.Value(ctxContentType).(*contentTypeValue); ok { + return v.MediaType, v.Charset, request, nil + } + + mt, cs, err := runtime.ContentType(request.Header) + if err != nil { + return "", "", nil, err + } + rCtx = stdContext.WithValue(rCtx, ctxContentType, &contentTypeValue{mt, cs}) + return mt, cs, request.WithContext(rCtx), nil +} + +// LookupRoute looks a route up and returns true when it is found +func (c *Context) LookupRoute(request *http.Request) (*MatchedRoute, bool) { + if route, ok := c.router.Lookup(request.Method, request.URL.EscapedPath()); ok { + return route, ok + } + return nil, false +} + +// RouteInfo tries to match a route for this request +// Returns the matched route, a shallow copy of the request if its context +// contains the matched router, otherwise the same request, and a bool to +// indicate if it the request matches one of the routes, if it 
doesn't +// then it returns false and nil for the other two return values +func (c *Context) RouteInfo(request *http.Request) (*MatchedRoute, *http.Request, bool) { + var rCtx = request.Context() + + if v, ok := rCtx.Value(ctxMatchedRoute).(*MatchedRoute); ok { + return v, request, ok + } + + if route, ok := c.LookupRoute(request); ok { + rCtx = stdContext.WithValue(rCtx, ctxMatchedRoute, route) + return route, request.WithContext(rCtx), ok + } + + return nil, nil, false +} + +// ResponseFormat negotiates the response content type +// Returns the response format and a shallow copy of the request if its context +// doesn't contain the response format, otherwise the same request +func (c *Context) ResponseFormat(r *http.Request, offers []string) (string, *http.Request) { + var rCtx = r.Context() + + if v, ok := rCtx.Value(ctxResponseFormat).(string); ok { + c.debugLogf("[%s %s] found response format %q in context", r.Method, r.URL.Path, v) + return v, r + } + + format := NegotiateContentType(r, offers, "") + if format != "" { + c.debugLogf("[%s %s] set response format %q in context", r.Method, r.URL.Path, format) + r = r.WithContext(stdContext.WithValue(rCtx, ctxResponseFormat, format)) + } + c.debugLogf("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format) + return format, r +} + +// AllowedMethods gets the allowed methods for the path of this request +func (c *Context) AllowedMethods(request *http.Request) []string { + return c.router.OtherMethods(request.Method, request.URL.EscapedPath()) +} + +// ResetAuth removes the current principal from the request context +func (c *Context) ResetAuth(request *http.Request) *http.Request { + rctx := request.Context() + rctx = stdContext.WithValue(rctx, ctxSecurityPrincipal, nil) + rctx = stdContext.WithValue(rctx, ctxSecurityScopes, nil) + return request.WithContext(rctx) +} + +// Authorize authorizes the request +// Returns the principal object and a shallow copy of the request when its +// context doesn't 
contain the principal, otherwise the same request or an error +// (the last) if one of the authenticators returns one or an Unauthenticated error +func (c *Context) Authorize(request *http.Request, route *MatchedRoute) (interface{}, *http.Request, error) { + if route == nil || !route.HasAuth() { + return nil, nil, nil + } + + var rCtx = request.Context() + if v := rCtx.Value(ctxSecurityPrincipal); v != nil { + return v, request, nil + } + + applies, usr, err := route.Authenticators.Authenticate(request, route) + if !applies || err != nil || !route.Authenticators.AllowsAnonymous() && usr == nil { + if err != nil { + return nil, nil, err + } + return nil, nil, errors.Unauthenticated("invalid credentials") + } + if route.Authorizer != nil { + if err := route.Authorizer.Authorize(request, usr); err != nil { + if _, ok := err.(errors.Error); ok { + return nil, nil, err + } + + return nil, nil, errors.New(http.StatusForbidden, err.Error()) + } + } + + rCtx = request.Context() + + rCtx = stdContext.WithValue(rCtx, ctxSecurityPrincipal, usr) + rCtx = stdContext.WithValue(rCtx, ctxSecurityScopes, route.Authenticator.AllScopes()) + return usr, request.WithContext(rCtx), nil +} + +// BindAndValidate binds and validates the request +// Returns the validation map and a shallow copy of the request when its context +// doesn't contain the validation, otherwise it returns the same request or an +// CompositeValidationError error +func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) (interface{}, *http.Request, error) { + var rCtx = request.Context() + + if v, ok := rCtx.Value(ctxBoundParams).(*validation); ok { + c.debugLogf("got cached validation (valid: %t)", len(v.result) == 0) + if len(v.result) > 0 { + return v.bound, request, errors.CompositeValidationError(v.result...) 
+ } + return v.bound, request, nil + } + result := validateRequest(c, request, matched) + rCtx = stdContext.WithValue(rCtx, ctxBoundParams, result) + request = request.WithContext(rCtx) + if len(result.result) > 0 { + return result.bound, request, errors.CompositeValidationError(result.result...) + } + c.debugLogf("no validation errors found") + return result.bound, request, nil +} + +// NotFound the default not found responder for when no route has been matched yet +func (c *Context) NotFound(rw http.ResponseWriter, r *http.Request) { + c.Respond(rw, r, []string{c.api.DefaultProduces()}, nil, errors.NotFound("not found")) +} + +// Respond renders the response after doing some content negotiation +func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []string, route *MatchedRoute, data interface{}) { + c.debugLogf("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces) + offers := []string{} + for _, mt := range produces { + if mt != c.api.DefaultProduces() { + offers = append(offers, mt) + } + } + // the default producer is last so more specific producers take precedence + offers = append(offers, c.api.DefaultProduces()) + c.debugLogf("offers: %v", offers) + + var format string + format, r = c.ResponseFormat(r, offers) + rw.Header().Set(runtime.HeaderContentType, format) + + if resp, ok := data.(Responder); ok { + producers := route.Producers + // producers contains keys with normalized format, if a format has MIME type parameter such as `text/plain; charset=utf-8` + // then you must provide `text/plain` to get the correct producer. HOWEVER, format here is not normalized. 
+ prod, ok := producers[normalizeOffer(format)] + if !ok { + prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()})) + pr, ok := prods[c.api.DefaultProduces()] + if !ok { + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) + } + prod = pr + } + resp.WriteResponse(rw, prod) + return + } + + if err, ok := data.(error); ok { + if format == "" { + rw.Header().Set(runtime.HeaderContentType, runtime.JSONMime) + } + + if realm := security.FailedBasicAuth(r); realm != "" { + rw.Header().Set("WWW-Authenticate", fmt.Sprintf("Basic realm=%q", realm)) + } + + if route == nil || route.Operation == nil { + c.api.ServeErrorFor("")(rw, r, err) + return + } + c.api.ServeErrorFor(route.Operation.ID)(rw, r, err) + return + } + + if route == nil || route.Operation == nil { + rw.WriteHeader(http.StatusOK) + if r.Method == http.MethodHead { + return + } + producers := c.api.ProducersFor(normalizeOffers(offers)) + prod, ok := producers[format] + if !ok { + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) + } + if err := prod.Produce(rw, data); err != nil { + panic(err) // let the recovery middleware deal with this + } + return + } + + if _, code, ok := route.Operation.SuccessResponse(); ok { + rw.WriteHeader(code) + if code == http.StatusNoContent || r.Method == http.MethodHead { + return + } + + producers := route.Producers + prod, ok := producers[format] + if !ok { + if !ok { + prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()})) + pr, ok := prods[c.api.DefaultProduces()] + if !ok { + panic(errors.New(http.StatusInternalServerError, cantFindProducer(format))) + } + prod = pr + } + } + if err := prod.Produce(rw, data); err != nil { + panic(err) // let the recovery middleware deal with this + } + return + } + + c.api.ServeErrorFor(route.Operation.ID)(rw, r, errors.New(http.StatusInternalServerError, "can't produce response")) +} + +// APIHandlerSwaggerUI returns a handler to serve the 
API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (SwaggerUI) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). +func (c *Context) APIHandlerSwaggerUI(builder Builder, opts ...UIOption) http.Handler { + b := builder + if b == nil { + b = PassthroughBuilder + } + + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var swaggerUIOpts SwaggerUIOpts + fromCommonToAnyOptions(uiOpts, &swaggerUIOpts) + + return Spec(specPath, c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b)), specOpts...) +} + +// APIHandlerRapiDoc returns a handler to serve the API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (RapiDoc) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). +func (c *Context) APIHandlerRapiDoc(builder Builder, opts ...UIOption) http.Handler { + b := builder + if b == nil { + b = PassthroughBuilder + } + + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var rapidocUIOpts RapiDocOpts + fromCommonToAnyOptions(uiOpts, &rapidocUIOpts) + + return Spec(specPath, c.spec.Raw(), RapiDoc(rapidocUIOpts, c.RoutesHandler(b)), specOpts...) +} + +// APIHandler returns a handler to serve the API. +// +// This handler includes a swagger spec, router and the contract defined in the swagger spec. +// +// A spec UI (Redoc) is served at {API base path}/docs and the spec document at /swagger.json +// (these can be modified with uiOptions). +func (c *Context) APIHandler(builder Builder, opts ...UIOption) http.Handler { + b := builder + if b == nil { + b = PassthroughBuilder + } + + specPath, uiOpts, specOpts := c.uiOptionsForHandler(opts) + var redocOpts RedocOpts + fromCommonToAnyOptions(uiOpts, &redocOpts) + + return Spec(specPath, c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b)), specOpts...) 
+} + +func (c Context) uiOptionsForHandler(opts []UIOption) (string, uiOptions, []SpecOption) { + var title string + sp := c.spec.Spec() + if sp != nil && sp.Info != nil && sp.Info.Title != "" { + title = sp.Info.Title + } + + // default options (may be overridden) + optsForContext := []UIOption{ + WithUIBasePath(c.BasePath()), + WithUITitle(title), + } + optsForContext = append(optsForContext, opts...) + uiOpts := uiOptionsWithDefaults(optsForContext) + + // If spec URL is provided, there is a non-default path to serve the spec. + // This makes sure that the UI middleware is aligned with the Spec middleware. + u, _ := url.Parse(uiOpts.SpecURL) + var specPath string + if u != nil { + specPath = u.Path + } + + pth, doc := path.Split(specPath) + if pth == "." { + pth = "" + } + + return pth, uiOpts, []SpecOption{WithSpecDocument(doc)} +} + +// RoutesHandler returns a handler to serve the API, just the routes and the contract defined in the swagger spec +func (c *Context) RoutesHandler(builder Builder) http.Handler { + b := builder + if b == nil { + b = PassthroughBuilder + } + return NewRouter(c, b(NewOperationExecutor(c))) +} + +func cantFindProducer(format string) string { + return "can't find a producer for " + format +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE b/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE new file mode 100644 index 0000000..e65039a --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2014 Naoya Inada + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + 
+The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/README.md b/vendor/github.com/go-openapi/runtime/middleware/denco/README.md new file mode 100644 index 0000000..30109e1 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/README.md @@ -0,0 +1,180 @@ +# Denco [![Build Status](https://travis-ci.org/naoina/denco.png?branch=master)](https://travis-ci.org/naoina/denco) + +The fast and flexible HTTP request router for [Go](http://golang.org). + +Denco is based on Double-Array implementation of [Kocha-urlrouter](https://github.com/naoina/kocha-urlrouter). +However, Denco is optimized and some features added. 
+ +## Features + +* Fast (See [go-http-routing-benchmark](https://github.com/naoina/go-http-routing-benchmark)) +* [URL patterns](#url-patterns) (`/foo/:bar` and `/foo/*wildcard`) +* Small (but enough) URL router API +* HTTP request multiplexer like `http.ServeMux` + +## Installation + + go get -u github.com/go-openapi/runtime/middleware/denco + +## Using as HTTP request multiplexer + +```go +package main + +import ( + "fmt" + "log" + "net/http" + + "github.com/go-openapi/runtime/middleware/denco" +) + +func Index(w http.ResponseWriter, r *http.Request, params denco.Params) { + fmt.Fprintf(w, "Welcome to Denco!\n") +} + +func User(w http.ResponseWriter, r *http.Request, params denco.Params) { + fmt.Fprintf(w, "Hello %s!\n", params.Get("name")) +} + +func main() { + mux := denco.NewMux() + handler, err := mux.Build([]denco.Handler{ + mux.GET("/", Index), + mux.GET("/user/:name", User), + mux.POST("/user/:name", User), + }) + if err != nil { + panic(err) + } + log.Fatal(http.ListenAndServe(":8080", handler)) +} +``` + +## Using as URL router + +```go +package main + +import ( + "fmt" + + "github.com/go-openapi/runtime/middleware/denco" +) + +type route struct { + name string +} + +func main() { + router := denco.New() + router.Build([]denco.Record{ + {"/", &route{"root"}}, + {"/user/:id", &route{"user"}}, + {"/user/:name/:id", &route{"username"}}, + {"/static/*filepath", &route{"static"}}, + }) + + data, params, found := router.Lookup("/") + // print `&main.route{name:"root"}, denco.Params(nil), true`. + fmt.Printf("%#v, %#v, %#v\n", data, params, found) + + data, params, found = router.Lookup("/user/hoge") + // print `&main.route{name:"user"}, denco.Params{denco.Param{Name:"id", Value:"hoge"}}, true`. + fmt.Printf("%#v, %#v, %#v\n", data, params, found) + + data, params, found = router.Lookup("/user/hoge/7") + // print `&main.route{name:"username"}, denco.Params{denco.Param{Name:"name", Value:"hoge"}, denco.Param{Name:"id", Value:"7"}}, true`. 
+ fmt.Printf("%#v, %#v, %#v\n", data, params, found) + + data, params, found = router.Lookup("/static/path/to/file") + // print `&main.route{name:"static"}, denco.Params{denco.Param{Name:"filepath", Value:"path/to/file"}}, true`. + fmt.Printf("%#v, %#v, %#v\n", data, params, found) +} +``` + +See [Godoc](http://godoc.org/github.com/go-openapi/runtime/middleware/denco) for more details. + +## Getting the value of path parameter + +You can get the value of path parameter by 2 ways. + +1. Using [`denco.Params.Get`](http://godoc.org/github.com/go-openapi/runtime/middleware/denco#Params.Get) method +2. Find by loop + +```go +package main + +import ( + "fmt" + + "github.com/go-openapi/runtime/middleware/denco" +) + +func main() { + router := denco.New() + if err := router.Build([]denco.Record{ + {"/user/:name/:id", "route1"}, + }); err != nil { + panic(err) + } + + // 1. Using denco.Params.Get method. + _, params, _ := router.Lookup("/user/alice/1") + name := params.Get("name") + if name != "" { + fmt.Printf("Hello %s.\n", name) // prints "Hello alice.". + } + + // 2. Find by loop. + for _, param := range params { + if param.Name == "name" { + fmt.Printf("Hello %s.\n", name) // prints "Hello alice.". + } + } +} +``` + +## URL patterns + +Denco's route matching strategy is "most nearly matching". + +When routes `/:name` and `/alice` have been built, URI `/alice` matches the route `/alice`, not `/:name`. +Because URI `/alice` is more match with the route `/alice` than `/:name`. 
+ +For more example, when routes below have been built: + +``` +/user/alice +/user/:name +/user/:name/:id +/user/alice/:id +/user/:id/bob +``` + +Routes matching are: + +``` +/user/alice => "/user/alice" (no match with "/user/:name") +/user/bob => "/user/:name" +/user/naoina/1 => "/user/:name/1" +/user/alice/1 => "/user/alice/:id" (no match with "/user/:name/:id") +/user/1/bob => "/user/:id/bob" (no match with "/user/:name/:id") +/user/alice/bob => "/user/alice/:id" (no match with "/user/:name/:id" and "/user/:id/bob") +``` + +## Limitation + +Denco has some limitations below. + +* Number of param records (such as `/:name`) must be less than 2^22 +* Number of elements of internal slice must be less than 2^22 + +## Benchmarks + + cd $GOPATH/github.com/go-openapi/runtime/middleware/denco + go test -bench . -benchmem + +## License + +Denco is licensed under the MIT License. diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go new file mode 100644 index 0000000..4377f77 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go @@ -0,0 +1,467 @@ +// Package denco provides fast URL router. +package denco + +import ( + "errors" + "fmt" + "sort" + "strings" +) + +const ( + // ParamCharacter is a special character for path parameter. + ParamCharacter = ':' + + // WildcardCharacter is a special character for wildcard path parameter. + WildcardCharacter = '*' + + // TerminationCharacter is a special character for end of path. + TerminationCharacter = '#' + + // SeparatorCharacter separates path segments. + SeparatorCharacter = '/' + + // PathParamCharacter indicates a RESTCONF path param + PathParamCharacter = '=' + + // MaxSize is max size of records and internal slice. + MaxSize = (1 << 22) - 1 +) + +// Router represents a URL router. +type Router struct { + param *doubleArray + // SizeHint expects the maximum number of path parameters in records to Build. 
+ // SizeHint will be used to determine the capacity of the memory to allocate. + // By default, SizeHint will be determined from given records to Build. + SizeHint int + + static map[string]interface{} +} + +// New returns a new Router. +func New() *Router { + return &Router{ + SizeHint: -1, + static: make(map[string]interface{}), + param: newDoubleArray(), + } +} + +// Lookup returns data and path parameters that associated with path. +// params is a slice of the Param that arranged in the order in which parameters appeared. +// e.g. when built routing path is "/path/to/:id/:name" and given path is "/path/to/1/alice". params order is [{"id": "1"}, {"name": "alice"}], not [{"name": "alice"}, {"id": "1"}]. +func (rt *Router) Lookup(path string) (data interface{}, params Params, found bool) { + if data, found = rt.static[path]; found { + return data, nil, true + } + if len(rt.param.node) == 1 { + return nil, nil, false + } + nd, params, found := rt.param.lookup(path, make([]Param, 0, rt.SizeHint), 1) + if !found { + return nil, nil, false + } + for i := 0; i < len(params); i++ { + params[i].Name = nd.paramNames[i] + } + return nd.data, params, true +} + +// Build builds URL router from records. +func (rt *Router) Build(records []Record) error { + statics, params := makeRecords(records) + if len(params) > MaxSize { + return errors.New("denco: too many records") + } + if rt.SizeHint < 0 { + rt.SizeHint = 0 + for _, p := range params { + size := 0 + for _, k := range p.Key { + if k == ParamCharacter || k == WildcardCharacter { + size++ + } + } + if size > rt.SizeHint { + rt.SizeHint = size + } + } + } + for _, r := range statics { + rt.static[r.Key] = r.Value + } + if err := rt.param.build(params, 1, 0, make(map[int]struct{})); err != nil { + return err + } + return nil +} + +// Param represents name and value of path parameter. +type Param struct { + Name string + Value string +} + +// Params represents the name and value of path parameters. 
+type Params []Param + +// Get gets the first value associated with the given name. +// If there are no values associated with the key, Get returns "". +func (ps Params) Get(name string) string { + for _, p := range ps { + if p.Name == name { + return p.Value + } + } + return "" +} + +type doubleArray struct { + bc []baseCheck + node []*node +} + +func newDoubleArray() *doubleArray { + return &doubleArray{ + bc: []baseCheck{0}, + node: []*node{nil}, // A start index is adjusting to 1 because 0 will be used as a mark of non-existent node. + } +} + +// baseCheck contains BASE, CHECK and Extra flags. +// From the top, 22bits of BASE, 2bits of Extra flags and 8bits of CHECK. +// +// BASE (22bit) | Extra flags (2bit) | CHECK (8bit) +// +// |----------------------|--|--------| +// 32 10 8 0 +type baseCheck uint32 + +func (bc baseCheck) Base() int { + return int(bc >> 10) +} + +func (bc *baseCheck) SetBase(base int) { + *bc |= baseCheck(base) << 10 +} + +func (bc baseCheck) Check() byte { + return byte(bc) +} + +func (bc *baseCheck) SetCheck(check byte) { + *bc |= baseCheck(check) +} + +func (bc baseCheck) IsEmpty() bool { + return bc&0xfffffcff == 0 +} + +func (bc baseCheck) IsSingleParam() bool { + return bc¶mTypeSingle == paramTypeSingle +} + +func (bc baseCheck) IsWildcardParam() bool { + return bc¶mTypeWildcard == paramTypeWildcard +} + +func (bc baseCheck) IsAnyParam() bool { + return bc¶mTypeAny != 0 +} + +func (bc *baseCheck) SetSingleParam() { + *bc |= (1 << 8) +} + +func (bc *baseCheck) SetWildcardParam() { + *bc |= (1 << 9) +} + +const ( + paramTypeSingle = 0x0100 + paramTypeWildcard = 0x0200 + paramTypeAny = 0x0300 +) + +func (da *doubleArray) lookup(path string, params []Param, idx int) (*node, []Param, bool) { + indices := make([]uint64, 0, 1) + for i := 0; i < len(path); i++ { + if da.bc[idx].IsAnyParam() { + indices = append(indices, (uint64(i)<<32)|(uint64(idx)&0xffffffff)) + } + c := path[i] + if idx = nextIndex(da.bc[idx].Base(), c); idx >= len(da.bc) 
|| da.bc[idx].Check() != c { + goto BACKTRACKING + } + } + if next := nextIndex(da.bc[idx].Base(), TerminationCharacter); next < len(da.bc) && da.bc[next].Check() == TerminationCharacter { + return da.node[da.bc[next].Base()], params, true + } + +BACKTRACKING: + for j := len(indices) - 1; j >= 0; j-- { + i, idx := int(indices[j]>>32), int(indices[j]&0xffffffff) + if da.bc[idx].IsSingleParam() { + nextIdx := nextIndex(da.bc[idx].Base(), ParamCharacter) + if nextIdx >= len(da.bc) { + break + } + + next := NextSeparator(path, i) + nextParams := params + nextParams = append(nextParams, Param{Value: path[i:next]}) + if nd, nextNextParams, found := da.lookup(path[next:], nextParams, nextIdx); found { + return nd, nextNextParams, true + } + } + + if da.bc[idx].IsWildcardParam() { + nextIdx := nextIndex(da.bc[idx].Base(), WildcardCharacter) + nextParams := params + nextParams = append(nextParams, Param{Value: path[i:]}) + return da.node[da.bc[nextIdx].Base()], nextParams, true + } + } + return nil, nil, false +} + +// build builds double-array from records. 
+func (da *doubleArray) build(srcs []*record, idx, depth int, usedBase map[int]struct{}) error { + sort.Stable(recordSlice(srcs)) + base, siblings, leaf, err := da.arrange(srcs, idx, depth, usedBase) + if err != nil { + return err + } + if leaf != nil { + nd, err := makeNode(leaf) + if err != nil { + return err + } + da.bc[idx].SetBase(len(da.node)) + da.node = append(da.node, nd) + } + for _, sib := range siblings { + da.setCheck(nextIndex(base, sib.c), sib.c) + } + for _, sib := range siblings { + records := srcs[sib.start:sib.end] + switch sib.c { + case ParamCharacter: + for _, r := range records { + next := NextSeparator(r.Key, depth+1) + name := r.Key[depth+1 : next] + r.paramNames = append(r.paramNames, name) + r.Key = r.Key[next:] + } + da.bc[idx].SetSingleParam() + if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil { + return err + } + case WildcardCharacter: + r := records[0] + name := r.Key[depth+1 : len(r.Key)-1] + r.paramNames = append(r.paramNames, name) + r.Key = "" + da.bc[idx].SetWildcardParam() + if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil { + return err + } + default: + if err := da.build(records, nextIndex(base, sib.c), depth+1, usedBase); err != nil { + return err + } + } + } + return nil +} + +// setBase sets BASE. +func (da *doubleArray) setBase(i, base int) { + da.bc[i].SetBase(base) +} + +// setCheck sets CHECK. +func (da *doubleArray) setCheck(i int, check byte) { + da.bc[i].SetCheck(check) +} + +// findEmptyIndex returns an index of unused BASE/CHECK node. +func (da *doubleArray) findEmptyIndex(start int) int { + i := start + for ; i < len(da.bc); i++ { + if da.bc[i].IsEmpty() { + break + } + } + return i +} + +// findBase returns good BASE. 
+func (da *doubleArray) findBase(siblings []sibling, start int, usedBase map[int]struct{}) (base int) { + for idx, firstChar := start+1, siblings[0].c; ; idx = da.findEmptyIndex(idx + 1) { + base = nextIndex(idx, firstChar) + if _, used := usedBase[base]; used { + continue + } + i := 0 + for ; i < len(siblings); i++ { + next := nextIndex(base, siblings[i].c) + if len(da.bc) <= next { + da.bc = append(da.bc, make([]baseCheck, next-len(da.bc)+1)...) + } + if !da.bc[next].IsEmpty() { + break + } + } + if i == len(siblings) { + break + } + } + usedBase[base] = struct{}{} + return base +} + +func (da *doubleArray) arrange(records []*record, idx, depth int, usedBase map[int]struct{}) (base int, siblings []sibling, leaf *record, err error) { + siblings, leaf, err = makeSiblings(records, depth) + if err != nil { + return -1, nil, nil, err + } + if len(siblings) < 1 { + return -1, nil, leaf, nil + } + base = da.findBase(siblings, idx, usedBase) + if base > MaxSize { + return -1, nil, nil, errors.New("denco: too many elements of internal slice") + } + da.setBase(idx, base) + return base, siblings, leaf, err +} + +// node represents a node of Double-Array. +type node struct { + data interface{} + + // Names of path parameters. + paramNames []string +} + +// makeNode returns a new node from record. +func makeNode(r *record) (*node, error) { + dups := make(map[string]bool) + for _, name := range r.paramNames { + if dups[name] { + return nil, fmt.Errorf("denco: path parameter `%v' is duplicated in the key `%v'", name, r.Key) + } + dups[name] = true + } + return &node{data: r.Value, paramNames: r.paramNames}, nil +} + +// sibling represents an intermediate data of build for Double-Array. +type sibling struct { + // An index of start of duplicated characters. + start int + + // An index of end of duplicated characters. + end int + + // A character of sibling. + c byte +} + +// nextIndex returns a next index of array of BASE/CHECK. 
+func nextIndex(base int, c byte) int { + return base ^ int(c) +} + +// makeSiblings returns slice of sibling. +func makeSiblings(records []*record, depth int) (sib []sibling, leaf *record, err error) { + var ( + pc byte + n int + ) + for i, r := range records { + if len(r.Key) <= depth { + leaf = r + continue + } + c := r.Key[depth] + switch { + case pc < c: + sib = append(sib, sibling{start: i, c: c}) + case pc == c: + continue + default: + return nil, nil, errors.New("denco: BUG: routing table hasn't been sorted") + } + if n > 0 { + sib[n-1].end = i + } + pc = c + n++ + } + if n == 0 { + return nil, leaf, nil + } + sib[n-1].end = len(records) + return sib, leaf, nil +} + +// Record represents a record data for router construction. +type Record struct { + // Key for router construction. + Key string + + // Result value for Key. + Value interface{} +} + +// NewRecord returns a new Record. +func NewRecord(key string, value interface{}) Record { + return Record{ + Key: key, + Value: value, + } +} + +// record represents a record that use to build the Double-Array. +type record struct { + Record + paramNames []string +} + +// makeRecords returns the records that use to build Double-Arrays. +func makeRecords(srcs []Record) (statics, params []*record) { + termChar := string(TerminationCharacter) + paramPrefix := string(SeparatorCharacter) + string(ParamCharacter) + wildcardPrefix := string(SeparatorCharacter) + string(WildcardCharacter) + restconfPrefix := string(PathParamCharacter) + string(ParamCharacter) + for _, r := range srcs { + if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) || strings.Contains(r.Key, restconfPrefix) { + r.Key += termChar + params = append(params, &record{Record: r}) + } else { + statics = append(statics, &record{Record: r}) + } + } + return statics, params +} + +// recordSlice represents a slice of Record for sort and implements the sort.Interface. 
+type recordSlice []*record + +// Len implements the sort.Interface.Len. +func (rs recordSlice) Len() int { + return len(rs) +} + +// Less implements the sort.Interface.Less. +func (rs recordSlice) Less(i, j int) bool { + return rs[i].Key < rs[j].Key +} + +// Swap implements the sort.Interface.Swap. +func (rs recordSlice) Swap(i, j int) { + rs[i], rs[j] = rs[j], rs[i] +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/server.go b/vendor/github.com/go-openapi/runtime/middleware/denco/server.go new file mode 100644 index 0000000..0886713 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/server.go @@ -0,0 +1,106 @@ +package denco + +import ( + "net/http" +) + +// Mux represents a multiplexer for HTTP request. +type Mux struct{} + +// NewMux returns a new Mux. +func NewMux() *Mux { + return &Mux{} +} + +// GET is shorthand of Mux.Handler("GET", path, handler). +func (m *Mux) GET(path string, handler HandlerFunc) Handler { + return m.Handler("GET", path, handler) +} + +// POST is shorthand of Mux.Handler("POST", path, handler). +func (m *Mux) POST(path string, handler HandlerFunc) Handler { + return m.Handler("POST", path, handler) +} + +// PUT is shorthand of Mux.Handler("PUT", path, handler). +func (m *Mux) PUT(path string, handler HandlerFunc) Handler { + return m.Handler("PUT", path, handler) +} + +// HEAD is shorthand of Mux.Handler("HEAD", path, handler). +func (m *Mux) HEAD(path string, handler HandlerFunc) Handler { + return m.Handler("HEAD", path, handler) +} + +// Handler returns a handler for HTTP method. +func (m *Mux) Handler(method, path string, handler HandlerFunc) Handler { + return Handler{ + Method: method, + Path: path, + Func: handler, + } +} + +// Build builds a http.Handler. 
+func (m *Mux) Build(handlers []Handler) (http.Handler, error) { + recordMap := make(map[string][]Record) + for _, h := range handlers { + recordMap[h.Method] = append(recordMap[h.Method], NewRecord(h.Path, h.Func)) + } + mux := newServeMux() + for m, records := range recordMap { + router := New() + if err := router.Build(records); err != nil { + return nil, err + } + mux.routers[m] = router + } + return mux, nil +} + +// Handler represents a handler of HTTP request. +type Handler struct { + // Method is an HTTP method. + Method string + + // Path is a routing path for handler. + Path string + + // Func is a function of handler of HTTP request. + Func HandlerFunc +} + +// The HandlerFunc type is aliased to type of handler function. +type HandlerFunc func(w http.ResponseWriter, r *http.Request, params Params) + +type serveMux struct { + routers map[string]*Router +} + +func newServeMux() *serveMux { + return &serveMux{ + routers: make(map[string]*Router), + } +} + +// ServeHTTP implements http.Handler interface. +func (mux *serveMux) ServeHTTP(w http.ResponseWriter, r *http.Request) { + handler, params := mux.handler(r.Method, r.URL.Path) + handler(w, r, params) +} + +func (mux *serveMux) handler(method, path string) (HandlerFunc, []Param) { + if router, found := mux.routers[method]; found { + if handler, params, found := router.Lookup(path); found { + return handler.(HandlerFunc), params + } + } + return NotFound, nil +} + +// NotFound replies to the request with an HTTP 404 not found error. +// NotFound is called when unknown HTTP method or a handler not found. +// If you want to use the your own NotFound handler, please overwrite this variable. 
+var NotFound = func(w http.ResponseWriter, r *http.Request, _ Params) { + http.NotFound(w, r) +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/util.go b/vendor/github.com/go-openapi/runtime/middleware/denco/util.go new file mode 100644 index 0000000..edc1f6a --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/denco/util.go @@ -0,0 +1,12 @@ +package denco + +// NextSeparator returns an index of next separator in path. +func NextSeparator(path string, start int) int { + for start < len(path) { + if c := path[start]; c == '/' || c == TerminationCharacter { + break + } + start++ + } + return start +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/doc.go b/vendor/github.com/go-openapi/runtime/middleware/doc.go new file mode 100644 index 0000000..836a988 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/doc.go @@ -0,0 +1,63 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* +Package middleware provides the library with helper functions for serving swagger APIs. 
+ +Pseudo middleware handler + + import ( + "net/http" + + "github.com/go-openapi/errors" + ) + + func newCompleteMiddleware(ctx *Context) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // use context to lookup routes + if matched, ok := ctx.RouteInfo(r); ok { + + if matched.NeedsAuth() { + if _, err := ctx.Authorize(r, matched); err != nil { + ctx.Respond(rw, r, matched.Produces, matched, err) + return + } + } + + bound, validation := ctx.BindAndValidate(r, matched) + if validation != nil { + ctx.Respond(rw, r, matched.Produces, matched, validation) + return + } + + result, err := matched.Handler.Handle(bound) + if err != nil { + ctx.Respond(rw, r, matched.Produces, matched, err) + return + } + + ctx.Respond(rw, r, matched.Produces, matched, result) + return + } + + // Not found, check if it exists in the other methods first + if others := ctx.AllowedMethods(r); len(others) > 0 { + ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others)) + return + } + ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path)) + }) + } +*/ +package middleware diff --git a/vendor/github.com/go-openapi/runtime/middleware/header/header.go b/vendor/github.com/go-openapi/runtime/middleware/header/header.go new file mode 100644 index 0000000..df073c8 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/header/header.go @@ -0,0 +1,332 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// this file was taken from the github.com/golang/gddo repository + +// Package header provides functions for parsing HTTP headers. +package header + +import ( + "net/http" + "strings" + "time" +) + +// Octet types from RFC 2616. 
+var octetTypes [256]octetType + +type octetType byte + +const ( + isToken octetType = 1 << iota + isSpace +) + +func init() { + // OCTET = + // CHAR = + // CTL = + // CR = + // LF = + // SP = + // HT = + // <"> = + // CRLF = CR LF + // LWS = [CRLF] 1*( SP | HT ) + // TEXT = + // separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <"> + // | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT + // token = 1* + // qdtext = > + + for c := 0; c < 256; c++ { + var t octetType + isCtl := c <= 31 || c == 127 + isChar := 0 <= c && c <= 127 + isSeparator := strings.ContainsRune(" \t\"(),/:;<=>?@[]\\{}", rune(c)) + if strings.ContainsRune(" \t\r\n", rune(c)) { + t |= isSpace + } + if isChar && !isCtl && !isSeparator { + t |= isToken + } + octetTypes[c] = t + } +} + +// Copy returns a shallow copy of the header. +func Copy(header http.Header) http.Header { + h := make(http.Header) + for k, vs := range header { + h[k] = vs + } + return h +} + +var timeLayouts = []string{"Mon, 02 Jan 2006 15:04:05 GMT", time.RFC850, time.ANSIC} + +// ParseTime parses the header as time. The zero value is returned if the +// header is not present or there is an error parsing the +// header. +func ParseTime(header http.Header, key string) time.Time { + if s := header.Get(key); s != "" { + for _, layout := range timeLayouts { + if t, err := time.Parse(layout, s); err == nil { + return t.UTC() + } + } + } + return time.Time{} +} + +// ParseList parses a comma separated list of values. Commas are ignored in +// quoted strings. Quoted values are not unescaped or unquoted. Whitespace is +// trimmed. 
+func ParseList(header http.Header, key string) []string { + var result []string + for _, s := range header[http.CanonicalHeaderKey(key)] { + begin := 0 + end := 0 + escape := false + quote := false + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case escape: + escape = false + end = i + 1 + case quote: + switch b { + case '\\': + escape = true + case '"': + quote = false + } + end = i + 1 + case b == '"': + quote = true + end = i + 1 + case octetTypes[b]&isSpace != 0: + if begin == end { + begin = i + 1 + end = begin + } + case b == ',': + if begin < end { + result = append(result, s[begin:end]) + } + begin = i + 1 + end = begin + default: + end = i + 1 + } + } + if begin < end { + result = append(result, s[begin:end]) + } + } + return result +} + +// ParseValueAndParams parses a comma separated list of values with optional +// semicolon separated name-value pairs. Content-Type and Content-Disposition +// headers are in this format. +func ParseValueAndParams(header http.Header, key string) (string, map[string]string) { + return parseValueAndParams(header.Get(key)) +} + +func parseValueAndParams(s string) (value string, params map[string]string) { + params = make(map[string]string) + value, s = expectTokenSlash(s) + if value == "" { + return + } + value = strings.ToLower(value) + s = skipSpace(s) + for strings.HasPrefix(s, ";") { + var pkey string + pkey, s = expectToken(skipSpace(s[1:])) + if pkey == "" { + return + } + if !strings.HasPrefix(s, "=") { + return + } + var pvalue string + pvalue, s = expectTokenOrQuoted(s[1:]) + if pvalue == "" { + return + } + pkey = strings.ToLower(pkey) + params[pkey] = pvalue + s = skipSpace(s) + } + return +} + +// AcceptSpec ... +type AcceptSpec struct { + Value string + Q float64 +} + +// ParseAccept2 ... 
+func ParseAccept2(header http.Header, key string) (specs []AcceptSpec) { + for _, en := range ParseList(header, key) { + v, p := parseValueAndParams(en) + var spec AcceptSpec + spec.Value = v + spec.Q = 1.0 + if p != nil { + if q, ok := p["q"]; ok { + spec.Q, _ = expectQuality(q) + } + } + if spec.Q < 0.0 { + continue + } + specs = append(specs, spec) + } + + return +} + +// ParseAccept parses Accept* headers. +func ParseAccept(header http.Header, key string) []AcceptSpec { + var specs []AcceptSpec +loop: + for _, s := range header[key] { + for { + var spec AcceptSpec + spec.Value, s = expectTokenSlash(s) + if spec.Value == "" { + continue loop + } + spec.Q = 1.0 + s = skipSpace(s) + if strings.HasPrefix(s, ";") { + s = skipSpace(s[1:]) + for !strings.HasPrefix(s, "q=") && s != "" && !strings.HasPrefix(s, ",") { + s = skipSpace(s[1:]) + } + if strings.HasPrefix(s, "q=") { + spec.Q, s = expectQuality(s[2:]) + if spec.Q < 0.0 { + continue loop + } + } + } + + specs = append(specs, spec) + s = skipSpace(s) + if !strings.HasPrefix(s, ",") { + continue loop + } + s = skipSpace(s[1:]) + } + } + + return specs +} + +func skipSpace(s string) (rest string) { + i := 0 + for ; i < len(s); i++ { + if octetTypes[s[i]]&isSpace == 0 { + break + } + } + return s[i:] +} + +func expectToken(s string) (token, rest string) { + i := 0 + for ; i < len(s); i++ { + if octetTypes[s[i]]&isToken == 0 { + break + } + } + return s[:i], s[i:] +} + +func expectTokenSlash(s string) (token, rest string) { + i := 0 + for ; i < len(s); i++ { + b := s[i] + if (octetTypes[b]&isToken == 0) && b != '/' { + break + } + } + return s[:i], s[i:] +} + +func expectQuality(s string) (q float64, rest string) { + switch { + case len(s) == 0: + return -1, "" + case s[0] == '0': + // q is already 0 + s = s[1:] + case s[0] == '1': + s = s[1:] + q = 1 + case s[0] == '.': + // q is already 0 + default: + return -1, "" + } + if !strings.HasPrefix(s, ".") { + return q, s + } + s = s[1:] + i := 0 + n := 0 + d := 1 + 
for ; i < len(s); i++ { + b := s[i] + if b < '0' || b > '9' { + break + } + n = n*10 + int(b) - '0' + d *= 10 + } + return q + float64(n)/float64(d), s[i:] +} + +func expectTokenOrQuoted(s string) (value string, rest string) { + if !strings.HasPrefix(s, "\"") { + return expectToken(s) + } + s = s[1:] + for i := 0; i < len(s); i++ { + switch s[i] { + case '"': + return s[:i], s[i+1:] + case '\\': + p := make([]byte, len(s)-1) + j := copy(p, s[:i]) + escape := true + for i++; i < len(s); i++ { + b := s[i] + switch { + case escape: + escape = false + p[j] = b + j++ + case b == '\\': + escape = true + case b == '"': + return string(p[:j]), s[i+1:] + default: + p[j] = b + j++ + } + } + return "", "" + } + } + return "", "" +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/negotiate.go b/vendor/github.com/go-openapi/runtime/middleware/negotiate.go new file mode 100644 index 0000000..a9b6f27 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/negotiate.go @@ -0,0 +1,98 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// this file was taken from the github.com/golang/gddo repository + +package middleware + +import ( + "net/http" + "strings" + + "github.com/go-openapi/runtime/middleware/header" +) + +// NegotiateContentEncoding returns the best offered content encoding for the +// request's Accept-Encoding header. If two offers match with equal weight and +// then the offer earlier in the list is preferred. If no offers are +// acceptable, then "" is returned. 
+func NegotiateContentEncoding(r *http.Request, offers []string) string { + bestOffer := "identity" + bestQ := -1.0 + specs := header.ParseAccept(r.Header, "Accept-Encoding") + for _, offer := range offers { + for _, spec := range specs { + if spec.Q > bestQ && + (spec.Value == "*" || spec.Value == offer) { + bestQ = spec.Q + bestOffer = offer + } + } + } + if bestQ == 0 { + bestOffer = "" + } + return bestOffer +} + +// NegotiateContentType returns the best offered content type for the request's +// Accept header. If two offers match with equal weight, then the more specific +// offer is preferred. For example, text/* trumps */*. If two offers match +// with equal weight and specificity, then the offer earlier in the list is +// preferred. If no offers match, then defaultOffer is returned. +func NegotiateContentType(r *http.Request, offers []string, defaultOffer string) string { + bestOffer := defaultOffer + bestQ := -1.0 + bestWild := 3 + specs := header.ParseAccept(r.Header, "Accept") + for _, rawOffer := range offers { + offer := normalizeOffer(rawOffer) + // No Accept header: just return the first offer. 
+ if len(specs) == 0 { + return rawOffer + } + for _, spec := range specs { + switch { + case spec.Q == 0.0: + // ignore + case spec.Q < bestQ: + // better match found + case spec.Value == "*/*": + if spec.Q > bestQ || bestWild > 2 { + bestQ = spec.Q + bestWild = 2 + bestOffer = rawOffer + } + case strings.HasSuffix(spec.Value, "/*"): + if strings.HasPrefix(offer, spec.Value[:len(spec.Value)-1]) && + (spec.Q > bestQ || bestWild > 1) { + bestQ = spec.Q + bestWild = 1 + bestOffer = rawOffer + } + default: + if spec.Value == offer && + (spec.Q > bestQ || bestWild > 0) { + bestQ = spec.Q + bestWild = 0 + bestOffer = rawOffer + } + } + } + } + return bestOffer +} + +func normalizeOffers(orig []string) (norm []string) { + for _, o := range orig { + norm = append(norm, normalizeOffer(o)) + } + return +} + +func normalizeOffer(orig string) string { + return strings.SplitN(orig, ";", 2)[0] +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go b/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go new file mode 100644 index 0000000..bc6942a --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go @@ -0,0 +1,67 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import ( + "net/http" + + "github.com/go-openapi/runtime" +) + +type errorResp struct { + code int + response interface{} + headers http.Header +} + +func (e *errorResp) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { + for k, v := range e.headers { + for _, val := range v { + rw.Header().Add(k, val) + } + } + if e.code > 0 { + rw.WriteHeader(e.code) + } else { + rw.WriteHeader(http.StatusInternalServerError) + } + if err := producer.Produce(rw, e.response); err != nil { + Logger.Printf("failed to write error response: %v", err) + } +} + +// NotImplemented the error response when the response is not implemented +func NotImplemented(message string) Responder { + return Error(http.StatusNotImplemented, message) +} + +// Error creates a generic responder for returning errors, the data will be serialized +// with the matching producer for the request +func Error(code int, data interface{}, headers ...http.Header) Responder { + var hdr http.Header + for _, h := range headers { + for k, v := range h { + if hdr == nil { + hdr = make(http.Header) + } + hdr[k] = v + } + } + return &errorResp{ + code: code, + response: data, + headers: hdr, + } +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/operation.go b/vendor/github.com/go-openapi/runtime/middleware/operation.go new file mode 100644 index 0000000..1175a63 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/operation.go @@ -0,0 +1,30 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package middleware + +import "net/http" + +// NewOperationExecutor creates a context aware middleware that handles the operations after routing +func NewOperationExecutor(ctx *Context) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // use context to lookup routes + route, rCtx, _ := ctx.RouteInfo(r) + if rCtx != nil { + r = rCtx + } + + route.Handler.ServeHTTP(rw, r) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/parameter.go b/vendor/github.com/go-openapi/runtime/middleware/parameter.go new file mode 100644 index 0000000..9c3353a --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/parameter.go @@ -0,0 +1,491 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import ( + "encoding" + "encoding/base64" + "fmt" + "io" + "net/http" + "reflect" + "strconv" + + "github.com/go-openapi/errors" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" + + "github.com/go-openapi/runtime" +) + +const defaultMaxMemory = 32 << 20 + +const ( + typeString = "string" + typeArray = "array" +) + +var textUnmarshalType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() + +func newUntypedParamBinder(param spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *untypedParamBinder { + binder := new(untypedParamBinder) + binder.Name = param.Name + binder.parameter = ¶m + binder.formats = formats + if param.In != "body" { + binder.validator = validate.NewParamValidator(¶m, formats) + } else { + binder.validator = validate.NewSchemaValidator(param.Schema, spec, param.Name, formats) + } + + return binder +} + +type untypedParamBinder struct { + parameter *spec.Parameter + formats strfmt.Registry + Name string + validator validate.EntityValidator +} + +func (p *untypedParamBinder) Type() reflect.Type { + return p.typeForSchema(p.parameter.Type, p.parameter.Format, p.parameter.Items) +} + +func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items) reflect.Type { + switch tpe { + case "boolean": + return reflect.TypeOf(true) + + case typeString: + if tt, ok := p.formats.GetType(format); ok { + return tt + } + return reflect.TypeOf("") + + case "integer": + switch format { + case "int8": + return reflect.TypeOf(int8(0)) + case "int16": + return reflect.TypeOf(int16(0)) + case "int32": + return reflect.TypeOf(int32(0)) + case "int64": + return reflect.TypeOf(int64(0)) + default: + return reflect.TypeOf(int64(0)) + } + + case "number": + switch format { + case "float": + return reflect.TypeOf(float32(0)) + case "double": + return reflect.TypeOf(float64(0)) + } + + case typeArray: + if items == nil { + return nil + } + 
itemsType := p.typeForSchema(items.Type, items.Format, items.Items) + if itemsType == nil { + return nil + } + return reflect.MakeSlice(reflect.SliceOf(itemsType), 0, 0).Type() + + case "file": + return reflect.TypeOf(&runtime.File{}).Elem() + + case "object": + return reflect.TypeOf(map[string]interface{}{}) + } + return nil +} + +func (p *untypedParamBinder) allowsMulti() bool { + return p.parameter.In == "query" || p.parameter.In == "formData" +} + +func (p *untypedParamBinder) readValue(values runtime.Gettable, target reflect.Value) ([]string, bool, bool, error) { + name, in, cf, tpe := p.parameter.Name, p.parameter.In, p.parameter.CollectionFormat, p.parameter.Type + if tpe == typeArray { + if cf == "multi" { + if !p.allowsMulti() { + return nil, false, false, errors.InvalidCollectionFormat(name, in, cf) + } + vv, hasKey, _ := values.GetOK(name) + return vv, false, hasKey, nil + } + + v, hk, hv := values.GetOK(name) + if !hv { + return nil, false, hk, nil + } + d, c, e := p.readFormattedSliceFieldValue(v[len(v)-1], target) + return d, c, hk, e + } + + vv, hk, _ := values.GetOK(name) + return vv, false, hk, nil +} + +func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, target reflect.Value) error { + // fmt.Println("binding", p.name, "as", p.Type()) + switch p.parameter.In { + case "query": + data, custom, hasKey, err := p.readValue(runtime.Values(request.URL.Query()), target) + if err != nil { + return err + } + if custom { + return nil + } + + return p.bindValue(data, hasKey, target) + + case "header": + data, custom, hasKey, err := p.readValue(runtime.Values(request.Header), target) + if err != nil { + return err + } + if custom { + return nil + } + return p.bindValue(data, hasKey, target) + + case "path": + data, custom, hasKey, err := p.readValue(routeParams, target) + if err != nil { + return err + } + if custom { + return nil + } + return p.bindValue(data, hasKey, target) + + case "formData": + var 
err error + var mt string + + mt, _, e := runtime.ContentType(request.Header) + if e != nil { + // because of the interface conversion go thinks the error is not nil + // so we first check for nil and then set the err var if it's not nil + err = e + } + + if err != nil { + return errors.InvalidContentType("", []string{"multipart/form-data", "application/x-www-form-urlencoded"}) + } + + if mt != "multipart/form-data" && mt != "application/x-www-form-urlencoded" { + return errors.InvalidContentType(mt, []string{"multipart/form-data", "application/x-www-form-urlencoded"}) + } + + if mt == "multipart/form-data" { + if err = request.ParseMultipartForm(defaultMaxMemory); err != nil { + return errors.NewParseError(p.Name, p.parameter.In, "", err) + } + } + + if err = request.ParseForm(); err != nil { + return errors.NewParseError(p.Name, p.parameter.In, "", err) + } + + if p.parameter.Type == "file" { + file, header, ffErr := request.FormFile(p.parameter.Name) + if ffErr != nil { + if p.parameter.Required { + return errors.NewParseError(p.Name, p.parameter.In, "", ffErr) + } + + return nil + } + + target.Set(reflect.ValueOf(runtime.File{Data: file, Header: header})) + return nil + } + + if request.MultipartForm != nil { + data, custom, hasKey, rvErr := p.readValue(runtime.Values(request.MultipartForm.Value), target) + if rvErr != nil { + return rvErr + } + if custom { + return nil + } + return p.bindValue(data, hasKey, target) + } + data, custom, hasKey, err := p.readValue(runtime.Values(request.PostForm), target) + if err != nil { + return err + } + if custom { + return nil + } + return p.bindValue(data, hasKey, target) + + case "body": + newValue := reflect.New(target.Type()) + if !runtime.HasBody(request) { + if p.parameter.Default != nil { + target.Set(reflect.ValueOf(p.parameter.Default)) + } + + return nil + } + if err := consumer.Consume(request.Body, newValue.Interface()); err != nil { + if err == io.EOF && p.parameter.Default != nil { + 
target.Set(reflect.ValueOf(p.parameter.Default)) + return nil + } + tpe := p.parameter.Type + if p.parameter.Format != "" { + tpe = p.parameter.Format + } + return errors.InvalidType(p.Name, p.parameter.In, tpe, nil) + } + target.Set(reflect.Indirect(newValue)) + return nil + default: + return errors.New(500, fmt.Sprintf("invalid parameter location %q", p.parameter.In)) + } +} + +func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflect.Value) error { + if p.parameter.Type == typeArray { + return p.setSliceFieldValue(target, p.parameter.Default, data, hasKey) + } + var d string + if len(data) > 0 { + d = data[len(data)-1] + } + return p.setFieldValue(target, p.parameter.Default, d, hasKey) +} + +func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error { //nolint:gocyclo + tpe := p.parameter.Type + if p.parameter.Format != "" { + tpe = p.parameter.Format + } + + if (!hasKey || (!p.parameter.AllowEmptyValue && data == "")) && p.parameter.Required && p.parameter.Default == nil { + return errors.Required(p.Name, p.parameter.In, data) + } + + ok, err := p.tryUnmarshaler(target, defaultValue, data) + if err != nil { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if ok { + return nil + } + + defVal := reflect.Zero(target.Type()) + if defaultValue != nil { + defVal = reflect.ValueOf(defaultValue) + } + + if tpe == "byte" { + if data == "" { + if target.CanSet() { + target.SetBytes(defVal.Bytes()) + } + return nil + } + + b, err := base64.StdEncoding.DecodeString(data) + if err != nil { + b, err = base64.URLEncoding.DecodeString(data) + if err != nil { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + } + if target.CanSet() { + target.SetBytes(b) + } + return nil + } + + switch target.Kind() { //nolint:exhaustive // we want to check only types that map from a swagger parameter + case reflect.Bool: + if data == "" { + if target.CanSet() { + 
target.SetBool(defVal.Bool()) + } + return nil + } + b, err := swag.ConvertBool(data) + if err != nil { + return err + } + if target.CanSet() { + target.SetBool(b) + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if data == "" { + if target.CanSet() { + rd := defVal.Convert(reflect.TypeOf(int64(0))) + target.SetInt(rd.Int()) + } + return nil + } + i, err := strconv.ParseInt(data, 10, 64) + if err != nil { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.OverflowInt(i) { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.CanSet() { + target.SetInt(i) + } + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + if data == "" { + if target.CanSet() { + rd := defVal.Convert(reflect.TypeOf(uint64(0))) + target.SetUint(rd.Uint()) + } + return nil + } + u, err := strconv.ParseUint(data, 10, 64) + if err != nil { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.OverflowUint(u) { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.CanSet() { + target.SetUint(u) + } + + case reflect.Float32, reflect.Float64: + if data == "" { + if target.CanSet() { + rd := defVal.Convert(reflect.TypeOf(float64(0))) + target.SetFloat(rd.Float()) + } + return nil + } + f, err := strconv.ParseFloat(data, 64) + if err != nil { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.OverflowFloat(f) { + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + if target.CanSet() { + target.SetFloat(f) + } + + case reflect.String: + value := data + if value == "" { + value = defVal.String() + } + // validate string + if target.CanSet() { + target.SetString(value) + } + + case reflect.Ptr: + if data == "" && defVal.Kind() == reflect.Ptr { + if target.CanSet() { + target.Set(defVal) + } + return nil + } + newVal := reflect.New(target.Type().Elem()) + if err := 
p.setFieldValue(reflect.Indirect(newVal), defVal, data, hasKey); err != nil { + return err + } + if target.CanSet() { + target.Set(newVal) + } + + default: + return errors.InvalidType(p.Name, p.parameter.In, tpe, data) + } + return nil +} + +func (p *untypedParamBinder) tryUnmarshaler(target reflect.Value, defaultValue interface{}, data string) (bool, error) { + if !target.CanSet() { + return false, nil + } + // When a type implements encoding.TextUnmarshaler we'll use that instead of reflecting some more + if reflect.PtrTo(target.Type()).Implements(textUnmarshalType) { + if defaultValue != nil && len(data) == 0 { + target.Set(reflect.ValueOf(defaultValue)) + return true, nil + } + value := reflect.New(target.Type()) + if err := value.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(data)); err != nil { + return true, err + } + target.Set(reflect.Indirect(value)) + return true, nil + } + return false, nil +} + +func (p *untypedParamBinder) readFormattedSliceFieldValue(data string, target reflect.Value) ([]string, bool, error) { + ok, err := p.tryUnmarshaler(target, p.parameter.Default, data) + if err != nil { + return nil, true, err + } + if ok { + return nil, true, nil + } + + return swag.SplitByFormat(data, p.parameter.CollectionFormat), false, nil +} + +func (p *untypedParamBinder) setSliceFieldValue(target reflect.Value, defaultValue interface{}, data []string, hasKey bool) error { + sz := len(data) + if (!hasKey || (!p.parameter.AllowEmptyValue && (sz == 0 || (sz == 1 && data[0] == "")))) && p.parameter.Required && defaultValue == nil { + return errors.Required(p.Name, p.parameter.In, data) + } + + defVal := reflect.Zero(target.Type()) + if defaultValue != nil { + defVal = reflect.ValueOf(defaultValue) + } + + if !target.CanSet() { + return nil + } + if sz == 0 { + target.Set(defVal) + return nil + } + + value := reflect.MakeSlice(reflect.SliceOf(target.Type().Elem()), sz, sz) + + for i := 0; i < sz; i++ { + if err := p.setFieldValue(value.Index(i), 
nil, data[i], hasKey); err != nil { + return err + } + } + + target.Set(value) + + return nil +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go new file mode 100644 index 0000000..ef75e74 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go @@ -0,0 +1,80 @@ +package middleware + +import ( + "bytes" + "fmt" + "html/template" + "net/http" + "path" +) + +// RapiDocOpts configures the RapiDoc middlewares +type RapiDocOpts struct { + // BasePath for the UI, defaults to: / + BasePath string + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". + Path string + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json + SpecURL string + + // Title for the documentation site, default to: API documentation + Title string + + // Template specifies a custom template to serve the UI + Template string + + // RapiDocURL points to the js asset that generates the rapidoc site. + // + // Defaults to https://unpkg.com/rapidoc/dist/rapidoc-min.js + RapiDocURL string +} + +func (r *RapiDocOpts) EnsureDefaults() { + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // rapidoc-specifics + if r.RapiDocURL == "" { + r.RapiDocURL = rapidocLatest + } + if r.Template == "" { + r.Template = rapidocTemplate + } +} + +// RapiDoc creates a middleware to serve a documentation site for a swagger spec. +// +// This allows for altering the spec before starting the http listener. 
+func RapiDoc(opts RapiDocOpts, next http.Handler) http.Handler { + opts.EnsureDefaults() + + pth := path.Join(opts.BasePath, opts.Path) + tmpl := template.Must(template.New("rapidoc").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } + + return serveUI(pth, assets.Bytes(), next) +} + +const ( + rapidocLatest = "https://unpkg.com/rapidoc/dist/rapidoc-min.js" + rapidocTemplate = ` + + + {{ .Title }} + + + + + + + +` +) diff --git a/vendor/github.com/go-openapi/runtime/middleware/redoc.go b/vendor/github.com/go-openapi/runtime/middleware/redoc.go new file mode 100644 index 0000000..b96b01e --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/redoc.go @@ -0,0 +1,94 @@ +package middleware + +import ( + "bytes" + "fmt" + "html/template" + "net/http" + "path" +) + +// RedocOpts configures the Redoc middlewares +type RedocOpts struct { + // BasePath for the UI, defaults to: / + BasePath string + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". + Path string + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json + SpecURL string + + // Title for the documentation site, default to: API documentation + Title string + + // Template specifies a custom template to serve the UI + Template string + + // RedocURL points to the js that generates the redoc site. + // + // Defaults to: https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js + RedocURL string +} + +// EnsureDefaults in case some options are missing +func (r *RedocOpts) EnsureDefaults() { + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // redoc-specifics + if r.RedocURL == "" { + r.RedocURL = redocLatest + } + if r.Template == "" { + r.Template = redocTemplate + } +} + +// Redoc creates a middleware to serve a documentation site for a swagger spec. 
+// +// This allows for altering the spec before starting the http listener. +func Redoc(opts RedocOpts, next http.Handler) http.Handler { + opts.EnsureDefaults() + + pth := path.Join(opts.BasePath, opts.Path) + tmpl := template.Must(template.New("redoc").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } + + return serveUI(pth, assets.Bytes(), next) +} + +const ( + redocLatest = "https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js" + redocTemplate = ` + + + {{ .Title }} + + + + + + + + + + + + + +` +) diff --git a/vendor/github.com/go-openapi/runtime/middleware/request.go b/vendor/github.com/go-openapi/runtime/middleware/request.go new file mode 100644 index 0000000..82e1436 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/request.go @@ -0,0 +1,117 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import ( + "net/http" + "reflect" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + "github.com/go-openapi/runtime/logger" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" +) + +// UntypedRequestBinder binds and validates the data from a http request +type UntypedRequestBinder struct { + Spec *spec.Swagger + Parameters map[string]spec.Parameter + Formats strfmt.Registry + paramBinders map[string]*untypedParamBinder + debugLogf func(string, ...any) // a logging function to debug context and all components using it +} + +// NewUntypedRequestBinder creates a new binder for reading a request. +func NewUntypedRequestBinder(parameters map[string]spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *UntypedRequestBinder { + binders := make(map[string]*untypedParamBinder) + for fieldName, param := range parameters { + binders[fieldName] = newUntypedParamBinder(param, spec, formats) + } + return &UntypedRequestBinder{ + Parameters: parameters, + paramBinders: binders, + Spec: spec, + Formats: formats, + debugLogf: debugLogfFunc(nil), + } +} + +// Bind perform the databinding and validation +func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, data interface{}) error { + val := reflect.Indirect(reflect.ValueOf(data)) + isMap := val.Kind() == reflect.Map + var result []error + o.debugLogf("binding %d parameters for %s %s", len(o.Parameters), request.Method, request.URL.EscapedPath()) + for fieldName, param := range o.Parameters { + binder := o.paramBinders[fieldName] + o.debugLogf("binding parameter %s for %s %s", fieldName, request.Method, request.URL.EscapedPath()) + var target reflect.Value + if !isMap { + binder.Name = fieldName + target = val.FieldByName(fieldName) + } + + if isMap { + tpe := binder.Type() + if tpe == nil { + if param.Schema.Type.Contains(typeArray) { + tpe = reflect.TypeOf([]interface{}{}) + } else { + tpe = 
reflect.TypeOf(map[string]interface{}{}) + } + } + target = reflect.Indirect(reflect.New(tpe)) + } + + if !target.IsValid() { + result = append(result, errors.New(500, "parameter name %q is an unknown field", binder.Name)) + continue + } + + if err := binder.Bind(request, routeParams, consumer, target); err != nil { + result = append(result, err) + continue + } + + if binder.validator != nil { + rr := binder.validator.Validate(target.Interface()) + if rr != nil && rr.HasErrors() { + result = append(result, rr.AsError()) + } + } + + if isMap { + val.SetMapIndex(reflect.ValueOf(param.Name), target) + } + } + + if len(result) > 0 { + return errors.CompositeValidationError(result...) + } + + return nil +} + +// SetLogger allows for injecting a logger to catch debug entries. +// +// The logger is enabled in DEBUG mode only. +func (o *UntypedRequestBinder) SetLogger(lg logger.Logger) { + o.debugLogf = debugLogfFunc(lg) +} + +func (o *UntypedRequestBinder) setDebugLogf(fn func(string, ...any)) { + o.debugLogf = fn +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/router.go b/vendor/github.com/go-openapi/runtime/middleware/router.go new file mode 100644 index 0000000..3a6aee9 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/router.go @@ -0,0 +1,531 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import ( + "fmt" + "net/http" + "net/url" + fpath "path" + "regexp" + "strings" + + "github.com/go-openapi/runtime/logger" + "github.com/go-openapi/runtime/security" + "github.com/go-openapi/swag" + + "github.com/go-openapi/analysis" + "github.com/go-openapi/errors" + "github.com/go-openapi/loads" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" + + "github.com/go-openapi/runtime" + "github.com/go-openapi/runtime/middleware/denco" +) + +// RouteParam is a object to capture route params in a framework agnostic way. +// implementations of the muxer should use these route params to communicate with the +// swagger framework +type RouteParam struct { + Name string + Value string +} + +// RouteParams the collection of route params +type RouteParams []RouteParam + +// Get gets the value for the route param for the specified key +func (r RouteParams) Get(name string) string { + vv, _, _ := r.GetOK(name) + if len(vv) > 0 { + return vv[len(vv)-1] + } + return "" +} + +// GetOK gets the value but also returns booleans to indicate if a key or value +// is present. 
This aids in validation and satisfies an interface in use there +// +// The returned values are: data, has key, has value +func (r RouteParams) GetOK(name string) ([]string, bool, bool) { + for _, p := range r { + if p.Name == name { + return []string{p.Value}, true, p.Value != "" + } + } + return nil, false, false +} + +// NewRouter creates a new context-aware router middleware +func NewRouter(ctx *Context, next http.Handler) http.Handler { + if ctx.router == nil { + ctx.router = DefaultRouter(ctx.spec, ctx.api, WithDefaultRouterLoggerFunc(ctx.debugLogf)) + } + + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if _, rCtx, ok := ctx.RouteInfo(r); ok { + next.ServeHTTP(rw, rCtx) + return + } + + // Not found, check if it exists in the other methods first + if others := ctx.AllowedMethods(r); len(others) > 0 { + ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others)) + return + } + + ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.EscapedPath())) + }) +} + +// RoutableAPI represents an interface for things that can serve +// as a provider of implementations for the swagger router +type RoutableAPI interface { + HandlerFor(string, string) (http.Handler, bool) + ServeErrorFor(string) func(http.ResponseWriter, *http.Request, error) + ConsumersFor([]string) map[string]runtime.Consumer + ProducersFor([]string) map[string]runtime.Producer + AuthenticatorsFor(map[string]spec.SecurityScheme) map[string]runtime.Authenticator + Authorizer() runtime.Authorizer + Formats() strfmt.Registry + DefaultProduces() string + DefaultConsumes() string +} + +// Router represents a swagger-aware router +type Router interface { + Lookup(method, path string) (*MatchedRoute, bool) + OtherMethods(method, path string) []string +} + +type defaultRouteBuilder struct { + spec *loads.Document + analyzer *analysis.Spec + api RoutableAPI + records map[string][]denco.Record + 
debugLogf func(string, ...any) // a logging function to debug context and all components using it +} + +type defaultRouter struct { + spec *loads.Document + routers map[string]*denco.Router + debugLogf func(string, ...any) // a logging function to debug context and all components using it +} + +func newDefaultRouteBuilder(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) *defaultRouteBuilder { + var o defaultRouterOpts + for _, apply := range opts { + apply(&o) + } + if o.debugLogf == nil { + o.debugLogf = debugLogfFunc(nil) // defaults to standard logger + } + + return &defaultRouteBuilder{ + spec: spec, + analyzer: analysis.New(spec.Spec()), + api: api, + records: make(map[string][]denco.Record), + debugLogf: o.debugLogf, + } +} + +// DefaultRouterOpt allows to inject optional behavior to the default router. +type DefaultRouterOpt func(*defaultRouterOpts) + +type defaultRouterOpts struct { + debugLogf func(string, ...any) +} + +// WithDefaultRouterLogger sets the debug logger for the default router. +// +// This is enabled only in DEBUG mode. +func WithDefaultRouterLogger(lg logger.Logger) DefaultRouterOpt { + return func(o *defaultRouterOpts) { + o.debugLogf = debugLogfFunc(lg) + } +} + +// WithDefaultRouterLoggerFunc sets a logging debug method for the default router. +func WithDefaultRouterLoggerFunc(fn func(string, ...any)) DefaultRouterOpt { + return func(o *defaultRouterOpts) { + o.debugLogf = fn + } +} + +// DefaultRouter creates a default implementation of the router +func DefaultRouter(spec *loads.Document, api RoutableAPI, opts ...DefaultRouterOpt) Router { + builder := newDefaultRouteBuilder(spec, api, opts...) 
+ if spec != nil { + for method, paths := range builder.analyzer.Operations() { + for path, operation := range paths { + fp := fpath.Join(spec.BasePath(), path) + builder.debugLogf("adding route %s %s %q", method, fp, operation.ID) + builder.AddRoute(method, fp, operation) + } + } + } + return builder.Build() +} + +// RouteAuthenticator is an authenticator that can compose several authenticators together. +// It also knows when it contains an authenticator that allows for anonymous pass through. +// Contains a group of 1 or more authenticators that have a logical AND relationship +type RouteAuthenticator struct { + Authenticator map[string]runtime.Authenticator + Schemes []string + Scopes map[string][]string + allScopes []string + commonScopes []string + allowAnonymous bool +} + +func (ra *RouteAuthenticator) AllowsAnonymous() bool { + return ra.allowAnonymous +} + +// AllScopes returns a list of unique scopes that is the combination +// of all the scopes in the requirements +func (ra *RouteAuthenticator) AllScopes() []string { + return ra.allScopes +} + +// CommonScopes returns a list of unique scopes that are common in all the +// scopes in the requirements +func (ra *RouteAuthenticator) CommonScopes() []string { + return ra.commonScopes +} + +// Authenticate Authenticator interface implementation +func (ra *RouteAuthenticator) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) { + if ra.allowAnonymous { + route.Authenticator = ra + return true, nil, nil + } + // iterate in proper order + var lastResult interface{} + for _, scheme := range ra.Schemes { + if authenticator, ok := ra.Authenticator[scheme]; ok { + applies, princ, err := authenticator.Authenticate(&security.ScopedAuthRequest{ + Request: req, + RequiredScopes: ra.Scopes[scheme], + }) + if !applies { + return false, nil, nil + } + if err != nil { + route.Authenticator = ra + return true, nil, err + } + lastResult = princ + } + } + route.Authenticator = ra + return true, 
lastResult, nil +} + +func stringSliceUnion(slices ...[]string) []string { + unique := make(map[string]struct{}) + var result []string + for _, slice := range slices { + for _, entry := range slice { + if _, ok := unique[entry]; ok { + continue + } + unique[entry] = struct{}{} + result = append(result, entry) + } + } + return result +} + +func stringSliceIntersection(slices ...[]string) []string { + unique := make(map[string]int) + var intersection []string + + total := len(slices) + var emptyCnt int + for _, slice := range slices { + if len(slice) == 0 { + emptyCnt++ + continue + } + + for _, entry := range slice { + unique[entry]++ + if unique[entry] == total-emptyCnt { // this entry appeared in all the non-empty slices + intersection = append(intersection, entry) + } + } + } + + return intersection +} + +// RouteAuthenticators represents a group of authenticators that represent a logical OR +type RouteAuthenticators []RouteAuthenticator + +// AllowsAnonymous returns true when there is an authenticator that means optional auth +func (ras RouteAuthenticators) AllowsAnonymous() bool { + for _, ra := range ras { + if ra.AllowsAnonymous() { + return true + } + } + return false +} + +// Authenticate method implemention so this collection can be used as authenticator +func (ras RouteAuthenticators) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) { + var lastError error + var allowsAnon bool + var anonAuth RouteAuthenticator + + for _, ra := range ras { + if ra.AllowsAnonymous() { + anonAuth = ra + allowsAnon = true + continue + } + applies, usr, err := ra.Authenticate(req, route) + if !applies || err != nil || usr == nil { + if err != nil { + lastError = err + } + continue + } + return applies, usr, nil + } + + if allowsAnon && lastError == nil { + route.Authenticator = &anonAuth + return true, nil, lastError + } + return lastError != nil, nil, lastError +} + +type routeEntry struct { + PathPattern string + BasePath string + Operation 
*spec.Operation + Consumes []string + Consumers map[string]runtime.Consumer + Produces []string + Producers map[string]runtime.Producer + Parameters map[string]spec.Parameter + Handler http.Handler + Formats strfmt.Registry + Binder *UntypedRequestBinder + Authenticators RouteAuthenticators + Authorizer runtime.Authorizer +} + +// MatchedRoute represents the route that was matched in this request +type MatchedRoute struct { + routeEntry + Params RouteParams + Consumer runtime.Consumer + Producer runtime.Producer + Authenticator *RouteAuthenticator +} + +// HasAuth returns true when the route has a security requirement defined +func (m *MatchedRoute) HasAuth() bool { + return len(m.Authenticators) > 0 +} + +// NeedsAuth returns true when the request still +// needs to perform authentication +func (m *MatchedRoute) NeedsAuth() bool { + return m.HasAuth() && m.Authenticator == nil +} + +func (d *defaultRouter) Lookup(method, path string) (*MatchedRoute, bool) { + mth := strings.ToUpper(method) + d.debugLogf("looking up route for %s %s", method, path) + if Debug { + if len(d.routers) == 0 { + d.debugLogf("there are no known routers") + } + for meth := range d.routers { + d.debugLogf("got a router for %s", meth) + } + } + if router, ok := d.routers[mth]; ok { + if m, rp, ok := router.Lookup(fpath.Clean(path)); ok && m != nil { + if entry, ok := m.(*routeEntry); ok { + d.debugLogf("found a route for %s %s with %d parameters", method, path, len(entry.Parameters)) + var params RouteParams + for _, p := range rp { + v, err := url.PathUnescape(p.Value) + if err != nil { + d.debugLogf("failed to escape %q: %v", p.Value, err) + v = p.Value + } + // a workaround to handle fragment/composing parameters until they are supported in denco router + // check if this parameter is a fragment within a path segment + if xpos := strings.Index(entry.PathPattern, fmt.Sprintf("{%s}", p.Name)) + len(p.Name) + 2; xpos < len(entry.PathPattern) && entry.PathPattern[xpos] != '/' { + // extract 
fragment parameters + ep := strings.Split(entry.PathPattern[xpos:], "/")[0] + pnames, pvalues := decodeCompositParams(p.Name, v, ep, nil, nil) + for i, pname := range pnames { + params = append(params, RouteParam{Name: pname, Value: pvalues[i]}) + } + } else { + // use the parameter directly + params = append(params, RouteParam{Name: p.Name, Value: v}) + } + } + return &MatchedRoute{routeEntry: *entry, Params: params}, true + } + } else { + d.debugLogf("couldn't find a route by path for %s %s", method, path) + } + } else { + d.debugLogf("couldn't find a route by method for %s %s", method, path) + } + return nil, false +} + +func (d *defaultRouter) OtherMethods(method, path string) []string { + mn := strings.ToUpper(method) + var methods []string + for k, v := range d.routers { + if k != mn { + if _, _, ok := v.Lookup(fpath.Clean(path)); ok { + methods = append(methods, k) + continue + } + } + } + return methods +} + +func (d *defaultRouter) SetLogger(lg logger.Logger) { + d.debugLogf = debugLogfFunc(lg) +} + +// convert swagger parameters per path segment into a denco parameter as multiple parameters per segment are not supported in denco +var pathConverter = regexp.MustCompile(`{(.+?)}([^/]*)`) + +func decodeCompositParams(name string, value string, pattern string, names []string, values []string) ([]string, []string) { + pleft := strings.Index(pattern, "{") + names = append(names, name) + if pleft < 0 { + if strings.HasSuffix(value, pattern) { + values = append(values, value[:len(value)-len(pattern)]) + } else { + values = append(values, "") + } + } else { + toskip := pattern[:pleft] + pright := strings.Index(pattern, "}") + vright := strings.Index(value, toskip) + if vright >= 0 { + values = append(values, value[:vright]) + } else { + values = append(values, "") + value = "" + } + return decodeCompositParams(pattern[pleft+1:pright], value[vright+len(toskip):], pattern[pright+1:], names, values) + } + return names, values +} + +func (d *defaultRouteBuilder) 
AddRoute(method, path string, operation *spec.Operation) { + mn := strings.ToUpper(method) + + bp := fpath.Clean(d.spec.BasePath()) + if len(bp) > 0 && bp[len(bp)-1] == '/' { + bp = bp[:len(bp)-1] + } + + d.debugLogf("operation: %#v", *operation) + if handler, ok := d.api.HandlerFor(method, strings.TrimPrefix(path, bp)); ok { + consumes := d.analyzer.ConsumesFor(operation) + produces := d.analyzer.ProducesFor(operation) + parameters := d.analyzer.ParamsFor(method, strings.TrimPrefix(path, bp)) + + // add API defaults if not part of the spec + if defConsumes := d.api.DefaultConsumes(); defConsumes != "" && !swag.ContainsStringsCI(consumes, defConsumes) { + consumes = append(consumes, defConsumes) + } + + if defProduces := d.api.DefaultProduces(); defProduces != "" && !swag.ContainsStringsCI(produces, defProduces) { + produces = append(produces, defProduces) + } + + requestBinder := NewUntypedRequestBinder(parameters, d.spec.Spec(), d.api.Formats()) + requestBinder.setDebugLogf(d.debugLogf) + record := denco.NewRecord(pathConverter.ReplaceAllString(path, ":$1"), &routeEntry{ + BasePath: bp, + PathPattern: path, + Operation: operation, + Handler: handler, + Consumes: consumes, + Produces: produces, + Consumers: d.api.ConsumersFor(normalizeOffers(consumes)), + Producers: d.api.ProducersFor(normalizeOffers(produces)), + Parameters: parameters, + Formats: d.api.Formats(), + Binder: requestBinder, + Authenticators: d.buildAuthenticators(operation), + Authorizer: d.api.Authorizer(), + }) + d.records[mn] = append(d.records[mn], record) + } +} + +func (d *defaultRouteBuilder) buildAuthenticators(operation *spec.Operation) RouteAuthenticators { + requirements := d.analyzer.SecurityRequirementsFor(operation) + auths := make([]RouteAuthenticator, 0, len(requirements)) + for _, reqs := range requirements { + schemes := make([]string, 0, len(reqs)) + scopes := make(map[string][]string, len(reqs)) + scopeSlices := make([][]string, 0, len(reqs)) + for _, req := range reqs { + 
schemes = append(schemes, req.Name) + scopes[req.Name] = req.Scopes + scopeSlices = append(scopeSlices, req.Scopes) + } + + definitions := d.analyzer.SecurityDefinitionsForRequirements(reqs) + authenticators := d.api.AuthenticatorsFor(definitions) + auths = append(auths, RouteAuthenticator{ + Authenticator: authenticators, + Schemes: schemes, + Scopes: scopes, + allScopes: stringSliceUnion(scopeSlices...), + commonScopes: stringSliceIntersection(scopeSlices...), + allowAnonymous: len(reqs) == 1 && reqs[0].Name == "", + }) + } + return auths +} + +func (d *defaultRouteBuilder) Build() *defaultRouter { + routers := make(map[string]*denco.Router) + for method, records := range d.records { + router := denco.New() + _ = router.Build(records) + routers[method] = router + } + return &defaultRouter{ + spec: d.spec, + routers: routers, + debugLogf: d.debugLogf, + } +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/security.go b/vendor/github.com/go-openapi/runtime/middleware/security.go new file mode 100644 index 0000000..2b061ca --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/security.go @@ -0,0 +1,39 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import "net/http" + +func newSecureAPI(ctx *Context, next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + route, rCtx, _ := ctx.RouteInfo(r) + if rCtx != nil { + r = rCtx + } + if route != nil && !route.NeedsAuth() { + next.ServeHTTP(rw, r) + return + } + + _, rCtx, err := ctx.Authorize(r, route) + if err != nil { + ctx.Respond(rw, r, route.Produces, route, err) + return + } + r = rCtx + + next.ServeHTTP(rw, r) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/spec.go b/vendor/github.com/go-openapi/runtime/middleware/spec.go new file mode 100644 index 0000000..87e17e3 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/spec.go @@ -0,0 +1,102 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package middleware + +import ( + "net/http" + "path" +) + +const ( + contentTypeHeader = "Content-Type" + applicationJSON = "application/json" +) + +// SpecOption can be applied to the Spec serving middleware +type SpecOption func(*specOptions) + +var defaultSpecOptions = specOptions{ + Path: "", + Document: "swagger.json", +} + +type specOptions struct { + Path string + Document string +} + +func specOptionsWithDefaults(opts []SpecOption) specOptions { + o := defaultSpecOptions + for _, apply := range opts { + apply(&o) + } + + return o +} + +// Spec creates a middleware to serve a swagger spec as a JSON document. +// +// This allows for altering the spec before starting the http listener. +// +// The basePath argument indicates the path of the spec document (defaults to "/"). +// Additional SpecOption can be used to change the name of the document (defaults to "swagger.json"). +func Spec(basePath string, b []byte, next http.Handler, opts ...SpecOption) http.Handler { + if basePath == "" { + basePath = "/" + } + o := specOptionsWithDefaults(opts) + pth := path.Join(basePath, o.Path, o.Document) + + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if path.Clean(r.URL.Path) == pth { + rw.Header().Set(contentTypeHeader, applicationJSON) + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write(b) + + return + } + + if next != nil { + next.ServeHTTP(rw, r) + + return + } + + rw.Header().Set(contentTypeHeader, applicationJSON) + rw.WriteHeader(http.StatusNotFound) + }) +} + +// WithSpecPath sets the path to be joined to the base path of the Spec middleware. +// +// This is empty by default. +func WithSpecPath(pth string) SpecOption { + return func(o *specOptions) { + o.Path = pth + } +} + +// WithSpecDocument sets the name of the JSON document served as a spec. 
+// +// By default, this is "swagger.json" +func WithSpecDocument(doc string) SpecOption { + return func(o *specOptions) { + if doc == "" { + return + } + + o.Document = doc + } +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go new file mode 100644 index 0000000..ec3c10c --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go @@ -0,0 +1,175 @@ +package middleware + +import ( + "bytes" + "fmt" + "html/template" + "net/http" + "path" +) + +// SwaggerUIOpts configures the SwaggerUI middleware +type SwaggerUIOpts struct { + // BasePath for the API, defaults to: / + BasePath string + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". + Path string + + // SpecURL is the URL of the spec document. + // + // Defaults to: /swagger.json + SpecURL string + + // Title for the documentation site, default to: API documentation + Title string + + // Template specifies a custom template to serve the UI + Template string + + // OAuthCallbackURL the url called after OAuth2 login + OAuthCallbackURL string + + // The three components needed to embed swagger-ui + + // SwaggerURL points to the js that generates the SwaggerUI site. 
+ // + // Defaults to: https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js + SwaggerURL string + + SwaggerPresetURL string + SwaggerStylesURL string + + Favicon32 string + Favicon16 string +} + +// EnsureDefaults in case some options are missing +func (r *SwaggerUIOpts) EnsureDefaults() { + r.ensureDefaults() + + if r.Template == "" { + r.Template = swaggeruiTemplate + } +} + +func (r *SwaggerUIOpts) EnsureDefaultsOauth2() { + r.ensureDefaults() + + if r.Template == "" { + r.Template = swaggerOAuthTemplate + } +} + +func (r *SwaggerUIOpts) ensureDefaults() { + common := toCommonUIOptions(r) + common.EnsureDefaults() + fromCommonToAnyOptions(common, r) + + // swaggerui-specifics + if r.OAuthCallbackURL == "" { + r.OAuthCallbackURL = path.Join(r.BasePath, r.Path, "oauth2-callback") + } + if r.SwaggerURL == "" { + r.SwaggerURL = swaggerLatest + } + if r.SwaggerPresetURL == "" { + r.SwaggerPresetURL = swaggerPresetLatest + } + if r.SwaggerStylesURL == "" { + r.SwaggerStylesURL = swaggerStylesLatest + } + if r.Favicon16 == "" { + r.Favicon16 = swaggerFavicon16Latest + } + if r.Favicon32 == "" { + r.Favicon32 = swaggerFavicon32Latest + } +} + +// SwaggerUI creates a middleware to serve a documentation site for a swagger spec. +// +// This allows for altering the spec before starting the http listener. 
+func SwaggerUI(opts SwaggerUIOpts, next http.Handler) http.Handler { + opts.EnsureDefaults() + + pth := path.Join(opts.BasePath, opts.Path) + tmpl := template.Must(template.New("swaggerui").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } + + return serveUI(pth, assets.Bytes(), next) +} + +const ( + swaggerLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js" + swaggerPresetLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-standalone-preset.js" + swaggerStylesLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui.css" + swaggerFavicon32Latest = "https://unpkg.com/swagger-ui-dist/favicon-32x32.png" + swaggerFavicon16Latest = "https://unpkg.com/swagger-ui-dist/favicon-16x16.png" + swaggeruiTemplate = ` + + + + + {{ .Title }} + + + + + + + + +

+ + + + + + +` +) diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go new file mode 100644 index 0000000..e81212f --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go @@ -0,0 +1,105 @@ +package middleware + +import ( + "bytes" + "fmt" + "net/http" + "text/template" +) + +func SwaggerUIOAuth2Callback(opts SwaggerUIOpts, next http.Handler) http.Handler { + opts.EnsureDefaultsOauth2() + + pth := opts.OAuthCallbackURL + tmpl := template.Must(template.New("swaggeroauth").Parse(opts.Template)) + assets := bytes.NewBuffer(nil) + if err := tmpl.Execute(assets, opts); err != nil { + panic(fmt.Errorf("cannot execute template: %w", err)) + } + + return serveUI(pth, assets.Bytes(), next) +} + +const ( + swaggerOAuthTemplate = ` + + + + {{ .Title }} + + + + + +` +) diff --git a/vendor/github.com/go-openapi/runtime/middleware/ui_options.go b/vendor/github.com/go-openapi/runtime/middleware/ui_options.go new file mode 100644 index 0000000..b86efa0 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/ui_options.go @@ -0,0 +1,173 @@ +package middleware + +import ( + "bytes" + "encoding/gob" + "fmt" + "net/http" + "path" + "strings" +) + +const ( + // constants that are common to all UI-serving middlewares + defaultDocsPath = "docs" + defaultDocsURL = "/swagger.json" + defaultDocsTitle = "API Documentation" +) + +// uiOptions defines common options for UI serving middlewares. +type uiOptions struct { + // BasePath for the UI, defaults to: / + BasePath string + + // Path combines with BasePath to construct the path to the UI, defaults to: "docs". + Path string + + // SpecURL is the URL of the spec document. 
+ // + // Defaults to: /swagger.json + SpecURL string + + // Title for the documentation site, default to: API documentation + Title string + + // Template specifies a custom template to serve the UI + Template string +} + +// toCommonUIOptions converts any UI option type to retain the common options. +// +// This uses gob encoding/decoding to convert common fields from one struct to another. +func toCommonUIOptions(opts interface{}) uiOptions { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + dec := gob.NewDecoder(&buf) + var o uiOptions + err := enc.Encode(opts) + if err != nil { + panic(err) + } + + err = dec.Decode(&o) + if err != nil { + panic(err) + } + + return o +} + +func fromCommonToAnyOptions[T any](source uiOptions, target *T) { + var buf bytes.Buffer + enc := gob.NewEncoder(&buf) + dec := gob.NewDecoder(&buf) + err := enc.Encode(source) + if err != nil { + panic(err) + } + + err = dec.Decode(target) + if err != nil { + panic(err) + } +} + +// UIOption can be applied to UI serving middleware, such as Context.APIHandler or +// Context.APIHandlerSwaggerUI to alter the defaut behavior. +type UIOption func(*uiOptions) + +func uiOptionsWithDefaults(opts []UIOption) uiOptions { + var o uiOptions + for _, apply := range opts { + apply(&o) + } + + return o +} + +// WithUIBasePath sets the base path from where to serve the UI assets. +// +// By default, Context middleware sets this value to the API base path. +func WithUIBasePath(base string) UIOption { + return func(o *uiOptions) { + if !strings.HasPrefix(base, "/") { + base = "/" + base + } + o.BasePath = base + } +} + +// WithUIPath sets the path from where to serve the UI assets (i.e. /{basepath}/{path}. +func WithUIPath(pth string) UIOption { + return func(o *uiOptions) { + o.Path = pth + } +} + +// WithUISpecURL sets the path from where to serve swagger spec document. +// +// This may be specified as a full URL or a path. 
+// +// By default, this is "/swagger.json" +func WithUISpecURL(specURL string) UIOption { + return func(o *uiOptions) { + o.SpecURL = specURL + } +} + +// WithUITitle sets the title of the UI. +// +// By default, Context middleware sets this value to the title found in the API spec. +func WithUITitle(title string) UIOption { + return func(o *uiOptions) { + o.Title = title + } +} + +// WithTemplate allows to set a custom template for the UI. +// +// UI middleware will panic if the template does not parse or execute properly. +func WithTemplate(tpl string) UIOption { + return func(o *uiOptions) { + o.Template = tpl + } +} + +// EnsureDefaults in case some options are missing +func (r *uiOptions) EnsureDefaults() { + if r.BasePath == "" { + r.BasePath = "/" + } + if r.Path == "" { + r.Path = defaultDocsPath + } + if r.SpecURL == "" { + r.SpecURL = defaultDocsURL + } + if r.Title == "" { + r.Title = defaultDocsTitle + } +} + +// serveUI creates a middleware that serves a templated asset as text/html. 
+func serveUI(pth string, assets []byte, next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + if path.Clean(r.URL.Path) == pth { + rw.Header().Set(contentTypeHeader, "text/html; charset=utf-8") + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write(assets) + + return + } + + if next != nil { + next.ServeHTTP(rw, r) + + return + } + + rw.Header().Set(contentTypeHeader, "text/plain") + rw.WriteHeader(http.StatusNotFound) + _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth))) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go new file mode 100644 index 0000000..7b7269b --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go @@ -0,0 +1,287 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package untyped + +import ( + "fmt" + "net/http" + "sort" + "strings" + + "github.com/go-openapi/analysis" + "github.com/go-openapi/errors" + "github.com/go-openapi/loads" + "github.com/go-openapi/spec" + "github.com/go-openapi/strfmt" + + "github.com/go-openapi/runtime" +) + +// NewAPI creates the default untyped API +func NewAPI(spec *loads.Document) *API { + var an *analysis.Spec + if spec != nil && spec.Spec() != nil { + an = analysis.New(spec.Spec()) + } + api := &API{ + spec: spec, + analyzer: an, + consumers: make(map[string]runtime.Consumer, 10), + producers: make(map[string]runtime.Producer, 10), + authenticators: make(map[string]runtime.Authenticator), + operations: make(map[string]map[string]runtime.OperationHandler), + ServeError: errors.ServeError, + Models: make(map[string]func() interface{}), + formats: strfmt.NewFormats(), + } + return api.WithJSONDefaults() +} + +// API represents an untyped mux for a swagger spec +type API struct { + spec *loads.Document + analyzer *analysis.Spec + DefaultProduces string + DefaultConsumes string + consumers map[string]runtime.Consumer + producers map[string]runtime.Producer + authenticators map[string]runtime.Authenticator + authorizer runtime.Authorizer + operations map[string]map[string]runtime.OperationHandler + ServeError func(http.ResponseWriter, *http.Request, error) + Models map[string]func() interface{} + formats strfmt.Registry +} + +// WithJSONDefaults loads the json defaults for this api +func (d *API) WithJSONDefaults() *API { + d.DefaultConsumes = runtime.JSONMime + d.DefaultProduces = runtime.JSONMime + d.consumers[runtime.JSONMime] = runtime.JSONConsumer() + d.producers[runtime.JSONMime] = runtime.JSONProducer() + return d +} + +// WithoutJSONDefaults clears the json defaults for this api +func (d *API) WithoutJSONDefaults() *API { + d.DefaultConsumes = "" + d.DefaultProduces = "" + delete(d.consumers, runtime.JSONMime) + delete(d.producers, runtime.JSONMime) + return d +} + +// Formats returns 
the registered string formats +func (d *API) Formats() strfmt.Registry { + if d.formats == nil { + d.formats = strfmt.NewFormats() + } + return d.formats +} + +// RegisterFormat registers a custom format validator +func (d *API) RegisterFormat(name string, format strfmt.Format, validator strfmt.Validator) { + if d.formats == nil { + d.formats = strfmt.NewFormats() + } + d.formats.Add(name, format, validator) +} + +// RegisterAuth registers an auth handler in this api +func (d *API) RegisterAuth(scheme string, handler runtime.Authenticator) { + if d.authenticators == nil { + d.authenticators = make(map[string]runtime.Authenticator) + } + d.authenticators[scheme] = handler +} + +// RegisterAuthorizer registers an authorizer handler in this api +func (d *API) RegisterAuthorizer(handler runtime.Authorizer) { + d.authorizer = handler +} + +// RegisterConsumer registers a consumer for a media type. +func (d *API) RegisterConsumer(mediaType string, handler runtime.Consumer) { + if d.consumers == nil { + d.consumers = make(map[string]runtime.Consumer, 10) + } + d.consumers[strings.ToLower(mediaType)] = handler +} + +// RegisterProducer registers a producer for a media type +func (d *API) RegisterProducer(mediaType string, handler runtime.Producer) { + if d.producers == nil { + d.producers = make(map[string]runtime.Producer, 10) + } + d.producers[strings.ToLower(mediaType)] = handler +} + +// RegisterOperation registers an operation handler for an operation name +func (d *API) RegisterOperation(method, path string, handler runtime.OperationHandler) { + if d.operations == nil { + d.operations = make(map[string]map[string]runtime.OperationHandler, 30) + } + um := strings.ToUpper(method) + if b, ok := d.operations[um]; !ok || b == nil { + d.operations[um] = make(map[string]runtime.OperationHandler) + } + d.operations[um][path] = handler +} + +// OperationHandlerFor returns the operation handler for the specified id if it can be found +func (d *API) OperationHandlerFor(method, 
path string) (runtime.OperationHandler, bool) { + if d.operations == nil { + return nil, false + } + if pi, ok := d.operations[strings.ToUpper(method)]; ok { + h, ok := pi[path] + return h, ok + } + return nil, false +} + +// ConsumersFor gets the consumers for the specified media types +func (d *API) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer { + result := make(map[string]runtime.Consumer) + for _, mt := range mediaTypes { + if consumer, ok := d.consumers[mt]; ok { + result[mt] = consumer + } + } + return result +} + +// ProducersFor gets the producers for the specified media types +func (d *API) ProducersFor(mediaTypes []string) map[string]runtime.Producer { + result := make(map[string]runtime.Producer) + for _, mt := range mediaTypes { + if producer, ok := d.producers[mt]; ok { + result[mt] = producer + } + } + return result +} + +// AuthenticatorsFor gets the authenticators for the specified security schemes +func (d *API) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator { + result := make(map[string]runtime.Authenticator) + for k := range schemes { + if a, ok := d.authenticators[k]; ok { + result[k] = a + } + } + return result +} + +// Authorizer returns the registered authorizer +func (d *API) Authorizer() runtime.Authorizer { + return d.authorizer +} + +// Validate validates this API for any missing items +func (d *API) Validate() error { + return d.validate() +} + +// validateWith validates the registrations in this API against the provided spec analyzer +func (d *API) validate() error { + consumes := make([]string, 0, len(d.consumers)) + for k := range d.consumers { + consumes = append(consumes, k) + } + + produces := make([]string, 0, len(d.producers)) + for k := range d.producers { + produces = append(produces, k) + } + + authenticators := make([]string, 0, len(d.authenticators)) + for k := range d.authenticators { + authenticators = append(authenticators, k) + } + + operations := 
make([]string, 0, len(d.operations)) + for m, v := range d.operations { + for p := range v { + operations = append(operations, fmt.Sprintf("%s %s", strings.ToUpper(m), p)) + } + } + + secDefinitions := d.spec.Spec().SecurityDefinitions + definedAuths := make([]string, 0, len(secDefinitions)) + for k := range secDefinitions { + definedAuths = append(definedAuths, k) + } + + if err := d.verify("consumes", consumes, d.analyzer.RequiredConsumes()); err != nil { + return err + } + if err := d.verify("produces", produces, d.analyzer.RequiredProduces()); err != nil { + return err + } + if err := d.verify("operation", operations, d.analyzer.OperationMethodPaths()); err != nil { + return err + } + + requiredAuths := d.analyzer.RequiredSecuritySchemes() + if err := d.verify("auth scheme", authenticators, requiredAuths); err != nil { + return err + } + if err := d.verify("security definitions", definedAuths, requiredAuths); err != nil { + return err + } + return nil +} + +func (d *API) verify(name string, registrations []string, expectations []string) error { + sort.Strings(registrations) + sort.Strings(expectations) + + expected := map[string]struct{}{} + seen := map[string]struct{}{} + + for _, v := range expectations { + expected[v] = struct{}{} + } + + var unspecified []string + for _, v := range registrations { + seen[v] = struct{}{} + if _, ok := expected[v]; !ok { + unspecified = append(unspecified, v) + } + } + + for k := range seen { + delete(expected, k) + } + + unregistered := make([]string, 0, len(expected)) + for k := range expected { + unregistered = append(unregistered, k) + } + sort.Strings(unspecified) + sort.Strings(unregistered) + + if len(unregistered) > 0 || len(unspecified) > 0 { + return &errors.APIVerificationFailed{ + Section: name, + MissingSpecification: unspecified, + MissingRegistration: unregistered, + } + } + + return nil +} diff --git a/vendor/github.com/go-openapi/runtime/middleware/validation.go 
b/vendor/github.com/go-openapi/runtime/middleware/validation.go new file mode 100644 index 0000000..0a5356c --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/middleware/validation.go @@ -0,0 +1,130 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package middleware + +import ( + "mime" + "net/http" + "strings" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" + + "github.com/go-openapi/runtime" +) + +type validation struct { + context *Context + result []error + request *http.Request + route *MatchedRoute + bound map[string]interface{} +} + +// ContentType validates the content type of a request +func validateContentType(allowed []string, actual string) error { + if len(allowed) == 0 { + return nil + } + mt, _, err := mime.ParseMediaType(actual) + if err != nil { + return errors.InvalidContentType(actual, allowed) + } + if swag.ContainsStringsCI(allowed, mt) { + return nil + } + if swag.ContainsStringsCI(allowed, "*/*") { + return nil + } + parts := strings.Split(actual, "/") + if len(parts) == 2 && swag.ContainsStringsCI(allowed, parts[0]+"/*") { + return nil + } + return errors.InvalidContentType(actual, allowed) +} + +func validateRequest(ctx *Context, request *http.Request, route *MatchedRoute) *validation { + validate := &validation{ + context: ctx, + request: request, + route: route, + bound: make(map[string]interface{}), + } + validate.debugLogf("validating request %s %s", 
request.Method, request.URL.EscapedPath()) + + validate.contentType() + if len(validate.result) == 0 { + validate.responseFormat() + } + if len(validate.result) == 0 { + validate.parameters() + } + + return validate +} + +func (v *validation) debugLogf(format string, args ...any) { + v.context.debugLogf(format, args...) +} + +func (v *validation) parameters() { + v.debugLogf("validating request parameters for %s %s", v.request.Method, v.request.URL.EscapedPath()) + if result := v.route.Binder.Bind(v.request, v.route.Params, v.route.Consumer, v.bound); result != nil { + if result.Error() == "validation failure list" { + for _, e := range result.(*errors.Validation).Value.([]interface{}) { + v.result = append(v.result, e.(error)) + } + return + } + v.result = append(v.result, result) + } +} + +func (v *validation) contentType() { + if len(v.result) == 0 && runtime.HasBody(v.request) { + v.debugLogf("validating body content type for %s %s", v.request.Method, v.request.URL.EscapedPath()) + ct, _, req, err := v.context.ContentType(v.request) + if err != nil { + v.result = append(v.result, err) + } else { + v.request = req + } + + if len(v.result) == 0 { + v.debugLogf("validating content type for %q against [%s]", ct, strings.Join(v.route.Consumes, ", ")) + if err := validateContentType(v.route.Consumes, ct); err != nil { + v.result = append(v.result, err) + } + } + if ct != "" && v.route.Consumer == nil { + cons, ok := v.route.Consumers[ct] + if !ok { + v.result = append(v.result, errors.New(500, "no consumer registered for %s", ct)) + } else { + v.route.Consumer = cons + } + } + } +} + +func (v *validation) responseFormat() { + // if the route provides values for Produces and no format could be identify then return an error. + // if the route does not specify values for Produces then treat request as valid since the API designer + // choose not to specify the format for responses. 
+ if str, rCtx := v.context.ResponseFormat(v.request, v.route.Produces); str == "" && len(v.route.Produces) > 0 { + v.request = rCtx + v.result = append(v.result, errors.InvalidResponseFormat(v.request.Header.Get(runtime.HeaderAccept), v.route.Produces)) + } +} diff --git a/vendor/github.com/go-openapi/runtime/request.go b/vendor/github.com/go-openapi/runtime/request.go new file mode 100644 index 0000000..9e3e1ec --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/request.go @@ -0,0 +1,149 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +import ( + "bufio" + "context" + "errors" + "io" + "net/http" + "strings" + + "github.com/go-openapi/swag" +) + +// CanHaveBody returns true if this method can have a body +func CanHaveBody(method string) bool { + mn := strings.ToUpper(method) + return mn == "POST" || mn == "PUT" || mn == "PATCH" || mn == "DELETE" +} + +// IsSafe returns true if this is a request with a safe method +func IsSafe(r *http.Request) bool { + mn := strings.ToUpper(r.Method) + return mn == "GET" || mn == "HEAD" +} + +// AllowsBody returns true if the request allows for a body +func AllowsBody(r *http.Request) bool { + mn := strings.ToUpper(r.Method) + return mn != "HEAD" +} + +// HasBody returns true if this method needs a content-type +func HasBody(r *http.Request) bool { + // happy case: we have a content length set + if r.ContentLength > 0 { + return true + } + + if r.Header.Get("content-length") != "" { + // in this case, no Transfer-Encoding should be present + // we have a header set but it was explicitly set to 0, so we assume no body + return false + } + + rdr := newPeekingReader(r.Body) + r.Body = rdr + return rdr.HasContent() +} + +func newPeekingReader(r io.ReadCloser) *peekingReader { + if r == nil { + return nil + } + return &peekingReader{ + underlying: bufio.NewReader(r), + orig: r, + } +} + +type peekingReader struct { + underlying interface { + Buffered() int + Peek(int) ([]byte, error) + Read([]byte) (int, error) + } + orig io.ReadCloser +} + +func (p *peekingReader) HasContent() bool { + if p == nil { + return false + } + if p.underlying.Buffered() > 0 { + return true + } + b, err := p.underlying.Peek(1) + if err != nil { + return false + } + return len(b) > 0 +} + +func (p *peekingReader) Read(d []byte) (int, error) { + if p == nil { + return 0, io.EOF + } + if p.underlying == nil { + return 0, io.ErrUnexpectedEOF + } + return p.underlying.Read(d) +} + +func (p *peekingReader) Close() error { + if p.underlying == nil { + return errors.New("reader 
already closed") + } + p.underlying = nil + if p.orig != nil { + return p.orig.Close() + } + return nil +} + +// JSONRequest creates a new http request with json headers set. +// +// It uses context.Background. +func JSONRequest(method, urlStr string, body io.Reader) (*http.Request, error) { + req, err := http.NewRequestWithContext(context.Background(), method, urlStr, body) + if err != nil { + return nil, err + } + req.Header.Add(HeaderContentType, JSONMime) + req.Header.Add(HeaderAccept, JSONMime) + return req, nil +} + +// Gettable for things with a method GetOK(string) (data string, hasKey bool, hasValue bool) +type Gettable interface { + GetOK(string) ([]string, bool, bool) +} + +// ReadSingleValue reads a single value from the source +func ReadSingleValue(values Gettable, name string) string { + vv, _, hv := values.GetOK(name) + if hv { + return vv[len(vv)-1] + } + return "" +} + +// ReadCollectionValue reads a collection value from a string data source +func ReadCollectionValue(values Gettable, name, collectionFormat string) []string { + v := ReadSingleValue(values, name) + return swag.SplitByFormat(v, collectionFormat) +} diff --git a/vendor/github.com/go-openapi/runtime/security/authenticator.go b/vendor/github.com/go-openapi/runtime/security/authenticator.go new file mode 100644 index 0000000..bb30472 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/security/authenticator.go @@ -0,0 +1,277 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package security + +import ( + "context" + "net/http" + "strings" + + "github.com/go-openapi/errors" + + "github.com/go-openapi/runtime" +) + +const ( + query = "query" + header = "header" + accessTokenParam = "access_token" +) + +// HttpAuthenticator is a function that authenticates a HTTP request +func HttpAuthenticator(handler func(*http.Request) (bool, interface{}, error)) runtime.Authenticator { //nolint:revive,stylecheck + return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) { + if request, ok := params.(*http.Request); ok { + return handler(request) + } + if scoped, ok := params.(*ScopedAuthRequest); ok { + return handler(scoped.Request) + } + return false, nil, nil + }) +} + +// ScopedAuthenticator is a function that authenticates a HTTP request against a list of valid scopes +func ScopedAuthenticator(handler func(*ScopedAuthRequest) (bool, interface{}, error)) runtime.Authenticator { + return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) { + if request, ok := params.(*ScopedAuthRequest); ok { + return handler(request) + } + return false, nil, nil + }) +} + +// UserPassAuthentication authentication function +type UserPassAuthentication func(string, string) (interface{}, error) + +// UserPassAuthenticationCtx authentication function with context.Context +type UserPassAuthenticationCtx func(context.Context, string, string) (context.Context, interface{}, error) + +// TokenAuthentication authentication function +type TokenAuthentication func(string) (interface{}, error) + +// TokenAuthenticationCtx authentication function with context.Context +type TokenAuthenticationCtx func(context.Context, string) (context.Context, interface{}, error) + +// ScopedTokenAuthentication authentication function +type ScopedTokenAuthentication func(string, []string) (interface{}, error) + +// 
ScopedTokenAuthenticationCtx authentication function with context.Context +type ScopedTokenAuthenticationCtx func(context.Context, string, []string) (context.Context, interface{}, error) + +var DefaultRealmName = "API" + +type secCtxKey uint8 + +const ( + failedBasicAuth secCtxKey = iota + oauth2SchemeName +) + +func FailedBasicAuth(r *http.Request) string { + return FailedBasicAuthCtx(r.Context()) +} + +func FailedBasicAuthCtx(ctx context.Context) string { + v, ok := ctx.Value(failedBasicAuth).(string) + if !ok { + return "" + } + return v +} + +func OAuth2SchemeName(r *http.Request) string { + return OAuth2SchemeNameCtx(r.Context()) +} + +func OAuth2SchemeNameCtx(ctx context.Context) string { + v, ok := ctx.Value(oauth2SchemeName).(string) + if !ok { + return "" + } + return v +} + +// BasicAuth creates a basic auth authenticator with the provided authentication function +func BasicAuth(authenticate UserPassAuthentication) runtime.Authenticator { + return BasicAuthRealm(DefaultRealmName, authenticate) +} + +// BasicAuthRealm creates a basic auth authenticator with the provided authentication function and realm name +func BasicAuthRealm(realm string, authenticate UserPassAuthentication) runtime.Authenticator { + if realm == "" { + realm = DefaultRealmName + } + + return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) { + if usr, pass, ok := r.BasicAuth(); ok { + p, err := authenticate(usr, pass) + if err != nil { + *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm)) + } + return true, p, err + } + *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm)) + return false, nil, nil + }) +} + +// BasicAuthCtx creates a basic auth authenticator with the provided authentication function with support for context.Context +func BasicAuthCtx(authenticate UserPassAuthenticationCtx) runtime.Authenticator { + return BasicAuthRealmCtx(DefaultRealmName, authenticate) +} + +// BasicAuthRealmCtx creates a basic auth 
authenticator with the provided authentication function and realm name with support for context.Context +func BasicAuthRealmCtx(realm string, authenticate UserPassAuthenticationCtx) runtime.Authenticator { + if realm == "" { + realm = DefaultRealmName + } + + return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) { + if usr, pass, ok := r.BasicAuth(); ok { + ctx, p, err := authenticate(r.Context(), usr, pass) + if err != nil { + ctx = context.WithValue(ctx, failedBasicAuth, realm) + } + *r = *r.WithContext(ctx) + return true, p, err + } + *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm)) + return false, nil, nil + }) +} + +// APIKeyAuth creates an authenticator that uses a token for authorization. +// This token can be obtained from either a header or a query string +func APIKeyAuth(name, in string, authenticate TokenAuthentication) runtime.Authenticator { + inl := strings.ToLower(in) + if inl != query && inl != header { + // panic because this is most likely a typo + panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"")) + } + + var getToken func(*http.Request) string + switch inl { + case header: + getToken = func(r *http.Request) string { return r.Header.Get(name) } + case query: + getToken = func(r *http.Request) string { return r.URL.Query().Get(name) } + } + + return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) { + token := getToken(r) + if token == "" { + return false, nil, nil + } + + p, err := authenticate(token) + return true, p, err + }) +} + +// APIKeyAuthCtx creates an authenticator that uses a token for authorization with support for context.Context. 
+// This token can be obtained from either a header or a query string +func APIKeyAuthCtx(name, in string, authenticate TokenAuthenticationCtx) runtime.Authenticator { + inl := strings.ToLower(in) + if inl != query && inl != header { + // panic because this is most likely a typo + panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"")) + } + + var getToken func(*http.Request) string + switch inl { + case header: + getToken = func(r *http.Request) string { return r.Header.Get(name) } + case query: + getToken = func(r *http.Request) string { return r.URL.Query().Get(name) } + } + + return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) { + token := getToken(r) + if token == "" { + return false, nil, nil + } + + ctx, p, err := authenticate(r.Context(), token) + *r = *r.WithContext(ctx) + return true, p, err + }) +} + +// ScopedAuthRequest contains both a http request and the required scopes for a particular operation +type ScopedAuthRequest struct { + Request *http.Request + RequiredScopes []string +} + +// BearerAuth for use with oauth2 flows +func BearerAuth(name string, authenticate ScopedTokenAuthentication) runtime.Authenticator { + const prefix = "Bearer " + return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) { + var token string + hdr := r.Request.Header.Get(runtime.HeaderAuthorization) + if strings.HasPrefix(hdr, prefix) { + token = strings.TrimPrefix(hdr, prefix) + } + if token == "" { + qs := r.Request.URL.Query() + token = qs.Get(accessTokenParam) + } + //#nosec + ct, _, _ := runtime.ContentType(r.Request.Header) + if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") { + token = r.Request.FormValue(accessTokenParam) + } + + if token == "" { + return false, nil, nil + } + + rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name) + *r.Request = *r.Request.WithContext(rctx) + p, err := authenticate(token, r.RequiredScopes) 
+ return true, p, err + }) +} + +// BearerAuthCtx for use with oauth2 flows with support for context.Context. +func BearerAuthCtx(name string, authenticate ScopedTokenAuthenticationCtx) runtime.Authenticator { + const prefix = "Bearer " + return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) { + var token string + hdr := r.Request.Header.Get(runtime.HeaderAuthorization) + if strings.HasPrefix(hdr, prefix) { + token = strings.TrimPrefix(hdr, prefix) + } + if token == "" { + qs := r.Request.URL.Query() + token = qs.Get(accessTokenParam) + } + //#nosec + ct, _, _ := runtime.ContentType(r.Request.Header) + if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") { + token = r.Request.FormValue(accessTokenParam) + } + + if token == "" { + return false, nil, nil + } + + rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name) + ctx, p, err := authenticate(rctx, token, r.RequiredScopes) + *r.Request = *r.Request.WithContext(ctx) + return true, p, err + }) +} diff --git a/vendor/github.com/go-openapi/runtime/security/authorizer.go b/vendor/github.com/go-openapi/runtime/security/authorizer.go new file mode 100644 index 0000000..00c1a4d --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/security/authorizer.go @@ -0,0 +1,27 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package security + +import ( + "net/http" + + "github.com/go-openapi/runtime" +) + +// Authorized provides a default implementation of the Authorizer interface where all +// requests are authorized (successful) +func Authorized() runtime.Authorizer { + return runtime.AuthorizerFunc(func(_ *http.Request, _ interface{}) error { return nil }) +} diff --git a/vendor/github.com/go-openapi/runtime/statuses.go b/vendor/github.com/go-openapi/runtime/statuses.go new file mode 100644 index 0000000..3b011a0 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/statuses.go @@ -0,0 +1,90 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +// Statuses lists the most common HTTP status codes to default message +// taken from https://httpstatuses.com/ +var Statuses = map[int]string{ + 100: "Continue", + 101: "Switching Protocols", + 102: "Processing", + 103: "Checkpoint", + 122: "URI too long", + 200: "OK", + 201: "Created", + 202: "Accepted", + 203: "Request Processed", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 207: "Multi-Status", + 208: "Already Reported", + 226: "IM Used", + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 305: "Use Proxy", + 306: "Switch Proxy", + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request-URI Too Long", + 415: "Unsupported Media Type", + 416: "Request Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", + 420: "Enhance Your Calm", + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 426: "Upgrade Required", + 428: "Precondition Required", + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 444: "No Response", + 449: "Retry With", + 450: "Blocked by Windows Parental Controls", + 451: "Wrong Exchange Server", + 499: "Client Closed Request", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", + 507: "Insufficient Storage", + 508: "Loop Detected", + 509: "Bandwidth Limit Exceeded", + 510: "Not Extended", + 511: "Network Authentication Required", + 598: "Network read 
timeout error", + 599: "Network connect timeout error", +} diff --git a/vendor/github.com/go-openapi/runtime/text.go b/vendor/github.com/go-openapi/runtime/text.go new file mode 100644 index 0000000..f33320b --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/text.go @@ -0,0 +1,116 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package runtime + +import ( + "bytes" + "encoding" + "errors" + "fmt" + "io" + "reflect" + + "github.com/go-openapi/swag" +) + +// TextConsumer creates a new text consumer +func TextConsumer() Consumer { + return ConsumerFunc(func(reader io.Reader, data interface{}) error { + if reader == nil { + return errors.New("TextConsumer requires a reader") // early exit + } + + buf := new(bytes.Buffer) + _, err := buf.ReadFrom(reader) + if err != nil { + return err + } + b := buf.Bytes() + + // If the buffer is empty, no need to unmarshal it, which causes a panic. 
+ if len(b) == 0 { + return nil + } + + if tu, ok := data.(encoding.TextUnmarshaler); ok { + err := tu.UnmarshalText(b) + if err != nil { + return fmt.Errorf("text consumer: %v", err) + } + + return nil + } + + t := reflect.TypeOf(data) + if data != nil && t.Kind() == reflect.Ptr { + v := reflect.Indirect(reflect.ValueOf(data)) + if t.Elem().Kind() == reflect.String { + v.SetString(string(b)) + return nil + } + } + + return fmt.Errorf("%v (%T) is not supported by the TextConsumer, %s", + data, data, "can be resolved by supporting TextUnmarshaler interface") + }) +} + +// TextProducer creates a new text producer +func TextProducer() Producer { + return ProducerFunc(func(writer io.Writer, data interface{}) error { + if writer == nil { + return errors.New("TextProducer requires a writer") // early exit + } + + if data == nil { + return errors.New("no data given to produce text from") + } + + if tm, ok := data.(encoding.TextMarshaler); ok { + txt, err := tm.MarshalText() + if err != nil { + return fmt.Errorf("text producer: %v", err) + } + _, err = writer.Write(txt) + return err + } + + if str, ok := data.(error); ok { + _, err := writer.Write([]byte(str.Error())) + return err + } + + if str, ok := data.(fmt.Stringer); ok { + _, err := writer.Write([]byte(str.String())) + return err + } + + v := reflect.Indirect(reflect.ValueOf(data)) + if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice { + b, err := swag.WriteJSON(data) + if err != nil { + return err + } + _, err = writer.Write(b) + return err + } + if v.Kind() != reflect.String { + return fmt.Errorf("%T is not a supported type by the TextProducer", data) + } + + _, err := writer.Write([]byte(v.String())) + return err + }) +} diff --git a/vendor/github.com/go-openapi/runtime/values.go b/vendor/github.com/go-openapi/runtime/values.go new file mode 100644 index 0000000..11f5732 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/values.go @@ -0,0 +1,19 @@ +package runtime + +// Values 
typically represent parameters on a http request. +type Values map[string][]string + +// GetOK returns the values collection for the given key. +// When the key is present in the map it will return true for hasKey. +// When the value is not empty it will return true for hasValue. +func (v Values) GetOK(key string) (value []string, hasKey bool, hasValue bool) { + value, hasKey = v[key] + if !hasKey { + return + } + if len(value) == 0 { + return + } + hasValue = true + return +} diff --git a/vendor/github.com/go-openapi/runtime/xml.go b/vendor/github.com/go-openapi/runtime/xml.go new file mode 100644 index 0000000..821c739 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/xml.go @@ -0,0 +1,36 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package runtime + +import ( + "encoding/xml" + "io" +) + +// XMLConsumer creates a new XML consumer +func XMLConsumer() Consumer { + return ConsumerFunc(func(reader io.Reader, data interface{}) error { + dec := xml.NewDecoder(reader) + return dec.Decode(data) + }) +} + +// XMLProducer creates a new XML producer +func XMLProducer() Producer { + return ProducerFunc(func(writer io.Writer, data interface{}) error { + enc := xml.NewEncoder(writer) + return enc.Encode(data) + }) +} diff --git a/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go b/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go new file mode 100644 index 0000000..a1a0a58 --- /dev/null +++ b/vendor/github.com/go-openapi/runtime/yamlpc/yaml.go @@ -0,0 +1,39 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package yamlpc + +import ( + "io" + + "github.com/go-openapi/runtime" + "gopkg.in/yaml.v3" +) + +// YAMLConsumer creates a consumer for yaml data +func YAMLConsumer() runtime.Consumer { + return runtime.ConsumerFunc(func(r io.Reader, v interface{}) error { + dec := yaml.NewDecoder(r) + return dec.Decode(v) + }) +} + +// YAMLProducer creates a producer for yaml data +func YAMLProducer() runtime.Producer { + return runtime.ProducerFunc(func(w io.Writer, v interface{}) error { + enc := yaml.NewEncoder(w) + defer enc.Close() + return enc.Encode(v) + }) +} diff --git a/vendor/github.com/go-openapi/spec/.editorconfig b/vendor/github.com/go-openapi/spec/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/spec/.gitignore b/vendor/github.com/go-openapi/spec/.gitignore new file mode 100644 index 0000000..f47cb20 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.gitignore @@ -0,0 +1 @@ +*.out diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + 
threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/spec/LICENSE b/vendor/github.com/go-openapi/spec/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/go-openapi/spec/README.md b/vendor/github.com/go-openapi/spec/README.md new file mode 100644 index 0000000..7fd2810 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/README.md @@ -0,0 +1,54 @@ +# OpenAPI v2 object model [![Build Status](https://github.com/go-openapi/spec/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/spec/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/spec/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/spec) + +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE) +[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/spec.svg)](https://pkg.go.dev/github.com/go-openapi/spec) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/spec)](https://goreportcard.com/report/github.com/go-openapi/spec) + +The object model for OpenAPI specification documents. + +### FAQ + +* What does this do? + +> 1. This package knows how to marshal and unmarshal Swagger API specifications into a golang object model +> 2. It knows how to resolve $ref and expand them to make a single root document + +* How does it play with the rest of the go-openapi packages ? + +> 1. 
This package is at the core of the go-openapi suite of packages and [code generator](https://github.com/go-swagger/go-swagger) +> 2. There is a [spec loading package](https://github.com/go-openapi/loads) to fetch specs as JSON or YAML from local or remote locations +> 3. There is a [spec validation package](https://github.com/go-openapi/validate) built on top of it +> 4. There is a [spec analysis package](https://github.com/go-openapi/analysis) built on top of it, to analyze, flatten, fix and merge spec documents + +* Does this library support OpenAPI 3? + +> No. +> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0). +> There is no plan to make it evolve toward supporting OpenAPI 3.x. +> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story. +> +> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3 + +* Does the unmarshaling support YAML? + +> Not directly. The exposed types know only how to unmarshal from JSON. +> +> In order to load a YAML document as a Swagger spec, you need to use the loaders provided by +> github.com/go-openapi/loads +> +> Take a look at the example there: https://pkg.go.dev/github.com/go-openapi/loads#example-Spec +> +> See also https://github.com/go-openapi/spec/issues/164 + +* How can I validate a spec? + +> Validation is provided by [the validate package](http://github.com/go-openapi/validate) + +* Why do we have an `ID` field for `Schema` which is not part of the swagger spec? + +> We found jsonschema compatibility more important: since `id` in jsonschema influences +> how `$ref` are resolved. +> This `id` does not conflict with any property named `id`. 
+> +> See also https://github.com/go-openapi/spec/issues/23 diff --git a/vendor/github.com/go-openapi/spec/cache.go b/vendor/github.com/go-openapi/spec/cache.go new file mode 100644 index 0000000..122993b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/cache.go @@ -0,0 +1,98 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "sync" +) + +// ResolutionCache a cache for resolving urls +type ResolutionCache interface { + Get(string) (interface{}, bool) + Set(string, interface{}) +} + +type simpleCache struct { + lock sync.RWMutex + store map[string]interface{} +} + +func (s *simpleCache) ShallowClone() ResolutionCache { + store := make(map[string]interface{}, len(s.store)) + s.lock.RLock() + for k, v := range s.store { + store[k] = v + } + s.lock.RUnlock() + + return &simpleCache{ + store: store, + } +} + +// Get retrieves a cached URI +func (s *simpleCache) Get(uri string) (interface{}, bool) { + s.lock.RLock() + v, ok := s.store[uri] + + s.lock.RUnlock() + return v, ok +} + +// Set caches a URI +func (s *simpleCache) Set(uri string, data interface{}) { + s.lock.Lock() + s.store[uri] = data + s.lock.Unlock() +} + +var ( + // resCache is a package level cache for $ref resolution and expansion. + // It is initialized lazily by methods that have the need for it: no + // memory is allocated unless some expander methods are called. 
+ // + // It is initialized with JSON schema and swagger schema, + // which do not mutate during normal operations. + // + // All subsequent utilizations of this cache are produced from a shallow + // clone of this initial version. + resCache *simpleCache + onceCache sync.Once + + _ ResolutionCache = &simpleCache{} +) + +// initResolutionCache initializes the URI resolution cache. To be wrapped in a sync.Once.Do call. +func initResolutionCache() { + resCache = defaultResolutionCache() +} + +func defaultResolutionCache() *simpleCache { + return &simpleCache{store: map[string]interface{}{ + "http://swagger.io/v2/schema.json": MustLoadSwagger20Schema(), + "http://json-schema.org/draft-04/schema": MustLoadJSONSchemaDraft04(), + }} +} + +func cacheOrDefault(cache ResolutionCache) ResolutionCache { + onceCache.Do(initResolutionCache) + + if cache != nil { + return cache + } + + // get a shallow clone of the base cache with swagger and json schema + return resCache.ShallowClone() +} diff --git a/vendor/github.com/go-openapi/spec/contact_info.go b/vendor/github.com/go-openapi/spec/contact_info.go new file mode 100644 index 0000000..2f7bb21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/contact_info.go @@ -0,0 +1,57 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/swag" +) + +// ContactInfo contact information for the exposed API. 
+// +// For more information: http://goo.gl/8us55a#contactObject +type ContactInfo struct { + ContactInfoProps + VendorExtensible +} + +// ContactInfoProps hold the properties of a ContactInfo object +type ContactInfoProps struct { + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` + Email string `json:"email,omitempty"` +} + +// UnmarshalJSON hydrates ContactInfo from json +func (c *ContactInfo) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil { + return err + } + return json.Unmarshal(data, &c.VendorExtensible) +} + +// MarshalJSON produces ContactInfo as json +func (c ContactInfo) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(c.ContactInfoProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(c.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} diff --git a/vendor/github.com/go-openapi/spec/debug.go b/vendor/github.com/go-openapi/spec/debug.go new file mode 100644 index 0000000..fc889f6 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/debug.go @@ -0,0 +1,49 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "fmt" + "log" + "os" + "path" + "runtime" +) + +// Debug is true when the SWAGGER_DEBUG env var is not empty. +// +// It enables a more verbose logging of this package. 
+var Debug = os.Getenv("SWAGGER_DEBUG") != "" + +var ( + // specLogger is a debug logger for this package + specLogger *log.Logger +) + +func init() { + debugOptions() +} + +func debugOptions() { + specLogger = log.New(os.Stdout, "spec:", log.LstdFlags) +} + +func debugLog(msg string, args ...interface{}) { + // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog() + if Debug { + _, file1, pos1, _ := runtime.Caller(1) + specLogger.Printf("%s:%d: %s", path.Base(file1), pos1, fmt.Sprintf(msg, args...)) + } +} diff --git a/vendor/github.com/go-openapi/spec/embed.go b/vendor/github.com/go-openapi/spec/embed.go new file mode 100644 index 0000000..1f42847 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/embed.go @@ -0,0 +1,17 @@ +package spec + +import ( + "embed" + "path" +) + +//go:embed schemas/*.json schemas/*/*.json +var assets embed.FS + +func jsonschemaDraft04JSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "jsonschema-draft-04.json")) +} + +func v2SchemaJSONBytes() ([]byte, error) { + return assets.ReadFile(path.Join("schemas", "v2", "schema.json")) +} diff --git a/vendor/github.com/go-openapi/spec/errors.go b/vendor/github.com/go-openapi/spec/errors.go new file mode 100644 index 0000000..6992c7b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/errors.go @@ -0,0 +1,19 @@ +package spec + +import "errors" + +// Error codes +var ( + // ErrUnknownTypeForReference indicates that a resolved reference was found in an unsupported container type + ErrUnknownTypeForReference = errors.New("unknown type for the resolved reference") + + // ErrResolveRefNeedsAPointer indicates that a $ref target must be a valid JSON pointer + ErrResolveRefNeedsAPointer = errors.New("resolve ref: target needs to be a pointer") + + // ErrDerefUnsupportedType indicates that a resolved reference was found in an unsupported container type. 
+ // At the moment, $ref are supported only inside: schemas, parameters, responses, path items + ErrDerefUnsupportedType = errors.New("deref: unsupported type") + + // ErrExpandUnsupportedType indicates that $ref expansion is attempted on some invalid type + ErrExpandUnsupportedType = errors.New("expand: unsupported type. Input should be of type *Parameter or *Response") +) diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go new file mode 100644 index 0000000..b81a569 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/expander.go @@ -0,0 +1,607 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" +) + +// ExpandOptions provides options for the spec expander. +// +// RelativeBase is the path to the root document. This can be a remote URL or a path to a local file. +// +// If left empty, the root document is assumed to be located in the current working directory: +// all relative $ref's will be resolved from there. +// +// PathLoader injects a document loading method. By default, this resolves to the function provided by the SpecLoader package variable. +type ExpandOptions struct { + RelativeBase string // the path to the root document to expand. 
This is a file, not a directory + SkipSchemas bool // do not expand schemas, just paths, parameters and responses + ContinueOnError bool // continue expanding even after and error is found + PathLoader func(string) (json.RawMessage, error) `json:"-"` // the document loading method that takes a path as input and yields a json document + AbsoluteCircularRef bool // circular $ref remaining after expansion remain absolute URLs +} + +func optionsOrDefault(opts *ExpandOptions) *ExpandOptions { + if opts != nil { + clone := *opts // shallow clone to avoid internal changes to be propagated to the caller + if clone.RelativeBase != "" { + clone.RelativeBase = normalizeBase(clone.RelativeBase) + } + // if the relative base is empty, let the schema loader choose a pseudo root document + return &clone + } + return &ExpandOptions{} +} + +// ExpandSpec expands the references in a swagger spec +func ExpandSpec(spec *Swagger, options *ExpandOptions) error { + options = optionsOrDefault(options) + resolver := defaultSchemaLoader(spec, options, nil, nil) + + specBasePath := options.RelativeBase + + if !options.SkipSchemas { + for key, definition := range spec.Definitions { + parentRefs := make([]string, 0, 10) + parentRefs = append(parentRefs, "#/definitions/"+key) + + def, err := expandSchema(definition, parentRefs, resolver, specBasePath) + if resolver.shouldStopOnError(err) { + return err + } + if def != nil { + spec.Definitions[key] = *def + } + } + } + + for key := range spec.Parameters { + parameter := spec.Parameters[key] + if err := expandParameterOrResponse(¶meter, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Parameters[key] = parameter + } + + for key := range spec.Responses { + response := spec.Responses[key] + if err := expandParameterOrResponse(&response, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Responses[key] = response + } + + if spec.Paths != nil { + for key := range spec.Paths.Paths { + 
pth := spec.Paths.Paths[key] + if err := expandPathItem(&pth, resolver, specBasePath); resolver.shouldStopOnError(err) { + return err + } + spec.Paths.Paths[key] = pth + } + } + + return nil +} + +const rootBase = ".root" + +// baseForRoot loads in the cache the root document and produces a fake ".root" base path entry +// for further $ref resolution +func baseForRoot(root interface{}, cache ResolutionCache) string { + // cache the root document to resolve $ref's + normalizedBase := normalizeBase(rootBase) + + if root == nil { + // ensure that we never leave a nil root: always cache the root base pseudo-document + cachedRoot, found := cache.Get(normalizedBase) + if found && cachedRoot != nil { + // the cache is already preloaded with a root + return normalizedBase + } + + root = map[string]interface{}{} + } + + cache.Set(normalizedBase, root) + + return normalizedBase +} + +// ExpandSchema expands the refs in the schema object with reference to the root object. +// +// go-openapi/validate uses this function. +// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandSchemaWithBasePath to resolve external references). +// +// Setting the cache is optional and this parameter may safely be left to nil. +func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + if root == nil { + root = schema + } + + opts := &ExpandOptions{ + // when a root is specified, cache the root as an in-memory document for $ref retrieval + RelativeBase: baseForRoot(root, cache), + SkipSchemas: false, + ContinueOnError: false, + } + + return ExpandSchemaWithBasePath(schema, cache, opts) +} + +// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options. +// +// Setting the cache is optional and this parameter may safely be left to nil. 
+func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *ExpandOptions) error { + if schema == nil { + return nil + } + + cache = cacheOrDefault(cache) + + opts = optionsOrDefault(opts) + + resolver := defaultSchemaLoader(nil, opts, cache, nil) + + parentRefs := make([]string, 0, 10) + s, err := expandSchema(*schema, parentRefs, resolver, opts.RelativeBase) + if err != nil { + return err + } + if s != nil { + // guard for when continuing on error + *schema = *s + } + + return nil +} + +func expandItems(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + if target.Items == nil { + return &target, nil + } + + // array + if target.Items.Schema != nil { + t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath) + if err != nil { + return nil, err + } + *target.Items.Schema = *t + } + + // tuple + for i := range target.Items.Schemas { + t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath) + if err != nil { + return nil, err + } + target.Items.Schemas[i] = *t + } + + return &target, nil +} + +func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + if target.Ref.String() == "" && target.Ref.IsRoot() { + newRef := normalizeRef(&target.Ref, basePath) + target.Ref = *newRef + return &target, nil + } + + // change the base path of resolution when an ID is encountered + // otherwise the basePath should inherit the parent's + if target.ID != "" { + basePath, _ = resolver.setSchemaID(target, target.ID, basePath) + } + + if target.Ref.String() != "" { + if !resolver.options.SkipSchemas { + return expandSchemaRef(target, parentRefs, resolver, basePath) + } + + // when "expand" with SkipSchema, we just rebase the existing $ref without replacing + // the full schema. 
+ rebasedRef, err := NewRef(normalizeURI(target.Ref.String(), basePath)) + if err != nil { + return nil, err + } + target.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) + + return &target, nil + } + + for k := range target.Definitions { + tt, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if tt != nil { + target.Definitions[k] = *tt + } + } + + t, err := expandItems(target, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target = *t + } + + for i := range target.AllOf { + t, err := expandSchema(target.AllOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.AllOf[i] = *t + } + } + + for i := range target.AnyOf { + t, err := expandSchema(target.AnyOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.AnyOf[i] = *t + } + } + + for i := range target.OneOf { + t, err := expandSchema(target.OneOf[i], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.OneOf[i] = *t + } + } + + if target.Not != nil { + t, err := expandSchema(*target.Not, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.Not = *t + } + } + + for k := range target.Properties { + t, err := expandSchema(target.Properties[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.Properties[k] = *t + } + } + + if target.AdditionalProperties != nil && target.AdditionalProperties.Schema != nil { + t, err := expandSchema(*target.AdditionalProperties.Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != 
nil { + *target.AdditionalProperties.Schema = *t + } + } + + for k := range target.PatternProperties { + t, err := expandSchema(target.PatternProperties[k], parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + target.PatternProperties[k] = *t + } + } + + for k := range target.Dependencies { + if target.Dependencies[k].Schema != nil { + t, err := expandSchema(*target.Dependencies[k].Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.Dependencies[k].Schema = *t + } + } + } + + if target.AdditionalItems != nil && target.AdditionalItems.Schema != nil { + t, err := expandSchema(*target.AdditionalItems.Schema, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return &target, err + } + if t != nil { + *target.AdditionalItems.Schema = *t + } + } + return &target, nil +} + +func expandSchemaRef(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) { + // if a Ref is found, all sibling fields are skipped + // Ref also changes the resolution scope of children expandSchema + + // here the resolution scope is changed because a $ref was encountered + normalizedRef := normalizeRef(&target.Ref, basePath) + normalizedBasePath := normalizedRef.RemoteURI() + + if resolver.isCircular(normalizedRef, basePath, parentRefs...) { + // this means there is a cycle in the recursion tree: return the Ref + // - circular refs cannot be expanded. We leave them as ref. 
+ // - denormalization means that a new local file ref is set relative to the original basePath + debugLog("short circuit circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s", + basePath, normalizedBasePath, normalizedRef.String()) + if !resolver.options.AbsoluteCircularRef { + target.Ref = denormalizeRef(normalizedRef, resolver.context.basePath, resolver.context.rootID) + } else { + target.Ref = *normalizedRef + } + return &target, nil + } + + var t *Schema + err := resolver.Resolve(&target.Ref, &t, basePath) + if resolver.shouldStopOnError(err) { + return nil, err + } + + if t == nil { + // guard for when continuing on error + return &target, nil + } + + parentRefs = append(parentRefs, normalizedRef.String()) + transitiveResolver := resolver.transitiveResolver(basePath, target.Ref) + + basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath) + + return expandSchema(*t, parentRefs, transitiveResolver, basePath) +} + +func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error { + if pathItem == nil { + return nil + } + + parentRefs := make([]string, 0, 10) + if err := resolver.deref(pathItem, parentRefs, basePath); resolver.shouldStopOnError(err) { + return err + } + + if pathItem.Ref.String() != "" { + transitiveResolver := resolver.transitiveResolver(basePath, pathItem.Ref) + basePath = transitiveResolver.updateBasePath(resolver, basePath) + resolver = transitiveResolver + } + + pathItem.Ref = Ref{} + for i := range pathItem.Parameters { + if err := expandParameterOrResponse(&(pathItem.Parameters[i]), resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + } + + ops := []*Operation{ + pathItem.Get, + pathItem.Head, + pathItem.Options, + pathItem.Put, + pathItem.Post, + pathItem.Patch, + pathItem.Delete, + } + for _, op := range ops { + if err := expandOperation(op, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + } + + return nil +} + +func expandOperation(op 
*Operation, resolver *schemaLoader, basePath string) error { + if op == nil { + return nil + } + + for i := range op.Parameters { + param := op.Parameters[i] + if err := expandParameterOrResponse(¶m, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + op.Parameters[i] = param + } + + if op.Responses == nil { + return nil + } + + responses := op.Responses + if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + + for code := range responses.StatusCodeResponses { + response := responses.StatusCodeResponses[code] + if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) { + return err + } + responses.StatusCodeResponses[code] = response + } + + return nil +} + +// ExpandResponseWithRoot expands a response based on a root document, not a fetchable document +// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandResponse to resolve external references). +// +// Setting the cache is optional and this parameter may safely be left to nil. +func ExpandResponseWithRoot(response *Response, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + opts := &ExpandOptions{ + RelativeBase: baseForRoot(root, cache), + } + resolver := defaultSchemaLoader(root, opts, cache, nil) + + return expandParameterOrResponse(response, resolver, opts.RelativeBase) +} + +// ExpandResponse expands a response based on a basepath +// +// All refs inside response will be resolved relative to basePath +func ExpandResponse(response *Response, basePath string) error { + opts := optionsOrDefault(&ExpandOptions{ + RelativeBase: basePath, + }) + resolver := defaultSchemaLoader(nil, opts, nil, nil) + + return expandParameterOrResponse(response, resolver, opts.RelativeBase) +} + +// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document. 
+// +// Notice that it is impossible to reference a json schema in a different document other than root +// (use ExpandParameter to resolve external references). +func ExpandParameterWithRoot(parameter *Parameter, root interface{}, cache ResolutionCache) error { + cache = cacheOrDefault(cache) + + opts := &ExpandOptions{ + RelativeBase: baseForRoot(root, cache), + } + resolver := defaultSchemaLoader(root, opts, cache, nil) + + return expandParameterOrResponse(parameter, resolver, opts.RelativeBase) +} + +// ExpandParameter expands a parameter based on a basepath. +// This is the exported version of expandParameter +// all refs inside parameter will be resolved relative to basePath +func ExpandParameter(parameter *Parameter, basePath string) error { + opts := optionsOrDefault(&ExpandOptions{ + RelativeBase: basePath, + }) + resolver := defaultSchemaLoader(nil, opts, nil, nil) + + return expandParameterOrResponse(parameter, resolver, opts.RelativeBase) +} + +func getRefAndSchema(input interface{}) (*Ref, *Schema, error) { + var ( + ref *Ref + sch *Schema + ) + + switch refable := input.(type) { + case *Parameter: + if refable == nil { + return nil, nil, nil + } + ref = &refable.Ref + sch = refable.Schema + case *Response: + if refable == nil { + return nil, nil, nil + } + ref = &refable.Ref + sch = refable.Schema + default: + return nil, nil, fmt.Errorf("unsupported type: %T: %w", input, ErrExpandUnsupportedType) + } + + return ref, sch, nil +} + +func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error { + ref, sch, err := getRefAndSchema(input) + if err != nil { + return err + } + + if ref == nil && sch == nil { // nothing to do + return nil + } + + parentRefs := make([]string, 0, 10) + if ref != nil { + // dereference this $ref + if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) { + return err + } + + ref, sch, _ = getRefAndSchema(input) + } + + if ref.String() != "" { + transitiveResolver 
:= resolver.transitiveResolver(basePath, *ref) + basePath = resolver.updateBasePath(transitiveResolver, basePath) + resolver = transitiveResolver + } + + if sch == nil { + // nothing to be expanded + if ref != nil { + *ref = Ref{} + } + + return nil + } + + if sch.Ref.String() != "" { + rebasedRef, ern := NewRef(normalizeURI(sch.Ref.String(), basePath)) + if ern != nil { + return ern + } + + if resolver.isCircular(&rebasedRef, basePath, parentRefs...) { + // this is a circular $ref: stop expansion + if !resolver.options.AbsoluteCircularRef { + sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID) + } else { + sch.Ref = rebasedRef + } + } + } + + // $ref expansion or rebasing is performed by expandSchema below + if ref != nil { + *ref = Ref{} + } + + // expand schema + // yes, we do it even if options.SkipSchema is true: we have to go down that rabbit hole and rebase nested $ref) + s, err := expandSchema(*sch, parentRefs, resolver, basePath) + if resolver.shouldStopOnError(err) { + return err + } + + if s != nil { // guard for when continuing on error + *sch = *s + } + + return nil +} diff --git a/vendor/github.com/go-openapi/spec/external_docs.go b/vendor/github.com/go-openapi/spec/external_docs.go new file mode 100644 index 0000000..88add91 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/external_docs.go @@ -0,0 +1,24 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +// ExternalDocumentation allows referencing an external resource for +// extended documentation. +// +// For more information: http://goo.gl/8us55a#externalDocumentationObject +type ExternalDocumentation struct { + Description string `json:"description,omitempty"` + URL string `json:"url,omitempty"` +} diff --git a/vendor/github.com/go-openapi/spec/header.go b/vendor/github.com/go-openapi/spec/header.go new file mode 100644 index 0000000..9dfd17b --- /dev/null +++ b/vendor/github.com/go-openapi/spec/header.go @@ -0,0 +1,203 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + jsonArray = "array" +) + +// HeaderProps describes a response header +type HeaderProps struct { + Description string `json:"description,omitempty"` +} + +// Header describes a header for a response of the API +// +// For more information: http://goo.gl/8us55a#headerObject +type Header struct { + CommonValidations + SimpleSchema + VendorExtensible + HeaderProps +} + +// ResponseHeader creates a new header instance for use in a response +func ResponseHeader() *Header { + return new(Header) +} + +// WithDescription sets the description on this response, allows for chaining +func (h *Header) WithDescription(description string) *Header { + h.Description = description + return h +} + +// Typed a fluent builder method for the type of parameter +func (h *Header) Typed(tpe, format string) *Header { + h.Type = tpe + h.Format = format + return h +} + +// CollectionOf a fluent builder method for an array item +func (h *Header) CollectionOf(items *Items, format string) *Header { + h.Type = jsonArray + h.Items = items + h.CollectionFormat = format + return h +} + +// WithDefault sets the default value on this item +func (h *Header) WithDefault(defaultValue interface{}) *Header { + h.Default = defaultValue + return h +} + +// WithMaxLength sets a max length value +func (h *Header) WithMaxLength(max int64) *Header { + h.MaxLength = &max + return h +} + +// WithMinLength sets a min length value +func (h *Header) WithMinLength(min int64) *Header { + h.MinLength = &min + return h +} + +// WithPattern sets a pattern value +func (h *Header) WithPattern(pattern string) *Header { + h.Pattern = pattern + return h +} + +// WithMultipleOf sets a multiple of value +func (h *Header) WithMultipleOf(number float64) *Header { + h.MultipleOf = &number + return h +} + +// WithMaximum sets a maximum number value +func (h *Header) WithMaximum(max float64, 
exclusive bool) *Header { + h.Maximum = &max + h.ExclusiveMaximum = exclusive + return h +} + +// WithMinimum sets a minimum number value +func (h *Header) WithMinimum(min float64, exclusive bool) *Header { + h.Minimum = &min + h.ExclusiveMinimum = exclusive + return h +} + +// WithEnum sets a the enum values (replace) +func (h *Header) WithEnum(values ...interface{}) *Header { + h.Enum = append([]interface{}{}, values...) + return h +} + +// WithMaxItems sets the max items +func (h *Header) WithMaxItems(size int64) *Header { + h.MaxItems = &size + return h +} + +// WithMinItems sets the min items +func (h *Header) WithMinItems(size int64) *Header { + h.MinItems = &size + return h +} + +// UniqueValues dictates that this array can only have unique items +func (h *Header) UniqueValues() *Header { + h.UniqueItems = true + return h +} + +// AllowDuplicates this array can have duplicates +func (h *Header) AllowDuplicates() *Header { + h.UniqueItems = false + return h +} + +// WithValidations is a fluent method to set header validations +func (h *Header) WithValidations(val CommonValidations) *Header { + h.SetValidations(SchemaValidations{CommonValidations: val}) + return h +} + +// MarshalJSON marshal this to JSON +func (h Header) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(h.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(h.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(h.HeaderProps) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2, b3), nil +} + +// UnmarshalJSON unmarshals this header from JSON +func (h *Header) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &h.CommonValidations); err != nil { + return err + } + if err := json.Unmarshal(data, &h.SimpleSchema); err != nil { + return err + } + if err := json.Unmarshal(data, &h.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &h.HeaderProps) +} + +// JSONLookup 
look up a value by the json property name +func (h Header) JSONLookup(token string) (interface{}, error) { + if ex, ok := h.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(h.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(h.SimpleSchema, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(h.HeaderProps, token) + return r, err +} diff --git a/vendor/github.com/go-openapi/spec/info.go b/vendor/github.com/go-openapi/spec/info.go new file mode 100644 index 0000000..582f0fd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/info.go @@ -0,0 +1,184 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + "strconv" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// Extensions vendor specific extensions +type Extensions map[string]interface{} + +// Add adds a value to these extensions +func (e Extensions) Add(key string, value interface{}) { + realKey := strings.ToLower(key) + e[realKey] = value +} + +// GetString gets a string value from the extensions +func (e Extensions) GetString(key string) (string, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + str, ok := v.(string) + return str, ok + } + return "", false +} + +// GetInt gets a int value from the extensions +func (e Extensions) GetInt(key string) (int, bool) { + realKey := strings.ToLower(key) + + if v, ok := e.GetString(realKey); ok { + if r, err := strconv.Atoi(v); err == nil { + return r, true + } + } + + if v, ok := e[realKey]; ok { + if r, rOk := v.(float64); rOk { + return int(r), true + } + } + return -1, false +} + +// GetBool gets a string value from the extensions +func (e Extensions) GetBool(key string) (bool, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + str, ok := v.(bool) + return str, ok + } + return false, false +} + +// GetStringSlice gets a string value from the extensions +func (e Extensions) GetStringSlice(key string) ([]string, bool) { + if v, ok := e[strings.ToLower(key)]; ok { + arr, isSlice := v.([]interface{}) + if !isSlice { + return nil, false + } + var strs []string + for _, iface := range arr { + str, isString := iface.(string) + if !isString { + return nil, false + } + strs = append(strs, str) + } + return strs, ok + } + return nil, false +} + +// VendorExtensible composition block. 
+type VendorExtensible struct { + Extensions Extensions +} + +// AddExtension adds an extension to this extensible object +func (v *VendorExtensible) AddExtension(key string, value interface{}) { + if value == nil { + return + } + if v.Extensions == nil { + v.Extensions = make(map[string]interface{}) + } + v.Extensions.Add(key, value) +} + +// MarshalJSON marshals the extensions to json +func (v VendorExtensible) MarshalJSON() ([]byte, error) { + toser := make(map[string]interface{}) + for k, v := range v.Extensions { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + toser[k] = v + } + } + return json.Marshal(toser) +} + +// UnmarshalJSON for this extensible object +func (v *VendorExtensible) UnmarshalJSON(data []byte) error { + var d map[string]interface{} + if err := json.Unmarshal(data, &d); err != nil { + return err + } + for k, vv := range d { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + if v.Extensions == nil { + v.Extensions = map[string]interface{}{} + } + v.Extensions[k] = vv + } + } + return nil +} + +// InfoProps the properties for an info definition +type InfoProps struct { + Description string `json:"description,omitempty"` + Title string `json:"title,omitempty"` + TermsOfService string `json:"termsOfService,omitempty"` + Contact *ContactInfo `json:"contact,omitempty"` + License *License `json:"license,omitempty"` + Version string `json:"version,omitempty"` +} + +// Info object provides metadata about the API. +// The metadata can be used by the clients if needed, and can be presented in the Swagger-UI for convenience. 
+// +// For more information: http://goo.gl/8us55a#infoObject +type Info struct { + VendorExtensible + InfoProps +} + +// JSONLookup look up a value by the json property name +func (i Info) JSONLookup(token string) (interface{}, error) { + if ex, ok := i.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(i.InfoProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (i Info) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(i.InfoProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(i.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (i *Info) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &i.InfoProps); err != nil { + return err + } + return json.Unmarshal(data, &i.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/items.go b/vendor/github.com/go-openapi/spec/items.go new file mode 100644 index 0000000..e2afb21 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/items.go @@ -0,0 +1,234 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + jsonRef = "$ref" +) + +// SimpleSchema describe swagger simple schemas for parameters and headers +type SimpleSchema struct { + Type string `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` + Format string `json:"format,omitempty"` + Items *Items `json:"items,omitempty"` + CollectionFormat string `json:"collectionFormat,omitempty"` + Default interface{} `json:"default,omitempty"` + Example interface{} `json:"example,omitempty"` +} + +// TypeName return the type (or format) of a simple schema +func (s *SimpleSchema) TypeName() string { + if s.Format != "" { + return s.Format + } + return s.Type +} + +// ItemsTypeName yields the type of items in a simple schema array +func (s *SimpleSchema) ItemsTypeName() string { + if s.Items == nil { + return "" + } + return s.Items.TypeName() +} + +// Items a limited subset of JSON-Schema's items object. +// It is used by parameter definitions that are not located in "body". +// +// For more information: http://goo.gl/8us55a#items-object +type Items struct { + Refable + CommonValidations + SimpleSchema + VendorExtensible +} + +// NewItems creates a new instance of items +func NewItems() *Items { + return &Items{} +} + +// Typed a fluent builder method for the type of item +func (i *Items) Typed(tpe, format string) *Items { + i.Type = tpe + i.Format = format + return i +} + +// AsNullable flags this schema as nullable. 
+func (i *Items) AsNullable() *Items { + i.Nullable = true + return i +} + +// CollectionOf a fluent builder method for an array item +func (i *Items) CollectionOf(items *Items, format string) *Items { + i.Type = jsonArray + i.Items = items + i.CollectionFormat = format + return i +} + +// WithDefault sets the default value on this item +func (i *Items) WithDefault(defaultValue interface{}) *Items { + i.Default = defaultValue + return i +} + +// WithMaxLength sets a max length value +func (i *Items) WithMaxLength(max int64) *Items { + i.MaxLength = &max + return i +} + +// WithMinLength sets a min length value +func (i *Items) WithMinLength(min int64) *Items { + i.MinLength = &min + return i +} + +// WithPattern sets a pattern value +func (i *Items) WithPattern(pattern string) *Items { + i.Pattern = pattern + return i +} + +// WithMultipleOf sets a multiple of value +func (i *Items) WithMultipleOf(number float64) *Items { + i.MultipleOf = &number + return i +} + +// WithMaximum sets a maximum number value +func (i *Items) WithMaximum(max float64, exclusive bool) *Items { + i.Maximum = &max + i.ExclusiveMaximum = exclusive + return i +} + +// WithMinimum sets a minimum number value +func (i *Items) WithMinimum(min float64, exclusive bool) *Items { + i.Minimum = &min + i.ExclusiveMinimum = exclusive + return i +} + +// WithEnum sets a the enum values (replace) +func (i *Items) WithEnum(values ...interface{}) *Items { + i.Enum = append([]interface{}{}, values...) 
+ return i +} + +// WithMaxItems sets the max items +func (i *Items) WithMaxItems(size int64) *Items { + i.MaxItems = &size + return i +} + +// WithMinItems sets the min items +func (i *Items) WithMinItems(size int64) *Items { + i.MinItems = &size + return i +} + +// UniqueValues dictates that this array can only have unique items +func (i *Items) UniqueValues() *Items { + i.UniqueItems = true + return i +} + +// AllowDuplicates this array can have duplicates +func (i *Items) AllowDuplicates() *Items { + i.UniqueItems = false + return i +} + +// WithValidations is a fluent method to set Items validations +func (i *Items) WithValidations(val CommonValidations) *Items { + i.SetValidations(SchemaValidations{CommonValidations: val}) + return i +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (i *Items) UnmarshalJSON(data []byte) error { + var validations CommonValidations + if err := json.Unmarshal(data, &validations); err != nil { + return err + } + var ref Refable + if err := json.Unmarshal(data, &ref); err != nil { + return err + } + var simpleSchema SimpleSchema + if err := json.Unmarshal(data, &simpleSchema); err != nil { + return err + } + var vendorExtensible VendorExtensible + if err := json.Unmarshal(data, &vendorExtensible); err != nil { + return err + } + i.Refable = ref + i.CommonValidations = validations + i.SimpleSchema = simpleSchema + i.VendorExtensible = vendorExtensible + return nil +} + +// MarshalJSON converts this items object to JSON +func (i Items) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(i.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(i.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(i.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(i.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b4, b3, b1, b2), nil +} + +// JSONLookup look up a value by the json property name 
+func (i Items) JSONLookup(token string) (interface{}, error) { + if token == jsonRef { + return &i.Ref, nil + } + + r, _, err := jsonpointer.GetForToken(i.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(i.SimpleSchema, token) + return r, err +} diff --git a/vendor/github.com/go-openapi/spec/license.go b/vendor/github.com/go-openapi/spec/license.go new file mode 100644 index 0000000..b42f803 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/license.go @@ -0,0 +1,56 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/swag" +) + +// License information for the exposed API. 
+// +// For more information: http://goo.gl/8us55a#licenseObject +type License struct { + LicenseProps + VendorExtensible +} + +// LicenseProps holds the properties of a License object +type LicenseProps struct { + Name string `json:"name,omitempty"` + URL string `json:"url,omitempty"` +} + +// UnmarshalJSON hydrates License from json +func (l *License) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &l.LicenseProps); err != nil { + return err + } + return json.Unmarshal(data, &l.VendorExtensible) +} + +// MarshalJSON produces License as json +func (l License) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(l.LicenseProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(l.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} diff --git a/vendor/github.com/go-openapi/spec/normalizer.go b/vendor/github.com/go-openapi/spec/normalizer.go new file mode 100644 index 0000000..e8b6009 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer.go @@ -0,0 +1,202 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "path" + "strings" +) + +const fileScheme = "file" + +// normalizeURI ensures that all $ref paths used internally by the expander are canonicalized. +// +// NOTE(windows): there is a tolerance over the strict URI format on windows. 
+// +// The normalizer accepts relative file URLs like 'Path\File.JSON' as well as absolute file URLs like +// 'C:\Path\file.Yaml'. +// +// Both are canonicalized with a "file://" scheme, slashes and a lower-cased path: +// 'file:///c:/path/file.yaml' +// +// URLs can be specified with a file scheme, like in 'file:///folder/file.json' or +// 'file:///c:\folder\File.json'. +// +// URLs like file://C:\folder are considered invalid (i.e. there is no host 'c:\folder') and a "repair" +// is attempted. +// +// The base path argument is assumed to be canonicalized (e.g. using normalizeBase()). +func normalizeURI(refPath, base string) string { + refURL, err := parseURL(refPath) + if err != nil { + specLogger.Printf("warning: invalid URI in $ref %q: %v", refPath, err) + refURL, refPath = repairURI(refPath) + } + + fixWindowsURI(refURL, refPath) // noop on non-windows OS + + refURL.Path = path.Clean(refURL.Path) + if refURL.Path == "." { + refURL.Path = "" + } + + r := MustCreateRef(refURL.String()) + if r.IsCanonical() { + return refURL.String() + } + + baseURL, _ := parseURL(base) + if path.IsAbs(refURL.Path) { + baseURL.Path = refURL.Path + } else if refURL.Path != "" { + baseURL.Path = path.Join(path.Dir(baseURL.Path), refURL.Path) + } + // copying fragment from ref to base + baseURL.Fragment = refURL.Fragment + + return baseURL.String() +} + +// denormalizeRef returns the simplest notation for a normalized $ref, given the path of the original root document. +// +// When calling this, we assume that: +// * $ref is a canonical URI +// * originalRelativeBase is a canonical URI +// +// denormalizeRef is currently used when we rewrite a $ref after a circular $ref has been detected. +// In this case, expansion stops and normally renders the internal canonical $ref. +// +// This internal $ref is eventually rebased to the original RelativeBase used for the expansion. 
+// +// There is a special case for schemas that are anchored with an "id": +// in that case, the rebasing is performed // against the id only if this is an anchor for the initial root document. +// All other intermediate "id"'s found along the way are ignored for the purpose of rebasing. +func denormalizeRef(ref *Ref, originalRelativeBase, id string) Ref { + debugLog("denormalizeRef called:\n$ref: %q\noriginal: %s\nroot ID:%s", ref.String(), originalRelativeBase, id) + + if ref.String() == "" || ref.IsRoot() || ref.HasFragmentOnly { + // short circuit: $ref to current doc + return *ref + } + + if id != "" { + idBaseURL, err := parseURL(id) + if err == nil { // if the schema id is not usable as a URI, ignore it + if ref, ok := rebase(ref, idBaseURL, true); ok { // rebase, but keep references to root unchaged (do not want $ref: "") + // $ref relative to the ID of the schema in the root document + return ref + } + } + } + + originalRelativeBaseURL, _ := parseURL(originalRelativeBase) + + r, _ := rebase(ref, originalRelativeBaseURL, false) + + return r +} + +func rebase(ref *Ref, v *url.URL, notEqual bool) (Ref, bool) { + var newBase url.URL + + u := ref.GetURL() + + if u.Scheme != v.Scheme || u.Host != v.Host { + return *ref, false + } + + docPath := v.Path + v.Path = path.Dir(v.Path) + + if v.Path == "." 
{ + v.Path = "" + } else if !strings.HasSuffix(v.Path, "/") { + v.Path += "/" + } + + newBase.Fragment = u.Fragment + + if strings.HasPrefix(u.Path, docPath) { + newBase.Path = strings.TrimPrefix(u.Path, docPath) + } else { + newBase.Path = strings.TrimPrefix(u.Path, v.Path) + } + + if notEqual && newBase.Path == "" && newBase.Fragment == "" { + // do not want rebasing to end up in an empty $ref + return *ref, false + } + + if path.IsAbs(newBase.Path) { + // whenever we end up with an absolute path, specify the scheme and host + newBase.Scheme = v.Scheme + newBase.Host = v.Host + } + + return MustCreateRef(newBase.String()), true +} + +// normalizeRef canonicalize a Ref, using a canonical relativeBase as its absolute anchor +func normalizeRef(ref *Ref, relativeBase string) *Ref { + r := MustCreateRef(normalizeURI(ref.String(), relativeBase)) + return &r +} + +// normalizeBase performs a normalization of the input base path. +// +// This always yields a canonical URI (absolute), usable for the document cache. +// +// It ensures that all further internal work on basePath may safely assume +// a non-empty, cross-platform, canonical URI (i.e. absolute). +// +// This normalization tolerates windows paths (e.g. C:\x\y\File.dat) and transform this +// in a file:// URL with lower cased drive letter and path. +// +// See also: https://en.wikipedia.org/wiki/File_URI_scheme +func normalizeBase(in string) string { + u, err := parseURL(in) + if err != nil { + specLogger.Printf("warning: invalid URI in RelativeBase %q: %v", in, err) + u, in = repairURI(in) + } + + u.Fragment = "" // any fragment in the base is irrelevant + + fixWindowsURI(u, in) // noop on non-windows OS + + u.Path = path.Clean(u.Path) + if u.Path == "." 
{ // empty after Clean() + u.Path = "" + } + + if u.Scheme != "" { + if path.IsAbs(u.Path) || u.Scheme != fileScheme { + // this is absolute or explicitly not a local file: we're good + return u.String() + } + } + + // no scheme or file scheme with relative path: assume file and make it absolute + // enforce scheme file://... with absolute path. + // + // If the input path is relative, we anchor the path to the current working directory. + // NOTE: we may end up with a host component. Leave it unchanged: e.g. file://host/folder/file.json + + u.Scheme = fileScheme + u.Path = absPath(u.Path) // platform-dependent + u.RawQuery = "" // any query component is irrelevant for a base + return u.String() +} diff --git a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go new file mode 100644 index 0000000..f19f1a8 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go @@ -0,0 +1,44 @@ +//go:build !windows +// +build !windows + +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "path/filepath" +) + +// absPath makes a file path absolute and compatible with a URI path component. +// +// The parameter must be a path, not an URI. 
+func absPath(in string) string { + anchored, err := filepath.Abs(in) + if err != nil { + specLogger.Printf("warning: could not resolve current working directory: %v", err) + return in + } + return anchored +} + +func repairURI(in string) (*url.URL, string) { + u, _ := parseURL("") + debugLog("repaired URI: original: %q, repaired: %q", in, "") + return u, "" +} + +func fixWindowsURI(_ *url.URL, _ string) { +} diff --git a/vendor/github.com/go-openapi/spec/normalizer_windows.go b/vendor/github.com/go-openapi/spec/normalizer_windows.go new file mode 100644 index 0000000..a66c532 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/normalizer_windows.go @@ -0,0 +1,154 @@ +// -build windows + +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "net/url" + "os" + "path" + "path/filepath" + "strings" +) + +// absPath makes a file path absolute and compatible with a URI path component +// +// The parameter must be a path, not an URI. +func absPath(in string) string { + // NOTE(windows): filepath.Abs exhibits a special behavior on windows for empty paths. + // See https://github.com/golang/go/issues/24441 + if in == "" { + in = "." 
+ } + + anchored, err := filepath.Abs(in) + if err != nil { + specLogger.Printf("warning: could not resolve current working directory: %v", err) + return in + } + + pth := strings.ReplaceAll(strings.ToLower(anchored), `\`, `/`) + if !strings.HasPrefix(pth, "/") { + pth = "/" + pth + } + + return path.Clean(pth) +} + +// repairURI tolerates invalid file URIs with common typos +// such as 'file://E:\folder\file', that break the regular URL parser. +// +// Adopting the same defaults as for unixes (e.g. return an empty path) would +// result into a counter-intuitive result for that case (e.g. E:\folder\file is +// eventually resolved as the current directory). The repair will detect the missing "/". +// +// Note that this only works for the file scheme. +func repairURI(in string) (*url.URL, string) { + const prefix = fileScheme + "://" + if !strings.HasPrefix(in, prefix) { + // giving up: resolve to empty path + u, _ := parseURL("") + + return u, "" + } + + // attempt the repair, stripping the scheme should be sufficient + u, _ := parseURL(strings.TrimPrefix(in, prefix)) + debugLog("repaired URI: original: %q, repaired: %q", in, u.String()) + + return u, u.String() +} + +// fixWindowsURI tolerates an absolute file path on windows such as C:\Base\File.yaml or \\host\share\Base\File.yaml +// and makes it a canonical URI: file:///c:/base/file.yaml +// +// Catch 22 notes for Windows: +// +// * There may be a drive letter on windows (it is lower-cased) +// * There may be a share UNC, e.g. \\server\folder\data.xml +// * Paths are case insensitive +// * Paths may already contain slashes +// * Paths must be slashed +// +// NOTE: there is no escaping. "/" may be valid separators just like "\". +// We don't use ToSlash() (which escapes everything) because windows now also +// tolerates the use of "/". Hence, both C:\File.yaml and C:/File.yaml will work. 
+func fixWindowsURI(u *url.URL, in string) { + drive := filepath.VolumeName(in) + + if len(drive) > 0 { + if len(u.Scheme) == 1 && strings.EqualFold(u.Scheme, drive[:1]) { // a path with a drive letter + u.Scheme = fileScheme + u.Host = "" + u.Path = strings.Join([]string{drive, u.Opaque, u.Path}, `/`) // reconstruct the full path component (no fragment, no query) + } else if u.Host == "" && strings.HasPrefix(u.Path, drive) { // a path with a \\host volume + // NOTE: the special host@port syntax for UNC is not supported (yet) + u.Scheme = fileScheme + + // this is a modified version of filepath.Dir() to apply on the VolumeName itself + i := len(drive) - 1 + for i >= 0 && !os.IsPathSeparator(drive[i]) { + i-- + } + host := drive[:i] // \\host\share => host + + u.Path = strings.TrimPrefix(u.Path, host) + u.Host = strings.TrimPrefix(host, `\\`) + } + + u.Opaque = "" + u.Path = strings.ReplaceAll(strings.ToLower(u.Path), `\`, `/`) + + // ensure we form an absolute path + if !strings.HasPrefix(u.Path, "/") { + u.Path = "/" + u.Path + } + + u.Path = path.Clean(u.Path) + + return + } + + if u.Scheme == fileScheme { + // Handle dodgy cases for file://{...} URIs on windows. + // A canonical URI should always be followed by an absolute path. + // + // Examples: + // * file:///folder/file => valid, unchanged + // * file:///c:\folder\file => slashed + // * file:///./folder/file => valid, cleaned to remove the dot + // * file:///.\folder\file => remapped to cwd + // * file:///. => dodgy, remapped to / (consistent with the behavior on unix) + // * file:///.. 
=> dodgy, remapped to / (consistent with the behavior on unix) + if (!path.IsAbs(u.Path) && !filepath.IsAbs(u.Path)) || (strings.HasPrefix(u.Path, `/.`) && strings.Contains(u.Path, `\`)) { + // ensure we form an absolute path + u.Path, _ = filepath.Abs(strings.TrimLeft(u.Path, `/`)) + if !strings.HasPrefix(u.Path, "/") { + u.Path = "/" + u.Path + } + } + u.Path = strings.ToLower(u.Path) + } + + // NOTE: lower case normalization does not propagate to inner resources, + // generated when rebasing: when joining a relative URI with a file to an absolute base, + // only the base is currently lower-cased. + // + // For now, we assume this is good enough for most use cases + // and try not to generate too many differences + // between the output produced on different platforms. + u.Path = path.Clean(strings.ReplaceAll(u.Path, `\`, `/`)) +} diff --git a/vendor/github.com/go-openapi/spec/operation.go b/vendor/github.com/go-openapi/spec/operation.go new file mode 100644 index 0000000..a69cca8 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/operation.go @@ -0,0 +1,400 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "sort" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +func init() { + gob.Register(map[string]interface{}{}) + gob.Register([]interface{}{}) +} + +// OperationProps describes an operation +// +// NOTES: +// - schemes, when present must be from [http, https, ws, wss]: see validate +// - Security is handled as a special case: see MarshalJSON function +type OperationProps struct { + Description string `json:"description,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Produces []string `json:"produces,omitempty"` + Schemes []string `json:"schemes,omitempty"` + Tags []string `json:"tags,omitempty"` + Summary string `json:"summary,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` + ID string `json:"operationId,omitempty"` + Deprecated bool `json:"deprecated,omitempty"` + Security []map[string][]string `json:"security,omitempty"` + Parameters []Parameter `json:"parameters,omitempty"` + Responses *Responses `json:"responses,omitempty"` +} + +// MarshalJSON takes care of serializing operation properties to JSON +// +// We use a custom marhaller here to handle a special cases related to +// the Security field. We need to preserve zero length slice +// while omitting the field when the value is nil/unset. +func (op OperationProps) MarshalJSON() ([]byte, error) { + type Alias OperationProps + if op.Security == nil { + return json.Marshal(&struct { + Security []map[string][]string `json:"security,omitempty"` + *Alias + }{ + Security: op.Security, + Alias: (*Alias)(&op), + }) + } + return json.Marshal(&struct { + Security []map[string][]string `json:"security"` + *Alias + }{ + Security: op.Security, + Alias: (*Alias)(&op), + }) +} + +// Operation describes a single API operation on a path. 
+// +// For more information: http://goo.gl/8us55a#operationObject +type Operation struct { + VendorExtensible + OperationProps +} + +// SuccessResponse gets a success response model +func (o *Operation) SuccessResponse() (*Response, int, bool) { + if o.Responses == nil { + return nil, 0, false + } + + responseCodes := make([]int, 0, len(o.Responses.StatusCodeResponses)) + for k := range o.Responses.StatusCodeResponses { + if k >= 200 && k < 300 { + responseCodes = append(responseCodes, k) + } + } + if len(responseCodes) > 0 { + sort.Ints(responseCodes) + v := o.Responses.StatusCodeResponses[responseCodes[0]] + return &v, responseCodes[0], true + } + + return o.Responses.Default, 0, false +} + +// JSONLookup look up a value by the json property name +func (o Operation) JSONLookup(token string) (interface{}, error) { + if ex, ok := o.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(o.OperationProps, token) + return r, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (o *Operation) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &o.OperationProps); err != nil { + return err + } + return json.Unmarshal(data, &o.VendorExtensible) +} + +// MarshalJSON converts this items object to JSON +func (o Operation) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(o.OperationProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(o.VendorExtensible) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} + +// NewOperation creates a new operation instance. +// It expects an ID as parameter but not passing an ID is also valid. +func NewOperation(id string) *Operation { + op := new(Operation) + op.ID = id + return op +} + +// WithID sets the ID property on this operation, allows for chaining. 
+func (o *Operation) WithID(id string) *Operation { + o.ID = id + return o +} + +// WithDescription sets the description on this operation, allows for chaining +func (o *Operation) WithDescription(description string) *Operation { + o.Description = description + return o +} + +// WithSummary sets the summary on this operation, allows for chaining +func (o *Operation) WithSummary(summary string) *Operation { + o.Summary = summary + return o +} + +// WithExternalDocs sets/removes the external docs for/from this operation. +// When you pass empty strings as params the external documents will be removed. +// When you pass non-empty string as one value then those values will be used on the external docs object. +// So when you pass a non-empty description, you should also pass the url and vice versa. +func (o *Operation) WithExternalDocs(description, url string) *Operation { + if description == "" && url == "" { + o.ExternalDocs = nil + return o + } + + if o.ExternalDocs == nil { + o.ExternalDocs = &ExternalDocumentation{} + } + o.ExternalDocs.Description = description + o.ExternalDocs.URL = url + return o +} + +// Deprecate marks the operation as deprecated +func (o *Operation) Deprecate() *Operation { + o.Deprecated = true + return o +} + +// Undeprecate marks the operation as not deprected +func (o *Operation) Undeprecate() *Operation { + o.Deprecated = false + return o +} + +// WithConsumes adds media types for incoming body values +func (o *Operation) WithConsumes(mediaTypes ...string) *Operation { + o.Consumes = append(o.Consumes, mediaTypes...) + return o +} + +// WithProduces adds media types for outgoing body values +func (o *Operation) WithProduces(mediaTypes ...string) *Operation { + o.Produces = append(o.Produces, mediaTypes...) + return o +} + +// WithTags adds tags for this operation +func (o *Operation) WithTags(tags ...string) *Operation { + o.Tags = append(o.Tags, tags...) 
+ return o +} + +// AddParam adds a parameter to this operation, when a parameter for that location +// and with that name already exists it will be replaced +func (o *Operation) AddParam(param *Parameter) *Operation { + if param == nil { + return o + } + + for i, p := range o.Parameters { + if p.Name == param.Name && p.In == param.In { + params := make([]Parameter, 0, len(o.Parameters)+1) + params = append(params, o.Parameters[:i]...) + params = append(params, *param) + params = append(params, o.Parameters[i+1:]...) + o.Parameters = params + + return o + } + } + + o.Parameters = append(o.Parameters, *param) + return o +} + +// RemoveParam removes a parameter from the operation +func (o *Operation) RemoveParam(name, in string) *Operation { + for i, p := range o.Parameters { + if p.Name == name && p.In == in { + o.Parameters = append(o.Parameters[:i], o.Parameters[i+1:]...) + return o + } + } + return o +} + +// SecuredWith adds a security scope to this operation. +func (o *Operation) SecuredWith(name string, scopes ...string) *Operation { + o.Security = append(o.Security, map[string][]string{name: scopes}) + return o +} + +// WithDefaultResponse adds a default response to the operation. +// Passing a nil value will remove the response +func (o *Operation) WithDefaultResponse(response *Response) *Operation { + return o.RespondsWith(0, response) +} + +// RespondsWith adds a status code response to the operation. +// When the code is 0 the value of the response will be used as default response value. 
+// When the value of the response is nil it will be removed from the operation +func (o *Operation) RespondsWith(code int, response *Response) *Operation { + if o.Responses == nil { + o.Responses = new(Responses) + } + if code == 0 { + o.Responses.Default = response + return o + } + if response == nil { + delete(o.Responses.StatusCodeResponses, code) + return o + } + if o.Responses.StatusCodeResponses == nil { + o.Responses.StatusCodeResponses = make(map[int]Response) + } + o.Responses.StatusCodeResponses[code] = *response + return o +} + +type opsAlias OperationProps + +type gobAlias struct { + Security []map[string]struct { + List []string + Pad bool + } + Alias *opsAlias + SecurityIsEmpty bool +} + +// GobEncode provides a safe gob encoder for Operation, including empty security requirements +func (o Operation) GobEncode() ([]byte, error) { + raw := struct { + Ext VendorExtensible + Props OperationProps + }{ + Ext: o.VendorExtensible, + Props: o.OperationProps, + } + var b bytes.Buffer + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Operation, including empty security requirements +func (o *Operation) GobDecode(b []byte) error { + var raw struct { + Ext VendorExtensible + Props OperationProps + } + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + o.VendorExtensible = raw.Ext + o.OperationProps = raw.Props + return nil +} + +// GobEncode provides a safe gob encoder for Operation, including empty security requirements +func (op OperationProps) GobEncode() ([]byte, error) { + raw := gobAlias{ + Alias: (*opsAlias)(&op), + } + + var b bytes.Buffer + if op.Security == nil { + // nil security requirement + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + if len(op.Security) == 0 { + // empty, but non-nil security requirement + raw.SecurityIsEmpty = true + raw.Alias.Security = nil + err := gob.NewEncoder(&b).Encode(raw) + 
return b.Bytes(), err + } + + raw.Security = make([]map[string]struct { + List []string + Pad bool + }, 0, len(op.Security)) + for _, req := range op.Security { + v := make(map[string]struct { + List []string + Pad bool + }, len(req)) + for k, val := range req { + v[k] = struct { + List []string + Pad bool + }{ + List: val, + } + } + raw.Security = append(raw.Security, v) + } + + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Operation, including empty security requirements +func (op *OperationProps) GobDecode(b []byte) error { + var raw gobAlias + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + if raw.Alias == nil { + return nil + } + + switch { + case raw.SecurityIsEmpty: + // empty, but non-nil security requirement + raw.Alias.Security = []map[string][]string{} + case len(raw.Alias.Security) == 0: + // nil security requirement + raw.Alias.Security = nil + default: + raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security)) + for _, req := range raw.Security { + v := make(map[string][]string, len(req)) + for k, val := range req { + v[k] = make([]string, 0, len(val.List)) + v[k] = append(v[k], val.List...) + } + raw.Alias.Security = append(raw.Alias.Security, v) + } + } + + *op = *(*OperationProps)(raw.Alias) + return nil +} diff --git a/vendor/github.com/go-openapi/spec/parameter.go b/vendor/github.com/go-openapi/spec/parameter.go new file mode 100644 index 0000000..bd4f1cd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/parameter.go @@ -0,0 +1,326 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// QueryParam creates a query parameter +func QueryParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "query"}} +} + +// HeaderParam creates a header parameter, this is always required by default +func HeaderParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "header", Required: true}} +} + +// PathParam creates a path parameter, this is always required +func PathParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "path", Required: true}} +} + +// BodyParam creates a body parameter +func BodyParam(name string, schema *Schema) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema}} +} + +// FormDataParam creates a body parameter +func FormDataParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}} +} + +// FileParam creates a body parameter +func FileParam(name string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}, + SimpleSchema: SimpleSchema{Type: "file"}} +} + +// SimpleArrayParam creates a param for a simple array (string, int, date etc) +func SimpleArrayParam(name, tpe, fmt string) *Parameter { + return &Parameter{ParamProps: ParamProps{Name: name}, + SimpleSchema: SimpleSchema{Type: jsonArray, CollectionFormat: "csv", + Items: &Items{SimpleSchema: SimpleSchema{Type: tpe, 
Format: fmt}}}} +} + +// ParamRef creates a parameter that's a json reference +func ParamRef(uri string) *Parameter { + p := new(Parameter) + p.Ref = MustCreateRef(uri) + return p +} + +// ParamProps describes the specific attributes of an operation parameter +// +// NOTE: +// - Schema is defined when "in" == "body": see validate +// - AllowEmptyValue is allowed where "in" == "query" || "formData" +type ParamProps struct { + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` + In string `json:"in,omitempty"` + Required bool `json:"required,omitempty"` + Schema *Schema `json:"schema,omitempty"` + AllowEmptyValue bool `json:"allowEmptyValue,omitempty"` +} + +// Parameter a unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn). +// +// There are five possible parameter types. +// - Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part +// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`, +// the path parameter is `itemId`. +// - Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`. +// - Header - Custom headers that are expected as part of the request. +// - Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be +// _one_ body parameter. The name of the body parameter has no effect on the parameter itself and is used for +// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist +// together for the same operation. +// - Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or +// `multipart/form-data` are used as the content type of the request (in Swagger's definition, +// the [`consumes`](#operationConsumes) property of an operation). 
This is the only parameter type that can be used +// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be +// declared together with a body parameter for the same operation. Form parameters have a different format based on +// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4). +// - `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload. +// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple +// parameters that are being transferred. +// - `multipart/form-data` - each parameter takes a section in the payload with an internal header. +// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is +// `submit-name`. This type of form parameters is more commonly used for file transfers. +// +// For more information: http://goo.gl/8us55a#parameterObject +type Parameter struct { + Refable + CommonValidations + SimpleSchema + VendorExtensible + ParamProps +} + +// JSONLookup look up a value by the json property name +func (p Parameter) JSONLookup(token string) (interface{}, error) { + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + if token == jsonRef { + return &p.Ref, nil + } + + r, _, err := jsonpointer.GetForToken(p.CommonValidations, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(p.SimpleSchema, token) + if err != nil && !strings.HasPrefix(err.Error(), "object has no field") { + return nil, err + } + if r != nil { + return r, nil + } + r, _, err = jsonpointer.GetForToken(p.ParamProps, token) + return r, err +} + +// WithDescription a fluent builder method for the description of the parameter +func (p *Parameter) WithDescription(description string) 
*Parameter { + p.Description = description + return p +} + +// Named a fluent builder method to override the name of the parameter +func (p *Parameter) Named(name string) *Parameter { + p.Name = name + return p +} + +// WithLocation a fluent builder method to override the location of the parameter +func (p *Parameter) WithLocation(in string) *Parameter { + p.In = in + return p +} + +// Typed a fluent builder method for the type of the parameter value +func (p *Parameter) Typed(tpe, format string) *Parameter { + p.Type = tpe + p.Format = format + return p +} + +// CollectionOf a fluent builder method for an array parameter +func (p *Parameter) CollectionOf(items *Items, format string) *Parameter { + p.Type = jsonArray + p.Items = items + p.CollectionFormat = format + return p +} + +// WithDefault sets the default value on this parameter +func (p *Parameter) WithDefault(defaultValue interface{}) *Parameter { + p.AsOptional() // with default implies optional + p.Default = defaultValue + return p +} + +// AllowsEmptyValues flags this parameter as being ok with empty values +func (p *Parameter) AllowsEmptyValues() *Parameter { + p.AllowEmptyValue = true + return p +} + +// NoEmptyValues flags this parameter as not liking empty values +func (p *Parameter) NoEmptyValues() *Parameter { + p.AllowEmptyValue = false + return p +} + +// AsOptional flags this parameter as optional +func (p *Parameter) AsOptional() *Parameter { + p.Required = false + return p +} + +// AsRequired flags this parameter as required +func (p *Parameter) AsRequired() *Parameter { + if p.Default != nil { // with a default required makes no sense + return p + } + p.Required = true + return p +} + +// WithMaxLength sets a max length value +func (p *Parameter) WithMaxLength(max int64) *Parameter { + p.MaxLength = &max + return p +} + +// WithMinLength sets a min length value +func (p *Parameter) WithMinLength(min int64) *Parameter { + p.MinLength = &min + return p +} + +// WithPattern sets a pattern value 
+func (p *Parameter) WithPattern(pattern string) *Parameter { + p.Pattern = pattern + return p +} + +// WithMultipleOf sets a multiple of value +func (p *Parameter) WithMultipleOf(number float64) *Parameter { + p.MultipleOf = &number + return p +} + +// WithMaximum sets a maximum number value +func (p *Parameter) WithMaximum(max float64, exclusive bool) *Parameter { + p.Maximum = &max + p.ExclusiveMaximum = exclusive + return p +} + +// WithMinimum sets a minimum number value +func (p *Parameter) WithMinimum(min float64, exclusive bool) *Parameter { + p.Minimum = &min + p.ExclusiveMinimum = exclusive + return p +} + +// WithEnum sets a the enum values (replace) +func (p *Parameter) WithEnum(values ...interface{}) *Parameter { + p.Enum = append([]interface{}{}, values...) + return p +} + +// WithMaxItems sets the max items +func (p *Parameter) WithMaxItems(size int64) *Parameter { + p.MaxItems = &size + return p +} + +// WithMinItems sets the min items +func (p *Parameter) WithMinItems(size int64) *Parameter { + p.MinItems = &size + return p +} + +// UniqueValues dictates that this array can only have unique items +func (p *Parameter) UniqueValues() *Parameter { + p.UniqueItems = true + return p +} + +// AllowDuplicates this array can have duplicates +func (p *Parameter) AllowDuplicates() *Parameter { + p.UniqueItems = false + return p +} + +// WithValidations is a fluent method to set parameter validations +func (p *Parameter) WithValidations(val CommonValidations) *Parameter { + p.SetValidations(SchemaValidations{CommonValidations: val}) + return p +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *Parameter) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &p.CommonValidations); err != nil { + return err + } + if err := json.Unmarshal(data, &p.Refable); err != nil { + return err + } + if err := json.Unmarshal(data, &p.SimpleSchema); err != nil { + return err + } + if err := json.Unmarshal(data, 
&p.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &p.ParamProps) +} + +// MarshalJSON converts this items object to JSON +func (p Parameter) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(p.CommonValidations) + if err != nil { + return nil, err + } + b2, err := json.Marshal(p.SimpleSchema) + if err != nil { + return nil, err + } + b3, err := json.Marshal(p.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + b5, err := json.Marshal(p.ParamProps) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b3, b1, b2, b4, b5), nil +} diff --git a/vendor/github.com/go-openapi/spec/path_item.go b/vendor/github.com/go-openapi/spec/path_item.go new file mode 100644 index 0000000..68fc8e9 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/path_item.go @@ -0,0 +1,87 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// PathItemProps the path item specific properties +type PathItemProps struct { + Get *Operation `json:"get,omitempty"` + Put *Operation `json:"put,omitempty"` + Post *Operation `json:"post,omitempty"` + Delete *Operation `json:"delete,omitempty"` + Options *Operation `json:"options,omitempty"` + Head *Operation `json:"head,omitempty"` + Patch *Operation `json:"patch,omitempty"` + Parameters []Parameter `json:"parameters,omitempty"` +} + +// PathItem describes the operations available on a single path. +// A Path Item may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering). +// The path itself is still exposed to the documentation viewer but they will +// not know which operations and parameters are available. +// +// For more information: http://goo.gl/8us55a#pathItemObject +type PathItem struct { + Refable + VendorExtensible + PathItemProps +} + +// JSONLookup look up a value by the json property name +func (p PathItem) JSONLookup(token string) (interface{}, error) { + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + if token == jsonRef { + return &p.Ref, nil + } + r, _, err := jsonpointer.GetForToken(p.PathItemProps, token) + return r, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *PathItem) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &p.Refable); err != nil { + return err + } + if err := json.Unmarshal(data, &p.VendorExtensible); err != nil { + return err + } + return json.Unmarshal(data, &p.PathItemProps) +} + +// MarshalJSON converts this items object to JSON +func (p PathItem) MarshalJSON() ([]byte, error) { + b3, err := json.Marshal(p.Refable) + if err != nil { + return nil, err + } + b4, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + b5, err := json.Marshal(p.PathItemProps) + if err != nil { + 
return nil, err + } + concated := swag.ConcatJSON(b3, b4, b5) + return concated, nil +} diff --git a/vendor/github.com/go-openapi/spec/paths.go b/vendor/github.com/go-openapi/spec/paths.go new file mode 100644 index 0000000..9dc82a2 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/paths.go @@ -0,0 +1,97 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/go-openapi/swag" +) + +// Paths holds the relative paths to the individual endpoints. +// The path is appended to the [`basePath`](http://goo.gl/8us55a#swaggerBasePath) in order +// to construct the full URL. +// The Paths may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering). 
+// +// For more information: http://goo.gl/8us55a#pathsObject +type Paths struct { + VendorExtensible + Paths map[string]PathItem `json:"-"` // custom serializer to flatten this, each entry must start with "/" +} + +// JSONLookup look up a value by the json property name +func (p Paths) JSONLookup(token string) (interface{}, error) { + if pi, ok := p.Paths[token]; ok { + return &pi, nil + } + if ex, ok := p.Extensions[token]; ok { + return &ex, nil + } + return nil, fmt.Errorf("object has no field %q", token) +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (p *Paths) UnmarshalJSON(data []byte) error { + var res map[string]json.RawMessage + if err := json.Unmarshal(data, &res); err != nil { + return err + } + for k, v := range res { + if strings.HasPrefix(strings.ToLower(k), "x-") { + if p.Extensions == nil { + p.Extensions = make(map[string]interface{}) + } + var d interface{} + if err := json.Unmarshal(v, &d); err != nil { + return err + } + p.Extensions[k] = d + } + if strings.HasPrefix(k, "/") { + if p.Paths == nil { + p.Paths = make(map[string]PathItem) + } + var pi PathItem + if err := json.Unmarshal(v, &pi); err != nil { + return err + } + p.Paths[k] = pi + } + } + return nil +} + +// MarshalJSON converts this items object to JSON +func (p Paths) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(p.VendorExtensible) + if err != nil { + return nil, err + } + + pths := make(map[string]PathItem) + for k, v := range p.Paths { + if strings.HasPrefix(k, "/") { + pths[k] = v + } + } + b2, err := json.Marshal(pths) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} diff --git a/vendor/github.com/go-openapi/spec/properties.go b/vendor/github.com/go-openapi/spec/properties.go new file mode 100644 index 0000000..91d2435 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/properties.go @@ -0,0 +1,91 @@ +package spec + +import ( + "bytes" + "encoding/json" + "reflect" + "sort" 
+) + +// OrderSchemaItem holds a named schema (e.g. from a property of an object) +type OrderSchemaItem struct { + Name string + Schema +} + +// OrderSchemaItems is a sortable slice of named schemas. +// The ordering is defined by the x-order schema extension. +type OrderSchemaItems []OrderSchemaItem + +// MarshalJSON produces a json object with keys defined by the name schemas +// of the OrderSchemaItems slice, keeping the original order of the slice. +func (items OrderSchemaItems) MarshalJSON() ([]byte, error) { + buf := bytes.NewBuffer(nil) + buf.WriteString("{") + for i := range items { + if i > 0 { + buf.WriteString(",") + } + buf.WriteString("\"") + buf.WriteString(items[i].Name) + buf.WriteString("\":") + bs, err := json.Marshal(&items[i].Schema) + if err != nil { + return nil, err + } + buf.Write(bs) + } + buf.WriteString("}") + return buf.Bytes(), nil +} + +func (items OrderSchemaItems) Len() int { return len(items) } +func (items OrderSchemaItems) Swap(i, j int) { items[i], items[j] = items[j], items[i] } +func (items OrderSchemaItems) Less(i, j int) (ret bool) { + ii, oki := items[i].Extensions.GetInt("x-order") + ij, okj := items[j].Extensions.GetInt("x-order") + if oki { + if okj { + defer func() { + if err := recover(); err != nil { + defer func() { + if err = recover(); err != nil { + ret = items[i].Name < items[j].Name + } + }() + ret = reflect.ValueOf(ii).String() < reflect.ValueOf(ij).String() + } + }() + return ii < ij + } + return true + } else if okj { + return false + } + return items[i].Name < items[j].Name +} + +// SchemaProperties is a map representing the properties of a Schema object. +// It knows how to transform its keys into an ordered slice. 
+type SchemaProperties map[string]Schema + +// ToOrderedSchemaItems transforms the map of properties into a sortable slice +func (properties SchemaProperties) ToOrderedSchemaItems() OrderSchemaItems { + items := make(OrderSchemaItems, 0, len(properties)) + for k, v := range properties { + items = append(items, OrderSchemaItem{ + Name: k, + Schema: v, + }) + } + sort.Sort(items) + return items +} + +// MarshalJSON produces properties as json, keeping their order. +func (properties SchemaProperties) MarshalJSON() ([]byte, error) { + if properties == nil { + return []byte("null"), nil + } + return json.Marshal(properties.ToOrderedSchemaItems()) +} diff --git a/vendor/github.com/go-openapi/spec/ref.go b/vendor/github.com/go-openapi/spec/ref.go new file mode 100644 index 0000000..b0ef9bd --- /dev/null +++ b/vendor/github.com/go-openapi/spec/ref.go @@ -0,0 +1,193 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "net/http" + "os" + "path/filepath" + + "github.com/go-openapi/jsonreference" +) + +// Refable is a struct for things that accept a $ref property +type Refable struct { + Ref Ref +} + +// MarshalJSON marshals the ref to json +func (r Refable) MarshalJSON() ([]byte, error) { + return r.Ref.MarshalJSON() +} + +// UnmarshalJSON unmarshalss the ref from json +func (r *Refable) UnmarshalJSON(d []byte) error { + return json.Unmarshal(d, &r.Ref) +} + +// Ref represents a json reference that is potentially resolved +type Ref struct { + jsonreference.Ref +} + +// RemoteURI gets the remote uri part of the ref +func (r *Ref) RemoteURI() string { + if r.String() == "" { + return "" + } + + u := *r.GetURL() + u.Fragment = "" + return u.String() +} + +// IsValidURI returns true when the url the ref points to can be found +func (r *Ref) IsValidURI(basepaths ...string) bool { + if r.String() == "" { + return true + } + + v := r.RemoteURI() + if v == "" { + return true + } + + if r.HasFullURL { + //nolint:noctx,gosec + rr, err := http.Get(v) + if err != nil { + return false + } + defer rr.Body.Close() + + return rr.StatusCode/100 == 2 + } + + if !(r.HasFileScheme || r.HasFullFilePath || r.HasURLPathOnly) { + return false + } + + // check for local file + pth := v + if r.HasURLPathOnly { + base := "." 
+ if len(basepaths) > 0 { + base = filepath.Dir(filepath.Join(basepaths...)) + } + p, e := filepath.Abs(filepath.ToSlash(filepath.Join(base, pth))) + if e != nil { + return false + } + pth = p + } + + fi, err := os.Stat(filepath.ToSlash(pth)) + if err != nil { + return false + } + + return !fi.IsDir() +} + +// Inherits creates a new reference from a parent and a child +// If the child cannot inherit from the parent, an error is returned +func (r *Ref) Inherits(child Ref) (*Ref, error) { + ref, err := r.Ref.Inherits(child.Ref) + if err != nil { + return nil, err + } + return &Ref{Ref: *ref}, nil +} + +// NewRef creates a new instance of a ref object +// returns an error when the reference uri is an invalid uri +func NewRef(refURI string) (Ref, error) { + ref, err := jsonreference.New(refURI) + if err != nil { + return Ref{}, err + } + return Ref{Ref: ref}, nil +} + +// MustCreateRef creates a ref object but panics when refURI is invalid. +// Use the NewRef method for a version that returns an error. 
+func MustCreateRef(refURI string) Ref { + return Ref{Ref: jsonreference.MustCreateRef(refURI)} +} + +// MarshalJSON marshals this ref into a JSON object +func (r Ref) MarshalJSON() ([]byte, error) { + str := r.String() + if str == "" { + if r.IsRoot() { + return []byte(`{"$ref":""}`), nil + } + return []byte("{}"), nil + } + v := map[string]interface{}{"$ref": str} + return json.Marshal(v) +} + +// UnmarshalJSON unmarshals this ref from a JSON object +func (r *Ref) UnmarshalJSON(d []byte) error { + var v map[string]interface{} + if err := json.Unmarshal(d, &v); err != nil { + return err + } + return r.fromMap(v) +} + +// GobEncode provides a safe gob encoder for Ref +func (r Ref) GobEncode() ([]byte, error) { + var b bytes.Buffer + raw, err := r.MarshalJSON() + if err != nil { + return nil, err + } + err = gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Ref +func (r *Ref) GobDecode(b []byte) error { + var raw []byte + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + return json.Unmarshal(raw, r) +} + +func (r *Ref) fromMap(v map[string]interface{}) error { + if v == nil { + return nil + } + + if vv, ok := v["$ref"]; ok { + if str, ok := vv.(string); ok { + ref, err := jsonreference.New(str) + if err != nil { + return err + } + *r = Ref{Ref: ref} + } + } + + return nil +} diff --git a/vendor/github.com/go-openapi/spec/resolver.go b/vendor/github.com/go-openapi/spec/resolver.go new file mode 100644 index 0000000..47d1ee1 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/resolver.go @@ -0,0 +1,127 @@ +package spec + +import ( + "fmt" + + "github.com/go-openapi/swag" +) + +func resolveAnyWithBase(root interface{}, ref *Ref, result interface{}, options *ExpandOptions) error { + options = optionsOrDefault(options) + resolver := defaultSchemaLoader(root, options, nil, nil) + + if err := resolver.Resolve(ref, result, options.RelativeBase); err != nil { + 
return err + } + + return nil +} + +// ResolveRefWithBase resolves a reference against a context root with preservation of base path +func ResolveRefWithBase(root interface{}, ref *Ref, options *ExpandOptions) (*Schema, error) { + result := new(Schema) + + if err := resolveAnyWithBase(root, ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveRef resolves a reference for a schema against a context root +// ref is guaranteed to be in root (no need to go to external files) +// +// ResolveRef is ONLY called from the code generation module +func ResolveRef(root interface{}, ref *Ref) (*Schema, error) { + res, _, err := ref.GetPointer().Get(root) + if err != nil { + return nil, err + } + + switch sch := res.(type) { + case Schema: + return &sch, nil + case *Schema: + return sch, nil + case map[string]interface{}: + newSch := new(Schema) + if err = swag.DynamicJSONToStruct(sch, newSch); err != nil { + return nil, err + } + return newSch, nil + default: + return nil, fmt.Errorf("type: %T: %w", sch, ErrUnknownTypeForReference) + } +} + +// ResolveParameterWithBase resolves a parameter reference against a context root and base path +func ResolveParameterWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Parameter, error) { + result := new(Parameter) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveParameter resolves a parameter reference against a context root +func ResolveParameter(root interface{}, ref Ref) (*Parameter, error) { + return ResolveParameterWithBase(root, ref, nil) +} + +// ResolveResponseWithBase resolves response a reference against a context root and base path +func ResolveResponseWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Response, error) { + result := new(Response) + + err := resolveAnyWithBase(root, &ref, result, options) + if err != nil { + return nil, err + } + + return result, nil +} + +// 
ResolveResponse resolves response a reference against a context root +func ResolveResponse(root interface{}, ref Ref) (*Response, error) { + return ResolveResponseWithBase(root, ref, nil) +} + +// ResolvePathItemWithBase resolves response a path item against a context root and base path +func ResolvePathItemWithBase(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) { + result := new(PathItem) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolvePathItem resolves response a path item against a context root and base path +// +// Deprecated: use ResolvePathItemWithBase instead +func ResolvePathItem(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) { + return ResolvePathItemWithBase(root, ref, options) +} + +// ResolveItemsWithBase resolves parameter items reference against a context root and base path. +// +// NOTE: stricly speaking, this construct is not supported by Swagger 2.0. +// Similarly, $ref are forbidden in response headers. +func ResolveItemsWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) { + result := new(Items) + + if err := resolveAnyWithBase(root, &ref, result, options); err != nil { + return nil, err + } + + return result, nil +} + +// ResolveItems resolves parameter items reference against a context root and base path. 
+// +// Deprecated: use ResolveItemsWithBase instead +func ResolveItems(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) { + return ResolveItemsWithBase(root, ref, options) +} diff --git a/vendor/github.com/go-openapi/spec/response.go b/vendor/github.com/go-openapi/spec/response.go new file mode 100644 index 0000000..0340b60 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/response.go @@ -0,0 +1,152 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// ResponseProps properties specific to a response +type ResponseProps struct { + Description string `json:"description"` + Schema *Schema `json:"schema,omitempty"` + Headers map[string]Header `json:"headers,omitempty"` + Examples map[string]interface{} `json:"examples,omitempty"` +} + +// Response describes a single response from an API Operation. 
+// +// For more information: http://goo.gl/8us55a#responseObject +type Response struct { + Refable + ResponseProps + VendorExtensible +} + +// JSONLookup look up a value by the json property name +func (r Response) JSONLookup(token string) (interface{}, error) { + if ex, ok := r.Extensions[token]; ok { + return &ex, nil + } + if token == "$ref" { + return &r.Ref, nil + } + ptr, _, err := jsonpointer.GetForToken(r.ResponseProps, token) + return ptr, err +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (r *Response) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &r.ResponseProps); err != nil { + return err + } + if err := json.Unmarshal(data, &r.Refable); err != nil { + return err + } + return json.Unmarshal(data, &r.VendorExtensible) +} + +// MarshalJSON converts this items object to JSON +func (r Response) MarshalJSON() ([]byte, error) { + var ( + b1 []byte + err error + ) + + if r.Ref.String() == "" { + // when there is no $ref, empty description is rendered as an empty string + b1, err = json.Marshal(r.ResponseProps) + } else { + // when there is $ref inside the schema, description should be omitempty-ied + b1, err = json.Marshal(struct { + Description string `json:"description,omitempty"` + Schema *Schema `json:"schema,omitempty"` + Headers map[string]Header `json:"headers,omitempty"` + Examples map[string]interface{} `json:"examples,omitempty"` + }{ + Description: r.ResponseProps.Description, + Schema: r.ResponseProps.Schema, + Examples: r.ResponseProps.Examples, + }) + } + if err != nil { + return nil, err + } + + b2, err := json.Marshal(r.Refable) + if err != nil { + return nil, err + } + b3, err := json.Marshal(r.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2, b3), nil +} + +// NewResponse creates a new response instance +func NewResponse() *Response { + return new(Response) +} + +// ResponseRef creates a response as a json reference +func ResponseRef(url 
string) *Response { + resp := NewResponse() + resp.Ref = MustCreateRef(url) + return resp +} + +// WithDescription sets the description on this response, allows for chaining +func (r *Response) WithDescription(description string) *Response { + r.Description = description + return r +} + +// WithSchema sets the schema on this response, allows for chaining. +// Passing a nil argument removes the schema from this response +func (r *Response) WithSchema(schema *Schema) *Response { + r.Schema = schema + return r +} + +// AddHeader adds a header to this response +func (r *Response) AddHeader(name string, header *Header) *Response { + if header == nil { + return r.RemoveHeader(name) + } + if r.Headers == nil { + r.Headers = make(map[string]Header) + } + r.Headers[name] = *header + return r +} + +// RemoveHeader removes a header from this response +func (r *Response) RemoveHeader(name string) *Response { + delete(r.Headers, name) + return r +} + +// AddExample adds an example to this response +func (r *Response) AddExample(mediaType string, example interface{}) *Response { + if r.Examples == nil { + r.Examples = make(map[string]interface{}) + } + r.Examples[mediaType] = example + return r +} diff --git a/vendor/github.com/go-openapi/spec/responses.go b/vendor/github.com/go-openapi/spec/responses.go new file mode 100644 index 0000000..16c3076 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/responses.go @@ -0,0 +1,140 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/go-openapi/swag" +) + +// Responses is a container for the expected responses of an operation. +// The container maps a HTTP response code to the expected response. +// It is not expected from the documentation to necessarily cover all possible HTTP response codes, +// since they may not be known in advance. However, it is expected from the documentation to cover +// a successful operation response and any known errors. +// +// The `default` can be used a default response object for all HTTP codes that are not covered +// individually by the specification. +// +// The `Responses Object` MUST contain at least one response code, and it SHOULD be the response +// for a successful operation call. +// +// For more information: http://goo.gl/8us55a#responsesObject +type Responses struct { + VendorExtensible + ResponsesProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (r Responses) JSONLookup(token string) (interface{}, error) { + if token == "default" { + return r.Default, nil + } + if ex, ok := r.Extensions[token]; ok { + return &ex, nil + } + if i, err := strconv.Atoi(token); err == nil { + if scr, ok := r.StatusCodeResponses[i]; ok { + return scr, nil + } + } + return nil, fmt.Errorf("object has no field %q", token) +} + +// UnmarshalJSON hydrates this items instance with the data from JSON +func (r *Responses) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &r.ResponsesProps); err != nil { + return err + } + + if err := json.Unmarshal(data, &r.VendorExtensible); err != nil { + return err + } + if reflect.DeepEqual(ResponsesProps{}, r.ResponsesProps) { + r.ResponsesProps = ResponsesProps{} + } + return nil +} + +// MarshalJSON converts this items object to JSON +func (r Responses) MarshalJSON() 
([]byte, error) { + b1, err := json.Marshal(r.ResponsesProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(r.VendorExtensible) + if err != nil { + return nil, err + } + concated := swag.ConcatJSON(b1, b2) + return concated, nil +} + +// ResponsesProps describes all responses for an operation. +// It tells what is the default response and maps all responses with a +// HTTP status code. +type ResponsesProps struct { + Default *Response + StatusCodeResponses map[int]Response +} + +// MarshalJSON marshals responses as JSON +func (r ResponsesProps) MarshalJSON() ([]byte, error) { + toser := map[string]Response{} + if r.Default != nil { + toser["default"] = *r.Default + } + for k, v := range r.StatusCodeResponses { + toser[strconv.Itoa(k)] = v + } + return json.Marshal(toser) +} + +// UnmarshalJSON unmarshals responses from JSON +func (r *ResponsesProps) UnmarshalJSON(data []byte) error { + var res map[string]json.RawMessage + if err := json.Unmarshal(data, &res); err != nil { + return err + } + + if v, ok := res["default"]; ok { + var defaultRes Response + if err := json.Unmarshal(v, &defaultRes); err != nil { + return err + } + r.Default = &defaultRes + delete(res, "default") + } + for k, v := range res { + if !strings.HasPrefix(k, "x-") { + var statusCodeResp Response + if err := json.Unmarshal(v, &statusCodeResp); err != nil { + return err + } + if nk, err := strconv.Atoi(k); err == nil { + if r.StatusCodeResponses == nil { + r.StatusCodeResponses = map[int]Response{} + } + r.StatusCodeResponses[nk] = statusCodeResp + } + } + } + return nil +} diff --git a/vendor/github.com/go-openapi/spec/schema.go b/vendor/github.com/go-openapi/spec/schema.go new file mode 100644 index 0000000..4e9be85 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schema.go @@ -0,0 +1,645 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// BooleanProperty creates a boolean property +func BooleanProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"boolean"}}} +} + +// BoolProperty creates a boolean property +func BoolProperty() *Schema { return BooleanProperty() } + +// StringProperty creates a string property +func StringProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}} +} + +// CharProperty creates a string property +func CharProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}} +} + +// Float64Property creates a float64/double property +func Float64Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "double"}} +} + +// Float32Property creates a float32/float property +func Float32Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "float"}} +} + +// Int8Property creates an int8 property +func Int8Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int8"}} +} + +// Int16Property creates an int16 property +func Int16Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int16"}} +} + +// Int32Property creates an int32 property +func Int32Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int32"}} +} + +// 
Int64Property creates an int64 property +func Int64Property() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int64"}} +} + +// StrFmtProperty creates a property for the named string format +func StrFmtProperty(format string) *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: format}} +} + +// DateProperty creates a date property +func DateProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date"}} +} + +// DateTimeProperty creates a date time property +func DateTimeProperty() *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date-time"}} +} + +// MapProperty creates a map property +func MapProperty(property *Schema) *Schema { + return &Schema{SchemaProps: SchemaProps{Type: []string{"object"}, + AdditionalProperties: &SchemaOrBool{Allows: true, Schema: property}}} +} + +// RefProperty creates a ref property +func RefProperty(name string) *Schema { + return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}} +} + +// RefSchema creates a ref property +func RefSchema(name string) *Schema { + return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}} +} + +// ArrayProperty creates an array property +func ArrayProperty(items *Schema) *Schema { + if items == nil { + return &Schema{SchemaProps: SchemaProps{Type: []string{"array"}}} + } + return &Schema{SchemaProps: SchemaProps{Items: &SchemaOrArray{Schema: items}, Type: []string{"array"}}} +} + +// ComposedSchema creates a schema with allOf +func ComposedSchema(schemas ...Schema) *Schema { + s := new(Schema) + s.AllOf = schemas + return s +} + +// SchemaURL represents a schema url +type SchemaURL string + +// MarshalJSON marshal this to JSON +func (r SchemaURL) MarshalJSON() ([]byte, error) { + if r == "" { + return []byte("{}"), nil + } + v := map[string]interface{}{"$schema": string(r)} + return json.Marshal(v) +} + +// UnmarshalJSON unmarshal this 
from JSON +func (r *SchemaURL) UnmarshalJSON(data []byte) error { + var v map[string]interface{} + if err := json.Unmarshal(data, &v); err != nil { + return err + } + return r.fromMap(v) +} + +func (r *SchemaURL) fromMap(v map[string]interface{}) error { + if v == nil { + return nil + } + if vv, ok := v["$schema"]; ok { + if str, ok := vv.(string); ok { + u, err := parseURL(str) + if err != nil { + return err + } + + *r = SchemaURL(u.String()) + } + } + return nil +} + +// SchemaProps describes a JSON schema (draft 4) +type SchemaProps struct { + ID string `json:"id,omitempty"` + Ref Ref `json:"-"` + Schema SchemaURL `json:"-"` + Description string `json:"description,omitempty"` + Type StringOrArray `json:"type,omitempty"` + Nullable bool `json:"nullable,omitempty"` + Format string `json:"format,omitempty"` + Title string `json:"title,omitempty"` + Default interface{} `json:"default,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` + MaxLength *int64 `json:"maxLength,omitempty"` + MinLength *int64 `json:"minLength,omitempty"` + Pattern string `json:"pattern,omitempty"` + MaxItems *int64 `json:"maxItems,omitempty"` + MinItems *int64 `json:"minItems,omitempty"` + UniqueItems bool `json:"uniqueItems,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty"` + Enum []interface{} `json:"enum,omitempty"` + MaxProperties *int64 `json:"maxProperties,omitempty"` + MinProperties *int64 `json:"minProperties,omitempty"` + Required []string `json:"required,omitempty"` + Items *SchemaOrArray `json:"items,omitempty"` + AllOf []Schema `json:"allOf,omitempty"` + OneOf []Schema `json:"oneOf,omitempty"` + AnyOf []Schema `json:"anyOf,omitempty"` + Not *Schema `json:"not,omitempty"` + Properties SchemaProperties `json:"properties,omitempty"` + AdditionalProperties *SchemaOrBool 
`json:"additionalProperties,omitempty"` + PatternProperties SchemaProperties `json:"patternProperties,omitempty"` + Dependencies Dependencies `json:"dependencies,omitempty"` + AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"` + Definitions Definitions `json:"definitions,omitempty"` +} + +// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4) +type SwaggerSchemaProps struct { + Discriminator string `json:"discriminator,omitempty"` + ReadOnly bool `json:"readOnly,omitempty"` + XML *XMLObject `json:"xml,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` + Example interface{} `json:"example,omitempty"` +} + +// Schema the schema object allows the definition of input and output data types. +// These types can be objects, but also primitives and arrays. +// This object is based on the [JSON Schema Specification Draft 4](http://json-schema.org/) +// and uses a predefined subset of it. +// On top of this subset, there are extensions provided by this specification to allow for more complete documentation. 
+// +// For more information: http://goo.gl/8us55a#schemaObject +type Schema struct { + VendorExtensible + SchemaProps + SwaggerSchemaProps + ExtraProps map[string]interface{} `json:"-"` +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s Schema) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + + if ex, ok := s.ExtraProps[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(s.SchemaProps, token) + if r != nil || (err != nil && !strings.HasPrefix(err.Error(), "object has no field")) { + return r, err + } + r, _, err = jsonpointer.GetForToken(s.SwaggerSchemaProps, token) + return r, err +} + +// WithID sets the id for this schema, allows for chaining +func (s *Schema) WithID(id string) *Schema { + s.ID = id + return s +} + +// WithTitle sets the title for this schema, allows for chaining +func (s *Schema) WithTitle(title string) *Schema { + s.Title = title + return s +} + +// WithDescription sets the description for this schema, allows for chaining +func (s *Schema) WithDescription(description string) *Schema { + s.Description = description + return s +} + +// WithProperties sets the properties for this schema +func (s *Schema) WithProperties(schemas map[string]Schema) *Schema { + s.Properties = schemas + return s +} + +// SetProperty sets a property on this schema +func (s *Schema) SetProperty(name string, schema Schema) *Schema { + if s.Properties == nil { + s.Properties = make(map[string]Schema) + } + s.Properties[name] = schema + return s +} + +// WithAllOf sets the all of property +func (s *Schema) WithAllOf(schemas ...Schema) *Schema { + s.AllOf = schemas + return s +} + +// WithMaxProperties sets the max number of properties an object can have +func (s *Schema) WithMaxProperties(max int64) *Schema { + s.MaxProperties = &max + return s +} + +// WithMinProperties sets the min number of properties an object must have +func (s *Schema) 
WithMinProperties(min int64) *Schema { + s.MinProperties = &min + return s +} + +// Typed sets the type of this schema for a single value item +func (s *Schema) Typed(tpe, format string) *Schema { + s.Type = []string{tpe} + s.Format = format + return s +} + +// AddType adds a type with potential format to the types for this schema +func (s *Schema) AddType(tpe, format string) *Schema { + s.Type = append(s.Type, tpe) + if format != "" { + s.Format = format + } + return s +} + +// AsNullable flags this schema as nullable. +func (s *Schema) AsNullable() *Schema { + s.Nullable = true + return s +} + +// CollectionOf a fluent builder method for an array parameter +func (s *Schema) CollectionOf(items Schema) *Schema { + s.Type = []string{jsonArray} + s.Items = &SchemaOrArray{Schema: &items} + return s +} + +// WithDefault sets the default value on this parameter +func (s *Schema) WithDefault(defaultValue interface{}) *Schema { + s.Default = defaultValue + return s +} + +// WithRequired flags this parameter as required +func (s *Schema) WithRequired(items ...string) *Schema { + s.Required = items + return s +} + +// AddRequired adds field names to the required properties array +func (s *Schema) AddRequired(items ...string) *Schema { + s.Required = append(s.Required, items...) 
+ return s +} + +// WithMaxLength sets a max length value +func (s *Schema) WithMaxLength(max int64) *Schema { + s.MaxLength = &max + return s +} + +// WithMinLength sets a min length value +func (s *Schema) WithMinLength(min int64) *Schema { + s.MinLength = &min + return s +} + +// WithPattern sets a pattern value +func (s *Schema) WithPattern(pattern string) *Schema { + s.Pattern = pattern + return s +} + +// WithMultipleOf sets a multiple of value +func (s *Schema) WithMultipleOf(number float64) *Schema { + s.MultipleOf = &number + return s +} + +// WithMaximum sets a maximum number value +func (s *Schema) WithMaximum(max float64, exclusive bool) *Schema { + s.Maximum = &max + s.ExclusiveMaximum = exclusive + return s +} + +// WithMinimum sets a minimum number value +func (s *Schema) WithMinimum(min float64, exclusive bool) *Schema { + s.Minimum = &min + s.ExclusiveMinimum = exclusive + return s +} + +// WithEnum sets a the enum values (replace) +func (s *Schema) WithEnum(values ...interface{}) *Schema { + s.Enum = append([]interface{}{}, values...) + return s +} + +// WithMaxItems sets the max items +func (s *Schema) WithMaxItems(size int64) *Schema { + s.MaxItems = &size + return s +} + +// WithMinItems sets the min items +func (s *Schema) WithMinItems(size int64) *Schema { + s.MinItems = &size + return s +} + +// UniqueValues dictates that this array can only have unique items +func (s *Schema) UniqueValues() *Schema { + s.UniqueItems = true + return s +} + +// AllowDuplicates this array can have duplicates +func (s *Schema) AllowDuplicates() *Schema { + s.UniqueItems = false + return s +} + +// AddToAllOf adds a schema to the allOf property +func (s *Schema) AddToAllOf(schemas ...Schema) *Schema { + s.AllOf = append(s.AllOf, schemas...) 
+ return s +} + +// WithDiscriminator sets the name of the discriminator field +func (s *Schema) WithDiscriminator(discriminator string) *Schema { + s.Discriminator = discriminator + return s +} + +// AsReadOnly flags this schema as readonly +func (s *Schema) AsReadOnly() *Schema { + s.ReadOnly = true + return s +} + +// AsWritable flags this schema as writeable (not read-only) +func (s *Schema) AsWritable() *Schema { + s.ReadOnly = false + return s +} + +// WithExample sets the example for this schema +func (s *Schema) WithExample(example interface{}) *Schema { + s.Example = example + return s +} + +// WithExternalDocs sets/removes the external docs for/from this schema. +// When you pass empty strings as params the external documents will be removed. +// When you pass non-empty string as one value then those values will be used on the external docs object. +// So when you pass a non-empty description, you should also pass the url and vice versa. +func (s *Schema) WithExternalDocs(description, url string) *Schema { + if description == "" && url == "" { + s.ExternalDocs = nil + return s + } + + if s.ExternalDocs == nil { + s.ExternalDocs = &ExternalDocumentation{} + } + s.ExternalDocs.Description = description + s.ExternalDocs.URL = url + return s +} + +// WithXMLName sets the xml name for the object +func (s *Schema) WithXMLName(name string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Name = name + return s +} + +// WithXMLNamespace sets the xml namespace for the object +func (s *Schema) WithXMLNamespace(namespace string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Namespace = namespace + return s +} + +// WithXMLPrefix sets the xml prefix for the object +func (s *Schema) WithXMLPrefix(prefix string) *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Prefix = prefix + return s +} + +// AsXMLAttribute flags this object as xml attribute +func (s *Schema) AsXMLAttribute() *Schema { + if s.XML == nil { + 
s.XML = new(XMLObject) + } + s.XML.Attribute = true + return s +} + +// AsXMLElement flags this object as an xml node +func (s *Schema) AsXMLElement() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Attribute = false + return s +} + +// AsWrappedXML flags this object as wrapped, this is mostly useful for array types +func (s *Schema) AsWrappedXML() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Wrapped = true + return s +} + +// AsUnwrappedXML flags this object as an xml node +func (s *Schema) AsUnwrappedXML() *Schema { + if s.XML == nil { + s.XML = new(XMLObject) + } + s.XML.Wrapped = false + return s +} + +// SetValidations defines all schema validations. +// +// NOTE: Required, ReadOnly, AllOf, AnyOf, OneOf and Not are not considered. +func (s *Schema) SetValidations(val SchemaValidations) { + s.Maximum = val.Maximum + s.ExclusiveMaximum = val.ExclusiveMaximum + s.Minimum = val.Minimum + s.ExclusiveMinimum = val.ExclusiveMinimum + s.MaxLength = val.MaxLength + s.MinLength = val.MinLength + s.Pattern = val.Pattern + s.MaxItems = val.MaxItems + s.MinItems = val.MinItems + s.UniqueItems = val.UniqueItems + s.MultipleOf = val.MultipleOf + s.Enum = val.Enum + s.MinProperties = val.MinProperties + s.MaxProperties = val.MaxProperties + s.PatternProperties = val.PatternProperties +} + +// WithValidations is a fluent method to set schema validations +func (s *Schema) WithValidations(val SchemaValidations) *Schema { + s.SetValidations(val) + return s +} + +// Validations returns a clone of the validations for this schema +func (s Schema) Validations() SchemaValidations { + return SchemaValidations{ + CommonValidations: CommonValidations{ + Maximum: s.Maximum, + ExclusiveMaximum: s.ExclusiveMaximum, + Minimum: s.Minimum, + ExclusiveMinimum: s.ExclusiveMinimum, + MaxLength: s.MaxLength, + MinLength: s.MinLength, + Pattern: s.Pattern, + MaxItems: s.MaxItems, + MinItems: s.MinItems, + UniqueItems: s.UniqueItems, + MultipleOf: 
s.MultipleOf, + Enum: s.Enum, + }, + MinProperties: s.MinProperties, + MaxProperties: s.MaxProperties, + PatternProperties: s.PatternProperties, + } +} + +// MarshalJSON marshal this to JSON +func (s Schema) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(s.SchemaProps) + if err != nil { + return nil, fmt.Errorf("schema props %v", err) + } + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, fmt.Errorf("vendor props %v", err) + } + b3, err := s.Ref.MarshalJSON() + if err != nil { + return nil, fmt.Errorf("ref prop %v", err) + } + b4, err := s.Schema.MarshalJSON() + if err != nil { + return nil, fmt.Errorf("schema prop %v", err) + } + b5, err := json.Marshal(s.SwaggerSchemaProps) + if err != nil { + return nil, fmt.Errorf("common validations %v", err) + } + var b6 []byte + if s.ExtraProps != nil { + jj, err := json.Marshal(s.ExtraProps) + if err != nil { + return nil, fmt.Errorf("extra props %v", err) + } + b6 = jj + } + return swag.ConcatJSON(b1, b2, b3, b4, b5, b6), nil +} + +// UnmarshalJSON marshal this from JSON +func (s *Schema) UnmarshalJSON(data []byte) error { + props := struct { + SchemaProps + SwaggerSchemaProps + }{} + if err := json.Unmarshal(data, &props); err != nil { + return err + } + + sch := Schema{ + SchemaProps: props.SchemaProps, + SwaggerSchemaProps: props.SwaggerSchemaProps, + } + + var d map[string]interface{} + if err := json.Unmarshal(data, &d); err != nil { + return err + } + + _ = sch.Ref.fromMap(d) + _ = sch.Schema.fromMap(d) + + delete(d, "$ref") + delete(d, "$schema") + for _, pn := range swag.DefaultJSONNameProvider.GetJSONNames(s) { + delete(d, pn) + } + + for k, vv := range d { + lk := strings.ToLower(k) + if strings.HasPrefix(lk, "x-") { + if sch.Extensions == nil { + sch.Extensions = map[string]interface{}{} + } + sch.Extensions[k] = vv + continue + } + if sch.ExtraProps == nil { + sch.ExtraProps = map[string]interface{}{} + } + sch.ExtraProps[k] = vv + } + + *s = sch + + return nil +} diff 
--git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go new file mode 100644 index 0000000..0059b99 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schema_loader.go @@ -0,0 +1,331 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + "fmt" + "log" + "net/url" + "reflect" + "strings" + + "github.com/go-openapi/swag" +) + +// PathLoader is a function to use when loading remote refs. +// +// This is a package level default. It may be overridden or bypassed by +// specifying the loader in ExpandOptions. +// +// NOTE: if you are using the go-openapi/loads package, it will override +// this value with its own default (a loader to retrieve YAML documents as +// well as JSON ones). +var PathLoader = func(pth string) (json.RawMessage, error) { + data, err := swag.LoadFromFileOrHTTP(pth) + if err != nil { + return nil, err + } + return json.RawMessage(data), nil +} + +// resolverContext allows to share a context during spec processing. +// At the moment, it just holds the index of circular references found. +type resolverContext struct { + // circulars holds all visited circular references, to shortcircuit $ref resolution. + // + // This structure is privately instantiated and needs not be locked against + // concurrent access, unless we chose to implement a parallel spec walking. 
+ circulars map[string]bool + basePath string + loadDoc func(string) (json.RawMessage, error) + rootID string +} + +func newResolverContext(options *ExpandOptions) *resolverContext { + expandOptions := optionsOrDefault(options) + + // path loader may be overridden by options + var loader func(string) (json.RawMessage, error) + if expandOptions.PathLoader == nil { + loader = PathLoader + } else { + loader = expandOptions.PathLoader + } + + return &resolverContext{ + circulars: make(map[string]bool), + basePath: expandOptions.RelativeBase, // keep the root base path in context + loadDoc: loader, + } +} + +type schemaLoader struct { + root interface{} + options *ExpandOptions + cache ResolutionCache + context *resolverContext +} + +func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) *schemaLoader { + if ref.IsRoot() || ref.HasFragmentOnly { + return r + } + + baseRef := MustCreateRef(basePath) + currentRef := normalizeRef(&ref, basePath) + if strings.HasPrefix(currentRef.String(), baseRef.String()) { + return r + } + + // set a new root against which to resolve + rootURL := currentRef.GetURL() + rootURL.Fragment = "" + root, _ := r.cache.Get(rootURL.String()) + + // shallow copy of resolver options to set a new RelativeBase when + // traversing multiple documents + newOptions := r.options + newOptions.RelativeBase = rootURL.String() + + return defaultSchemaLoader(root, newOptions, r.cache, r.context) +} + +func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string { + if transitive != r { + if transitive.options != nil && transitive.options.RelativeBase != "" { + return normalizeBase(transitive.options.RelativeBase) + } + } + + return basePath +} + +func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error { + tgt := reflect.ValueOf(target) + if tgt.Kind() != reflect.Ptr { + return ErrResolveRefNeedsAPointer + } + + if ref.GetURL() == nil { + return nil + } + + var ( + res interface{} + data 
interface{} + err error + ) + + // Resolve against the root if it isn't nil, and if ref is pointing at the root, or has a fragment only which means + // it is pointing somewhere in the root. + root := r.root + if (ref.IsRoot() || ref.HasFragmentOnly) && root == nil && basePath != "" { + if baseRef, erb := NewRef(basePath); erb == nil { + root, _, _, _ = r.load(baseRef.GetURL()) + } + } + + if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil { + data = root + } else { + baseRef := normalizeRef(ref, basePath) + data, _, _, err = r.load(baseRef.GetURL()) + if err != nil { + return err + } + } + + res = data + if ref.String() != "" { + res, _, err = ref.GetPointer().Get(data) + if err != nil { + return err + } + } + return swag.DynamicJSONToStruct(res, target) +} + +func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) { + debugLog("loading schema from url: %s", refURL) + toFetch := *refURL + toFetch.Fragment = "" + + var err error + pth := toFetch.String() + normalized := normalizeBase(pth) + debugLog("loading doc from: %s", normalized) + + data, fromCache := r.cache.Get(normalized) + if fromCache { + return data, toFetch, fromCache, nil + } + + b, err := r.context.loadDoc(normalized) + if err != nil { + return nil, url.URL{}, false, err + } + + var doc interface{} + if err := json.Unmarshal(b, &doc); err != nil { + return nil, url.URL{}, false, err + } + r.cache.Set(normalized, doc) + + return doc, toFetch, fromCache, nil +} + +// isCircular detects cycles in sequences of $ref. +// +// It relies on a private context (which needs not be locked). 
+func (r *schemaLoader) isCircular(ref *Ref, basePath string, parentRefs ...string) (foundCycle bool) { + normalizedRef := normalizeURI(ref.String(), basePath) + if _, ok := r.context.circulars[normalizedRef]; ok { + // circular $ref has been already detected in another explored cycle + foundCycle = true + return + } + foundCycle = swag.ContainsStrings(parentRefs, normalizedRef) // normalized windows url's are lower cased + if foundCycle { + r.context.circulars[normalizedRef] = true + } + return +} + +// Resolve resolves a reference against basePath and stores the result in target. +// +// Resolve is not in charge of following references: it only resolves ref by following its URL. +// +// If the schema the ref is referring to holds nested refs, Resolve doesn't resolve them. +// +// If basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct +func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error { + return r.resolveRef(ref, target, basePath) +} + +func (r *schemaLoader) deref(input interface{}, parentRefs []string, basePath string) error { + var ref *Ref + switch refable := input.(type) { + case *Schema: + ref = &refable.Ref + case *Parameter: + ref = &refable.Ref + case *Response: + ref = &refable.Ref + case *PathItem: + ref = &refable.Ref + default: + return fmt.Errorf("unsupported type: %T: %w", input, ErrDerefUnsupportedType) + } + + curRef := ref.String() + if curRef == "" { + return nil + } + + normalizedRef := normalizeRef(ref, basePath) + normalizedBasePath := normalizedRef.RemoteURI() + + if r.isCircular(normalizedRef, basePath, parentRefs...) 
{ + return nil + } + + if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) { + return err + } + + if ref.String() == "" || ref.String() == curRef { + // done with rereferencing + return nil + } + + parentRefs = append(parentRefs, normalizedRef.String()) + return r.deref(input, parentRefs, normalizedBasePath) +} + +func (r *schemaLoader) shouldStopOnError(err error) bool { + if err != nil && !r.options.ContinueOnError { + return true + } + + if err != nil { + log.Println(err) + } + + return false +} + +func (r *schemaLoader) setSchemaID(target interface{}, id, basePath string) (string, string) { + debugLog("schema has ID: %s", id) + + // handling the case when id is a folder + // remember that basePath has to point to a file + var refPath string + if strings.HasSuffix(id, "/") { + // ensure this is detected as a file, not a folder + refPath = fmt.Sprintf("%s%s", id, "placeholder.json") + } else { + refPath = id + } + + // updates the current base path + // * important: ID can be a relative path + // * registers target to be fetchable from the new base proposed by this id + newBasePath := normalizeURI(refPath, basePath) + + // store found IDs for possible future reuse in $ref + r.cache.Set(newBasePath, target) + + // the root document has an ID: all $ref relative to that ID may + // be rebased relative to the root document + if basePath == r.context.basePath { + debugLog("root document is a schema with ID: %s (normalized as:%s)", id, newBasePath) + r.context.rootID = newBasePath + } + + return newBasePath, refPath +} + +func defaultSchemaLoader( + root interface{}, + expandOptions *ExpandOptions, + cache ResolutionCache, + context *resolverContext) *schemaLoader { + + if expandOptions == nil { + expandOptions = &ExpandOptions{} + } + + cache = cacheOrDefault(cache) + + if expandOptions.RelativeBase == "" { + // if no relative base is provided, assume the root document + // contains all $ref, or at least, that the relative documents + // may be 
resolved from the current working directory. + expandOptions.RelativeBase = baseForRoot(root, cache) + } + debugLog("effective expander options: %#v", expandOptions) + + if context == nil { + context = newResolverContext(expandOptions) + } + + return &schemaLoader{ + root: root, + options: expandOptions, + cache: cache, + context: context, + } +} diff --git a/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json new file mode 100644 index 0000000..bcbb847 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/schemas/jsonschema-draft-04.json @@ -0,0 +1,149 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "$schema": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": 
"regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "format": { "type": "string" }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/vendor/github.com/go-openapi/spec/schemas/v2/schema.json b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json new file mode 100644 index 0000000..ebe10ed --- /dev/null +++ 
b/vendor/github.com/go-openapi/spec/schemas/v2/schema.json @@ -0,0 +1,1607 @@ +{ + "title": "A JSON Schema for Swagger 2.0 API.", + "id": "http://swagger.io/v2/schema.json#", + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "required": [ + "swagger", + "info", + "paths" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "swagger": { + "type": "string", + "enum": [ + "2.0" + ], + "description": "The Swagger version of this document." + }, + "info": { + "$ref": "#/definitions/info" + }, + "host": { + "type": "string", + "pattern": "^[^{}/ :\\\\]+(?::\\d+)?$", + "description": "The host (name or ip) of the API. Example: 'swagger.io'" + }, + "basePath": { + "type": "string", + "pattern": "^/", + "description": "The base path to the API. Example: '/api'." + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "consumes": { + "description": "A list of MIME types accepted by the API.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "paths": { + "$ref": "#/definitions/paths" + }, + "definitions": { + "$ref": "#/definitions/definitions" + }, + "parameters": { + "$ref": "#/definitions/parameterDefinitions" + }, + "responses": { + "$ref": "#/definitions/responseDefinitions" + }, + "security": { + "$ref": "#/definitions/security" + }, + "securityDefinitions": { + "$ref": "#/definitions/securityDefinitions" + }, + "tags": { + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "uniqueItems": true + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "definitions": { + "info": { + "type": "object", + "description": "General information about the API.", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + 
"patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed." + }, + "termsOfService": { + "type": "string", + "description": "The terms of service for the API." + }, + "contact": { + "$ref": "#/definitions/contact" + }, + "license": { + "$ref": "#/definitions/license" + } + } + }, + "contact": { + "type": "object", + "description": "Contact information for the owners of the API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." + }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "license": { + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "paths": { + "type": "object", + "description": "Relative paths to the individual endpoints. 
They must be relative to the 'basePath'.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + }, + "^/": { + "$ref": "#/definitions/pathItem" + } + }, + "additionalProperties": false + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "description": "One or more JSON objects describing the schemas being consumed and produced by the API." + }, + "parameterDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/parameter" + }, + "description": "One or more JSON representations for parameters" + }, + "responseDefinitions": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/response" + }, + "description": "One or more JSON representations for responses" + }, + "externalDocs": { + "type": "object", + "additionalProperties": false, + "description": "information about external documentation", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "examples": { + "type": "object", + "additionalProperties": true + }, + "mimeType": { + "type": "string", + "description": "The MIME type of the HTTP message." + }, + "operation": { + "type": "object", + "required": [ + "responses" + ], + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "tags": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation, GitHub Flavored Markdown is allowed." 
+ }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "operationId": { + "type": "string", + "description": "A unique identifier of the operation." + }, + "produces": { + "description": "A list of MIME types the API can produce.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "consumes": { + "description": "A list of MIME types the API can consume.", + "allOf": [ + { + "$ref": "#/definitions/mediaTypeList" + } + ] + }, + "parameters": { + "$ref": "#/definitions/parametersList" + }, + "responses": { + "$ref": "#/definitions/responses" + }, + "schemes": { + "$ref": "#/definitions/schemesList" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "security": { + "$ref": "#/definitions/security" + } + } + }, + "pathItem": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "get": { + "$ref": "#/definitions/operation" + }, + "put": { + "$ref": "#/definitions/operation" + }, + "post": { + "$ref": "#/definitions/operation" + }, + "delete": { + "$ref": "#/definitions/operation" + }, + "options": { + "$ref": "#/definitions/operation" + }, + "head": { + "$ref": "#/definitions/operation" + }, + "patch": { + "$ref": "#/definitions/operation" + }, + "parameters": { + "$ref": "#/definitions/parametersList" + } + } + }, + "responses": { + "type": "object", + "description": "Response objects names can either be any valid HTTP status code or 'default'.", + "minProperties": 1, + "additionalProperties": false, + "patternProperties": { + "^([0-9]{3})$|^(default)$": { + "$ref": "#/definitions/responseValue" + }, + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "not": { + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + } + }, + "responseValue": { + "oneOf": [ + { + "$ref": 
"#/definitions/response" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "response": { + "type": "object", + "required": [ + "description" + ], + "properties": { + "description": { + "type": "string" + }, + "schema": { + "oneOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "$ref": "#/definitions/fileSchema" + } + ] + }, + "headers": { + "$ref": "#/definitions/headers" + }, + "examples": { + "$ref": "#/definitions/examples" + } + }, + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "headers": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/header" + } + }, + "header": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + 
"vendorExtension": { + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "bodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "schema" + ], + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "body" + ] + }, + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "schema": { + "$ref": "#/definitions/schema" + } + }, + "additionalProperties": false + }, + "headerParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "header" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "queryParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "query" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "formDataParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "required": { + "type": "boolean", + "description": "Determines whether or not this parameter is required or optional.", + "default": false + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "formData" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." + }, + "allowEmptyValue": { + "type": "boolean", + "default": false, + "description": "allows sending a parameter by name only or with an empty value." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array", + "file" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormatWithMulti" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "pathParameterSubSchema": { + "additionalProperties": false, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "required" + ], + "properties": { + "required": { + "type": "boolean", + "enum": [ + true + ], + "description": "Determines whether or not this parameter is required or optional." + }, + "in": { + "type": "string", + "description": "Determines the location of the parameter.", + "enum": [ + "path" + ] + }, + "description": { + "type": "string", + "description": "A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "name": { + "type": "string", + "description": "The name of the parameter." 
+ }, + "type": { + "type": "string", + "enum": [ + "string", + "number", + "boolean", + "integer", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + "$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + } + }, + "nonBodyParameter": { + "type": "object", + "required": [ + "name", + "in", + "type" + ], + "oneOf": [ + { + "$ref": "#/definitions/headerParameterSubSchema" + }, + { + "$ref": "#/definitions/formDataParameterSubSchema" + }, + { + "$ref": "#/definitions/queryParameterSubSchema" + }, + { + "$ref": "#/definitions/pathParameterSubSchema" + } + ] + }, + "parameter": { + "oneOf": [ + { + "$ref": "#/definitions/bodyParameter" + }, + { + "$ref": "#/definitions/nonBodyParameter" + } + ] + }, + "schema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "properties": { + "$ref": { + "type": "string" + }, + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": 
"http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "maxProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minProperties": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "type": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/type" + }, + "items": { + "anyOf": [ + { + "$ref": "#/definitions/schema" + }, + { + "type": "array", + "minItems": 
1, + "items": { + "$ref": "#/definitions/schema" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#/definitions/schema" + } + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/schema" + }, + "default": {} + }, + "discriminator": { + "type": "string" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/xml" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "fileSchema": { + "type": "object", + "description": "A deterministic version of a JSON Schema object.", + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + }, + "required": [ + "type" + ], + "properties": { + "format": { + "type": "string" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "required": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/stringArray" + }, + "type": { + "type": "string", + "enum": [ + "file" + ] + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + }, + "example": {} + }, + "additionalProperties": false + }, + "primitivesItems": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "number", + "integer", + "boolean", + "array" + ] + }, + "format": { + "type": "string" + }, + "items": { + "$ref": "#/definitions/primitivesItems" + }, + "collectionFormat": { + "$ref": "#/definitions/collectionFormat" + }, + "default": { + "$ref": "#/definitions/default" + }, + "maximum": { + "$ref": "#/definitions/maximum" + }, + "exclusiveMaximum": { + 
"$ref": "#/definitions/exclusiveMaximum" + }, + "minimum": { + "$ref": "#/definitions/minimum" + }, + "exclusiveMinimum": { + "$ref": "#/definitions/exclusiveMinimum" + }, + "maxLength": { + "$ref": "#/definitions/maxLength" + }, + "minLength": { + "$ref": "#/definitions/minLength" + }, + "pattern": { + "$ref": "#/definitions/pattern" + }, + "maxItems": { + "$ref": "#/definitions/maxItems" + }, + "minItems": { + "$ref": "#/definitions/minItems" + }, + "uniqueItems": { + "$ref": "#/definitions/uniqueItems" + }, + "enum": { + "$ref": "#/definitions/enum" + }, + "multipleOf": { + "$ref": "#/definitions/multipleOf" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "security": { + "type": "array", + "items": { + "$ref": "#/definitions/securityRequirement" + }, + "uniqueItems": true + }, + "securityRequirement": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + } + }, + "xml": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string" + }, + "prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "tag": { + "type": "object", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "externalDocs": { + "$ref": "#/definitions/externalDocs" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "securityDefinitions": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#/definitions/basicAuthenticationSecurity" + }, + { + "$ref": "#/definitions/apiKeySecurity" + }, + { + "$ref": 
"#/definitions/oauth2ImplicitSecurity" + }, + { + "$ref": "#/definitions/oauth2PasswordSecurity" + }, + { + "$ref": "#/definitions/oauth2ApplicationSecurity" + }, + { + "$ref": "#/definitions/oauth2AccessCodeSecurity" + } + ] + } + }, + "basicAuthenticationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "basic" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "apiKeySecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "apiKey" + ] + }, + "name": { + "type": "string" + }, + "in": { + "type": "string", + "enum": [ + "header", + "query" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ImplicitSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "implicit" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2PasswordSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "password" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": 
{ + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2ApplicationSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "application" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2AccessCodeSecurity": { + "type": "object", + "additionalProperties": false, + "required": [ + "type", + "flow", + "authorizationUrl", + "tokenUrl" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "oauth2" + ] + }, + "flow": { + "type": "string", + "enum": [ + "accessCode" + ] + }, + "scopes": { + "$ref": "#/definitions/oauth2Scopes" + }, + "authorizationUrl": { + "type": "string", + "format": "uri" + }, + "tokenUrl": { + "type": "string", + "format": "uri" + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-": { + "$ref": "#/definitions/vendorExtension" + } + } + }, + "oauth2Scopes": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "mediaTypeList": { + "type": "array", + "items": { + "$ref": "#/definitions/mimeType" + }, + "uniqueItems": true + }, + "parametersList": { + "type": "array", + "description": "The parameters needed to send a valid API call.", + "additionalItems": false, + "items": { + "oneOf": [ + { + "$ref": "#/definitions/parameter" + }, + { + "$ref": "#/definitions/jsonReference" + } + ] + }, + "uniqueItems": true + }, + "schemesList": { + "type": "array", + "description": "The transfer protocol of the API.", + "items": { + "type": "string", + "enum": [ + "http", + "https", + "ws", + "wss" + ] + }, + 
"uniqueItems": true + }, + "collectionFormat": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes" + ], + "default": "csv" + }, + "collectionFormatWithMulti": { + "type": "string", + "enum": [ + "csv", + "ssv", + "tsv", + "pipes", + "multi" + ], + "default": "csv" + }, + "title": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/title" + }, + "description": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/description" + }, + "default": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/default" + }, + "multipleOf": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/multipleOf" + }, + "maximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/maximum" + }, + "exclusiveMaximum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMaximum" + }, + "minimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/minimum" + }, + "exclusiveMinimum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/exclusiveMinimum" + }, + "maxLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minLength": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "pattern": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/pattern" + }, + "maxItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveInteger" + }, + "minItems": { + "$ref": "http://json-schema.org/draft-04/schema#/definitions/positiveIntegerDefault0" + }, + "uniqueItems": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/uniqueItems" + }, + "enum": { + "$ref": "http://json-schema.org/draft-04/schema#/properties/enum" + }, + "jsonReference": { + "type": "object", + "required": [ + "$ref" + ], + "additionalProperties": false, + "properties": { + "$ref": { + "type": "string" + } + } + } + } +} diff --git a/vendor/github.com/go-openapi/spec/security_scheme.go 
b/vendor/github.com/go-openapi/spec/security_scheme.go new file mode 100644 index 0000000..9d0bdae --- /dev/null +++ b/vendor/github.com/go-openapi/spec/security_scheme.go @@ -0,0 +1,170 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +const ( + basic = "basic" + apiKey = "apiKey" + oauth2 = "oauth2" + implicit = "implicit" + password = "password" + application = "application" + accessCode = "accessCode" +) + +// BasicAuth creates a basic auth security scheme +func BasicAuth() *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: basic}} +} + +// APIKeyAuth creates an api key auth security scheme +func APIKeyAuth(fieldName, valueSource string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: apiKey, Name: fieldName, In: valueSource}} +} + +// OAuth2Implicit creates an implicit flow oauth2 security scheme +func OAuth2Implicit(authorizationURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: implicit, + AuthorizationURL: authorizationURL, + }} +} + +// OAuth2Password creates a password flow oauth2 security scheme +func OAuth2Password(tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + 
Flow: password, + TokenURL: tokenURL, + }} +} + +// OAuth2Application creates an application flow oauth2 security scheme +func OAuth2Application(tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: application, + TokenURL: tokenURL, + }} +} + +// OAuth2AccessToken creates an access token flow oauth2 security scheme +func OAuth2AccessToken(authorizationURL, tokenURL string) *SecurityScheme { + return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{ + Type: oauth2, + Flow: accessCode, + AuthorizationURL: authorizationURL, + TokenURL: tokenURL, + }} +} + +// SecuritySchemeProps describes a swagger security scheme in the securityDefinitions section +type SecuritySchemeProps struct { + Description string `json:"description,omitempty"` + Type string `json:"type"` + Name string `json:"name,omitempty"` // api key + In string `json:"in,omitempty"` // api key + Flow string `json:"flow,omitempty"` // oauth2 + AuthorizationURL string `json:"authorizationUrl"` // oauth2 + TokenURL string `json:"tokenUrl,omitempty"` // oauth2 + Scopes map[string]string `json:"scopes,omitempty"` // oauth2 +} + +// AddScope adds a scope to this security scheme +func (s *SecuritySchemeProps) AddScope(scope, description string) { + if s.Scopes == nil { + s.Scopes = make(map[string]string) + } + s.Scopes[scope] = description +} + +// SecurityScheme allows the definition of a security scheme that can be used by the operations. +// Supported schemes are basic authentication, an API key (either as a header or as a query parameter) +// and OAuth2's common flows (implicit, password, application and access code). 
+// +// For more information: http://goo.gl/8us55a#securitySchemeObject +type SecurityScheme struct { + VendorExtensible + SecuritySchemeProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SecurityScheme) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(s.SecuritySchemeProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (s SecurityScheme) MarshalJSON() ([]byte, error) { + var ( + b1 []byte + err error + ) + + if s.Type == oauth2 && (s.Flow == "implicit" || s.Flow == "accessCode") { + // when oauth2 for implicit or accessCode flows, empty AuthorizationURL is added as empty string + b1, err = json.Marshal(s.SecuritySchemeProps) + } else { + // when not oauth2, empty AuthorizationURL should be omitted + b1, err = json.Marshal(struct { + Description string `json:"description,omitempty"` + Type string `json:"type"` + Name string `json:"name,omitempty"` // api key + In string `json:"in,omitempty"` // api key + Flow string `json:"flow,omitempty"` // oauth2 + AuthorizationURL string `json:"authorizationUrl,omitempty"` // oauth2 + TokenURL string `json:"tokenUrl,omitempty"` // oauth2 + Scopes map[string]string `json:"scopes,omitempty"` // oauth2 + }{ + Description: s.Description, + Type: s.Type, + Name: s.Name, + In: s.In, + Flow: s.Flow, + AuthorizationURL: s.AuthorizationURL, + TokenURL: s.TokenURL, + Scopes: s.Scopes, + }) + } + if err != nil { + return nil, err + } + + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (s *SecurityScheme) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &s.SecuritySchemeProps); err != nil { + return err + } + return json.Unmarshal(data, &s.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/spec.go 
b/vendor/github.com/go-openapi/spec/spec.go new file mode 100644 index 0000000..876aa12 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/spec.go @@ -0,0 +1,78 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" +) + +//go:generate curl -L --progress -o ./schemas/v2/schema.json http://swagger.io/v2/schema.json +//go:generate curl -L --progress -o ./schemas/jsonschema-draft-04.json http://json-schema.org/draft-04/schema +//go:generate go-bindata -pkg=spec -prefix=./schemas -ignore=.*\.md ./schemas/... 
+//go:generate perl -pi -e s,Json,JSON,g bindata.go + +const ( + // SwaggerSchemaURL the url for the swagger 2.0 schema to validate specs + SwaggerSchemaURL = "http://swagger.io/v2/schema.json#" + // JSONSchemaURL the url for the json schema + JSONSchemaURL = "http://json-schema.org/draft-04/schema#" +) + +// MustLoadJSONSchemaDraft04 panics when Swagger20Schema returns an error +func MustLoadJSONSchemaDraft04() *Schema { + d, e := JSONSchemaDraft04() + if e != nil { + panic(e) + } + return d +} + +// JSONSchemaDraft04 loads the json schema document for json shema draft04 +func JSONSchemaDraft04() (*Schema, error) { + b, err := jsonschemaDraft04JSONBytes() + if err != nil { + return nil, err + } + + schema := new(Schema) + if err := json.Unmarshal(b, schema); err != nil { + return nil, err + } + return schema, nil +} + +// MustLoadSwagger20Schema panics when Swagger20Schema returns an error +func MustLoadSwagger20Schema() *Schema { + d, e := Swagger20Schema() + if e != nil { + panic(e) + } + return d +} + +// Swagger20Schema loads the swagger 2.0 schema from the embedded assets +func Swagger20Schema() (*Schema, error) { + + b, err := v2SchemaJSONBytes() + if err != nil { + return nil, err + } + + schema := new(Schema) + if err := json.Unmarshal(b, schema); err != nil { + return nil, err + } + return schema, nil +} diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go new file mode 100644 index 0000000..1590fd1 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/swagger.go @@ -0,0 +1,448 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "bytes" + "encoding/gob" + "encoding/json" + "fmt" + "strconv" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// Swagger this is the root document object for the API specification. +// It combines what previously was the Resource Listing and API Declaration (version 1.2 and earlier) +// together into one document. +// +// For more information: http://goo.gl/8us55a#swagger-object- +type Swagger struct { + VendorExtensible + SwaggerProps +} + +// JSONLookup look up a value by the json property name +func (s Swagger) JSONLookup(token string) (interface{}, error) { + if ex, ok := s.Extensions[token]; ok { + return &ex, nil + } + r, _, err := jsonpointer.GetForToken(s.SwaggerProps, token) + return r, err +} + +// MarshalJSON marshals this swagger structure to json +func (s Swagger) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(s.SwaggerProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(s.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON unmarshals a swagger spec from json +func (s *Swagger) UnmarshalJSON(data []byte) error { + var sw Swagger + if err := json.Unmarshal(data, &sw.SwaggerProps); err != nil { + return err + } + if err := json.Unmarshal(data, &sw.VendorExtensible); err != nil { + return err + } + *s = sw + return nil +} + +// GobEncode provides a safe gob encoder for Swagger, including extensions +func (s Swagger) GobEncode() ([]byte, error) { + var b bytes.Buffer + raw := struct { 
+ Props SwaggerProps + Ext VendorExtensible + }{ + Props: s.SwaggerProps, + Ext: s.VendorExtensible, + } + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for Swagger, including extensions +func (s *Swagger) GobDecode(b []byte) error { + var raw struct { + Props SwaggerProps + Ext VendorExtensible + } + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + s.SwaggerProps = raw.Props + s.VendorExtensible = raw.Ext + return nil +} + +// SwaggerProps captures the top-level properties of an Api specification +// +// NOTE: validation rules +// - the scheme, when present must be from [http, https, ws, wss] +// - BasePath must start with a leading "/" +// - Paths is required +type SwaggerProps struct { + ID string `json:"id,omitempty"` + Consumes []string `json:"consumes,omitempty"` + Produces []string `json:"produces,omitempty"` + Schemes []string `json:"schemes,omitempty"` + Swagger string `json:"swagger,omitempty"` + Info *Info `json:"info,omitempty"` + Host string `json:"host,omitempty"` + BasePath string `json:"basePath,omitempty"` + Paths *Paths `json:"paths"` + Definitions Definitions `json:"definitions,omitempty"` + Parameters map[string]Parameter `json:"parameters,omitempty"` + Responses map[string]Response `json:"responses,omitempty"` + SecurityDefinitions SecurityDefinitions `json:"securityDefinitions,omitempty"` + Security []map[string][]string `json:"security,omitempty"` + Tags []Tag `json:"tags,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` +} + +type swaggerPropsAlias SwaggerProps + +type gobSwaggerPropsAlias struct { + Security []map[string]struct { + List []string + Pad bool + } + Alias *swaggerPropsAlias + SecurityIsEmpty bool +} + +// GobEncode provides a safe gob encoder for SwaggerProps, including empty security requirements +func (o SwaggerProps) GobEncode() ([]byte, error) { + raw := gobSwaggerPropsAlias{ 
+ Alias: (*swaggerPropsAlias)(&o), + } + + var b bytes.Buffer + if o.Security == nil { + // nil security requirement + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + if len(o.Security) == 0 { + // empty, but non-nil security requirement + raw.SecurityIsEmpty = true + raw.Alias.Security = nil + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err + } + + raw.Security = make([]map[string]struct { + List []string + Pad bool + }, 0, len(o.Security)) + for _, req := range o.Security { + v := make(map[string]struct { + List []string + Pad bool + }, len(req)) + for k, val := range req { + v[k] = struct { + List []string + Pad bool + }{ + List: val, + } + } + raw.Security = append(raw.Security, v) + } + + err := gob.NewEncoder(&b).Encode(raw) + return b.Bytes(), err +} + +// GobDecode provides a safe gob decoder for SwaggerProps, including empty security requirements +func (o *SwaggerProps) GobDecode(b []byte) error { + var raw gobSwaggerPropsAlias + + buf := bytes.NewBuffer(b) + err := gob.NewDecoder(buf).Decode(&raw) + if err != nil { + return err + } + if raw.Alias == nil { + return nil + } + + switch { + case raw.SecurityIsEmpty: + // empty, but non-nil security requirement + raw.Alias.Security = []map[string][]string{} + case len(raw.Alias.Security) == 0: + // nil security requirement + raw.Alias.Security = nil + default: + raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security)) + for _, req := range raw.Security { + v := make(map[string][]string, len(req)) + for k, val := range req { + v[k] = make([]string, 0, len(val.List)) + v[k] = append(v[k], val.List...) 
+ } + raw.Alias.Security = append(raw.Alias.Security, v) + } + } + + *o = *(*SwaggerProps)(raw.Alias) + return nil +} + +// Dependencies represent a dependencies property +type Dependencies map[string]SchemaOrStringArray + +// SchemaOrBool represents a schema or boolean value, is biased towards true for the boolean property +type SchemaOrBool struct { + Allows bool + Schema *Schema +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrBool) JSONLookup(token string) (interface{}, error) { + if token == "allows" { + return s.Allows, nil + } + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +var jsTrue = []byte("true") +var jsFalse = []byte("false") + +// MarshalJSON convert this object to JSON +func (s SchemaOrBool) MarshalJSON() ([]byte, error) { + if s.Schema != nil { + return json.Marshal(s.Schema) + } + + if s.Schema == nil && !s.Allows { + return jsFalse, nil + } + return jsTrue, nil +} + +// UnmarshalJSON converts this bool or schema object from a JSON structure +func (s *SchemaOrBool) UnmarshalJSON(data []byte) error { + var nw SchemaOrBool + if len(data) > 0 { + if data[0] == '{' { + var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + nw.Allows = !bytes.Equal(data, []byte("false")) + } + *s = nw + return nil +} + +// SchemaOrStringArray represents a schema or a string array +type SchemaOrStringArray struct { + Schema *Schema + Property []string +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrStringArray) JSONLookup(token string) (interface{}, error) { + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +// MarshalJSON converts this schema object or array into JSON structure +func (s SchemaOrStringArray) MarshalJSON() ([]byte, error) { + if len(s.Property) > 0 { + return json.Marshal(s.Property) + } + if s.Schema != nil { + return json.Marshal(s.Schema) + } + return 
[]byte("null"), nil +} + +// UnmarshalJSON converts this schema object or array from a JSON structure +func (s *SchemaOrStringArray) UnmarshalJSON(data []byte) error { + var first byte + if len(data) > 1 { + first = data[0] + } + var nw SchemaOrStringArray + if first == '{' { + var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + if first == '[' { + if err := json.Unmarshal(data, &nw.Property); err != nil { + return err + } + } + *s = nw + return nil +} + +// Definitions contains the models explicitly defined in this spec +// An object to hold data types that can be consumed and produced by operations. +// These data types can be primitives, arrays or models. +// +// For more information: http://goo.gl/8us55a#definitionsObject +type Definitions map[string]Schema + +// SecurityDefinitions a declaration of the security schemes available to be used in the specification. +// This does not enforce the security schemes on the operations and only serves to provide +// the relevant details for each scheme. +// +// For more information: http://goo.gl/8us55a#securityDefinitionsObject +type SecurityDefinitions map[string]*SecurityScheme + +// StringOrArray represents a value that can either be a string +// or an array of strings. 
Mainly here for serialization purposes +type StringOrArray []string + +// Contains returns true when the value is contained in the slice +func (s StringOrArray) Contains(value string) bool { + for _, str := range s { + if str == value { + return true + } + } + return false +} + +// JSONLookup implements an interface to customize json pointer lookup +func (s SchemaOrArray) JSONLookup(token string) (interface{}, error) { + if _, err := strconv.Atoi(token); err == nil { + r, _, err := jsonpointer.GetForToken(s.Schemas, token) + return r, err + } + r, _, err := jsonpointer.GetForToken(s.Schema, token) + return r, err +} + +// UnmarshalJSON unmarshals this string or array object from a JSON array or JSON string +func (s *StringOrArray) UnmarshalJSON(data []byte) error { + var first byte + if len(data) > 1 { + first = data[0] + } + + if first == '[' { + var parsed []string + if err := json.Unmarshal(data, &parsed); err != nil { + return err + } + *s = StringOrArray(parsed) + return nil + } + + var single interface{} + if err := json.Unmarshal(data, &single); err != nil { + return err + } + if single == nil { + return nil + } + switch v := single.(type) { + case string: + *s = StringOrArray([]string{v}) + return nil + default: + return fmt.Errorf("only string or array is allowed, not %T", single) + } +} + +// MarshalJSON converts this string or array to a JSON array or JSON string +func (s StringOrArray) MarshalJSON() ([]byte, error) { + if len(s) == 1 { + return json.Marshal([]string(s)[0]) + } + return json.Marshal([]string(s)) +} + +// SchemaOrArray represents a value that can either be a Schema +// or an array of Schema. 
Mainly here for serialization purposes +type SchemaOrArray struct { + Schema *Schema + Schemas []Schema +} + +// Len returns the number of schemas in this property +func (s SchemaOrArray) Len() int { + if s.Schema != nil { + return 1 + } + return len(s.Schemas) +} + +// ContainsType returns true when one of the schemas is of the specified type +func (s *SchemaOrArray) ContainsType(name string) bool { + if s.Schema != nil { + return s.Schema.Type != nil && s.Schema.Type.Contains(name) + } + return false +} + +// MarshalJSON converts this schema object or array into JSON structure +func (s SchemaOrArray) MarshalJSON() ([]byte, error) { + if len(s.Schemas) > 0 { + return json.Marshal(s.Schemas) + } + return json.Marshal(s.Schema) +} + +// UnmarshalJSON converts this schema object or array from a JSON structure +func (s *SchemaOrArray) UnmarshalJSON(data []byte) error { + var nw SchemaOrArray + var first byte + if len(data) > 1 { + first = data[0] + } + if first == '{' { + var sch Schema + if err := json.Unmarshal(data, &sch); err != nil { + return err + } + nw.Schema = &sch + } + if first == '[' { + if err := json.Unmarshal(data, &nw.Schemas); err != nil { + return err + } + } + *s = nw + return nil +} + +// vim:set ft=go noet sts=2 sw=2 ts=2: diff --git a/vendor/github.com/go-openapi/spec/tag.go b/vendor/github.com/go-openapi/spec/tag.go new file mode 100644 index 0000000..faa3d3d --- /dev/null +++ b/vendor/github.com/go-openapi/spec/tag.go @@ -0,0 +1,75 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +import ( + "encoding/json" + + "github.com/go-openapi/jsonpointer" + "github.com/go-openapi/swag" +) + +// TagProps describe a tag entry in the top level tags section of a swagger spec +type TagProps struct { + Description string `json:"description,omitempty"` + Name string `json:"name,omitempty"` + ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"` +} + +// NewTag creates a new tag +func NewTag(name, description string, externalDocs *ExternalDocumentation) Tag { + return Tag{TagProps: TagProps{Description: description, Name: name, ExternalDocs: externalDocs}} +} + +// Tag allows adding meta data to a single tag that is used by the +// [Operation Object](http://goo.gl/8us55a#operationObject). +// It is not mandatory to have a Tag Object per tag used there. +// +// For more information: http://goo.gl/8us55a#tagObject +type Tag struct { + VendorExtensible + TagProps +} + +// JSONLookup implements an interface to customize json pointer lookup +func (t Tag) JSONLookup(token string) (interface{}, error) { + if ex, ok := t.Extensions[token]; ok { + return &ex, nil + } + + r, _, err := jsonpointer.GetForToken(t.TagProps, token) + return r, err +} + +// MarshalJSON marshal this to JSON +func (t Tag) MarshalJSON() ([]byte, error) { + b1, err := json.Marshal(t.TagProps) + if err != nil { + return nil, err + } + b2, err := json.Marshal(t.VendorExtensible) + if err != nil { + return nil, err + } + return swag.ConcatJSON(b1, b2), nil +} + +// UnmarshalJSON marshal this from JSON +func (t *Tag) UnmarshalJSON(data []byte) error { + if err := json.Unmarshal(data, &t.TagProps); err != nil { + return err + } + return json.Unmarshal(data, &t.VendorExtensible) +} diff --git a/vendor/github.com/go-openapi/spec/url_go19.go b/vendor/github.com/go-openapi/spec/url_go19.go new file mode 100644 index 0000000..5bdfe40 --- /dev/null +++ 
b/vendor/github.com/go-openapi/spec/url_go19.go @@ -0,0 +1,11 @@ +package spec + +import "net/url" + +func parseURL(s string) (*url.URL, error) { + u, err := url.Parse(s) + if err == nil { + u.OmitHost = false + } + return u, err +} diff --git a/vendor/github.com/go-openapi/spec/validations.go b/vendor/github.com/go-openapi/spec/validations.go new file mode 100644 index 0000000..6360a8e --- /dev/null +++ b/vendor/github.com/go-openapi/spec/validations.go @@ -0,0 +1,215 @@ +package spec + +// CommonValidations describe common JSON-schema validations +type CommonValidations struct { + Maximum *float64 `json:"maximum,omitempty"` + ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` + MaxLength *int64 `json:"maxLength,omitempty"` + MinLength *int64 `json:"minLength,omitempty"` + Pattern string `json:"pattern,omitempty"` + MaxItems *int64 `json:"maxItems,omitempty"` + MinItems *int64 `json:"minItems,omitempty"` + UniqueItems bool `json:"uniqueItems,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty"` + Enum []interface{} `json:"enum,omitempty"` +} + +// SetValidations defines all validations for a simple schema. +// +// NOTE: the input is the larger set of validations available for schemas. +// For simple schemas, MinProperties and MaxProperties are ignored. 
+func (v *CommonValidations) SetValidations(val SchemaValidations) { + v.Maximum = val.Maximum + v.ExclusiveMaximum = val.ExclusiveMaximum + v.Minimum = val.Minimum + v.ExclusiveMinimum = val.ExclusiveMinimum + v.MaxLength = val.MaxLength + v.MinLength = val.MinLength + v.Pattern = val.Pattern + v.MaxItems = val.MaxItems + v.MinItems = val.MinItems + v.UniqueItems = val.UniqueItems + v.MultipleOf = val.MultipleOf + v.Enum = val.Enum +} + +type clearedValidation struct { + Validation string + Value interface{} +} + +type clearedValidations []clearedValidation + +func (c clearedValidations) apply(cbs []func(string, interface{})) { + for _, cb := range cbs { + for _, cleared := range c { + cb(cleared.Validation, cleared.Value) + } + } +} + +// ClearNumberValidations clears all number validations. +// +// Some callbacks may be set by the caller to capture changed values. +func (v *CommonValidations) ClearNumberValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 5) + defer func() { + done.apply(cbs) + }() + + if v.Minimum != nil { + done = append(done, clearedValidation{Validation: "minimum", Value: v.Minimum}) + v.Minimum = nil + } + if v.Maximum != nil { + done = append(done, clearedValidation{Validation: "maximum", Value: v.Maximum}) + v.Maximum = nil + } + if v.ExclusiveMaximum { + done = append(done, clearedValidation{Validation: "exclusiveMaximum", Value: v.ExclusiveMaximum}) + v.ExclusiveMaximum = false + } + if v.ExclusiveMinimum { + done = append(done, clearedValidation{Validation: "exclusiveMinimum", Value: v.ExclusiveMinimum}) + v.ExclusiveMinimum = false + } + if v.MultipleOf != nil { + done = append(done, clearedValidation{Validation: "multipleOf", Value: v.MultipleOf}) + v.MultipleOf = nil + } +} + +// ClearStringValidations clears all string validations. +// +// Some callbacks may be set by the caller to capture changed values. 
+func (v *CommonValidations) ClearStringValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.Pattern != "" { + done = append(done, clearedValidation{Validation: "pattern", Value: v.Pattern}) + v.Pattern = "" + } + if v.MinLength != nil { + done = append(done, clearedValidation{Validation: "minLength", Value: v.MinLength}) + v.MinLength = nil + } + if v.MaxLength != nil { + done = append(done, clearedValidation{Validation: "maxLength", Value: v.MaxLength}) + v.MaxLength = nil + } +} + +// ClearArrayValidations clears all array validations. +// +// Some callbacks may be set by the caller to capture changed values. +func (v *CommonValidations) ClearArrayValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.MaxItems != nil { + done = append(done, clearedValidation{Validation: "maxItems", Value: v.MaxItems}) + v.MaxItems = nil + } + if v.MinItems != nil { + done = append(done, clearedValidation{Validation: "minItems", Value: v.MinItems}) + v.MinItems = nil + } + if v.UniqueItems { + done = append(done, clearedValidation{Validation: "uniqueItems", Value: v.UniqueItems}) + v.UniqueItems = false + } +} + +// Validations returns a clone of the validations for a simple schema. +// +// NOTE: in the context of simple schema objects, MinProperties, MaxProperties +// and PatternProperties remain unset. 
+func (v CommonValidations) Validations() SchemaValidations { + return SchemaValidations{ + CommonValidations: v, + } +} + +// HasNumberValidations indicates if the validations are for numbers or integers +func (v CommonValidations) HasNumberValidations() bool { + return v.Maximum != nil || v.Minimum != nil || v.MultipleOf != nil +} + +// HasStringValidations indicates if the validations are for strings +func (v CommonValidations) HasStringValidations() bool { + return v.MaxLength != nil || v.MinLength != nil || v.Pattern != "" +} + +// HasArrayValidations indicates if the validations are for arrays +func (v CommonValidations) HasArrayValidations() bool { + return v.MaxItems != nil || v.MinItems != nil || v.UniqueItems +} + +// HasEnum indicates if the validation includes some enum constraint +func (v CommonValidations) HasEnum() bool { + return len(v.Enum) > 0 +} + +// SchemaValidations describes the validation properties of a schema +// +// NOTE: at this moment, this is not embedded in SchemaProps because this would induce a breaking change +// in the exported members: all initializers using litterals would fail. 
+type SchemaValidations struct { + CommonValidations + + PatternProperties SchemaProperties `json:"patternProperties,omitempty"` + MaxProperties *int64 `json:"maxProperties,omitempty"` + MinProperties *int64 `json:"minProperties,omitempty"` +} + +// HasObjectValidations indicates if the validations are for objects +func (v SchemaValidations) HasObjectValidations() bool { + return v.MaxProperties != nil || v.MinProperties != nil || v.PatternProperties != nil +} + +// SetValidations for schema validations +func (v *SchemaValidations) SetValidations(val SchemaValidations) { + v.CommonValidations.SetValidations(val) + v.PatternProperties = val.PatternProperties + v.MaxProperties = val.MaxProperties + v.MinProperties = val.MinProperties +} + +// Validations for a schema +func (v SchemaValidations) Validations() SchemaValidations { + val := v.CommonValidations.Validations() + val.PatternProperties = v.PatternProperties + val.MinProperties = v.MinProperties + val.MaxProperties = v.MaxProperties + return val +} + +// ClearObjectValidations returns a clone of the validations with all object validations cleared. +// +// Some callbacks may be set by the caller to capture changed values. 
+func (v *SchemaValidations) ClearObjectValidations(cbs ...func(string, interface{})) { + done := make(clearedValidations, 0, 3) + defer func() { + done.apply(cbs) + }() + + if v.MaxProperties != nil { + done = append(done, clearedValidation{Validation: "maxProperties", Value: v.MaxProperties}) + v.MaxProperties = nil + } + if v.MinProperties != nil { + done = append(done, clearedValidation{Validation: "minProperties", Value: v.MinProperties}) + v.MinProperties = nil + } + if v.PatternProperties != nil { + done = append(done, clearedValidation{Validation: "patternProperties", Value: v.PatternProperties}) + v.PatternProperties = nil + } +} diff --git a/vendor/github.com/go-openapi/spec/xml_object.go b/vendor/github.com/go-openapi/spec/xml_object.go new file mode 100644 index 0000000..945a467 --- /dev/null +++ b/vendor/github.com/go-openapi/spec/xml_object.go @@ -0,0 +1,68 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package spec + +// XMLObject a metadata object that allows for more fine-tuned XML model definitions. 
+// +// For more information: http://goo.gl/8us55a#xmlObject +type XMLObject struct { + Name string `json:"name,omitempty"` + Namespace string `json:"namespace,omitempty"` + Prefix string `json:"prefix,omitempty"` + Attribute bool `json:"attribute,omitempty"` + Wrapped bool `json:"wrapped,omitempty"` +} + +// WithName sets the xml name for the object +func (x *XMLObject) WithName(name string) *XMLObject { + x.Name = name + return x +} + +// WithNamespace sets the xml namespace for the object +func (x *XMLObject) WithNamespace(namespace string) *XMLObject { + x.Namespace = namespace + return x +} + +// WithPrefix sets the xml prefix for the object +func (x *XMLObject) WithPrefix(prefix string) *XMLObject { + x.Prefix = prefix + return x +} + +// AsAttribute flags this object as xml attribute +func (x *XMLObject) AsAttribute() *XMLObject { + x.Attribute = true + return x +} + +// AsElement flags this object as an xml node +func (x *XMLObject) AsElement() *XMLObject { + x.Attribute = false + return x +} + +// AsWrapped flags this object as wrapped, this is mostly useful for array types +func (x *XMLObject) AsWrapped() *XMLObject { + x.Wrapped = true + return x +} + +// AsUnwrapped flags this object as an xml node +func (x *XMLObject) AsUnwrapped() *XMLObject { + x.Wrapped = false + return x +} diff --git a/vendor/github.com/go-openapi/strfmt/.editorconfig b/vendor/github.com/go-openapi/strfmt/.editorconfig new file mode 100644 index 0000000..3152da6 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/.editorconfig @@ -0,0 +1,26 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +# Set default charset +[*.{js,py,go,scala,rb,java,html,css,less,sass,md}] +charset = utf-8 + +# Tab indentation (no size specified) +[*.go] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +# 
Matches the exact files either package.json or .travis.yml +[{package.json,.travis.yml}] +indent_style = space +indent_size = 2 diff --git a/vendor/github.com/go-openapi/strfmt/.gitattributes b/vendor/github.com/go-openapi/strfmt/.gitattributes new file mode 100644 index 0000000..d020be8 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/.gitattributes @@ -0,0 +1,2 @@ +*.go text eol=lf + diff --git a/vendor/github.com/go-openapi/strfmt/.gitignore b/vendor/github.com/go-openapi/strfmt/.gitignore new file mode 100644 index 0000000..dd91ed6 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/.gitignore @@ -0,0 +1,2 @@ +secrets.yml +coverage.out diff --git a/vendor/github.com/go-openapi/strfmt/.golangci.yml b/vendor/github.com/go-openapi/strfmt/.golangci.yml new file mode 100644 index 0000000..22f8d21 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/.golangci.yml @@ -0,0 +1,61 @@ +linters-settings: + govet: + check-shadowing: true + golint: + min-confidence: 0 + gocyclo: + min-complexity: 45 + maligned: + suggest-new: true + dupl: + threshold: 200 + goconst: + min-len: 2 + min-occurrences: 3 + +linters: + enable-all: true + disable: + - maligned + - unparam + - lll + - gochecknoinits + - gochecknoglobals + - funlen + - godox + - gocognit + - whitespace + - wsl + - wrapcheck + - testpackage + - nlreturn + - gomnd + - exhaustivestruct + - goerr113 + - errorlint + - nestif + - godot + - gofumpt + - paralleltest + - tparallel + - thelper + - ifshort + - exhaustruct + - varnamelen + - gci + - depguard + - errchkjson + - inamedparam + - nonamedreturns + - musttag + - ireturn + - forcetypeassert + - cyclop + # deprecated linters + - deadcode + - interfacer + - scopelint + - varcheck + - structcheck + - golint + - nosnakecase diff --git a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..9322b06 --- /dev/null +++ 
b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md @@ -0,0 +1,74 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at ivan+abuse@flanders.co.nz. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/go-openapi/strfmt/LICENSE b/vendor/github.com/go-openapi/strfmt/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/go-openapi/strfmt/README.md b/vendor/github.com/go-openapi/strfmt/README.md new file mode 100644 index 0000000..f6b39c6 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/README.md @@ -0,0 +1,87 @@ +# Strfmt [![Build Status](https://github.com/go-openapi/strfmt/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/strfmt/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/strfmt/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/strfmt) +[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) +[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/strfmt/master/LICENSE) +[![GoDoc](https://godoc.org/github.com/go-openapi/strfmt?status.svg)](http://godoc.org/github.com/go-openapi/strfmt) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/strfmt)](https://goreportcard.com/report/github.com/go-openapi/strfmt) + +This package exposes a registry of data types to support string formats in the go-openapi toolkit. + +strfmt represents a well known string format such as credit card or email. The go toolkit for OpenAPI specifications knows how to deal with those. + +## Supported data formats +go-openapi/strfmt follows the swagger 2.0 specification with the following formats +defined [here](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types). + +It also provides convenient extensions to go-openapi users. + +- [x] JSON-schema draft 4 formats + - date-time + - email + - hostname + - ipv4 + - ipv6 + - uri +- [x] swagger 2.0 format extensions + - binary + - byte (e.g. base64 encoded string) + - date (e.g. "1970-01-01") + - password +- [x] go-openapi custom format extensions + - bsonobjectid (BSON objectID) + - creditcard + - duration (e.g. "3 weeks", "1ms") + - hexcolor (e.g. 
"#FFFFFF") + - isbn, isbn10, isbn13 + - mac (e.g "01:02:03:04:05:06") + - rgbcolor (e.g. "rgb(100,100,100)") + - ssn + - uuid, uuid3, uuid4, uuid5 + - cidr (e.g. "192.0.2.1/24", "2001:db8:a0b:12f0::1/32") + - ulid (e.g. "00000PP9HGSBSSDZ1JTEXBJ0PW", [spec](https://github.com/ulid/spec)) + +> NOTE: as the name stands for, this package is intended to support string formatting only. +> It does not provide validation for numerical values with swagger format extension for JSON types "number" or +> "integer" (e.g. float, double, int32...). + +## Type conversion + +All types defined here are stringers and may be converted to strings with `.String()`. +Note that most types defined by this package may be converted directly to string like `string(Email{})`. + +`Date` and `DateTime` may be converted directly to `time.Time` like `time.Time(Time{})`. +Similarly, you can convert `Duration` to `time.Duration` as in `time.Duration(Duration{})` + +## Using pointers + +The `conv` subpackage provides helpers to convert the types to and from pointers, just like `go-openapi/swag` does +with primitive types. + +## Format types +Types defined in strfmt expose marshaling and validation capabilities. + +List of defined types: +- Base64 +- CreditCard +- Date +- DateTime +- Duration +- Email +- HexColor +- Hostname +- IPv4 +- IPv6 +- CIDR +- ISBN +- ISBN10 +- ISBN13 +- MAC +- ObjectId +- Password +- RGBColor +- SSN +- URI +- UUID +- UUID3 +- UUID4 +- UUID5 +- [ULID](https://github.com/ulid/spec) diff --git a/vendor/github.com/go-openapi/strfmt/bson.go b/vendor/github.com/go-openapi/strfmt/bson.go new file mode 100644 index 0000000..cfa9a52 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/bson.go @@ -0,0 +1,165 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package strfmt + +import ( + "database/sql/driver" + "fmt" + + "go.mongodb.org/mongo-driver/bson" + + "go.mongodb.org/mongo-driver/bson/bsontype" + bsonprim "go.mongodb.org/mongo-driver/bson/primitive" +) + +func init() { + var id ObjectId + // register this format in the default registry + Default.Add("bsonobjectid", &id, IsBSONObjectID) +} + +// IsBSONObjectID returns true when the string is a valid BSON.ObjectId +func IsBSONObjectID(str string) bool { + _, err := bsonprim.ObjectIDFromHex(str) + return err == nil +} + +// ObjectId represents a BSON object ID (alias to go.mongodb.org/mongo-driver/bson/primitive.ObjectID) +// +// swagger:strfmt bsonobjectid +type ObjectId bsonprim.ObjectID //nolint:revive,stylecheck + +// NewObjectId creates a ObjectId from a Hex String +func NewObjectId(hex string) ObjectId { //nolint:revive,stylecheck + oid, err := bsonprim.ObjectIDFromHex(hex) + if err != nil { + panic(err) + } + return ObjectId(oid) +} + +// MarshalText turns this instance into text +func (id ObjectId) MarshalText() ([]byte, error) { + oid := bsonprim.ObjectID(id) + if oid == bsonprim.NilObjectID { + return nil, nil + } + return []byte(oid.Hex()), nil +} + +// UnmarshalText hydrates this instance from text +func (id *ObjectId) UnmarshalText(data []byte) error { // validation is performed later on + if len(data) == 0 { + *id = ObjectId(bsonprim.NilObjectID) + return nil + } + oidstr := string(data) + oid, err := bsonprim.ObjectIDFromHex(oidstr) + if err != nil { + return err + } + *id = ObjectId(oid) + return nil +} + +// Scan read a value 
from a database driver +func (id *ObjectId) Scan(raw interface{}) error { + var data []byte + switch v := raw.(type) { + case []byte: + data = v + case string: + data = []byte(v) + default: + return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v", v) + } + + return id.UnmarshalText(data) +} + +// Value converts a value to a database driver value +func (id ObjectId) Value() (driver.Value, error) { + return driver.Value(bsonprim.ObjectID(id).Hex()), nil +} + +func (id ObjectId) String() string { + return bsonprim.ObjectID(id).Hex() +} + +// MarshalJSON returns the ObjectId as JSON +func (id ObjectId) MarshalJSON() ([]byte, error) { + return bsonprim.ObjectID(id).MarshalJSON() +} + +// UnmarshalJSON sets the ObjectId from JSON +func (id *ObjectId) UnmarshalJSON(data []byte) error { + var obj bsonprim.ObjectID + if err := obj.UnmarshalJSON(data); err != nil { + return err + } + *id = ObjectId(obj) + return nil +} + +// MarshalBSON renders the object id as a BSON document +func (id ObjectId) MarshalBSON() ([]byte, error) { + return bson.Marshal(bson.M{"data": bsonprim.ObjectID(id)}) +} + +// UnmarshalBSON reads the objectId from a BSON document +func (id *ObjectId) UnmarshalBSON(data []byte) error { + var obj struct { + Data bsonprim.ObjectID + } + if err := bson.Unmarshal(data, &obj); err != nil { + return err + } + *id = ObjectId(obj.Data) + return nil +} + +// MarshalBSONValue is an interface implemented by types that can marshal themselves +// into a BSON document represented as bytes. The bytes returned must be a valid +// BSON document if the error is nil. +func (id ObjectId) MarshalBSONValue() (bsontype.Type, []byte, error) { + oid := bsonprim.ObjectID(id) + return bson.TypeObjectID, oid[:], nil +} + +// UnmarshalBSONValue is an interface implemented by types that can unmarshal a +// BSON value representation of themselves. The BSON bytes and type can be +// assumed to be valid. 
UnmarshalBSONValue must copy the BSON value bytes if it +// wishes to retain the data after returning. +func (id *ObjectId) UnmarshalBSONValue(_ bsontype.Type, data []byte) error { + var oid bsonprim.ObjectID + copy(oid[:], data) + *id = ObjectId(oid) + return nil +} + +// DeepCopyInto copies the receiver and writes its value into out. +func (id *ObjectId) DeepCopyInto(out *ObjectId) { + *out = *id +} + +// DeepCopy copies the receiver into a new ObjectId. +func (id *ObjectId) DeepCopy() *ObjectId { + if id == nil { + return nil + } + out := new(ObjectId) + id.DeepCopyInto(out) + return out +} diff --git a/vendor/github.com/go-openapi/strfmt/date.go b/vendor/github.com/go-openapi/strfmt/date.go new file mode 100644 index 0000000..3c93381 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/date.go @@ -0,0 +1,187 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package strfmt + +import ( + "database/sql/driver" + "encoding/json" + "errors" + "fmt" + "time" + + "go.mongodb.org/mongo-driver/bson" +) + +func init() { + d := Date{} + // register this format in the default registry + Default.Add("date", &d, IsDate) +} + +// IsDate returns true when the string is a valid date +func IsDate(str string) bool { + _, err := time.Parse(RFC3339FullDate, str) + return err == nil +} + +const ( + // RFC3339FullDate represents a full-date as specified by RFC3339 + // See: http://goo.gl/xXOvVd + RFC3339FullDate = "2006-01-02" +) + +// Date represents a date from the API +// +// swagger:strfmt date +type Date time.Time + +// String converts this date into a string +func (d Date) String() string { + return time.Time(d).Format(RFC3339FullDate) +} + +// UnmarshalText parses a text representation into a date type +func (d *Date) UnmarshalText(text []byte) error { + if len(text) == 0 { + return nil + } + dd, err := time.ParseInLocation(RFC3339FullDate, string(text), DefaultTimeLocation) + if err != nil { + return err + } + *d = Date(dd) + return nil +} + +// MarshalText serializes this date type to string +func (d Date) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// Scan scans a Date value from database driver type. +func (d *Date) Scan(raw interface{}) error { + switch v := raw.(type) { + case []byte: + return d.UnmarshalText(v) + case string: + return d.UnmarshalText([]byte(v)) + case time.Time: + *d = Date(v) + return nil + case nil: + *d = Date{} + return nil + default: + return fmt.Errorf("cannot sql.Scan() strfmt.Date from: %#v", v) + } +} + +// Value converts Date to a primitive value ready to written to a database. 
+func (d Date) Value() (driver.Value, error) { + return driver.Value(d.String()), nil +} + +// MarshalJSON returns the Date as JSON +func (d Date) MarshalJSON() ([]byte, error) { + return json.Marshal(time.Time(d).Format(RFC3339FullDate)) +} + +// UnmarshalJSON sets the Date from JSON +func (d *Date) UnmarshalJSON(data []byte) error { + if string(data) == jsonNull { + return nil + } + var strdate string + if err := json.Unmarshal(data, &strdate); err != nil { + return err + } + tt, err := time.ParseInLocation(RFC3339FullDate, strdate, DefaultTimeLocation) + if err != nil { + return err + } + *d = Date(tt) + return nil +} + +func (d Date) MarshalBSON() ([]byte, error) { + return bson.Marshal(bson.M{"data": d.String()}) +} + +func (d *Date) UnmarshalBSON(data []byte) error { + var m bson.M + if err := bson.Unmarshal(data, &m); err != nil { + return err + } + + if data, ok := m["data"].(string); ok { + rd, err := time.ParseInLocation(RFC3339FullDate, data, DefaultTimeLocation) + if err != nil { + return err + } + *d = Date(rd) + return nil + } + + return errors.New("couldn't unmarshal bson bytes value as Date") +} + +// DeepCopyInto copies the receiver and writes its value into out. +func (d *Date) DeepCopyInto(out *Date) { + *out = *d +} + +// DeepCopy copies the receiver into a new Date. +func (d *Date) DeepCopy() *Date { + if d == nil { + return nil + } + out := new(Date) + d.DeepCopyInto(out) + return out +} + +// GobEncode implements the gob.GobEncoder interface. +func (d Date) GobEncode() ([]byte, error) { + return d.MarshalBinary() +} + +// GobDecode implements the gob.GobDecoder interface. +func (d *Date) GobDecode(data []byte) error { + return d.UnmarshalBinary(data) +} + +// MarshalBinary implements the encoding.BinaryMarshaler interface. +func (d Date) MarshalBinary() ([]byte, error) { + return time.Time(d).MarshalBinary() +} + +// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. 
+func (d *Date) UnmarshalBinary(data []byte) error { + var original time.Time + + err := original.UnmarshalBinary(data) + if err != nil { + return err + } + + *d = Date(original) + + return nil +} + +// Equal checks if two Date instances are equal +func (d Date) Equal(d2 Date) bool { + return time.Time(d).Equal(time.Time(d2)) +} diff --git a/vendor/github.com/go-openapi/strfmt/default.go b/vendor/github.com/go-openapi/strfmt/default.go new file mode 100644 index 0000000..2813714 --- /dev/null +++ b/vendor/github.com/go-openapi/strfmt/default.go @@ -0,0 +1,2051 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package strfmt + +import ( + "database/sql/driver" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/mail" + "regexp" + "strings" + + "github.com/asaskevich/govalidator" + "github.com/google/uuid" + "go.mongodb.org/mongo-driver/bson" +) + +const ( + // HostnamePattern http://json-schema.org/latest/json-schema-validation.html#anchor114 + // A string instance is valid against this attribute if it is a valid + // representation for an Internet host name, as defined by RFC 1034, section 3.1 [RFC1034]. 
+ // http://tools.ietf.org/html/rfc1034#section-3.5 + // ::= any one of the ten digits 0 through 9 + // var digit = /[0-9]/; + // ::= any one of the 52 alphabetic characters A through Z in upper case and a through z in lower case + // var letter = /[a-zA-Z]/; + // ::= | + // var letDig = /[0-9a-zA-Z]/; + // ::= | "-" + // var letDigHyp = /[-0-9a-zA-Z]/; + // ::= | + // var ldhStr = /[-0-9a-zA-Z]+/; + //
http://github.com/tinylib/msgp, +where it provided about a 2x speedup over `bufio` for certain +workloads. However, care must be taken to understand the semantics of the +extra methods provided by this package, as they allow +the user to access and manipulate the buffer memory +directly. + +The extra methods for `fwd.Reader` are `Peek`, `Skip` +and `Next`. `(*fwd.Reader).Peek`, unlike `(*bufio.Reader).Peek`, +will re-allocate the read buffer in order to accommodate arbitrarily +large read-ahead. `(*fwd.Reader).Skip` skips the next `n` bytes +in the stream, and uses the `io.Seeker` interface if the underlying +stream implements it. `(*fwd.Reader).Next` returns a slice pointing +to the next `n` bytes in the read buffer (like `Peek`), but also +increments the read position. This allows users to process streams +in arbitrary block sizes without having to manage appropriately-sized +slices. Additionally, obviating the need to copy the data from the +buffer to another location in memory can improve performance dramatically +in CPU-bound applications. + +`fwd.Writer` only has one extra method, which is `(*fwd.Writer).Next`, which +returns a slice pointing to the next `n` bytes of the writer, and increments +the write position by the length of the returned slice. This allows users +to write directly to the end of the buffer. + + + + +## Constants +``` go +const ( + // DefaultReaderSize is the default size of the read buffer + DefaultReaderSize = 2048 +) +``` +``` go +const ( + // DefaultWriterSize is the + // default write buffer size. 
+ DefaultWriterSize = 2048 +) +``` + + + +## type Reader +``` go +type Reader struct { + // contains filtered or unexported fields +} +``` +Reader is a buffered look-ahead reader + + + + + + + + + +### func NewReader +``` go +func NewReader(r io.Reader) *Reader +``` +NewReader returns a new *Reader that reads from 'r' + + +### func NewReaderSize +``` go +func NewReaderSize(r io.Reader, n int) *Reader +``` +NewReaderSize returns a new *Reader that +reads from 'r' and has a buffer size 'n' + + + + +### func (\*Reader) BufferSize +``` go +func (r *Reader) BufferSize() int +``` +BufferSize returns the total size of the buffer + + + +### func (\*Reader) Buffered +``` go +func (r *Reader) Buffered() int +``` +Buffered returns the number of bytes currently in the buffer + + + +### func (\*Reader) Next +``` go +func (r *Reader) Next(n int) ([]byte, error) +``` +Next returns the next 'n' bytes in the stream. +Unlike Peek, Next advances the reader position. +The returned bytes point to the same +data as the buffer, so the slice is +only valid until the next reader method call. +An EOF is considered an unexpected error. +If an the returned slice is less than the +length asked for, an error will be returned, +and the reader position will not be incremented. + + + +### func (\*Reader) Peek +``` go +func (r *Reader) Peek(n int) ([]byte, error) +``` +Peek returns the next 'n' buffered bytes, +reading from the underlying reader if necessary. +It will only return a slice shorter than 'n' bytes +if it also returns an error. Peek does not advance +the reader. EOF errors are *not* returned as +io.ErrUnexpectedEOF. 
+ + + +### func (\*Reader) Read +``` go +func (r *Reader) Read(b []byte) (int, error) +``` +Read implements `io.Reader` + + + +### func (\*Reader) ReadByte +``` go +func (r *Reader) ReadByte() (byte, error) +``` +ReadByte implements `io.ByteReader` + + + +### func (\*Reader) ReadFull +``` go +func (r *Reader) ReadFull(b []byte) (int, error) +``` +ReadFull attempts to read len(b) bytes into +'b'. It returns the number of bytes read into +'b', and an error if it does not return len(b). +EOF is considered an unexpected error. + + + +### func (\*Reader) Reset +``` go +func (r *Reader) Reset(rd io.Reader) +``` +Reset resets the underlying reader +and the read buffer. + + + +### func (\*Reader) Skip +``` go +func (r *Reader) Skip(n int) (int, error) +``` +Skip moves the reader forward 'n' bytes. +Returns the number of bytes skipped and any +errors encountered. It is analogous to Seek(n, 1). +If the underlying reader implements io.Seeker, then +that method will be used to skip forward. + +If the reader encounters +an EOF before skipping 'n' bytes, it +returns io.ErrUnexpectedEOF. If the +underlying reader implements io.Seeker, then +those rules apply instead. (Many implementations +will not return `io.EOF` until the next call +to Read.) + + + +### func (\*Reader) WriteTo +``` go +func (r *Reader) WriteTo(w io.Writer) (int64, error) +``` +WriteTo implements `io.WriterTo` + + + +## type Writer +``` go +type Writer struct { + // contains filtered or unexported fields +} +``` +Writer is a buffered writer + + + + + + + + + +### func NewWriter +``` go +func NewWriter(w io.Writer) *Writer +``` +NewWriter returns a new writer +that writes to 'w' and has a buffer +that is `DefaultWriterSize` bytes. + + +### func NewWriterSize +``` go +func NewWriterSize(w io.Writer, size int) *Writer +``` +NewWriterSize returns a new writer +that writes to 'w' and has a buffer +that is 'size' bytes. 
+ + + + +### func (\*Writer) BufferSize +``` go +func (w *Writer) BufferSize() int +``` +BufferSize returns the maximum size of the buffer. + + + +### func (\*Writer) Buffered +``` go +func (w *Writer) Buffered() int +``` +Buffered returns the number of buffered bytes +in the reader. + + + +### func (\*Writer) Flush +``` go +func (w *Writer) Flush() error +``` +Flush flushes any buffered bytes +to the underlying writer. + + + +### func (\*Writer) Next +``` go +func (w *Writer) Next(n int) ([]byte, error) +``` +Next returns the next 'n' free bytes +in the write buffer, flushing the writer +as necessary. Next will return `io.ErrShortBuffer` +if 'n' is greater than the size of the write buffer. +Calls to 'next' increment the write position by +the size of the returned buffer. + + + +### func (\*Writer) ReadFrom +``` go +func (w *Writer) ReadFrom(r io.Reader) (int64, error) +``` +ReadFrom implements `io.ReaderFrom` + + + +### func (\*Writer) Write +``` go +func (w *Writer) Write(p []byte) (int, error) +``` +Write implements `io.Writer` + + + +### func (\*Writer) WriteByte +``` go +func (w *Writer) WriteByte(b byte) error +``` +WriteByte implements `io.ByteWriter` + + + +### func (\*Writer) WriteString +``` go +func (w *Writer) WriteString(s string) (int, error) +``` +WriteString is analogous to Write, but it takes a string. + + + + + + + + + +- - - +Generated by [godoc2md](http://godoc.org/github.com/davecheney/godoc2md) \ No newline at end of file diff --git a/vendor/github.com/philhofer/fwd/reader.go b/vendor/github.com/philhofer/fwd/reader.go new file mode 100644 index 0000000..75be62a --- /dev/null +++ b/vendor/github.com/philhofer/fwd/reader.go @@ -0,0 +1,383 @@ +// The `fwd` package provides a buffered reader +// and writer. Each has methods that help improve +// the encoding/decoding performance of some binary +// protocols. 
+// +// The `fwd.Writer` and `fwd.Reader` type provide similar +// functionality to their counterparts in `bufio`, plus +// a few extra utility methods that simplify read-ahead +// and write-ahead. I wrote this package to improve serialization +// performance for http://github.com/tinylib/msgp, +// where it provided about a 2x speedup over `bufio` for certain +// workloads. However, care must be taken to understand the semantics of the +// extra methods provided by this package, as they allow +// the user to access and manipulate the buffer memory +// directly. +// +// The extra methods for `fwd.Reader` are `Peek`, `Skip` +// and `Next`. `(*fwd.Reader).Peek`, unlike `(*bufio.Reader).Peek`, +// will re-allocate the read buffer in order to accommodate arbitrarily +// large read-ahead. `(*fwd.Reader).Skip` skips the next `n` bytes +// in the stream, and uses the `io.Seeker` interface if the underlying +// stream implements it. `(*fwd.Reader).Next` returns a slice pointing +// to the next `n` bytes in the read buffer (like `Peek`), but also +// increments the read position. This allows users to process streams +// in arbitrary block sizes without having to manage appropriately-sized +// slices. Additionally, obviating the need to copy the data from the +// buffer to another location in memory can improve performance dramatically +// in CPU-bound applications. +// +// `fwd.Writer` only has one extra method, which is `(*fwd.Writer).Next`, which +// returns a slice pointing to the next `n` bytes of the writer, and increments +// the write position by the length of the returned slice. This allows users +// to write directly to the end of the buffer. 
+// +package fwd + +import "io" + +const ( + // DefaultReaderSize is the default size of the read buffer + DefaultReaderSize = 2048 + + // minimum read buffer; straight from bufio + minReaderSize = 16 +) + +// NewReader returns a new *Reader that reads from 'r' +func NewReader(r io.Reader) *Reader { + return NewReaderSize(r, DefaultReaderSize) +} + +// NewReaderSize returns a new *Reader that +// reads from 'r' and has a buffer size 'n' +func NewReaderSize(r io.Reader, n int) *Reader { + rd := &Reader{ + r: r, + data: make([]byte, 0, max(minReaderSize, n)), + } + if s, ok := r.(io.Seeker); ok { + rd.rs = s + } + return rd +} + +// Reader is a buffered look-ahead reader +type Reader struct { + r io.Reader // underlying reader + + // data[n:len(data)] is buffered data; data[len(data):cap(data)] is free buffer space + data []byte // data + n int // read offset + state error // last read error + + // if the reader past to NewReader was + // also an io.Seeker, this is non-nil + rs io.Seeker +} + +// Reset resets the underlying reader +// and the read buffer. +func (r *Reader) Reset(rd io.Reader) { + r.r = rd + r.data = r.data[0:0] + r.n = 0 + r.state = nil + if s, ok := rd.(io.Seeker); ok { + r.rs = s + } else { + r.rs = nil + } +} + +// more() does one read on the underlying reader +func (r *Reader) more() { + // move data backwards so that + // the read offset is 0; this way + // we can supply the maximum number of + // bytes to the reader + if r.n != 0 { + if r.n < len(r.data) { + r.data = r.data[:copy(r.data[0:], r.data[r.n:])] + } else { + r.data = r.data[:0] + } + r.n = 0 + } + var a int + a, r.state = r.r.Read(r.data[len(r.data):cap(r.data)]) + if a == 0 && r.state == nil { + r.state = io.ErrNoProgress + return + } else if a > 0 && r.state == io.EOF { + // discard the io.EOF if we read more than 0 bytes. + // the next call to Read should return io.EOF again. 
+ r.state = nil + } + r.data = r.data[:len(r.data)+a] +} + +// pop error +func (r *Reader) err() (e error) { + e, r.state = r.state, nil + return +} + +// pop error; EOF -> io.ErrUnexpectedEOF +func (r *Reader) noEOF() (e error) { + e, r.state = r.state, nil + if e == io.EOF { + e = io.ErrUnexpectedEOF + } + return +} + +// buffered bytes +func (r *Reader) buffered() int { return len(r.data) - r.n } + +// Buffered returns the number of bytes currently in the buffer +func (r *Reader) Buffered() int { return len(r.data) - r.n } + +// BufferSize returns the total size of the buffer +func (r *Reader) BufferSize() int { return cap(r.data) } + +// Peek returns the next 'n' buffered bytes, +// reading from the underlying reader if necessary. +// It will only return a slice shorter than 'n' bytes +// if it also returns an error. Peek does not advance +// the reader. EOF errors are *not* returned as +// io.ErrUnexpectedEOF. +func (r *Reader) Peek(n int) ([]byte, error) { + // in the degenerate case, + // we may need to realloc + // (the caller asked for more + // bytes than the size of the buffer) + if cap(r.data) < n { + old := r.data[r.n:] + r.data = make([]byte, n+r.buffered()) + r.data = r.data[:copy(r.data, old)] + r.n = 0 + } + + // keep filling until + // we hit an error or + // read enough bytes + for r.buffered() < n && r.state == nil { + r.more() + } + + // we must have hit an error + if r.buffered() < n { + return r.data[r.n:], r.err() + } + + return r.data[r.n : r.n+n], nil +} + +// Skip moves the reader forward 'n' bytes. +// Returns the number of bytes skipped and any +// errors encountered. It is analogous to Seek(n, 1). +// If the underlying reader implements io.Seeker, then +// that method will be used to skip forward. +// +// If the reader encounters +// an EOF before skipping 'n' bytes, it +// returns io.ErrUnexpectedEOF. If the +// underlying reader implements io.Seeker, then +// those rules apply instead. 
(Many implementations +// will not return `io.EOF` until the next call +// to Read.) +func (r *Reader) Skip(n int) (int, error) { + + // fast path + if r.buffered() >= n { + r.n += n + return n, nil + } + + // use seeker implementation + // if we can + if r.rs != nil { + return r.skipSeek(n) + } + + // loop on filling + // and then erasing + o := n + for r.buffered() < n && r.state == nil { + r.more() + // we can skip forward + // up to r.buffered() bytes + step := min(r.buffered(), n) + r.n += step + n -= step + } + // at this point, n should be + // 0 if everything went smoothly + return o - n, r.noEOF() +} + +// Next returns the next 'n' bytes in the stream. +// Unlike Peek, Next advances the reader position. +// The returned bytes point to the same +// data as the buffer, so the slice is +// only valid until the next reader method call. +// An EOF is considered an unexpected error. +// If an the returned slice is less than the +// length asked for, an error will be returned, +// and the reader position will not be incremented. +func (r *Reader) Next(n int) ([]byte, error) { + + // in case the buffer is too small + if cap(r.data) < n { + old := r.data[r.n:] + r.data = make([]byte, n+r.buffered()) + r.data = r.data[:copy(r.data, old)] + r.n = 0 + } + + // fill at least 'n' bytes + for r.buffered() < n && r.state == nil { + r.more() + } + + if r.buffered() < n { + return r.data[r.n:], r.noEOF() + } + out := r.data[r.n : r.n+n] + r.n += n + return out, nil +} + +// skipSeek uses the io.Seeker to seek forward. +// only call this function when n > r.buffered() +func (r *Reader) skipSeek(n int) (int, error) { + o := r.buffered() + // first, clear buffer + n -= o + r.n = 0 + r.data = r.data[:0] + + // then seek forward remaning bytes + i, err := r.rs.Seek(int64(n), 1) + return int(i) + o, err +} + +// Read implements `io.Reader` +func (r *Reader) Read(b []byte) (int, error) { + // if we have data in the buffer, just + // return that. 
+ if r.buffered() != 0 { + x := copy(b, r.data[r.n:]) + r.n += x + return x, nil + } + var n int + // we have no buffered data; determine + // whether or not to buffer or call + // the underlying reader directly + if len(b) >= cap(r.data) { + n, r.state = r.r.Read(b) + } else { + r.more() + n = copy(b, r.data) + r.n = n + } + if n == 0 { + return 0, r.err() + } + return n, nil +} + +// ReadFull attempts to read len(b) bytes into +// 'b'. It returns the number of bytes read into +// 'b', and an error if it does not return len(b). +// EOF is considered an unexpected error. +func (r *Reader) ReadFull(b []byte) (int, error) { + var n int // read into b + var nn int // scratch + l := len(b) + // either read buffered data, + // or read directly for the underlying + // buffer, or fetch more buffered data. + for n < l && r.state == nil { + if r.buffered() != 0 { + nn = copy(b[n:], r.data[r.n:]) + n += nn + r.n += nn + } else if l-n > cap(r.data) { + nn, r.state = r.r.Read(b[n:]) + n += nn + } else { + r.more() + } + } + if n < l { + return n, r.noEOF() + } + return n, nil +} + +// ReadByte implements `io.ByteReader` +func (r *Reader) ReadByte() (byte, error) { + for r.buffered() < 1 && r.state == nil { + r.more() + } + if r.buffered() < 1 { + return 0, r.err() + } + b := r.data[r.n] + r.n++ + return b, nil +} + +// WriteTo implements `io.WriterTo` +func (r *Reader) WriteTo(w io.Writer) (int64, error) { + var ( + i int64 + ii int + err error + ) + // first, clear buffer + if r.buffered() > 0 { + ii, err = w.Write(r.data[r.n:]) + i += int64(ii) + if err != nil { + return i, err + } + r.data = r.data[0:0] + r.n = 0 + } + for r.state == nil { + // here we just do + // 1:1 reads and writes + r.more() + if r.buffered() > 0 { + ii, err = w.Write(r.data) + i += int64(ii) + if err != nil { + return i, err + } + r.data = r.data[0:0] + r.n = 0 + } + } + if r.state != io.EOF { + return i, r.err() + } + return i, nil +} + +func min(a int, b int) int { + if a < b { + return a + } + 
return b +} + +func max(a int, b int) int { + if a < b { + return b + } + return a +} diff --git a/vendor/github.com/philhofer/fwd/writer.go b/vendor/github.com/philhofer/fwd/writer.go new file mode 100644 index 0000000..2dc392a --- /dev/null +++ b/vendor/github.com/philhofer/fwd/writer.go @@ -0,0 +1,224 @@ +package fwd + +import "io" + +const ( + // DefaultWriterSize is the + // default write buffer size. + DefaultWriterSize = 2048 + + minWriterSize = minReaderSize +) + +// Writer is a buffered writer +type Writer struct { + w io.Writer // writer + buf []byte // 0:len(buf) is bufered data +} + +// NewWriter returns a new writer +// that writes to 'w' and has a buffer +// that is `DefaultWriterSize` bytes. +func NewWriter(w io.Writer) *Writer { + if wr, ok := w.(*Writer); ok { + return wr + } + return &Writer{ + w: w, + buf: make([]byte, 0, DefaultWriterSize), + } +} + +// NewWriterSize returns a new writer +// that writes to 'w' and has a buffer +// that is 'size' bytes. +func NewWriterSize(w io.Writer, size int) *Writer { + if wr, ok := w.(*Writer); ok && cap(wr.buf) >= size { + return wr + } + return &Writer{ + w: w, + buf: make([]byte, 0, max(size, minWriterSize)), + } +} + +// Buffered returns the number of buffered bytes +// in the reader. +func (w *Writer) Buffered() int { return len(w.buf) } + +// BufferSize returns the maximum size of the buffer. +func (w *Writer) BufferSize() int { return cap(w.buf) } + +// Flush flushes any buffered bytes +// to the underlying writer. +func (w *Writer) Flush() error { + l := len(w.buf) + if l > 0 { + n, err := w.w.Write(w.buf) + + // if we didn't write the whole + // thing, copy the unwritten + // bytes to the beginnning of the + // buffer. 
+ if n < l && n > 0 { + w.pushback(n) + if err == nil { + err = io.ErrShortWrite + } + } + if err != nil { + return err + } + w.buf = w.buf[:0] + return nil + } + return nil +} + +// Write implements `io.Writer` +func (w *Writer) Write(p []byte) (int, error) { + c, l, ln := cap(w.buf), len(w.buf), len(p) + avail := c - l + + // requires flush + if avail < ln { + if err := w.Flush(); err != nil { + return 0, err + } + l = len(w.buf) + } + // too big to fit in buffer; + // write directly to w.w + if c < ln { + return w.w.Write(p) + } + + // grow buf slice; copy; return + w.buf = w.buf[:l+ln] + return copy(w.buf[l:], p), nil +} + +// WriteString is analogous to Write, but it takes a string. +func (w *Writer) WriteString(s string) (int, error) { + c, l, ln := cap(w.buf), len(w.buf), len(s) + avail := c - l + + // requires flush + if avail < ln { + if err := w.Flush(); err != nil { + return 0, err + } + l = len(w.buf) + } + // too big to fit in buffer; + // write directly to w.w + // + // yes, this is unsafe. *but* + // io.Writer is not allowed + // to mutate its input or + // maintain a reference to it, + // per the spec in package io. + // + // plus, if the string is really + // too big to fit in the buffer, then + // creating a copy to write it is + // expensive (and, strictly speaking, + // unnecessary) + if c < ln { + return w.w.Write(unsafestr(s)) + } + + // grow buf slice; copy; return + w.buf = w.buf[:l+ln] + return copy(w.buf[l:], s), nil +} + +// WriteByte implements `io.ByteWriter` +func (w *Writer) WriteByte(b byte) error { + if len(w.buf) == cap(w.buf) { + if err := w.Flush(); err != nil { + return err + } + } + w.buf = append(w.buf, b) + return nil +} + +// Next returns the next 'n' free bytes +// in the write buffer, flushing the writer +// as necessary. Next will return `io.ErrShortBuffer` +// if 'n' is greater than the size of the write buffer. +// Calls to 'next' increment the write position by +// the size of the returned buffer. 
+func (w *Writer) Next(n int) ([]byte, error) { + c, l := cap(w.buf), len(w.buf) + if n > c { + return nil, io.ErrShortBuffer + } + avail := c - l + if avail < n { + if err := w.Flush(); err != nil { + return nil, err + } + l = len(w.buf) + } + w.buf = w.buf[:l+n] + return w.buf[l:], nil +} + +// take the bytes from w.buf[n:len(w.buf)] +// and put them at the beginning of w.buf, +// and resize to the length of the copied segment. +func (w *Writer) pushback(n int) { + w.buf = w.buf[:copy(w.buf, w.buf[n:])] +} + +// ReadFrom implements `io.ReaderFrom` +func (w *Writer) ReadFrom(r io.Reader) (int64, error) { + // anticipatory flush + if err := w.Flush(); err != nil { + return 0, err + } + + w.buf = w.buf[0:cap(w.buf)] // expand buffer + + var nn int64 // written + var err error // error + var x int // read + + // 1:1 reads and writes + for err == nil { + x, err = r.Read(w.buf) + if x > 0 { + n, werr := w.w.Write(w.buf[:x]) + nn += int64(n) + + if err != nil { + if n < x && n > 0 { + w.pushback(n - x) + } + return nn, werr + } + if n < x { + w.pushback(n - x) + return nn, io.ErrShortWrite + } + } else if err == nil { + err = io.ErrNoProgress + break + } + } + if err != io.EOF { + return nn, err + } + + // we only clear here + // because we are sure + // the writes have + // succeeded. otherwise, + // we retain the data in case + // future writes succeed. 
+ w.buf = w.buf[0:0] + + return nn, nil +} diff --git a/vendor/github.com/philhofer/fwd/writer_appengine.go b/vendor/github.com/philhofer/fwd/writer_appengine.go new file mode 100644 index 0000000..e367f39 --- /dev/null +++ b/vendor/github.com/philhofer/fwd/writer_appengine.go @@ -0,0 +1,5 @@ +// +build appengine + +package fwd + +func unsafestr(s string) []byte { return []byte(s) } diff --git a/vendor/github.com/philhofer/fwd/writer_unsafe.go b/vendor/github.com/philhofer/fwd/writer_unsafe.go new file mode 100644 index 0000000..a0bf453 --- /dev/null +++ b/vendor/github.com/philhofer/fwd/writer_unsafe.go @@ -0,0 +1,18 @@ +// +build !appengine + +package fwd + +import ( + "reflect" + "unsafe" +) + +// unsafe cast string as []byte +func unsafestr(b string) []byte { + l := len(b) + return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{ + Len: l, + Cap: l, + Data: (*reflect.StringHeader)(unsafe.Pointer(&b)).Data, + })) +} diff --git a/vendor/github.com/pkg/errors/.gitignore b/vendor/github.com/pkg/errors/.gitignore new file mode 100644 index 0000000..daf913b --- /dev/null +++ b/vendor/github.com/pkg/errors/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/pkg/errors/.travis.yml b/vendor/github.com/pkg/errors/.travis.yml new file mode 100644 index 0000000..9159de0 --- /dev/null +++ b/vendor/github.com/pkg/errors/.travis.yml @@ -0,0 +1,10 @@ +language: go +go_import_path: github.com/pkg/errors +go: + - 1.11.x + - 1.12.x + - 1.13.x + - tip + +script: + - make check diff --git a/vendor/github.com/pkg/errors/LICENSE b/vendor/github.com/pkg/errors/LICENSE new file mode 100644 index 0000000..835ba3e --- /dev/null +++ b/vendor/github.com/pkg/errors/LICENSE @@ -0,0 +1,23 @@ +Copyright 
(c) 2015, Dave Cheney +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/pkg/errors/Makefile b/vendor/github.com/pkg/errors/Makefile new file mode 100644 index 0000000..ce9d7cd --- /dev/null +++ b/vendor/github.com/pkg/errors/Makefile @@ -0,0 +1,44 @@ +PKGS := github.com/pkg/errors +SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS)) +GO := go + +check: test vet gofmt misspell unconvert staticcheck ineffassign unparam + +test: + $(GO) test $(PKGS) + +vet: | test + $(GO) vet $(PKGS) + +staticcheck: + $(GO) get honnef.co/go/tools/cmd/staticcheck + staticcheck -checks all $(PKGS) + +misspell: + $(GO) get github.com/client9/misspell/cmd/misspell + misspell \ + -locale GB \ + -error \ + *.md *.go + +unconvert: + $(GO) get github.com/mdempsky/unconvert + unconvert -v $(PKGS) + +ineffassign: + $(GO) get github.com/gordonklaus/ineffassign + find $(SRCDIRS) -name '*.go' | xargs ineffassign + +pedantic: check errcheck + +unparam: + $(GO) get mvdan.cc/unparam + unparam ./... + +errcheck: + $(GO) get github.com/kisielk/errcheck + errcheck $(PKGS) + +gofmt: + @echo Checking code is gofmted + @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)" diff --git a/vendor/github.com/pkg/errors/README.md b/vendor/github.com/pkg/errors/README.md new file mode 100644 index 0000000..54dfdcb --- /dev/null +++ b/vendor/github.com/pkg/errors/README.md @@ -0,0 +1,59 @@ +# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge) + +Package errors provides simple error handling 
primitives. + +`go get github.com/pkg/errors` + +The traditional error handling idiom in Go is roughly akin to +```go +if err != nil { + return err +} +``` +which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error. + +## Adding context to an error + +The errors.Wrap function returns a new error that adds context to the original error. For example +```go +_, err := ioutil.ReadAll(r) +if err != nil { + return errors.Wrap(err, "read failed") +} +``` +## Retrieving the cause of an error + +Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`. +```go +type causer interface { + Cause() error +} +``` +`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example: +```go +switch err := errors.Cause(err).(type) { +case *MyError: + // handle specifically +default: + // unknown error +} +``` + +[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors). + +## Roadmap + +With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows: + +- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible) +- 1.0. Final release. + +## Contributing + +Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports. 
+ +Before sending a PR, please discuss your change by raising an issue. + +## License + +BSD-2-Clause diff --git a/vendor/github.com/pkg/errors/appveyor.yml b/vendor/github.com/pkg/errors/appveyor.yml new file mode 100644 index 0000000..a932ead --- /dev/null +++ b/vendor/github.com/pkg/errors/appveyor.yml @@ -0,0 +1,32 @@ +version: build-{build}.{branch} + +clone_folder: C:\gopath\src\github.com\pkg\errors +shallow_clone: true # for startup speed + +environment: + GOPATH: C:\gopath + +platform: + - x64 + +# http://www.appveyor.com/docs/installed-software +install: + # some helpful output for debugging builds + - go version + - go env + # pre-installed MinGW at C:\MinGW is 32bit only + # but MSYS2 at C:\msys64 has mingw64 + - set PATH=C:\msys64\mingw64\bin;%PATH% + - gcc --version + - g++ --version + +build_script: + - go install -v ./... + +test_script: + - set PATH=C:\gopath\bin;%PATH% + - go test -v ./... + +#artifacts: +# - path: '%GOPATH%\bin\*.exe' +deploy: off diff --git a/vendor/github.com/pkg/errors/errors.go b/vendor/github.com/pkg/errors/errors.go new file mode 100644 index 0000000..161aea2 --- /dev/null +++ b/vendor/github.com/pkg/errors/errors.go @@ -0,0 +1,288 @@ +// Package errors provides simple error handling primitives. +// +// The traditional error handling idiom in Go is roughly akin to +// +// if err != nil { +// return err +// } +// +// which when applied recursively up the call stack results in error reports +// without context or debugging information. The errors package allows +// programmers to add context to the failure path in their code in a way +// that does not destroy the original value of the error. +// +// Adding context to an error +// +// The errors.Wrap function returns a new error that adds context to the +// original error by recording a stack trace at the point Wrap is called, +// together with the supplied message. 
For example +// +// _, err := ioutil.ReadAll(r) +// if err != nil { +// return errors.Wrap(err, "read failed") +// } +// +// If additional control is required, the errors.WithStack and +// errors.WithMessage functions destructure errors.Wrap into its component +// operations: annotating an error with a stack trace and with a message, +// respectively. +// +// Retrieving the cause of an error +// +// Using errors.Wrap constructs a stack of errors, adding context to the +// preceding error. Depending on the nature of the error it may be necessary +// to reverse the operation of errors.Wrap to retrieve the original error +// for inspection. Any error value which implements this interface +// +// type causer interface { +// Cause() error +// } +// +// can be inspected by errors.Cause. errors.Cause will recursively retrieve +// the topmost error that does not implement causer, which is assumed to be +// the original cause. For example: +// +// switch err := errors.Cause(err).(type) { +// case *MyError: +// // handle specifically +// default: +// // unknown error +// } +// +// Although the causer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// Formatted printing of errors +// +// All error values returned from this package implement fmt.Formatter and can +// be formatted by the fmt package. The following verbs are supported: +// +// %s print the error. If the error has a Cause it will be +// printed recursively. +// %v see %s +// %+v extended format. Each Frame of the error's StackTrace will +// be printed in detail. +// +// Retrieving the stack trace of an error or wrapper +// +// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are +// invoked. 
This information can be retrieved with the following interface: +// +// type stackTracer interface { +// StackTrace() errors.StackTrace +// } +// +// The returned errors.StackTrace type is defined as +// +// type StackTrace []Frame +// +// The Frame type represents a call site in the stack trace. Frame supports +// the fmt.Formatter interface that can be used for printing information about +// the stack trace of this error. For example: +// +// if err, ok := err.(stackTracer); ok { +// for _, f := range err.StackTrace() { +// fmt.Printf("%+s:%d\n", f, f) +// } +// } +// +// Although the stackTracer interface is not exported by this package, it is +// considered a part of its stable public interface. +// +// See the documentation for Frame.Format for more details. +package errors + +import ( + "fmt" + "io" +) + +// New returns an error with the supplied message. +// New also records the stack trace at the point it was called. +func New(message string) error { + return &fundamental{ + msg: message, + stack: callers(), + } +} + +// Errorf formats according to a format specifier and returns the string +// as a value that satisfies error. +// Errorf also records the stack trace at the point it was called. +func Errorf(format string, args ...interface{}) error { + return &fundamental{ + msg: fmt.Sprintf(format, args...), + stack: callers(), + } +} + +// fundamental is an error that has a message and a stack, but no caller. +type fundamental struct { + msg string + *stack +} + +func (f *fundamental) Error() string { return f.msg } + +func (f *fundamental) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + io.WriteString(s, f.msg) + f.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, f.msg) + case 'q': + fmt.Fprintf(s, "%q", f.msg) + } +} + +// WithStack annotates err with a stack trace at the point WithStack was called. +// If err is nil, WithStack returns nil. 
+func WithStack(err error) error { + if err == nil { + return nil + } + return &withStack{ + err, + callers(), + } +} + +type withStack struct { + error + *stack +} + +func (w *withStack) Cause() error { return w.error } + +// Unwrap provides compatibility for Go 1.13 error chains. +func (w *withStack) Unwrap() error { return w.error } + +func (w *withStack) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v", w.Cause()) + w.stack.Format(s, verb) + return + } + fallthrough + case 's': + io.WriteString(s, w.Error()) + case 'q': + fmt.Fprintf(s, "%q", w.Error()) + } +} + +// Wrap returns an error annotating err with a stack trace +// at the point Wrap is called, and the supplied message. +// If err is nil, Wrap returns nil. +func Wrap(err error, message string) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: message, + } + return &withStack{ + err, + callers(), + } +} + +// Wrapf returns an error annotating err with a stack trace +// at the point Wrapf is called, and the format specifier. +// If err is nil, Wrapf returns nil. +func Wrapf(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + err = &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } + return &withStack{ + err, + callers(), + } +} + +// WithMessage annotates err with a new message. +// If err is nil, WithMessage returns nil. +func WithMessage(err error, message string) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: message, + } +} + +// WithMessagef annotates err with the format specifier. +// If err is nil, WithMessagef returns nil. 
+func WithMessagef(err error, format string, args ...interface{}) error { + if err == nil { + return nil + } + return &withMessage{ + cause: err, + msg: fmt.Sprintf(format, args...), + } +} + +type withMessage struct { + cause error + msg string +} + +func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() } +func (w *withMessage) Cause() error { return w.cause } + +// Unwrap provides compatibility for Go 1.13 error chains. +func (w *withMessage) Unwrap() error { return w.cause } + +func (w *withMessage) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%+v\n", w.Cause()) + io.WriteString(s, w.msg) + return + } + fallthrough + case 's', 'q': + io.WriteString(s, w.Error()) + } +} + +// Cause returns the underlying cause of the error, if possible. +// An error value has a cause if it implements the following +// interface: +// +// type causer interface { +// Cause() error +// } +// +// If the error does not implement Cause, the original error will +// be returned. If the error is nil, nil will be returned without further +// investigation. +func Cause(err error) error { + type causer interface { + Cause() error + } + + for err != nil { + cause, ok := err.(causer) + if !ok { + break + } + err = cause.Cause() + } + return err +} diff --git a/vendor/github.com/pkg/errors/go113.go b/vendor/github.com/pkg/errors/go113.go new file mode 100644 index 0000000..be0d10d --- /dev/null +++ b/vendor/github.com/pkg/errors/go113.go @@ -0,0 +1,38 @@ +// +build go1.13 + +package errors + +import ( + stderrors "errors" +) + +// Is reports whether any error in err's chain matches target. +// +// The chain consists of err itself followed by the sequence of errors obtained by +// repeatedly calling Unwrap. +// +// An error is considered to match a target if it is equal to that target or if +// it implements a method Is(error) bool such that Is(target) returns true. 
+func Is(err, target error) bool { return stderrors.Is(err, target) } + +// As finds the first error in err's chain that matches target, and if so, sets +// target to that error value and returns true. +// +// The chain consists of err itself followed by the sequence of errors obtained by +// repeatedly calling Unwrap. +// +// An error matches target if the error's concrete value is assignable to the value +// pointed to by target, or if the error has a method As(interface{}) bool such that +// As(target) returns true. In the latter case, the As method is responsible for +// setting target. +// +// As will panic if target is not a non-nil pointer to either a type that implements +// error, or to any interface type. As returns false if err is nil. +func As(err error, target interface{}) bool { return stderrors.As(err, target) } + +// Unwrap returns the result of calling the Unwrap method on err, if err's +// type contains an Unwrap method returning error. +// Otherwise, Unwrap returns nil. +func Unwrap(err error) error { + return stderrors.Unwrap(err) +} diff --git a/vendor/github.com/pkg/errors/stack.go b/vendor/github.com/pkg/errors/stack.go new file mode 100644 index 0000000..779a834 --- /dev/null +++ b/vendor/github.com/pkg/errors/stack.go @@ -0,0 +1,177 @@ +package errors + +import ( + "fmt" + "io" + "path" + "runtime" + "strconv" + "strings" +) + +// Frame represents a program counter inside a stack frame. +// For historical reasons if Frame is interpreted as a uintptr +// its value represents the program counter + 1. +type Frame uintptr + +// pc returns the program counter for this frame; +// multiple frames may have the same PC value. +func (f Frame) pc() uintptr { return uintptr(f) - 1 } + +// file returns the full path to the file that contains the +// function for this Frame's pc. 
+func (f Frame) file() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + file, _ := fn.FileLine(f.pc()) + return file +} + +// line returns the line number of source code of the +// function for this Frame's pc. +func (f Frame) line() int { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return 0 + } + _, line := fn.FileLine(f.pc()) + return line +} + +// name returns the name of this function, if known. +func (f Frame) name() string { + fn := runtime.FuncForPC(f.pc()) + if fn == nil { + return "unknown" + } + return fn.Name() +} + +// Format formats the frame according to the fmt.Formatter interface. +// +// %s source file +// %d source line +// %n function name +// %v equivalent to %s:%d +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+s function name and path of source file relative to the compile time +// GOPATH separated by \n\t (\n\t) +// %+v equivalent to %+s:%d +func (f Frame) Format(s fmt.State, verb rune) { + switch verb { + case 's': + switch { + case s.Flag('+'): + io.WriteString(s, f.name()) + io.WriteString(s, "\n\t") + io.WriteString(s, f.file()) + default: + io.WriteString(s, path.Base(f.file())) + } + case 'd': + io.WriteString(s, strconv.Itoa(f.line())) + case 'n': + io.WriteString(s, funcname(f.name())) + case 'v': + f.Format(s, 's') + io.WriteString(s, ":") + f.Format(s, 'd') + } +} + +// MarshalText formats a stacktrace Frame as a text string. The output is the +// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs. +func (f Frame) MarshalText() ([]byte, error) { + name := f.name() + if name == "unknown" { + return []byte(name), nil + } + return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil +} + +// StackTrace is stack of Frames from innermost (newest) to outermost (oldest). +type StackTrace []Frame + +// Format formats the stack of Frames according to the fmt.Formatter interface. 
+// +// %s lists source files for each Frame in the stack +// %v lists the source file and line number for each Frame in the stack +// +// Format accepts flags that alter the printing of some verbs, as follows: +// +// %+v Prints filename, function, and line number for each Frame in the stack. +func (st StackTrace) Format(s fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case s.Flag('+'): + for _, f := range st { + io.WriteString(s, "\n") + f.Format(s, verb) + } + case s.Flag('#'): + fmt.Fprintf(s, "%#v", []Frame(st)) + default: + st.formatSlice(s, verb) + } + case 's': + st.formatSlice(s, verb) + } +} + +// formatSlice will format this StackTrace into the given buffer as a slice of +// Frame, only valid when called with '%s' or '%v'. +func (st StackTrace) formatSlice(s fmt.State, verb rune) { + io.WriteString(s, "[") + for i, f := range st { + if i > 0 { + io.WriteString(s, " ") + } + f.Format(s, verb) + } + io.WriteString(s, "]") +} + +// stack represents a stack of program counters. +type stack []uintptr + +func (s *stack) Format(st fmt.State, verb rune) { + switch verb { + case 'v': + switch { + case st.Flag('+'): + for _, pc := range *s { + f := Frame(pc) + fmt.Fprintf(st, "\n%+v", f) + } + } + } +} + +func (s *stack) StackTrace() StackTrace { + f := make([]Frame, len(*s)) + for i := 0; i < len(f); i++ { + f[i] = Frame((*s)[i]) + } + return f +} + +func callers() *stack { + const depth = 32 + var pcs [depth]uintptr + n := runtime.Callers(3, pcs[:]) + var st stack = pcs[0:n] + return &st +} + +// funcname removes the path prefix component of a function's name reported by func.Name(). 
+func funcname(name string) string { + i := strings.LastIndex(name, "/") + name = name[i+1:] + i = strings.Index(name, ".") + return name[i+1:] +} diff --git a/vendor/github.com/rogpeppe/go-internal/LICENSE b/vendor/github.com/rogpeppe/go-internal/LICENSE new file mode 100644 index 0000000..49ea0f9 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2018 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go b/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go new file mode 100644 index 0000000..98e4e38 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go @@ -0,0 +1,20 @@ +package fmtsort + +import "reflect" + +const brokenNaNs = false + +func mapElems(mapValue reflect.Value) ([]reflect.Value, []reflect.Value) { + // Note: this code is arranged to not panic even in the presence + // of a concurrent map update. The runtime is responsible for + // yelling loudly if that happens. See issue 33275. + n := mapValue.Len() + key := make([]reflect.Value, 0, n) + value := make([]reflect.Value, 0, n) + iter := mapValue.MapRange() + for iter.Next() { + key = append(key, iter.Key()) + value = append(value, iter.Value()) + } + return key, value +} diff --git a/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go b/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go new file mode 100644 index 0000000..7f51854 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go @@ -0,0 +1,209 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fmtsort provides a general stable ordering mechanism +// for maps, on behalf of the fmt and text/template packages. +// It is not guaranteed to be efficient and works only for types +// that are valid map keys. +package fmtsort + +import ( + "reflect" + "sort" +) + +// Note: Throughout this package we avoid calling reflect.Value.Interface as +// it is not always legal to do so and it's easier to avoid the issue than to face it. + +// SortedMap represents a map's keys and values. The keys and values are +// aligned in index order: Value[i] is the value in the map corresponding to Key[i]. 
+type SortedMap struct { + Key []reflect.Value + Value []reflect.Value +} + +func (o *SortedMap) Len() int { return len(o.Key) } +func (o *SortedMap) Less(i, j int) bool { return compare(o.Key[i], o.Key[j]) < 0 } +func (o *SortedMap) Swap(i, j int) { + o.Key[i], o.Key[j] = o.Key[j], o.Key[i] + o.Value[i], o.Value[j] = o.Value[j], o.Value[i] +} + +// Sort accepts a map and returns a SortedMap that has the same keys and +// values but in a stable sorted order according to the keys, modulo issues +// raised by unorderable key values such as NaNs. +// +// The ordering rules are more general than with Go's < operator: +// +// - when applicable, nil compares low +// - ints, floats, and strings order by < +// - NaN compares less than non-NaN floats +// - bool compares false before true +// - complex compares real, then imag +// - pointers compare by machine address +// - channel values compare by machine address +// - structs compare each field in turn +// - arrays compare each element in turn. +// Otherwise identical arrays compare by length. +// - interface values compare first by reflect.Type describing the concrete type +// and then by concrete value as described in the previous rules. +func Sort(mapValue reflect.Value) *SortedMap { + if mapValue.Type().Kind() != reflect.Map { + return nil + } + key, value := mapElems(mapValue) + sorted := &SortedMap{ + Key: key, + Value: value, + } + sort.Stable(sorted) + return sorted +} + +// compare compares two values of the same type. It returns -1, 0, 1 +// according to whether a > b (1), a == b (0), or a < b (-1). +// If the types differ, it returns -1. +// See the comment on Sort for the comparison rules. +func compare(aVal, bVal reflect.Value) int { + aType, bType := aVal.Type(), bVal.Type() + if aType != bType { + return -1 // No good answer possible, but don't return 0: they're not equal. 
+ } + switch aVal.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + a, b := aVal.Int(), bVal.Int() + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + a, b := aVal.Uint(), bVal.Uint() + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } + case reflect.String: + a, b := aVal.String(), bVal.String() + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } + case reflect.Float32, reflect.Float64: + return floatCompare(aVal.Float(), bVal.Float()) + case reflect.Complex64, reflect.Complex128: + a, b := aVal.Complex(), bVal.Complex() + if c := floatCompare(real(a), real(b)); c != 0 { + return c + } + return floatCompare(imag(a), imag(b)) + case reflect.Bool: + a, b := aVal.Bool(), bVal.Bool() + switch { + case a == b: + return 0 + case a: + return 1 + default: + return -1 + } + case reflect.Ptr: + a, b := aVal.Pointer(), bVal.Pointer() + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } + case reflect.Chan: + if c, ok := nilCompare(aVal, bVal); ok { + return c + } + ap, bp := aVal.Pointer(), bVal.Pointer() + switch { + case ap < bp: + return -1 + case ap > bp: + return 1 + default: + return 0 + } + case reflect.Struct: + for i := 0; i < aVal.NumField(); i++ { + if c := compare(aVal.Field(i), bVal.Field(i)); c != 0 { + return c + } + } + return 0 + case reflect.Array: + for i := 0; i < aVal.Len(); i++ { + if c := compare(aVal.Index(i), bVal.Index(i)); c != 0 { + return c + } + } + return 0 + case reflect.Interface: + if c, ok := nilCompare(aVal, bVal); ok { + return c + } + c := compare(reflect.ValueOf(aVal.Elem().Type()), reflect.ValueOf(bVal.Elem().Type())) + if c != 0 { + return c + } + return compare(aVal.Elem(), bVal.Elem()) + default: + // Certain types cannot appear as keys (maps, funcs, 
slices), but be explicit. + panic("bad type in compare: " + aType.String()) + } +} + +// nilCompare checks whether either value is nil. If not, the boolean is false. +// If either value is nil, the boolean is true and the integer is the comparison +// value. The comparison is defined to be 0 if both are nil, otherwise the one +// nil value compares low. Both arguments must represent a chan, func, +// interface, map, pointer, or slice. +func nilCompare(aVal, bVal reflect.Value) (int, bool) { + if aVal.IsNil() { + if bVal.IsNil() { + return 0, true + } + return -1, true + } + if bVal.IsNil() { + return 1, true + } + return 0, false +} + +// floatCompare compares two floating-point values. NaNs compare low. +func floatCompare(a, b float64) int { + switch { + case isNaN(a): + return -1 // No good answer if b is a NaN so don't bother checking. + case isNaN(b): + return 1 + case a < b: + return -1 + case a > b: + return 1 + } + return 0 +} + +func isNaN(a float64) bool { + return a != a +} diff --git a/vendor/github.com/rsc/goversion/LICENSE b/vendor/github.com/rsc/goversion/LICENSE new file mode 100644 index 0000000..6a66aea --- /dev/null +++ b/vendor/github.com/rsc/goversion/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/rsc/goversion/version/asm.go b/vendor/github.com/rsc/goversion/version/asm.go new file mode 100644 index 0000000..3d5122b --- /dev/null +++ b/vendor/github.com/rsc/goversion/version/asm.go @@ -0,0 +1,349 @@ +// Copyright 2017 The Go Authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package version + +import ( + "encoding/binary" + "fmt" + "os" +) + +type matcher [][]uint32 + +const ( + pWild uint32 = 0xff00 + pAddr uint32 = 0x10000 + pEnd uint32 = 0x20000 + pRelAddr uint32 = 0x30000 + + opMaybe = 1 + iota + opMust + opDone + opAnchor = 0x100 + opSub8 = 0x200 + opFlags = opAnchor | opSub8 +) + +var amd64Matcher = matcher{ + {opMaybe | opAnchor, + // __rt0_amd64_darwin: + // JMP __rt0_amd64 + 0xe9, pWild | pAddr, pWild, pWild, pWild | pEnd, 0xcc, 0xcc, 0xcc, + }, + {opMaybe, + // _rt0_amd64_linux: + // lea 0x8(%rsp), %rsi + // mov (%rsp), %rdi + // lea ADDR(%rip), %rax # main + // jmpq *%rax + 0x48, 0x8d, 0x74, 0x24, 0x08, + 0x48, 0x8b, 0x3c, 0x24, 0x48, + 0x8d, 0x05, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0xff, 0xe0, + }, + {opMaybe, + // _rt0_amd64_linux: + // lea 0x8(%rsp), %rsi + // mov (%rsp), %rdi + // mov $ADDR, %eax # main + // jmpq *%rax + 0x48, 0x8d, 0x74, 0x24, 0x08, + 0x48, 0x8b, 0x3c, 0x24, + 0xb8, pWild | pAddr, pWild, pWild, pWild, + 0xff, 0xe0, + }, + {opMaybe, + // __rt0_amd64: + // mov (%rsp), %rdi + // lea 8(%rsp), %rsi + // jmp runtime.rt0_g0 + 0x48, 0x8b, 0x3c, 0x24, + 0x48, 0x8d, 0x74, 0x24, 0x08, + 0xe9, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0xcc, 0xcc, + }, + {opMaybe, + // _start (toward end) + // lea __libc_csu_fini(%rip), %r8 + // lea __libc_csu_init(%rip), %rcx + // lea ADDR(%rip), %rdi # main + // callq *xxx(%rip) + 0x4c, 0x8d, 0x05, pWild, pWild, pWild, pWild, + 0x48, 0x8d, 0x0d, pWild, pWild, pWild, pWild, + 0x48, 0x8d, 0x3d, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0xff, 0x15, + }, + {opMaybe, + // _start (toward end) + // push %rsp (1) + // mov $__libc_csu_fini, %r8 (7) + // mov $__libc_csu_init, %rcx (7) + // mov $ADDR, %rdi # main (7) + // callq *xxx(%rip) + 0x54, + 0x49, 0xc7, 0xc0, pWild, pWild, pWild, pWild, + 0x48, 0xc7, 0xc1, pWild, pWild, pWild, pWild, + 0x48, 0xc7, 0xc7, pAddr | pWild, pWild, pWild, pWild, + }, + {opMaybe | opAnchor, + // main: + // lea ADDR(%rip), %rax # rt0_go + // 
jmpq *%rax + 0x48, 0x8d, 0x05, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0xff, 0xe0, + }, + {opMaybe | opAnchor, + // main: + // mov $ADDR, %eax + // jmpq *%rax + 0xb8, pWild | pAddr, pWild, pWild, pWild, + 0xff, 0xe0, + }, + {opMaybe | opAnchor, + // main: + // JMP runtime.rt0_go(SB) + 0xe9, pWild | pAddr, pWild, pWild, pWild | pEnd, 0xcc, 0xcc, 0xcc, + }, + {opMust | opAnchor, + // rt0_go: + // mov %rdi, %rax + // mov %rsi, %rbx + // sub %0x27, %rsp + // and $0xfffffffffffffff0,%rsp + // mov %rax,0x10(%rsp) + // mov %rbx,0x18(%rsp) + 0x48, 0x89, 0xf8, + 0x48, 0x89, 0xf3, + 0x48, 0x83, 0xec, 0x27, + 0x48, 0x83, 0xe4, 0xf0, + 0x48, 0x89, 0x44, 0x24, 0x10, + 0x48, 0x89, 0x5c, 0x24, 0x18, + }, + {opMust, + // later in rt0_go: + // mov %eax, (%rsp) + // mov 0x18(%rsp), %rax + // mov %rax, 0x8(%rsp) + // callq runtime.args + // callq runtime.osinit + // callq runtime.schedinit (ADDR) + 0x89, 0x04, 0x24, + 0x48, 0x8b, 0x44, 0x24, 0x18, + 0x48, 0x89, 0x44, 0x24, 0x08, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild, pWild, pWild, pWild, + }, + {opMaybe, + // later in rt0_go: + // mov %eax, (%rsp) + // mov 0x18(%rsp), %rax + // mov %rax, 0x8(%rsp) + // callq runtime.args + // callq runtime.osinit + // callq runtime.schedinit (ADDR) + // lea other(%rip), %rdi + 0x89, 0x04, 0x24, + 0x48, 0x8b, 0x44, 0x24, 0x18, + 0x48, 0x89, 0x44, 0x24, 0x08, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0x48, 0x8d, 0x05, + }, + {opMaybe, + // later in rt0_go: + // mov %eax, (%rsp) + // mov 0x18(%rsp), %rax + // mov %rax, 0x8(%rsp) + // callq runtime.args + // callq runtime.osinit + // callq runtime.hashinit + // callq runtime.schedinit (ADDR) + // pushq $main.main + 0x89, 0x04, 0x24, + 0x48, 0x8b, 0x44, 0x24, 0x18, + 0x48, 0x89, 0x44, 0x24, 0x08, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild, pWild, pWild, pWild, + 0xe8, pWild | 
pAddr, pWild, pWild, pWild | pEnd, + 0x68, + }, + {opDone | opSub8, + // schedinit (toward end) + // mov ADDR(%rip), %rax + // test %rax, %rax + // jne + // movq $0x7, ADDR(%rip) + // + 0x48, 0x8b, 0x05, pWild, pWild, pWild, pWild, + 0x48, 0x85, 0xc0, + 0x75, pWild, + 0x48, 0xc7, 0x05, pWild | pAddr, pWild, pWild, pWild, 0x07, 0x00, 0x00, 0x00 | pEnd, + }, + {opDone | opSub8, + // schedinit (toward end) + // mov ADDR(%rip), %rbx + // cmp $0x0, %rbx + // jne + // lea "unknown"(%rip), %rbx + // mov %rbx, ADDR(%rip) + // movq $7, (ADDR+8)(%rip) + 0x48, 0x8b, 0x1d, pWild, pWild, pWild, pWild, + 0x48, 0x83, 0xfb, 0x00, + 0x75, pWild, + 0x48, 0x8d, 0x1d, pWild, pWild, pWild, pWild, + 0x48, 0x89, 0x1d, pWild, pWild, pWild, pWild, + 0x48, 0xc7, 0x05, pWild | pAddr, pWild, pWild, pWild, 0x07, 0x00, 0x00, 0x00 | pEnd, + }, + {opDone, + // schedinit (toward end) + // cmpq $0x0, ADDR(%rip) + // jne + // lea "unknown"(%rip), %rax + // mov %rax, ADDR(%rip) + // lea ADDR(%rip), %rax + // movq $7, 8(%rax) + 0x48, 0x83, 0x3d, pWild | pAddr, pWild, pWild, pWild, 0x00, + 0x75, pWild, + 0x48, 0x8d, 0x05, pWild, pWild, pWild, pWild, + 0x48, 0x89, 0x05, pWild, pWild, pWild, pWild, + 0x48, 0x8d, 0x05, pWild | pAddr, pWild, pWild, pWild | pEnd, + 0x48, 0xc7, 0x40, 0x08, 0x07, 0x00, 0x00, 0x00, + }, + {opDone, + // schedinit (toward end) + // cmpq $0x0, ADDR(%rip) + // jne + // movq $0x7, ADDR(%rip) + 0x48, 0x83, 0x3d, pWild | pAddr, pWild, pWild, pWild, 0x00, + 0x75, pWild, + 0x48, 0xc7, 0x05 | pEnd, pWild | pAddr, pWild, pWild, pWild, 0x07, 0x00, 0x00, 0x00, + }, + {opDone, + // test %eax, %eax + // jne + // lea "unknown"(RIP), %rax + // mov %rax, ADDR(%rip) + 0x48, 0x85, 0xc0, 0x75, pWild, 0x48, 0x8d, 0x05, pWild, pWild, pWild, pWild, 0x48, 0x89, 0x05, pWild | pAddr, pWild, pWild, pWild | pEnd, + }, + {opDone, + // schedinit (toward end) + // mov ADDR(%rip), %rcx + // test %rcx, %rcx + // jne + // movq $0x7, ADDR(%rip) + // + 0x48, 0x8b, 0x0d, pWild, pWild, pWild, pWild, + 0x48, 0x85, 
0xc9, + 0x75, pWild, + 0x48, 0xc7, 0x05 | pEnd, pWild | pAddr, pWild, pWild, pWild, 0x07, 0x00, 0x00, 0x00, + }, +} + +var DebugMatch bool + +func (m matcher) match(f exe, addr uint64) (uint64, bool) { + data, err := f.ReadData(addr, 512) + if DebugMatch { + fmt.Fprintf(os.Stderr, "data @%#x: %x\n", addr, data[:16]) + } + if err != nil { + if DebugMatch { + fmt.Fprintf(os.Stderr, "match: %v\n", err) + } + return 0, false + } + if DebugMatch { + fmt.Fprintf(os.Stderr, "data: %x\n", data[:32]) + } +Matchers: + for pc, p := range m { + op := p[0] + p = p[1:] + Search: + for i := 0; i <= len(data)-len(p); i++ { + a := -1 + e := -1 + if i > 0 && op&opAnchor != 0 { + break + } + for j := 0; j < len(p); j++ { + b := byte(p[j]) + m := byte(p[j] >> 8) + if data[i+j]&^m != b { + continue Search + } + if p[j]&pAddr != 0 { + a = j + } + if p[j]&pEnd != 0 { + e = j + 1 + } + } + // matched + if DebugMatch { + fmt.Fprintf(os.Stderr, "match (%d) %#x+%d %x %x\n", pc, addr, i, p, data[i:i+len(p)]) + } + if a != -1 { + val := uint64(int32(binary.LittleEndian.Uint32(data[i+a:]))) + if e == -1 { + addr = val + } else { + addr += uint64(i+e) + val + } + if op&opSub8 != 0 { + addr -= 8 + } + } + if op&^opFlags == opDone { + if DebugMatch { + fmt.Fprintf(os.Stderr, "done %x\n", addr) + } + return addr, true + } + if a != -1 { + // changed addr, so reload + data, err = f.ReadData(addr, 512) + if err != nil { + return 0, false + } + if DebugMatch { + fmt.Fprintf(os.Stderr, "reload @%#x: %x\n", addr, data[:32]) + } + } + continue Matchers + } + // not matched + if DebugMatch { + fmt.Fprintf(os.Stderr, "no match (%d) %#x %x %x\n", pc, addr, p, data[:32]) + } + if op&^opFlags == opMust { + return 0, false + } + } + // ran off end of matcher + return 0, false +} + +func readBuildVersionX86Asm(f exe) (isGo bool, buildVersion string) { + entry := f.Entry() + if entry == 0 { + if DebugMatch { + fmt.Fprintf(os.Stderr, "missing entry!\n") + } + return + } + addr, ok := amd64Matcher.match(f, entry) 
+ if !ok { + return + } + v, err := readBuildVersion(f, addr, 16) + if err != nil { + return + } + return true, v +} diff --git a/vendor/github.com/rsc/goversion/version/exe.go b/vendor/github.com/rsc/goversion/version/exe.go new file mode 100644 index 0000000..dc87129 --- /dev/null +++ b/vendor/github.com/rsc/goversion/version/exe.go @@ -0,0 +1,317 @@ +// Copyright 2017 The Go Authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package version + +import ( + "bytes" + "debug/elf" + "debug/macho" + "debug/pe" + "encoding/binary" + "fmt" + "io" + "os" +) + +type sym struct { + Name string + Addr uint64 + Size uint64 +} + +type exe interface { + AddrSize() int // bytes + ReadData(addr, size uint64) ([]byte, error) + Symbols() ([]sym, error) + SectionNames() []string + Close() error + ByteOrder() binary.ByteOrder + Entry() uint64 + TextRange() (uint64, uint64) + RODataRange() (uint64, uint64) +} + +func openExe(file string) (exe, error) { + f, err := os.Open(file) + if err != nil { + return nil, err + } + data := make([]byte, 16) + if _, err := io.ReadFull(f, data); err != nil { + return nil, err + } + f.Seek(0, 0) + if bytes.HasPrefix(data, []byte("\x7FELF")) { + e, err := elf.NewFile(f) + if err != nil { + f.Close() + return nil, err + } + return &elfExe{f, e}, nil + } + if bytes.HasPrefix(data, []byte("MZ")) { + e, err := pe.NewFile(f) + if err != nil { + f.Close() + return nil, err + } + return &peExe{f, e}, nil + } + if bytes.HasPrefix(data, []byte("\xFE\xED\xFA")) || bytes.HasPrefix(data[1:], []byte("\xFA\xED\xFE")) { + e, err := macho.NewFile(f) + if err != nil { + f.Close() + return nil, err + } + return &machoExe{f, e}, nil + } + return nil, fmt.Errorf("unrecognized executable format") +} + +type elfExe struct { + os *os.File + f *elf.File +} + +func (x *elfExe) AddrSize() int { return 0 } + +func (x *elfExe) ByteOrder() binary.ByteOrder { return x.f.ByteOrder } + +func (x 
*elfExe) Close() error { + return x.os.Close() +} + +func (x *elfExe) Entry() uint64 { return x.f.Entry } + +func (x *elfExe) ReadData(addr, size uint64) ([]byte, error) { + data := make([]byte, size) + for _, prog := range x.f.Progs { + if prog.Vaddr <= addr && addr+size-1 <= prog.Vaddr+prog.Filesz-1 { + _, err := prog.ReadAt(data, int64(addr-prog.Vaddr)) + if err != nil { + return nil, err + } + return data, nil + } + } + return nil, fmt.Errorf("address not mapped") +} + +func (x *elfExe) Symbols() ([]sym, error) { + syms, err := x.f.Symbols() + if err != nil { + return nil, err + } + var out []sym + for _, s := range syms { + out = append(out, sym{s.Name, s.Value, s.Size}) + } + return out, nil +} + +func (x *elfExe) SectionNames() []string { + var names []string + for _, sect := range x.f.Sections { + names = append(names, sect.Name) + } + return names +} + +func (x *elfExe) TextRange() (uint64, uint64) { + for _, p := range x.f.Progs { + if p.Type == elf.PT_LOAD && p.Flags&elf.PF_X != 0 { + return p.Vaddr, p.Vaddr + p.Filesz + } + } + return 0, 0 +} + +func (x *elfExe) RODataRange() (uint64, uint64) { + for _, p := range x.f.Progs { + if p.Type == elf.PT_LOAD && p.Flags&(elf.PF_R|elf.PF_W|elf.PF_X) == elf.PF_R { + return p.Vaddr, p.Vaddr + p.Filesz + } + } + for _, p := range x.f.Progs { + if p.Type == elf.PT_LOAD && p.Flags&(elf.PF_R|elf.PF_W|elf.PF_X) == (elf.PF_R|elf.PF_X) { + return p.Vaddr, p.Vaddr + p.Filesz + } + } + return 0, 0 +} + +type peExe struct { + os *os.File + f *pe.File +} + +func (x *peExe) imageBase() uint64 { + switch oh := x.f.OptionalHeader.(type) { + case *pe.OptionalHeader32: + return uint64(oh.ImageBase) + case *pe.OptionalHeader64: + return oh.ImageBase + } + return 0 +} + +func (x *peExe) AddrSize() int { + if x.f.Machine == pe.IMAGE_FILE_MACHINE_AMD64 { + return 8 + } + return 4 +} + +func (x *peExe) ByteOrder() binary.ByteOrder { return binary.LittleEndian } + +func (x *peExe) Close() error { + return x.os.Close() +} + +func (x 
*peExe) Entry() uint64 { + switch oh := x.f.OptionalHeader.(type) { + case *pe.OptionalHeader32: + return uint64(oh.ImageBase + oh.AddressOfEntryPoint) + case *pe.OptionalHeader64: + return oh.ImageBase + uint64(oh.AddressOfEntryPoint) + } + return 0 +} + +func (x *peExe) ReadData(addr, size uint64) ([]byte, error) { + addr -= x.imageBase() + data := make([]byte, size) + for _, sect := range x.f.Sections { + if uint64(sect.VirtualAddress) <= addr && addr+size-1 <= uint64(sect.VirtualAddress+sect.Size-1) { + _, err := sect.ReadAt(data, int64(addr-uint64(sect.VirtualAddress))) + if err != nil { + return nil, err + } + return data, nil + } + } + return nil, fmt.Errorf("address not mapped") +} + +func (x *peExe) Symbols() ([]sym, error) { + base := x.imageBase() + var out []sym + for _, s := range x.f.Symbols { + if s.SectionNumber <= 0 || int(s.SectionNumber) > len(x.f.Sections) { + continue + } + sect := x.f.Sections[s.SectionNumber-1] + out = append(out, sym{s.Name, uint64(s.Value) + base + uint64(sect.VirtualAddress), 0}) + } + return out, nil +} + +func (x *peExe) SectionNames() []string { + var names []string + for _, sect := range x.f.Sections { + names = append(names, sect.Name) + } + return names +} + +func (x *peExe) TextRange() (uint64, uint64) { + // Assume text is first non-empty section. 
+ for _, sect := range x.f.Sections { + if sect.VirtualAddress != 0 && sect.Size != 0 { + return uint64(sect.VirtualAddress) + x.imageBase(), uint64(sect.VirtualAddress+sect.Size) + x.imageBase() + } + } + return 0, 0 +} + +func (x *peExe) RODataRange() (uint64, uint64) { + return x.TextRange() +} + +type machoExe struct { + os *os.File + f *macho.File +} + +func (x *machoExe) AddrSize() int { + if x.f.Cpu&0x01000000 != 0 { + return 8 + } + return 4 +} + +func (x *machoExe) ByteOrder() binary.ByteOrder { return x.f.ByteOrder } + +func (x *machoExe) Close() error { + return x.os.Close() +} + +func (x *machoExe) Entry() uint64 { + for _, load := range x.f.Loads { + b, ok := load.(macho.LoadBytes) + if !ok { + continue + } + bo := x.f.ByteOrder + const x86_THREAD_STATE64 = 4 + cmd, siz := macho.LoadCmd(bo.Uint32(b[0:4])), bo.Uint32(b[4:8]) + if cmd == macho.LoadCmdUnixThread && siz == 184 && bo.Uint32(b[8:12]) == x86_THREAD_STATE64 { + return bo.Uint64(b[144:]) + } + } + return 0 +} + +func (x *machoExe) ReadData(addr, size uint64) ([]byte, error) { + data := make([]byte, size) + for _, load := range x.f.Loads { + seg, ok := load.(*macho.Segment) + if !ok { + continue + } + if seg.Addr <= addr && addr+size-1 <= seg.Addr+seg.Filesz-1 { + if seg.Name == "__PAGEZERO" { + continue + } + _, err := seg.ReadAt(data, int64(addr-seg.Addr)) + if err != nil { + return nil, err + } + return data, nil + } + } + return nil, fmt.Errorf("address not mapped") +} + +func (x *machoExe) Symbols() ([]sym, error) { + var out []sym + for _, s := range x.f.Symtab.Syms { + out = append(out, sym{s.Name, s.Value, 0}) + } + return out, nil +} + +func (x *machoExe) SectionNames() []string { + var names []string + for _, sect := range x.f.Sections { + names = append(names, sect.Name) + } + return names +} + +func (x *machoExe) TextRange() (uint64, uint64) { + // Assume text is first non-empty segment. 
+ for _, load := range x.f.Loads { + seg, ok := load.(*macho.Segment) + if ok && seg.Name != "__PAGEZERO" && seg.Addr != 0 && seg.Filesz != 0 { + return seg.Addr, seg.Addr + seg.Filesz + } + } + return 0, 0 +} + +func (x *machoExe) RODataRange() (uint64, uint64) { + return x.TextRange() +} diff --git a/vendor/github.com/rsc/goversion/version/read.go b/vendor/github.com/rsc/goversion/version/read.go new file mode 100644 index 0000000..3792818 --- /dev/null +++ b/vendor/github.com/rsc/goversion/version/read.go @@ -0,0 +1,243 @@ +// Copyright 2017 The Go Authors. All Rights Reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package version reports the Go version used to build program executables. +package version + +import ( + "bytes" + "encoding/hex" + "errors" + "fmt" + "regexp" + "strings" +) + +// Version is the information reported by ReadExe. +type Version struct { + Release string // Go version (runtime.Version in the program) + ModuleInfo string // program's module information + BoringCrypto bool // program uses BoringCrypto + StandardCrypto bool // program uses standard crypto (replaced by BoringCrypto) + FIPSOnly bool // program imports "crypto/tls/fipsonly" +} + +// ReadExe reports information about the Go version used to build +// the program executable named by file. 
+func ReadExe(file string) (Version, error) { + var v Version + f, err := openExe(file) + if err != nil { + return v, err + } + defer f.Close() + isGo := false + for _, name := range f.SectionNames() { + if name == ".note.go.buildid" { + isGo = true + } + } + syms, symsErr := f.Symbols() + isGccgo := false + for _, sym := range syms { + name := sym.Name + if name == "runtime.main" || name == "main.main" { + isGo = true + } + if strings.HasPrefix(name, "runtime.") && strings.HasSuffix(name, "$descriptor") { + isGccgo = true + } + if name == "runtime.buildVersion" { + isGo = true + release, err := readBuildVersion(f, sym.Addr, sym.Size) + if err != nil { + return v, err + } + v.Release = release + + } + if strings.Contains(name, "_Cfunc__goboringcrypto_") || name == "crypto/internal/boring/sig.BoringCrypto" { + v.BoringCrypto = true + } + if name == "crypto/internal/boring/sig.FIPSOnly" { + v.FIPSOnly = true + } + for _, re := range standardCryptoNames { + if re.MatchString(name) { + v.StandardCrypto = true + } + } + if name == "crypto/internal/boring/sig.StandardCrypto" { + v.StandardCrypto = true + } + } + + if DebugMatch { + v.Release = "" + } + if err := findModuleInfo(&v, f); err != nil { + return v, err + } + if v.Release == "" { + g, release := readBuildVersionX86Asm(f) + if g { + isGo = true + v.Release = release + if err := findCryptoSigs(&v, f); err != nil { + return v, err + } + } + } + if isGccgo && v.Release == "" { + isGo = true + v.Release = "gccgo (version unknown)" + } + if !isGo && symsErr != nil { + return v, symsErr + } + + if !isGo { + return v, errors.New("not a Go executable") + } + if v.Release == "" { + v.Release = "unknown Go version" + } + return v, nil +} + +var re = regexp.MustCompile + +var standardCryptoNames = []*regexp.Regexp{ + re(`^crypto/sha1\.\(\*digest\)`), + re(`^crypto/sha256\.\(\*digest\)`), + re(`^crypto/rand\.\(\*devReader\)`), + re(`^crypto/rsa\.encrypt$`), + re(`^crypto/rsa\.decrypt$`), +} + +func readBuildVersion(f exe, 
addr, size uint64) (string, error) { + if size == 0 { + size = uint64(f.AddrSize() * 2) + } + if size != 8 && size != 16 { + return "", fmt.Errorf("invalid size for runtime.buildVersion") + } + data, err := f.ReadData(addr, size) + if err != nil { + return "", fmt.Errorf("reading runtime.buildVersion: %v", err) + } + + if size == 8 { + addr = uint64(f.ByteOrder().Uint32(data)) + size = uint64(f.ByteOrder().Uint32(data[4:])) + } else { + addr = f.ByteOrder().Uint64(data) + size = f.ByteOrder().Uint64(data[8:]) + } + if size > 1000 { + return "", fmt.Errorf("implausible string size %d for runtime.buildVersion", size) + } + + data, err = f.ReadData(addr, size) + if err != nil { + return "", fmt.Errorf("reading runtime.buildVersion string data: %v", err) + } + return string(data), nil +} + +// Code signatures that indicate BoringCrypto or crypto/internal/fipsonly. +// These are not byte literals in order to avoid the actual +// byte signatures appearing in the goversion binary, +// because on some systems you can't tell rodata from text. 
+var ( + sigBoringCrypto, _ = hex.DecodeString("EB1DF448F44BF4B332F52813A3B450D441CC2485F001454E92101B1D2F1950C3") + sigStandardCrypto, _ = hex.DecodeString("EB1DF448F44BF4BAEE4DFA9851CA56A91145E83E99C59CF911CB8E80DAF12FC3") + sigFIPSOnly, _ = hex.DecodeString("EB1DF448F44BF4363CB9CE9D68047D31F28D325D5CA5873F5D80CAF6D6151BC3") +) + +func findCryptoSigs(v *Version, f exe) error { + const maxSigLen = 1 << 10 + start, end := f.TextRange() + for addr := start; addr < end; { + size := uint64(1 << 20) + if end-addr < size { + size = end - addr + } + data, err := f.ReadData(addr, size) + if err != nil { + return fmt.Errorf("reading text: %v", err) + } + if haveSig(data, sigBoringCrypto) { + v.BoringCrypto = true + } + if haveSig(data, sigFIPSOnly) { + v.FIPSOnly = true + } + if haveSig(data, sigStandardCrypto) { + v.StandardCrypto = true + } + if addr+size < end { + size -= maxSigLen + } + addr += size + } + return nil +} + +func haveSig(data, sig []byte) bool { + const align = 16 + for { + i := bytes.Index(data, sig) + if i < 0 { + return false + } + if i&(align-1) == 0 { + return true + } + // Found unaligned match; unexpected but + // skip to next aligned boundary and keep searching. 
+ data = data[(i+align-1)&^(align-1):] + } +} + +func findModuleInfo(v *Version, f exe) error { + const maxModInfo = 128 << 10 + start, end := f.RODataRange() + for addr := start; addr < end; { + size := uint64(4 << 20) + if end-addr < size { + size = end - addr + } + data, err := f.ReadData(addr, size) + if err != nil { + return fmt.Errorf("reading text: %v", err) + } + if haveModuleInfo(data, v) { + return nil + } + if addr+size < end { + size -= maxModInfo + } + addr += size + } + return nil +} + +var ( + infoStart, _ = hex.DecodeString("3077af0c9274080241e1c107e6d618e6") + infoEnd, _ = hex.DecodeString("f932433186182072008242104116d8f2") +) + +func haveModuleInfo(data []byte, v *Version) bool { + i := bytes.Index(data, infoStart) + if i < 0 { + return false + } + j := bytes.Index(data[i:], infoEnd) + if j < 0 { + return false + } + v.ModuleInfo = string(data[i+len(infoStart) : i+j]) + return true +} diff --git a/vendor/github.com/rwcarlsen/goexif/LICENSE b/vendor/github.com/rwcarlsen/goexif/LICENSE new file mode 100644 index 0000000..aa62504 --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/LICENSE @@ -0,0 +1,24 @@ + +Copyright (c) 2012, Robert Carlsen & Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/rwcarlsen/goexif/exif/README.md b/vendor/github.com/rwcarlsen/goexif/exif/README.md new file mode 100644 index 0000000..b3bf5fa --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/exif/README.md @@ -0,0 +1,4 @@ + +To regenerate the regression test data, run `go generate` inside the exif +package directory and commit the changes to *regress_expected_test.go*. + diff --git a/vendor/github.com/rwcarlsen/goexif/exif/exif.go b/vendor/github.com/rwcarlsen/goexif/exif/exif.go new file mode 100644 index 0000000..03afe65 --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/exif/exif.go @@ -0,0 +1,655 @@ +// Package exif implements decoding of EXIF data as defined in the EXIF 2.2 +// specification (http://www.exif.org/Exif2-2.PDF). +package exif + +import ( + "bufio" + "bytes" + "encoding/binary" + "encoding/json" + "errors" + "fmt" + "io" + "io/ioutil" + "math" + "strconv" + "strings" + "time" + + "github.com/rwcarlsen/goexif/tiff" +) + +const ( + jpeg_APP1 = 0xE1 + + exifPointer = 0x8769 + gpsPointer = 0x8825 + interopPointer = 0xA005 +) + +// A decodeError is returned when the image cannot be decoded as a tiff image. +type decodeError struct { + cause error +} + +func (de decodeError) Error() string { + return fmt.Sprintf("exif: decode failed (%v) ", de.cause.Error()) +} + +// IsShortReadTagValueError identifies a ErrShortReadTagValue error. 
+func IsShortReadTagValueError(err error) bool { + de, ok := err.(decodeError) + if ok { + return de.cause == tiff.ErrShortReadTagValue + } + return false +} + +// A TagNotPresentError is returned when the requested field is not +// present in the EXIF. +type TagNotPresentError FieldName + +func (tag TagNotPresentError) Error() string { + return fmt.Sprintf("exif: tag %q is not present", string(tag)) +} + +func IsTagNotPresentError(err error) bool { + _, ok := err.(TagNotPresentError) + return ok +} + +// Parser allows the registration of custom parsing and field loading +// in the Decode function. +type Parser interface { + // Parse should read data from x and insert parsed fields into x via + // LoadTags. + Parse(x *Exif) error +} + +var parsers []Parser + +func init() { + RegisterParsers(&parser{}) +} + +// RegisterParsers registers one or more parsers to be automatically called +// when decoding EXIF data via the Decode function. +func RegisterParsers(ps ...Parser) { + parsers = append(parsers, ps...) +} + +type parser struct{} + +type tiffErrors map[tiffError]string + +func (te tiffErrors) Error() string { + var allErrors []string + for k, v := range te { + allErrors = append(allErrors, fmt.Sprintf("%s: %v\n", stagePrefix[k], v)) + } + return strings.Join(allErrors, "\n") +} + +// IsCriticalError, given the error returned by Decode, reports whether the +// returned *Exif may contain usable information. +func IsCriticalError(err error) bool { + _, ok := err.(tiffErrors) + return !ok +} + +// IsExifError reports whether the error happened while decoding the EXIF +// sub-IFD. +func IsExifError(err error) bool { + if te, ok := err.(tiffErrors); ok { + _, isExif := te[loadExif] + return isExif + } + return false +} + +// IsGPSError reports whether the error happened while decoding the GPS sub-IFD. 
+func IsGPSError(err error) bool { + if te, ok := err.(tiffErrors); ok { + _, isGPS := te[loadExif] + return isGPS + } + return false +} + +// IsInteroperabilityError reports whether the error happened while decoding the +// Interoperability sub-IFD. +func IsInteroperabilityError(err error) bool { + if te, ok := err.(tiffErrors); ok { + _, isInterop := te[loadInteroperability] + return isInterop + } + return false +} + +type tiffError int + +const ( + loadExif tiffError = iota + loadGPS + loadInteroperability +) + +var stagePrefix = map[tiffError]string{ + loadExif: "loading EXIF sub-IFD", + loadGPS: "loading GPS sub-IFD", + loadInteroperability: "loading Interoperability sub-IFD", +} + +// Parse reads data from the tiff data in x and populates the tags +// in x. If parsing a sub-IFD fails, the error is recorded and +// parsing continues with the remaining sub-IFDs. +func (p *parser) Parse(x *Exif) error { + if len(x.Tiff.Dirs) == 0 { + return errors.New("Invalid exif data") + } + x.LoadTags(x.Tiff.Dirs[0], exifFields, false) + + // thumbnails + if len(x.Tiff.Dirs) >= 2 { + x.LoadTags(x.Tiff.Dirs[1], thumbnailFields, false) + } + + te := make(tiffErrors) + + // recurse into exif, gps, and interop sub-IFDs + if err := loadSubDir(x, ExifIFDPointer, exifFields); err != nil { + te[loadExif] = err.Error() + } + if err := loadSubDir(x, GPSInfoIFDPointer, gpsFields); err != nil { + te[loadGPS] = err.Error() + } + + if err := loadSubDir(x, InteroperabilityIFDPointer, interopFields); err != nil { + te[loadInteroperability] = err.Error() + } + if len(te) > 0 { + return te + } + return nil +} + +func loadSubDir(x *Exif, ptr FieldName, fieldMap map[uint16]FieldName) error { + r := bytes.NewReader(x.Raw) + + tag, err := x.Get(ptr) + if err != nil { + return nil + } + offset, err := tag.Int64(0) + if err != nil { + return nil + } + + _, err = r.Seek(offset, 0) + if err != nil { + return fmt.Errorf("exif: seek to sub-IFD %s failed: %v", ptr, err) + } + subDir, _, err := 
tiff.DecodeDir(r, x.Tiff.Order) + if err != nil { + return fmt.Errorf("exif: sub-IFD %s decode failed: %v", ptr, err) + } + x.LoadTags(subDir, fieldMap, false) + return nil +} + +// Exif provides access to decoded EXIF metadata fields and values. +type Exif struct { + Tiff *tiff.Tiff + main map[FieldName]*tiff.Tag + Raw []byte +} + +// Decode parses EXIF data from r (a TIFF, JPEG, or raw EXIF block) +// and returns a queryable Exif object. After the EXIF data section is +// called and the TIFF structure is decoded, each registered parser is +// called (in order of registration). If one parser returns an error, +// decoding terminates and the remaining parsers are not called. +// +// The error can be inspected with functions such as IsCriticalError +// to determine whether the returned object might still be usable. +func Decode(r io.Reader) (*Exif, error) { + + // EXIF data in JPEG is stored in the APP1 marker. EXIF data uses the TIFF + // format to store data. + // If we're parsing a TIFF image, we don't need to strip away any data. + // If we're parsing a JPEG image, we need to strip away the JPEG APP1 + // marker and also the EXIF header. + + header := make([]byte, 4) + n, err := io.ReadFull(r, header) + if err != nil { + return nil, fmt.Errorf("exif: error reading 4 byte header, got %d, %v", n, err) + } + + var isTiff bool + var isRawExif bool + var assumeJPEG bool + switch string(header) { + case "II*\x00": + // TIFF - Little endian (Intel) + isTiff = true + case "MM\x00*": + // TIFF - Big endian (Motorola) + isTiff = true + case "Exif": + isRawExif = true + default: + // Not TIFF, assume JPEG + assumeJPEG = true + } + + // Put the header bytes back into the reader. 
+ r = io.MultiReader(bytes.NewReader(header), r) + var ( + er *bytes.Reader + tif *tiff.Tiff + sec *appSec + ) + + switch { + case isRawExif: + var header [6]byte + if _, err := io.ReadFull(r, header[:]); err != nil { + return nil, fmt.Errorf("exif: unexpected raw exif header read error") + } + if got, want := string(header[:]), "Exif\x00\x00"; got != want { + return nil, fmt.Errorf("exif: unexpected raw exif header; got %q, want %q", got, want) + } + fallthrough + case isTiff: + // Functions below need the IFDs from the TIFF data to be stored in a + // *bytes.Reader. We use TeeReader to get a copy of the bytes as a + // side-effect of tiff.Decode() doing its work. + b := &bytes.Buffer{} + tr := io.TeeReader(r, b) + tif, err = tiff.Decode(tr) + er = bytes.NewReader(b.Bytes()) + case assumeJPEG: + // Locate the JPEG APP1 header. + sec, err = newAppSec(jpeg_APP1, r) + if err != nil { + return nil, err + } + // Strip away EXIF header. + er, err = sec.exifReader() + if err != nil { + return nil, err + } + tif, err = tiff.Decode(er) + } + + if err != nil { + return nil, decodeError{cause: err} + } + + er.Seek(0, 0) + raw, err := ioutil.ReadAll(er) + if err != nil { + return nil, decodeError{cause: err} + } + + // build an exif structure from the tiff + x := &Exif{ + main: map[FieldName]*tiff.Tag{}, + Tiff: tif, + Raw: raw, + } + + for i, p := range parsers { + if err := p.Parse(x); err != nil { + if _, ok := err.(tiffErrors); ok { + return x, err + } + // This should never happen, as Parse always returns a tiffError + // for now, but that could change. + return x, fmt.Errorf("exif: parser %v failed (%v)", i, err) + } + } + + return x, nil +} + +// LoadTags loads tags into the available fields from the tiff Directory +// using the given tagid-fieldname mapping. Used to load makernote and +// other meta-data. If showMissing is true, tags in d that are not in the +// fieldMap will be loaded with the FieldName UnknownPrefix followed by the +// tag ID (in hex format). 
+func (x *Exif) LoadTags(d *tiff.Dir, fieldMap map[uint16]FieldName, showMissing bool) { + for _, tag := range d.Tags { + name := fieldMap[tag.Id] + if name == "" { + if !showMissing { + continue + } + name = FieldName(fmt.Sprintf("%v%x", UnknownPrefix, tag.Id)) + } + x.main[name] = tag + } +} + +// Get retrieves the EXIF tag for the given field name. +// +// If the tag is not known or not present, an error is returned. If the +// tag name is known, the error will be a TagNotPresentError. +func (x *Exif) Get(name FieldName) (*tiff.Tag, error) { + if tg, ok := x.main[name]; ok { + return tg, nil + } + return nil, TagNotPresentError(name) +} + +// Walker is the interface used to traverse all fields of an Exif object. +type Walker interface { + // Walk is called for each non-nil EXIF field. Returning a non-nil + // error aborts the walk/traversal. + Walk(name FieldName, tag *tiff.Tag) error +} + +// Walk calls the Walk method of w with the name and tag for every non-nil +// EXIF field. If w aborts the walk with an error, that error is returned. +func (x *Exif) Walk(w Walker) error { + for name, tag := range x.main { + if err := w.Walk(name, tag); err != nil { + return err + } + } + return nil +} + +// DateTime returns the EXIF's "DateTimeOriginal" field, which +// is the creation time of the photo. If not found, it tries +// the "DateTime" (which is meant as the modtime) instead. +// The error will be TagNotPresentErr if none of those tags +// were found, or a generic error if the tag value was +// not a string, or the error returned by time.Parse. +// +// If the EXIF lacks timezone information or GPS time, the returned +// time's Location will be time.Local. 
+func (x *Exif) DateTime() (time.Time, error) { + var dt time.Time + tag, err := x.Get(DateTimeOriginal) + if err != nil { + tag, err = x.Get(DateTime) + if err != nil { + return dt, err + } + } + if tag.Format() != tiff.StringVal { + return dt, errors.New("DateTime[Original] not in string format") + } + exifTimeLayout := "2006:01:02 15:04:05" + dateStr := strings.TrimRight(string(tag.Val), "\x00") + // TODO(bradfitz,mpl): look for timezone offset, GPS time, etc. + timeZone := time.Local + if tz, _ := x.TimeZone(); tz != nil { + timeZone = tz + } + return time.ParseInLocation(exifTimeLayout, dateStr, timeZone) +} + +func (x *Exif) TimeZone() (*time.Location, error) { + // TODO: parse more timezone fields (e.g. Nikon WorldTime). + timeInfo, err := x.Get("Canon.TimeInfo") + if err != nil { + return nil, err + } + if timeInfo.Count < 2 { + return nil, errors.New("Canon.TimeInfo does not contain timezone") + } + offsetMinutes, err := timeInfo.Int(1) + if err != nil { + return nil, err + } + return time.FixedZone("", offsetMinutes*60), nil +} + +func ratFloat(num, dem int64) float64 { + return float64(num) / float64(dem) +} + +// Tries to parse a Geo degrees value from a string as it was found in some +// EXIF data. 
+// Supported formats so far: +// - "52,00000,50,00000,34,01180" ==> 52 deg 50'34.0118" +// Probably due to locale the comma is used as decimal mark as well as the +// separator of three floats (degrees, minutes, seconds) +// http://en.wikipedia.org/wiki/Decimal_mark#Hindu.E2.80.93Arabic_numeral_system +// - "52.0,50.0,34.01180" ==> 52deg50'34.0118" +// - "52,50,34.01180" ==> 52deg50'34.0118" +func parseTagDegreesString(s string) (float64, error) { + const unparsableErrorFmt = "Unknown coordinate format: %s" + isSplitRune := func(c rune) bool { + return c == ',' || c == ';' + } + parts := strings.FieldsFunc(s, isSplitRune) + var degrees, minutes, seconds float64 + var err error + switch len(parts) { + case 6: + degrees, err = strconv.ParseFloat(parts[0]+"."+parts[1], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + minutes, err = strconv.ParseFloat(parts[2]+"."+parts[3], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + minutes = math.Copysign(minutes, degrees) + seconds, err = strconv.ParseFloat(parts[4]+"."+parts[5], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + seconds = math.Copysign(seconds, degrees) + case 3: + degrees, err = strconv.ParseFloat(parts[0], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + minutes, err = strconv.ParseFloat(parts[1], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + minutes = math.Copysign(minutes, degrees) + seconds, err = strconv.ParseFloat(parts[2], 64) + if err != nil { + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + seconds = math.Copysign(seconds, degrees) + default: + return 0.0, fmt.Errorf(unparsableErrorFmt, s) + } + return degrees + minutes/60.0 + seconds/3600.0, nil +} + +func parse3Rat2(tag *tiff.Tag) ([3]float64, error) { + v := [3]float64{} + for i := range v { + num, den, err := tag.Rat2(i) + if err != nil { + return v, err + } + v[i] = ratFloat(num, den) + if tag.Count < 
uint32(i+2) { + break + } + } + return v, nil +} + +func tagDegrees(tag *tiff.Tag) (float64, error) { + switch tag.Format() { + case tiff.RatVal: + // The usual case, according to the Exif spec + // (http://www.kodak.com/global/plugins/acrobat/en/service/digCam/exifStandard2.pdf, + // sec 4.6.6, p. 52 et seq.) + v, err := parse3Rat2(tag) + if err != nil { + return 0.0, err + } + return v[0] + v[1]/60 + v[2]/3600.0, nil + case tiff.StringVal: + // Encountered this weird case with a panorama picture taken with a HTC phone + s, err := tag.StringVal() + if err != nil { + return 0.0, err + } + return parseTagDegreesString(s) + default: + // don't know how to parse value, give up + return 0.0, fmt.Errorf("Malformed EXIF Tag Degrees") + } +} + +// LatLong returns the latitude and longitude of the photo and +// whether it was present. +func (x *Exif) LatLong() (lat, long float64, err error) { + // All calls of x.Get might return an TagNotPresentError + longTag, err := x.Get(FieldName("GPSLongitude")) + if err != nil { + return + } + ewTag, err := x.Get(FieldName("GPSLongitudeRef")) + if err != nil { + return + } + latTag, err := x.Get(FieldName("GPSLatitude")) + if err != nil { + return + } + nsTag, err := x.Get(FieldName("GPSLatitudeRef")) + if err != nil { + return + } + if long, err = tagDegrees(longTag); err != nil { + return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err) + } + if lat, err = tagDegrees(latTag); err != nil { + return 0, 0, fmt.Errorf("Cannot parse latitude: %v", err) + } + ew, err := ewTag.StringVal() + if err == nil && ew == "W" { + long *= -1.0 + } else if err != nil { + return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err) + } + ns, err := nsTag.StringVal() + if err == nil && ns == "S" { + lat *= -1.0 + } else if err != nil { + return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err) + } + return lat, long, nil +} + +// String returns a pretty text representation of the decoded exif data. 
+func (x *Exif) String() string { + var buf bytes.Buffer + for name, tag := range x.main { + fmt.Fprintf(&buf, "%s: %s\n", name, tag) + } + return buf.String() +} + +// JpegThumbnail returns the jpeg thumbnail if it exists. If it doesn't exist, +// TagNotPresentError will be returned +func (x *Exif) JpegThumbnail() ([]byte, error) { + offset, err := x.Get(ThumbJPEGInterchangeFormat) + if err != nil { + return nil, err + } + start, err := offset.Int(0) + if err != nil { + return nil, err + } + + length, err := x.Get(ThumbJPEGInterchangeFormatLength) + if err != nil { + return nil, err + } + l, err := length.Int(0) + if err != nil { + return nil, err + } + + return x.Raw[start : start+l], nil +} + +// MarshalJson implements the encoding/json.Marshaler interface providing output of +// all EXIF fields present (names and values). +func (x Exif) MarshalJSON() ([]byte, error) { + return json.Marshal(x.main) +} + +type appSec struct { + marker byte + data []byte +} + +// newAppSec finds marker in r and returns the corresponding application data +// section. +func newAppSec(marker byte, r io.Reader) (*appSec, error) { + br := bufio.NewReader(r) + app := &appSec{marker: marker} + var dataLen int + + // seek to marker + for dataLen == 0 { + if _, err := br.ReadBytes(0xFF); err != nil { + return nil, err + } + c, err := br.ReadByte() + if err != nil { + return nil, err + } else if c != marker { + continue + } + + dataLenBytes := make([]byte, 2) + for k, _ := range dataLenBytes { + c, err := br.ReadByte() + if err != nil { + return nil, err + } + dataLenBytes[k] = c + } + dataLen = int(binary.BigEndian.Uint16(dataLenBytes)) - 2 + } + + // read section data + nread := 0 + for nread < dataLen { + s := make([]byte, dataLen-nread) + n, err := br.Read(s) + nread += n + if err != nil && nread < dataLen { + return nil, err + } + app.data = append(app.data, s[:n]...) + } + return app, nil +} + +// reader returns a reader on this appSec. 
+func (app *appSec) reader() *bytes.Reader { + return bytes.NewReader(app.data) +} + +// exifReader returns a reader on this appSec with the read cursor advanced to +// the start of the exif's tiff encoded portion. +func (app *appSec) exifReader() (*bytes.Reader, error) { + if len(app.data) < 6 { + return nil, errors.New("exif: failed to find exif intro marker") + } + + // read/check for exif special mark + exif := app.data[:6] + if !bytes.Equal(exif, append([]byte("Exif"), 0x00, 0x00)) { + return nil, errors.New("exif: failed to find exif intro marker") + } + return bytes.NewReader(app.data[6:]), nil +} diff --git a/vendor/github.com/rwcarlsen/goexif/exif/fields.go b/vendor/github.com/rwcarlsen/goexif/exif/fields.go new file mode 100644 index 0000000..8b8ae0f --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/exif/fields.go @@ -0,0 +1,309 @@ +package exif + +type FieldName string + +// UnknownPrefix is used as the first part of field names for decoded tags for +// which there is no known/supported EXIF field. 
+const UnknownPrefix = "UnknownTag_" + +// Primary EXIF fields +const ( + ImageWidth FieldName = "ImageWidth" + ImageLength FieldName = "ImageLength" // Image height called Length by EXIF spec + BitsPerSample FieldName = "BitsPerSample" + Compression FieldName = "Compression" + PhotometricInterpretation FieldName = "PhotometricInterpretation" + Orientation FieldName = "Orientation" + SamplesPerPixel FieldName = "SamplesPerPixel" + PlanarConfiguration FieldName = "PlanarConfiguration" + YCbCrSubSampling FieldName = "YCbCrSubSampling" + YCbCrPositioning FieldName = "YCbCrPositioning" + XResolution FieldName = "XResolution" + YResolution FieldName = "YResolution" + ResolutionUnit FieldName = "ResolutionUnit" + DateTime FieldName = "DateTime" + ImageDescription FieldName = "ImageDescription" + Make FieldName = "Make" + Model FieldName = "Model" + Software FieldName = "Software" + Artist FieldName = "Artist" + Copyright FieldName = "Copyright" + ExifIFDPointer FieldName = "ExifIFDPointer" + GPSInfoIFDPointer FieldName = "GPSInfoIFDPointer" + InteroperabilityIFDPointer FieldName = "InteroperabilityIFDPointer" + ExifVersion FieldName = "ExifVersion" + FlashpixVersion FieldName = "FlashpixVersion" + ColorSpace FieldName = "ColorSpace" + ComponentsConfiguration FieldName = "ComponentsConfiguration" + CompressedBitsPerPixel FieldName = "CompressedBitsPerPixel" + PixelXDimension FieldName = "PixelXDimension" + PixelYDimension FieldName = "PixelYDimension" + MakerNote FieldName = "MakerNote" + UserComment FieldName = "UserComment" + RelatedSoundFile FieldName = "RelatedSoundFile" + DateTimeOriginal FieldName = "DateTimeOriginal" + DateTimeDigitized FieldName = "DateTimeDigitized" + SubSecTime FieldName = "SubSecTime" + SubSecTimeOriginal FieldName = "SubSecTimeOriginal" + SubSecTimeDigitized FieldName = "SubSecTimeDigitized" + ImageUniqueID FieldName = "ImageUniqueID" + ExposureTime FieldName = "ExposureTime" + FNumber FieldName = "FNumber" + ExposureProgram FieldName = 
"ExposureProgram" + SpectralSensitivity FieldName = "SpectralSensitivity" + ISOSpeedRatings FieldName = "ISOSpeedRatings" + OECF FieldName = "OECF" + ShutterSpeedValue FieldName = "ShutterSpeedValue" + ApertureValue FieldName = "ApertureValue" + BrightnessValue FieldName = "BrightnessValue" + ExposureBiasValue FieldName = "ExposureBiasValue" + MaxApertureValue FieldName = "MaxApertureValue" + SubjectDistance FieldName = "SubjectDistance" + MeteringMode FieldName = "MeteringMode" + LightSource FieldName = "LightSource" + Flash FieldName = "Flash" + FocalLength FieldName = "FocalLength" + SubjectArea FieldName = "SubjectArea" + FlashEnergy FieldName = "FlashEnergy" + SpatialFrequencyResponse FieldName = "SpatialFrequencyResponse" + FocalPlaneXResolution FieldName = "FocalPlaneXResolution" + FocalPlaneYResolution FieldName = "FocalPlaneYResolution" + FocalPlaneResolutionUnit FieldName = "FocalPlaneResolutionUnit" + SubjectLocation FieldName = "SubjectLocation" + ExposureIndex FieldName = "ExposureIndex" + SensingMethod FieldName = "SensingMethod" + FileSource FieldName = "FileSource" + SceneType FieldName = "SceneType" + CFAPattern FieldName = "CFAPattern" + CustomRendered FieldName = "CustomRendered" + ExposureMode FieldName = "ExposureMode" + WhiteBalance FieldName = "WhiteBalance" + DigitalZoomRatio FieldName = "DigitalZoomRatio" + FocalLengthIn35mmFilm FieldName = "FocalLengthIn35mmFilm" + SceneCaptureType FieldName = "SceneCaptureType" + GainControl FieldName = "GainControl" + Contrast FieldName = "Contrast" + Saturation FieldName = "Saturation" + Sharpness FieldName = "Sharpness" + DeviceSettingDescription FieldName = "DeviceSettingDescription" + SubjectDistanceRange FieldName = "SubjectDistanceRange" + LensMake FieldName = "LensMake" + LensModel FieldName = "LensModel" +) + +// Windows-specific tags +const ( + XPTitle FieldName = "XPTitle" + XPComment FieldName = "XPComment" + XPAuthor FieldName = "XPAuthor" + XPKeywords FieldName = "XPKeywords" + XPSubject 
FieldName = "XPSubject" +) + +// thumbnail fields +const ( + ThumbJPEGInterchangeFormat FieldName = "ThumbJPEGInterchangeFormat" // offset to thumb jpeg SOI + ThumbJPEGInterchangeFormatLength FieldName = "ThumbJPEGInterchangeFormatLength" // byte length of thumb +) + +// GPS fields +const ( + GPSVersionID FieldName = "GPSVersionID" + GPSLatitudeRef FieldName = "GPSLatitudeRef" + GPSLatitude FieldName = "GPSLatitude" + GPSLongitudeRef FieldName = "GPSLongitudeRef" + GPSLongitude FieldName = "GPSLongitude" + GPSAltitudeRef FieldName = "GPSAltitudeRef" + GPSAltitude FieldName = "GPSAltitude" + GPSTimeStamp FieldName = "GPSTimeStamp" + GPSSatelites FieldName = "GPSSatelites" + GPSStatus FieldName = "GPSStatus" + GPSMeasureMode FieldName = "GPSMeasureMode" + GPSDOP FieldName = "GPSDOP" + GPSSpeedRef FieldName = "GPSSpeedRef" + GPSSpeed FieldName = "GPSSpeed" + GPSTrackRef FieldName = "GPSTrackRef" + GPSTrack FieldName = "GPSTrack" + GPSImgDirectionRef FieldName = "GPSImgDirectionRef" + GPSImgDirection FieldName = "GPSImgDirection" + GPSMapDatum FieldName = "GPSMapDatum" + GPSDestLatitudeRef FieldName = "GPSDestLatitudeRef" + GPSDestLatitude FieldName = "GPSDestLatitude" + GPSDestLongitudeRef FieldName = "GPSDestLongitudeRef" + GPSDestLongitude FieldName = "GPSDestLongitude" + GPSDestBearingRef FieldName = "GPSDestBearingRef" + GPSDestBearing FieldName = "GPSDestBearing" + GPSDestDistanceRef FieldName = "GPSDestDistanceRef" + GPSDestDistance FieldName = "GPSDestDistance" + GPSProcessingMethod FieldName = "GPSProcessingMethod" + GPSAreaInformation FieldName = "GPSAreaInformation" + GPSDateStamp FieldName = "GPSDateStamp" + GPSDifferential FieldName = "GPSDifferential" +) + +// interoperability fields +const ( + InteroperabilityIndex FieldName = "InteroperabilityIndex" +) + +var exifFields = map[uint16]FieldName{ + ///////////////////////////////////// + ////////// IFD 0 //////////////////// + ///////////////////////////////////// + + // image data structure for the 
thumbnail + 0x0100: ImageWidth, + 0x0101: ImageLength, + 0x0102: BitsPerSample, + 0x0103: Compression, + 0x0106: PhotometricInterpretation, + 0x0112: Orientation, + 0x0115: SamplesPerPixel, + 0x011C: PlanarConfiguration, + 0x0212: YCbCrSubSampling, + 0x0213: YCbCrPositioning, + 0x011A: XResolution, + 0x011B: YResolution, + 0x0128: ResolutionUnit, + + // Other tags + 0x0132: DateTime, + 0x010E: ImageDescription, + 0x010F: Make, + 0x0110: Model, + 0x0131: Software, + 0x013B: Artist, + 0x8298: Copyright, + + // Windows-specific tags + 0x9c9b: XPTitle, + 0x9c9c: XPComment, + 0x9c9d: XPAuthor, + 0x9c9e: XPKeywords, + 0x9c9f: XPSubject, + + // private tags + exifPointer: ExifIFDPointer, + + ///////////////////////////////////// + ////////// Exif sub IFD ///////////// + ///////////////////////////////////// + + gpsPointer: GPSInfoIFDPointer, + interopPointer: InteroperabilityIFDPointer, + + 0x9000: ExifVersion, + 0xA000: FlashpixVersion, + + 0xA001: ColorSpace, + + 0x9101: ComponentsConfiguration, + 0x9102: CompressedBitsPerPixel, + 0xA002: PixelXDimension, + 0xA003: PixelYDimension, + + 0x927C: MakerNote, + 0x9286: UserComment, + + 0xA004: RelatedSoundFile, + 0x9003: DateTimeOriginal, + 0x9004: DateTimeDigitized, + 0x9290: SubSecTime, + 0x9291: SubSecTimeOriginal, + 0x9292: SubSecTimeDigitized, + + 0xA420: ImageUniqueID, + + // picture conditions + 0x829A: ExposureTime, + 0x829D: FNumber, + 0x8822: ExposureProgram, + 0x8824: SpectralSensitivity, + 0x8827: ISOSpeedRatings, + 0x8828: OECF, + 0x9201: ShutterSpeedValue, + 0x9202: ApertureValue, + 0x9203: BrightnessValue, + 0x9204: ExposureBiasValue, + 0x9205: MaxApertureValue, + 0x9206: SubjectDistance, + 0x9207: MeteringMode, + 0x9208: LightSource, + 0x9209: Flash, + 0x920A: FocalLength, + 0x9214: SubjectArea, + 0xA20B: FlashEnergy, + 0xA20C: SpatialFrequencyResponse, + 0xA20E: FocalPlaneXResolution, + 0xA20F: FocalPlaneYResolution, + 0xA210: FocalPlaneResolutionUnit, + 0xA214: SubjectLocation, + 0xA215: ExposureIndex, + 
0xA217: SensingMethod, + 0xA300: FileSource, + 0xA301: SceneType, + 0xA302: CFAPattern, + 0xA401: CustomRendered, + 0xA402: ExposureMode, + 0xA403: WhiteBalance, + 0xA404: DigitalZoomRatio, + 0xA405: FocalLengthIn35mmFilm, + 0xA406: SceneCaptureType, + 0xA407: GainControl, + 0xA408: Contrast, + 0xA409: Saturation, + 0xA40A: Sharpness, + 0xA40B: DeviceSettingDescription, + 0xA40C: SubjectDistanceRange, + 0xA433: LensMake, + 0xA434: LensModel, +} + +var gpsFields = map[uint16]FieldName{ + ///////////////////////////////////// + //// GPS sub-IFD //////////////////// + ///////////////////////////////////// + 0x0: GPSVersionID, + 0x1: GPSLatitudeRef, + 0x2: GPSLatitude, + 0x3: GPSLongitudeRef, + 0x4: GPSLongitude, + 0x5: GPSAltitudeRef, + 0x6: GPSAltitude, + 0x7: GPSTimeStamp, + 0x8: GPSSatelites, + 0x9: GPSStatus, + 0xA: GPSMeasureMode, + 0xB: GPSDOP, + 0xC: GPSSpeedRef, + 0xD: GPSSpeed, + 0xE: GPSTrackRef, + 0xF: GPSTrack, + 0x10: GPSImgDirectionRef, + 0x11: GPSImgDirection, + 0x12: GPSMapDatum, + 0x13: GPSDestLatitudeRef, + 0x14: GPSDestLatitude, + 0x15: GPSDestLongitudeRef, + 0x16: GPSDestLongitude, + 0x17: GPSDestBearingRef, + 0x18: GPSDestBearing, + 0x19: GPSDestDistanceRef, + 0x1A: GPSDestDistance, + 0x1B: GPSProcessingMethod, + 0x1C: GPSAreaInformation, + 0x1D: GPSDateStamp, + 0x1E: GPSDifferential, +} + +var interopFields = map[uint16]FieldName{ + ///////////////////////////////////// + //// Interoperability sub-IFD /////// + ///////////////////////////////////// + 0x1: InteroperabilityIndex, +} + +var thumbnailFields = map[uint16]FieldName{ + 0x0201: ThumbJPEGInterchangeFormat, + 0x0202: ThumbJPEGInterchangeFormatLength, +} diff --git a/vendor/github.com/rwcarlsen/goexif/exif/sample1.jpg b/vendor/github.com/rwcarlsen/goexif/exif/sample1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..87bcf8e33a54e6a6608c7d789175395f2b223b5a GIT binary patch literal 80603 zcmeFZbyyus(=R%>L(t$5+})kv?h@P<4hx5%0TM_cxCBWcxI=JBkl+@<9fBr6fZz^y 
zfb93nKIc2<-hIz={@eXLJzZT@-CbSNJ+t^tHP@5Z-++6HvI?>Q3@i)`00RKP^&%ds zjE|i)04OLh0mvW+6~Kal1#mzr49xdH8Wk*4z>EU}ciZNs9We~TEu9V0B){omkY@O; z0}IleU|VdktO7F^NaKNJCz$QR;cwecf;8>(JDLX$0G7b4s-UE%N-3qLsjj9esjZ-< z`kN!oB@f&*x*7YgQq9HO*~*+!9qJ8nyPE?S2M4bZ2fq*p7bPc$5VwF3mjFNuq=nqs zng$y6<~N-U(p1my<^v830I^5_JXmmWadE)K-q~6V>cIb|TR<8%?yjHV8-6_Mtu50a zjSzpwUjk{Q`2Xq?`8R#jJ<1<6>TmrGP(Lgl37`SxdmtSf4-4Gb!p98&aevV9f6&0& z-*$3xaKOC18|(TSwC(Mk9}hqW!TqMaK^p!oEPw&_NdR_yKl@G}90~w(kZ$e284u<+ zedAkLkiJ<3G@t+`_f8+I0D;O2I7^Z&QJoBMgw_9h{M`F5^gNrnOd zRyWMw$-~?M;!J7oL1|&`Zs%xcX-)~XrnI!RbB4G>DE~;HIxGMSE@W`;-1UrfTi%QV z3py3%Mhw!bH+2BvcD!hCP`KZG8ITYEo1YKzk$%%HAWa79pn>x78zaF)59XUu_ADTTMh47)CvY%S04z2P95&2#Cjj~i4(2A^+JAF_M?i#wg#nO2p~AhJTMG{d zj|7Vfe>+7OSU7lW00HL#E~g|S9+d_@HJ7<-aAjN;5)Jo5O$$r6kSYSYk3+OPQrcFb z@d?>E?ZZpIcEPTQZwL8v95`5b7ytnglrdq0j)Z}OgGGeuCTTRDs2Y!v@5GiwydZjOavS1FtdSxim_Z-VGN-7d#ad{hUZ`Zz|q* z&7aW&nL3m@M$Cy#F8!%tzEdPWc`PATK$NidJ}n0SzT5y=w_-`+w#;b|;lyB%rO=0iyAHVne2#%14Ql-DL|czRKDk3(44k1a=Y*x0JMxBO@% zqr$&nF@DmNMlLiaeU{yD<<#OsXNK{@l87pMqd(35byF7O5u6eNQ;9=s6_R*B*yLJ+ zX3Had5js|NxN=!fbmYc&BnfW5B&Jxg_H2aeg` zx-)sorPqBL^~-TP@0xjAH+w!aKYY84V{-OTGrYNRFZOgPT0`6a>Ri=Jo^tj@TDw=Y zpw7@Ff2dByg#;-{s^Z+03WgbrXsrLQy=;THE#)Uhp~V?(*cWmrv`>52w1h&&g}b2T zEPUB0@mlg!aGbQ;kJE9FEa}&yAP5jMgB8O7zV(-@#7@s}un;Md$7F^ zBh7`_ghePShT^kh#G#F+;&lXqK@@=3ave-{mYA2zN)Yrr? zA$%cRLNJ824VPQXpQm-?+m-Kg_w^$h!zD+Qu zbW*frkZk1$-TK&kB?XhV@n!$H7Ct zA0>GUaRiJz6dJqTfp5w-*U4U4SdDsP5nm0G1YzE9f7-ng7#h%lRSYKv;dw)1IFma6 zJ>wcMf*+X~vxUjp6TadKNK%cIeH86q5o(_Rb_t7tR@yQz+! z?6+~t8mr4Rl2mUaE-!rX*B~5xJ|P3Y6PSx}^~N^$)9YW6XUSQ*W6yVxjURxcIvbfJhy!O7(~bX6e4 z=S?BKwZ%1n&gHqiXP(G2ZCA}sBDlS8-!lFH6XB;O(&r&8gGWKH@C5DRh^tIsOC;g? 
z)5bSeSdYnMXO`)#3(i#xe8O~P)GjHd1B^TYG=tf9p?pH(sJ3S{>lFn;wFcuua1##BCjAp44`B4QBVI;s4r3`L$@!t#BB4CI&P@Kp7*`#gH~frYSW z@)Y;wE2ey-U}jBrBTFRRoWa_1qz-@PPucmoB|bk0@}w>J4x6A&7cr*w=yn?$Ok9aM ztf<3*GNxgUtV?VB-?w`R$tX)poq6xIBfACetM8C1@*sEzNb^@=oE~=W*hQ^m=MzV# zunjsxmtBu7R+-*^s3pcVNQahqOwTOo)u9~H7vhYNty7Fq+>*h)1)Et#KX!IDFcC*RIdlIAg z<^Usn8u_E9m#tL1lo5TZ6&E#A4-1WP3e-&Hr`&@mYN`i^>pOj-~%Z_{*54}vW&=`(y2@|~kklnr>Ze()d^W#&`U? z3l2_>a6&m`CN_4ddTK8+G$t>4Y6i4)8YlR`rjl(3aUfyinugc$<>IAHl@-wHMb9K(DACey8&!*6 zUw!|I9%CPQjYtJSByus@69WZ`LdV(oy%7%W6X`liJg?a_M%YF`ycn6w=PzP5bDeQd zUkGfhlU*p_{FK;$sca$qX(Kh6MQYvX#EUPcc~HrY-}UNYrWLTDtYTBWnTS0S)QUfR zWbIhXP(o*rR(SmD3l7Y(A=GAi-fxi!a&N-L@(q*iCJ~JYBh`}ZH%zmi-ecc`x2fMK zdZqh{ltsdHU-UbU({CrpLtYA!Jl;C%_`y-%YF&Gp=n*@in7AuK9ahMrK$X9?_*l^R z8Zcap%KJi9s7H%u3lpM7spEz`Q`CB7iCnlzw47$&FB=mkB?BjxlVuglwV_eT9*p{M zCivjVHd+*&+K^;VNix-IDayE-RG#qaB=2Wq?*|aB$glEvmr)U zd1}oo4EK(k!BD?xdQFy8fT5YFF({$5Yh(6N+~@SXGAkw6Y!^wMRoWdWjdy_W9zCXJme(kTPjfyHB>#XqMAA(TH_;WOGck7XjUOc) zFgEVPnY(_qq?R&Bg&^S+N61^2U_soy$7j|j9eE9xI37ONY(KvE{Hqg=WCRk@QqPmD zr@`Xsh}!&wC7{HqwsK}AHp*&%;;Cub^Ha|bk(S``7jr)z6fKX*U&xt}ugnBRRhE3^ ziR2)Hh;{{Q4p>?Cp6&z-@|Ro#aS@pIYI*qFO13kao0RWXfX!b5B)ri^)wF|}$vQrl z(p^LK=tjkpGhgPy=Z38kQK<5eo{vjIYCp~i?vYl8dBczXs%eiV43KuTz{(?ik%(xy z>eX^jppudy2hTe7!5-Ke(G@gJb@Q79z*kJtRm98qLcuQF_xtd|Poai;xqG>#2}yvPQymwshu zLHI{JAd`=0GfXZpQ{Bpg@R|Vv{5@eSUS4A!Dk= z3uG6i#Xrg!;#Q1hG0E?=Ykrmz7ofX3fbnC!AmYt)!v_>ADGXOTHR!BS+^F^pw8aLD zOSYdY@m1s1^CL0p&GS#_(9hD|ObKWaJkD+*0^Id@ z14!LKApk&rt4Z|tI^6zk9YksF63ktz8|dd2a=2+je`n7f2zSGWy|w4?h6JGrc57=8 zoNxrMH&Dk-t2<6sYTAD|T(^7!@cNey?=9yBX8O&!?ehez1Aq&NmHxL0?#GSzzZG5o zpU7@Ka|6ZQ`5tsFti)aZ?L~4>a_7Z6{}A6);UvM!o%J_W0LB~42e&*Mzy>e{vkTyU zBe;?N59tryO`Dsxe@}lX{6}zeBYyLpJslmv+6@MVXaV(fwsKdqu($j#xu&HDCs@0= z@qjkC#2lf{HaGN5QifQ2{8@kK0d=~o-DOty4l%)fSI-5j-L^tHjE{&4=^IV$cpkiT&d%^f|o z&29e1!?c7rI%-3FJQUpJwN;cYpisw~4w3(2|4kXi7V73J>1b#3m#47qCNKXN8*GVW z1+g~wbi4s7QM@2-9{-KH?qBS`tD{-iNI@N;Zhtrz`>y|oa{m#4O#oG>^Np`jJfJRW 
zo*wRyKNk+FBe;~j6Ys7G_Wzd-{yXe+4&1ZI*PSrppp^)B=q70JO#;6Vz=9|$5;6)Z z8u&%&??Uj60QZ;h9smOm1LB~t2uO&?2yp1!x7a590~}5SNey#cD%W5xM7+4H%7@f6 z_?jOrxOv<{hNO^a(q=RX^<*@m$`QrBBR# zR+H2@x^j?L+cma&sB7&NmYiSLJ-&9tFQ;eY9sa7IzGve5@h$WU3lCzvpur$W&3kLY z15O-x&;nN~Tm-J*IJ_GRK57pA;-;}!bPI`>(!#fN=h>x2yfFZYj<(atHz`b@EH4;gCpW>S&fU zCZU2O;$D~*&^ddxpM-@PzUmf2!Mot)Ui+Ars67D%QIhfnWfT@l3Xf({)wih^8N6F- zd7`?(Z}+SRSZ0S!mJ8wU6RzTNer(*k20Yyj=jSEw%`F^g)e|--IkF9#mtt~=VH$Y6 zPJV}NJJS{>e+_)efgX*zC6SOl|JfiBXXxN0QnI_Lwu>gnW2kypJCXSx=+ z;k)41cFChKjg^->=yVlyuE;4g=?D$FL~!j{9)7Jj7wsz6$8s364mbN9(RtZwK!3n1 z(69AjX2By_<~N?nVKXHL-F=)`cC%}>GvW&Cg2}q8b*6!t16Nb`Vo-`p1d}0nl9`Tc;NjyEzO!d9ef-G3?KuBPxCV&F#b=nt zJ)_UHj#6NefRmFl=F&b2#qMAlqGEvhs^JnKT&#yD&I>~g-7N+i*{?n|u8(=Hn;=|& z{kAntBJYvuKqX8_zG_%x=PptVU3DAj&D#oavH}RFGLcbZ^qps$bN21cE@<_-ouzu~ zX!y-lKdy6(50ehDO=CglbqaAUt<^-_+pBfIp4D;@Q+GVm&Hg5pwv|`q?IWQnS3Kjj zOVRXX9pPCn(Fz6sHne)(PxN_1eQl%TREeap?2oWy$v#NcWjeF>NTnru=?nTT)(I*4 zj&syGQ902lR%-|1kV3%>Nn?_;d~YNM_$6&-Nfn63IY%cMXhb#%(lf zIbN1U+m1{#b6x|%=h}gTg^|&%YUfE;D}GI<*~LE{-c%{QdGAQXA-Xl|ZLl%9FwA|$ zd_g6O-dmM6ZQ}jj#mA-W6KlA*i+jK|kkaE^wV+@YIJ6;??ZyQ2)hr^-W65y6;4*78 z$Fj;6#E`#U1G%J~(TwW%$%K+zyK2*pru>W#K7QSDVP&;-=hr`n3*u`X_QQk5k}G*t zMq$t?_rhEaVAVUXO`Jg#XJ;o=Hw#4%4y=Q!x{Ov=gYqgC&jeolddA}VhHs?HDi>Ry zn%a)o*_QGmjvcS5V5^jOFiLu#`V(r8{^qehWVNl~`3}YVE!p>{@1O1x`?tsLP~;G8 z4CG^tdpfM=0k!OTl3++6F+9BPNXShRArqn0`LtP_)9rUrN$ zJZ;5V6RwBWstZe3`6TCRoTA}@9k>3ow%!e09!%y^K)mq2T&hxLBJvr%-V0KgpN~>A z7RvDeB`+(B&5!r&MYh4e%*6CAJ(b|g&4RSAfnjGwA^C~D0Z-`4&)!C)FDb0=8_|G9<9gUvh%O~1gjtEG)7RtFR&@4a zd8(D3Il0o7VN;)~l7D||G(I9Z-~RPm0AB+(7glqyA{|yFcDG!p@fKA#?y~8nlExPK zKyDP$E=p;| zr_6BZ>KXg>@L*qg%yg7P!MH6TEr-Tj_@>pEczvdxopd|p`9aL^g6hJ#Z|2GL_?rMd zG?rHS;dlDq<6&klp?Rkz85>jMb7O29De3Fp#XZBnB>YH$gD>^_*MQ8HwxCb1)%{@+ z@yMJ6(i{{V1`=_5Z`}Y(N~eQ0zt4R-X_IFyJYs8)j?ZclvTU-ByblS&5k;Ss!2@0$ zwsy9e3+@C?HWzVK0(mRfz|2^Y>s)|^SlMH&loJKx2jzhTIxbR%5R-3a=TUJ5!#~T% zB8{Q0tOzq?*fgP_c;$q}X3FjL$8FD)7_P?-hVIAWu4if>#5SmF@a>#*q62PHimaC; 
zQgG(&rmD7qk8`@8A=An4E~!_aPLjIf&zPN8`qp9fUuM2Cc%$#1*g( zuHqw_?^F98>_fN?&k*d!T1Q#hs;Nnrn_JR1-Jkjyks1WVP^5f+5SKZf{4$Ltz-^8PB% z(R|dBd7;%)&z4^5-8ljKnBUa1(c-#q-v*fch$Y@QBGw6?y&wNFb7lf}+&GS;-vjd$7=YUX0!@>-JV9__R)yOWF#C5@X?dWZBm*vSb=vVc0UEr_p{Q>iKrarV`=L z7_C_zdKdQ;ih}pfYgbF;VzeHHT9vz|9tJAMCK446=^x%`_)PI_A2NkTIV<(r!rCg!*Qb_&@wDvIq^=)4xTa<&Cya>~ zv>obC)$H?+>q;GV4`U~Ed*sCP99nlGrW|Hub*V}D-Zf}aA_+uFpFej`8TY#e>cTkY zH{o8k`n?U?FR2$YIi&GDoq?~ilT&Iv{ggk+;J=7 z3Xwjz`*2mo z*4tGT>oDC@^<6}O@JO`WNmqNV)uAPFEbEZEjATyYoTIW7D^oqV_Ol<-347j8-^sI_ zS20nt#_WDlS6`7hACyAh&((V~J2hflmO&EEa3c+gPw$zqHg4M-9tlS4Uwr9o{2*Ah z{3He*__h>tEZ*&iP9>ofNVWUrEN)!Xeq9`LNIs!2E^ooiTYGk9`fE_zH%5AJT@S{y zRte6;HtRFnjPr)U&an1iP{GCbby1mxx!8s2lyPVB1lK)-WBDgb7gB~sL65h}Kb54O zEhNsAFGz*XtPf8hevF?nd9+RMJnkEPqV~`3L7$QI_8KjStd|lw-}up{$V?aLK=mou{3$IXu&-*>afv&P_d28+A!BqHP>$TpCbe$$l>B<0vJiMCx!z za(+HuLm%yi6AbUPzCFH}=o%+1G+!>dV(?Wgl5c#gQ|@cQXYnCHTWhYN&`718y7vZT z8e+pLPRGLQe)HYM;lyj>CT&u6Da19G_qoq9Gs-*41`njtNVEd1o69miJ>EIF8=fBW z&QLVUdr-HTPBy4^VR*t_t$y7g6gU|(6DA8C^$7!Cq%~-2G*|ZB8{^wP)OD>93)NYM zVY80uGL7gHGY*7E>m+rghTioyJ6Cbm^#wXf>s4zrk%?=Q0rb_QWgm0ad50(bn$;FG z&SO)TULXqFPOiEsk)v3?Np;Y>w7DKF7-bI1vK*KumCX=xxL6L_pV48j^%N=Xwj7H; z($Fd*PdDHb>(*7uY)-mFFQ{I)2Cy!r4TZ|Q!g0Rx0pA9Uh$rsJ)j$01d=0cb(2Gho z8gOcP#chsRoPF6d>~zXn*Q!}}6*X>B-5?(|D2=^Gk$&o5Hl<7yjicsTcxB3+6scF@ zx5+y1NFZ$I^>wR)FRbws=EcsL{bCP!97~B)j!OD~bdae=d<0jf}!AB;J=tBtaW3q zEA2nlF5Q?#xJ?# zp@XDZ5xJM%f|G--<1IaB(Uf8eGuu~nWBe9^OKqIYaX?LShmog4?Xlme&4Z?;Yar_y z@b5L_UliYO-7(5vT7K1mxwcK&-~wD+iKn|^sY6nX#aSD6Fl~OYvfGUcG>?r{cC|Ji zG)HRvJgWUBngrMUwL7^pbh}{v5-|Xetwg6**9niSVn}yc-D_N+ZjHCjV8^@O^Zm%l zVf5P#o`bVm11cs1?5v5g94Ya_S;MI4nbkqd9nY+nBx!3pKeQTax61~bN?NxLx+9Hc zxzW?~H!ry+B+eZ9_9mR2j~UVZe?lvpCM=C+eIwyy(!Ilhj@oVLe$>I`k2m62V;>7f zkNaw1Nev_&QX8tFLvziPmdFur1?tH~g63q1KZQ?tl_fj<*IP^V;|x0=vm z0m5f!GGOfB;AEN=&N0^N07gaej~>N$4sXKIoKGoCmEWNelk9Ovka?QXa__q^gr9+1tPn$DrpIco@EH{i5-x)_)( zx$bohTu^|l$?3`>I?0|^4kBhmb z0|boCgn;<0DE-d+R(eW1D^YqqUKI`%7b%FXoxHCbM9WuI+tSy;QqYQCTntr2{I)Ar 
zup3ruJ4X+Qn>!`M-BOfFN|WkF>>~t9oFE?Nls-<5&hA1!qV%^7gh2X+%}!5wtK#7x zO0Tb?PU!)KIy%^SP;#*euyL_+bF)xdyO}#dyrFIml$>n5^kA26R@OqA(z3rh1Z$%7 zzbEGH?ak)R%?5R|VdoSS6lCY%V&~#w1vOaRA3J-P`>;B@Q-kB(8e*sapJspahn@bd zJHUZZDQ}082B+g@=i*@p1p`+>sNF-9-qX|0N{H8**NV%Ei;opzZqCKZ$!R6XYR(4% zF9Mtbf;@u!R)UsX^r)17&D-2XMdiOA@vl4J@$UpM&zwY|iUH`}f|48}Y)%CBt{*ed%k@CN*>tA>MBMmPaGA1VL;U0oo)^A{=vaR#9vZxHkOBbMq0#JY>6LWGBh zM}S8}KtM!61Tz|VHWe8S9TgP~6&3y7UEtL3;(v^yx~s#0r&ux2F$nM7Bm6&zrGoK> zIe>!!gOBdcq29z&iG#6J*`S$7x3N?=2Hzxj1n^iWcpCLCmI@a3Z--U?9!qt5gcKeY z5eW{wz`BD+Rk6X7#b7X%qy{3cIT%cZ#|55K#lMSXTI43M_(kIuvP&z)qZOZBW$E6| zD@~t}Q_Xi@+sdP3cnO&<6a|%#=xL&iY;KM2hzdYyNJs>>qJpe};km&F24&$$t;m+D{HQ#~9@z z!mDPKmNW>cR_^&EcAp-dh0Ak~3SU=}M9=p{%*9BGoWUxF;E#n9avl1J{A*y>`EVDX zP+TB1KTdyMringQa-dByv(xt5zHnOYmUk6t}e23!KN6jbs^2272gJ3X&X3*kOm+=b6)f zRvz>XkcMciRhrOvF^O%w>>Jv17;{rHxumJq_em?1Y!-;-DWVYR!_HoEbqrSj+?(U3 zhJCP=U^d9|p+4I{jgxOQ+7E^kZ8zC)H~}Wre&O zD}vX&SwH$zH+T0Tm&iyyJnn2%&2YwyVPjMBH-W~eLAz#k=4k4`DHO<358iW^zVGll zX)Sc%M*$b1lF7+WeqPD%Tn}jqO=LsOjc}6CAcQkK!%i!WdmU2?uO#9L#qd@xbe`d1 zUIPSEiTr`c>oNyIJ4n}QbyWI3$R>rI;YK`~i#EcS>LEzrR$o2V5Iv$C& z`-`!8v6DQqEF~V_+KTK|vRoY+Z%uCsRlO8AILBS$ST+B(SFh3%)g{sTAH*EtWxn|2 z=V#5A5XR{|`be0wq(nGf7sVa7vs0TnTG-mi3X7gJM6hrr_9ueYHNa|bKwH(D_L8#E zY-rH(kbzNEY$_@PS6nF&rPCd9)?I0rJ0->Z&`8ma)J|s0c;7S>rH=|NRpd47{NobS z+RKZ2t=1$vO;9HGKq`L8w&#$s4}JM^^gJlurCw1( zJ(&?-!3sjwheSi_|2lTm;BbJ{`}*ttaZ+M{ffTqzI>i^e%{N}piPJ1u(ihK6wCT-o z%*Ma_L^+cl@SU8*gvq6`fs8ij!*bgP`|$}}k&ln2p1`Li#C-zMG!Kl|_O`1gLLE(I1u9T?4+<;K3qlT6-xy z>mSUKnkO@EHFVigs*_cYh9Mt<(Z#J>!B|Y3kDVXbyqwXA1STp)T}tTQCD~bB0|Q*w zKnLX@`WvId1Ig33xY*N);=AvIjFRJs%az@@Cr{S=DxA3$@M5N1T=1fyDrweAy_C_g z16$KA2! 
z9tX>cHpQOAwyI8=#$pjyH^!tdsY!Lut}YRl1ZpM4Zn<3_TtaQhajtLeakA5+@l}Mm zMNMS;{e{o%*|Q}oQ0wG9 z*$?>^IT2}ki;iO(@E^;M#nH1q;TJn-cIC?z+U=H&RYR%cXQIYj3Kal(6g*B;sxI@h6Lk5@{E#$1;LOzzcioPormR0p$ z^drV<4$~?;Gm7FL9blVGk2JpOSNy&$Sk?tTj}z{VY8dQ((W-A3x{OO_F_m}uPQt_# ztpBDzIGNLtpEV+23auK4ct zNEAjsg=~jJf8O4zkivY)7EEYu#G}Kfb-y&e2U|yO>_&58CT(-;@Y5kDkMVGicXRF) zUI}8NXMTqCAhhPzfM-|9wNJTYmgB$m_C#Z#HN%w1TNVAJvBx@b4DxA7EEv?7Nmq&~ zDPb{rZZGV{`+dBC%VRGo(sf^!g)V)BPZjNY=$n`>#gCqOpH>sIjmE~w$JE;|TPkQt z^p?d&2VL(O7BDG6KExU^=*i0ROqbd%zv`jL>t0^y!-?N#D^$7n^>t}?It#gc$vvId zl@)Dc=T$g-pBX7qyVw>fb``Bn(%0<=KGB<;#aBmK9AV%X z->354VzgMSx2rLre1g+|59dsuAr_7Qz0>i^Bo*9&u0g|gI-jI51rTEzMPhC863a3; z?K6~c>1~p5$U+AlAr}dqxOT1O7$p468vZJKr6(5p$yS>6;Hp-6$yK0!CAj4(e)3+C`H!{mOE4Ba#l&6i23RU6&F7 zGgE+1q~A%)c`;e%8b`rXKY&$?VLfm6GQRU;TWoy4eL<^ZwXS7bk4h&5LO-TAJO-r} z*3`W)$l-E@wP5VYdG#?{-iz!;g;3bGBE};-sl?ZmD5=mp@UwV-% zH}Ql?yj*~Y%7QO(jHfGuf)$utgxi%)HctLSj|Mg$0t;nWo)x~<){OE!-XxUIg(>@r zh%Dhp29}e)pVMA?4uav&N^u2esp#L2yPGv9GxK_hLNQJ6GwfSbH{RE*G;X}-ZXqb} zEQcneOtw8FEQ3OXX;~GL_?0ZOXpOlA2Ri&4R6(1`f?xQ&$3;% zWt!3O)xL<(UCf;C$(+W-JL4(%reFwnwEz6kHBj`+>@3y#Oy87Kd8z^HdiYz5E?%Xn z-_t=1Q+{DA8tuU)Ek#%QC9=Ke(PasuICW2Y+@x90CrYpt8BC@Uh#Z#{QQviV61{kg z_)_l;-{#pKE~gEQLQ*a4V9-)s!CYeULCh!ESGzIDT8Gxouhwldc~VH5BbDvT_PWaL z?cyyFe7dIyPAYoqc&6x>td@pvc5DsPqaAO2Isd0uiKPW}=MtI)yY0i@a*|h;iu&R^UXLCMFIl#Aqd9<&mw)r`h)R9HrG1?dtT` z^n@u_qeYUa5L~S`{UQ4W&hqK= zT)UMfw%FS< zEP5bghhbY`t)g_TCiQ*F21W{SBdpMh1&m2B^t{&PpYQ6@8xB0F4!y}tNkT&;X zC|rCg?51vn8!q#q$2b1qJ>dFUgO50mG=4IRpkt5`D~HhYX@(4b8~n-PfU2_9vtU-1A#wGw#-vuP2U;QN=fx;xS| zl$QN#S`z=0D~bu^Px!x-OtU=2Pi>$bXN$WQE$DB~NVKRgzk}7~r7k4&v`efatYay? zA3dEBCtL;|1Bb%xW9?ir@$`NMOB%~7{C|ihQBrV z!VE1jTX{yt)vX9UsA z$Z8kW-9fl%QvoUq?N7KEU!K`GhqBlyQO8TV3~tNV8GH>mO=37Rvh6og2pRo)h}2>! 
z$WUe$^j2`UF6bl~okEMHRS9`{*ZNai(Dlf-D_vl+6J>H1^%`h zr<)xjNu;esORljmDj40u(>DS;-BdLuPu*$8h#bkw2>&sLeXg?Ns~e7E3{Ao(o6CNf zMI3AESL=3}JgI(bQUsYTz!>j~iS@q9kfv2gAKH_)zWmMjhsgv<=0~!u{dH9`Wc?yZ z$jq7uYHH{&8z0!(!k0=B>t^SiyV~pUYgC?l&=;XyHe%wtB;QlpVK8hZpR_XwlFk!_ zo~UhgvZF%s1NYV9c~4?+V1s8x-6_~d7a|j=!;ZKj`@0_z0)krS?)Taj_(qkz>zM4P zty;inzoQi<)6XhhI&42N)PD5&WMslzH82JIEN7nIQF=+>Bm@C)9EjGBm$h~bF_k`v zNo$e6VJ@7{7VBB>E|`zCC_0Q1OS;78|1vB-fmG2<|ID^&q!F46J{YOqjh;{bY%sBF zJ+w=|l6g(aC8{^PQ?c&(b~vx#!b0H0ZO2qesnE{8^C~LE=jz96e?jkxJn)!L_7GfH=fBb$+~%7#Z;w`a~Q%*75H>JOXYTh9Dz zL@(*DQl1Zb8b+oqO9)2^X)K@DU8R7Ar)lsD5zRhQknkc;4^p-d_%T&2{_0uK5G`vYL6dOp6Cdn{&JaX!F)0f)8t6y><=+5LvE$kQnI7lN;WG?yEVaDHN(T3vqnJRk61 z|NL}&$Sta_tT|9Lmb|T9LExcd!T3&VITbq!AnunTYk;6zEi{b=A~q&lEg@VzH&+e9 z;5SjabaCazIMZOd5cR`UI@^pu(*n|b87VU#MR($=QX{Vox;?~9?7c=MT5$#t@QS#M{gNvE@5nwUWCb@-?tVX%;4@J@NGAG{$k#}AmdOLyiV&5;OBn}?F;`H^y{)zsnbFoT)lrPprF7}T-yWVh)Kl6kK^{>G);+W?;;tceDi^-MXdNW&W|MwQ z6KE^Gt@Dgg+UV0r04w)eV&TjJyT#f$#uIn@v@|R%Qd(?1jgu9lCxwWT@qGZW5}WU9 zHOW*+D&vMe8z)j;*^MSl%oKgH_mJU7TCLBers9dKAQm~36t1LHEr{mKUJ0;`WvG$qg?9IeQ1a$q$3AqALoJ;OR z=Hwg(J^@vldy~a^>aFbhEe(5+*xsuE()Kt}Sd&A@e)UXUppOIYHn$ z2<}8TQBp)aH_~0EnrFw_xj%w`#`NBNv1=uEc*EdpL1DeQ(NAf!3$ZaHQo;|ZENRz( z2~K_XD>~wR^iq>hfrK6w0?!p$rsiqg_%70VO5tzQ$!VeE7~MrW_msFpWS zMg@xZA}aDqjQ2NO-U{6M>Le za#MXg9vbPHKtfp&Tgt+#gbB(A=<%qIaEwU1MC?dJ@(4K!X0tDKj)Q9Gwu`>7Q=ZhzK`#WD=jUt~1s02u#Ek9*V@|p*LSl{>Ag)*;cFQ3l`rrX#6)8fgZQ& zY9s>&r(-cf66}c}vJ}&kY<0D-T!bxkkHwOajEmCcZZ^}{VxS4U-8+w~Jt?(BrxpkI zOXrUJbSvs(@Y(^()VNBVwz!t;MSVm;<9R|`iawBeVL3RLX$3SG(#-dlwdQA)T=0er z5z23u4;A`780#!EoR;Hd)~b!2O(CXZsnbDu`ToOc>X1PL;T-=;hqTFOzDwN2U?CiYmb_ z+`BRn$2H^i51GM%GmaSfki!r!63ID2sE6UNfgd3jcr~R z*bAhXJ{NiSOWIii(*443hpWTg!GXNhAM!du+B9fDoc=x<6!S_|aqt7uKDWR4zyR~**cdxb=w@Kx3ucOx zHSd#%f+D7H+pFR3&&B1-r*0o(h<<#Kuh>;f)l!MSV&^ihZPSofuEbIDnB?}p%&d?7 z7K87SKk=i*Zr>{lJKosq5Q5JoNAw~^%lsF?A=cRk&H-e(>}(eQvF~UlFT^U#=X!8j z^GMmKqur^78miKYLxOh&miSsCZj^X$^3wZH5|Ph_K$ zf+u*?ti*@5ag?2|zV7AM{s$3ikr!FN-kI)Ik&_bXc}B@pFmP`y^&^A;vWP 
z$jtY@{QMbI=^9_Amr+NG1T^My~df}>w*tusRuR&L5GC18yd9>5d!t4ux|eAtqsHRb7znG(hL zkBo@oT35i=$0Q*iHA}S{xvs2j;l3*EKsPuaw&?Y zL7J3R!)`3HoJ74#PAA4{3+cmT1eeKcuP!}?l zjmn-Z2Gz^)xbr9_a`!xvMM|LLPmg@OjhcjlG%;d0(UkQgG25*lH~Yo_{{VeUvHt*7 zau~k0%%4jIqH*eacxK7LfB)9uzFz=XTl@ON{{X-dn%v(}b{M%XkQy3y%GO9bi-w1f zz*$>h)c{v5jynxxuZBitEh2C#dJ|u#8H!{IHV!RmGcVcVDgOWkl-BK$>tvu($Hl=c`YFqadU z3LWr8Dnw9q$*^xA%@d{qZ{Zy`$mRM`52+CnwZ(|Fh8kTNDjJma6nmcPf8lZDZBrdP zN1j&s(Br&Oz?9qSbtdG_G7D=wD`b0GM zamFm>mKw&&ftkdAX(TVjl&buH3jYAD`x!%-eEf%Q23m1F*Jqmh1Wr z!mO=%1SL2hM*c}9`)Bq_kA(|;rR1v@EV~YHc`YaN$#MD~(^k+Vid$<~!rfVs@j9p# zKxtJg{{Rh`Q57Q@^Ow@Jl!3L z9KA?OTT9kb-eGfevc+dCe^;tIi7r|#k0#>lNcg`a@co~ME%76JDkD$PpHH|p8l}Xt zK@!Jtap^x8RbT8zc_n$go(=YJPIfltnp3*f?c>z%76Am`S&Z{pJ`wBso-TNo;wi}w z53!LL$%wTGod(ohN?^2vqua`|Goj%uGHx4~WAUs10FeBD$Xs$vLw31F(YmFo#i)yG zi&E`#C#@;t8@=SHpm9YtSmXZyesX2u$M&)~@)-_)@zz2Vlxcrdp5(333X#ans|z!j z6YQe%Pu!IyigBh+cOBBDcK30CXZ4vb8q!CORTYj<(^5ExLcG4$77g}4mnRMVO482I z`YEVz(^k~UYIrNOiteGg218xG7XJWO_OS!GQ_EZ3S=NR*HCt&AM{^8}yp~AnM+GQ> zk<|T<=KZY2$&Ls55>(4e)g-uzO-4y0x6_~kC0ZUrPU=|G>~!L*_Oh8_0*5)Kk}G{0 z^6ve}iY_ur@=MEy7{UF9P@fYr{iO9iAKJ{LlZG@_k+W$!0fin*k5!-~at!w1}+#01bH+e}uIU>^@rz{UxztQmN-trp-GP zKA9Y73ds^5-9IGcekCX2Z;Os(L~eqFzp~S9n$0cW>lY6Xq#$-7PX-OS0b8crKWpL1 zMnv7AF1to8^)Ms)!C);EBJ7ntU8w3u?5hm2By($*G(;LL+=RW%g+Qv+zS#ipxTOdA zKk!)$nZV?M8&bKkc&-`VCz94A0aPz00eW&_$6$Bk{{RJs89^RsF={lvyOKk18_cm9 z28`~+uFlH!QBT^S$=DpwhJ#OLX)Z1xStf+5vq%LE`U}Q}#l}fizM~LZ$ENu@VH90UEX5^<0 z{BcATRgE$Y@M*r*DQ4DdTMKsU{)~U(S*^+SOWxfdr3Ew=@}J%yZaoHB84<~%P_;1y zvi+0*e6oB}M+{mBRH%>OP0nv3aJs+$*Wvme z+I6=5r$2^kGks;)WBOF)CWfbqiqzyn%bHg^#VyM5k)}<{C0r<#se;tbfTL>~IfMtaN0nBim|USJQ>}*L?{?%_L*fK?OpT9|{>O@bYYao#{e% zZiphhIYEz*>yicKve2b$sCuK(a4^s zrfF5?=aWapoBOtJi;Dir<0HehY>-(#p?#rQNgb8M+(W1Cqj=?YW$8vVZ}a1rkc6dj zi#t(<{{Tj_ww5(uVvrh_3^|1XN5?TAvqfKta8DHEY@)(Qe8G39Y99lo&Y5*7w}2!f zVd?}@dDVFo`&?-s+FoB{Ge$lb{{Tq@p*3qOi>Fy^pX&B}H9y^K zym+H>{hXE~Lv;wqts=OwcwXWsyt-)aWJq|J)#Ct000VRj$KuMTG7a=(gnuo9TDG9Z z{1V}2lb0a1tf*g~$GGGEPug6N)ZbJT^G>K5 
z;y_c>=9!~0xMhK3Z%`sElQ&GvExpiKs>Y z0ENd6OOFWk6cW~xBh9GXvfHv-&FRS(J-B>4S}%%~^yy#rU$uug;fU`;57iRwMy2S5 zdm!>h2yyVnv7u&lq5E8cUGrgzLW2n)=8~%$M{eI-C>SY!hm=c2SlRy9546ya_$JFL z(1`k<>0p-+82a1*!Ep@7f-8089-j+$=-B~216>hTj??se;}Wga%OjCiA?rY+V2#qP z@baqV63H9c6o$IEk=AszHy0B!QC40ts?hN)7vbN0vL0*ETGrj6(Jmon5;G1lw)~i# z!Dj2*;oBoA+?bwBbceJ8(m3AQ;gYXW>c`jOm0eFNai?bEnVy+$*GYBSOtuKJ9z;zb zE#kwe$&aZgk^?L(TG$hXg`=$OX1GA{k?|4&(37{zDxOgmK@|EW;oFHm+#4g~#U>Ix z6yHR=0-P!b`7Ql1{KQefP#2nY#kw=S02BmnI{fn3WCxQ>!3bmnhtrcp6;uj)0lq%HoYBx@jw62;yNbUeW>w{Q}>WF zS)1y1FyyBoG{{7;pEWSZqxnGVmTtnF>JIg;M#^X-?P>Kgqr8d#025q+ZI31`<^VrT zT>k*A{4+lf(i)}B05tF8)9l1yPvN(REuhBw4b}= z8U0cAfcSWj-M?$$$dyhblIDF1IW5r6>r{}fDoARzYhSgqf0dZVBae1!P(9RB!5ETT zbfY~KP;&yUUAVPr_I@0Z5h=3BE#|tC(l)q}Ai24xrcE~G&;U=`%vtlH$(^a&OR614 zYcmm9+$`p$Iphvvk1LU%LeNBUfGx?PBiqc{a!$VXckDgw;%Fym}+=M zNHeD6wA3!>dn&XXOC6@yq&|=gMse3J*t8=uTSCUCVS{Xnh6$@SyI{Y6W z!SLo*X+*?X`jJHoa`9hl&kg0j){VQ;ScoXM3>5mS7RuhL2lW*hzBX3>09P%HXdy2B zBX1qmmEWW2tPfwTpy4CO9G;WKFWKbko}cXU{%^IG7+gq5XYyIgclArVJA$_3cSyix zkyumhDfn|$jPTeYC05$fTQrfSxRO}WdA!#Q8MyRrD!$(Bf>Pq-5EARnX4cuuj&jC6 z&l0dG2Wq>@KMN;SHJ_t2Qw0)8B;v~P){F$GSx+pH#lgSShC6AZ zt(KsDDOb|9yJvv26Y(>waR&m1n?L+ZGUbxv%bKG8wI-pcTz={%c#4^YxGqZiL@OVR zFD_`m3H_ct@;ot!7F8&bHEs`lZERdwJaW%0Tv?9wZ-S*8hcAzk{5-xKzT;K=c$ZzlAmc+q-9lRPqp@bAM*0KWsG(M zbzOU_Tj@GCrKI}0i}DaPzy*_=H%S~&b^V2GjBp&1xq5%1o7pJ5gG><%M0Sq)^$M8b zP9fza@+z`0;2C}vZ;3x^GNWMMNVOEAeJ<6N2Bgy3(lE0nIikphj8*p^5%`%8`xb1! 
ziiO8?S~3_crGje<9YEdQ2!{!0=BgII3WfFpoWJ3+lOlaYY^jPZq*4nfyXSc8Nd+jp z=~AQNrAPAe%BBe%RkCd)y47zjrk>RCic}7ZBO&6%ZYa)p?!U8{5yU0CQ6&p|eDcdE zGRy0$C!nPIOaNY?K;YbtSqK1xR;HvStaBeq;ke3bIw7EXAG8A+430Ct+>*wx2OTaI z--w!84z%f+F#6cZFUhf3-A+lU^G&qpBE?xv7)h}ikCa@Qz?8y0LUsQDb~nnoSB4pk zihx5Qh*yXIAg7l5Oz+2oIt z^(686WixMzY-nwxrvCum5BOZiht%Xt>0mT(4eQf3-5qc7KmXC<+8_JJY&&`kclUM8 zR%7aRP~?Y@ZazY|#4(anbu`|quSx>H4{`5Y%Y;R4(J5|;u$A5-T-Jt)JJ4l$V3->T z#it*u4UY1s{7rKh^Kv7}ifhKYz1RA~^=5t_q%}*L2vCf*g2p>x9NMOw*icx-?!WQ2 zV&wXpv1}Gyw49fXC^rY(44gn=GF0rOJefz?6s>T~1;$B5Yb*tx-^`z@DZ)KX*-A(I zIhNL`g_<)&0ks7L)8TK`lxF%-6-#OEO{$WB#I9rYSkYIS6czsfR=Fxv>dD$Ah3qEu z#EQt;jnNa`OC5|>_g6}{42vH*3dZMei9>FieV%wCIHh&z zXB3KCOX%L*UNyoYkUzx#{>f!#d~!-L z6v>uNqF>v^HSUo!T-?!8TXvM1D5F(Nu{@XdQ1bXb(za`VF%o!YC^ARuSC1BR5$IpnPjr}^1o~M2P0N7)MaRVOpcAzymCxY(MRJo*qt>cn9FCTVOw_eM~{GW>> zkArxfnQl4Ob+WnRNth~x5A|upR*z6L` zt!lsNkk4ZR$S!W6^&(i3;pthuIf^o)C;42nAV-2Vd`Kl;9csc`+q)TKwY;*4>m~Hk zut_7Y7|CLc-2VV)pZpVjoQ$y=yQBt7X=P+#hf;lK`Qo$^H|AsZElm?GZa{4 zZMcGXtzVZOKWVn*%OjA{qIgsbOZ9zkThn1Yt?l7|g>A~0o1Y^_AS$oL5ImV%Eya!! z{wq=`E7o+oZ97kz@nz#Z)QGY;=NyjapWrq0TbKK@Aq=AGTNte;u*kR?`?n;K(e#hDVeruFYP>rUM zI_XcSq!)KW;$&i7UL|IkTkJ)~s=V62X<1wOek_IwT&|XtuQc-~m+Y*BOgS`uwYw4= zsHIX?6`NREfih5&>d^Uq+f|0gl=f4im4LzlX9r@IBR>R`#OZV)SmU9zti0nwtvo_Avy45*f$g*$-c2)rVa#3mWUOx^_ za^Vya&8F0Ep=9HyB0}xKO?fa;Q^*A#-wp`EC-{(r)$N*1PTl=x^(RW9*`o@eHv!Z3 zzsk#T%Vs4ZyXE>iK1>M{$yyqXz&jkaKT@7Z1DCK17y?hr7PHMX(?I%%{Pg}AjhCBb zIWC$;MJK84h9ET?3!;&kLHDKt83Idsx5Hvc*wHut0Az4s`jeCCNkCpEm8E;;Q%n^C z_;<)T5N*EE#aXtMQ>R)EgX+(}`yralzf_yy^r0nnK~Yna0@|d6OQm4tho=bmL2n-l zW!@QU;|h>V6OhTegHO97K5T(aRYA9lm3Ll*M_bEYZ3W^3;$50V?~S^>UosMG;KDM#@9U0+A%43aT0@ z6yK2EwfSXZC`~KrwzswnS#8}Bm-msgp%OQbicj1l37X1CWVI#Zm*9~{S|X<0xqLlxp(u;pv8x>ySh~G? 
zyPYygo;OyEr=%1DcUBGpw%^R$mkhH&3bsNek=R*kR(fRVEtJ7zk*-Qx2%poCzrv^i z<3$Rr-v{$%EV4pU=F5bo`bv4i)Ff7xGQGeuRbt}S?hA7!N|g!rz7|*eKkDU~d?^je zX+4T>F-yulN6X|zBLL4Fk45=7KUnWc>E9Vp4ie-Dc%3{eKb3M`>+ z#-n$sTHVQ`Ts&}pef8L&W_U#dDv*)$7vdCEa z5=0g?2zf>6g+@kwzCVZU{{XF(oSOl~Xr}FNe8QfyTwBNj%lnm<2?De{f0`6^=SKek zRezTRp^jVHQLU`yEnw=j!GDM$snm&d6Ij0j5iHD*F1~Ga`w`z7~A$*HK*@BPs9AYoUjYA z>0Q!0<<%}OrMr^ib8m8EY2}*EK{N{)SHyp`~;xFHT237$ zf(WDa42#4W5tEs4zqCZ0zR&G)$yn#rQMyK}CYGO5mMM&Xypb;?w~=E;W3uo7HxK6H z{NKx%`Ed!Gwd9;d=EqS3Tth67$~i?WQA$`3v&)SL;D2Z0`#-akOH1aWP8S|pYq+lF zyOvKy%!z8xAyDjV{m~WsN8#kZYYFiLXb<-+tw&M(#nX&N8yIGp&%qnIG;xA|XrGAj zuiD!jS}pEKkz%@q>v}OY)xzpCs|l8Ca8^~^nc^}452jb_wSFJu&4dR=vfihv$#tc~ zEL?|zNaa*SaW5lQ`*isIGxneTmPZ~g8CgQXuEBY&+FaZhQ*Roqp0YO?C2g3t?8=}Y z+AYicoSDbzAR!HPstL6go-IMetN=^!vzKOXRi_V`%(jRt zM75?t$bnq(@7EQRHqokd>o8CIrcZ=>70qUEs!m2fr7yb_Rtl{39RZ>H^4w*nX<9;P z7_Akgr#k#um2Nd7tcrbD5z#iN_V1Kr=GzqZA`PXvrs3Fq==qL5rzHI^I{yHVihq~< zb8Y-Cy1h^T)8v|D9Bd*TKhXjI0EZc+%6(4OC@GMCW={8OpEM7yqUgu6s9LiV-n}yO z9h&%pAP_*~xa>j7c`_TPHc*eN+n?s0e~HRucWtjr0@i^Vz1H0%Am-B_(j0knLx%Wz zZIeYP&Grw^B_3@vPHh&JQE&SRPo$j2$@MotdQjIv+Tt}*cixqz<1T;{*E3~(h;C0^ z>T6o$C$eB|5v_l`V1JfB;v*r4>TI6LDI6M*0MXJf>Tj`_{NxK0+C^8?G3vQ3hmr({)+mWeY7QZUfrazZk zR)02M$ZE#E)LL|=NAjVU5~Ln(zl4Jw5}iO*H9jW2aDG@a;zY&gJLPca<6&N^hps^A z!A}uRp#0d^A-4LuL2bveFsUQ)$`5A2v}G$J{LR$|1!y5)rlfG#`yY#@C$plSIWLp* zJ5_Y!>Bg&C&_+6VeXNM==mq1SdYoUF`hpW!V_-(#02}`Rv0;+@h+ZvjS&j+e}NiUo8S5cpjsT5<6P>`zE=f#E?_Iv_7S)Oi-^FHb%lD6^4&L|ki z$Bxw4E8zZ4iabKU{#M4N#gsuBt5Sd=qLI;u$JZa^`1hy9JANL%h zzh;a_ANX8U9EVmXB3u40vq0+|y#sWqH6Hse z`ETW=-oWH^o^8^k1!S;|zR6bIT}t>cTHzXL_pgYTrn_Yvvq+Hx>Iwtgd;OYao&=Z~ z+9FRg*vSlWn`I3a%IZq(^bC*Xp_?VCQ*B{GCd_tFNdbD@ZC_tz^ zMvz9e_(4!9xc1AjW8vnLD@rpUu~@`Z;?{YldR-dP)Suh_8I6~lII7DqLN@D?6j)1} zQA*NNlF(D_cF4>o1$L+Z037H4091d(vN(RGSw57O6|86HnPs8~n3jNK8=xH_-h=PP z+0!}DQBIv>%e*rFwasRKsJG+ff}Rv*E92KE0@Wa*)?t!0kKtI)DtxkjChRy@dK5yS zP#(h}#bua{ClILmy}x=%LVwskRu8GP{VzG={+s^*JwM}Y+y4Lyf&6d()aBN3k!S$@ 
zn@GPFKhrc>IlE{jS7i~KkqQB&Ny~U`qajF~Kd9cA4Zg)kF7@k~`%{Y|nCbLrRSQ+q z?E*OU%uJ!b6qz-YVI#Bu0F1)FLcU^N_T`>WI@V2fYrpu)*|*2^hZbDWsy-d^YI!M4 z52sIjlzF(*PVGnf9DdQK@yuzTQY#tcpYg6s0^~`Wn^Q8FTA?ErYeoxU#NIZx>YOQVqPVMg61_>tmQ=l1{IRx5fxBW- z3h>@J2FB`eGI1vq7P;Z37VbmC8f5N68dn#tG_3s_n_@jE3&%)Mvl;+sPtPpHyIx(ApI%dLv9IkC0Y1m}b0k1_Zb; znp3BaQ8B|OA{Ih<1Vr0F)Z5kqzw8?^$=tTcZ1X+8N^$=Hi|PLW51YI6f12z60FD3E z=N6Iw0H8mL+gq&%zcZQ%o!z_wO?48qj3iOVkpqzM%dD_4v?^)PsiU!|0IxSN`a_OJ z=wLSbGC>kin!G-onC9cmCw9h`YySWk)Abz1pVZ$2=|azODlnpxLs9Rs%Y<#L%9Sjo zVx>v15;yteCIVq4`se%3KjSC-X_3Gg%%rs&eDf)(RN~l-)2*#;KCo^bqgY}x236#0 zJ5%2($s#Mw5UuZ{VKhqbJj3M}Kk>E?S~#M)S3%`QxtK3w>3_}GFDG(t3zjc0Q+vs) z{{R@j=In1La@-dzUS8+%Ud$86(f-_c^7jN>6{Ca7q@G%o;CBB2NB-P|rI-1m=F5r- z$!X;>f8(h*{{W+E{{Y6>6D+;M=EzP8>tW>z@2pQx`Gs%mk>kt!*!${{X3iQ@>BDNr%PD z{L%iIJaAarcar}AXI_3)=xwAQ!x22c%^x;AQ15h_Kb9A7)63Go?>xW3aQL}Dnvh_y zXTg~!J~&6ml9worWr6*T=TWAsh^nK79E05(iEL!BBh#D0;qH0~tZ^<{%G zJrGv5lNyTtkhMQ4F!+bLS{h1`{`XsN{{R*Yd{f-5eu&)C^4R|X&q0I5J;`ZI`Csv_QJjHmRw9ybbLc{{V>$hCiv6Po*Qb`zPm_jSxl0P@~5oZd@H8HNi~e zE$IGQP;^~1^rUwb{{V%}R-pQ%8(=*tZPZBogD)kn^~)i%MorRylNjJtkK$6szBN7_ zshJ`}iN&pvx?yjtab@@gNN{UYUB*$Hn~{hXM{84Q2Hy{^5Bwu$E%S0E$>p>^raFJe z$NX=bzw&?Py6@wE|JH%Q9~*0hUVx8itu-Bors70pnblap$sg5Bti$>ZkliF(u8m zJe1^zm>bfSr_w2sjHwchuD65J+#1yHf^t~Kl=ZegijVlP{{X~BO9II#?ml^t!BvUH zu^Fe5E$`d@DK-1@oST@Vq>5Y*(U&;Z*CG7a(rs}oO}gT?;71yo{TQvb2-zV5IRYMjk`PI+8K9=Cs?!Nq%91inw0b zIG2SN6TuoUsTm6Uj%Q*H0?xKkn$_LdM5EK`%fR6U1}(#xR#J z5ooD|D-{$Krp1@9-;m-gsG-ab!14Pu1oGOo!U_tHBCX+196Z>PLg6K9DgnldFTF-r zQ6)S#rVtj#a)U~Qbf!!Kg6)8>QQ&bx(*D?YTWi!LQnVP-+gy*2(Ty#(9Py=h#**5} zw&dWYw9@&Q==x<(K`0;ac|<;`IWhV|0Z!ZW%W05PkdAm-&ot9T`w#xG{aHqSY~rhM zU66%4orVaSM1`z?g#l~vcV7y4VUe~mjVo#dn>YUejBWbHLm$-2e@!Iz{T@y>`y4(s^<(hp#^~t*z(t)1cc{O}`;f$K+6bzHu#&?l^ zT~1pV$h967SOiihD3}#JDbl$K6MMEmcQP9&ztG}e_`W(f%}&`tJC?#Wc|RZbKga%F zsZaUoo7?n%n(KIdZ~xbaBucjTKmZ6f^r!lV`sX(%bJ2@ycGF1_D1fT`Y)Pk1*_n!T zTx`*>mo}YnWqVty+%2S0p0CjZz8HU$Ia@6{x00abyjl^|SCViFW=OJP49g`$OlM 
z3>E2u*`n-REta8Q+JpiB00ouIdZNF>B(rcb>Tz5?);6pA*V_WEA7|;tm2e}F-?lWW zloUJQRZa}915_G*yj5c;qkOt!YPkpQ#@|jVjnjZx)tFRrAGJa4QQsh2B8_!sW{e<> zN5-yC5W#wbL6GFwr!u0!B+jRhKf4|^?eyfjqPDjLm+J9DLFvd0l0;}24`crT8sNHG zQn^_owpjiYWopL2@v8Oca1F}xUj*~5%1%t2W=qR=Z3+OGA?Ji8Y!6_CoUYPN7} zXeWRl!!XY>-!4xm^oRJn(*FRL>VL^x-e2y&&2|3(#{U5S(2BA*r+2kKdkC+}CH}e1 zo^I!bkCm0AV8GD$<=jXkW~&hRS%5v1kz98zVw;TtVAR}q$jOjobmJX>rv8l0{{RZ3 zD(G7z9xr>n`-$UsQRHn*lhS*6);6ucEg@)#wKtHUEF>vzpP2);i0ydye z_>N-}yKoyR0zt`J8dQwchrTjIP@3QDf3EpF37S>heDd}Pg1H80n=pe<)Jqy4O+tM6 zjEv8!E7D13UgEpoBTC;>inWqf&{c&6eXv@l38qYA5)z?Xa)8yx`y8){mklFhf(aq? z=5W$4*gY6~Z-UwI+FLX(yoEO3;v3-wfsHI*umH(1rC0*J!YQp-fS_-`>&Tu=;Cd!W zZE_vNax=)rq#!4A;Jxy`AlxEbzOcKBSfi46ju)#%>k1NlY440AvkteXq&U1&NQdEP zJsbS25=dTFX{9MA1F-m$?I{E0#)tWGB8Me5iu#?(knBi|&;cN*`HIu7C|tHT-&W@g zN>PdZ?;nJ%dH_BgM48h^O1%mG^YP|MM+(4>U4f@eJV6L^p=t_@VvVCjJgddP+pSI> zFtnzNc^Wb`AK{}$dxP8`3@V`RN|b;BBU6%dP#5HSoSa&WtCH>6_P?30r)9SzQe6DzHFi>q_=Vz9DHTfPZh6WnImB1%-LJ%d~5KX zg-EV8!kc?}C6I+3!iFPi;NqpEUDb$&6^#m!)>#Rw6W}}sB37OPfM^{mSqNUnn`Dqe zlXmyQ5bP?}D%T$jRk70|RW!MimXW8Z*rAXg_$uXKdW)4Gq#;6@9r%%->-UZdpri)eaK<<#6)zeUYPr)kNaK1eN2 z*`_i?ufnW(5Ic9njL^2JguH1{i5!Lsg+g%yem(yDzsD^e6)Qn#@RGiuXa@1A`$O9+ zGq%7fl3S7bJ^|GIVh&&MYyEPM;%_5omHKb~IIj~=(jFi2Zf?KHd>wUq-~Z8!wOZ10 zMGIUYr~0u+oY6j~chQdORgT(LR{J2HpN3tGT$*wk2_!K_*8y5GP#$0+8k6&Hii0x_ z;l>=(=*M;x%o!VUs+lu{L2O%p^jQM`0LeV2>^9BfGl16;pBH2J=KOMMTEKCp7Bd?l z?uiPU^(VD*ad5JP{G}~^d_eyIfEkC?zmP%C$rh@$+O_w~K(?}1T01=p3LaYw6WI|^ z*GKn%>p$zsY11^Y0nI!K8IZ=2uNr`M$Tpe?y9hbx+QWq(yNWZ&_Wk1Y%A|d9VbVzv z6d99lD)^z2ViF1 z8ONwb0J5Y~TSBSp0a)0B<6nw{!znT#(QOd1VHh;oULhkCjuI+S326_xttc|Hm${h4 zjazLp+xK!Hk`RNMQN&y3R4xWN*sqM`pV6Lsiuxit&T(nADgIUKAEU8-Rnlvir86SJS zM|`m3Cz{7BM&Kc?b&XbA)|%Z4av^|Z9aVTM4qgKcVGuWCJm&U8L!-|m63HW~G;}_n zR%-Vhw#gZWn8MXzi-AzqtYZDCc~{}@uT1wu>BLqm.use31wev_;qk$*cbRaDL&HV?L&5=}GkEMvduEDh~eu zEX$c{#En%^l#_0DF-=L=4YpI%{FXo59+6+?xfTBaU~^HbKfITlEPEmEBOh6iZ}yMt z!4v6WB%0#xRJ$QS^y8|s)K!fu-?n8#7S1&1Mv>$O3e?zvl#xZdn6&!dFZUB0Y^2;< 
z)BX)InIBa*{{RcjJwN#fK98v1@zDDf+wJ`Gaeuk^I_mx7|I#72fD3v27UBN@;bV+v zFi!68qZYl5=Yiu&DPhNv{Brmn64Nnalqa*4K#c=dYjemJ>fg){&ny`?YdFm!txT^9 zJcEl7x{=c=md&vi%I8%p5GzhLQV@Ol$G&5R>FypEA>|{V5_6#kW{?(;dg2yiXQF;z zepSQgA0+vcRP{tRT1>XW+0TDeuyybNkNA!gce8_&oiSM>0@jD9KdDAy->JOvOKu~O zxhRB!v?uY);?XNru|vpJ2yR<#k;rT&2wi*u==hT*f8erObjd4>6DoVK%k3QGn%qXj z)QSuZgjzFBw}0^Vxgy9FV~#FK-MLDmG3I}H79><;kil&8^4tYaw8Ve~etD91acMA* zNPu%=F_9Dk8ak-)?zsb!+c;6BtsW5zMarO+XEkkx?Y%O++3?y+ace&TAxbSs<+o1N z%k8I!Wf&&(=jJL1{!>gPk0wE78EAcXm@ovYflceV6#iVsnBB~TNfz5&%?vFhR>~wD z$fzGjjtFjfvm%0>tTDBt$W#d*cV;`D`<}xmCWE_`AoQcTOGiGfcq$sNA+hLo-^U8o zD!{kl+F95t{>&hQO{v$mMpVmn>1{i+EUL1`PbEsz?mN>WyRVl_vYOT7Wh(BxlrtA8 zC$nw0Y-5t7pAN#cr6XH#PY*_%&t5d;{c@S^T!4Kvm_6hR8Q|rO4MKtm9kUO;$k3?^I_8kh2Se0H6kjmOILAN z#{GqPcm7;`C6^44vdwgpnC|0X>T5y(e{u-fu`nGMam`h4E%*`ZfJmSrSauZ$=@<-Y zZ7bAJ&ue!aTxmR@gJ0RG-+Z?mw`<5QC}gvS-UNyv>Jn6k75QM@oFoMA2RwYIAcNA4B7f&+lO*8vM8a0ENOS9F&!9mOoj&c8)sMBTxs= zrcFyzhPMfJiRu-i$T@zo{{S0c8|;o-qN76b7W7C2)zlHOJ@@JURWcV$$gyfycK5Kt zWni9&G(oz6R9C-K-z6CWSk43sW31emu{EiwIg?EV!Aa?fC{C^P>sYOpMvxi$h9m6l z;om7UZ4p7uWbZKaq%*U1EI6OX4-*16LCYH}P>K=MWYJ0W_Yd&aiAAG-q;hW`-ffui z`sC$AdO`*uPfW7eMG17IOx5m}{Rkv$#eA0`4Lu2)K z^qhnKzGTXF)s8flsV0@%AVoRQ4kEa=)7E^82$Mk40ot=%@A8d~Y4u;;A0$Ca>?yjH zDn8R1eK;fZv5?)3+@#*5v{0<%l9j0nc09g+vzsVv92xFyIF1yJ5i84x>fQ4TB)2Y5 z6%_i|AJIK1zOBE=e(^GipI^beeJnrx537HVKT2|k>YlIHDa`WY>hR&ykFEP&pDuhp zEZgyZ;lQunKmXGvxdp7}AcCgmdLH{wf5y4X{{R!bt(eu!evswCn}669`|{}enq{(E z9qY96Dnec}K^le}t53$ckZ^$=%TUU1l5GXM6;r@~KqMc0#gzvpJF;uPg-K!Xt&!OH z8CUvcBXdlo@MF}U5_6ypZ5$w?T%NP}VdcBsZ}Pv*k;AJC2<`4I=aSuctdnq4v2sH! 
zW7&Qt$=Th&$-~6J<&1q(>8JcC%ptjMJd)O#V}2*FB$Jo$maa5KlX{O60o3FsXfwub z0p=0?!SUz+020e|=$K3Gl$*Fmi?8d;ix~Y(g7!siy)d-cj~()+cFYjW;W6>g&d37zGS(e}{MD$(^XbG;wd*#`Wb4EJqkC^dPjbkN6K?-Q4dH`u& zxlnE~h;B;=+SXYOxVJn@bBvInR-QZH8AZ~|8LLpW=F4px2a+{HbNV*5zbnCMe&FabPNZ21wCm9FiLZ-s4<=#~fv4 zhN(&8kO~(4rc*#bKg*Rz6&r4zdlah$yeSNKFvd8y_h>m()c*h}G{_lQq;R6GZ7wbY zy2K#$Bn8R%RI?7F_Hq-+O_9fF z#x#~#$SkYLsJq(W8(R z0P@&|r}Gcx!629_F<2Zd(Ju(eDvnp}1F<8a83H?*TA+jLXq=R~0zE5)vHr5PCYQ@@Y-SrayJ#cSiSkVPWFB{>3&gmMgemEOPX zejEcP=#~_{?N~e$2$@+LtIQTq!@mCj@e`Q>Z3YNCm@SqQJ;X=Tm5;{7s7-p2%i+nw zM3yp!%8=@sG!}OTSNg@%N5OrBda;)F*#NUkTQaI9lW z2|xh2>_{1G4YkNpGWA#dF`_Ucq}+l$KY5iye1tg>`a%-jYIed)CWN;#YR=f2T+azSb1n*2Y5nk!@TCe(2 zPK^j7kTNL+cK-ldDF@WvewJDJyRCWOP|!3Z=Dj^_9y_g1>hmB9vX-8*d_0+ytelsR z^5#)!#3|+IYBCcp!YkNr2aYYzVDcI>`Ex}bKhiE~{{SMJG}^Q4lNbaY^}79fSql(3 zI3RnR5k8biZ)tRvc8zReQN=k2EIQSDl5*iP)hL1y1swcT0Axs zABQ&UY&qNp9*8@z?=f_oY3u$Qs?Q zL5o7|4bwa-8H1okVcQ^#|RXifNEGSb?6#4px zf5Rh>>I*l|=X0G6wNsgW@rozO>)u21rIxYzCFVP+?pD@o9YJQca1~@`SVdEx+u+0Y zN7~7rDlWe#4-*2o2kL@@etmxI!il2fNhY|2sgh14u^mV1%&5xTauIrJs4K*N*VC}d zvNOlB1I!fV0Iz2M0EuO~eoUv$17djJTen(Brk_qva>WXbx5}GN%9g+n zDC%eR|hPFN7UCjk+Lc&otP>hUVmhC zZY!|jz4~#%Oi-`tNh>_NLMjz1Hb3N_93y2S)&)SU%Iw>7^xL6t5nh;Wcxgo-lHw*4 z$`#sy73s#L?rJ{G21|~)FgW?~QB5UokKr8v{6`ZF6RHzpjBvg#IPho>U( zQoVNjBAapFD&Rd^MnD!vEVImg6-lx71 z5XXM?OMk7rY*FJ-gbEnnf9s6^fos`#%NHdt!9Lvx4mh^_O+0?i0$JMmCy}5;pl^YA z06Rg%zSrU$ek%0of~|QWBWsD?Anu6?Br6lsj{DOO034dYJni!z(H^5R%;7>?ybUxdXE+u!SicFhS^+`q-Q>cKZv&)Ga)DWw9&` z(5B>*QCSo+#?0G*YxHf82tX53d)-UaSsW`E%~TVx_MoQN@C5qDG+Z(@PprRIb|kFz zsun&Z3j8<>Lm@J+422?S=X4YYi>c;Z*SW1KK3NLgGLY(!>hQ@Wg(G6?>N4%#k4>{P z5u)N|j>%(Mv^;-yJuyO3zYr&F$5y6Y#!HXkfU40D77-+E5K=<%a=#a5?hZ!jcuM;w zF@}kuCNL_LJeQXA`(2JikXb}G>RSd>;IO3Nt9qZir4K_v*CYW!G``mIw$E!c6lFXG z%T|R({D0tbxq=PbYzGi@pIh{!XaYY_>OLLD<8poSLO`)FAS9ZEs#znYS};VC9;{AZ z4V-C7Ii5J%%GxuJv}C#s^f;zf16fOv1L#w+g-efoTf}JrtVR~~xSD{gPrD}une#^Z zXn(yY*+1f0MyqOA1ZJiwB$RF^me?`!V={%bM&`ttZ}*Ix%UlB?u{TK8O>95XQe`k7 
z_^^GC_-w@o)a1+QWWSq7&u``35^py@DI~ts{RL2hH6n59&;<`uCoi^s&mK9QZ8)}h zqA2@Arc`%!5DlKIg|O`QZ_Dtl<*g8P{{Tsxeeu3&b(>^9X~((;T%1}PsemrZ&Z!vc zNo}#i>S37E7dH^DpXrAjiz~>ao>G=xRNL&R{jAtV9=8O`(wb=U+sSOGXEM%e&Oc~Z zwGR7cHlo39!$p`u5FTH+T2!o%n*RXLM9eIvr|@$k9%<+w`COIT{pzpN$2VMG;O@4+ zdjA0b)hv};^i*eRGs@rcQcUF|^*i&tQkm6Pi&lf*$0P$2DnnY?dQUru?eQVU#My<+Z3<>5B5#{GB~+~n0OI8=Rnp1^3?!giyVVz zsrhHh_L`sMhOMYishD3+)Qpz)3Q(3NFj$}GdS)XKDL`^x`olWnh_bF>jJJT#Ev`i2exD7#wy&LIgcdcL(s1E3-q)^ z9OrsG@({{B+s7%(J{Y$`97dbJu$tcfOL&yZw$5UPKmz-kZ~j-sjAvrm_*=T)O1YXq zjZ*A3h#DWi+aWBB{55E_>m?(ck`$rdAO3oZpXbh74c+%f`BA_Xbm#*{fO zHnRlK=~i%PaMeh-xst6nR@jh0tw***PRlWlf)?J!6EuIllb7=^5TN)I$Ms=37k~t6 zqzqCp78`I$XOPfrIx(-s^#1_B`)$^gJk+OY)fBOj7c1fJt)~#h;E?u04edNCQZH=MkTxi z$u$$vN-*1sv$3EAY}8L|rc)`5S3Glx#4|F$38-MAH6IBp&{vCe=D2pNMej=23N0jf z<6>>-j!F9}>;dV?zu>>)Y=}He<|}7U=?gnj9n6wS9ZfzJtDcA7?PS9eDmH_ZZbiYR z7&}M_5^vRoYPg2GweUBqI9gChXylqG-)*`KRy@_^mJSabgg8G7G5EerYt(+vFSUd= zC5fcS&CQ&8WUneC72@4Ha@4|+pkf))>8EQ-=3kem* zk1RChC{wiqbv%W3pvydDcukEQx`vA3WG0+~kF)-^*bZgz)=6c3XhJi3u*#r@O8(aQ zD9jcz#gytqqLEi8;`sKjr_#;cZbP-+3q6Ks{x{{V$F zgI&FQpT4V@(LcQ|TnFygSx4~!_~e|Fw_1;l`>**bN8OWykC7MUfD2uJsK4)j|Eq%9!-0A;>ci@g~brN)|Jv$q?1 zlF1q3!{T2~Gstc7&6G1-+zI2TBXlTW7I92yWOB!l6hCOwG4)cm>=D_VT7uqMx;q+r zOhN1Y>gE<>>mKJWFF2t80F}wZZ}+3<`#HD2+&b$0>;Kj+oN>}^84{@j51uARhUO8SW}e=2d~mIT6((xn)$4(b{feWs@g^e-6l9Wgk*g9cRA3s zx!QnXiYLkIdRB>|Uj9N0>#N0sQMA?NYdBP@yrH8&%HOu@@ccirt(G1x66-Q>@rEyd z8Tz8vypR6?63tu=?qq^Tc?kGNHXN7*Ufp`;TnmpV+B(WGlC23esjC{Cre~{U0z>97 zJ$rrYSpNWyWoeJ4g!!mwk@}j=oOG>Z8yvnzYgqkE>sGQxEC#`8@QkO4xQk0QdvKmm z)SXz+U0W?XR*%>2%A$XN5Y?D|?SOYVuyB#uBM+~rBA}}RJVh(hzxugWU!|Tci_t$X zl&-d{M+oU40al+7Q}(O4Z}M|psrp@}kLp2RUs|o*!urvJ^_P;5*K|-lsa!lH#gwKv zkpBQ)v6Fd{ItYH77$;hrQlTp0DjKZBYN$U;8IpLuPZPvk+Xxd5J9bL zzDs~GvYy3yZ#27JaWc9wQBBc>PV3xt%A_fz$4v8-A$i=XoU08(a2ezOd-4fkedEBh5v zrG6iarc3i7iNzx&C-Tk3sP%-f#w2p9R)5}eJa`2w!G13fiwBAeBaTC5tD#)O6tc?z z62NMrHuV%oL&<_QIdT60E8@Y%%Yh@3KS{rG>2k=<%4x|6G_Sbv{{ScUa`;NxG71Tc 
zM-%B*v9p3%#bX1%CLc%q7D#g`m(mhWO&bM>D(*^#ALVx$CZH*AD~O|+-XP4?S$C%q z$xVHOf3m_UCc$2iy~7qzu?YA?vAHU=-16i;*ZfNpAQy6sPExzHi^5Oc)B;$I_v4j= zMVMcyYOz6}*;^{|l`dLfzp@7Z0JXQ;!x)n$C%Gm4Hy1O?+&Lgqn~Dx6r9Pau!eJ@3 z9;l5fh~p7-BY_8T(9;u$ql(e6%91ZNNO>`HMl|@jZQtR@5Xf#54KZN)-J=kS#!09b zpaoWdS0CXifb}f%Y_}Hr%*)6nzpll;!k%1spN;n-pUacQ8*%du-gB+EZBpfjLfyqX z4%=qK7}?j)nh19H$+2cn5FSU+$ooxmcVD2cVR3%?UQu(gk3bFS@zvCI%PyOezcGbf zRg`}sSN;brgG9xZZz_OV@BT3Z_ZsFtucE;;)KrU^ML_pIEUOmj$+BN;YR0lvhzm@% zZ5a8oD={adZRr|HvX4yv0F)J}m|*(cv-Gl0&MO~3Tls%Uo6UF0cCzX^h=L>mcu20k zs}Kk6x5f6eWL#_mca}%E%I57cDuKOn4ew|Cxj^1o(amU0pFX)(GwE3*a%wgrZYsf>azd$c2ho{O#Wx;VV|1-;ED}Vm4;GES$)_K}nN~nX z(q|bmi&s|i#pM@vNX;`1ZAmM0?PZuU%V;?!<*nq`$KWKdOSu;+?2){Z8T_;|4prjEf zAM70A!S%#3%gC32k=r^14of5(VT)c)PgK)1e=c2qN7`Q9YD{&TO+;QBB8=WJcP}1WkZh zXI2N{r9*pUC(4!#f6WR6_LLv)k-yWKT4S>bk5ZqN!9HWM6#cXR01-KnE=U8^&vlxl z(9PQ}7NNL+yj%a~tRDSIk>}m*LEB^o+X0uiF)3wq4R)f|p;yQ)y$|OlB6XD`^ zAMB4&lEz4YY{^_j0|uC8DyBfd{iGiV2f*)B{>LY6XvK{p9Jm6&(SRIi-)5aY8fAN_ zbX;a~ByaxkzLwYjS|6?Jtk%xq^DZ);z%dk?BsZjq;o>)R*NOf za4>J)gUp%=Ct=k1FZnqc%d3$H^iCo2%yZrt{X`QnC`%(L`&A^BAbuZbDmhy`6-a!Y z3VmS})=xgxk%sN$dXSb@Hsxf4ZXmlb8+<>^HvN;6 zYcbx^y;F&_xq#JW5XgBeDFUm+l1GIyQ0CP!Q7jha?B=wO(*xBh(KwHde%(On%QC-- zqEYnHVQZ&`(Cyt-y;g5gN}cv+@uhP6X(Q5;TE+~Kq>5LZykt@KLiBHo`LZ&#g>__K z;WtZ#j#uP(mJGX5PU-^C{{X{s{{Ul{v(y0QjWmxZ4=SgwtkWJ8{o#GoREm*GQ>`)l zz#)GtD@w1|&;VJIij2rrLn~BQes$lH<$PG^|v3}wEZ>&mV`HK?F8 z8|KD42VXtP&;M|%+TA=geiK8s`D1M* zI{B`ocjQ7PPlv zsYMumZx_QZ{U50ZHM#ee?p_f=mfI=+01sVsWc1Rl@|SGGne~F@!|7oDf&T!N=S}PD zf6Th(+W!E-zAg_3?|u*e&`(lB9gVDX+*?Oq_CZc@n9I?z+l!cCkSkO(t1|3q@a}ia zym;jxYpX=(7?Ey<@&+ie{iaV-Vf!TST6eBSc;p$zQ9P4vH{!d8z`FeU<}~>>1>Sp* zpDk%e0&1iyVWPcprW9p+FlfleSo z)ULj#Z^!~+WAS_)TCS_{{{XScWZDJRbI1~%O;6M!9l1XL0Q1qaQ;Txv&t@xf#io?5 z%g5m~>^A**vX0ISji&lAl*oaoD5jixduBdX>`51!D7O}C-|Fa3)0b&JlC%1n@^W+Y zEvN>R=4=0}`S(h+1 zm`IJgEf!E^O82Hz*0mz^sGXJ7g+bej{Qm&hgP|2Y<9_?miqPh}fcZh`k+VI2n1fSK=G3N#qT8>_^3z z(>D_4;-MhSte{yhSNCBgYO^s@kL~gB+wbYsr{KTdU|gmYIEPJ>|dq&pdCz 
z)mV>&_NnH2_xPJ7k^~wo@y;bF*Wz{3ZyL$Z?qcHc%!O;naiW#zTlO+I_K;yM!3+Z1 zjPoJ``{hT8V2q%n4}=w`Z}M|CTNVRwnnI;CYXXXXCIA8tjGsK86G`Nd8+NyN8ikdN zOp%Il+zo*rXX(U}L1QTF3)60$1=p<{q^OLKNG)54j=XEVYI9RM;-c;`*h}o~rTUbz zT)(G0X~@Pm1&OI&_BF_v07W+OR_rbZNu{5PpHGC0>Zshg3O6Pvx{<|mXm+PfEv+4< zh!@LHNg5i7SzOfgRc>8ASv0MVUk2!;(P4!i>N(2C8~R03i&CI+sAi;mzvl1qagL1$;W@Yk_u%jv`|;+=JHzqy}VOM)D?&8y|)%yRf6eya~(!FW*bbJWhTg z`=#1cT&eTRo6=7*Ck)`Of*t8rB-fP*`BN^OX`>moSEt6!r$b5)->nGiS`nBMKDRUv zN;~sQ%l>rJ{HbL>nDsWk536296inoZ*%eDS8dKx`Z?&6yBsf^Mo6Hq=`Q>EWwZy0e z2*}%$PjLLTYI#FSBdF8}r?n)_F1P!_)gHb}Ji`>yY8PBh6`iO@zC#i8u*tg{k4Ks@ z8pR^|h&Yofas-O}e+dLW{{U+yfp0vR>D zRr1a7T5SpZA2DWqLCJ{pmKnf*%JAjAE6e_!>ziBu00#KDpXI;JfB(_HFt}TIE4LV7 zU-(`B09J934@6i7w>Jy4a0eG%#Yo&%wf@Z8X|{`jpLr=;O(D2arAw<&$T_e)RBc3{ zDl7j08)dm-I9NhtOJ-}N1vij!-GdLqG?R9b9a{XrA$+T^gR^poQobmec8~kWvC8mg zQO`H_fTbt^Wa8DElht6f@^qh-9#?rTi}{%^X7ugeNB!I;L-r#-0;4y@&3|w9vC4e~ z)>rQW3sV)dgwx?A{{Z4`nw(Oi>?RU^5Sr|@QBm!eah9$St)|8!n1lc-sWitYBtUM) zyyai6+WT8anR-8tDB&84I;`1H-0L-cl`K zpYS7ry>(O^ztw^BA0PO~{E}+ae|ExINC8K-Y#n_eWnUbc9)bA}8b9U)3n1kmqbDKP zZL|>69lA3@%d99z)3$}!##!&6SZS?~RimXx+TXt36y|(L?!d62^ zILYPqq_p<1)X+)`YH#8SKN$Z2dn#-RE83fXvduJ&gKp;p?qHNM7KS3rNfLwc?l~|#Sy&3!k4^qx z@hp}}ZqXQ^>|jxLwwytdIM|kL%Mt@rZB?gFwUpI2>i+i{A;PV^Ex#Mp&XEGMs0Mz-G@)B{_wH*K+K037@~@Hpgl*+ zzD1B_A*tkTECKL~+nOKtDVqNP^3(pKpNspY+H)EYiks#^ZV6ACs@lU3<%%?~uw1eY z6WU0RB=3K85B#%9n7aP}pnKV4=E!xp44Q+3_I@3Y%Q3K;-8n_2QCYMy1IQnSoqN`~ zkY{ZWjWUfrKiq0<(q!N1%nX}N%#YHF{M7QNnDkF7S=?&cvuZZ->end*E?t^7^pT75 zW-57u{Jg)*&A=v{EMwO51nhFBZHOv&84GeA=lN$F{{Vz^rr3tmcKhEn+TZUBRZy3` zy?7wiE|8DgJ7pNYluWUSZxT!RE#Q(w^yEIF;M`x1{7E$M&4e-fTuIKPk46*3lR|47 zZlre^iZ(Re%v&V1Qr=k9h!rVqC(%Z(*^Q9;gM5CH#C<3K0F~3rZ?V_@jcc20{{RO6 z06D||04@G&|I&AwMJXP}o|vfZMLEV}dN~FcHj@nZO(`^xiYpHv=0DB<01?c=pMW^> z!NgQx(B}>7Ry2@16hf7!#mF6q%jc>DHOB<4l<2L)QG;#Ebow)0Fy@1@-^`^^B9>}9}w%z@?x$zD{bs>iCYV1-}8^j{{TDq+MruW z`o>}Qnf=&yJ)Avl-%BEDnu;Y#$$2AkuM!zqhRyOc%0Tpj=JD=AVQXtD=`gCFNP3P# z6Tg17@TbL?Sr6}i4s;_>rCBYch$MRiJXwI-;%)M%&cSY;#00Wo3f+}*-k5!f_(Oa_ 
zik}XdofGMG)7nKj)hB{2HU+BszZ{5^*egi;?Y`Jx8e~;GxfvSGmnw#M6*FH z)Fj)wl$|PghF#Pz6=gyzU$g%J63I#%Bg9l?WPv1wnvye>bgLCzsmXul+=Ne7G8Pw0 z0!u6~tA@DX+~iWesY6ygxP!>k_L;CmvN0tar`<7>x+P|T2@)OM_$O0N>^TgC!zf8@ zt210%GDN~Tq#T_|1bkZ`Y&@b%lM;5PugG0xnPP!Zg{r$g><3EM#+js>y2lO~AxsQo zc@5ijep^@7eCVE>Atl72;=&^ z%qZDIF~k%n$!?Bho-D@GP+A7Y@thX+&7PkuOH@Yw@PkOXw`iIB7$Ok z(WcyxsUisr{xHQn`DI}@n00&Eo=U|cM=Br`^$L6d5;`6y zHd<+V1Etn~dRT-p)3#+>jI3&Y9>*X{Aj%vl@;;YCxxKlf{FO5Y{Ivf7sORGT=}OX2 zzWZh5xd0`;W;N8Kkbk=%jfn6ftNb$DdxngkQY*=VnuX0j<(>NGF2Cp=_D~zLEp&+Q z9I*r81RtX@uQyip(|w?YBAFJH)&F%=EE>@A6t_jrGMs+ zly$uy%C@(otY)a|9 zE0mWjh3{XLz_*gLt|N8S#eVnAF0}sev`eT`^C(fN-!n<+{7gi@%kRF~UJc^LVT8{W z#Eltg+03$&3c2E00w0Ql?DPKsCpQq7bEl- z{WQun=4bNmrN_K8RDZC{9Kxg=hHrc#oN-~|d$-&3W zHbr@E;%Pj=rb5lkB&+H2B=HsGK;D2?C7b)Q>*4ji4_8%d8#Vs`Bl_l>2Qubl2@umt zhjd}cuNCAvHEUeU@fu75Wu?Ng{5_Em>dG>1&J0J+WSvqlMJaC=>dfsQ(ASgdYsn}Z z%rV#b9M{7ylPrHzCD%B7Yg#Zn5ADb{q4knm$U0yKfNB$ux-tE88xQyLXw_{BVwTpA zad8#8{kfzFzcw_?*=*qRWFBXS%l1Bnv{QbOyS0zgGWAd^vXjv8KWF)V&PNF{VJOkr zk^cbBK0zY+;qIM6A-0l&op)2P%wEzyigq2H^%SD^HTw1Bv;do#O=v%AR{og?8vq{X zERRxsZmBnx?LA6*=1JN@I{Zeo>(lmEFfx)dTd;xjDNVehMrPWB!k~1>LzOnn@+c!q zi5q27ysBt9ACHJ;@9)>){{UArc?VxT$xgNSp@!Pqi+l`tg{V?dS@-t)Sis%1lpdX` z8%vax)g)MB_;=z8np5om04`T7cHJ2!r_I;F6-vBLwV+YTidW&Gul#G3cpgWM?4}ax~c( z6U%l_+R(>9>NA!tMTUsAe(nm3hKkdTv2m+bS*9ZsVhcz4{n81T-yY>RKpeR6&; zBgUnbnn+Pq=%u?bHK6^Pull(YuYlP1riJ*+f8Vd0-@c`}@GVtnPU>c`{f|g!NX7ttJ zSf#gwb1{n5%NjQ?g}c-JWy^&$$ll8>=e&-@vO7m4?@oI$UL)fBAh@kp;^TzG61Pc) zl`L_XZrHO#oLWS7{oH|Cqp0qF*DR)e&O~mkGbuduZ=?+!iqV=sQCQTHs#|Igi;o<` zmXe&P>kZoZeBg*x*4d<2TDY$thmqs=@05N_6K($honzNx19LUPt43Zz%D=L_zSm_Q z49!pYmS(`6gNjWj0@Kp+<*Jj$C7tB{TGbs%UcNXceN8-~g41olx}XD}20VzRQ;8hE z<6NgW^toDN>SQT(Z2EJw%l1CU4~u@CK8{(1z(beD5Sr}q+i99r+;Z|qd8<%)Mp$t< zZjwH!dz!PzaHU>YTKSgE`4&`GAUPbq&p=1>e#h-*c3?&$P8gIjYag5Sn@go( zZ*d}i@khi8Qj9)TIkD0o-2&=0;?T*_sbMAX;p`jw5kf9uHAfJ>{Bqn{qggU3$Vle(jLhPBA{iCR|;F^P)4C1 z_5I5I8=Su%Cm$g7BYcdE`kl&-_R>_E2fZ-n$UMs*E0>LWx+Mt}%xx*^QJ7jz zwvw$Gy3lb1j@g!S2N*zxU!(#0D{G3$5DE}MCIJ;kNH 
z+>=Kft;s}$dS!)w$VFfFIlPC{ZYDA7OXBHVgx$6vuD@$3axL$Fl}^9lZ9ammx}yHJ z%_g_|!qG3POFYI^wQHpyG3QV_Q*5sQJS=2)NnY9+UODAo-oal?j({+w4qwM6t`#{) z1?R{x8=O4CX_a|qAFgQ{l16DrC9nJjV(K;_w-LRP?@@oo{-5l6_2KRMI+>lt`;I3M z{G7dTfX342MhewvTsh2y1J> zKNS)=mWo^Uhyb?L$|r(-3y?HYtIXRNDQAufUB!$)(+SDOoI?APf6GQRw{{Vu-%WpQZ85tpF zxUq&;jW|NUP&3G(8<6S^HyDV>hIK`P`NHNKjboH9vLeMA7D5Q*qkYC~w2?7|7O97M zKm)AQno=j$D3txi>K)ZVX8@E(;zvtQkD`Xf#9oIl^m#?^f>^JCgP z#vNA9X6YR9lk(&vKINMwf>8*~w_ORWpnwx8zmQO-!b}Jx>8-B&O zGW-3ku2`)1hb9x@x_TdzwXHV#TeY*eo_l#`Fh*{q4$D}6E-pa$7QORXuEQ%APHqWY zHbz{2VYZhfwwfvdY>Yv;W#ufj{jZ9OiSYfNSxD1yb7W^o-8an^zF~EHwGnEt#AboV z!|N)ZL=|n7faUQo+W!D$m1M)h6B*fIK4kM0F->(GI-Sj?p$sV{q%ja9F4C2H&=Opx z+$8k< ziE|sIv!h3HWME~kP;Xqg2w)8d(ydc@0X9JB+03W3(26YWaz z_*W>&_P1fbf;!DrrA{nL)G8g&cMZSV_;>h5KZ> z9r=7YUohEpXrirjO-5$*Vz-sq!w-QNt16v|s)cR7&QmP$k2AY5jz^HR#^Pf(lVxuZ z4>GwG2BadRdRv=4Qcj%-|XQvAx)7r zx+XnB>7AB8P{SQvg7Gw9O)8;7WqbiAA$7}NTtx#(Iu?(a^jmHq&;mbWs}^7Q+xBOU zgoy7_NeE3oXOGO*WtE7j;v?CQ5CW(h4~1Lyd2-8$^hhv-s$$R{aE`}Q*Dn;?@#8au zoViwZuFJuPVf#O8EYopDAh>#%#!@zsOAqMH7gmiMly5z}YHDbH+xG*dGdeQj-Pg|_CcG05iztoRecipa-NH!h$kG6@p;iT`J8hUyBn6$5 zOH)XKRCZ$``b4Q_jj8bag~>{EfHp;0G%;IxaT6>~$WlP9$7dDZnMPm?uGv7m&sa-w ze?O-gM>D@CELy)2?M<_JjFTF1qqNjw$j#egS6{W1xjZjx{IkCF{Fw?VQzTcus6u$u zDXFRL*EIU*{_@%;2NB`6fH{S$dYXmU3L&LW=}rl$4Sd>$grj;e?6kvK7p()9%XHHP)XR znC>EVRp%E+9CuN~hR9|e;W(w-QZ8_R%EIT&Q)w=Nk>nyTid@yqN!avBds)a~yq>>~QSB%q=!JVK=y zP@zu>W*iNd49-GU$2{}u4HHUP6;Xo8`=CC?{{Uvaf8etsJM1|W3GTTvR)f#fX|{{YMF=ln%7oFCuG#?^f>VhrVXQ@A;t z>*Q&a>tsc%ODeF9<|0Qts1G13UuyVeHM4#uSNTaI4Qln(mZI8PvR7hy?0RMfzi=I= zXOB@zpjKTwnN{L$X69K78gevIxa*b7+<qajfQ6iD3+f|Wj4W`Yun5o66!UX0vB)SgOy z(&bTr=F5+mR?w`4?ZwX*=L~Awzb1@@2km@3tFZl_^}Za2Ss1dz3rVh<>g>>&V=7R) zg*6Efmx{5g|rU)D`!CwM@u_x1iB52;EOY zvVrc>Ct>O}UN-7S!da9N-o6J8SrHakpb4aQqOse{_jWu)O*0+|$zS4l*S%?&`rlCmxf)+gX%v1ANir)kZ|TT7 zZBFG!{1#CS#?80hm}w~D_BM$}7?+ht9z1Av-{+NP8!{U(&3eRMYm?kiG;ta<)u#>1 z7UH|`P;+CYkr?UM=H6C}u(ukkqOvkHAG_rmucso8=XHOIyqljdvyMq{{8_;9By9{L 
za~*vx_#shR%vPOi_E?X#nF8wCeKL|~1;y-QYY5DXVnj)EAH^cHs5zF{0k2QDOTsbN_(pr;sFvwDu)X35p5WBw;@YXynZ4pKzh}-`(MkE;|5gB#PHhLQaP>` zIa%*5paGKP&>hFwWAX7}PCpOw{5YGX#~>xTA~@o<^G}y1NY+1D*@!|)m9N4_Dlp_I zek_I)ag%mWs3bmd`p5W!^Y?1uWoIB$?Kk_giysdR&(nP&)#^U7lIqcw^5kWgiIVQF zw4kcScc;B`G6u_ok&Xfh<6XyGxV45`B7ev@(#RYBlDXghd8S~4`}6L08gK5;y#rv( zDO|P9>XqxxTT&J67$1qtpyT8or5i~lhEGx#Xy2zLBpQ7g9GmQ0al{hz9UR)efcLjn z?{_>dzLm!_$|hmd)bu|r#FTm`FyI$RyX!mKofuBiJO~q)D4Ep!ThWau!~JZo;m5)p zI84=-p3AE0um*|?*+ghaUM}2zjLeQG6S#7=;jopwlE`^EW+c>>HK{p}C^&IGq~l1@ zFLeZWw+qEKJdG-SvmRy5{KRpc)+|LNDB_Hyh92W8k#{)iRD08HJy&2x)f;7QLB$ij zGl}B2lSP6zjaWxcH3Myq<16Zj;D>UIEEd9GM{SRWJRr7=Q5^#j)r)czW~DdW{{UQ= z05P5j9$_YTkqO}=rB3u_75sA0ZNQ!g4nb4blk{KPe^cvK?fUp^Nrvqv9+Nx#NYu9PV2qe6_4xSzSbi zNgCL&{ld8m%#bTg#LF9YnIUktcjgUNDQtqQPYZJuZ&z?v!hm)6$}wr>18%l+69SSC zJP6~{^vKk)V-a}^62!4S`Fwf1Y;X~a2baEI28jf3mW?l*kN4UAV+`8y1lsWZt@p}X zWD)xP6Ug3QQg>^M$EW-TQ>Omu^*qpoL2ffopiPj$xn z#IGbys<6sUSOY{Rfx2V}*|QYARLFlWVL^4QNVB^7W8rdnGY78i$8MwIT-EE(((U^{ zoV=3QwaW#u`lO5MD@!Sq{{SMhk||UCxg&(}-;-yI2&Ee|j^VC78H;^hBKEMKhZsyIwvyIMhBndB__Zoq zZ`%7^unbYVUNW*PUrfa`Zcx6%VYeaSguoOD5Waf>G(4t}%}2zf({B&?rx5@-CycA< z^W0hiEwq9kPK-VUKGmTf*-lL~XU&{5NOCC4jcZl8=XpG-%|Hf06kwmj-z$!eG1w+q z=6b8!>-xlTBEvLstfP|=3RL%@CZymxD0K|~05dw~l+SM0GbEEsNhBr1g#gqa)xX)x zp^71L}veT{v*ONwC=5LF33(M05#`OC< zznd{(jLuuRGRQH<-nFD_%@Eky%SLJ0>Uz*Md1h20a&U8I0x6H^v-K!ZV`D5#9ptDr zAL_PSJaUtV9YjQ|s%yHIoNes%+hulkQm!Rmhgaq#?q&6>P=S&5Qe$LC=va!YL| zg3D15UEPG!OLCPWaFl4PPE_RFg-^r#S!~3)4r_}fO%z+|);Iob(Ppt^`n)bxBOQV% zJJ;JP!-`OZt-mfnWg25%Zd;3=G-^$}s`F8nJ(vK-jI{lq=FOI$=nkh+YGoT9k7=sg zNue1TX17xI(jN0f8Jr*Wxi(+RcV8(JCIjg4m5_=uk{ElAxqaHQ)<5zM^!4t#oyAZ5 z@=U@104?x#T5s)!VU&k)T$)hm!A!4rBB+nly+Rc?7NmV3? 
zqXTte-KaWcuDoOdR?R%IqFmV8%5K9GAovn9(9^F?@|ni@HRjX_Bs!Lne`h00aHXS> ziCI6^H5|MSS2(>NXX@cG!PFxxt!7vwxnW3MxT|fs`DInLyEtdYWMg}VWQ>ob(MIBg z`{ix)g63AzKqzOV;#mNeD;-;zQQH z>zR6e_IYmHxXcl=()9aib;b0Jr4=xgIl%gyw-BV=v#){eJ^%>L|z zEF7&u^7|?W+utB1-mw)XLHThJ^tCscxrqlVDB_=Mbpy8%@n*ZUDAha8%>-FJSmnN* z>qqsYIaQQ10mKjwGN5%GX_p+7K<;9h=(>Hywxp`l2^7a5DhE)}l6U_ACnb+2?RtS= zGEXXw5Wv4vnVPgR@kZpw{g!5MigdZ9a^}eIptFwIWcsboGg7rd*J^Lieex|t?8PLJ zPPGY0UJ)5~c99o;3P=^J72h0`H*eDtMI~|DPNIo=zN4L^WJY4u`!xghdcI&BmR;b7 zqy!$kB$CPq=9ilpC`YCjkfVH69lw_?(@WIRsD&kVf&DIGrbt_huZ=T0+-gMJ+w+gt zjMW=r1`9}DuB{5p4LuR|5Ay}$1{Bhygo}Sw&nXC?ufO!cYfgqL`mwg(0>DD(;>oy-*30Wm&RzbwoHz#qm zYmt$-nrw*m1rISxEPi3XWegax+z={N&n~_>b{(J`El1Q-oib=3zb{T2gwchmbt&;J zd3!T1PEwPEVl9AbEgNbxo@8=z%BnnbEir$q?cT zABD#I1K8z}1ibEU?o97YlS-NZRJ25mwtfEq42-Ne)Or-VT(r^N&Pe3B1zdLxvHt*p zz(w?K!IpTkaWKgxRkyIWQ&dD}KINQ>{{X_~HbYV2dj86(%2^Zpdw^_GI@fLTmvDuV z>2D|DlBHvMfd{|K77{5u-uY}?EX8N6eLqgW2QF0ir855j;dNE}*Z$X{nMki-i+HsC-23=wFy`xEz)!qpBfha#}icqtBR5h>JU)uXQnsXj57m0?F zp~Y!0n`egFR#6(sP30j%fGa>o{Tq#P$;uCur`uqTlfr3M_MZf+#VK}ONMb2P6ekFu z7&O%=N(XnI>etS<-?;&!;#Ha5V~_>|W#+=Q+;Yrp)3Hw1v+)zTNoJY??<2X0mL)>E zH7nHA8Xj4S9l^A2kLB3B&8=JM_E)K9WF&^2xkdsZ;u?gS{{S!g*-m{})42&B^?{c-fPw2I*yyCJ;kJq%N)wc2PODeQJ5W$ zTdM7?HVJUfd#rI1jBmQu znjiVE0rs0k{Pg}_NRRnd9(^EJOsD+zhyH8CdrPDKaz+UyspKCvUTSmcw)O@~?LMDb zkJOLhR_XB;qh?I|YfT6Yl9fV#N<1P_t@%;s-eH$YHT|LL!aw7D#{U3B+Ars%V3M?7 zm0o*d;q=dh_cFu(05#w~(`dh(p8`ln{Gjvsc_p~Ai59fv_b{jn+xD=JX*6e)i9gj} z0!c@i{{Soe+oIb;tZEkUt%MTB9g=i*_>R;HcgytMxu?x94kYpa09Ap(E-UDOit|p5 z&lsQW0TU7#f>xznffaA^hFx7-b~`fi?(es=ZFF)tIcY# zOiDyEsYSTAQ_Z4BQX@`6v^~plqVM)|B3U3MPBbviaN>E#oOP%)*~g?E<|Tr23Gg1w zY5PDA`L{83Zo{&payG-`^98Xo`jJTDTGe?|spVdu&GB)jV0b0o14_2~v^N^XfQ=k1 zPDl^_P#uTM6ON);LVl%d`|E3eFwHc`vNTirvGND@T(Dj1vttVp3^E9kQD3Eem~>eq zpHFY`1ddE|Bd|(5h5jY4AB8>@%S2>Lf-t& z>J4@~_sLyHW$K9564`ISQ}G|~DYZN@xEl^s4F*MHE8E(edv-}8xs37v8tv0+f^e4=y4-UI;PUpjcbm}yuU;ePnE<o3;-0RPeR+y~*?HS>DRGfnjBGyJlebHvkN$c@Xe-xTqtT3NN8IoPOk>F!HV 
z1>2wnPx~yVB%}_`pI-}=vFpo-mtKqN^y3_;Lyfx9qvW}jkE4&mdp)P_8N%4b2TdhDQ)6 z)Da#_^a$WLL7WPOB?;efMjWM%E%~NgY}(bXBLmX}B<8#F`DQN^t;sNrY^iJ@w~;@2 zBx!Pgc8c6phibKGauT!AyBMylvfEFBJrj&l2;(IQR}@3Y(dyg(0K;ZKhS-_x^U0EkY>>?&#T0F@KVU1+uKxfRTIOaQqFn~Fi^SV@ zQ+;=*$z{eXkk=8n#RBf^yZk`8=|PjlQRKro$YXL~UZ$A~$#s7uW)R{efx#@q^()_u zf3=u;xEu7lj?O-v)Ndrwt*)+P`pv7Nuc$pLaia=_Q%|%%m-cfS5s@3a84bCpX^9=o z4Lox!WuqoGpk@R$H(j{+{{X{bk)gsxwLXUYxW3hI{G(@asY&X|$rG`md{}>9I_)Y4XNrl0Wz3 ztJ8Y%2a5jym7TA$_#h&35yddz>TQQfJ^j~_HEkyHWPLi??$vE2nCy)s648eHJ-*NR zS-O@eM^_w?`pB21j7bn;HadQg%RR)GmOL#eCPt?qXnx4!@&5p^<@~>EHrY(C-ohh$ zW&Tf?+T6TxDzq|=ae4q0_=eQz7yP`*!GufHdOA+h&SNSZi#WQtu{O}8&FRQK8B#iN z3Jqx6??(8uGFakeXPbqKG~)^!o>S+jVxIKPHAHcP$(Vo+=AY-wG4(RsI}Xf=^%RcR zOt4#^psPshScIKEU{m6!`B{hJp03>E^Gx+i1XPORM+qzr3|t2nUxd_=xNe{7u1Cea zkqlIMq!-?W#IoEQqTH!MN_<3(&f5+D0B0?M=}ct`-qgJqjM1@R#fSk70X#q(9J&)W z(I^UI(ki2s2Vlf;pz1!1${V=GlZ`6H=Cz5Ph_`tjHBMDuQBi8#b_=tvW8{@d$kTvg zR5VLjB-SE?xpreBsxheK6!EF~8jIfmd$djaE#>&sXLw|iVuYK#zWG` zSd}8~F060$$W~iMv_>R4QHwiv1(?#66y=r1JTbm*K+AhmH@dQ9)1!5C<8DEvRW<`* z%lWeoIlWZKbiNk6;^AyfWP z1daZAls=bZlzJJapLIN1y`fa9qI0OJQObY~#YZER;m60btk>n?gVl7&g~TlK0UT_E zGl1NJ@dKxZWy1_{!mZJG(wz8!NqpR|9)~@_bdon$T7o$%1^!Xcd^Ev|0~|O*lV>J) zk{)~IYd9vgGIAzHZUqz`zQ@>RzH;GutlUQ@bt5FVn%N|YzzlxEt+gP4CE+UOBW@UDn)-rhgprh?nf2)~s7rTud!+^x6UtQznvnQ=doxK4Y z1*uN$Q}W9NmrNr{dWNPU(|stV60`9DDC9>W+rx;-kZFL&9xh5Y6<;Rn1R6_nSZ}x? 
z0@Hu;M=ZzNh&wDIv7a)=bRb~WEoBbw=;$cMi@Bq1zwG6+Hp(#E*=L%Li#7C?{+|dC z5Z=7ZBU8jzh(8a975@N*%z5UFMIGF1Sgu{jUpiaO4WFqiq=3R=RE?GBRDw>--wiUX z2Ibu5&g16Cv^lMzR1js*zJxig3^YXmy*#-v7Lzu4^s62 zleS&M>1gBiuDgbLbqP2Co}|o*$LGE3kVCAXoadUf2Kh$J(fII7rb zvr8|j2>sl3V%q^h*Qv=J+6fU&?JO-tj5h58EyxF-Dsm~*@~>Q?QIVF-u#`AkG3oID zB86Xw6y&^){d{tonC4sf;7-iBbW8N|MTB+aR_flrg-KEP;iOgr#5~} zf8c%`uT>)7p&UHZ3+sed<7qT$W*d-&%v(7hwO(Hxe`>jj7)z_0DT`kPuA?N<-AVh~ zdE+J)ER9}24(h)T`!BVYRf<5neFfyX)^955Zjp$o0;9$U6%L;ZP;b?T+WoBZ&ee@- zCg{bpif^b-vz=l|SqbTc`f=Kh}@?rV`}A{{SQ=GI=S> z9+$JweF$@*_Odmq{{S*9F>+_Zw9a9*zP%0wZFW1HkVDPpo{p&op)$Q z{cFP152@(gdgmhEvBd>W+lpl1cTK$C^MFe{noYH|9PSXyBe)O9MnGO5s6S|r_+0D# zl?FU>y}z2`>KK-(0;lB*TN`9sv8gB45voI%bHKYe9KIii{GK_tfIrGtdvJfHWByFA z+go_n$>*NVA$J;uwJ3?mHva%=P!%cj8M`K2O*~QIS)}XI*~ctO%9nQSL?fpP_8lpi z@tFbCX2K;c(e^rm9S-8+Wu%c8TZ75IGHS}y_x8&8&9+An;iQ7%?CMs?mrRo?@vLlU zY4$~T>b1i>gtlp*B4zGhCAv1Z39+_uBuET085zmiq=HLjaTgujsn3-rer313Dz7{& zmh5@VW8}XZ?eTq|wN}b;?9YgXG5S8rSXL0MNNS`Z$W!7yiOiVDvB|?+*FKn&-Nhs< z#^p$=7UlL^v>Qp}p_Ng0Xb7ej~bR2J*R^%;Eu7c0b_Zq7A{g@&mBJW|ee;K%?{q!Ck5 z{$@QbMAKn zt@F)nvIxcA;gJUVr*?is(rukRPpsZEDR}u0MGCJ|S`UZ)mSIm-2Q$Q(L27!|pQ2qo zq}ySF87B)s5>6k)$&n=7!d#A)`j zL;=mk$Rs&2hS4?4+rO?)Wb!Yn(%jq|c$11(m#JDE{?{e05vs;Ew-myC$)ETmRn*$( z$7d|iT$h#$o@$)hN(vr9sGO_Ek0!w_bRn@JhvnvX0VlZf2DzqB6u(!wFIM``CRY^c zMEf<{$0jxKk>(WXr`&+E(DgGP)&8wFq-JR(jIt_l5&oYSeab_i`HYk z{{Ri!O>rp_&bJaQV!r~O2orw$a>^kwERI2hqaQXor&-#i^|Q+1ful5xYw=Y58)jBX zo*kOtA|<-?yI7|JA?iveX9@*ty#;$<;jqWX-;#qR)5`k=9LeT>BT|0soIv4fo=cK6 zLo)&KC=6&rd*w}&lPvPBoOZAo{X`QxvJocZj^pgM)UI0>6EunFoL@(8BZzI~63s2S zc_l)h3xUUn8h7}9%X~OEm{B~Kg~XQ@*DV}_i$z`v!j%N?(w%a-#Fr}x1DH+p3ybTc zD!Jup1-zl+W7m{y6ey-*YB*y+b93Tq6NNe2;d>u(Cj5J;E=Sk|gJ4t%** zoc{pXal?*q>$6njF6&g&JiC1z)vf&Y*CGg;>lYJC6;&UgM&(Vnw*} ziHYJMuhzVcskPM8tar%-bZci2D=o!N-5d8ppAz!@oFi3?dP#^W@T7N1r?S+onSc|Z zG?1sH%0!JpO7S@00Rxj_UGcJ1^uXmfk*SU;E_)F1Df@N#-JM`ONn?ao*2 z(;8L9u?eS=c_!jl{N>l#feA3VRX_2K(0|6c-1e?I?8tc-xRLtH)*0!XQHz)YjIN~Y 
zdU)Z8WFo-7H!dz>(ru@J(c>=cd+{7W-?v=rJ(QJYF+WLhwU-!Jd9zQfYrb2#g5q%w zlV!$P?V5|uK-E?UgELgOj}^|kcyVIPCq(3z@qb78t`0fMaNMy%M0WDq+i?NqO1b3x zLXx!(tHzuBS+E>D&dxM}8wHm7B-b*k$0!YQ#5|sp6@XTs6(~qQYh}s!&>dQ3jP*}+ zzbo7)mo*f+nnLyH0n=l%jwkgvGoke& zWnjy~ziVSs{{XYavSTz4Rp63)<@lE6trcU9zNpMu(ELTDpdJUtZ${(QNLz^K-Fp#T@?t=SARA}8&acFkO+i^K$P^LIOoQx?NQ5Fy@<$|E#1VS-mM7D#FJ+WS z&f=3Pbd{8jDy*O$JXkj>t_Tywy16jdOOc9SseWJUs~M8d)4hwWV_+Np1*!i43yfyn`IwZOOR>~#?_SP( zXB(5wsOm>A4!LxuICgaT#}w@Y`c3};#6R7RRP;F1b@AoXu3P*KRqB8L)Prgv zY_!R)n6xuSeK*u5V=S!nEPWtN6mcFIr#WsyAX~aIuTxs5o^{Fmt)^Uo!LBQ8C8uAj z_pkGPkN98Z%v?GsjHSr@#l`3M5|(-rS&!O1Njb3;$jfG9V{@z7c^=2=cFf_f<(65L zuc(9`C^$Iym9hjzRu3lwG^8bQPBa~B%(uttEo#gzN215lh%pjv)IVSLIjza3!wGR( z%O+V0EZ<7J*KN4zuVEldFk&@DsVn@%NxcvGIggi5r^zlk9%~%AA}NmfuUPXeVr~&U zQTXtPeV(05bjxt*wD+-Ob7egeEf~vrY?EAx8QYS|O#=tz`#gA_l+4LXgJ6*kA!UjA zu1j{+R*|nLg?TSpH$^n6{3A28#PIIoW{BdTmfKp#b2&E*XcL%2RoMNLTr-n6W2Qys zbO+12bZTkC>nJ>Gj)USo^BYYc-buGnKSLMO^zrqDdy2{j-R(h{(vz=@lW26*pk||Q zT6_H&9ws)rTUb z4rt=JIq9)anQHO!N24>-eD86p>62+}pf%w1%PbR?+=khHFXhdAG+41@;u2?S|nanwp60T-Nklh2p(NdEx4RVKd| zqcwhlWu|)M#J5oLiH{!$|+G8#-0k2%awlr^q2 z-d_Tem^%^vy!_kc3ZCvEI z9KIJUqL`Sk0`-=?aB!f-&=B%X%l~$-Yf!MO4Bz>&xA(7PhWCjnS>+|W(%WoNyHy{x4 zmQ%*2gMqGJmlR|-G}9T#7pD1l^8uD1lUXj*-F;L8Z}OpU*P2Y-oy2yVnl)9?lIuQY zTcy+W20*deKQ~8^n%9rQzX!wie`#{fHk{#vqGx_1kC!JD!KN4H56o*xOCmuTHK!(( zo2SYwmY=hx0J%3yQxSVH-5<=qUDcwFgc_O^5;6*a4>omQ3NB|)Od|tjjy&d4<+2{~ z-r8R?5qQC&H&Sz7W^+U|{hnX7lgE&ZX1kQ*637`P&6`Q5NI5KU!mRYFmpc-NlF?Ux zhcWTxHr+imXpZJCw^xOD$pci6=8viRGiWi%(vlF@&2<%{Ng}XPJ|EsG>$gFV0kulw zRgq{<(8u*;xE9e7*)lj~si*UF-`r!wkcE>?h)$9?3%>3;@AAQH;!?To?h%+H`l@$s zTQxn-PPR>(dLNskTdyZ*ajw!VKBCRFMG`u$Y4p}I$na}!aPLQY{M4UXQRwH zPwhwMaep-`pHcGi-dQ2UnCGFcX+}soNsc0JI#e}@R)wyM9Vm%KE(ESz4^23TP$Gw|so@KIF^ysX=(QaBf zC7Bp-COXn34e@e%az7Sf>F~-jCP!w8(Hkcgx`)<)x02o~r686Pk4|Xmy+f^IeoOXT zam>q%hR!^oQh(9@LjM30{fR$KQ_%e#@bj%X^W|PO;r{IBlHs2ZlMS!U|JCPyVe+20 ztW9eSE%kV%l2?_cs6|?Uy8+!+xvA6CWMuAV#Caz(d15!ybgeH@yq{5zPFGu7c&ds6 
z8n+R?!~P5Tn=6dKku8G>M5c-QKY~H0L#Okv8KnK}r--Q(PHkW1UQ5Zc2Q*o5cb959 z&98}SX$TfJ>(x2;Or5R+#NT!*~gr}Zd^InlO(GRT1jV!Txf>m`; zRz)>p-wim9S)$cxCLFIA7H1!rG})(_68U}!B{fqg9pq!lSSek>%rD7gJy0`1Y(o(9 zcbY9ett4q7xMnTTHr4PXFI>#g=;ax1xiiFy3{yh0{mi!)5x99{s&Q|MzY!d^piTbH zZp?W&Vn=tLYVjY^q6U$Ddzr|PZrpbRf7;tNKvWzcZfKkNy*uaR%CSI;0o!Nv)`f^Z zHOS(|D$8{89zhW-(%oqGO*r@r)0pD|oKW!X{!>-5xNS3C%yMmvTFAw+8Wn1eiUwQ4 z;@DPSQH>Q0YuK7sCBVUWvsS6~DdGm_CrYC(mFqB7G7`iJk>m7al|GJWEo~7ajYMdm z6)8qNDmr8-7JHHUBv%bHNU#TnHz^}67vdlBKed?JvH)`QVA@UQEi&pE0=atW>=5~>;lA`@DYeh@ikH-nVRt0-`XlzF#V(dD0Zr|e|N^vIS1ey6Fy=L;AfNG=@%!gw$wgYf4mo3ke^$5&!CF%$}GXXfoJsF5!t z57~MD09@BkvvS_?*}L;Gr0U|1K6AI`RVRy*?pKnW$%kdjxTo_` zV;*nw3*T57B9$c(Ne;kr;H@C9O892;vUGkF`EzkI=MozwI--x!yj2UC)GGlS51;a^ zbso!`;5|++9?XA0Ydbk-g5U^a48cKObYs9!^`}O|sf>smEFfVL^-nKqzF%mPC7usc zr3s0D;EPlBXE~{cYuV0;fegCp>LH_QWeOODok?Cic>0Ef@!O_ggOpt z%dy?bD?OrwC1^M`d2i#JY;n!z&x!ysOUv6k;X!5n=fZ-Nr!Jo=Q#TX~&w9I*F41mmSqx-{S_R>_ z>r6!g$XZUiJgU49@oxqjNde8tyfAne_t z0FBdsFWOnl<|kXbb!i#T6qz{u71-0^_y_Z57Oc!d;N;Y0DwEc1G<_UEM0QZ{WgN!} zijmb$bCT5RJlniHXlvasNc|H8M#cjWxxB3q0zwi%e_yR zZFd_8pi7O)ito5QcB$X&=HEw?F_!4d9BF=~VWeMbESFZvEX5FVE*4L+JMGi`Y}%8} zs2V^|-7w!o@k?_bs1=72R)t6EU-;WGG4PC}2F@eGn0}46{{Rtw zmin}Qx2OH}KY7d5<6d4KpTUsvrN>vbr^6L}U;oxjFYle2#6fTApHSR}c^~1g!p&ce zz7AaURyIp=!~mhg?8+qWs}1=N7J}) zW0nS6v(GJQ+IF>W(pj3r8<^XYlwiko2Yz4hS)$fwmKu!u{VsfQ#Ep@9cboi&FFRQE zi#Q$!G0tZ`lQXSX;O9V4Fh3Ax$Yj#$c04U{%>%O%*Sz)le7c1CU8UN{$bz@9rz>_e z9|(Vlvwt-c)BgY~HaK)y#6*w!R=MVd(^@^B#Oll~gBe}qHD;;n@4h4X zspYzDt=%QgonmdtMn-tuRGNw+0biXtWq6^(ko%LGd{9H%T1r$iDWUFbnJy9~U_0?f zYIadvRo<7U@ktcG4#66qek!alW<(y8asYZ&PD6#aiENsc`j6TlfynT(;$)sjxR!IM zeQ4BhK_4#6d2T)S%5slfT%9@x&u(r@kv^j;B4o(*)CND$s z!|8g$%LbQoZ*wZtnr za*32~8A-3-l@XMVE)!qE&f<1p{%X~Hxdqg^a@;^}gkhwkDOwZR>&KrTw7Iv@_x4h9}n4eH^JPPqq#dZWxbj;Xe*xxo1mv<+b+T1hiUs`%I7vgB4SGcc+ zUy~iinjt!pdvwxCe{jJI@{zmvpKPKqs08FUGF8&vR8?ZUYtn|ls~!ts#<`5mva8d( z9-oN8;V&I5X1PjIwEqA#Z9|=Ax<5?EIleqr?RmbZC2*pV8 z8!VD%_}gP*%7+I!}piCPuE$ 
z^EmJ^GeJoztJRs5uGFnELMY>3XLBSmOC&A9gtp}vj$gKV*CRMtZO130dBzKSZ!F$V zZ~pWxy?Gii-|cKgJK&Z$il);!R^jG;Ue9)6;bKL1y zDRFDW@_1xf6u0doQmPq2`#FP?Hw3+L^)^@|W*w^7X>i<4533ig0i=owI}c(3{{XIY zZ-*rIAT&$_jkRqCD{GdHc!#9T2@#sNAZbM%O@C*OXTVII+*xpq*qU4jbN7><-9h1JS>S|mktxzfqiyodbTqIMupH^61{{Ul} z_?bp#zMJ(sL&LxW9EvYjMZSmCNTzm1WFY#VyRRh&-!k}pTuvmtJ2KD9YuTdEFC>m< zSBl^)RXvP-DBpfz>Yybp-0c#4{*~UKvT9Q=qXbC8X*foT(rkC2`$sTk#Tf9%cOr0! zm#LUpd2V495R?@FFsBpCr4K=xnd<#qEuIKdxbp48$H2SEY=o6xw7d0T`#w9f()JGgw3niKu)#F4)s`4D^&+M-C`(GEul*`SUF4EwnD#hlFBUX0w zO&ek0Ln-^Rc}+w&MKraVX>GYlBsZRzt476Rnm&~Wc21{{B`cJ8pdHd+4<#zu`qZSn z{Xa|Ud>ho|>Ux#&rGLud{$3vpad9%?l|TR24kCMrxy-=xQo=_qGdix#$B!lWe{15- zYx5E09*pdjPkVPxFo%|Pl@Fwd%&!?*kXS2l-}b-Q{?=qsk@9NEo4HQrN#(eQ>kAZo zA}9jyB^#=GaC1BST-+4@0ENiM005@vBfA^bE=x}ofHU)XBU%qg{Ay}IVrjbg=G^T7 z=<#ikv(L&FxLa*n2_Z4^h-_RQY<)qnU$gdyEaZE0Y2zoo=rsQTNe>ayAidM?w z{ZJNR$CujsJ8s#FpCql!!3~uB&!@?8sc2S7$)uX!;4+_xgDW41@B0TU)8+b$W>{RJ z5Z3oriF1E$5Jv!CPC5!NFiQXje+<}R<~`01Zb#mb{*H!QNa3D8Pt^oRH4yw@R<#G0 z#ZEkUdVuFOZn$z2$4(=A9*`VV)JDdiW%k~b`#a`-4o)Vl z%EQq7sWsGkR+g~b!@*G@^h*y^sXLIh{{W410%z1*>?Y#IVY851qx3V~NCS7Go>j**RPm@~ zr%#?dXSsM-})?qlK}j%#li)_7>;6yj09#pPlYj%N@K!uW5&W%{X41x88N)R_FY=F9DdbNz&mpXKcQeUmkNX^;EJX)y zROy`eZDRib3Bwm7M?N7U^UM2~E|zl?az`A{s)!^60Vhs^s(BsxXGez-k4FnWZ%Z)M zwEa2@)EZT+3W*sKhynr2dZ`2KpW5a>myhjawMIm-(ehHayfE6LJ33vOFA2G-yD=bf z_*uRm`MdVB6V=4#@zQUmNRGu>867#s!B>?Hc%Eu#elKj6LmJ(-GT9M(r0G{oS$RQl zQ`316pRzv6@jI&J93b*X2YQ$2ePilhSNg=9DN1ww-D}IAF0`-g^5d0Utu@jA&cSM-rk}U*XFtOT4W5!XdX#KBeNj27DD@!9%M4%+q;4z!01Vk# z)yI%#BM*vOsfZ`Dnh2=KIDezZ8Ccjc;=J6`uk5Zk^%CM2$){3%1Tl?EK%US%vKU(6 zD3K$JbIKOw@pyb6583^!*=aNJgC6HitH;41F7;nBL9A&yI_vs<;j#525gA}5jJEd_ z6e>b5!oM1~{15YH1~Uwk;F!AlTt@lxCJ;!oPl z-DXYPO#7?Hw(x$?n^;j1!>Qct7+tI+kWcFPB70~Qj{9xc%(&KROtV^2g>0>-WI z@A&us04p$Onoh$W7{)v)=Lo13PRalPV#RFyK*+KO$mWfgIs^PntIN6!mlise>>7{MW42Z< zs@xg)qc`f!Mr~+};~I7m)7nes*-KpLw~>KGv_~EHE_jU0(ha2IYCfo?nys*XIvaMN znrS_0oCEN3Jei3EY_+Crhom?X+?3YF!V9e~?#04-*6c#cXskbJ7!pQeYEK4GT-`sf z2?XoNBn0ko+0PJ#Fy)-h%)Gen^mQabV_E~Ha 
zV;qu1Z)78kx3a&*@c#fZSC_}TFY^2=n(5DjU}z8Fb{L~Bee4^~^F{hl!)(5i2UtZq zo&%Gy{!)1_!<&5=^x1Wq$=ton+se9q%sK{}c_pMtZ6m5B?MaZbmSq5cu%>HuIY5yE zo3YbEL;}Xhm)A>pSaDmHqjjd=7G`1st;NgFh3a}0t%Ul0vflLc#|rc(-1W^?Y@|ue zh963m_K|5Ad7(v;6(+RYihm4asF7#jig)NZ(E_|?S)F;1hl+(fNT~ z>2|56$$C(-@K=q40InOl)Q^XLT|UlA8KuQskvsrLh;EnYsb_ubv)Rm%f{SiQ;!8JF zru?XW%l_B$=069dQkI;{?vh|WgkwQ-k`g6zDj#ro<^eD^mD}ArE$#Sf-G34EX0!E5wY2|gjoWWiW zg0hki6z~peIG^%=;yFb1G6LHriXhV(^L@Y6mIz}ZmMM5KL-3u)eY@sHl4f^pdYAwK z2-eZAn%?sI8znZX)1;SWzABVpei|BmtfXd+!1HG;B<>zV98%ib$ttr3f?9-bpV3tb**>H zVqHH?YbB3VA6Eu{iQ|+8_|<>~g$+I-_S=qQ9X>rx8G5CU)&6QFl5jh6VY(f>I)=I} zl(DuY<&=e^NFypM#1Ds?@5eUTr@@FCaIlUnsLwH4EV{Wd61Fyut1{QBqc1PAC}`Ml zWFi+Xsm?g%?nLXB;?{XB?d#~y(7Q7Inuh*0{{V%-A=P0s$&qSW7O(ynywsz3R?2p{ zh7|||fQd?z&+TD3!!}UVaKrbf&T!C$>P~bhpUOrg#7|OX{VK)q0u#&HV+x74bz&$cHiB3W(KeO$ho>``eewp z?=9X&E#FfS$R}E)&<+7ofbuH8YwZv6vKaM|HX@UTQ3+<5g zWaq4tTw*x5cvootWYq2K?)7rYN~IsOP=5vV~uquj)LHP@E$8jZHg{qy3g^E_(#S2Sqz$p%Zay5ick@>*(|j?Xai5e;t4kRXzD@icBW$Zv6HYchx1jf zjl7(wo{Mf7g(|h_Rj0Sw{hYHTHf!Jrgwm#o6Y8w5>Max0EyMv&-Y5g&bN;&i&%=}v zOTvQvDXIFOqyDb{09^GwK6Lnho1c#we&72ZS(C@o+oSnqc0IIenF5N>zGsJY;Rx?D4}qumXb^ zK;4v@{^i6MR;-}I1GqO)BCEJcg`wOOW^UPR0S3Ye1X4=TOMm7lC57Z^*#k!uyaM`4 zjAf`m6!PQnX46fV!}S*jRftSb{uI3FSK5x1bEkcA#Bs{dTEy%MwOimyABb^QSKIk; z=aewyBo1v_c*`a*JCw9tA#L@4(f4-`Z#SuRZ*bQ1CC8-`1y?s$Nn=hd z?6INb#Cx-Uvdvzbi;h?H+nFfGh2*U#sS-k1D#nTf;gyN75ZeV^XzkN_%;y zXHXSr5Kz>WO^4r>a@mZh#CVw}m0lP$*?oBgKC6Qow#29zkD3-~oO4mM%5SPjzuqeX zHvY_~!EF4(6|B&Gk`BV8?so0IN)V4$oVlkjrO`lBb07=h-zSz4lMEBd6;*@*Ur-wR z=0AkyeqlymPx<|@cu4%iCj7Us(yp&>qr8P~f<}ysC3vZ;bpY?0p4T$Rz+&cTrN(z+ zer|M*`$VD+A6Hi6+KVe5-?N*J&m2L))%s+W-AX62yJ~{cpM-U!aWE&}i%5Pi^>a3R z033XA-qmh&+mg{kbvM<$TXmL3Pzt{lO&ccB!j9%)MvI znoZaItDPS5OZSr1Zle&{a8Ro!Cmgx`ujcr3oXwOSM2A;>h8#G92eg?U_{6(3Z!3~} zmOBjH{{RSelQk!&*iIs`fk{;;iYUjxbj@aMIg#1T#j1#kBx_J3Fo{rC?ICbM`ZAx% zO($0pr7o{Hr=$`!T5(a>4&N+)Bt9(v05cF2qtuo&Rf}?@KK}q}wHx6YX^u<|rU#mW z-CM+DDG5cEg^6HD;wU)Rf4#8&UUJ<=cn(W1^*cMuSm3&f7LF#ZBr3qurrj%t7MCO? 
z#a6P^!1FYdyt=y?JqyUD^_rg(acI}2BBe&v@XXPU=$P_xrI_*(=Dym?TD7wJq!+2h zxdC*@=5-HVRI3%`zuWsu`y6nd9xLHt>Z;;aipivmTHhFcsSr^NQIe*s-luw>+Q{Uy zyahHwajqH37YMntmf}lm$b;Pw{89%U8agHH3*{TViCpq2(NHxCfy7M177w3gsGHxZ&wM<>_tCCo_E^>PU3U zXK6&qb@fM%$t5fo?MWeNc#089)pKW@4@`Oj;OVAC$bS-O_FAkGS=@SG==`E~XFuMn z7A^9t2KbIwmT(d&by#GT zZsxg}Wrd_B5m_Bc5ub%l+0En4#j4a*vN{s5AziIkgx-JGhe1x20W{ z=~to;YVBX7<>MAQaOJ4WZHJnJkXoL*jK%PWM?M;jx8~qvJyB_0$Th*?GW^6>)^)gsqNX|KyC>c!~j}9xCeV%-5GMu~IjYfH6m-MRa+-2!8VdC(yJts=k=DSOQ@fPK^~*?pp$zbjQT42{eU$FY*ZvD3rxC@7k>0~l zWtS)jEhkgBk_$0?ExfI#j%bJsY*>yws8EL-PxEnX!Oeg`o*(j#&OAg@C(~}Mp5%)J zo)}~KeNX;X+l+91Ji?L z33+^NU=hNt2u3dqs^!Wwlq_3nGoC%hWa5V&>71zyWz+5&Gj|(WN6p?y=*vRM@N7B? z{7S#g_I?~jG6|-!Ev+x|t()Czk=#K5fLb|uCX}l%QjHqzRpr0i;<=fqmjnl!iwSX* zr$Kl>#Z$-0cb*q@WvSw-r{OHhqv`(uXPLUlaYFo@9S%y733PY1{{Zr`CrMOE1l*gC z+2S7FJ}k7a7Zm>hDIl0@aj8Bh^X z$eNRzAmL>`t`zk-S)y{lL@0N8tokHcUAN|&qFYjLe0Dfa? zjB#$;r^hMsWf0kb#5^2{=xN{Qm~+b+N9t~lEU}evqfp3+O{wAU=kw3vn@RY7VU_RFRtA?s-o;g-o~n^~UgMw^vX z*oLX^y#`A`9up=y&*4ezL#ilH$id>nBqhkU@DC<^}x8wMS+2U|Y^i+=8eTZ`t2+GCim-OR{vv zvmbJO&fQw#A#kd|HF&A$H=sPJl*1JE_cGP8S5|1Y75nTC(Wt(`` zc0zyE@JXXhaW7B%qEByEeW(P+z?n7K@R@PE7Uflgz z3sEO^VH*mN8o#m)IR5~xmq$`$HWtUgh~|rbCtS5DC}_%-lKtpbtuI8d(&& ze=3TCwU`iVMd|a)#4XKB zX_{4);#?J#W0uxq3&d#kG~}gS3o9uH?GLk#Y?FcAh8V_cF708nbcLF4tjG(;YYH>U zS86$3sT)_}c5jO%j%Hhx%Mlq+Mq6pt*HI(GGc`$4v}AN;{fSi5i__xc{JC86@HSz^ z#%UF65*BG=KWFw$6!-6w)9QHT zUP;NO0yY;NUU8(~Y0DuI$s)TU8y-O$bM$2+O%iyEXw>k+n8%lGSL*uvUBu3^Su|?H z;!!nuFT|uS!E$k*hPsf@)P=^?cPA57+_iz48IByy?bq$U8M;bdPVNlqL7a> zOs?@@{{RJR)4a+H#-(Tr0?W7B_=5HLb5LU;oLHwma|RNlYzs>&+AGeqEKE>60o{El zX}?2Hi{V_~Py5eiSO5TEi}kLnAXoAlR{g(2&U_|Pi1qbKOt%Li3LYMNv(eKnv)1d zd|$@wB{@se(JHvmuA`1AH1FOK_SUANgRflA)9WF1k;2cTom@Z8Kv~`1-ApZQm`ODc z!s$~{*K%_|Bq0%Ua3EzVR9OYSfYLyXa#e0W-HIzfd^1%&R5*=tc2Lsft=erH)2QSY zo&F>G@;{i1Kg-XnLcp%xr_tXcb29w7=zzYv)2*1pB=02S{nG#}_3RBgW=<%|UZUe> z;f}+arkCZZwJTX<)Y;=#3~iz$^G&|du1is@jASAi>9n#jB(URdX(U{^D(gzBQkW%?O#}kiAFV*Oj(`?+1R3c!YQ7PXSnjtuE4cgzNDO2`=3<< 
zas!b2EBiTqvV0y8{*>HZJUH@AD$%UkODmg*agPqk<(0x0xVJ;~c)9wQE)V@l4*BArgvdUb}ez0F{=<9M@o?*Y@W7>mOEFTl!biayIO56Q}Nf#~B)JVYcqu^k7^ z5qgrxa|*x;O)}0Z?9(i5>8TVSYvEt=b7iZA0}-!Ziq_=A(D&%Sm>hlvC^qp5 zb0!(GLOd-q5S+M?R<@DAz*>mo+Mfs=I&b5c`C}<_av_#6^LleL9PoLs>?Z5b`U^QhK#1$OX*YAmfkwV~L+P9IXEUcB`am z`gB?zrjg7ory>0~Bdr~Ze%EijG^ z7{Qq>&%-iNYTFRw{P^Vbn0PVQ@qZIWY@%5#B%Vi-HS;CK#IDghK|4U?eW_R;C@D|2 z$BrF$_fZ4+$&|8T9uWwh8-JO@?sVBXC{w_IK!RofjX66|8moDDZU*{We zNl8vsG(dgE{Bti;4mf+9d^6yez3S}^@6@zeL{Ud>^LnV`lE^_)+nSzSxqY0{M2jP< zxcY0&Eb>q>>L}-Ka?0#}k~2F|8-t67B}wTKYBwm27pPJi9@X6DrdVX>%RCxwLe<(i zZWyh^W}%g88;GaHQP=#ekAWs9G*7ui>X9&!0#Rf&{CCdjYk$ zD)F9@IOS2@5AA=e@q9mPF2*Gj%G#$mlqQ%BjhMGtoce7@QKaM&G4?{S`#CI;l%x*| zaZWH4sjhk#-js**m!NFUPm7NNMm5%F$5)=N#sED=m|a7fx&b>=(!%L}V}yE$TJ z_$^>i>B@qGo5%32dN%(6@hr=RlZIkJs(Pr(1QZULdwXZ2tT1U3Ey9*xL?;;P-5d6~ zARlY?*}fmMm*&I8J3O3RGvS3ts8V?OVVNyT4UYX-%9t8O6st}5#UiLuOuPCC5c`P!Rhgj@^UlB9FPcv%rg8F zJ0CLKr0}%1GKpP?^qL^?M3t!H%DevnGaueJ`n*>umXPr5+YHc`*VO^%+o`WCCV5L2 zeqZqe&|uPGsTR#fSnz=quEx^x>qyitB7s^s&@;`~f{20N?#n!AM8ry5GX85BMqAe^ zV=J%P*?A?RLRRWyBHcjM<;qTs2S#2hym;iJr0=U7~$C=@{XtVD@mBZE*eu; zjKi?}aAXpBd_4*D^ z!=i(dBq8*jH7wzEp=qRcc_v{-V#dDxGVJq|s5D^Vh54rz$sjDme#>w49K<+fW8U8^ z(cyWF>r1d`3fF_zru$~-4^x6q6j=wAt@%f)z*UDOJb>}~yuE+P+v3a)(E8%tqWUD7 zdBsE?QPS@&CR>)O!XT=KPlT^8gMGMn%CyAGG>7+VGg#vy$1F8ef++oaM!1D{3Rr?Y z#VMJ&M-+CGg^EHl`cw3l%3mjbmAGX~IGPBJqVI6(P{eqThN`AME&l)om>`&D;Ql}7 zIg$~UJ50YXJjbS8U&*Ii1-B9c$deimgX87M^4n(R{#P75qlRM$k>`)n)uxeozu~?drzR;9NE@aSR)YTkN`~-B z8T8dutV)RIi!u8|o?Oozx5!URkjK>7#2fn6ht~eH^}+RyJZbe`hP31ME5@JX{{X{d z;7^9QQ*wX*)PO&jHDaK8(H0|PN*~vleuny(So-d^BVrv6-)fD&FeL#(|`28i@TU?;4Rso!ymzJ%KgbDb&t*;5;b* z09mshIL!KrXD^U+jWWS6tuJSp%Z2pWqLDr-_u^~QF*LoUJh^xbX`Z7)2*A|2vQ0Pg z+eep4iuz0IzgFcrh>|!sg53ppbj&Qhy_z`;L{NT@ycpU%c_B`V+Q{6>2hyf0OZlvCNQ28*^xt6GxJ1ACUB?bMQjzg=D{#>s^!4a=?zoTMm<@q;^GYx z%S7BsCA{baP`M|E2-I{vH_RAFN=AnN0L1K$IYdW`{M1jB^d43ZaIzGcNKi=Z!;$@Y zk)Kh+K)8_8@m(XQ(6tgxZd7vU2sQn9BdUUbCpC+tUBp3Yl#d<8{{VTz`Iw$amX<@C 
zhM#a)@@g?V5WRM;+>ZF1(LlgTvRL1QQOMMtx&m@1G+BOJhM!Ema8=#c{ZmiXOo-G( zEVS}Gkjh(oPL-?x0Jw2a z^=bODADM$;(@ZQjn!in$__MJ6Ll2Dt$MW-3HhQ5z3F1d!TA!?A<3KOv=BE~!to%XB z1w8Q-T^jZDsh(*75iMy_eXL&=~7A%tA3{sR%^|Q=$e=2=wN+!6L+CcvRyF)^(uEvBbduH1{ zs#yAL>hbBp>drLZ%9&&;%cw#X_N0+I{wQ-J<}o* zJy4*tQmh-onUjaKA(|XQKi#xt(#|sByFuOlSYE+iI*qy^>*&P9`+d4)L$(Bu)4MOo zNOLJXo>@A5)x2#y*M74FC1`vqGSLioPB70RmQShb(6|!E>2es-6?@JHrk zn(6TC4-+-Q7$|-K?Z}bizF_K1LD|mJ!cqvug7=UfxLO^>a~Jb4Z~Uy%e=&Jsp|qD# zib3%ZCyakF*fReB#@UgFSS9IWJQ9mq9FnM#WdgTRpjz%fx{)5Vu2Zas5s7ZodSfK- zSMs`QUPZB3AL{y(-BspwA0{8?^&UKRo>5TezAFJIUP>*drAlj_@g)E@`f)9#iozT zW#izqy6^Ji%+@Yt{V4=IK_#F@W&)HnEEmTshihWW0H))ioP9{G9q-1U91}>54-NuH zP+p!qymLFiiS;#caQ^@jjJ{%q#`arpt9qsr!mCfbS0&&?qw{kL!_Dy%Ppi6u1y_pK z@2)?WivDvd@9}lB3Yeud{g418{q@K4(Ps^2MgIWeyIvIZrKM}mi{t*bNAmG({OsTV z)#dUcD~=>LAjXyH;hL*1$99-W(VO7*8)TzwWP6eJ`mU)3ia~8-JkX&bCRLED3fFEm zA8RsVBU`w0l&@qT=~>md-CEoU9jYB;cA#$>sUnyp2StgORQpHaGy&3NftoT^WQ|D- z81&<`Nl;S=X{$Goa`B{OJ90A{g z{{XEP(K**Gl)Q^}kLN{RCaG^L6X_LF+<>O)y}y_CvX0Y*V6b-LZ_$(DZh*dC8qTLP z63TAoaT|8v2vw&40BHXJ!DiD)2ZtYXMpNr4KbD!_Sq&iK+?k4o8PfvnagTV+O!4UB(hr?zP`c&1*re}}^#LY3(zBUPqX2>G*|hjJ@MS8pb2;aQc* zkTDs1y5>bVgzvg4OkI z1faN%MWfUZDN+9b3;zH#<_X{g*Cy$48op_UwAWCeStgQo6aoDUm5h$%c^~-KDaVfu znj+hH=flfp4X)}{UN+D_skbUbM^bz7&6XGqh%PkbS*`k$qWNVgwK2U~pil)*9{H@& z{{VfF+hzXh3HO#-B!b}Z4*;A|K|78h4aN;;HPd}1l3(7~Od(5%B#1R=cvFe_p1Cl^92Cge!7m#ksG3T)2g&`CrY3QI`n-q>@jm8FM5HDo1#NNLrL) z3j9?Es-3@QD8?o_l+=1Ltym?k{k6>B=}=oq7`%lxX=CUQ9z=8J`z*ev!|=Bk&5j-5 zM^_suI~H6u4L;%tq!2uEFit;MjiVI*04W>}KMrg0jj49Gdo{kF9NKNHEh(gXN9XdkOzS+({AtNv@>5o(cMVSQYsEX3;ygs02goCpj@*yIB`@?%@{IdtC%Te z#ohG6M}pa5wFaM9zvt>U=A|v6Ag@vOxi{@)(T*`eu5!sEV`!!pmoDI}(7HGxyh9jB zZPB7H`_6#+i6_M$!|`!!qfr1iiE^+`S5gU`l3BF0maKU45r$aT{gjSEkM=y9G3#$O z?3qZ|1-@m5(j7A8ZKd@=ROQEhHDTYr9Y6Ph5|!iSNf(ue3turKo?o~^8>pui>rAt> zeyDre^n9fAyIbp+ZlpWZaxy>R?QfL>01#UyGLk9qf5&BV= zkBZMWIjY@iODhY=rz(VoF0+*_*Y>^>nOO1S9Ei3VCy%5t7!LNhNA|fV9=v_=@g>~Kga6s9=K)9n literal 0 HcmV?d00001 diff --git 
a/vendor/github.com/rwcarlsen/goexif/tiff/sample1.tif b/vendor/github.com/rwcarlsen/goexif/tiff/sample1.tif new file mode 100644 index 0000000000000000000000000000000000000000..fe51399c542954e5cd837eb50626c5cd02991ebb GIT binary patch literal 18382 zcmYhidpy(q|HuFRbfIC`kO*aRO2d*II@?HdN-lFgY#KRj4q-*Oni(oulcBRv8X~b_ zp`jdBR}MLDa|m6;rh`Kf`@Oq9zt8XXd;PJw?Y6yV@7weBd_JD{=i`M&8~`G41dyT}4# z341I3_!9P?Z39}pqcNd1Dss}$CJ03*OGQ;D>fD9z&WXN=21smZx~o=bwhYcL(?)e0 z3Ij{WCh2&4a;3KGUNlfOR7V-5pM*&51?`lIC4W_Qb(6v+fi&~3F3w@m+fiY~Y=t#N zpD^g2MNNT-&?L@Go ztevE?3|lsAA3CkGEixlb{kkJ_=?^x3gr^yc#lJ2 zEK~`m?F`z!!=IMm)3mPKf=9Db7wAsIUgeY06JkDRW0k-W5rUoJB_tzZ9{=VZE>G`JyTla>h4e3p>V%%0GaJeXo_EVXX3orakVe_ws{dVgg2 zLhz3?zqt9G?Ph};Ya-##2>$9P0-yDLA~mpOi6~iP5z@~8xEL+r#8(;nksg1nmgR z)hyX){cLk(Kd$xHdvu*&!0L*G@vCuJ@_u7tQ%(}F6jU4Z&shCRn`C{Z^_QfoyDM#B zkK`*`k|tS@Y%X9*h$cDf75LVLl?Ne9l8qBBEgQ+hl7hsQ&8mW5K^yA@&%E+i*MWCb zJPTZ(*znqvNW_fs1mUn~a9m%qwz;wyR6U&F#9XNEuc!zZb^@+i?IfWlHY!S=9hYi! z>)suKBA{1W`9VwT!(~A|{}+Bbs!(514M}!5?A<%1rE}7#-EFU(;=TXK#1=*l6c6bX zTA%jbV`uZD80DDgt8-FnFQjulW|Mkz5tnc%R_7!omu~>3AYu_ZNK_Pp^63vQm=Ryt z6j5%9S_vt~y7{blJ!LGX2`#y=LdKDH|6Qg2VKCoZgvdmN+irfqb*jTa9ef}Y^tkGR z-7jEIo^ZKR*arWYC4XQq<-n}b&VvV)^cB;~Cua(`&BERyvg8b}e0!)0(v8R{u(oQI#d*0% zT}b5XUP47^nm67N*vnlLu$*^u@$A!tLd9zWm;2#&DVg(OT{P3N4g33dwCs=H5lgRa zOo_=@r_~K?=3?_@Gj?XHkM$pHm!LvoT1xn)Q;X^58ZR-XPb6Slp9VL7{i7AhxO2s{;S5v{EeS(?W+H*AYLIHiaOuG znZ9^#qaiPreO9KEb@ur#ssl$1_OdhaaO&4+d1n8{Dd?V@(Yltr_U!&8jl>@25KMkV0ycDg)I zog_acB8=iL-U4fo_ewREJcP7as=_p2^2V}PAu}jBE=>oeHf3!rN7=>|9!}&xM)5bv z?d#;(m@m~NUehsfe)d;HIhN~2_VNHhxs{KVxqtqLOAEQO1Ny&pCbD5S5Od*)IVgDO zUcI+_$EEfj&AW5n@|NGh<6pC0Yf&1N@tD)KFQw34UEh@JR(Z9@@PoxwO&-dk`qBs! 
zTPAbC-8T%X{h~-EGaKboeEHMCJL88cqIR?1HSpdU_PN~6Ld^~QQ)GRx5NGxM)6~Id z7Wp#Lv2SIX5O|+3E)B+uYS0T%kT27B`bMXCduNTHa_I~U%PDK+*F1tb@raf|u0k~X zuF;~%ZX!GNWp+G+&X&<4nJFZ;e<;8X>k2dEoI=;EK`lz{>URNsLA5iP4;W`a| za!x1iNy+8a`;TiiV3iOaMXy3EDg1%JI9rc5MU;?DM87A}NtDZ|5xrrq}y{ZvWg+i7{ zK6QLR_azZkcxz{4wF@2STVl<{GvURW_R`eIfaL9>a& z*8Vgo*6jGY+H-zsi6vaq{IvU#pe7`FHQYa3FucdNVyM63bNJ=_ag^~X_va?)O86l2 zYJstbd!JaDtlu)v7)sQFJ{P_rI5=o^RDgx?_0O7j2 zsYFMIo!LqBe|B}z67n&w0k`)_|EK7K6fzx~EeAUI=<@vr9r4^fwutfZp=Jw{7 zEYA23kd@TIlP3}B(he$}X_5H`)H(ROy;*KXvi|y2%(8)s!KP*RD*HqNH|goJWo+n? zwp9o`I)=Y=JoS7-r}krM8^Ju$og*YG7-YO;wK8iek=@b({l5M5M|F<%NUx{$3E`mO zbNNlp$(%8<6@F=g=!ehbqdbz|Cmrb&-M|#t{v>)f&**2-d@*~g{Qa+gX+s;w)Lth*@6jye~RfS%|F@{o&pEW)X{U=i+nh#uiJsB@#0DVhZaB3)Y~Nxfibc5|;%^ ztBXdv=bPSl)DM5^NL3y3CBv<+J`Vd*9#XzTUQLU`&`a0JkBDMt=78h{JZb{>l%|;- zPh3;U!u?dVI$%1{B52-$!d?QIGHunr813+9Tt$4@VJ?M2@Qp08usAo^s<>IS#;(ki zMqy#P$AeKrA#o>nFln%-pzWXhAIDfibo;@UR8?@}-ZmX|vb4NfB{bIV7w9pa%0%{Q z%RqOY`nLxEFH$9vC*9`yHz)DiOLSDOlOI2mEi(Ykcf5fJ__Lmaz7Y+CJ*>RRv}}QV zAg)%^nT=~-3&kzb55C0)m_E(Qa#c0z@+IsYV<(@7q-P-Tqnx^Dm~tk~qk_O06B1Q2 z`yQ8&(0G4jb9|9Nxe*gtPdF@aoE}NsXwm+_(bMnyywX&v+%lG^F?R3g#_AA9&&0ep zGq8XWND-z!v)MoXGb$~ae^0PvI#0ul*RRx@q7xdcgeeoxO-P!!y>Mi1FA1dog0qViZ+SzOG#7nIF#=C29&* zXVDE)o^kPo@Y7;UknWURffP?c_FILMlN+H1N7odT#&8KEF?kI)SU0|n--q~P5}NT& zPG$8UeDz(XHwYxQ;U9d_s%Cp)U~}tdV+`5|Np%9E(-z~D|C+*C6LLfZzVHotA)enX zQq#IzS&cEycVymvf^na_&T%BIvDc)*cl3|PWQS9u4G(79C9z&8E zO4`MumRU7{$nfg0JM?uKERv_QkAhG*7$p>v*!)_!{M)Il^513EJ{6{`D{ovsy9ZMo zXf1`pfu_$KNfZ!}ZpN{neZl6c+jt>|spB_RSt-H8#M(9SWJ}t$C7<9sM?-_Zyx%K# zOY0}(s1#492448bB@m-H5)>HLr4$e{7`YUnZ{J;YE+xayE6%xd=HktdHS~>ZGfv+y z%$+V)Z#sNFoJQFG$wurlBosq@@&oN*sq%y-@G5aG(y> z`HlE-5>o8ag|liS=UlbC>|->@tY_}d{urS-6k{c;l(}K|)f26s4^zi?;kOqp(sro9 zw`yL2w@>*~fRrBx1Pc>-i)do2z31rtsMxM>ull=923o%D!ifmZU_43i{awaCSkIeZ zc1fjUL$l1&{{q!#gyn~X@utpJttZ_S7;H=#ZWMp>Ed^Qaa~#UZSr9g_uBR>F1r=M` zW@eja*Io+IQqR{UWdEGdBc;t-0Wn%rVOd{H<;5GO^4eSIi>*U`HP|k4a3*eq7xnpeetA;Q7xEbTI~4~maUUS4y#_xT@meS>Xmx8gk-b#Cz^gr4Xk&j8alkZF 
zIvvc#U$iSMJNu!2*VW${?aacQ%6wg${h}1gHSx7G7R6|s*Ht@#%fy!53yp}$3ls0U zg4JH`$-%jGDR&v8F}I`S@dbVXRR@*ik4Y_He2LjOS?Ehb>|O%&;)pYbqL|3qt1O;S z%T}%Th#Y)!c^kA*wj*y6F`6SJED0-h-CSK=j#?hbk^@8KV0C}_fXKF?)c;)K@^tZu z9yI0K`4?xL%|lI~<}*6y`;8s5?t~YGwC^h&93s%?}#e-D#k^O2W6Zyh+qW&~v0H?;+ zdO*C0cEe#ZDvh5yx}9dam3!1ibSq(GeIxQivc-t@U@3gI*gZe|Nq||iA4}LSswMa( zvXhm?Gl+(KB1d6>Ol+^yVGj@xCUc2R@1imZzCUUS{N(719RAh~x{6j>xm_+gL%?-h zbZMu}?TedJE|>bLAE1x+%TVf2xw>5e;7-6?ls1d|4N-L4$AP(+pH zxR{D+g}YaE_)2U-GH}ULLOw^tC)`2cEd(rKi_&X!(IRspqnvJvHby(qpS6|iqn(V= z6~?A*w6=bxemI(38c26+0v)(HEEck(Ny+>~4xca~TxAIp`Qw{a!f}P<4VL{?)7?bi zXl`~C`!MumvpVpNvtVhf@l`*SV`u{1XN=ZZG zKvwt8Df(k1IG7R^*XPgF}c zc0V`#H1f@8pygQ450S|IbXs%#Je@hVskutD0=Kh;0mgG_`OFrN*Kb9xs!P}2kl0U} z!vOpgzUBL86@jhHiuak|Ap>T3U#H=rG4%pFNai^Ze)j1+N+{0zP*g30-q=xL+ zWG8dmyXpnS=Bmc19ksipP}rf`SCF5G$k1!sewVa=<L*0<^?1x%%GHJZiD5{=oRjiqGCbRS5u5U z1+IOVim%H3E~rrmq3jH9o8J?uyp!DsS!Z)c*TtQ%B6XyZVJf{{2f61hm!^R@skRGW zj#Q%}sYq|r46O%J*J#MMXz90|aFrX{+m)`^BIeyxZgY|9RDVSKruwuc5=D5GJD{%6( ztkhehNXb@B0)7NvCV}R1S|j?%jF`C5+TH9(cPuv-)m5ljN^Mq4jHf2-RFTt4xdk3+ zDFIH#{l&_%GyDUl0cr%~Hzp*rtn*6JUkkTH#A!$wKH#c+U4@aHJ6-Gbs z-&6`;WYQfo10@y9!!`PI$tLq@P0ySP^s?{kCy_@6^RHVIZG$2gB}>g55x@pX8y%QE z9~(raKOoUhrdX0SVj4ejla#z6VF!3#_>~r3Rp6L+1s%J|2s~W(?5NqC*AJ(NVK)iJ z42{X0ETe#JiR$i!0hEo4=dvJ2=F{Sm`$U19X4INwjqtr*nDRYPoZNo5iID4CqCA7b zw!43c)5JmJ<{EawAW@68SYGY=1d8a|{3;0IZOMg#9O`!&)}5u!YbikEu#l-7^RckK zT01nS!b*BzNTpn9o^Z+2$G4_3bCznH6_cr{d$jkIgKdl*iu!6Tu3>#7E4zQ*j_?1u0ZkmUq^W@Q`of{$)l;s5um-M92E><7mok|A^y&;mS2s6n|MLngJssOcE7rT4SQJ+ad0iN~ z^m~*3UIp@sOTHu1;X6jUA0HU?b}>3w?APey?2=j8lM_07KNC4%`st0}1B%k9DW&v{ zZ!6;igQM3&c390}nuWYLJuRy^%f&5yA1OJiT{NERx!(Vw>;fAUJm7jHr}I?wQeuzr zt+RKBefHe*e^hw!YQ<5vKC3bH29-&bdiVV4<_CxOxgA%7*)HlyKXtS9R5?ru&Aaj! z7>Rg)_hc3oj9j>1nEl$tVzgtg$qQ$z2O2Z1W68?S^*>)gZp^*q`tO4|fK^T?#lapK z+?Fj?N!807Exdd+R=g4O!L;wW7$uGWF^`=49AlhMy? 
zHVO*7f+*6zncC7mrUZE7nOy|LYyy>r1Bc}rIZ$~)8rJAq?hmL z*mK0%=2GNso;tjt~uBlNB-x6jYLv`DPyonhXB%6i<)2y zE!Bmr2>{#XthQ(iPdws?vJV~zEG^U;b6lO&&DCYo^=B9lYcHcwPEB|;4DOT^K99Xs z+V6zHxa0E47++?6sZ)PH2J=1!gQ+-rI5BxdGo>{;nxD+>TRk;VGm%J2XOogiYeJ#r zqU9n-$ZB3oB#|Wek(8>Sg#yd?;^l5#~0wel1BWlcj(jMPEq zqne@a(?&Og(YkA1=p7xO`k58XXBDN4z$#;t!C%9S+kbXf%l02Cj#5p00I6?IQ~4e} z@tNGrHd`8x2GkL2Vu(PpT737M^z06I2xzQ!OBpV|QlRhNL6y>#A}beanDTU0uLFDgtj-T}ByDzO%=m`I z@pz30WIG`~-r;56sXK0m$1VoNc-MSAb&ZypL*%}}aWykdxY`VYrYD;dJ<3s%SA%;q zx-V`=0XufAHt_655T-22r_9|aWDZ4je8@bE=RT}`rDNd(`tLr4Lcfl;us0YO4!=Fl zTO7l~b;WsOxvFY@8CVbw*ykUscr=)bRA!I)?<8UrU}9naPvUN9V^J(`3IL}yV-R(+ zZB{_m$nTE*xKoz3jqb4bvuC6oti-4+KYN5K^odJ(|E?Z^QFq8C^9#NKWl}4Z7rVkQ z1Q)R#5|D zXWm0zOhIEiMY!%iNcpE@Z}9h9g(}79ExwZ&?#{qeBn{7ZgzCQ{_eIdhQI!(P(N#$ME@Fzza?MZoU z^D&v=qI0>$v{?{};pf>!3&CW-Ymi|+-dX=N z5MM_w%Zt@6^J+J6J~2Gj%SrsN?v{Zmd&O&tPx#_dXZHIykB+=mQ$7on-Snk-AJb8p z>ERp_27?X?%B*BYoW?&;%+|upgq5Lyif(4LZ^$5{)XdB&X_y{ZT|XY#?BQFlxMikR z+G8*!%xl-pP|$yKPt~&fo~qhDFi_Q96%E-!7gKUGM)7g-D8i?%nI)(-$qZ?4U95s~Vb%jk~ogUqswvJ5(eu6O<6PUX(S zFY8P<^zmG$q_Yk_{nw2ORFNj>$Zp^J7}68wCk3J zHZ5OnusKbe{BFCQd)OhQBmu-r^0Ls@rac zx#`!m>}&iK^XN6xxUr;VlMS#H4rfg)5@%1+L{zl{M81Vu<3_jen2;FVAhhau|M-Bg zg^b0L`w|^Lyd``xnpPlkx)!;v^E;6g^X1+E{$dAd48fWn@|6~!qw>EQx zfiarLyT-*eQHXo(2B+J>mM{kkl~j->7g9U}naV_5$3-QqiHB_D$^Oja=_+Ft^Lif! z=^!Q4Z36V@Fog=s{&K3~lzgt*)Rn#PNJ@y!F1xT2yNe~1TsKva|EZ^y^^Dze8AxAP z-XE&9yKo;LYT`cX^ikW%Ijc3;b9|!`LRRly51e|)NIN+I3$&rQBZBc7a64J#*JMx4-?~?gvL-E$7p&QOxiA0IqpN+sGRx0A^m!x zR3|5yGus|#t;OK0RpPp|!Q}M$zko^yy=d(XkQkp_;9I;4!5Z7r!Q@d# zgNv_m5njvxrFyq4i~&SqqZ0>Hr({|F2KA{Iy`j*vn(nILU6#}g9?dp01NubaTX;q| zG00T~i65>5`!w>yJ1wpD%M*#%ZjT9*f_%*a=X+3nPn!G;^Us2-DxlJ`h=`!(?Nd!b5&`y17JAj3bPFE$y{4xDC^ zuEotlgsa7X?caLjPuQ)FcUd_|%AQ?h^g|j<{Tu%7qQ!2gtC0$G*8oIZu+>G!VVH^JOoLx!A?O>wr*#Gs0iW ziqWFJ1RNV+ckWw=hB<`_mNQSMn(}J3w9Psh^*r>&n*P3J-9BMQ;IeaD_r*YjU)X=h z*Tht*(bS0uFHzV}Ob{bL?710;$@|JkzoxYcsRWq9TwsjZnqI1U+#bfPq-r>~^K0w! 
z=1!ZdGi7!MyG6PV-J9_iCS9WVokEmi9ugtDJFd|AUKPqKV1l96V)#k2^I4iPZK?af zX!?_LwR`+a=UNJ`%_zumU5STaLrL|Ot-fp7w4FPb(R!KjBw^XG&=sCoE?yw%K9Y<}AbZl1#ETaPb_ z$6G%yyj)745xS~$+B7?L z#6||{npvXzNF0z=elCT5c~Bvx$}7c(+JnzzV%=&hAk?=tHb)X2*l7*`*>9H3B{lfT zUhr3pZV2hZaXT6kRi+gb*l=$u?dWfm@O#1a-hdSw{n=KwwLPsL*NZMq;}vQIh~ zX;BZ1X`gcd!0F?1cTU~FW(JW1kUnamLa6+^@x5SJ?RBGX^8eKn?sk69&KBna-X7Nq z?+eqhB@J3cdj6vCr_e(QyDB;q&SgW!T(rH-t(&?9_jA4n~;_I#&jXbLreUJ52#|7mM#r>@%xH*j_A zi$G8#^A70Lol*b2LzMFlkTr@+t$$P~&0VnbEQ%eIV#D1B9#M!-Px>QbLzZ$aCo zxjG1?yK=WQ=pZe!Es*nIyO8O%8k9_MmHQ5n{CGrpZ!JU(&eN%zWN0FJDe@+3;V!O*#Q;ZMwDE@SkY?mwJ0M(vsrk6S4zM_8K||cj@$oFhnD zGdlWC>|SeTbNFdUXQcvFq&}`zpEdt+FW%(OiS&SC*w5{Ts*Iv;pGZd!_izHcf5r0lO}2 zNlBZwHg0cQ+(vXaAz+Pxq3l-EHExTIkM~Fif~`a|PK%MlgCV&v~DQh_H*tap7G|FJQ?o4uC%x;J`5M;mWW8#^@ zX=9!_-e-jo!tIqtNvjkfwB5Mz>CSDo268<(s{xQhug%yJ{6-=j3kb2?HE%7N5KvV;lHnBjGj4|s{IHnV9%q4v z!5cz+Y9LTnM$J*Y&l~~|pu*_J9ERUB0PrF)PDUaSW+Rd0VZPHJ=t*U>KV{DSY7nU*S4=VadT1y8e6nY_N%&xsr&GP&)5v?*J(&F#{%f2KB3~?S0ZfDh zey^y7lg*Fj1OR>XaS)s2_mZ&25<&JPvo%Q^;jmXeLl2G7S9t0{*F)=G4PshU8l$U? zcQgV_-#EajjNV3%2*)^8Ec>|!I^~T;M=N{g;rOdW^TfPipeYyGo*|3GVgo?HuSW1$ zYl-cwsZBAFLkP6oAgc5e)3fu^LD5p0ngyxh^8moOXNDO!&Mz%1;|08HWco&gqlfEZ z;o;=#qePmAraMvzj#6{9j_}DfehNn&xf_F+nzF?Gbq^t6;SLq&)W{OzM~Irb@sqhV zSub;H`lg8I_D0E5K?Djwixd^NU!z4vX=z0PFZI6fw6}pg>GsRjGGwz+7Y@XtXWINlZh<=QA z8mAosz6pV=BN3gCOu{PVGL)y~jMJ4tnecRQk;g;)lUNN88E&O5BLl&=9;I83Mifi) zrm#K;eC>eK09QFT}Tv!#^_iMO9S)DulAlg1AJm{U>`P!N|-t)K$WurQQ zOF#s%vV7$m>4CEy!JUk5JX_?K4dS}dydteDd6K4<{5K)ciw$YbtW=WTSy<2TLKW{R zNQ}2D)7m=YwxWm7{?iuMxL2!rdvt)8u04@@spE6@9SEgJ#<+jo1LP_Akiumhj` ztwlcbQw8Jy$X!x-8}s6jpa1RG{(BvMdQ+s%8ro(Zp8~1ZVvCeAdm(TzpklkC7$IRx z+IC&g_EKl*Y?WE`l~L=d^KI5JIh0+wY`2|kZYC)05mwiF)6RbDm*JI@loR0MyOVZ6a?fgk1E_IK8F^$#d&^Skc3-j9Uod z&N^P_bNP4~ZJE3n^s@&FDfhwt!JQG|DGLj$_#i`vn=cJw>B%?wpNHm&w;yuC6&q(B zaV(rko2`p$ZB-R7nSh0wF)n&awXms;sXepmSL2-f79QuEi!C^ES*h`KpBxvb24`Fo z2*;DV+5WX$J)cm{a)KbKT|Fx>nHwW>(+w%)06!eW>EF#B$N66 zruYvg^+#u9Y;t36?KqQ59KElq1hBrAdjj#DbNm+3rbr}OUHUY*O104-wnwqqNXk*? 
z_c<^m4A8;F3YjX7O3-g3b7AV7g0?ZaE0}&d#xxw#pfP0og~kXujXP{*QB0tD)Z1Ro zBx_AkEM3EHP0x8#Cyi{L_0yO=nU;7T7%DL2@Wbbmlan=2DyVh08eCJGA>DaOB@>`? zd~022z%2>d&bRy(17L}ICDd*_=Lop0K#$sOEtMBAbLD4fPJ=}-`pg@^89j{gI&F^6 z_r+jBXgfT-x^!fz2mqn|t9tFGKAUdSRwD6*gv?o%A43{|Z#7tR*JO2ayPkgxB4|DX zcL4soK<5hEQ2H1j42BBU0Nh+2Zip^DE!~}kzG15vI^5pkXP58)u$&UH0trL=B0gO7 zu7PACv!*=L5sytmqM5fs3tul^m^QdW*5%2KY=N!^XF<;(+sJ2!@B zPD%Co^L$a^**Cf_bl123$E-g1yXY9GT_{?BcNoh(+7(j7z4cH^rR%clZ-NN?Zr}ll zGAo|VfU&RT>Si8FFfqEJQer*h(gS}k8`X9D z5H~$d<~GMzaNZ{SdI_PLoZ{zMV4p}*7H10QPiLsSHq68Yd;KJq3rcAl$?e|~IpT@P ziQ_k~r=@xQr2C7Ft0#3GcG&nLq!?X+GnU@-o<5I~yUZKg<1?iP0^}4>F%{(Arm7W) z%sHPQhVEM#k|52Y!_MCnN%D*K-{RubiC3=8Jf_6DzO=^P}-)GMp4`a#yNq+SN&=rg-+yEu7U2{Q=j z(q&O?!z|=POUqWP+74H$WFd`Ra6EByZ_<)NYwwmHK{M*&3~EcvmcXjNME*c;aV=Lj zBtbH3l9jals{WvzL#>+$#~j(sTbMm7`tD&jk96>#Ea-K9-8QJA)lep3Z%y1?mq=26 zg-9g))~Y?bOnc1YB$6tiFW20P=1W>9UZ(jaL8cTuOLD$QBT>07Tz?k?0Mh2XDw z`#;<{rIzJVG>r<9Jb9kh%pBL!_!RH-?kOxLQk|~P?juX1C`TP@usO!9tCFBp+-6g1 z`kG}>s#VbDM3rPy*G?e~FwtpFSs$i?{&Q~HJH|SYvB@Typ@te@UkrauVsb6HW>Q3! 
z%NHxD7T}`Zxx00j-zHi}5fxAh27f~Zz$oQEJK@&gE7AaPds$yUJLF3*fGwEgi`s0v zv_6utToWrP#u0%ZEoV<0FD|OiFK#i;!^6L}GOl{l&o4w{j0P)I>>|VH$8<9wz$W&p z=uM61v~H{s8drz`(!3_qNH6s``Ov1in3P99k`j1HsE|Pn16Bdn!23wm+G1T#A^0%3Hv!w!la!n&CUop!oyrxaly6MK9k-9WuL#7xvN*E&pEY{>!Vw;2 zZo&{B@#;oYUS$F%!=ngt^K{dn0=uzn)qI0hOAOWC+LZe-R6&-&;XkI!I>;m{f1pL# z1{U{{A32uqXZLpmJvw6~1NF_4e)`v?LTMNg4eMkHIeby_qiZL1M_6->2 z2A1YW#WwC0>~_6(%ev!@E*=C^G0(n($aXXTssfmDRL(2Ik<^(PXXoC#a~JL&+7oQs zr4rY1_lOUzi=x_}rxp4z{>52FmWmBC_+rDmf)ag*ARN?@JsDP<2~qp?j6kYg8Xv$R_RhuBCR>jf91*cZn`ZSN zdRGb(%9$+;_|Ccy1w#V|v~j_ib|l-tgk6qh81iT13Z_T@(rO}~oeX%jyT=m&_qn#q zM*yKI5QEwCtHdQv2A|bVZT1t5B2!%EdDb>lZ8RiU*5;O!ovEH#oJEn&)%`-3 ztMoh9^S;(Nt1OSrm};2o&CMW>hyY}2`V?ztQmj179K4OtElxrA``&AiCz=~(xO^&| zW6g>R3Ys%oINX+5U<4-;@|L!89y;8Cbn|1@6fcb`Ot6{_$&tUj0yHtFFoVc^o$13& zj|6mu$6==9$~C0`LKw!Y3UbyTRZ2r14W!pS9zC{|97Js7aQO8?z!9@b+}KEz46*H1 zC%Wr5j+HCb3n$s4iG?(6cfeZDbPs!4#jIEk@zAgK+Ei{lRA3jBb;Itxaog8l`@NV+ z^R0UMYz@K2#8L}KsowDF5-gX*U)yR)TU+`(D+1U!OCoBCzS16*xx;R51aN@t#rIZM zCqOsy&IK5rbFvqkU8tlphrG;t0f2zP=xaztlBJk1b=?`+O{fesX~f!*k>PE*9CNVc zPHePrSsXgCoYcP9c3sF{BCJ_D3#b~rwdM5e^M%~abkLGE70XTZ&dO1PHENo)t_>0O zRF@DRwp}R+v1ytG0Zsp5Uo|alV74CS69caU>vm0rL2S1%GA6C%bmQT5a%xfFE5jWP zl~sVvqJ8H7V1-^fS9?{D%#Wo^NY=#70?&>#5@*lE#w-<{=dXyu0#)2xDqRZ-jU_a% zF#L#vD`HHDPhc4^GS@$n!+U!PaBjGqRQp%A2OkmvzSw3#?PYZU1K=}z6pF47{C)dK zkWM;8?eU3!-=iC0XVLgqFCNQ$Ji>4SW^|FNtTwk_79sZzyTEwe)ZFYhg8Y9TKM|BW zt0`*8VR|tOA`yR~0;w2jeqXMA>NKSNlZH3_R5#+zb^~)RBXgLz)3d+AeBHB69GTrt@Ezg5LjLbq*(b1 zR_)fME$}UC6vFJIx~>-72#+(8Z^PxnP~A7l(wQ!*>`9^@(I=(`?j3`oP9gxKK@FU; zL_7j#!!u{;pUW`Jra-2$UWTJ_WaFHhJ4N~?$3s6(RR~jlCS)c15e*yjN<;)hcHUm+ zP-~mJT7a0 zAKl5%b1b~TqnJ*tlpdt(g+C?hl`BpV`KiDJeQYF!Q?fQbP6GPaBV#O)i`|rHmB8-| zTXGM_*rUtnffYlT42*ktu^w6*B zINN{|H+fU<0Lg2d@8PZh^ahS_Pjng3N8bwc=+_x4C`~}So0xR*#p5ab)x5vvYLX|4 z#VkJ1=(0Hc)D6}Ki@+v?YKX@d86P9^Z`~XRM|L-P;HY=LN7a9z-G1Tdj()dF?>FJO zQ|e%^QHlq(ItRn1h3dNgR}_KcADU3%DZpDDMajuwNvn|fZ>DPc0ISGO*bN0uLLGlo zPP-qIxNo`M&%tilxO#^>YKV#?CM*Ch;WgY(mNdzmGwYcIg%@6DW4RFLH?RuW{?q0j 
z#kFAbE{aT@Edt;zb${M9(HvP@BJN_R2V|FGz;4Ty+!%_Z$Y})VT+`=ku#Sf z2b$hYSRF{Gbfl;HWrYlVqMECKdX60}?(6>Q zuHneqapkud79|L8y5X)Qm{hq(0x)tzY+>rC!0%x>m>#NVtYgbDD84_GRoZWL1ha~Y z5H}GA4X)HYgiNVy`~Fayn7%2TP<`%l=*4ZhN9T^8wKWGZfOg#Gi|0?qtU=~JuBf&u#Y$8 zOaT5RL%)`@6tDair2>sXg5;@iZ@cYpA5r28QDE{bz4t*>tcJN1Fo>x#M!|k8wF+m` z-jB`4yl9vVB@_nA$K`$8=^NusoKNNIyl8|043B2ZzzqKg3><33a<@f$Gv-#)vtzS& zr=v|31`)?36{dVzkgp$q%gcuol$vkfHnH2zzui7p2I^H?z%|r*3o2-jUos{s;V$7e z)6~Ciq%Fox09Xrqx}#w;FXd8P{F$|7>ly5udD%SVW9eg4vj`AxKq#s2@meuEwmiAQ zRqa|y+u!{_i_^S_YB5~fMLb96Baga8SDg`dDln~+}D97>jie9WBe?knyN zsEtI|nxr|M*aFEq1Pr%kBieSXNKz#$tvO|T6gO9VUVa|8Obr^A+z40~i6oNU>#bk+ z1T73Rx;D3ejZI3G9Mc#&v211eSyG&mQm`peN}g*Sif)$dVWy2PRFn-%s;gHd1x_p5 zZ3PPwrL$k-fwna{#c$`Fq(XYTiV`rwANwW=>ev!)uJ7FUrrUexo$eZ&mah-hl;uNg@@TuA(-%bT2}456qF!}=rTs&L-qht zFqZ8XxLNa(4d)qq&(^Tu>`F3PkxT0?22A4i9f03z#j6}}1S2sa`9Rd#BJiMhZa;?p zf`8LZSuL)NkXemms`1G6LPj+!6zqADEbnA_w4a( z2`F+xtnl$1WDlScTR5*G(e)qPfx|$AK;I^X=n3JJ>oN=HDsxcq2DhXW<3WMNP;@)~ zfWobF$*E(Dc66 zd`Rk2qU+c)b|n>kJA#Dhb+7+3km~NCi5*^yLT<{h|Bm;plHp+f2|&0U?b* zVVW;H1Qj@5-(N~E)$elZFT+%L&`np!b&h8XbgJlSP^n>BX_J(#KKSY9ey7yz2t<#q zIX<9PIbJKoM+sLB#_hxVTts<>?Eu%RfsyIC2~n^Ay=hNEK$H)lKO9~4+~V#s9@_dV z$}?cYWDC5Jg>#&G<&32>dtiE|P|J^@R~`{=E8o=4V)NMqQZ-;DGd8Ij6tbSP*?n*1 z&~zE}VJ(3tKX=SUFI`?AU0lxzA@LP0N9;?O=4`zy>7;E0{#&ak4zB%KWxAtk)r#^z z8p04h11O8*Ej_h*d|yr}-7)!`mD)(n^C`>Apz)@76!LVrXs9NN9~a#P*@d*zi+Tne z`I8JZC3cvrWcT!h80aw-%%2uN*#Y{qT@ay;>HQL(Ed_=35ZSCLSgR(|)Kb_Q!d>oI zyLKOekKTlzeW>b8{^4vG>X@Z%|E(f4`()u6)nMlFFtQD=Nve3{G7y?b0XpZ#38o9SAtXAH_&W(RP1Q(se+a>j<2T z@vniek%x01YaI<7xcS=h$2{xoqw|+8-Se0UnNS`LGWcnq`)IF6eBE-qs*Pjd<9mq# zbf^AWeS$W#y1i6)BxVHg1+)0w0f0kSwdJLtW{@M&ralH{iCra|UsKUu^G;sq0;=&f zP?hv4K(Fi{uaNKeO;9vlBd+D(hQE&d4c5Tb0PKE^sFZwZspXkd+2J7N(-ofipMS@9 z)|Cf=<%QJwz9iwgC^6{SxaEf&U3h`Ha$DN! zGz%L~%~3hH>TD)Q-ifn%Je|zpp}<8WRuTKNR!)=Bb&``#^zKO9a?#+BTX)B{-nNU! 
zWG~*`AS1>TX}~g>xd^z0JX#94&p>agmdXm3L+ws#&6nP=@L1}Y^8y#9dg@t76tR46 zJRracoGnXoVF``_w@oD8UV0D&YLMUs%YAeqX&+UUi8uEl-$Oaerd*QA$a^t0MR@j8 zwP}$&OrHy8IXj%_2%6^m?mgc@0i8DKPxD?EMG73ZzO(Jfp+z5sY+k>W?vvSAuip2; zx90gwVcUSXZs>-9n)2g)-EmVF57h8hNrV< zc3)RncNzcb%`d8R-@Jd}CN3j(rA#3Iw0K<9jz=czR;&sYeYrniHE;=&^dyzijTcL9 z^xoR$C}R)oL;j9Wzqoz&QhA%rw|%$d&AWnqeZX$^3$sJp4s0tt-e!I(fwyVtiNe#HRAiP0UYwxnsPr(( z>sV$?mM3sArQ+!oW`*Ahgx&8*=UJa|h?EA6ZYCZ)YYUu}Tobx*l9Qry(1c3o4rgE` zc%)+nAE=Ef)!*d!P{MxgggI#oj~I!cQTXPnuHfyOSPvh>?yv6|Gh3ecn%UQMO z^z87{S|+o3MsC{@MQNju^%t66F8FZxu5QFMR_`(^p6xO`jf%GutA*b$RoxWLvtHG+ zDX!_$8DsS-o>fI2IhzB`;+7|~0ml~Q)Y@a8ou93ELnueugyTx?iQL=3Hu1$zDl>$q z|9blFMs#YmXuE_!>H^>zXr~!{mOI!aot!O?DE76kiqZ|e{NoINWJ}L$0hh;Sz3nDl zJB{>jwM(7efBN1hv%*dJH~q@)@148TczgZ)W4Wi_v)(>`$o=@m4bw8*+u}{`DO}9E zTyYV&yG!eI(Z!03@3i*EOj^vwd2Q|Q~j=6Kzkk8-hj-^0R>*Ovpw z%D?-{e0~4>@7?|H7g={a=E&uL^yB0HYeCt6PG7S9JnLtxz2y0Tzpqbu1b&|Jy(A*- zy7IgF|JUCeo_u=y*S4A;TaD*f{!yI%JNkB>WYgA%d)H2QI;-U}qpsNFl=UxvY^yt~ zp|UwQWQwZ0>&c5pSe=ekWj7~8ntAn!_bp!Zg!|5s#*Pot?Y+Hyukxae_O1Eue=NDF zZ*l75_qwO=W@oPtTeQRX@e1$Li7R!En*vvj16wO9dIpCzr)U0|77ku@dT>gB{J+#l zF&*PQrM%o>;2G||4hg}_a-ie^1lc-UH;5u-M|%b)1qO}=2ohvqU}R=sWME)8zy@SO z*&dt>3`|fqNRAoGW&yIR2?IlNeqLE>QAuiwLV%O6LRw}{Dg#49iyVW1I)VlO@X=Pk literal 0 HcmV?d00001 diff --git a/vendor/github.com/rwcarlsen/goexif/tiff/tag.go b/vendor/github.com/rwcarlsen/goexif/tiff/tag.go new file mode 100644 index 0000000..b9ce791 --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/tiff/tag.go @@ -0,0 +1,445 @@ +package tiff + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "math/big" + "strings" + "unicode" + "unicode/utf8" +) + +// Format specifies the Go type equivalent used to represent the basic +// tiff data types. 
+type Format int + +const ( + IntVal Format = iota + FloatVal + RatVal + StringVal + UndefVal + OtherVal +) + +var ErrShortReadTagValue = errors.New("tiff: short read of tag value") + +var formatNames = map[Format]string{ + IntVal: "int", + FloatVal: "float", + RatVal: "rational", + StringVal: "string", + UndefVal: "undefined", + OtherVal: "other", +} + +// DataType represents the basic tiff tag data types. +type DataType uint16 + +const ( + DTByte DataType = 1 + DTAscii DataType = 2 + DTShort DataType = 3 + DTLong DataType = 4 + DTRational DataType = 5 + DTSByte DataType = 6 + DTUndefined DataType = 7 + DTSShort DataType = 8 + DTSLong DataType = 9 + DTSRational DataType = 10 + DTFloat DataType = 11 + DTDouble DataType = 12 +) + +var typeNames = map[DataType]string{ + DTByte: "byte", + DTAscii: "ascii", + DTShort: "short", + DTLong: "long", + DTRational: "rational", + DTSByte: "signed byte", + DTUndefined: "undefined", + DTSShort: "signed short", + DTSLong: "signed long", + DTSRational: "signed rational", + DTFloat: "float", + DTDouble: "double", +} + +// typeSize specifies the size in bytes of each type. +var typeSize = map[DataType]uint32{ + DTByte: 1, + DTAscii: 1, + DTShort: 2, + DTLong: 4, + DTRational: 8, + DTSByte: 1, + DTUndefined: 1, + DTSShort: 2, + DTSLong: 4, + DTSRational: 8, + DTFloat: 4, + DTDouble: 8, +} + +// Tag reflects the parsed content of a tiff IFD tag. +type Tag struct { + // Id is the 2-byte tiff tag identifier. + Id uint16 + // Type is an integer (1 through 12) indicating the tag value's data type. + Type DataType + // Count is the number of type Type stored in the tag's value (i.e. the + // tag's value is an array of type Type and length Count). + Count uint32 + // Val holds the bytes that represent the tag's value. + Val []byte + // ValOffset holds byte offset of the tag value w.r.t. the beginning of the + // reader it was decoded from. Zero if the tag value fit inside the offset + // field. 
+ ValOffset uint32 + + order binary.ByteOrder + intVals []int64 + floatVals []float64 + ratVals [][]int64 + strVal string + format Format +} + +// DecodeTag parses a tiff-encoded IFD tag from r and returns a Tag object. The +// first read from r should be the first byte of the tag. ReadAt offsets should +// generally be relative to the beginning of the tiff structure (not relative +// to the beginning of the tag). +func DecodeTag(r ReadAtReader, order binary.ByteOrder) (*Tag, error) { + t := new(Tag) + t.order = order + + err := binary.Read(r, order, &t.Id) + if err != nil { + return nil, errors.New("tiff: tag id read failed: " + err.Error()) + } + + err = binary.Read(r, order, &t.Type) + if err != nil { + return nil, errors.New("tiff: tag type read failed: " + err.Error()) + } + + err = binary.Read(r, order, &t.Count) + if err != nil { + return nil, errors.New("tiff: tag component count read failed: " + err.Error()) + } + + // There seems to be a relatively common corrupt tag which has a Count of + // MaxUint32. This is probably not a valid value, so return early. + if t.Count == 1<<32-1 { + return t, errors.New("invalid Count offset in tag") + } + + valLen := typeSize[t.Type] * t.Count + if valLen == 0 { + return t, errors.New("zero length tag value") + } + + if valLen > 4 { + binary.Read(r, order, &t.ValOffset) + + // Use a bytes.Buffer so we don't allocate a huge slice if the tag + // is corrupt. + var buff bytes.Buffer + sr := io.NewSectionReader(r, int64(t.ValOffset), int64(valLen)) + n, err := io.Copy(&buff, sr) + if err != nil { + return t, errors.New("tiff: tag value read failed: " + err.Error()) + } else if n != int64(valLen) { + return t, ErrShortReadTagValue + } + t.Val = buff.Bytes() + + } else { + val := make([]byte, valLen) + if _, err = io.ReadFull(r, val); err != nil { + return t, errors.New("tiff: tag offset read failed: " + err.Error()) + } + // ignore padding. 
+ if _, err = io.ReadFull(r, make([]byte, 4-valLen)); err != nil { + return t, errors.New("tiff: tag offset read failed: " + err.Error()) + } + + t.Val = val + } + + return t, t.convertVals() +} + +func (t *Tag) convertVals() error { + r := bytes.NewReader(t.Val) + + switch t.Type { + case DTAscii: + if len(t.Val) <= 0 { + break + } + nullPos := bytes.IndexByte(t.Val, 0) + if nullPos == -1 { + t.strVal = string(t.Val) + } else { + // ignore all trailing NULL bytes, in case of a broken t.Count + t.strVal = string(t.Val[:nullPos]) + } + case DTByte: + var v uint8 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTShort: + var v uint16 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTLong: + var v uint32 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTSByte: + var v int8 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTSShort: + var v int16 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTSLong: + var v int32 + t.intVals = make([]int64, int(t.Count)) + for i := range t.intVals { + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.intVals[i] = int64(v) + } + case DTRational: + t.ratVals = make([][]int64, int(t.Count)) + for i := range t.ratVals { + var n, d uint32 + err := binary.Read(r, t.order, &n) + if err != nil { + return err + } + err = binary.Read(r, t.order, &d) + if err != nil { + 
return err + } + t.ratVals[i] = []int64{int64(n), int64(d)} + } + case DTSRational: + t.ratVals = make([][]int64, int(t.Count)) + for i := range t.ratVals { + var n, d int32 + err := binary.Read(r, t.order, &n) + if err != nil { + return err + } + err = binary.Read(r, t.order, &d) + if err != nil { + return err + } + t.ratVals[i] = []int64{int64(n), int64(d)} + } + case DTFloat: // float32 + t.floatVals = make([]float64, int(t.Count)) + for i := range t.floatVals { + var v float32 + err := binary.Read(r, t.order, &v) + if err != nil { + return err + } + t.floatVals[i] = float64(v) + } + case DTDouble: + t.floatVals = make([]float64, int(t.Count)) + for i := range t.floatVals { + var u float64 + err := binary.Read(r, t.order, &u) + if err != nil { + return err + } + t.floatVals[i] = u + } + } + + switch t.Type { + case DTByte, DTShort, DTLong, DTSByte, DTSShort, DTSLong: + t.format = IntVal + case DTRational, DTSRational: + t.format = RatVal + case DTFloat, DTDouble: + t.format = FloatVal + case DTAscii: + t.format = StringVal + case DTUndefined: + t.format = UndefVal + default: + t.format = OtherVal + } + + return nil +} + +// Format returns a value indicating which method can be called to retrieve the +// tag's value properly typed (e.g. integer, rational, etc.). +func (t *Tag) Format() Format { return t.format } + +func (t *Tag) typeErr(to Format) error { + return &wrongFmtErr{typeNames[t.Type], formatNames[to]} +} + +// Rat returns the tag's i'th value as a rational number. It returns a nil and +// an error if this tag's Format is not RatVal. It panics for zero deminators +// or if i is out of range. +func (t *Tag) Rat(i int) (*big.Rat, error) { + n, d, err := t.Rat2(i) + if err != nil { + return nil, err + } + return big.NewRat(n, d), nil +} + +// Rat2 returns the tag's i'th value as a rational number represented by a +// numerator-denominator pair. It returns an error if the tag's Format is not +// RatVal. It panics if i is out of range. 
+func (t *Tag) Rat2(i int) (num, den int64, err error) { + if t.format != RatVal { + return 0, 0, t.typeErr(RatVal) + } + return t.ratVals[i][0], t.ratVals[i][1], nil +} + +// Int64 returns the tag's i'th value as an integer. It returns an error if the +// tag's Format is not IntVal. It panics if i is out of range. +func (t *Tag) Int64(i int) (int64, error) { + if t.format != IntVal { + return 0, t.typeErr(IntVal) + } + return t.intVals[i], nil +} + +// Int returns the tag's i'th value as an integer. It returns an error if the +// tag's Format is not IntVal. It panics if i is out of range. +func (t *Tag) Int(i int) (int, error) { + if t.format != IntVal { + return 0, t.typeErr(IntVal) + } + return int(t.intVals[i]), nil +} + +// Float returns the tag's i'th value as a float. It returns an error if the +// tag's Format is not IntVal. It panics if i is out of range. +func (t *Tag) Float(i int) (float64, error) { + if t.format != FloatVal { + return 0, t.typeErr(FloatVal) + } + return t.floatVals[i], nil +} + +// StringVal returns the tag's value as a string. It returns an error if the +// tag's Format is not StringVal. It panics if i is out of range. +func (t *Tag) StringVal() (string, error) { + if t.format != StringVal { + return "", t.typeErr(StringVal) + } + return t.strVal, nil +} + +// String returns a nicely formatted version of the tag. 
+func (t *Tag) String() string { + data, err := t.MarshalJSON() + if err != nil { + return "ERROR: " + err.Error() + } + + if t.Count == 1 { + return strings.Trim(fmt.Sprintf("%s", data), "[]") + } + return fmt.Sprintf("%s", data) +} + +func (t *Tag) MarshalJSON() ([]byte, error) { + switch t.format { + case StringVal, UndefVal: + return nullString(t.Val), nil + case OtherVal: + return []byte(fmt.Sprintf("unknown tag type '%v'", t.Type)), nil + } + + rv := []string{} + for i := 0; i < int(t.Count); i++ { + switch t.format { + case RatVal: + n, d, _ := t.Rat2(i) + rv = append(rv, fmt.Sprintf(`"%v/%v"`, n, d)) + case FloatVal: + v, _ := t.Float(i) + rv = append(rv, fmt.Sprintf("%v", v)) + case IntVal: + v, _ := t.Int(i) + rv = append(rv, fmt.Sprintf("%v", v)) + } + } + return []byte(fmt.Sprintf(`[%s]`, strings.Join(rv, ","))), nil +} + +func nullString(in []byte) []byte { + rv := bytes.Buffer{} + rv.WriteByte('"') + for _, b := range in { + if unicode.IsPrint(rune(b)) { + rv.WriteByte(b) + } + } + rv.WriteByte('"') + rvb := rv.Bytes() + if utf8.Valid(rvb) { + return rvb + } + return []byte(`""`) +} + +type wrongFmtErr struct { + From, To string +} + +func (e *wrongFmtErr) Error() string { + return fmt.Sprintf("cannot convert tag type '%v' into '%v'", e.From, e.To) +} diff --git a/vendor/github.com/rwcarlsen/goexif/tiff/tiff.go b/vendor/github.com/rwcarlsen/goexif/tiff/tiff.go new file mode 100644 index 0000000..771e918 --- /dev/null +++ b/vendor/github.com/rwcarlsen/goexif/tiff/tiff.go @@ -0,0 +1,153 @@ +// Package tiff implements TIFF decoding as defined in TIFF 6.0 specification at +// http://partners.adobe.com/public/developer/en/tiff/TIFF6.pdf +package tiff + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "io/ioutil" +) + +// ReadAtReader is used when decoding Tiff tags and directories +type ReadAtReader interface { + io.Reader + io.ReaderAt +} + +// Tiff provides access to a decoded tiff data structure. 
+type Tiff struct { + // Dirs is an ordered slice of the tiff's Image File Directories (IFDs). + // The IFD at index 0 is IFD0. + Dirs []*Dir + // The tiff's byte-encoding (i.e. big/little endian). + Order binary.ByteOrder +} + +// Decode parses tiff-encoded data from r and returns a Tiff struct that +// reflects the structure and content of the tiff data. The first read from r +// should be the first byte of the tiff-encoded data and not necessarily the +// first byte of an os.File object. +func Decode(r io.Reader) (*Tiff, error) { + data, err := ioutil.ReadAll(r) + if err != nil { + return nil, errors.New("tiff: could not read data") + } + buf := bytes.NewReader(data) + + t := new(Tiff) + + // read byte order + bo := make([]byte, 2) + if _, err = io.ReadFull(buf, bo); err != nil { + return nil, errors.New("tiff: could not read tiff byte order") + } + if string(bo) == "II" { + t.Order = binary.LittleEndian + } else if string(bo) == "MM" { + t.Order = binary.BigEndian + } else { + return nil, errors.New("tiff: could not read tiff byte order") + } + + // check for special tiff marker + var sp int16 + err = binary.Read(buf, t.Order, &sp) + if err != nil || 42 != sp { + return nil, errors.New("tiff: could not find special tiff marker") + } + + // load offset to first IFD + var offset int32 + err = binary.Read(buf, t.Order, &offset) + if err != nil { + return nil, errors.New("tiff: could not read offset to first IFD") + } + + // load IFD's + var d *Dir + prev := offset + for offset != 0 { + // seek to offset + _, err := buf.Seek(int64(offset), 0) + if err != nil { + return nil, errors.New("tiff: seek to IFD failed") + } + + if buf.Len() == 0 { + return nil, errors.New("tiff: seek offset after EOF") + } + + // load the dir + d, offset, err = DecodeDir(buf, t.Order) + if err != nil { + return nil, err + } + + if offset == prev { + return nil, errors.New("tiff: recursive IFD") + } + prev = offset + + t.Dirs = append(t.Dirs, d) + } + + return t, nil +} + +func (tf *Tiff) 
String() string { + var buf bytes.Buffer + fmt.Fprint(&buf, "Tiff{") + for _, d := range tf.Dirs { + fmt.Fprintf(&buf, "%s, ", d.String()) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Dir provides access to the parsed content of a tiff Image File Directory (IFD). +type Dir struct { + Tags []*Tag +} + +// DecodeDir parses a tiff-encoded IFD from r and returns a Dir object. offset +// is the offset to the next IFD. The first read from r should be at the first +// byte of the IFD. ReadAt offsets should generally be relative to the +// beginning of the tiff structure (not relative to the beginning of the IFD). +func DecodeDir(r ReadAtReader, order binary.ByteOrder) (d *Dir, offset int32, err error) { + d = new(Dir) + + // get num of tags in ifd + var nTags int16 + err = binary.Read(r, order, &nTags) + if err != nil { + return nil, 0, errors.New("tiff: failed to read IFD tag count: " + err.Error()) + } + + // load tags + for n := 0; n < int(nTags); n++ { + t, err := DecodeTag(r, order) + if err != nil { + return nil, 0, err + } + d.Tags = append(d.Tags, t) + } + + // get offset to next ifd + err = binary.Read(r, order, &offset) + if err != nil { + return nil, 0, errors.New("tiff: falied to read offset to next IFD: " + err.Error()) + } + + return d, offset, nil +} + +func (d *Dir) String() string { + s := "Dir{" + for _, t := range d.Tags { + s += t.String() + ", " + } + return s + "}" +} diff --git a/vendor/github.com/sirupsen/logrus/.gitignore b/vendor/github.com/sirupsen/logrus/.gitignore new file mode 100644 index 0000000..6b7d7d1 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/.gitignore @@ -0,0 +1,2 @@ +logrus +vendor diff --git a/vendor/github.com/sirupsen/logrus/.golangci.yml b/vendor/github.com/sirupsen/logrus/.golangci.yml new file mode 100644 index 0000000..65dc285 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/.golangci.yml @@ -0,0 +1,40 @@ +run: + # do not run on test files yet + tests: false + +# all available settings of 
specific linters +linters-settings: + errcheck: + # report about not checking of errors in type assetions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + + lll: + line-length: 100 + tab-width: 4 + + prealloc: + simple: false + range-loops: false + for-loops: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + +linters: + enable: + - megacheck + - govet + disable: + - maligned + - prealloc + disable-all: false + presets: + - bugs + - unused + fast: false diff --git a/vendor/github.com/sirupsen/logrus/.travis.yml b/vendor/github.com/sirupsen/logrus/.travis.yml new file mode 100644 index 0000000..5e20aa4 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/.travis.yml @@ -0,0 +1,17 @@ +language: go +go_import_path: github.com/sirupsen/logrus +git: + depth: 1 +env: + - GO111MODULE=on +go: [1.13.x, 1.14.x] +os: [linux, osx] +install: + - ./travis/install.sh +script: + - ./travis/cross_build.sh + - ./travis/lint.sh + - export GOMAXPROCS=4 + - export GORACE=halt_on_error=1 + - go test -race -v ./... + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then go test -race -v -tags appengine ./... 
; fi diff --git a/vendor/github.com/sirupsen/logrus/CHANGELOG.md b/vendor/github.com/sirupsen/logrus/CHANGELOG.md new file mode 100644 index 0000000..584026d --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/CHANGELOG.md @@ -0,0 +1,223 @@ +# 1.6.0 +Fixes: + * end of line cleanup + * revert the entry concurrency bug fix whic leads to deadlock under some circumstances + * update dependency on go-windows-terminal-sequences to fix a crash with go 1.14 + +Features: + * add an option to the `TextFormatter` to completely disable fields quoting + +# 1.5.0 +Code quality: + * add golangci linter run on travis + +Fixes: + * add mutex for hooks concurrent access on `Entry` data + * caller function field for go1.14 + * fix build issue for gopherjs target + +Feature: + * add an hooks/writer sub-package whose goal is to split output on different stream depending on the trace level + * add a `DisableHTMLEscape` option in the `JSONFormatter` + * add `ForceQuote` and `PadLevelText` options in the `TextFormatter` + +# 1.4.2 + * Fixes build break for plan9, nacl, solaris +# 1.4.1 +This new release introduces: + * Enhance TextFormatter to not print caller information when they are empty (#944) + * Remove dependency on golang.org/x/crypto (#932, #943) + +Fixes: + * Fix Entry.WithContext method to return a copy of the initial entry (#941) + +# 1.4.0 +This new release introduces: + * Add `DeferExitHandler`, similar to `RegisterExitHandler` but prepending the handler to the list of handlers (semantically like `defer`) (#848). + * Add `CallerPrettyfier` to `JSONFormatter` and `TextFormatter` (#909, #911) + * Add `Entry.WithContext()` and `Entry.Context`, to set a context on entries to be used e.g. in hooks (#919). + +Fixes: + * Fix wrong method calls `Logger.Print` and `Logger.Warningln` (#893). 
+ * Update `Entry.Logf` to not do string formatting unless the log level is enabled (#903) + * Fix infinite recursion on unknown `Level.String()` (#907) + * Fix race condition in `getCaller` (#916). + + +# 1.3.0 +This new release introduces: + * Log, Logf, Logln functions for Logger and Entry that take a Level + +Fixes: + * Building prometheus node_exporter on AIX (#840) + * Race condition in TextFormatter (#468) + * Travis CI import path (#868) + * Remove coloured output on Windows (#862) + * Pointer to func as field in JSONFormatter (#870) + * Properly marshal Levels (#873) + +# 1.2.0 +This new release introduces: + * A new method `SetReportCaller` in the `Logger` to enable the file, line and calling function from which the trace has been issued + * A new trace level named `Trace` whose level is below `Debug` + * A configurable exit function to be called upon a Fatal trace + * The `Level` object now implements `encoding.TextUnmarshaler` interface + +# 1.1.1 +This is a bug fix release. 
+ * fix the build break on Solaris + * don't drop a whole trace in JSONFormatter when a field param is a function pointer which can not be serialized + +# 1.1.0 +This new release introduces: + * several fixes: + * a fix for a race condition on entry formatting + * proper cleanup of previously used entries before putting them back in the pool + * the extra new line at the end of message in text formatter has been removed + * a new global public API to check if a level is activated: IsLevelEnabled + * the following methods have been added to the Logger object + * IsLevelEnabled + * SetFormatter + * SetOutput + * ReplaceHooks + * introduction of go module + * an indent configuration for the json formatter + * output colour support for windows + * the field sort function is now configurable for text formatter + * the CLICOLOR and CLICOLOR\_FORCE environment variable support in text formater + +# 1.0.6 + +This new release introduces: + * a new api WithTime which allows to easily force the time of the log entry + which is mostly useful for logger wrapper + * a fix reverting the immutability of the entry given as parameter to the hooks + a new configuration field of the json formatter in order to put all the fields + in a nested dictionnary + * a new SetOutput method in the Logger + * a new configuration of the textformatter to configure the name of the default keys + * a new configuration of the text formatter to disable the level truncation + +# 1.0.5 + +* Fix hooks race (#707) +* Fix panic deadlock (#695) + +# 1.0.4 + +* Fix race when adding hooks (#612) +* Fix terminal check in AppEngine (#635) + +# 1.0.3 + +* Replace example files with testable examples + +# 1.0.2 + +* bug: quote non-string values in text formatter (#583) +* Make (*Logger) SetLevel a public method + +# 1.0.1 + +* bug: fix escaping in text formatter (#575) + +# 1.0.0 + +* Officially changed name to lower-case +* bug: colors on Windows 10 (#541) +* bug: fix race in accessing level (#512) + +# 0.11.5 + 
+* feature: add writer and writerlevel to entry (#372) + +# 0.11.4 + +* bug: fix undefined variable on solaris (#493) + +# 0.11.3 + +* formatter: configure quoting of empty values (#484) +* formatter: configure quoting character (default is `"`) (#484) +* bug: fix not importing io correctly in non-linux environments (#481) + +# 0.11.2 + +* bug: fix windows terminal detection (#476) + +# 0.11.1 + +* bug: fix tty detection with custom out (#471) + +# 0.11.0 + +* performance: Use bufferpool to allocate (#370) +* terminal: terminal detection for app-engine (#343) +* feature: exit handler (#375) + +# 0.10.0 + +* feature: Add a test hook (#180) +* feature: `ParseLevel` is now case-insensitive (#326) +* feature: `FieldLogger` interface that generalizes `Logger` and `Entry` (#308) +* performance: avoid re-allocations on `WithFields` (#335) + +# 0.9.0 + +* logrus/text_formatter: don't emit empty msg +* logrus/hooks/airbrake: move out of main repository +* logrus/hooks/sentry: move out of main repository +* logrus/hooks/papertrail: move out of main repository +* logrus/hooks/bugsnag: move out of main repository +* logrus/core: run tests with `-race` +* logrus/core: detect TTY based on `stderr` +* logrus/core: support `WithError` on logger +* logrus/core: Solaris support + +# 0.8.7 + +* logrus/core: fix possible race (#216) +* logrus/doc: small typo fixes and doc improvements + + +# 0.8.6 + +* hooks/raven: allow passing an initialized client + +# 0.8.5 + +* logrus/core: revert #208 + +# 0.8.4 + +* formatter/text: fix data race (#218) + +# 0.8.3 + +* logrus/core: fix entry log level (#208) +* logrus/core: improve performance of text formatter by 40% +* logrus/core: expose `LevelHooks` type +* logrus/core: add support for DragonflyBSD and NetBSD +* formatter/text: print structs more verbosely + +# 0.8.2 + +* logrus: fix more Fatal family functions + +# 0.8.1 + +* logrus: fix not exiting on `Fatalf` and `Fatalln` + +# 0.8.0 + +* logrus: defaults to stderr instead of stdout +* 
hooks/sentry: add special field for `*http.Request` +* formatter/text: ignore Windows for colors + +# 0.7.3 + +* formatter/\*: allow configuration of timestamp layout + +# 0.7.2 + +* formatter/text: Add configuration option for time format (#158) diff --git a/vendor/github.com/sirupsen/logrus/LICENSE b/vendor/github.com/sirupsen/logrus/LICENSE new file mode 100644 index 0000000..f090cb4 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Simon Eskildsen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/sirupsen/logrus/README.md b/vendor/github.com/sirupsen/logrus/README.md new file mode 100644 index 0000000..5796706 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/README.md @@ -0,0 +1,513 @@ +# Logrus :walrus: [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![GoDoc](https://godoc.org/github.com/sirupsen/logrus?status.svg)](https://godoc.org/github.com/sirupsen/logrus) + +Logrus is a structured logger for Go (golang), completely API compatible with +the standard library logger. + +**Logrus is in maintenance-mode.** We will not be introducing new features. It's +simply too hard to do in a way that won't break many people's projects, which is +the last thing you want from your Logging library (again...). + +This does not mean Logrus is dead. Logrus will continue to be maintained for +security, (backwards compatible) bug fixes, and performance (where we are +limited by the interface). + +I believe Logrus' biggest contribution is to have played a part in today's +widespread use of structured logging in Golang. There doesn't seem to be a +reason to do a major, breaking iteration into Logrus V2, since the fantastic Go +community has built those independently. Many fantastic alternatives have sprung +up. Logrus would look like those, had it been re-designed with what we know +about structured logging in Go today. Check out, for example, +[Zerolog][zerolog], [Zap][zap], and [Apex][apex]. + +[zerolog]: https://github.com/rs/zerolog +[zap]: https://github.com/uber-go/zap +[apex]: https://github.com/apex/log + +**Seeing weird case-sensitive problems?** It's in the past been possible to +import Logrus as both upper- and lower-case. Due to the Go package environment, +this caused issues in the community and we needed a standard. Some environments +experienced problems with the upper-case variant, so the lower-case was decided. 
+Everything using `logrus` will need to use the lower-case: +`github.com/sirupsen/logrus`. Any package that isn't, should be changed. + +To fix Glide, see [these +comments](https://github.com/sirupsen/logrus/issues/553#issuecomment-306591437). +For an in-depth explanation of the casing issue, see [this +comment](https://github.com/sirupsen/logrus/issues/570#issuecomment-313933276). + +Nicely color-coded in development (when a TTY is attached, otherwise just +plain text): + +![Colored](http://i.imgur.com/PY7qMwd.png) + +With `log.SetFormatter(&log.JSONFormatter{})`, for easy parsing by logstash +or Splunk: + +```json +{"animal":"walrus","level":"info","msg":"A group of walrus emerges from the +ocean","size":10,"time":"2014-03-10 19:57:38.562264131 -0400 EDT"} + +{"level":"warning","msg":"The group's number increased tremendously!", +"number":122,"omg":true,"time":"2014-03-10 19:57:38.562471297 -0400 EDT"} + +{"animal":"walrus","level":"info","msg":"A giant walrus appears!", +"size":10,"time":"2014-03-10 19:57:38.562500591 -0400 EDT"} + +{"animal":"walrus","level":"info","msg":"Tremendously sized cow enters the ocean.", +"size":9,"time":"2014-03-10 19:57:38.562527896 -0400 EDT"} + +{"level":"fatal","msg":"The ice breaks!","number":100,"omg":true, +"time":"2014-03-10 19:57:38.562543128 -0400 EDT"} +``` + +With the default `log.SetFormatter(&log.TextFormatter{})` when a TTY is not +attached, the output is compatible with the +[logfmt](http://godoc.org/github.com/kr/logfmt) format: + +```text +time="2015-03-26T01:27:38-04:00" level=debug msg="Started observing beach" animal=walrus number=8 +time="2015-03-26T01:27:38-04:00" level=info msg="A group of walrus emerges from the ocean" animal=walrus size=10 +time="2015-03-26T01:27:38-04:00" level=warning msg="The group's number increased tremendously!" number=122 omg=true +time="2015-03-26T01:27:38-04:00" level=debug msg="Temperature changes" temperature=-4 +time="2015-03-26T01:27:38-04:00" level=panic msg="It's over 9000!" 
animal=orca size=9009 +time="2015-03-26T01:27:38-04:00" level=fatal msg="The ice breaks!" err=&{0x2082280c0 map[animal:orca size:9009] 2015-03-26 01:27:38.441574009 -0400 EDT panic It's over 9000!} number=100 omg=true +``` +To ensure this behaviour even if a TTY is attached, set your formatter as follows: + +```go + log.SetFormatter(&log.TextFormatter{ + DisableColors: true, + FullTimestamp: true, + }) +``` + +#### Logging Method Name + +If you wish to add the calling method as a field, instruct the logger via: +```go +log.SetReportCaller(true) +``` +This adds the caller as 'method' like so: + +```json +{"animal":"penguin","level":"fatal","method":"github.com/sirupsen/arcticcreatures.migrate","msg":"a penguin swims by", +"time":"2014-03-10 19:57:38.562543129 -0400 EDT"} +``` + +```text +time="2015-03-26T01:27:38-04:00" level=fatal method=github.com/sirupsen/arcticcreatures.migrate msg="a penguin swims by" animal=penguin +``` +Note that this does add measurable overhead - the cost will depend on the version of Go, but is +between 20 and 40% in recent tests with 1.6 and 1.7. You can validate this in your +environment via benchmarks: +``` +go test -bench=.*CallerTracing +``` + + +#### Case-sensitivity + +The organization's name was changed to lower-case--and this will not be changed +back. If you are getting import conflicts due to case sensitivity, please use +the lower-case import: `github.com/sirupsen/logrus`. + +#### Example + +The simplest way to use Logrus is simply the package-level exported logger: + +```go +package main + +import ( + log "github.com/sirupsen/logrus" +) + +func main() { + log.WithFields(log.Fields{ + "animal": "walrus", + }).Info("A walrus appears") +} +``` + +Note that it's completely api-compatible with the stdlib logger, so you can +replace your `log` imports everywhere with `log "github.com/sirupsen/logrus"` +and you'll now have the flexibility of Logrus. 
You can customize it all you +want: + +```go +package main + +import ( + "os" + log "github.com/sirupsen/logrus" +) + +func init() { + // Log as JSON instead of the default ASCII formatter. + log.SetFormatter(&log.JSONFormatter{}) + + // Output to stdout instead of the default stderr + // Can be any io.Writer, see below for File example + log.SetOutput(os.Stdout) + + // Only log the warning severity or above. + log.SetLevel(log.WarnLevel) +} + +func main() { + log.WithFields(log.Fields{ + "animal": "walrus", + "size": 10, + }).Info("A group of walrus emerges from the ocean") + + log.WithFields(log.Fields{ + "omg": true, + "number": 122, + }).Warn("The group's number increased tremendously!") + + log.WithFields(log.Fields{ + "omg": true, + "number": 100, + }).Fatal("The ice breaks!") + + // A common pattern is to re-use fields between logging statements by re-using + // the logrus.Entry returned from WithFields() + contextLogger := log.WithFields(log.Fields{ + "common": "this is a common field", + "other": "I also should be logged always", + }) + + contextLogger.Info("I'll be logged with common and other field") + contextLogger.Info("Me too") +} +``` + +For more advanced usage such as logging to multiple locations from the same +application, you can also create an instance of the `logrus` Logger: + +```go +package main + +import ( + "os" + "github.com/sirupsen/logrus" +) + +// Create a new instance of the logger. You can have any number of instances. +var log = logrus.New() + +func main() { + // The API for setting attributes is a little different than the package level + // exported logger. See Godoc. 
+ log.Out = os.Stdout + + // You could set this to any `io.Writer` such as a file + // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) + // if err == nil { + // log.Out = file + // } else { + // log.Info("Failed to log to file, using default stderr") + // } + + log.WithFields(logrus.Fields{ + "animal": "walrus", + "size": 10, + }).Info("A group of walrus emerges from the ocean") +} +``` + +#### Fields + +Logrus encourages careful, structured logging through logging fields instead of +long, unparseable error messages. For example, instead of: `log.Fatalf("Failed +to send event %s to topic %s with key %d")`, you should log the much more +discoverable: + +```go +log.WithFields(log.Fields{ + "event": event, + "topic": topic, + "key": key, +}).Fatal("Failed to send event") +``` + +We've found this API forces you to think about logging in a way that produces +much more useful logging messages. We've been in countless situations where just +a single added field to a log statement that was already there would've saved us +hours. The `WithFields` call is optional. + +In general, with Logrus using any of the `printf`-family functions should be +seen as a hint you should add a field, however, you can still use the +`printf`-family functions with Logrus. + +#### Default Fields + +Often it's helpful to have fields _always_ attached to log statements in an +application or parts of one. For example, you may want to always log the +`request_id` and `user_ip` in the context of a request. 
Instead of writing +`log.WithFields(log.Fields{"request_id": request_id, "user_ip": user_ip})` on +every line, you can create a `logrus.Entry` to pass around instead: + +```go +requestLogger := log.WithFields(log.Fields{"request_id": request_id, "user_ip": user_ip}) +requestLogger.Info("something happened on that request") # will log request_id and user_ip +requestLogger.Warn("something not great happened") +``` + +#### Hooks + +You can add hooks for logging levels. For example to send errors to an exception +tracking service on `Error`, `Fatal` and `Panic`, info to StatsD or log to +multiple places simultaneously, e.g. syslog. + +Logrus comes with [built-in hooks](hooks/). Add those, or your custom hook, in +`init`: + +```go +import ( + log "github.com/sirupsen/logrus" + "gopkg.in/gemnasium/logrus-airbrake-hook.v2" // the package is named "airbrake" + logrus_syslog "github.com/sirupsen/logrus/hooks/syslog" + "log/syslog" +) + +func init() { + + // Use the Airbrake hook to report errors that have Error severity or above to + // an exception tracker. You can create custom hooks, see the Hooks section. + log.AddHook(airbrake.NewHook(123, "xyz", "production")) + + hook, err := logrus_syslog.NewSyslogHook("udp", "localhost:514", syslog.LOG_INFO, "") + if err != nil { + log.Error("Unable to connect to local syslog daemon") + } else { + log.AddHook(hook) + } +} +``` +Note: Syslog hook also support connecting to local syslog (Ex. "/dev/log" or "/var/run/syslog" or "/var/run/log"). For the detail, please check the [syslog hook README](hooks/syslog/README.md). + +A list of currently known service hooks can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) + + +#### Level logging + +Logrus has seven logging levels: Trace, Debug, Info, Warning, Error, Fatal and Panic. 
+ +```go +log.Trace("Something very low level.") +log.Debug("Useful debugging information.") +log.Info("Something noteworthy happened!") +log.Warn("You should probably take a look at this.") +log.Error("Something failed but I'm not quitting.") +// Calls os.Exit(1) after logging +log.Fatal("Bye.") +// Calls panic() after logging +log.Panic("I'm bailing.") +``` + +You can set the logging level on a `Logger`, then it will only log entries with +that severity or anything above it: + +```go +// Will log anything that is info or above (warn, error, fatal, panic). Default. +log.SetLevel(log.InfoLevel) +``` + +It may be useful to set `log.Level = logrus.DebugLevel` in a debug or verbose +environment if your application has that. + +#### Entries + +Besides the fields added with `WithField` or `WithFields` some fields are +automatically added to all logging events: + +1. `time`. The timestamp when the entry was created. +2. `msg`. The logging message passed to `{Info,Warn,Error,Fatal,Panic}` after + the `AddFields` call. E.g. `Failed to send event.` +3. `level`. The logging level. E.g. `info`. + +#### Environments + +Logrus has no notion of environment. + +If you wish for hooks and formatters to only be used in specific environments, +you should handle that yourself. For example, if your application has a global +variable `Environment`, which is a string representation of the environment you +could do: + +```go +import ( + log "github.com/sirupsen/logrus" +) + +init() { + // do something here to set environment depending on an environment variable + // or command-line flag + if Environment == "production" { + log.SetFormatter(&log.JSONFormatter{}) + } else { + // The TextFormatter is default, you don't actually have to do this. + log.SetFormatter(&log.TextFormatter{}) + } +} +``` + +This configuration is how `logrus` was intended to be used, but JSON in +production is mostly only useful if you do log aggregation with tools like +Splunk or Logstash. 
+ +#### Formatters + +The built-in logging formatters are: + +* `logrus.TextFormatter`. Logs the event in colors if stdout is a tty, otherwise + without colors. + * *Note:* to force colored output when there is no TTY, set the `ForceColors` + field to `true`. To force no colored output even if there is a TTY set the + `DisableColors` field to `true`. For Windows, see + [github.com/mattn/go-colorable](https://github.com/mattn/go-colorable). + * When colors are enabled, levels are truncated to 4 characters by default. To disable + truncation set the `DisableLevelTruncation` field to `true`. + * When outputting to a TTY, it's often helpful to visually scan down a column where all the levels are the same width. Setting the `PadLevelText` field to `true` enables this behavior, by adding padding to the level text. + * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#TextFormatter). +* `logrus.JSONFormatter`. Logs fields as JSON. + * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#JSONFormatter). + +Third party logging formatters: + +* [`FluentdFormatter`](https://github.com/joonix/log). Formats entries that can be parsed by Kubernetes and Google Container Engine. +* [`GELF`](https://github.com/fabienm/go-logrus-formatters). Formats entries so they comply to Graylog's [GELF 1.1 specification](http://docs.graylog.org/en/2.4/pages/gelf.html). +* [`logstash`](https://github.com/bshuster-repo/logrus-logstash-hook). Logs fields as [Logstash](http://logstash.net) Events. +* [`prefixed`](https://github.com/x-cray/logrus-prefixed-formatter). Displays log entry source along with alternative layout. +* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the Power of Zalgo. +* [`nested-logrus-formatter`](https://github.com/antonfisher/nested-logrus-formatter). Converts logrus fields to a nested structure. +* [`powerful-logrus-formatter`](https://github.com/zput/zxcTool). 
get fileName, log's line number and the latest function's name when print log; Sava log to files. +* [`caption-json-formatter`](https://github.com/nolleh/caption_json_formatter). logrus's message json formatter with human-readable caption added. + +You can define your formatter by implementing the `Formatter` interface, +requiring a `Format` method. `Format` takes an `*Entry`. `entry.Data` is a +`Fields` type (`map[string]interface{}`) with all your fields as well as the +default ones (see Entries section above): + +```go +type MyJSONFormatter struct { +} + +log.SetFormatter(new(MyJSONFormatter)) + +func (f *MyJSONFormatter) Format(entry *Entry) ([]byte, error) { + // Note this doesn't include Time, Level and Message which are available on + // the Entry. Consult `godoc` on information about those fields or read the + // source of the official loggers. + serialized, err := json.Marshal(entry.Data) + if err != nil { + return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) + } + return append(serialized, '\n'), nil +} +``` + +#### Logger as an `io.Writer` + +Logrus can be transformed into an `io.Writer`. That writer is the end of an `io.Pipe` and it is your responsibility to close it. + +```go +w := logger.Writer() +defer w.Close() + +srv := http.Server{ + // create a stdlib log.Logger that writes to + // logrus.Logger. + ErrorLog: log.New(w, "", 0), +} +``` + +Each line written to that writer will be printed the usual way, using formatters +and hooks. The level for those entries is `info`. + +This means that we can override the standard library logger easily: + +```go +logger := logrus.New() +logger.Formatter = &logrus.JSONFormatter{} + +// Use logrus for standard log output +// Note that `log` here references stdlib's log +// Not logrus imported under the name `log`. +log.SetOutput(logger.Writer()) +``` + +#### Rotation + +Log rotation is not provided with Logrus. 
Log rotation should be done by an +external program (like `logrotate(8)`) that can compress and delete old log +entries. It should not be a feature of the application-level logger. + +#### Tools + +| Tool | Description | +| ---- | ----------- | +|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will be generated with different configs in different environments.| +|[Logrus Viper Helper](https://github.com/heirko/go-contrib/tree/master/logrusHelper)|An Helper around Logrus to wrap with spf13/Viper to load configuration with fangs! And to simplify Logrus configuration use some behavior of [Logrus Mate](https://github.com/gogap/logrus_mate). [sample](https://github.com/heirko/iris-contrib/blob/master/middleware/logrus-logger/example) | + +#### Testing + +Logrus has a built in facility for asserting the presence of log messages. This is implemented through the `test` hook and provides: + +* decorators for existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just adds the `test` hook +* a test logger (`test.NewNullLogger`) that just records log messages (and does not output any): + +```go +import( + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestSomething(t*testing.T){ + logger, hook := test.NewNullLogger() + logger.Error("Helloerror") + + assert.Equal(t, 1, len(hook.Entries)) + assert.Equal(t, logrus.ErrorLevel, hook.LastEntry().Level) + assert.Equal(t, "Helloerror", hook.LastEntry().Message) + + hook.Reset() + assert.Nil(t, hook.LastEntry()) +} +``` + +#### Fatal handlers + +Logrus can register one or more functions that will be called when any `fatal` +level message is logged. The registered handlers will be executed before +logrus performs an `os.Exit(1)`. This behavior may be helpful if callers need +to gracefully shutdown. 
Unlike a `panic("Something went wrong...")` call which can be intercepted with a deferred `recover` a call to `os.Exit(1)` can not be intercepted. + +``` +... +handler := func() { + // gracefully shutdown something... +} +logrus.RegisterExitHandler(handler) +... +``` + +#### Thread safety + +By default, Logger is protected by a mutex for concurrent writes. The mutex is held when calling hooks and writing logs. +If you are sure such locking is not needed, you can call logger.SetNoLock() to disable the locking. + +Situation when locking is not needed includes: + +* You have no hooks registered, or hooks calling is already thread-safe. + +* Writing to logger.Out is already thread-safe, for example: + + 1) logger.Out is protected by locks. + + 2) logger.Out is an os.File handler opened with `O_APPEND` flag, and every write is smaller than 4k. (This allows multi-thread/multi-process writing) + + (Refer to http://www.notthewizard.com/2014/06/17/are-files-appends-really-atomic/) diff --git a/vendor/github.com/sirupsen/logrus/alt_exit.go b/vendor/github.com/sirupsen/logrus/alt_exit.go new file mode 100644 index 0000000..8fd189e --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/alt_exit.go @@ -0,0 +1,76 @@ +package logrus + +// The following code was sourced and modified from the +// https://github.com/tebeka/atexit package governed by the following license: +// +// Copyright (c) 2012 Miki Tebeka . 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +// the Software, and to permit persons to whom the Software is furnished to do so, +// subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +import ( + "fmt" + "os" +) + +var handlers = []func(){} + +func runHandler(handler func()) { + defer func() { + if err := recover(); err != nil { + fmt.Fprintln(os.Stderr, "Error: Logrus exit handler error:", err) + } + }() + + handler() +} + +func runHandlers() { + for _, handler := range handlers { + runHandler(handler) + } +} + +// Exit runs all the Logrus atexit handlers and then terminates the program using os.Exit(code) +func Exit(code int) { + runHandlers() + os.Exit(code) +} + +// RegisterExitHandler appends a Logrus Exit handler to the list of handlers, +// call logrus.Exit to invoke all handlers. The handlers will also be invoked when +// any Fatal log entry is made. +// +// This method is useful when a caller wishes to use logrus to log a fatal +// message but also needs to gracefully shutdown. 
An example usecase could be +// closing database connections, or sending a alert that the application is +// closing. +func RegisterExitHandler(handler func()) { + handlers = append(handlers, handler) +} + +// DeferExitHandler prepends a Logrus Exit handler to the list of handlers, +// call logrus.Exit to invoke all handlers. The handlers will also be invoked when +// any Fatal log entry is made. +// +// This method is useful when a caller wishes to use logrus to log a fatal +// message but also needs to gracefully shutdown. An example usecase could be +// closing database connections, or sending a alert that the application is +// closing. +func DeferExitHandler(handler func()) { + handlers = append([]func(){handler}, handlers...) +} diff --git a/vendor/github.com/sirupsen/logrus/appveyor.yml b/vendor/github.com/sirupsen/logrus/appveyor.yml new file mode 100644 index 0000000..df9d65c --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/appveyor.yml @@ -0,0 +1,14 @@ +version: "{build}" +platform: x64 +clone_folder: c:\gopath\src\github.com\sirupsen\logrus +environment: + GOPATH: c:\gopath +branches: + only: + - master +install: + - set PATH=%GOPATH%\bin;c:\go\bin;%PATH% + - go version +build_script: + - go get -t + - go test diff --git a/vendor/github.com/sirupsen/logrus/doc.go b/vendor/github.com/sirupsen/logrus/doc.go new file mode 100644 index 0000000..da67aba --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/doc.go @@ -0,0 +1,26 @@ +/* +Package logrus is a structured logger for Go, completely API compatible with the standard library logger. 
+ + +The simplest way to use Logrus is simply the package-level exported logger: + + package main + + import ( + log "github.com/sirupsen/logrus" + ) + + func main() { + log.WithFields(log.Fields{ + "animal": "walrus", + "number": 1, + "size": 10, + }).Info("A walrus appears") + } + +Output: + time="2015-09-07T08:48:33Z" level=info msg="A walrus appears" animal=walrus number=1 size=10 + +For a full guide visit https://github.com/sirupsen/logrus +*/ +package logrus diff --git a/vendor/github.com/sirupsen/logrus/entry.go b/vendor/github.com/sirupsen/logrus/entry.go new file mode 100644 index 0000000..f6e062a --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/entry.go @@ -0,0 +1,426 @@ +package logrus + +import ( + "bytes" + "context" + "fmt" + "os" + "reflect" + "runtime" + "strings" + "sync" + "time" +) + +var ( + bufferPool *sync.Pool + + // qualified package name, cached at first use + logrusPackage string + + // Positions in the call stack when tracing to report the calling method + minimumCallerDepth int + + // Used for caller information initialisation + callerInitOnce sync.Once +) + +const ( + maximumCallerDepth int = 25 + knownLogrusFrames int = 4 +) + +func init() { + bufferPool = &sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, + } + + // start at the bottom of the stack before the package-name cache is primed + minimumCallerDepth = 1 +} + +// Defines the key when adding errors using WithError. +var ErrorKey = "error" + +// An entry is the final or intermediate Logrus logging entry. It contains all +// the fields passed with WithField{,s}. It's finally logged when Trace, Debug, +// Info, Warn, Error, Fatal or Panic is called on it. These objects can be +// reused and passed around as much as you wish to avoid field duplication. +type Entry struct { + Logger *Logger + + // Contains all the fields set by the user. 
+ Data Fields + + // Time at which the log entry was created + Time time.Time + + // Level the log entry was logged at: Trace, Debug, Info, Warn, Error, Fatal or Panic + // This field will be set on entry firing and the value will be equal to the one in Logger struct field. + Level Level + + // Calling method, with package name + Caller *runtime.Frame + + // Message passed to Trace, Debug, Info, Warn, Error, Fatal or Panic + Message string + + // When formatter is called in entry.log(), a Buffer may be set to entry + Buffer *bytes.Buffer + + // Contains the context set by the user. Useful for hook processing etc. + Context context.Context + + // err may contain a field formatting error + err string +} + +func NewEntry(logger *Logger) *Entry { + return &Entry{ + Logger: logger, + // Default is three fields, plus one optional. Give a little extra room. + Data: make(Fields, 6), + } +} + +// Returns the bytes representation of this entry from the formatter. +func (entry *Entry) Bytes() ([]byte, error) { + return entry.Logger.Formatter.Format(entry) +} + +// Returns the string representation from the reader and ultimately the +// formatter. +func (entry *Entry) String() (string, error) { + serialized, err := entry.Bytes() + if err != nil { + return "", err + } + str := string(serialized) + return str, nil +} + +// Add an error as single field (using the key defined in ErrorKey) to the Entry. +func (entry *Entry) WithError(err error) *Entry { + return entry.WithField(ErrorKey, err) +} + +// Add a context to the Entry. +func (entry *Entry) WithContext(ctx context.Context) *Entry { + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: entry.Time, err: entry.err, Context: ctx} +} + +// Add a single field to the Entry. 
+func (entry *Entry) WithField(key string, value interface{}) *Entry { + return entry.WithFields(Fields{key: value}) +} + +// Add a map of fields to the Entry. +func (entry *Entry) WithFields(fields Fields) *Entry { + data := make(Fields, len(entry.Data)+len(fields)) + for k, v := range entry.Data { + data[k] = v + } + fieldErr := entry.err + for k, v := range fields { + isErrField := false + if t := reflect.TypeOf(v); t != nil { + switch t.Kind() { + case reflect.Func: + isErrField = true + case reflect.Ptr: + isErrField = t.Elem().Kind() == reflect.Func + } + } + if isErrField { + tmp := fmt.Sprintf("can not add field %q", k) + if fieldErr != "" { + fieldErr = entry.err + ", " + tmp + } else { + fieldErr = tmp + } + } else { + data[k] = v + } + } + return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, err: fieldErr, Context: entry.Context} +} + +// Overrides the time of the Entry. +func (entry *Entry) WithTime(t time.Time) *Entry { + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: t, err: entry.err, Context: entry.Context} +} + +// getPackageName reduces a fully qualified function name to the package name +// There really ought to be to be a better way... 
+func getPackageName(f string) string { + for { + lastPeriod := strings.LastIndex(f, ".") + lastSlash := strings.LastIndex(f, "/") + if lastPeriod > lastSlash { + f = f[:lastPeriod] + } else { + break + } + } + + return f +} + +// getCaller retrieves the name of the first non-logrus calling function +func getCaller() *runtime.Frame { + // cache this package's fully-qualified name + callerInitOnce.Do(func() { + pcs := make([]uintptr, maximumCallerDepth) + _ = runtime.Callers(0, pcs) + + // dynamic get the package name and the minimum caller depth + for i := 0; i < maximumCallerDepth; i++ { + funcName := runtime.FuncForPC(pcs[i]).Name() + if strings.Contains(funcName, "getCaller") { + logrusPackage = getPackageName(funcName) + break + } + } + + minimumCallerDepth = knownLogrusFrames + }) + + // Restrict the lookback frames to avoid runaway lookups + pcs := make([]uintptr, maximumCallerDepth) + depth := runtime.Callers(minimumCallerDepth, pcs) + frames := runtime.CallersFrames(pcs[:depth]) + + for f, again := frames.Next(); again; f, again = frames.Next() { + pkg := getPackageName(f.Function) + + // If the caller isn't part of this package, we're done + if pkg != logrusPackage { + return &f //nolint:scopelint + } + } + + // if we got here, we failed to find the caller's context + return nil +} + +func (entry Entry) HasCaller() (has bool) { + return entry.Logger != nil && + entry.Logger.ReportCaller && + entry.Caller != nil +} + +// This function is not declared with a pointer value because otherwise +// race conditions will occur when using multiple goroutines +func (entry Entry) log(level Level, msg string) { + var buffer *bytes.Buffer + + // Default to now, but allow users to override if they want. + // + // We don't have to worry about polluting future calls to Entry#log() + // with this assignment because this function is declared with a + // non-pointer receiver. 
+ if entry.Time.IsZero() { + entry.Time = time.Now() + } + + entry.Level = level + entry.Message = msg + entry.Logger.mu.Lock() + if entry.Logger.ReportCaller { + entry.Caller = getCaller() + } + entry.Logger.mu.Unlock() + + entry.fireHooks() + + buffer = bufferPool.Get().(*bytes.Buffer) + buffer.Reset() + defer bufferPool.Put(buffer) + entry.Buffer = buffer + + entry.write() + + entry.Buffer = nil + + // To avoid Entry#log() returning a value that only would make sense for + // panic() to use in Entry#Panic(), we avoid the allocation by checking + // directly here. + if level <= PanicLevel { + panic(&entry) + } +} + +func (entry *Entry) fireHooks() { + entry.Logger.mu.Lock() + defer entry.Logger.mu.Unlock() + err := entry.Logger.Hooks.Fire(entry.Level, entry) + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to fire hook: %v\n", err) + } +} + +func (entry *Entry) write() { + entry.Logger.mu.Lock() + defer entry.Logger.mu.Unlock() + serialized, err := entry.Logger.Formatter.Format(entry) + if err != nil { + fmt.Fprintf(os.Stderr, "Failed to obtain reader, %v\n", err) + return + } + if _, err = entry.Logger.Out.Write(serialized); err != nil { + fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) + } +} + +func (entry *Entry) Log(level Level, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.log(level, fmt.Sprint(args...)) + } +} + +func (entry *Entry) Trace(args ...interface{}) { + entry.Log(TraceLevel, args...) +} + +func (entry *Entry) Debug(args ...interface{}) { + entry.Log(DebugLevel, args...) +} + +func (entry *Entry) Print(args ...interface{}) { + entry.Info(args...) +} + +func (entry *Entry) Info(args ...interface{}) { + entry.Log(InfoLevel, args...) +} + +func (entry *Entry) Warn(args ...interface{}) { + entry.Log(WarnLevel, args...) +} + +func (entry *Entry) Warning(args ...interface{}) { + entry.Warn(args...) +} + +func (entry *Entry) Error(args ...interface{}) { + entry.Log(ErrorLevel, args...) 
+} + +func (entry *Entry) Fatal(args ...interface{}) { + entry.Log(FatalLevel, args...) + entry.Logger.Exit(1) +} + +func (entry *Entry) Panic(args ...interface{}) { + entry.Log(PanicLevel, args...) + panic(fmt.Sprint(args...)) +} + +// Entry Printf family functions + +func (entry *Entry) Logf(level Level, format string, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.Log(level, fmt.Sprintf(format, args...)) + } +} + +func (entry *Entry) Tracef(format string, args ...interface{}) { + entry.Logf(TraceLevel, format, args...) +} + +func (entry *Entry) Debugf(format string, args ...interface{}) { + entry.Logf(DebugLevel, format, args...) +} + +func (entry *Entry) Infof(format string, args ...interface{}) { + entry.Logf(InfoLevel, format, args...) +} + +func (entry *Entry) Printf(format string, args ...interface{}) { + entry.Infof(format, args...) +} + +func (entry *Entry) Warnf(format string, args ...interface{}) { + entry.Logf(WarnLevel, format, args...) +} + +func (entry *Entry) Warningf(format string, args ...interface{}) { + entry.Warnf(format, args...) +} + +func (entry *Entry) Errorf(format string, args ...interface{}) { + entry.Logf(ErrorLevel, format, args...) +} + +func (entry *Entry) Fatalf(format string, args ...interface{}) { + entry.Logf(FatalLevel, format, args...) + entry.Logger.Exit(1) +} + +func (entry *Entry) Panicf(format string, args ...interface{}) { + entry.Logf(PanicLevel, format, args...) +} + +// Entry Println family functions + +func (entry *Entry) Logln(level Level, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.Log(level, entry.sprintlnn(args...)) + } +} + +func (entry *Entry) Traceln(args ...interface{}) { + entry.Logln(TraceLevel, args...) +} + +func (entry *Entry) Debugln(args ...interface{}) { + entry.Logln(DebugLevel, args...) +} + +func (entry *Entry) Infoln(args ...interface{}) { + entry.Logln(InfoLevel, args...) 
+} + +func (entry *Entry) Println(args ...interface{}) { + entry.Infoln(args...) +} + +func (entry *Entry) Warnln(args ...interface{}) { + entry.Logln(WarnLevel, args...) +} + +func (entry *Entry) Warningln(args ...interface{}) { + entry.Warnln(args...) +} + +func (entry *Entry) Errorln(args ...interface{}) { + entry.Logln(ErrorLevel, args...) +} + +func (entry *Entry) Fatalln(args ...interface{}) { + entry.Logln(FatalLevel, args...) + entry.Logger.Exit(1) +} + +func (entry *Entry) Panicln(args ...interface{}) { + entry.Logln(PanicLevel, args...) +} + +// Sprintlnn => Sprint no newline. This is to get the behavior of how +// fmt.Sprintln where spaces are always added between operands, regardless of +// their type. Instead of vendoring the Sprintln implementation to spare a +// string allocation, we do the simplest thing. +func (entry *Entry) sprintlnn(args ...interface{}) string { + msg := fmt.Sprintln(args...) + return msg[:len(msg)-1] +} diff --git a/vendor/github.com/sirupsen/logrus/exported.go b/vendor/github.com/sirupsen/logrus/exported.go new file mode 100644 index 0000000..42b04f6 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/exported.go @@ -0,0 +1,225 @@ +package logrus + +import ( + "context" + "io" + "time" +) + +var ( + // std is the name of the standard logger in stdlib `log` + std = New() +) + +func StandardLogger() *Logger { + return std +} + +// SetOutput sets the standard logger output. +func SetOutput(out io.Writer) { + std.SetOutput(out) +} + +// SetFormatter sets the standard logger formatter. +func SetFormatter(formatter Formatter) { + std.SetFormatter(formatter) +} + +// SetReportCaller sets whether the standard logger will include the calling +// method as a field. +func SetReportCaller(include bool) { + std.SetReportCaller(include) +} + +// SetLevel sets the standard logger level. +func SetLevel(level Level) { + std.SetLevel(level) +} + +// GetLevel returns the standard logger level. 
+func GetLevel() Level { + return std.GetLevel() +} + +// IsLevelEnabled checks if the log level of the standard logger is greater than the level param +func IsLevelEnabled(level Level) bool { + return std.IsLevelEnabled(level) +} + +// AddHook adds a hook to the standard logger hooks. +func AddHook(hook Hook) { + std.AddHook(hook) +} + +// WithError creates an entry from the standard logger and adds an error to it, using the value defined in ErrorKey as key. +func WithError(err error) *Entry { + return std.WithField(ErrorKey, err) +} + +// WithContext creates an entry from the standard logger and adds a context to it. +func WithContext(ctx context.Context) *Entry { + return std.WithContext(ctx) +} + +// WithField creates an entry from the standard logger and adds a field to +// it. If you want multiple fields, use `WithFields`. +// +// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal +// or Panic on the Entry it returns. +func WithField(key string, value interface{}) *Entry { + return std.WithField(key, value) +} + +// WithFields creates an entry from the standard logger and adds multiple +// fields to it. This is simply a helper for `WithField`, invoking it +// once for each field. +// +// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal +// or Panic on the Entry it returns. +func WithFields(fields Fields) *Entry { + return std.WithFields(fields) +} + +// WithTime creates an entry from the standard logger and overrides the time of +// logs generated with it. +// +// Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal +// or Panic on the Entry it returns. +func WithTime(t time.Time) *Entry { + return std.WithTime(t) +} + +// Trace logs a message at level Trace on the standard logger. +func Trace(args ...interface{}) { + std.Trace(args...) +} + +// Debug logs a message at level Debug on the standard logger. +func Debug(args ...interface{}) { + std.Debug(args...) 
+} + +// Print logs a message at level Info on the standard logger. +func Print(args ...interface{}) { + std.Print(args...) +} + +// Info logs a message at level Info on the standard logger. +func Info(args ...interface{}) { + std.Info(args...) +} + +// Warn logs a message at level Warn on the standard logger. +func Warn(args ...interface{}) { + std.Warn(args...) +} + +// Warning logs a message at level Warn on the standard logger. +func Warning(args ...interface{}) { + std.Warning(args...) +} + +// Error logs a message at level Error on the standard logger. +func Error(args ...interface{}) { + std.Error(args...) +} + +// Panic logs a message at level Panic on the standard logger. +func Panic(args ...interface{}) { + std.Panic(args...) +} + +// Fatal logs a message at level Fatal on the standard logger then the process will exit with status set to 1. +func Fatal(args ...interface{}) { + std.Fatal(args...) +} + +// Tracef logs a message at level Trace on the standard logger. +func Tracef(format string, args ...interface{}) { + std.Tracef(format, args...) +} + +// Debugf logs a message at level Debug on the standard logger. +func Debugf(format string, args ...interface{}) { + std.Debugf(format, args...) +} + +// Printf logs a message at level Info on the standard logger. +func Printf(format string, args ...interface{}) { + std.Printf(format, args...) +} + +// Infof logs a message at level Info on the standard logger. +func Infof(format string, args ...interface{}) { + std.Infof(format, args...) +} + +// Warnf logs a message at level Warn on the standard logger. +func Warnf(format string, args ...interface{}) { + std.Warnf(format, args...) +} + +// Warningf logs a message at level Warn on the standard logger. +func Warningf(format string, args ...interface{}) { + std.Warningf(format, args...) +} + +// Errorf logs a message at level Error on the standard logger. +func Errorf(format string, args ...interface{}) { + std.Errorf(format, args...) 
+} + +// Panicf logs a message at level Panic on the standard logger. +func Panicf(format string, args ...interface{}) { + std.Panicf(format, args...) +} + +// Fatalf logs a message at level Fatal on the standard logger then the process will exit with status set to 1. +func Fatalf(format string, args ...interface{}) { + std.Fatalf(format, args...) +} + +// Traceln logs a message at level Trace on the standard logger. +func Traceln(args ...interface{}) { + std.Traceln(args...) +} + +// Debugln logs a message at level Debug on the standard logger. +func Debugln(args ...interface{}) { + std.Debugln(args...) +} + +// Println logs a message at level Info on the standard logger. +func Println(args ...interface{}) { + std.Println(args...) +} + +// Infoln logs a message at level Info on the standard logger. +func Infoln(args ...interface{}) { + std.Infoln(args...) +} + +// Warnln logs a message at level Warn on the standard logger. +func Warnln(args ...interface{}) { + std.Warnln(args...) +} + +// Warningln logs a message at level Warn on the standard logger. +func Warningln(args ...interface{}) { + std.Warningln(args...) +} + +// Errorln logs a message at level Error on the standard logger. +func Errorln(args ...interface{}) { + std.Errorln(args...) +} + +// Panicln logs a message at level Panic on the standard logger. +func Panicln(args ...interface{}) { + std.Panicln(args...) +} + +// Fatalln logs a message at level Fatal on the standard logger then the process will exit with status set to 1. +func Fatalln(args ...interface{}) { + std.Fatalln(args...) 
+} diff --git a/vendor/github.com/sirupsen/logrus/formatter.go b/vendor/github.com/sirupsen/logrus/formatter.go new file mode 100644 index 0000000..4088837 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/formatter.go @@ -0,0 +1,78 @@ +package logrus + +import "time" + +// Default key names for the default fields +const ( + defaultTimestampFormat = time.RFC3339 + FieldKeyMsg = "msg" + FieldKeyLevel = "level" + FieldKeyTime = "time" + FieldKeyLogrusError = "logrus_error" + FieldKeyFunc = "func" + FieldKeyFile = "file" +) + +// The Formatter interface is used to implement a custom Formatter. It takes an +// `Entry`. It exposes all the fields, including the default ones: +// +// * `entry.Data["msg"]`. The message passed from Info, Warn, Error .. +// * `entry.Data["time"]`. The timestamp. +// * `entry.Data["level"]. The level the entry was logged at. +// +// Any additional fields added with `WithField` or `WithFields` are also in +// `entry.Data`. Format is expected to return an array of bytes which are then +// logged to `logger.Out`. +type Formatter interface { + Format(*Entry) ([]byte, error) +} + +// This is to not silently overwrite `time`, `msg`, `func` and `level` fields when +// dumping it. If this code wasn't there doing: +// +// logrus.WithField("level", 1).Info("hello") +// +// Would just silently drop the user provided level. Instead with this code +// it'll logged as: +// +// {"level": "info", "fields.level": 1, "msg": "hello", "time": "..."} +// +// It's not exported because it's still using Data in an opinionated way. It's to +// avoid code duplication between the two default formatters. 
+func prefixFieldClashes(data Fields, fieldMap FieldMap, reportCaller bool) { + timeKey := fieldMap.resolve(FieldKeyTime) + if t, ok := data[timeKey]; ok { + data["fields."+timeKey] = t + delete(data, timeKey) + } + + msgKey := fieldMap.resolve(FieldKeyMsg) + if m, ok := data[msgKey]; ok { + data["fields."+msgKey] = m + delete(data, msgKey) + } + + levelKey := fieldMap.resolve(FieldKeyLevel) + if l, ok := data[levelKey]; ok { + data["fields."+levelKey] = l + delete(data, levelKey) + } + + logrusErrKey := fieldMap.resolve(FieldKeyLogrusError) + if l, ok := data[logrusErrKey]; ok { + data["fields."+logrusErrKey] = l + delete(data, logrusErrKey) + } + + // If reportCaller is not set, 'func' will not conflict. + if reportCaller { + funcKey := fieldMap.resolve(FieldKeyFunc) + if l, ok := data[funcKey]; ok { + data["fields."+funcKey] = l + } + fileKey := fieldMap.resolve(FieldKeyFile) + if l, ok := data[fileKey]; ok { + data["fields."+fileKey] = l + } + } +} diff --git a/vendor/github.com/sirupsen/logrus/hooks.go b/vendor/github.com/sirupsen/logrus/hooks.go new file mode 100644 index 0000000..3f151cd --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/hooks.go @@ -0,0 +1,34 @@ +package logrus + +// A hook to be fired when logging on the logging levels returned from +// `Levels()` on your implementation of the interface. Note that this is not +// fired in a goroutine or a channel with workers, you should handle such +// functionality yourself if your call is non-blocking and you don't wish for +// the logging calls for levels returned from `Levels()` to block. +type Hook interface { + Levels() []Level + Fire(*Entry) error +} + +// Internal type for storing the hooks on a logger instance. +type LevelHooks map[Level][]Hook + +// Add a hook to an instance of logger. This is called with +// `log.Hooks.Add(new(MyHook))` where `MyHook` implements the `Hook` interface. 
+func (hooks LevelHooks) Add(hook Hook) { + for _, level := range hook.Levels() { + hooks[level] = append(hooks[level], hook) + } +} + +// Fire all the hooks for the passed level. Used by `entry.log` to fire +// appropriate hooks for a log entry. +func (hooks LevelHooks) Fire(level Level, entry *Entry) error { + for _, hook := range hooks[level] { + if err := hook.Fire(entry); err != nil { + return err + } + } + + return nil +} diff --git a/vendor/github.com/sirupsen/logrus/json_formatter.go b/vendor/github.com/sirupsen/logrus/json_formatter.go new file mode 100644 index 0000000..ba7f237 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/json_formatter.go @@ -0,0 +1,125 @@ +package logrus + +import ( + "bytes" + "encoding/json" + "fmt" + "runtime" +) + +type fieldKey string + +// FieldMap allows customization of the key names for default fields. +type FieldMap map[fieldKey]string + +func (f FieldMap) resolve(key fieldKey) string { + if k, ok := f[key]; ok { + return k + } + + return string(key) +} + +// JSONFormatter formats logs into parsable json +type JSONFormatter struct { + // TimestampFormat sets the format used for marshaling timestamps. + TimestampFormat string + + // DisableTimestamp allows disabling automatic timestamps in output + DisableTimestamp bool + + // DisableHTMLEscape allows disabling html escaping in output + DisableHTMLEscape bool + + // DataKey allows users to put all the log entry parameters into a nested dictionary at a given key. + DataKey string + + // FieldMap allows users to customize the names of keys for default fields. + // As an example: + // formatter := &JSONFormatter{ + // FieldMap: FieldMap{ + // FieldKeyTime: "@timestamp", + // FieldKeyLevel: "@level", + // FieldKeyMsg: "@message", + // FieldKeyFunc: "@caller", + // }, + // } + FieldMap FieldMap + + // CallerPrettyfier can be set by the user to modify the content + // of the function and file keys in the json data when ReportCaller is + // activated. 
If any of the returned value is the empty string the + // corresponding key will be removed from json fields. + CallerPrettyfier func(*runtime.Frame) (function string, file string) + + // PrettyPrint will indent all json logs + PrettyPrint bool +} + +// Format renders a single log entry +func (f *JSONFormatter) Format(entry *Entry) ([]byte, error) { + data := make(Fields, len(entry.Data)+4) + for k, v := range entry.Data { + switch v := v.(type) { + case error: + // Otherwise errors are ignored by `encoding/json` + // https://github.com/sirupsen/logrus/issues/137 + data[k] = v.Error() + default: + data[k] = v + } + } + + if f.DataKey != "" { + newData := make(Fields, 4) + newData[f.DataKey] = data + data = newData + } + + prefixFieldClashes(data, f.FieldMap, entry.HasCaller()) + + timestampFormat := f.TimestampFormat + if timestampFormat == "" { + timestampFormat = defaultTimestampFormat + } + + if entry.err != "" { + data[f.FieldMap.resolve(FieldKeyLogrusError)] = entry.err + } + if !f.DisableTimestamp { + data[f.FieldMap.resolve(FieldKeyTime)] = entry.Time.Format(timestampFormat) + } + data[f.FieldMap.resolve(FieldKeyMsg)] = entry.Message + data[f.FieldMap.resolve(FieldKeyLevel)] = entry.Level.String() + if entry.HasCaller() { + funcVal := entry.Caller.Function + fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + if f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } + if funcVal != "" { + data[f.FieldMap.resolve(FieldKeyFunc)] = funcVal + } + if fileVal != "" { + data[f.FieldMap.resolve(FieldKeyFile)] = fileVal + } + } + + var b *bytes.Buffer + if entry.Buffer != nil { + b = entry.Buffer + } else { + b = &bytes.Buffer{} + } + + encoder := json.NewEncoder(b) + encoder.SetEscapeHTML(!f.DisableHTMLEscape) + if f.PrettyPrint { + encoder.SetIndent("", " ") + } + if err := encoder.Encode(data); err != nil { + return nil, fmt.Errorf("failed to marshal fields to JSON, %v", err) + } + + return b.Bytes(), nil +} diff 
--git a/vendor/github.com/sirupsen/logrus/logger.go b/vendor/github.com/sirupsen/logrus/logger.go new file mode 100644 index 0000000..6fdda74 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/logger.go @@ -0,0 +1,352 @@ +package logrus + +import ( + "context" + "io" + "os" + "sync" + "sync/atomic" + "time" +) + +type Logger struct { + // The logs are `io.Copy`'d to this in a mutex. It's common to set this to a + // file, or leave it default which is `os.Stderr`. You can also set this to + // something more adventurous, such as logging to Kafka. + Out io.Writer + // Hooks for the logger instance. These allow firing events based on logging + // levels and log entries. For example, to send errors to an error tracking + // service, log to StatsD or dump the core on fatal errors. + Hooks LevelHooks + // All log entries pass through the formatter before logged to Out. The + // included formatters are `TextFormatter` and `JSONFormatter` for which + // TextFormatter is the default. In development (when a TTY is attached) it + // logs with colors, but to a file it wouldn't. You can easily implement your + // own that implements the `Formatter` interface, see the `README` or included + // formatters for examples. + Formatter Formatter + + // Flag for whether to log caller info (off by default) + ReportCaller bool + + // The logging level the logger should log at. This is typically (and defaults + // to) `logrus.Info`, which allows Info(), Warn(), Error() and Fatal() to be + // logged. + Level Level + // Used to sync writing to the log. 
Locking is enabled by Default + mu MutexWrap + // Reusable empty entry + entryPool sync.Pool + // Function to exit the application, defaults to `os.Exit()` + ExitFunc exitFunc +} + +type exitFunc func(int) + +type MutexWrap struct { + lock sync.Mutex + disabled bool +} + +func (mw *MutexWrap) Lock() { + if !mw.disabled { + mw.lock.Lock() + } +} + +func (mw *MutexWrap) Unlock() { + if !mw.disabled { + mw.lock.Unlock() + } +} + +func (mw *MutexWrap) Disable() { + mw.disabled = true +} + +// Creates a new logger. Configuration should be set by changing `Formatter`, +// `Out` and `Hooks` directly on the default logger instance. You can also just +// instantiate your own: +// +// var log = &logrus.Logger{ +// Out: os.Stderr, +// Formatter: new(logrus.JSONFormatter), +// Hooks: make(logrus.LevelHooks), +// Level: logrus.DebugLevel, +// } +// +// It's recommended to make this a global instance called `log`. +func New() *Logger { + return &Logger{ + Out: os.Stderr, + Formatter: new(TextFormatter), + Hooks: make(LevelHooks), + Level: InfoLevel, + ExitFunc: os.Exit, + ReportCaller: false, + } +} + +func (logger *Logger) newEntry() *Entry { + entry, ok := logger.entryPool.Get().(*Entry) + if ok { + return entry + } + return NewEntry(logger) +} + +func (logger *Logger) releaseEntry(entry *Entry) { + entry.Data = map[string]interface{}{} + logger.entryPool.Put(entry) +} + +// WithField allocates a new entry and adds a field to it. +// Debug, Print, Info, Warn, Error, Fatal or Panic must be then applied to +// this new returned entry. +// If you want multiple fields, use `WithFields`. +func (logger *Logger) WithField(key string, value interface{}) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithField(key, value) +} + +// Adds a struct of fields to the log entry. All it does is call `WithField` for +// each `Field`. 
+func (logger *Logger) WithFields(fields Fields) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithFields(fields) +} + +// Add an error as single field to the log entry. All it does is call +// `WithError` for the given `error`. +func (logger *Logger) WithError(err error) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithError(err) +} + +// Add a context to the log entry. +func (logger *Logger) WithContext(ctx context.Context) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithContext(ctx) +} + +// Overrides the time of the log entry. +func (logger *Logger) WithTime(t time.Time) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithTime(t) +} + +func (logger *Logger) Logf(level Level, format string, args ...interface{}) { + if logger.IsLevelEnabled(level) { + entry := logger.newEntry() + entry.Logf(level, format, args...) + logger.releaseEntry(entry) + } +} + +func (logger *Logger) Tracef(format string, args ...interface{}) { + logger.Logf(TraceLevel, format, args...) +} + +func (logger *Logger) Debugf(format string, args ...interface{}) { + logger.Logf(DebugLevel, format, args...) +} + +func (logger *Logger) Infof(format string, args ...interface{}) { + logger.Logf(InfoLevel, format, args...) +} + +func (logger *Logger) Printf(format string, args ...interface{}) { + entry := logger.newEntry() + entry.Printf(format, args...) + logger.releaseEntry(entry) +} + +func (logger *Logger) Warnf(format string, args ...interface{}) { + logger.Logf(WarnLevel, format, args...) +} + +func (logger *Logger) Warningf(format string, args ...interface{}) { + logger.Warnf(format, args...) +} + +func (logger *Logger) Errorf(format string, args ...interface{}) { + logger.Logf(ErrorLevel, format, args...) +} + +func (logger *Logger) Fatalf(format string, args ...interface{}) { + logger.Logf(FatalLevel, format, args...) 
+ logger.Exit(1) +} + +func (logger *Logger) Panicf(format string, args ...interface{}) { + logger.Logf(PanicLevel, format, args...) +} + +func (logger *Logger) Log(level Level, args ...interface{}) { + if logger.IsLevelEnabled(level) { + entry := logger.newEntry() + entry.Log(level, args...) + logger.releaseEntry(entry) + } +} + +func (logger *Logger) Trace(args ...interface{}) { + logger.Log(TraceLevel, args...) +} + +func (logger *Logger) Debug(args ...interface{}) { + logger.Log(DebugLevel, args...) +} + +func (logger *Logger) Info(args ...interface{}) { + logger.Log(InfoLevel, args...) +} + +func (logger *Logger) Print(args ...interface{}) { + entry := logger.newEntry() + entry.Print(args...) + logger.releaseEntry(entry) +} + +func (logger *Logger) Warn(args ...interface{}) { + logger.Log(WarnLevel, args...) +} + +func (logger *Logger) Warning(args ...interface{}) { + logger.Warn(args...) +} + +func (logger *Logger) Error(args ...interface{}) { + logger.Log(ErrorLevel, args...) +} + +func (logger *Logger) Fatal(args ...interface{}) { + logger.Log(FatalLevel, args...) + logger.Exit(1) +} + +func (logger *Logger) Panic(args ...interface{}) { + logger.Log(PanicLevel, args...) +} + +func (logger *Logger) Logln(level Level, args ...interface{}) { + if logger.IsLevelEnabled(level) { + entry := logger.newEntry() + entry.Logln(level, args...) + logger.releaseEntry(entry) + } +} + +func (logger *Logger) Traceln(args ...interface{}) { + logger.Logln(TraceLevel, args...) +} + +func (logger *Logger) Debugln(args ...interface{}) { + logger.Logln(DebugLevel, args...) +} + +func (logger *Logger) Infoln(args ...interface{}) { + logger.Logln(InfoLevel, args...) +} + +func (logger *Logger) Println(args ...interface{}) { + entry := logger.newEntry() + entry.Println(args...) + logger.releaseEntry(entry) +} + +func (logger *Logger) Warnln(args ...interface{}) { + logger.Logln(WarnLevel, args...) +} + +func (logger *Logger) Warningln(args ...interface{}) { + logger.Warnln(args...) 
+} + +func (logger *Logger) Errorln(args ...interface{}) { + logger.Logln(ErrorLevel, args...) +} + +func (logger *Logger) Fatalln(args ...interface{}) { + logger.Logln(FatalLevel, args...) + logger.Exit(1) +} + +func (logger *Logger) Panicln(args ...interface{}) { + logger.Logln(PanicLevel, args...) +} + +func (logger *Logger) Exit(code int) { + runHandlers() + if logger.ExitFunc == nil { + logger.ExitFunc = os.Exit + } + logger.ExitFunc(code) +} + +//When file is opened with appending mode, it's safe to +//write concurrently to a file (within 4k message on Linux). +//In these cases user can choose to disable the lock. +func (logger *Logger) SetNoLock() { + logger.mu.Disable() +} + +func (logger *Logger) level() Level { + return Level(atomic.LoadUint32((*uint32)(&logger.Level))) +} + +// SetLevel sets the logger level. +func (logger *Logger) SetLevel(level Level) { + atomic.StoreUint32((*uint32)(&logger.Level), uint32(level)) +} + +// GetLevel returns the logger level. +func (logger *Logger) GetLevel() Level { + return logger.level() +} + +// AddHook adds a hook to the logger hooks. +func (logger *Logger) AddHook(hook Hook) { + logger.mu.Lock() + defer logger.mu.Unlock() + logger.Hooks.Add(hook) +} + +// IsLevelEnabled checks if the log level of the logger is greater than the level param +func (logger *Logger) IsLevelEnabled(level Level) bool { + return logger.level() >= level +} + +// SetFormatter sets the logger formatter. +func (logger *Logger) SetFormatter(formatter Formatter) { + logger.mu.Lock() + defer logger.mu.Unlock() + logger.Formatter = formatter +} + +// SetOutput sets the logger output. 
+func (logger *Logger) SetOutput(output io.Writer) { + logger.mu.Lock() + defer logger.mu.Unlock() + logger.Out = output +} + +func (logger *Logger) SetReportCaller(reportCaller bool) { + logger.mu.Lock() + defer logger.mu.Unlock() + logger.ReportCaller = reportCaller +} + +// ReplaceHooks replaces the logger hooks and returns the old ones +func (logger *Logger) ReplaceHooks(hooks LevelHooks) LevelHooks { + logger.mu.Lock() + oldHooks := logger.Hooks + logger.Hooks = hooks + logger.mu.Unlock() + return oldHooks +} diff --git a/vendor/github.com/sirupsen/logrus/logrus.go b/vendor/github.com/sirupsen/logrus/logrus.go new file mode 100644 index 0000000..2f16224 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/logrus.go @@ -0,0 +1,186 @@ +package logrus + +import ( + "fmt" + "log" + "strings" +) + +// Fields type, used to pass to `WithFields`. +type Fields map[string]interface{} + +// Level type +type Level uint32 + +// Convert the Level to a string. E.g. PanicLevel becomes "panic". +func (level Level) String() string { + if b, err := level.MarshalText(); err == nil { + return string(b) + } else { + return "unknown" + } +} + +// ParseLevel takes a string level and returns the Logrus log level constant. +func ParseLevel(lvl string) (Level, error) { + switch strings.ToLower(lvl) { + case "panic": + return PanicLevel, nil + case "fatal": + return FatalLevel, nil + case "error": + return ErrorLevel, nil + case "warn", "warning": + return WarnLevel, nil + case "info": + return InfoLevel, nil + case "debug": + return DebugLevel, nil + case "trace": + return TraceLevel, nil + } + + var l Level + return l, fmt.Errorf("not a valid logrus Level: %q", lvl) +} + +// UnmarshalText implements encoding.TextUnmarshaler. 
+func (level *Level) UnmarshalText(text []byte) error { + l, err := ParseLevel(string(text)) + if err != nil { + return err + } + + *level = l + + return nil +} + +func (level Level) MarshalText() ([]byte, error) { + switch level { + case TraceLevel: + return []byte("trace"), nil + case DebugLevel: + return []byte("debug"), nil + case InfoLevel: + return []byte("info"), nil + case WarnLevel: + return []byte("warning"), nil + case ErrorLevel: + return []byte("error"), nil + case FatalLevel: + return []byte("fatal"), nil + case PanicLevel: + return []byte("panic"), nil + } + + return nil, fmt.Errorf("not a valid logrus level %d", level) +} + +// A constant exposing all logging levels +var AllLevels = []Level{ + PanicLevel, + FatalLevel, + ErrorLevel, + WarnLevel, + InfoLevel, + DebugLevel, + TraceLevel, +} + +// These are the different logging levels. You can set the logging level to log +// on your instance of logger, obtained with `logrus.New()`. +const ( + // PanicLevel level, highest level of severity. Logs and then calls panic with the + // message passed to Debug, Info, ... + PanicLevel Level = iota + // FatalLevel level. Logs and then calls `logger.Exit(1)`. It will exit even if the + // logging level is set to Panic. + FatalLevel + // ErrorLevel level. Logs. Used for errors that should definitely be noted. + // Commonly used for hooks to send errors to an error tracking service. + ErrorLevel + // WarnLevel level. Non-critical entries that deserve eyes. + WarnLevel + // InfoLevel level. General operational entries about what's going on inside the + // application. + InfoLevel + // DebugLevel level. Usually only enabled when debugging. Very verbose logging. + DebugLevel + // TraceLevel level. Designates finer-grained informational events than the Debug. 
+ TraceLevel +) + +// Won't compile if StdLogger can't be realized by a log.Logger +var ( + _ StdLogger = &log.Logger{} + _ StdLogger = &Entry{} + _ StdLogger = &Logger{} +) + +// StdLogger is what your logrus-enabled library should take, that way +// it'll accept a stdlib logger and a logrus logger. There's no standard +// interface, this is the closest we get, unfortunately. +type StdLogger interface { + Print(...interface{}) + Printf(string, ...interface{}) + Println(...interface{}) + + Fatal(...interface{}) + Fatalf(string, ...interface{}) + Fatalln(...interface{}) + + Panic(...interface{}) + Panicf(string, ...interface{}) + Panicln(...interface{}) +} + +// The FieldLogger interface generalizes the Entry and Logger types +type FieldLogger interface { + WithField(key string, value interface{}) *Entry + WithFields(fields Fields) *Entry + WithError(err error) *Entry + + Debugf(format string, args ...interface{}) + Infof(format string, args ...interface{}) + Printf(format string, args ...interface{}) + Warnf(format string, args ...interface{}) + Warningf(format string, args ...interface{}) + Errorf(format string, args ...interface{}) + Fatalf(format string, args ...interface{}) + Panicf(format string, args ...interface{}) + + Debug(args ...interface{}) + Info(args ...interface{}) + Print(args ...interface{}) + Warn(args ...interface{}) + Warning(args ...interface{}) + Error(args ...interface{}) + Fatal(args ...interface{}) + Panic(args ...interface{}) + + Debugln(args ...interface{}) + Infoln(args ...interface{}) + Println(args ...interface{}) + Warnln(args ...interface{}) + Warningln(args ...interface{}) + Errorln(args ...interface{}) + Fatalln(args ...interface{}) + Panicln(args ...interface{}) + + // IsDebugEnabled() bool + // IsInfoEnabled() bool + // IsWarnEnabled() bool + // IsErrorEnabled() bool + // IsFatalEnabled() bool + // IsPanicEnabled() bool +} + +// Ext1FieldLogger (the first extension to FieldLogger) is superfluous, it is +// here for consistancy. 
Do not use. Use Logger or Entry instead. +type Ext1FieldLogger interface { + FieldLogger + Tracef(format string, args ...interface{}) + Trace(args ...interface{}) + Traceln(args ...interface{}) +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go new file mode 100644 index 0000000..2403de9 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_appengine.go @@ -0,0 +1,11 @@ +// +build appengine + +package logrus + +import ( + "io" +) + +func checkIfTerminal(w io.Writer) bool { + return true +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go new file mode 100644 index 0000000..4997899 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go @@ -0,0 +1,13 @@ +// +build darwin dragonfly freebsd netbsd openbsd +// +build !js + +package logrus + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TIOCGETA + +func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_js.go b/vendor/github.com/sirupsen/logrus/terminal_check_js.go new file mode 100644 index 0000000..ebdae3e --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_js.go @@ -0,0 +1,7 @@ +// +build js + +package logrus + +func isTerminal(fd int) bool { + return false +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go b/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go new file mode 100644 index 0000000..97af92c --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go @@ -0,0 +1,11 @@ +// +build js nacl plan9 + +package logrus + +import ( + "io" +) + +func checkIfTerminal(w io.Writer) bool { + return false +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go 
b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go new file mode 100644 index 0000000..3293fb3 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go @@ -0,0 +1,17 @@ +// +build !appengine,!js,!windows,!nacl,!plan9 + +package logrus + +import ( + "io" + "os" +) + +func checkIfTerminal(w io.Writer) bool { + switch v := w.(type) { + case *os.File: + return isTerminal(int(v.Fd())) + default: + return false + } +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go b/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go new file mode 100644 index 0000000..f6710b3 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go @@ -0,0 +1,11 @@ +package logrus + +import ( + "golang.org/x/sys/unix" +) + +// IsTerminal returns true if the given file descriptor is a terminal. +func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermio(fd, unix.TCGETA) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go new file mode 100644 index 0000000..cc4fe6e --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go @@ -0,0 +1,13 @@ +// +build linux aix +// +build !js + +package logrus + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TCGETS + +func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go new file mode 100644 index 0000000..572889d --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go @@ -0,0 +1,34 @@ +// +build !appengine,!js,windows + +package logrus + +import ( + "io" + "os" + "syscall" + + sequences "github.com/konsorten/go-windows-terminal-sequences" +) + +func initTerminal(w io.Writer) { + switch v := w.(type) { + case *os.File: + 
sequences.EnableVirtualTerminalProcessing(syscall.Handle(v.Fd()), true) + } +} + +func checkIfTerminal(w io.Writer) bool { + var ret bool + switch v := w.(type) { + case *os.File: + var mode uint32 + err := syscall.GetConsoleMode(syscall.Handle(v.Fd()), &mode) + ret = (err == nil) + default: + ret = false + } + if ret { + initTerminal(w) + } + return ret +} diff --git a/vendor/github.com/sirupsen/logrus/text_formatter.go b/vendor/github.com/sirupsen/logrus/text_formatter.go new file mode 100644 index 0000000..3c28b54 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/text_formatter.go @@ -0,0 +1,334 @@ +package logrus + +import ( + "bytes" + "fmt" + "os" + "runtime" + "sort" + "strconv" + "strings" + "sync" + "time" + "unicode/utf8" +) + +const ( + red = 31 + yellow = 33 + blue = 36 + gray = 37 +) + +var baseTimestamp time.Time + +func init() { + baseTimestamp = time.Now() +} + +// TextFormatter formats logs into text +type TextFormatter struct { + // Set to true to bypass checking for a TTY before outputting colors. + ForceColors bool + + // Force disabling colors. + DisableColors bool + + // Force quoting of all values + ForceQuote bool + + // DisableQuote disables quoting for all values. + // DisableQuote will have a lower priority than ForceQuote. + // If both of them are set to true, quote will be forced on all values. + DisableQuote bool + + // Override coloring based on CLICOLOR and CLICOLOR_FORCE. - https://bixense.com/clicolors/ + EnvironmentOverrideColors bool + + // Disable timestamp logging. useful when output is redirected to logging + // system that already adds timestamps. + DisableTimestamp bool + + // Enable logging the full timestamp when a TTY is attached instead of just + // the time passed since beginning of execution. + FullTimestamp bool + + // TimestampFormat to use for display when a full timestamp is printed + TimestampFormat string + + // The fields are sorted by default for a consistent output. 
For applications + // that log extremely frequently and don't use the JSON formatter this may not + // be desired. + DisableSorting bool + + // The keys sorting function, when uninitialized it uses sort.Strings. + SortingFunc func([]string) + + // Disables the truncation of the level text to 4 characters. + DisableLevelTruncation bool + + // PadLevelText Adds padding the level text so that all the levels output at the same length + // PadLevelText is a superset of the DisableLevelTruncation option + PadLevelText bool + + // QuoteEmptyFields will wrap empty fields in quotes if true + QuoteEmptyFields bool + + // Whether the logger's out is to a terminal + isTerminal bool + + // FieldMap allows users to customize the names of keys for default fields. + // As an example: + // formatter := &TextFormatter{ + // FieldMap: FieldMap{ + // FieldKeyTime: "@timestamp", + // FieldKeyLevel: "@level", + // FieldKeyMsg: "@message"}} + FieldMap FieldMap + + // CallerPrettyfier can be set by the user to modify the content + // of the function and file keys in the data when ReportCaller is + // activated. If any of the returned value is the empty string the + // corresponding key will be removed from fields. 
+ CallerPrettyfier func(*runtime.Frame) (function string, file string) + + terminalInitOnce sync.Once + + // The max length of the level text, generated dynamically on init + levelTextMaxLength int +} + +func (f *TextFormatter) init(entry *Entry) { + if entry.Logger != nil { + f.isTerminal = checkIfTerminal(entry.Logger.Out) + } + // Get the max length of the level text + for _, level := range AllLevels { + levelTextLength := utf8.RuneCount([]byte(level.String())) + if levelTextLength > f.levelTextMaxLength { + f.levelTextMaxLength = levelTextLength + } + } +} + +func (f *TextFormatter) isColored() bool { + isColored := f.ForceColors || (f.isTerminal && (runtime.GOOS != "windows")) + + if f.EnvironmentOverrideColors { + switch force, ok := os.LookupEnv("CLICOLOR_FORCE"); { + case ok && force != "0": + isColored = true + case ok && force == "0", os.Getenv("CLICOLOR") == "0": + isColored = false + } + } + + return isColored && !f.DisableColors +} + +// Format renders a single log entry +func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { + data := make(Fields) + for k, v := range entry.Data { + data[k] = v + } + prefixFieldClashes(data, f.FieldMap, entry.HasCaller()) + keys := make([]string, 0, len(data)) + for k := range data { + keys = append(keys, k) + } + + var funcVal, fileVal string + + fixedKeys := make([]string, 0, 4+len(data)) + if !f.DisableTimestamp { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyTime)) + } + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLevel)) + if entry.Message != "" { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyMsg)) + } + if entry.err != "" { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLogrusError)) + } + if entry.HasCaller() { + if f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } else { + funcVal = entry.Caller.Function + fileVal = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + } + + if funcVal != "" { + fixedKeys = 
append(fixedKeys, f.FieldMap.resolve(FieldKeyFunc)) + } + if fileVal != "" { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFile)) + } + } + + if !f.DisableSorting { + if f.SortingFunc == nil { + sort.Strings(keys) + fixedKeys = append(fixedKeys, keys...) + } else { + if !f.isColored() { + fixedKeys = append(fixedKeys, keys...) + f.SortingFunc(fixedKeys) + } else { + f.SortingFunc(keys) + } + } + } else { + fixedKeys = append(fixedKeys, keys...) + } + + var b *bytes.Buffer + if entry.Buffer != nil { + b = entry.Buffer + } else { + b = &bytes.Buffer{} + } + + f.terminalInitOnce.Do(func() { f.init(entry) }) + + timestampFormat := f.TimestampFormat + if timestampFormat == "" { + timestampFormat = defaultTimestampFormat + } + if f.isColored() { + f.printColored(b, entry, keys, data, timestampFormat) + } else { + + for _, key := range fixedKeys { + var value interface{} + switch { + case key == f.FieldMap.resolve(FieldKeyTime): + value = entry.Time.Format(timestampFormat) + case key == f.FieldMap.resolve(FieldKeyLevel): + value = entry.Level.String() + case key == f.FieldMap.resolve(FieldKeyMsg): + value = entry.Message + case key == f.FieldMap.resolve(FieldKeyLogrusError): + value = entry.err + case key == f.FieldMap.resolve(FieldKeyFunc) && entry.HasCaller(): + value = funcVal + case key == f.FieldMap.resolve(FieldKeyFile) && entry.HasCaller(): + value = fileVal + default: + value = data[key] + } + f.appendKeyValue(b, key, value) + } + } + + b.WriteByte('\n') + return b.Bytes(), nil +} + +func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, data Fields, timestampFormat string) { + var levelColor int + switch entry.Level { + case DebugLevel, TraceLevel: + levelColor = gray + case WarnLevel: + levelColor = yellow + case ErrorLevel, FatalLevel, PanicLevel: + levelColor = red + default: + levelColor = blue + } + + levelText := strings.ToUpper(entry.Level.String()) + if !f.DisableLevelTruncation && !f.PadLevelText { + levelText = 
levelText[0:4] + } + if f.PadLevelText { + // Generates the format string used in the next line, for example "%-6s" or "%-7s". + // Based on the max level text length. + formatString := "%-" + strconv.Itoa(f.levelTextMaxLength) + "s" + // Formats the level text by appending spaces up to the max length, for example: + // - "INFO " + // - "WARNING" + levelText = fmt.Sprintf(formatString, levelText) + } + + // Remove a single newline if it already exists in the message to keep + // the behavior of logrus text_formatter the same as the stdlib log package + entry.Message = strings.TrimSuffix(entry.Message, "\n") + + caller := "" + if entry.HasCaller() { + funcVal := fmt.Sprintf("%s()", entry.Caller.Function) + fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + + if f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } + + if fileVal == "" { + caller = funcVal + } else if funcVal == "" { + caller = fileVal + } else { + caller = fileVal + " " + funcVal + } + } + + switch { + case f.DisableTimestamp: + fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m%s %-44s ", levelColor, levelText, caller, entry.Message) + case !f.FullTimestamp: + fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d]%s %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), caller, entry.Message) + default: + fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s]%s %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), caller, entry.Message) + } + for _, k := range keys { + v := data[k] + fmt.Fprintf(b, " \x1b[%dm%s\x1b[0m=", levelColor, k) + f.appendValue(b, v) + } +} + +func (f *TextFormatter) needsQuoting(text string) bool { + if f.ForceQuote { + return true + } + if f.QuoteEmptyFields && len(text) == 0 { + return true + } + if f.DisableQuote { + return false + } + for _, ch := range text { + if !((ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z') || + (ch >= '0' && ch <= '9') || + ch == '-' || ch == '.' 
|| ch == '_' || ch == '/' || ch == '@' || ch == '^' || ch == '+') { + return true + } + } + return false +} + +func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) { + if b.Len() > 0 { + b.WriteByte(' ') + } + b.WriteString(key) + b.WriteByte('=') + f.appendValue(b, value) +} + +func (f *TextFormatter) appendValue(b *bytes.Buffer, value interface{}) { + stringVal, ok := value.(string) + if !ok { + stringVal = fmt.Sprint(value) + } + + if !f.needsQuoting(stringVal) { + b.WriteString(stringVal) + } else { + b.WriteString(fmt.Sprintf("%q", stringVal)) + } +} diff --git a/vendor/github.com/sirupsen/logrus/writer.go b/vendor/github.com/sirupsen/logrus/writer.go new file mode 100644 index 0000000..72e8e3a --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/writer.go @@ -0,0 +1,70 @@ +package logrus + +import ( + "bufio" + "io" + "runtime" +) + +// Writer at INFO level. See WriterLevel for details. +func (logger *Logger) Writer() *io.PipeWriter { + return logger.WriterLevel(InfoLevel) +} + +// WriterLevel returns an io.Writer that can be used to write arbitrary text to +// the logger at the given log level. Each line written to the writer will be +// printed in the usual way using formatters and hooks. The writer is part of an +// io.Pipe and it is the callers responsibility to close the writer when done. +// This can be used to override the standard library logger easily. 
+func (logger *Logger) WriterLevel(level Level) *io.PipeWriter { + return NewEntry(logger).WriterLevel(level) +} + +func (entry *Entry) Writer() *io.PipeWriter { + return entry.WriterLevel(InfoLevel) +} + +func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { + reader, writer := io.Pipe() + + var printFunc func(args ...interface{}) + + switch level { + case TraceLevel: + printFunc = entry.Trace + case DebugLevel: + printFunc = entry.Debug + case InfoLevel: + printFunc = entry.Info + case WarnLevel: + printFunc = entry.Warn + case ErrorLevel: + printFunc = entry.Error + case FatalLevel: + printFunc = entry.Fatal + case PanicLevel: + printFunc = entry.Panic + default: + printFunc = entry.Print + } + + go entry.writerScanner(reader, printFunc) + runtime.SetFinalizer(writer, writerFinalizer) + + return writer +} + +func (entry *Entry) writerScanner(reader *io.PipeReader, printFunc func(args ...interface{})) { + scanner := bufio.NewScanner(reader) + for scanner.Scan() { + printFunc(scanner.Text()) + } + if err := scanner.Err(); err != nil { + entry.Errorf("Error while reading from Writer: %s", err) + } + reader.Close() +} + +func writerFinalizer(writer *io.PipeWriter) { + writer.Close() +} diff --git a/vendor/github.com/spf13/afero/.gitignore b/vendor/github.com/spf13/afero/.gitignore new file mode 100644 index 0000000..9c1d986 --- /dev/null +++ b/vendor/github.com/spf13/afero/.gitignore @@ -0,0 +1,2 @@ +sftpfs/file1 +sftpfs/test/ diff --git a/vendor/github.com/spf13/afero/LICENSE.txt b/vendor/github.com/spf13/afero/LICENSE.txt new file mode 100644 index 0000000..298f0e2 --- /dev/null +++ b/vendor/github.com/spf13/afero/LICENSE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/vendor/github.com/spf13/afero/README.md b/vendor/github.com/spf13/afero/README.md new file mode 100644 index 0000000..3bafbfd --- /dev/null +++ b/vendor/github.com/spf13/afero/README.md @@ -0,0 +1,442 @@ +![afero logo-sm](https://cloud.githubusercontent.com/assets/173412/11490338/d50e16dc-97a5-11e5-8b12-019a300d0fcb.png) + +A FileSystem Abstraction System for Go + +[![Test](https://github.com/spf13/afero/actions/workflows/test.yml/badge.svg)](https://github.com/spf13/afero/actions/workflows/test.yml) [![GoDoc](https://godoc.org/github.com/spf13/afero?status.svg)](https://godoc.org/github.com/spf13/afero) [![Join the chat at https://gitter.im/spf13/afero](https://badges.gitter.im/Dev%20Chat.svg)](https://gitter.im/spf13/afero?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +# Overview + +Afero is a filesystem framework providing a simple, uniform and universal API +interacting with any filesystem, as an abstraction layer providing interfaces, +types and methods. Afero has an exceptionally clean interface and simple design +without needless constructors or initialization methods. + +Afero is also a library providing a base set of interoperable backend +filesystems that make it easy to work with afero while retaining all the power +and benefit of the os and ioutil packages. + +Afero provides significant improvements over using the os package alone, most +notably the ability to create mock and testing filesystems without relying on the disk. + +It is suitable for use in any situation where you would consider using the OS +package as it provides an additional abstraction that makes it easy to use a +memory backed file system during testing. It also adds support for the http +filesystem for full interoperability. 
+ + +## Afero Features + +* A single consistent API for accessing a variety of filesystems +* Interoperation between a variety of file system types +* A set of interfaces to encourage and enforce interoperability between backends +* An atomic cross platform memory backed file system +* Support for compositional (union) file systems by combining multiple file systems acting as one +* Specialized backends which modify existing filesystems (Read Only, Regexp filtered) +* A set of utility functions ported from io, ioutil & hugo to be afero aware +* Wrapper for go 1.16 filesystem abstraction `io/fs.FS` + +# Using Afero + +Afero is easy to use and easier to adopt. + +A few different ways you could use Afero: + +* Use the interfaces alone to define your own file system. +* Wrapper for the OS packages. +* Define different filesystems for different parts of your application. +* Use Afero for mock filesystems while testing + +## Step 1: Install Afero + +First use go get to install the latest version of the library. + + $ go get github.com/spf13/afero + +Next include Afero in your application. +```go +import "github.com/spf13/afero" +``` + +## Step 2: Declare a backend + +First define a package variable and set it to a pointer to a filesystem. +```go +var AppFs = afero.NewMemMapFs() + +or + +var AppFs = afero.NewOsFs() +``` +It is important to note that if you repeat the composite literal you +will be using a completely new and isolated filesystem. In the case of +OsFs it will still use the same underlying filesystem but will reduce +the ability to drop in other filesystems as desired. + +## Step 3: Use it like you would the OS package + +Throughout your application use any function and method like you normally +would. + +So if my application before had: +```go +os.Open("/tmp/foo") +``` +We would replace it with: +```go +AppFs.Open("/tmp/foo") +``` + +`AppFs` being the variable we defined above. 
+ + +## List of all available functions + +File System Methods Available: +```go +Chmod(name string, mode os.FileMode) : error +Chown(name string, uid, gid int) : error +Chtimes(name string, atime time.Time, mtime time.Time) : error +Create(name string) : File, error +Mkdir(name string, perm os.FileMode) : error +MkdirAll(path string, perm os.FileMode) : error +Name() : string +Open(name string) : File, error +OpenFile(name string, flag int, perm os.FileMode) : File, error +Remove(name string) : error +RemoveAll(path string) : error +Rename(oldname, newname string) : error +Stat(name string) : os.FileInfo, error +``` +File Interfaces and Methods Available: +```go +io.Closer +io.Reader +io.ReaderAt +io.Seeker +io.Writer +io.WriterAt + +Name() : string +Readdir(count int) : []os.FileInfo, error +Readdirnames(n int) : []string, error +Stat() : os.FileInfo, error +Sync() : error +Truncate(size int64) : error +WriteString(s string) : ret int, err error +``` +In some applications it may make sense to define a new package that +simply exports the file system variable for easy access from anywhere. + +## Using Afero's utility functions + +Afero provides a set of functions to make it easier to use the underlying file systems. +These functions have been primarily ported from io & ioutil with some developed for Hugo. + +The afero utilities support all afero compatible backends. 
+ +The list of utilities includes: + +```go +DirExists(path string) (bool, error) +Exists(path string) (bool, error) +FileContainsBytes(filename string, subslice []byte) (bool, error) +GetTempDir(subPath string) string +IsDir(path string) (bool, error) +IsEmpty(path string) (bool, error) +ReadDir(dirname string) ([]os.FileInfo, error) +ReadFile(filename string) ([]byte, error) +SafeWriteReader(path string, r io.Reader) (err error) +TempDir(dir, prefix string) (name string, err error) +TempFile(dir, prefix string) (f File, err error) +Walk(root string, walkFn filepath.WalkFunc) error +WriteFile(filename string, data []byte, perm os.FileMode) error +WriteReader(path string, r io.Reader) (err error) +``` +For a complete list see [Afero's GoDoc](https://godoc.org/github.com/spf13/afero) + +They are available under two different approaches to use. You can either call +them directly where the first parameter of each function will be the file +system, or you can declare a new `Afero`, a custom type used to bind these +functions as methods to a given filesystem. + +### Calling utilities directly + +```go +fs := new(afero.MemMapFs) +f, err := afero.TempFile(fs,"", "ioutil-test") + +``` + +### Calling via Afero + +```go +fs := afero.NewMemMapFs() +afs := &afero.Afero{Fs: fs} +f, err := afs.TempFile("", "ioutil-test") +``` + +## Using Afero for Testing + +There is a large benefit to using a mock filesystem for testing. It has a +completely blank state every time it is initialized and can be easily +reproducible regardless of OS. You could create files to your heart’s content +and the file access would be fast while also saving you from all the annoying +issues with deleting temporary files, Windows file locking, etc. The MemMapFs +backend is perfect for testing. + +* Much faster than performing I/O operations on disk +* Avoid security issues and permissions +* Far more control. 
'rm -rf /' with confidence +* Test setup is far more easier to do +* No test cleanup needed + +One way to accomplish this is to define a variable as mentioned above. +In your application this will be set to afero.NewOsFs() during testing you +can set it to afero.NewMemMapFs(). + +It wouldn't be uncommon to have each test initialize a blank slate memory +backend. To do this I would define my `appFS = afero.NewOsFs()` somewhere +appropriate in my application code. This approach ensures that Tests are order +independent, with no test relying on the state left by an earlier test. + +Then in my tests I would initialize a new MemMapFs for each test: +```go +func TestExist(t *testing.T) { + appFS := afero.NewMemMapFs() + // create test files and directories + appFS.MkdirAll("src/a", 0755) + afero.WriteFile(appFS, "src/a/b", []byte("file b"), 0644) + afero.WriteFile(appFS, "src/c", []byte("file c"), 0644) + name := "src/c" + _, err := appFS.Stat(name) + if os.IsNotExist(err) { + t.Errorf("file \"%s\" does not exist.\n", name) + } +} +``` + +# Available Backends + +## Operating System Native + +### OsFs + +The first is simply a wrapper around the native OS calls. This makes it +very easy to use as all of the calls are the same as the existing OS +calls. It also makes it trivial to have your code use the OS during +operation and a mock filesystem during testing or as needed. + +```go +appfs := afero.NewOsFs() +appfs.MkdirAll("src/a", 0755) +``` + +## Memory Backed Storage + +### MemMapFs + +Afero also provides a fully atomic memory backed filesystem perfect for use in +mocking and to speed up unnecessary disk io when persistence isn’t +necessary. It is fully concurrent and will work within go routines +safely. + +```go +mm := afero.NewMemMapFs() +mm.MkdirAll("src/a", 0755) +``` + +#### InMemoryFile + +As part of MemMapFs, Afero also provides an atomic, fully concurrent memory +backed file implementation. This can be used in other memory backed file +systems with ease. 
Plans are to add a radix tree memory stored file +system using InMemoryFile. + +## Network Interfaces + +### SftpFs + +Afero has experimental support for secure file transfer protocol (sftp). Which can +be used to perform file operations over a encrypted channel. + +### GCSFs + +Afero has experimental support for Google Cloud Storage (GCS). You can either set the +`GOOGLE_APPLICATION_CREDENTIALS_JSON` env variable to your JSON credentials or use `opts` in +`NewGcsFS` to configure access to your GCS bucket. + +Some known limitations of the existing implementation: +* No Chmod support - The GCS ACL could probably be mapped to *nix style permissions but that would add another level of complexity and is ignored in this version. +* No Chtimes support - Could be simulated with attributes (gcs a/m-times are set implicitly) but that's is left for another version. +* Not thread safe - Also assumes all file operations are done through the same instance of the GcsFs. File operations between different GcsFs instances are not guaranteed to be consistent. + + +## Filtering Backends + +### BasePathFs + +The BasePathFs restricts all operations to a given path within an Fs. +The given file name to the operations on this Fs will be prepended with +the base path before calling the source Fs. + +```go +bp := afero.NewBasePathFs(afero.NewOsFs(), "/base/path") +``` + +### ReadOnlyFs + +A thin wrapper around the source Fs providing a read only view. + +```go +fs := afero.NewReadOnlyFs(afero.NewOsFs()) +_, err := fs.Create("/file.txt") +// err = syscall.EPERM +``` + +# RegexpFs + +A filtered view on file names, any file NOT matching +the passed regexp will be treated as non-existing. +Files not matching the regexp provided will not be created. +Directories are not filtered. 
+ +```go +fs := afero.NewRegexpFs(afero.NewMemMapFs(), regexp.MustCompile(`\.txt$`)) +_, err := fs.Create("/file.html") +// err = syscall.ENOENT +``` + +### HttpFs + +Afero provides an http compatible backend which can wrap any of the existing +backends. + +The Http package requires a slightly specific version of Open which +returns an http.File type. + +Afero provides an httpFs file system which satisfies this requirement. +Any Afero FileSystem can be used as an httpFs. + +```go +httpFs := afero.NewHttpFs() +fileserver := http.FileServer(httpFs.Dir()) +http.Handle("/", fileserver) +``` + +## Composite Backends + +Afero provides the ability have two filesystems (or more) act as a single +file system. + +### CacheOnReadFs + +The CacheOnReadFs will lazily make copies of any accessed files from the base +layer into the overlay. Subsequent reads will be pulled from the overlay +directly permitting the request is within the cache duration of when it was +created in the overlay. + +If the base filesystem is writeable, any changes to files will be +done first to the base, then to the overlay layer. Write calls to open file +handles like `Write()` or `Truncate()` to the overlay first. + +To writing files to the overlay only, you can use the overlay Fs directly (not +via the union Fs). + +Cache files in the layer for the given time.Duration, a cache duration of 0 +means "forever" meaning the file will not be re-requested from the base ever. + +A read-only base will make the overlay also read-only but still copy files +from the base to the overlay when they're not present (or outdated) in the +caching layer. + +```go +base := afero.NewOsFs() +layer := afero.NewMemMapFs() +ufs := afero.NewCacheOnReadFs(base, layer, 100 * time.Second) +``` + +### CopyOnWriteFs() + +The CopyOnWriteFs is a read only base file system with a potentially +writeable layer on top. + +Read operations will first look in the overlay and if not found there, will +serve the file from the base. 
+ +Changes to the file system will only be made in the overlay. + +Any attempt to modify a file found only in the base will copy the file to the +overlay layer before modification (including opening a file with a writable +handle). + +Removing and Renaming files present only in the base layer is not currently +permitted. If a file is present in the base layer and the overlay, only the +overlay will be removed/renamed. + +```go + base := afero.NewOsFs() + roBase := afero.NewReadOnlyFs(base) + ufs := afero.NewCopyOnWriteFs(roBase, afero.NewMemMapFs()) + + fh, _ = ufs.Create("/home/test/file2.txt") + fh.WriteString("This is a test") + fh.Close() +``` + +In this example all write operations will only occur in memory (MemMapFs) +leaving the base filesystem (OsFs) untouched. + + +## Desired/possible backends + +The following is a short list of possible backends we hope someone will +implement: + +* SSH +* S3 + +# About the project + +## What's in the name + +Afero comes from the latin roots Ad-Facere. + +**"Ad"** is a prefix meaning "to". + +**"Facere"** is a form of the root "faciÅ" making "make or do". + +The literal meaning of afero is "to make" or "to do" which seems very fitting +for a library that allows one to make files and directories and do things with them. + +The English word that shares the same roots as Afero is "affair". Affair shares +the same concept but as a noun it means "something that is made or done" or "an +object of a particular type". + +It's also nice that unlike some of my other libraries (hugo, cobra, viper) it +Googles very well. + +## Release Notes + +See the [Releases Page](https://github.com/spf13/afero/releases). + +## Contributing + +1. Fork it +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Commit your changes (`git commit -am 'Add some feature'`) +4. Push to the branch (`git push origin my-new-feature`) +5. 
Create new Pull Request + +## Contributors + +Names in no particular order: + +* [spf13](https://github.com/spf13) +* [jaqx0r](https://github.com/jaqx0r) +* [mbertschler](https://github.com/mbertschler) +* [xor-gate](https://github.com/xor-gate) + +## License + +Afero is released under the Apache 2.0 license. See +[LICENSE.txt](https://github.com/spf13/afero/blob/master/LICENSE.txt) diff --git a/vendor/github.com/spf13/afero/afero.go b/vendor/github.com/spf13/afero/afero.go new file mode 100644 index 0000000..39f6585 --- /dev/null +++ b/vendor/github.com/spf13/afero/afero.go @@ -0,0 +1,111 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package afero provides types and methods for interacting with the filesystem, +// as an abstraction layer. + +// Afero also provides a few implementations that are mostly interoperable. One that +// uses the operating system filesystem, one that uses memory to store files +// (cross platform) and an interface that should be implemented if you want to +// provide your own filesystem. + +package afero + +import ( + "errors" + "io" + "os" + "time" +) + +type Afero struct { + Fs +} + +// File represents a file in the filesystem. 
+type File interface { + io.Closer + io.Reader + io.ReaderAt + io.Seeker + io.Writer + io.WriterAt + + Name() string + Readdir(count int) ([]os.FileInfo, error) + Readdirnames(n int) ([]string, error) + Stat() (os.FileInfo, error) + Sync() error + Truncate(size int64) error + WriteString(s string) (ret int, err error) +} + +// Fs is the filesystem interface. +// +// Any simulated or real filesystem should implement this interface. +type Fs interface { + // Create creates a file in the filesystem, returning the file and an + // error, if any happens. + Create(name string) (File, error) + + // Mkdir creates a directory in the filesystem, return an error if any + // happens. + Mkdir(name string, perm os.FileMode) error + + // MkdirAll creates a directory path and all parents that does not exist + // yet. + MkdirAll(path string, perm os.FileMode) error + + // Open opens a file, returning it or an error, if any happens. + Open(name string) (File, error) + + // OpenFile opens a file using the given flags and the given mode. + OpenFile(name string, flag int, perm os.FileMode) (File, error) + + // Remove removes a file identified by name, returning an error, if any + // happens. + Remove(name string) error + + // RemoveAll removes a directory path and any children it contains. It + // does not fail if the path does not exist (return nil). + RemoveAll(path string) error + + // Rename renames a file. + Rename(oldname, newname string) error + + // Stat returns a FileInfo describing the named file, or an error, if any + // happens. + Stat(name string) (os.FileInfo, error) + + // The name of this FileSystem + Name() string + + // Chmod changes the mode of the named file to mode. + Chmod(name string, mode os.FileMode) error + + // Chown changes the uid and gid of the named file. 
+ Chown(name string, uid, gid int) error + + // Chtimes changes the access and modification times of the named file + Chtimes(name string, atime time.Time, mtime time.Time) error +} + +var ( + ErrFileClosed = errors.New("File is closed") + ErrOutOfRange = errors.New("out of range") + ErrTooLarge = errors.New("too large") + ErrFileNotFound = os.ErrNotExist + ErrFileExists = os.ErrExist + ErrDestinationExists = os.ErrExist +) diff --git a/vendor/github.com/spf13/afero/appveyor.yml b/vendor/github.com/spf13/afero/appveyor.yml new file mode 100644 index 0000000..65e20e8 --- /dev/null +++ b/vendor/github.com/spf13/afero/appveyor.yml @@ -0,0 +1,10 @@ +# This currently does nothing. We have moved to GitHub action, but this is kept +# until spf13 has disabled this project in AppVeyor. +version: '{build}' +clone_folder: C:\gopath\src\github.com\spf13\afero +environment: + GOPATH: C:\gopath +build_script: +- cmd: >- + go version + diff --git a/vendor/github.com/spf13/afero/basepath.go b/vendor/github.com/spf13/afero/basepath.go new file mode 100644 index 0000000..2e72793 --- /dev/null +++ b/vendor/github.com/spf13/afero/basepath.go @@ -0,0 +1,222 @@ +package afero + +import ( + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" + "time" +) + +var ( + _ Lstater = (*BasePathFs)(nil) + _ fs.ReadDirFile = (*BasePathFile)(nil) +) + +// The BasePathFs restricts all operations to a given path within an Fs. +// The given file name to the operations on this Fs will be prepended with +// the base path before calling the base Fs. +// Any file name (after filepath.Clean()) outside this base path will be +// treated as non existing file. +// +// Note that it does not clean the error messages on return, so you may +// reveal the real path on errors. 
+type BasePathFs struct { + source Fs + path string +} + +type BasePathFile struct { + File + path string +} + +func (f *BasePathFile) Name() string { + sourcename := f.File.Name() + return strings.TrimPrefix(sourcename, filepath.Clean(f.path)) +} + +func (f *BasePathFile) ReadDir(n int) ([]fs.DirEntry, error) { + if rdf, ok := f.File.(fs.ReadDirFile); ok { + return rdf.ReadDir(n) + } + return readDirFile{f.File}.ReadDir(n) +} + +func NewBasePathFs(source Fs, path string) Fs { + return &BasePathFs{source: source, path: path} +} + +// on a file outside the base path it returns the given file name and an error, +// else the given file with the base path prepended +func (b *BasePathFs) RealPath(name string) (path string, err error) { + if err := validateBasePathName(name); err != nil { + return name, err + } + + bpath := filepath.Clean(b.path) + path = filepath.Clean(filepath.Join(bpath, name)) + if !strings.HasPrefix(path, bpath) { + return name, os.ErrNotExist + } + + return path, nil +} + +func validateBasePathName(name string) error { + if runtime.GOOS != "windows" { + // Not much to do here; + // the virtual file paths all look absolute on *nix. + return nil + } + + // On Windows a common mistake would be to provide an absolute OS path + // We could strip out the base part, but that would not be very portable. 
+ if filepath.IsAbs(name) { + return os.ErrNotExist + } + + return nil +} + +func (b *BasePathFs) Chtimes(name string, atime, mtime time.Time) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chtimes", Path: name, Err: err} + } + return b.source.Chtimes(name, atime, mtime) +} + +func (b *BasePathFs) Chmod(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chmod", Path: name, Err: err} + } + return b.source.Chmod(name, mode) +} + +func (b *BasePathFs) Chown(name string, uid, gid int) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chown", Path: name, Err: err} + } + return b.source.Chown(name, uid, gid) +} + +func (b *BasePathFs) Name() string { + return "BasePathFs" +} + +func (b *BasePathFs) Stat(name string) (fi os.FileInfo, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "stat", Path: name, Err: err} + } + return b.source.Stat(name) +} + +func (b *BasePathFs) Rename(oldname, newname string) (err error) { + if oldname, err = b.RealPath(oldname); err != nil { + return &os.PathError{Op: "rename", Path: oldname, Err: err} + } + if newname, err = b.RealPath(newname); err != nil { + return &os.PathError{Op: "rename", Path: newname, Err: err} + } + return b.source.Rename(oldname, newname) +} + +func (b *BasePathFs) RemoveAll(name string) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "remove_all", Path: name, Err: err} + } + return b.source.RemoveAll(name) +} + +func (b *BasePathFs) Remove(name string) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "remove", Path: name, Err: err} + } + return b.source.Remove(name) +} + +func (b *BasePathFs) OpenFile(name string, flag int, mode os.FileMode) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: 
"openfile", Path: name, Err: err} + } + sourcef, err := b.source.OpenFile(name, flag, mode) + if err != nil { + return nil, err + } + return &BasePathFile{sourcef, b.path}, nil +} + +func (b *BasePathFs) Open(name string) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "open", Path: name, Err: err} + } + sourcef, err := b.source.Open(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil +} + +func (b *BasePathFs) Mkdir(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "mkdir", Path: name, Err: err} + } + return b.source.Mkdir(name, mode) +} + +func (b *BasePathFs) MkdirAll(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "mkdir", Path: name, Err: err} + } + return b.source.MkdirAll(name, mode) +} + +func (b *BasePathFs) Create(name string) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "create", Path: name, Err: err} + } + sourcef, err := b.source.Create(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil +} + +func (b *BasePathFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + name, err := b.RealPath(name) + if err != nil { + return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err} + } + if lstater, ok := b.source.(Lstater); ok { + return lstater.LstatIfPossible(name) + } + fi, err := b.source.Stat(name) + return fi, false, err +} + +func (b *BasePathFs) SymlinkIfPossible(oldname, newname string) error { + oldname, err := b.RealPath(oldname) + if err != nil { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} + } + newname, err = b.RealPath(newname) + if err != nil { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} + } + if linker, ok := 
b.source.(Linker); ok { + return linker.SymlinkIfPossible(oldname, newname) + } + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (b *BasePathFs) ReadlinkIfPossible(name string) (string, error) { + name, err := b.RealPath(name) + if err != nil { + return "", &os.PathError{Op: "readlink", Path: name, Err: err} + } + if reader, ok := b.source.(LinkReader); ok { + return reader.ReadlinkIfPossible(name) + } + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} diff --git a/vendor/github.com/spf13/afero/cacheOnReadFs.go b/vendor/github.com/spf13/afero/cacheOnReadFs.go new file mode 100644 index 0000000..017d344 --- /dev/null +++ b/vendor/github.com/spf13/afero/cacheOnReadFs.go @@ -0,0 +1,315 @@ +package afero + +import ( + "os" + "syscall" + "time" +) + +// If the cache duration is 0, cache time will be unlimited, i.e. once +// a file is in the layer, the base will never be read again for this file. +// +// For cache times greater than 0, the modification time of a file is +// checked. Note that a lot of file system implementations only allow a +// resolution of a second for timestamps... or as the godoc for os.Chtimes() +// states: "The underlying filesystem may truncate or round the values to a +// less precise time unit." +// +// This caching union will forward all write calls also to the base file +// system first. To prevent writing to the base Fs, wrap it in a read-only +// filter - Note: this will also make the overlay read-only, for writing files +// in the overlay, use the overlay Fs directly, not via the union Fs. 
+type CacheOnReadFs struct { + base Fs + layer Fs + cacheTime time.Duration +} + +func NewCacheOnReadFs(base Fs, layer Fs, cacheTime time.Duration) Fs { + return &CacheOnReadFs{base: base, layer: layer, cacheTime: cacheTime} +} + +type cacheState int + +const ( + // not present in the overlay, unknown if it exists in the base: + cacheMiss cacheState = iota + // present in the overlay and in base, base file is newer: + cacheStale + // present in the overlay - with cache time == 0 it may exist in the base, + // with cacheTime > 0 it exists in the base and is same age or newer in the + // overlay + cacheHit + // happens if someone writes directly to the overlay without + // going through this union + cacheLocal +) + +func (u *CacheOnReadFs) cacheStatus(name string) (state cacheState, fi os.FileInfo, err error) { + var lfi, bfi os.FileInfo + lfi, err = u.layer.Stat(name) + if err == nil { + if u.cacheTime == 0 { + return cacheHit, lfi, nil + } + if lfi.ModTime().Add(u.cacheTime).Before(time.Now()) { + bfi, err = u.base.Stat(name) + if err != nil { + return cacheLocal, lfi, nil + } + if bfi.ModTime().After(lfi.ModTime()) { + return cacheStale, bfi, nil + } + } + return cacheHit, lfi, nil + } + + if err == syscall.ENOENT || os.IsNotExist(err) { + return cacheMiss, nil, nil + } + + return cacheMiss, nil, err +} + +func (u *CacheOnReadFs) copyToLayer(name string) error { + return copyToLayer(u.base, u.layer, name) +} + +func (u *CacheOnReadFs) copyFileToLayer(name string, flag int, perm os.FileMode) error { + return copyFileToLayer(u.base, u.layer, name, flag, perm) +} + +func (u *CacheOnReadFs) Chtimes(name string, atime, mtime time.Time) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chtimes(name, atime, mtime) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chtimes(name, atime, mtime) + } + if err != nil { + return 
err + } + return u.layer.Chtimes(name, atime, mtime) +} + +func (u *CacheOnReadFs) Chmod(name string, mode os.FileMode) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chmod(name, mode) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chmod(name, mode) + } + if err != nil { + return err + } + return u.layer.Chmod(name, mode) +} + +func (u *CacheOnReadFs) Chown(name string, uid, gid int) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chown(name, uid, gid) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chown(name, uid, gid) + } + if err != nil { + return err + } + return u.layer.Chown(name, uid, gid) +} + +func (u *CacheOnReadFs) Stat(name string) (os.FileInfo, error) { + st, fi, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + switch st { + case cacheMiss: + return u.base.Stat(name) + default: // cacheStale has base, cacheHit and cacheLocal the layer os.FileInfo + return fi, nil + } +} + +func (u *CacheOnReadFs) Rename(oldname, newname string) error { + st, _, err := u.cacheStatus(oldname) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Rename(oldname, newname) + case cacheStale, cacheMiss: + if err := u.copyToLayer(oldname); err != nil { + return err + } + err = u.base.Rename(oldname, newname) + } + if err != nil { + return err + } + return u.layer.Rename(oldname, newname) +} + +func (u *CacheOnReadFs) Remove(name string) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit, cacheStale, cacheMiss: + err = u.base.Remove(name) + } + if err != nil { + return err + } + return u.layer.Remove(name) +} + +func (u 
*CacheOnReadFs) RemoveAll(name string) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit, cacheStale, cacheMiss: + err = u.base.RemoveAll(name) + } + if err != nil { + return err + } + return u.layer.RemoveAll(name) +} + +func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + st, _, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + switch st { + case cacheLocal, cacheHit: + default: + if err := u.copyFileToLayer(name, flag, perm); err != nil { + return nil, err + } + } + if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + bfi, err := u.base.OpenFile(name, flag, perm) + if err != nil { + return nil, err + } + lfi, err := u.layer.OpenFile(name, flag, perm) + if err != nil { + bfi.Close() // oops, what if O_TRUNC was set and file opening in the layer failed...? + return nil, err + } + return &UnionFile{Base: bfi, Layer: lfi}, nil + } + return u.layer.OpenFile(name, flag, perm) +} + +func (u *CacheOnReadFs) Open(name string) (File, error) { + st, fi, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + + switch st { + case cacheLocal: + return u.layer.Open(name) + + case cacheMiss: + bfi, err := u.base.Stat(name) + if err != nil { + return nil, err + } + if bfi.IsDir() { + return u.base.Open(name) + } + if err := u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.Open(name) + + case cacheStale: + if !fi.IsDir() { + if err := u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.Open(name) + } + case cacheHit: + if !fi.IsDir() { + return u.layer.Open(name) + } + } + // the dirs from cacheHit, cacheStale fall down here: + bfile, _ := u.base.Open(name) + lfile, err := u.layer.Open(name) + if err != nil && bfile == nil { + return nil, err + } + return &UnionFile{Base: bfile, Layer: lfile}, nil +} + +func (u *CacheOnReadFs) Mkdir(name string, perm 
os.FileMode) error { + err := u.base.Mkdir(name, perm) + if err != nil { + return err + } + return u.layer.MkdirAll(name, perm) // yes, MkdirAll... we cannot assume it exists in the cache +} + +func (u *CacheOnReadFs) Name() string { + return "CacheOnReadFs" +} + +func (u *CacheOnReadFs) MkdirAll(name string, perm os.FileMode) error { + err := u.base.MkdirAll(name, perm) + if err != nil { + return err + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CacheOnReadFs) Create(name string) (File, error) { + bfh, err := u.base.Create(name) + if err != nil { + return nil, err + } + lfh, err := u.layer.Create(name) + if err != nil { + // oops, see comment about OS_TRUNC above, should we remove? then we have to + // remember if the file did not exist before + bfh.Close() + return nil, err + } + return &UnionFile{Base: bfh, Layer: lfh}, nil +} diff --git a/vendor/github.com/spf13/afero/const_bsds.go b/vendor/github.com/spf13/afero/const_bsds.go new file mode 100644 index 0000000..30855de --- /dev/null +++ b/vendor/github.com/spf13/afero/const_bsds.go @@ -0,0 +1,23 @@ +// Copyright © 2016 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build aix || darwin || openbsd || freebsd || netbsd || dragonfly || zos +// +build aix darwin openbsd freebsd netbsd dragonfly zos + +package afero + +import ( + "syscall" +) + +const BADFD = syscall.EBADF diff --git a/vendor/github.com/spf13/afero/const_win_unix.go b/vendor/github.com/spf13/afero/const_win_unix.go new file mode 100644 index 0000000..12792d2 --- /dev/null +++ b/vendor/github.com/spf13/afero/const_win_unix.go @@ -0,0 +1,22 @@ +// Copyright © 2016 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +//go:build !darwin && !openbsd && !freebsd && !dragonfly && !netbsd && !aix && !zos +// +build !darwin,!openbsd,!freebsd,!dragonfly,!netbsd,!aix,!zos + +package afero + +import ( + "syscall" +) + +const BADFD = syscall.EBADFD diff --git a/vendor/github.com/spf13/afero/copyOnWriteFs.go b/vendor/github.com/spf13/afero/copyOnWriteFs.go new file mode 100644 index 0000000..184d6dd --- /dev/null +++ b/vendor/github.com/spf13/afero/copyOnWriteFs.go @@ -0,0 +1,327 @@ +package afero + +import ( + "fmt" + "os" + "path/filepath" + "syscall" + "time" +) + +var _ Lstater = (*CopyOnWriteFs)(nil) + +// The CopyOnWriteFs is a union filesystem: a read only base file system with +// a possibly writeable layer on top. Changes to the file system will only +// be made in the overlay: Changing an existing file in the base layer which +// is not present in the overlay will copy the file to the overlay ("changing" +// includes also calls to e.g. 
Chtimes(), Chmod() and Chown()). +// +// Reading directories is currently only supported via Open(), not OpenFile(). +type CopyOnWriteFs struct { + base Fs + layer Fs +} + +func NewCopyOnWriteFs(base Fs, layer Fs) Fs { + return &CopyOnWriteFs{base: base, layer: layer} +} + +// Returns true if the file is not in the overlay +func (u *CopyOnWriteFs) isBaseFile(name string) (bool, error) { + if _, err := u.layer.Stat(name); err == nil { + return false, nil + } + _, err := u.base.Stat(name) + if err != nil { + if oerr, ok := err.(*os.PathError); ok { + if oerr.Err == os.ErrNotExist || oerr.Err == syscall.ENOENT || oerr.Err == syscall.ENOTDIR { + return false, nil + } + } + if err == syscall.ENOENT { + return false, nil + } + } + return true, err +} + +func (u *CopyOnWriteFs) copyToLayer(name string) error { + return copyToLayer(u.base, u.layer, name) +} + +func (u *CopyOnWriteFs) Chtimes(name string, atime, mtime time.Time) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chtimes(name, atime, mtime) +} + +func (u *CopyOnWriteFs) Chmod(name string, mode os.FileMode) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chmod(name, mode) +} + +func (u *CopyOnWriteFs) Chown(name string, uid, gid int) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chown(name, uid, gid) +} + +func (u *CopyOnWriteFs) Stat(name string) (os.FileInfo, error) { + fi, err := u.layer.Stat(name) + if err != nil { + isNotExist := u.isNotExist(err) + if isNotExist { + return u.base.Stat(name) + } + return nil, err + } + return fi, nil +} + +func (u *CopyOnWriteFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + llayer, ok1 := u.layer.(Lstater) 
+ lbase, ok2 := u.base.(Lstater) + + if ok1 { + fi, b, err := llayer.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + + if !u.isNotExist(err) { + return nil, b, err + } + } + + if ok2 { + fi, b, err := lbase.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + if !u.isNotExist(err) { + return nil, b, err + } + } + + fi, err := u.Stat(name) + + return fi, false, err +} + +func (u *CopyOnWriteFs) SymlinkIfPossible(oldname, newname string) error { + if slayer, ok := u.layer.(Linker); ok { + return slayer.SymlinkIfPossible(oldname, newname) + } + + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (u *CopyOnWriteFs) ReadlinkIfPossible(name string) (string, error) { + if rlayer, ok := u.layer.(LinkReader); ok { + return rlayer.ReadlinkIfPossible(name) + } + + if rbase, ok := u.base.(LinkReader); ok { + return rbase.ReadlinkIfPossible(name) + } + + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} + +func (u *CopyOnWriteFs) isNotExist(err error) bool { + if e, ok := err.(*os.PathError); ok { + err = e.Err + } + if err == os.ErrNotExist || err == syscall.ENOENT || err == syscall.ENOTDIR { + return true + } + return false +} + +// Renaming files present only in the base layer is not permitted +func (u *CopyOnWriteFs) Rename(oldname, newname string) error { + b, err := u.isBaseFile(oldname) + if err != nil { + return err + } + if b { + return syscall.EPERM + } + return u.layer.Rename(oldname, newname) +} + +// Removing files present only in the base layer is not permitted. If +// a file is present in the base layer and the overlay, only the overlay +// will be removed. 
+func (u *CopyOnWriteFs) Remove(name string) error { + err := u.layer.Remove(name) + switch err { + case syscall.ENOENT: + _, err = u.base.Stat(name) + if err == nil { + return syscall.EPERM + } + return syscall.ENOENT + default: + return err + } +} + +func (u *CopyOnWriteFs) RemoveAll(name string) error { + err := u.layer.RemoveAll(name) + switch err { + case syscall.ENOENT: + _, err = u.base.Stat(name) + if err == nil { + return syscall.EPERM + } + return syscall.ENOENT + default: + return err + } +} + +func (u *CopyOnWriteFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + b, err := u.isBaseFile(name) + if err != nil { + return nil, err + } + + if flag&(os.O_WRONLY|os.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + if b { + if err = u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.OpenFile(name, flag, perm) + } + + dir := filepath.Dir(name) + isaDir, err := IsDir(u.base, dir) + if err != nil && !os.IsNotExist(err) { + return nil, err + } + if isaDir { + if err = u.layer.MkdirAll(dir, 0o777); err != nil { + return nil, err + } + return u.layer.OpenFile(name, flag, perm) + } + + isaDir, err = IsDir(u.layer, dir) + if err != nil { + return nil, err + } + if isaDir { + return u.layer.OpenFile(name, flag, perm) + } + + return nil, &os.PathError{Op: "open", Path: name, Err: syscall.ENOTDIR} // ...or os.ErrNotExist? + } + if b { + return u.base.OpenFile(name, flag, perm) + } + return u.layer.OpenFile(name, flag, perm) +} + +// This function handles the 9 different possibilities caused +// by the union which are the intersection of the following... 
+// +// layer: doesn't exist, exists as a file, and exists as a directory +// base: doesn't exist, exists as a file, and exists as a directory +func (u *CopyOnWriteFs) Open(name string) (File, error) { + // Since the overlay overrides the base we check that first + b, err := u.isBaseFile(name) + if err != nil { + return nil, err + } + + // If overlay doesn't exist, return the base (base state irrelevant) + if b { + return u.base.Open(name) + } + + // If overlay is a file, return it (base state irrelevant) + dir, err := IsDir(u.layer, name) + if err != nil { + return nil, err + } + if !dir { + return u.layer.Open(name) + } + + // Overlay is a directory, base state now matters. + // Base state has 3 states to check but 2 outcomes: + // A. It's a file or non-readable in the base (return just the overlay) + // B. It's an accessible directory in the base (return a UnionFile) + + // If base is file or nonreadable, return overlay + dir, err = IsDir(u.base, name) + if !dir || err != nil { + return u.layer.Open(name) + } + + // Both base & layer are directories + // Return union file (if opens are without error) + bfile, bErr := u.base.Open(name) + lfile, lErr := u.layer.Open(name) + + // If either have errors at this point something is very wrong. Return nil and the errors + if bErr != nil || lErr != nil { + return nil, fmt.Errorf("BaseErr: %v\nOverlayErr: %v", bErr, lErr) + } + + return &UnionFile{Base: bfile, Layer: lfile}, nil +} + +func (u *CopyOnWriteFs) Mkdir(name string, perm os.FileMode) error { + dir, err := IsDir(u.base, name) + if err != nil { + return u.layer.MkdirAll(name, perm) + } + if dir { + return ErrFileExists + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CopyOnWriteFs) Name() string { + return "CopyOnWriteFs" +} + +func (u *CopyOnWriteFs) MkdirAll(name string, perm os.FileMode) error { + dir, err := IsDir(u.base, name) + if err != nil { + return u.layer.MkdirAll(name, perm) + } + if dir { + // This is in line with how os.MkdirAll behaves. 
+ return nil + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CopyOnWriteFs) Create(name string) (File, error) { + return u.OpenFile(name, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0o666) +} diff --git a/vendor/github.com/spf13/afero/httpFs.go b/vendor/github.com/spf13/afero/httpFs.go new file mode 100644 index 0000000..ac0de6d --- /dev/null +++ b/vendor/github.com/spf13/afero/httpFs.go @@ -0,0 +1,114 @@ +// Copyright © 2014 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "errors" + "net/http" + "os" + "path" + "path/filepath" + "strings" + "time" +) + +type httpDir struct { + basePath string + fs HttpFs +} + +func (d httpDir) Open(name string) (http.File, error) { + if filepath.Separator != '/' && strings.ContainsRune(name, filepath.Separator) || + strings.Contains(name, "\x00") { + return nil, errors.New("http: invalid character in file path") + } + dir := string(d.basePath) + if dir == "" { + dir = "." 
+ } + + f, err := d.fs.Open(filepath.Join(dir, filepath.FromSlash(path.Clean("/"+name)))) + if err != nil { + return nil, err + } + return f, nil +} + +type HttpFs struct { + source Fs +} + +func NewHttpFs(source Fs) *HttpFs { + return &HttpFs{source: source} +} + +func (h HttpFs) Dir(s string) *httpDir { + return &httpDir{basePath: s, fs: h} +} + +func (h HttpFs) Name() string { return "h HttpFs" } + +func (h HttpFs) Create(name string) (File, error) { + return h.source.Create(name) +} + +func (h HttpFs) Chmod(name string, mode os.FileMode) error { + return h.source.Chmod(name, mode) +} + +func (h HttpFs) Chown(name string, uid, gid int) error { + return h.source.Chown(name, uid, gid) +} + +func (h HttpFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + return h.source.Chtimes(name, atime, mtime) +} + +func (h HttpFs) Mkdir(name string, perm os.FileMode) error { + return h.source.Mkdir(name, perm) +} + +func (h HttpFs) MkdirAll(path string, perm os.FileMode) error { + return h.source.MkdirAll(path, perm) +} + +func (h HttpFs) Open(name string) (http.File, error) { + f, err := h.source.Open(name) + if err == nil { + if httpfile, ok := f.(http.File); ok { + return httpfile, nil + } + } + return nil, err +} + +func (h HttpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + return h.source.OpenFile(name, flag, perm) +} + +func (h HttpFs) Remove(name string) error { + return h.source.Remove(name) +} + +func (h HttpFs) RemoveAll(path string) error { + return h.source.RemoveAll(path) +} + +func (h HttpFs) Rename(oldname, newname string) error { + return h.source.Rename(oldname, newname) +} + +func (h HttpFs) Stat(name string) (os.FileInfo, error) { + return h.source.Stat(name) +} diff --git a/vendor/github.com/spf13/afero/internal/common/adapters.go b/vendor/github.com/spf13/afero/internal/common/adapters.go new file mode 100644 index 0000000..60685ca --- /dev/null +++ b/vendor/github.com/spf13/afero/internal/common/adapters.go @@ 
-0,0 +1,27 @@ +// Copyright © 2022 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package common + +import "io/fs" + +// FileInfoDirEntry provides an adapter from os.FileInfo to fs.DirEntry +type FileInfoDirEntry struct { + fs.FileInfo +} + +var _ fs.DirEntry = FileInfoDirEntry{} + +func (d FileInfoDirEntry) Type() fs.FileMode { return d.FileInfo.Mode().Type() } + +func (d FileInfoDirEntry) Info() (fs.FileInfo, error) { return d.FileInfo, nil } diff --git a/vendor/github.com/spf13/afero/iofs.go b/vendor/github.com/spf13/afero/iofs.go new file mode 100644 index 0000000..938b931 --- /dev/null +++ b/vendor/github.com/spf13/afero/iofs.go @@ -0,0 +1,298 @@ +//go:build go1.16 +// +build go1.16 + +package afero + +import ( + "io" + "io/fs" + "os" + "path" + "sort" + "time" + + "github.com/spf13/afero/internal/common" +) + +// IOFS adopts afero.Fs to stdlib io/fs.FS +type IOFS struct { + Fs +} + +func NewIOFS(fs Fs) IOFS { + return IOFS{Fs: fs} +} + +var ( + _ fs.FS = IOFS{} + _ fs.GlobFS = IOFS{} + _ fs.ReadDirFS = IOFS{} + _ fs.ReadFileFS = IOFS{} + _ fs.StatFS = IOFS{} + _ fs.SubFS = IOFS{} +) + +func (iofs IOFS) Open(name string) (fs.File, error) { + const op = "open" + + // by convention for fs.FS implementations we should perform this check + if !fs.ValidPath(name) { + return nil, iofs.wrapError(op, name, fs.ErrInvalid) + } + + file, err := iofs.Fs.Open(name) + if err != nil { + return nil, iofs.wrapError(op, name, err) + } + + // file should 
implement fs.ReadDirFile + if _, ok := file.(fs.ReadDirFile); !ok { + file = readDirFile{file} + } + + return file, nil +} + +func (iofs IOFS) Glob(pattern string) ([]string, error) { + const op = "glob" + + // afero.Glob does not perform this check but it's required for implementations + if _, err := path.Match(pattern, ""); err != nil { + return nil, iofs.wrapError(op, pattern, err) + } + + items, err := Glob(iofs.Fs, pattern) + if err != nil { + return nil, iofs.wrapError(op, pattern, err) + } + + return items, nil +} + +func (iofs IOFS) ReadDir(name string) ([]fs.DirEntry, error) { + f, err := iofs.Fs.Open(name) + if err != nil { + return nil, iofs.wrapError("readdir", name, err) + } + + defer f.Close() + + if rdf, ok := f.(fs.ReadDirFile); ok { + items, err := rdf.ReadDir(-1) + if err != nil { + return nil, iofs.wrapError("readdir", name, err) + } + sort.Slice(items, func(i, j int) bool { return items[i].Name() < items[j].Name() }) + return items, nil + } + + items, err := f.Readdir(-1) + if err != nil { + return nil, iofs.wrapError("readdir", name, err) + } + sort.Sort(byName(items)) + + ret := make([]fs.DirEntry, len(items)) + for i := range items { + ret[i] = common.FileInfoDirEntry{FileInfo: items[i]} + } + + return ret, nil +} + +func (iofs IOFS) ReadFile(name string) ([]byte, error) { + const op = "readfile" + + if !fs.ValidPath(name) { + return nil, iofs.wrapError(op, name, fs.ErrInvalid) + } + + bytes, err := ReadFile(iofs.Fs, name) + if err != nil { + return nil, iofs.wrapError(op, name, err) + } + + return bytes, nil +} + +func (iofs IOFS) Sub(dir string) (fs.FS, error) { return IOFS{NewBasePathFs(iofs.Fs, dir)}, nil } + +func (IOFS) wrapError(op, path string, err error) error { + if _, ok := err.(*fs.PathError); ok { + return err // don't need to wrap again + } + + return &fs.PathError{ + Op: op, + Path: path, + Err: err, + } +} + +// readDirFile provides adapter from afero.File to fs.ReadDirFile needed for correct Open +type readDirFile struct { + 
File +} + +var _ fs.ReadDirFile = readDirFile{} + +func (r readDirFile) ReadDir(n int) ([]fs.DirEntry, error) { + items, err := r.File.Readdir(n) + if err != nil { + return nil, err + } + + ret := make([]fs.DirEntry, len(items)) + for i := range items { + ret[i] = common.FileInfoDirEntry{FileInfo: items[i]} + } + + return ret, nil +} + +// FromIOFS adopts io/fs.FS to use it as afero.Fs +// Note that io/fs.FS is read-only so all mutating methods will return fs.PathError with fs.ErrPermission +// To store modifications you may use afero.CopyOnWriteFs +type FromIOFS struct { + fs.FS +} + +var _ Fs = FromIOFS{} + +func (f FromIOFS) Create(name string) (File, error) { return nil, notImplemented("create", name) } + +func (f FromIOFS) Mkdir(name string, perm os.FileMode) error { return notImplemented("mkdir", name) } + +func (f FromIOFS) MkdirAll(path string, perm os.FileMode) error { + return notImplemented("mkdirall", path) +} + +func (f FromIOFS) Open(name string) (File, error) { + file, err := f.FS.Open(name) + if err != nil { + return nil, err + } + + return fromIOFSFile{File: file, name: name}, nil +} + +func (f FromIOFS) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + return f.Open(name) +} + +func (f FromIOFS) Remove(name string) error { + return notImplemented("remove", name) +} + +func (f FromIOFS) RemoveAll(path string) error { + return notImplemented("removeall", path) +} + +func (f FromIOFS) Rename(oldname, newname string) error { + return notImplemented("rename", oldname) +} + +func (f FromIOFS) Stat(name string) (os.FileInfo, error) { return fs.Stat(f.FS, name) } + +func (f FromIOFS) Name() string { return "fromiofs" } + +func (f FromIOFS) Chmod(name string, mode os.FileMode) error { + return notImplemented("chmod", name) +} + +func (f FromIOFS) Chown(name string, uid, gid int) error { + return notImplemented("chown", name) +} + +func (f FromIOFS) Chtimes(name string, atime time.Time, mtime time.Time) error { + return 
notImplemented("chtimes", name) +} + +type fromIOFSFile struct { + fs.File + name string +} + +func (f fromIOFSFile) ReadAt(p []byte, off int64) (n int, err error) { + readerAt, ok := f.File.(io.ReaderAt) + if !ok { + return -1, notImplemented("readat", f.name) + } + + return readerAt.ReadAt(p, off) +} + +func (f fromIOFSFile) Seek(offset int64, whence int) (int64, error) { + seeker, ok := f.File.(io.Seeker) + if !ok { + return -1, notImplemented("seek", f.name) + } + + return seeker.Seek(offset, whence) +} + +func (f fromIOFSFile) Write(p []byte) (n int, err error) { + return -1, notImplemented("write", f.name) +} + +func (f fromIOFSFile) WriteAt(p []byte, off int64) (n int, err error) { + return -1, notImplemented("writeat", f.name) +} + +func (f fromIOFSFile) Name() string { return f.name } + +func (f fromIOFSFile) Readdir(count int) ([]os.FileInfo, error) { + rdfile, ok := f.File.(fs.ReadDirFile) + if !ok { + return nil, notImplemented("readdir", f.name) + } + + entries, err := rdfile.ReadDir(count) + if err != nil { + return nil, err + } + + ret := make([]os.FileInfo, len(entries)) + for i := range entries { + ret[i], err = entries[i].Info() + + if err != nil { + return nil, err + } + } + + return ret, nil +} + +func (f fromIOFSFile) Readdirnames(n int) ([]string, error) { + rdfile, ok := f.File.(fs.ReadDirFile) + if !ok { + return nil, notImplemented("readdir", f.name) + } + + entries, err := rdfile.ReadDir(n) + if err != nil { + return nil, err + } + + ret := make([]string, len(entries)) + for i := range entries { + ret[i] = entries[i].Name() + } + + return ret, nil +} + +func (f fromIOFSFile) Sync() error { return nil } + +func (f fromIOFSFile) Truncate(size int64) error { + return notImplemented("truncate", f.name) +} + +func (f fromIOFSFile) WriteString(s string) (ret int, err error) { + return -1, notImplemented("writestring", f.name) +} + +func notImplemented(op, path string) error { + return &fs.PathError{Op: op, Path: path, Err: fs.ErrPermission} +} 
diff --git a/vendor/github.com/spf13/afero/ioutil.go b/vendor/github.com/spf13/afero/ioutil.go new file mode 100644 index 0000000..fa6abe1 --- /dev/null +++ b/vendor/github.com/spf13/afero/ioutil.go @@ -0,0 +1,243 @@ +// Copyright ©2015 The Go Authors +// Copyright ©2015 Steve Francia +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "bytes" + "io" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + "time" +) + +// byName implements sort.Interface. +type byName []os.FileInfo + +func (f byName) Len() int { return len(f) } +func (f byName) Less(i, j int) bool { return f[i].Name() < f[j].Name() } +func (f byName) Swap(i, j int) { f[i], f[j] = f[j], f[i] } + +// ReadDir reads the directory named by dirname and returns +// a list of sorted directory entries. +func (a Afero) ReadDir(dirname string) ([]os.FileInfo, error) { + return ReadDir(a.Fs, dirname) +} + +func ReadDir(fs Fs, dirname string) ([]os.FileInfo, error) { + f, err := fs.Open(dirname) + if err != nil { + return nil, err + } + list, err := f.Readdir(-1) + f.Close() + if err != nil { + return nil, err + } + sort.Sort(byName(list)) + return list, nil +} + +// ReadFile reads the file named by filename and returns the contents. +// A successful call returns err == nil, not err == EOF. Because ReadFile +// reads the whole file, it does not treat an EOF from Read as an error +// to be reported. 
+func (a Afero) ReadFile(filename string) ([]byte, error) { + return ReadFile(a.Fs, filename) +} + +func ReadFile(fs Fs, filename string) ([]byte, error) { + f, err := fs.Open(filename) + if err != nil { + return nil, err + } + defer f.Close() + // It's a good but not certain bet that FileInfo will tell us exactly how much to + // read, so let's try it but be prepared for the answer to be wrong. + var n int64 + + if fi, err := f.Stat(); err == nil { + // Don't preallocate a huge buffer, just in case. + if size := fi.Size(); size < 1e9 { + n = size + } + } + // As initial capacity for readAll, use n + a little extra in case Size is zero, + // and to avoid another allocation after Read has filled the buffer. The readAll + // call will read into its allocated internal buffer cheaply. If the size was + // wrong, we'll either waste some space off the end or reallocate as needed, but + // in the overwhelmingly common case we'll get it just right. + return readAll(f, n+bytes.MinRead) +} + +// readAll reads from r until an error or EOF and returns the data it read +// from the internal buffer allocated with a specified capacity. +func readAll(r io.Reader, capacity int64) (b []byte, err error) { + buf := bytes.NewBuffer(make([]byte, 0, capacity)) + // If the buffer overflows, we will get bytes.ErrTooLarge. + // Return that as an error. Any other panic remains. + defer func() { + e := recover() + if e == nil { + return + } + if panicErr, ok := e.(error); ok && panicErr == bytes.ErrTooLarge { + err = panicErr + } else { + panic(e) + } + }() + _, err = buf.ReadFrom(r) + return buf.Bytes(), err +} + +// ReadAll reads from r until an error or EOF and returns the data it read. +// A successful call returns err == nil, not err == EOF. Because ReadAll is +// defined to read from src until EOF, it does not treat an EOF from Read +// as an error to be reported. 
+func ReadAll(r io.Reader) ([]byte, error) { + return readAll(r, bytes.MinRead) +} + +// WriteFile writes data to a file named by filename. +// If the file does not exist, WriteFile creates it with permissions perm; +// otherwise WriteFile truncates it before writing. +func (a Afero) WriteFile(filename string, data []byte, perm os.FileMode) error { + return WriteFile(a.Fs, filename, data, perm) +} + +func WriteFile(fs Fs, filename string, data []byte, perm os.FileMode) error { + f, err := fs.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm) + if err != nil { + return err + } + n, err := f.Write(data) + if err == nil && n < len(data) { + err = io.ErrShortWrite + } + if err1 := f.Close(); err == nil { + err = err1 + } + return err +} + +// Random number state. +// We generate random temporary file names so that there's a good +// chance the file doesn't exist yet - keeps the number of tries in +// TempFile to a minimum. +var ( + randNum uint32 + randmu sync.Mutex +) + +func reseed() uint32 { + return uint32(time.Now().UnixNano() + int64(os.Getpid())) +} + +func nextRandom() string { + randmu.Lock() + r := randNum + if r == 0 { + r = reseed() + } + r = r*1664525 + 1013904223 // constants from Numerical Recipes + randNum = r + randmu.Unlock() + return strconv.Itoa(int(1e9 + r%1e9))[1:] +} + +// TempFile creates a new temporary file in the directory dir, +// opens the file for reading and writing, and returns the resulting *os.File. +// The filename is generated by taking pattern and adding a random +// string to the end. If pattern includes a "*", the random string +// replaces the last "*". +// If dir is the empty string, TempFile uses the default directory +// for temporary files (see os.TempDir). +// Multiple programs calling TempFile simultaneously +// will not choose the same file. The caller can use f.Name() +// to find the pathname of the file. It is the caller's responsibility +// to remove the file when no longer needed. 
+func (a Afero) TempFile(dir, pattern string) (f File, err error) { + return TempFile(a.Fs, dir, pattern) +} + +func TempFile(fs Fs, dir, pattern string) (f File, err error) { + if dir == "" { + dir = os.TempDir() + } + + var prefix, suffix string + if pos := strings.LastIndex(pattern, "*"); pos != -1 { + prefix, suffix = pattern[:pos], pattern[pos+1:] + } else { + prefix = pattern + } + + nconflict := 0 + for i := 0; i < 10000; i++ { + name := filepath.Join(dir, prefix+nextRandom()+suffix) + f, err = fs.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0o600) + if os.IsExist(err) { + if nconflict++; nconflict > 10 { + randmu.Lock() + randNum = reseed() + randmu.Unlock() + } + continue + } + break + } + return +} + +// TempDir creates a new temporary directory in the directory dir +// with a name beginning with prefix and returns the path of the +// new directory. If dir is the empty string, TempDir uses the +// default directory for temporary files (see os.TempDir). +// Multiple programs calling TempDir simultaneously +// will not choose the same directory. It is the caller's responsibility +// to remove the directory when no longer needed. +func (a Afero) TempDir(dir, prefix string) (name string, err error) { + return TempDir(a.Fs, dir, prefix) +} + +func TempDir(fs Fs, dir, prefix string) (name string, err error) { + if dir == "" { + dir = os.TempDir() + } + + nconflict := 0 + for i := 0; i < 10000; i++ { + try := filepath.Join(dir, prefix+nextRandom()) + err = fs.Mkdir(try, 0o700) + if os.IsExist(err) { + if nconflict++; nconflict > 10 { + randmu.Lock() + randNum = reseed() + randmu.Unlock() + } + continue + } + if err == nil { + name = try + } + break + } + return +} diff --git a/vendor/github.com/spf13/afero/lstater.go b/vendor/github.com/spf13/afero/lstater.go new file mode 100644 index 0000000..89c1bfc --- /dev/null +++ b/vendor/github.com/spf13/afero/lstater.go @@ -0,0 +1,27 @@ +// Copyright © 2018 Steve Francia . 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" +) + +// Lstater is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It will call Lstat if the filesystem iself is, or it delegates to, the os filesystem. +// Else it will call Stat. +// In addtion to the FileInfo, it will return a boolean telling whether Lstat was called or not. +type Lstater interface { + LstatIfPossible(name string) (os.FileInfo, bool, error) +} diff --git a/vendor/github.com/spf13/afero/match.go b/vendor/github.com/spf13/afero/match.go new file mode 100644 index 0000000..7db4b7d --- /dev/null +++ b/vendor/github.com/spf13/afero/match.go @@ -0,0 +1,110 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2009 The Go Authors. All rights reserved. + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package afero + +import ( + "path/filepath" + "sort" + "strings" +) + +// Glob returns the names of all files matching pattern or nil +// if there is no matching file. The syntax of patterns is the same +// as in Match. The pattern may describe hierarchical names such as +// /usr/*/bin/ed (assuming the Separator is '/'). +// +// Glob ignores file system errors such as I/O errors reading directories. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// This was adapted from (http://golang.org/pkg/path/filepath) and uses several +// built-ins from that package. +func Glob(fs Fs, pattern string) (matches []string, err error) { + if !hasMeta(pattern) { + // Lstat not supported by a ll filesystems. + if _, err = lstatIfPossible(fs, pattern); err != nil { + return nil, nil + } + return []string{pattern}, nil + } + + dir, file := filepath.Split(pattern) + switch dir { + case "": + dir = "." + case string(filepath.Separator): + // nothing + default: + dir = dir[0 : len(dir)-1] // chop off trailing separator + } + + if !hasMeta(dir) { + return glob(fs, dir, file, nil) + } + + var m []string + m, err = Glob(fs, dir) + if err != nil { + return + } + for _, d := range m { + matches, err = glob(fs, d, file, matches) + if err != nil { + return + } + } + return +} + +// glob searches for files matching pattern in the directory dir +// and appends them to matches. If the directory cannot be +// opened, it returns the existing matches. New matches are +// added in lexicographical order. 
+func glob(fs Fs, dir, pattern string, matches []string) (m []string, e error) { + m = matches + fi, err := fs.Stat(dir) + if err != nil { + return + } + if !fi.IsDir() { + return + } + d, err := fs.Open(dir) + if err != nil { + return + } + defer d.Close() + + names, _ := d.Readdirnames(-1) + sort.Strings(names) + + for _, n := range names { + matched, err := filepath.Match(pattern, n) + if err != nil { + return m, err + } + if matched { + m = append(m, filepath.Join(dir, n)) + } + } + return +} + +// hasMeta reports whether path contains any of the magic characters +// recognized by Match. +func hasMeta(path string) bool { + // TODO(niemeyer): Should other magic characters be added here? + return strings.ContainsAny(path, "*?[") +} diff --git a/vendor/github.com/spf13/afero/mem/dir.go b/vendor/github.com/spf13/afero/mem/dir.go new file mode 100644 index 0000000..e104013 --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/dir.go @@ -0,0 +1,37 @@ +// Copyright © 2014 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mem + +type Dir interface { + Len() int + Names() []string + Files() []*FileData + Add(*FileData) + Remove(*FileData) +} + +func RemoveFromMemDir(dir *FileData, f *FileData) { + dir.memDir.Remove(f) +} + +func AddToMemDir(dir *FileData, f *FileData) { + dir.memDir.Add(f) +} + +func InitializeDir(d *FileData) { + if d.memDir == nil { + d.dir = true + d.memDir = &DirMap{} + } +} diff --git a/vendor/github.com/spf13/afero/mem/dirmap.go b/vendor/github.com/spf13/afero/mem/dirmap.go new file mode 100644 index 0000000..03a57ee --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/dirmap.go @@ -0,0 +1,43 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mem + +import "sort" + +type DirMap map[string]*FileData + +func (m DirMap) Len() int { return len(m) } +func (m DirMap) Add(f *FileData) { m[f.name] = f } +func (m DirMap) Remove(f *FileData) { delete(m, f.name) } +func (m DirMap) Files() (files []*FileData) { + for _, f := range m { + files = append(files, f) + } + sort.Sort(filesSorter(files)) + return files +} + +// implement sort.Interface for []*FileData +type filesSorter []*FileData + +func (s filesSorter) Len() int { return len(s) } +func (s filesSorter) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s filesSorter) Less(i, j int) bool { return s[i].name < s[j].name } + +func (m DirMap) Names() (names []string) { + for x := range m { + names = append(names, x) + } + return names +} diff --git a/vendor/github.com/spf13/afero/mem/file.go b/vendor/github.com/spf13/afero/mem/file.go new file mode 100644 index 0000000..62fe449 --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/file.go @@ -0,0 +1,359 @@ +// Copyright © 2015 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mem + +import ( + "bytes" + "errors" + "io" + "io/fs" + "os" + "path/filepath" + "sync" + "sync/atomic" + "time" + + "github.com/spf13/afero/internal/common" +) + +const FilePathSeparator = string(filepath.Separator) + +var _ fs.ReadDirFile = &File{} + +type File struct { + // atomic requires 64-bit alignment for struct field access + at int64 + readDirCount int64 + closed bool + readOnly bool + fileData *FileData +} + +func NewFileHandle(data *FileData) *File { + return &File{fileData: data} +} + +func NewReadOnlyFileHandle(data *FileData) *File { + return &File{fileData: data, readOnly: true} +} + +func (f File) Data() *FileData { + return f.fileData +} + +type FileData struct { + sync.Mutex + name string + data []byte + memDir Dir + dir bool + mode os.FileMode + modtime time.Time + uid int + gid int +} + +func (d *FileData) Name() string { + d.Lock() + defer d.Unlock() + return d.name +} + +func CreateFile(name string) *FileData { + return &FileData{name: name, mode: os.ModeTemporary, modtime: time.Now()} +} + +func CreateDir(name string) *FileData { + return &FileData{name: name, memDir: &DirMap{}, dir: true, modtime: time.Now()} +} + +func ChangeFileName(f *FileData, newname string) { + f.Lock() + f.name = newname + f.Unlock() +} + +func SetMode(f *FileData, mode os.FileMode) { + f.Lock() + f.mode = mode + f.Unlock() +} + +func SetModTime(f *FileData, mtime time.Time) { + f.Lock() + setModTime(f, mtime) + f.Unlock() +} + +func setModTime(f *FileData, mtime time.Time) { + f.modtime = mtime +} + +func SetUID(f *FileData, uid int) { + f.Lock() + f.uid = uid + f.Unlock() +} + +func SetGID(f *FileData, gid int) { + f.Lock() + f.gid = gid + f.Unlock() +} + +func GetFileInfo(f *FileData) *FileInfo { + return &FileInfo{f} +} + +func (f *File) Open() error { + atomic.StoreInt64(&f.at, 0) + atomic.StoreInt64(&f.readDirCount, 0) + f.fileData.Lock() + f.closed = false + f.fileData.Unlock() + return nil +} + +func (f *File) Close() error { + f.fileData.Lock() + 
f.closed = true + if !f.readOnly { + setModTime(f.fileData, time.Now()) + } + f.fileData.Unlock() + return nil +} + +func (f *File) Name() string { + return f.fileData.Name() +} + +func (f *File) Stat() (os.FileInfo, error) { + return &FileInfo{f.fileData}, nil +} + +func (f *File) Sync() error { + return nil +} + +func (f *File) Readdir(count int) (res []os.FileInfo, err error) { + if !f.fileData.dir { + return nil, &os.PathError{Op: "readdir", Path: f.fileData.name, Err: errors.New("not a dir")} + } + var outLength int64 + + f.fileData.Lock() + files := f.fileData.memDir.Files()[f.readDirCount:] + if count > 0 { + if len(files) < count { + outLength = int64(len(files)) + } else { + outLength = int64(count) + } + if len(files) == 0 { + err = io.EOF + } + } else { + outLength = int64(len(files)) + } + f.readDirCount += outLength + f.fileData.Unlock() + + res = make([]os.FileInfo, outLength) + for i := range res { + res[i] = &FileInfo{files[i]} + } + + return res, err +} + +func (f *File) Readdirnames(n int) (names []string, err error) { + fi, err := f.Readdir(n) + names = make([]string, len(fi)) + for i, f := range fi { + _, names[i] = filepath.Split(f.Name()) + } + return names, err +} + +// Implements fs.ReadDirFile +func (f *File) ReadDir(n int) ([]fs.DirEntry, error) { + fi, err := f.Readdir(n) + if err != nil { + return nil, err + } + di := make([]fs.DirEntry, len(fi)) + for i, f := range fi { + di[i] = common.FileInfoDirEntry{FileInfo: f} + } + return di, nil +} + +func (f *File) Read(b []byte) (n int, err error) { + f.fileData.Lock() + defer f.fileData.Unlock() + if f.closed { + return 0, ErrFileClosed + } + if len(b) > 0 && int(f.at) == len(f.fileData.data) { + return 0, io.EOF + } + if int(f.at) > len(f.fileData.data) { + return 0, io.ErrUnexpectedEOF + } + if len(f.fileData.data)-int(f.at) >= len(b) { + n = len(b) + } else { + n = len(f.fileData.data) - int(f.at) + } + copy(b, f.fileData.data[f.at:f.at+int64(n)]) + atomic.AddInt64(&f.at, int64(n)) + 
return +} + +func (f *File) ReadAt(b []byte, off int64) (n int, err error) { + prev := atomic.LoadInt64(&f.at) + atomic.StoreInt64(&f.at, off) + n, err = f.Read(b) + atomic.StoreInt64(&f.at, prev) + return +} + +func (f *File) Truncate(size int64) error { + if f.closed { + return ErrFileClosed + } + if f.readOnly { + return &os.PathError{Op: "truncate", Path: f.fileData.name, Err: errors.New("file handle is read only")} + } + if size < 0 { + return ErrOutOfRange + } + f.fileData.Lock() + defer f.fileData.Unlock() + if size > int64(len(f.fileData.data)) { + diff := size - int64(len(f.fileData.data)) + f.fileData.data = append(f.fileData.data, bytes.Repeat([]byte{0o0}, int(diff))...) + } else { + f.fileData.data = f.fileData.data[0:size] + } + setModTime(f.fileData, time.Now()) + return nil +} + +func (f *File) Seek(offset int64, whence int) (int64, error) { + if f.closed { + return 0, ErrFileClosed + } + switch whence { + case io.SeekStart: + atomic.StoreInt64(&f.at, offset) + case io.SeekCurrent: + atomic.AddInt64(&f.at, offset) + case io.SeekEnd: + atomic.StoreInt64(&f.at, int64(len(f.fileData.data))+offset) + } + return f.at, nil +} + +func (f *File) Write(b []byte) (n int, err error) { + if f.closed { + return 0, ErrFileClosed + } + if f.readOnly { + return 0, &os.PathError{Op: "write", Path: f.fileData.name, Err: errors.New("file handle is read only")} + } + n = len(b) + cur := atomic.LoadInt64(&f.at) + f.fileData.Lock() + defer f.fileData.Unlock() + diff := cur - int64(len(f.fileData.data)) + var tail []byte + if n+int(cur) < len(f.fileData.data) { + tail = f.fileData.data[n+int(cur):] + } + if diff > 0 { + f.fileData.data = append(f.fileData.data, append(bytes.Repeat([]byte{0o0}, int(diff)), b...)...) + f.fileData.data = append(f.fileData.data, tail...) + } else { + f.fileData.data = append(f.fileData.data[:cur], b...) + f.fileData.data = append(f.fileData.data, tail...) 
+ } + setModTime(f.fileData, time.Now()) + + atomic.AddInt64(&f.at, int64(n)) + return +} + +func (f *File) WriteAt(b []byte, off int64) (n int, err error) { + atomic.StoreInt64(&f.at, off) + return f.Write(b) +} + +func (f *File) WriteString(s string) (ret int, err error) { + return f.Write([]byte(s)) +} + +func (f *File) Info() *FileInfo { + return &FileInfo{f.fileData} +} + +type FileInfo struct { + *FileData +} + +// Implements os.FileInfo +func (s *FileInfo) Name() string { + s.Lock() + _, name := filepath.Split(s.name) + s.Unlock() + return name +} + +func (s *FileInfo) Mode() os.FileMode { + s.Lock() + defer s.Unlock() + return s.mode +} + +func (s *FileInfo) ModTime() time.Time { + s.Lock() + defer s.Unlock() + return s.modtime +} + +func (s *FileInfo) IsDir() bool { + s.Lock() + defer s.Unlock() + return s.dir +} +func (s *FileInfo) Sys() interface{} { return nil } +func (s *FileInfo) Size() int64 { + if s.IsDir() { + return int64(42) + } + s.Lock() + defer s.Unlock() + return int64(len(s.data)) +} + +var ( + ErrFileClosed = errors.New("File is closed") + ErrOutOfRange = errors.New("out of range") + ErrTooLarge = errors.New("too large") + ErrFileNotFound = os.ErrNotExist + ErrFileExists = os.ErrExist + ErrDestinationExists = os.ErrExist +) diff --git a/vendor/github.com/spf13/afero/memmap.go b/vendor/github.com/spf13/afero/memmap.go new file mode 100644 index 0000000..d6c744e --- /dev/null +++ b/vendor/github.com/spf13/afero/memmap.go @@ -0,0 +1,465 @@ +// Copyright © 2014 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "fmt" + "io" + + "log" + "os" + "path/filepath" + + "sort" + "strings" + "sync" + "time" + + "github.com/spf13/afero/mem" +) + +const chmodBits = os.ModePerm | os.ModeSetuid | os.ModeSetgid | os.ModeSticky // Only a subset of bits are allowed to be changed. Documented under os.Chmod() + +type MemMapFs struct { + mu sync.RWMutex + data map[string]*mem.FileData + init sync.Once +} + +func NewMemMapFs() Fs { + return &MemMapFs{} +} + +func (m *MemMapFs) getData() map[string]*mem.FileData { + m.init.Do(func() { + m.data = make(map[string]*mem.FileData) + // Root should always exist, right? + // TODO: what about windows? + root := mem.CreateDir(FilePathSeparator) + mem.SetMode(root, os.ModeDir|0o755) + m.data[FilePathSeparator] = root + }) + return m.data +} + +func (*MemMapFs) Name() string { return "MemMapFS" } + +func (m *MemMapFs) Create(name string) (File, error) { + name = normalizePath(name) + m.mu.Lock() + file := mem.CreateFile(name) + m.getData()[name] = file + m.registerWithParent(file, 0) + m.mu.Unlock() + return mem.NewFileHandle(file), nil +} + +func (m *MemMapFs) unRegisterWithParent(fileName string) error { + f, err := m.lockfreeOpen(fileName) + if err != nil { + return err + } + parent := m.findParent(f) + if parent == nil { + log.Panic("parent of ", f.Name(), " is nil") + } + + parent.Lock() + mem.RemoveFromMemDir(parent, f) + parent.Unlock() + return nil +} + +func (m *MemMapFs) findParent(f *mem.FileData) *mem.FileData { + pdir, _ := filepath.Split(f.Name()) + pdir = filepath.Clean(pdir) + pfile, err := m.lockfreeOpen(pdir) + if err != nil { + return nil + } + return pfile +} + +func (m *MemMapFs) findDescendants(name string) []*mem.FileData { + fData := m.getData() + descendants := make([]*mem.FileData, 0, len(fData)) + for p, dFile := range fData { + if strings.HasPrefix(p, name+FilePathSeparator) { + descendants 
= append(descendants, dFile) + } + } + + sort.Slice(descendants, func(i, j int) bool { + cur := len(strings.Split(descendants[i].Name(), FilePathSeparator)) + next := len(strings.Split(descendants[j].Name(), FilePathSeparator)) + return cur < next + }) + + return descendants +} + +func (m *MemMapFs) registerWithParent(f *mem.FileData, perm os.FileMode) { + if f == nil { + return + } + parent := m.findParent(f) + if parent == nil { + pdir := filepath.Dir(filepath.Clean(f.Name())) + err := m.lockfreeMkdir(pdir, perm) + if err != nil { + // log.Println("Mkdir error:", err) + return + } + parent, err = m.lockfreeOpen(pdir) + if err != nil { + // log.Println("Open after Mkdir error:", err) + return + } + } + + parent.Lock() + mem.InitializeDir(parent) + mem.AddToMemDir(parent, f) + parent.Unlock() +} + +func (m *MemMapFs) lockfreeMkdir(name string, perm os.FileMode) error { + name = normalizePath(name) + x, ok := m.getData()[name] + if ok { + // Only return ErrFileExists if it's a file, not a directory. + i := mem.FileInfo{FileData: x} + if !i.IsDir() { + return ErrFileExists + } + } else { + item := mem.CreateDir(name) + mem.SetMode(item, os.ModeDir|perm) + m.getData()[name] = item + m.registerWithParent(item, perm) + } + return nil +} + +func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error { + perm &= chmodBits + name = normalizePath(name) + + m.mu.RLock() + _, ok := m.getData()[name] + m.mu.RUnlock() + if ok { + return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists} + } + + m.mu.Lock() + // Dobule check that it doesn't exist. 
+ if _, ok := m.getData()[name]; ok { + m.mu.Unlock() + return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists} + } + item := mem.CreateDir(name) + mem.SetMode(item, os.ModeDir|perm) + m.getData()[name] = item + m.registerWithParent(item, perm) + m.mu.Unlock() + + return m.setFileMode(name, perm|os.ModeDir) +} + +func (m *MemMapFs) MkdirAll(path string, perm os.FileMode) error { + err := m.Mkdir(path, perm) + if err != nil { + if err.(*os.PathError).Err == ErrFileExists { + return nil + } + return err + } + return nil +} + +// Handle some relative paths +func normalizePath(path string) string { + path = filepath.Clean(path) + + switch path { + case ".": + return FilePathSeparator + case "..": + return FilePathSeparator + default: + return path + } +} + +func (m *MemMapFs) Open(name string) (File, error) { + f, err := m.open(name) + if f != nil { + return mem.NewReadOnlyFileHandle(f), err + } + return nil, err +} + +func (m *MemMapFs) openWrite(name string) (File, error) { + f, err := m.open(name) + if f != nil { + return mem.NewFileHandle(f), err + } + return nil, err +} + +func (m *MemMapFs) open(name string) (*mem.FileData, error) { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileNotFound} + } + return f, nil +} + +func (m *MemMapFs) lockfreeOpen(name string) (*mem.FileData, error) { + name = normalizePath(name) + f, ok := m.getData()[name] + if ok { + return f, nil + } else { + return nil, ErrFileNotFound + } +} + +func (m *MemMapFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + perm &= chmodBits + chmod := false + file, err := m.openWrite(name) + if err == nil && (flag&os.O_EXCL > 0) { + return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileExists} + } + if os.IsNotExist(err) && (flag&os.O_CREATE > 0) { + file, err = m.Create(name) + chmod = true + } + if err != nil { + return nil, err + } + if flag == 
os.O_RDONLY { + file = mem.NewReadOnlyFileHandle(file.(*mem.File).Data()) + } + if flag&os.O_APPEND > 0 { + _, err = file.Seek(0, io.SeekEnd) + if err != nil { + file.Close() + return nil, err + } + } + if flag&os.O_TRUNC > 0 && flag&(os.O_RDWR|os.O_WRONLY) > 0 { + err = file.Truncate(0) + if err != nil { + file.Close() + return nil, err + } + } + if chmod { + return file, m.setFileMode(name, perm) + } + return file, nil +} + +func (m *MemMapFs) Remove(name string) error { + name = normalizePath(name) + + m.mu.Lock() + defer m.mu.Unlock() + + if _, ok := m.getData()[name]; ok { + err := m.unRegisterWithParent(name) + if err != nil { + return &os.PathError{Op: "remove", Path: name, Err: err} + } + delete(m.getData(), name) + } else { + return &os.PathError{Op: "remove", Path: name, Err: os.ErrNotExist} + } + return nil +} + +func (m *MemMapFs) RemoveAll(path string) error { + path = normalizePath(path) + m.mu.Lock() + m.unRegisterWithParent(path) + m.mu.Unlock() + + m.mu.RLock() + defer m.mu.RUnlock() + + for p := range m.getData() { + if p == path || strings.HasPrefix(p, path+FilePathSeparator) { + m.mu.RUnlock() + m.mu.Lock() + delete(m.getData(), p) + m.mu.Unlock() + m.mu.RLock() + } + } + return nil +} + +func (m *MemMapFs) Rename(oldname, newname string) error { + oldname = normalizePath(oldname) + newname = normalizePath(newname) + + if oldname == newname { + return nil + } + + m.mu.RLock() + defer m.mu.RUnlock() + if _, ok := m.getData()[oldname]; ok { + m.mu.RUnlock() + m.mu.Lock() + err := m.unRegisterWithParent(oldname) + if err != nil { + return err + } + + fileData := m.getData()[oldname] + mem.ChangeFileName(fileData, newname) + m.getData()[newname] = fileData + + err = m.renameDescendants(oldname, newname) + if err != nil { + return err + } + + delete(m.getData(), oldname) + + m.registerWithParent(fileData, 0) + m.mu.Unlock() + m.mu.RLock() + } else { + return &os.PathError{Op: "rename", Path: oldname, Err: ErrFileNotFound} + } + return nil +} + +func 
(m *MemMapFs) renameDescendants(oldname, newname string) error { + descendants := m.findDescendants(oldname) + removes := make([]string, 0, len(descendants)) + for _, desc := range descendants { + descNewName := strings.Replace(desc.Name(), oldname, newname, 1) + err := m.unRegisterWithParent(desc.Name()) + if err != nil { + return err + } + + removes = append(removes, desc.Name()) + mem.ChangeFileName(desc, descNewName) + m.getData()[descNewName] = desc + + m.registerWithParent(desc, 0) + } + for _, r := range removes { + delete(m.getData(), r) + } + + return nil +} + +func (m *MemMapFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + fileInfo, err := m.Stat(name) + return fileInfo, false, err +} + +func (m *MemMapFs) Stat(name string) (os.FileInfo, error) { + f, err := m.Open(name) + if err != nil { + return nil, err + } + fi := mem.GetFileInfo(f.(*mem.File).Data()) + return fi, nil +} + +func (m *MemMapFs) Chmod(name string, mode os.FileMode) error { + mode &= chmodBits + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} + } + prevOtherBits := mem.GetFileInfo(f).Mode() & ^chmodBits + + mode = prevOtherBits | mode + return m.setFileMode(name, mode) +} + +func (m *MemMapFs) setFileMode(name string, mode os.FileMode) error { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} + } + + m.mu.Lock() + mem.SetMode(f, mode) + m.mu.Unlock() + + return nil +} + +func (m *MemMapFs) Chown(name string, uid, gid int) error { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chown", Path: name, Err: ErrFileNotFound} + } + + mem.SetUID(f, uid) + mem.SetGID(f, gid) + + return nil +} + +func (m *MemMapFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + name = 
normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chtimes", Path: name, Err: ErrFileNotFound} + } + + m.mu.Lock() + mem.SetModTime(f, mtime) + m.mu.Unlock() + + return nil +} + +func (m *MemMapFs) List() { + for _, x := range m.data { + y := mem.FileInfo{FileData: x} + fmt.Println(x.Name(), y.Size()) + } +} diff --git a/vendor/github.com/spf13/afero/os.go b/vendor/github.com/spf13/afero/os.go new file mode 100644 index 0000000..f136632 --- /dev/null +++ b/vendor/github.com/spf13/afero/os.go @@ -0,0 +1,113 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "time" +) + +var _ Lstater = (*OsFs)(nil) + +// OsFs is a Fs implementation that uses functions provided by the os package. +// +// For details in any method, check the documentation of the os package +// (http://golang.org/pkg/os/). 
+type OsFs struct{} + +func NewOsFs() Fs { + return &OsFs{} +} + +func (OsFs) Name() string { return "OsFs" } + +func (OsFs) Create(name string) (File, error) { + f, e := os.Create(name) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) Mkdir(name string, perm os.FileMode) error { + return os.Mkdir(name, perm) +} + +func (OsFs) MkdirAll(path string, perm os.FileMode) error { + return os.MkdirAll(path, perm) +} + +func (OsFs) Open(name string) (File, error) { + f, e := os.Open(name) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + f, e := os.OpenFile(name, flag, perm) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) Remove(name string) error { + return os.Remove(name) +} + +func (OsFs) RemoveAll(path string) error { + return os.RemoveAll(path) +} + +func (OsFs) Rename(oldname, newname string) error { + return os.Rename(oldname, newname) +} + +func (OsFs) Stat(name string) (os.FileInfo, error) { + return os.Stat(name) +} + +func (OsFs) Chmod(name string, mode os.FileMode) error { + return os.Chmod(name, mode) +} + +func (OsFs) Chown(name string, uid, gid int) error { + return os.Chown(name, uid, gid) +} + +func (OsFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + return os.Chtimes(name, atime, mtime) +} + +func (OsFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + fi, err := os.Lstat(name) + return fi, true, err +} + +func (OsFs) SymlinkIfPossible(oldname, newname string) error { + return os.Symlink(oldname, newname) +} + 
+func (OsFs) ReadlinkIfPossible(name string) (string, error) { + return os.Readlink(name) +} diff --git a/vendor/github.com/spf13/afero/path.go b/vendor/github.com/spf13/afero/path.go new file mode 100644 index 0000000..18f60a0 --- /dev/null +++ b/vendor/github.com/spf13/afero/path.go @@ -0,0 +1,106 @@ +// Copyright ©2015 The Go Authors +// Copyright ©2015 Steve Francia +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "path/filepath" + "sort" +) + +// readDirNames reads the directory named by dirname and returns +// a sorted list of directory entries. 
+// adapted from https://golang.org/src/path/filepath/path.go +func readDirNames(fs Fs, dirname string) ([]string, error) { + f, err := fs.Open(dirname) + if err != nil { + return nil, err + } + names, err := f.Readdirnames(-1) + f.Close() + if err != nil { + return nil, err + } + sort.Strings(names) + return names, nil +} + +// walk recursively descends path, calling walkFn +// adapted from https://golang.org/src/path/filepath/path.go +func walk(fs Fs, path string, info os.FileInfo, walkFn filepath.WalkFunc) error { + err := walkFn(path, info, nil) + if err != nil { + if info.IsDir() && err == filepath.SkipDir { + return nil + } + return err + } + + if !info.IsDir() { + return nil + } + + names, err := readDirNames(fs, path) + if err != nil { + return walkFn(path, info, err) + } + + for _, name := range names { + filename := filepath.Join(path, name) + fileInfo, err := lstatIfPossible(fs, filename) + if err != nil { + if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir { + return err + } + } else { + err = walk(fs, filename, fileInfo, walkFn) + if err != nil { + if !fileInfo.IsDir() || err != filepath.SkipDir { + return err + } + } + } + } + return nil +} + +// if the filesystem supports it, use Lstat, else use fs.Stat +func lstatIfPossible(fs Fs, path string) (os.FileInfo, error) { + if lfs, ok := fs.(Lstater); ok { + fi, _, err := lfs.LstatIfPossible(path) + return fi, err + } + return fs.Stat(path) +} + +// Walk walks the file tree rooted at root, calling walkFn for each file or +// directory in the tree, including root. All errors that arise visiting files +// and directories are filtered by walkFn. The files are walked in lexical +// order, which makes the output deterministic but means that for very +// large directories Walk can be inefficient. +// Walk does not follow symbolic links. 
+ +func (a Afero) Walk(root string, walkFn filepath.WalkFunc) error { + return Walk(a.Fs, root, walkFn) +} + +func Walk(fs Fs, root string, walkFn filepath.WalkFunc) error { + info, err := lstatIfPossible(fs, root) + if err != nil { + return walkFn(root, nil, err) + } + return walk(fs, root, info, walkFn) +} diff --git a/vendor/github.com/spf13/afero/readonlyfs.go b/vendor/github.com/spf13/afero/readonlyfs.go new file mode 100644 index 0000000..bd8f926 --- /dev/null +++ b/vendor/github.com/spf13/afero/readonlyfs.go @@ -0,0 +1,96 @@ +package afero + +import ( + "os" + "syscall" + "time" +) + +var _ Lstater = (*ReadOnlyFs)(nil) + +type ReadOnlyFs struct { + source Fs +} + +func NewReadOnlyFs(source Fs) Fs { + return &ReadOnlyFs{source: source} +} + +func (r *ReadOnlyFs) ReadDir(name string) ([]os.FileInfo, error) { + return ReadDir(r.source, name) +} + +func (r *ReadOnlyFs) Chtimes(n string, a, m time.Time) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Chmod(n string, m os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Chown(n string, uid, gid int) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Name() string { + return "ReadOnlyFilter" +} + +func (r *ReadOnlyFs) Stat(name string) (os.FileInfo, error) { + return r.source.Stat(name) +} + +func (r *ReadOnlyFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + if lsf, ok := r.source.(Lstater); ok { + return lsf.LstatIfPossible(name) + } + fi, err := r.Stat(name) + return fi, false, err +} + +func (r *ReadOnlyFs) SymlinkIfPossible(oldname, newname string) error { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (r *ReadOnlyFs) ReadlinkIfPossible(name string) (string, error) { + if srdr, ok := r.source.(LinkReader); ok { + return srdr.ReadlinkIfPossible(name) + } + + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} + +func (r *ReadOnlyFs) Rename(o, n string) error { + return syscall.EPERM +} + 
+func (r *ReadOnlyFs) RemoveAll(p string) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Remove(n string) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + return nil, syscall.EPERM + } + return r.source.OpenFile(name, flag, perm) +} + +func (r *ReadOnlyFs) Open(n string) (File, error) { + return r.source.Open(n) +} + +func (r *ReadOnlyFs) Mkdir(n string, p os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) MkdirAll(n string, p os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Create(n string) (File, error) { + return nil, syscall.EPERM +} diff --git a/vendor/github.com/spf13/afero/regexpfs.go b/vendor/github.com/spf13/afero/regexpfs.go new file mode 100644 index 0000000..218f3b2 --- /dev/null +++ b/vendor/github.com/spf13/afero/regexpfs.go @@ -0,0 +1,223 @@ +package afero + +import ( + "os" + "regexp" + "syscall" + "time" +) + +// The RegexpFs filters files (not directories) by regular expression. Only +// files matching the given regexp will be allowed, all others get a ENOENT error ( +// "No such file or directory"). 
+type RegexpFs struct { + re *regexp.Regexp + source Fs +} + +func NewRegexpFs(source Fs, re *regexp.Regexp) Fs { + return &RegexpFs{source: source, re: re} +} + +type RegexpFile struct { + f File + re *regexp.Regexp +} + +func (r *RegexpFs) matchesName(name string) error { + if r.re == nil { + return nil + } + if r.re.MatchString(name) { + return nil + } + return syscall.ENOENT +} + +func (r *RegexpFs) dirOrMatches(name string) error { + dir, err := IsDir(r.source, name) + if err != nil { + return err + } + if dir { + return nil + } + return r.matchesName(name) +} + +func (r *RegexpFs) Chtimes(name string, a, m time.Time) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chtimes(name, a, m) +} + +func (r *RegexpFs) Chmod(name string, mode os.FileMode) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chmod(name, mode) +} + +func (r *RegexpFs) Chown(name string, uid, gid int) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chown(name, uid, gid) +} + +func (r *RegexpFs) Name() string { + return "RegexpFs" +} + +func (r *RegexpFs) Stat(name string) (os.FileInfo, error) { + if err := r.dirOrMatches(name); err != nil { + return nil, err + } + return r.source.Stat(name) +} + +func (r *RegexpFs) Rename(oldname, newname string) error { + dir, err := IsDir(r.source, oldname) + if err != nil { + return err + } + if dir { + return nil + } + if err := r.matchesName(oldname); err != nil { + return err + } + if err := r.matchesName(newname); err != nil { + return err + } + return r.source.Rename(oldname, newname) +} + +func (r *RegexpFs) RemoveAll(p string) error { + dir, err := IsDir(r.source, p) + if err != nil { + return err + } + if !dir { + if err := r.matchesName(p); err != nil { + return err + } + } + return r.source.RemoveAll(p) +} + +func (r *RegexpFs) Remove(name string) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + 
return r.source.Remove(name) +} + +func (r *RegexpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + if err := r.dirOrMatches(name); err != nil { + return nil, err + } + return r.source.OpenFile(name, flag, perm) +} + +func (r *RegexpFs) Open(name string) (File, error) { + dir, err := IsDir(r.source, name) + if err != nil { + return nil, err + } + if !dir { + if err := r.matchesName(name); err != nil { + return nil, err + } + } + f, err := r.source.Open(name) + if err != nil { + return nil, err + } + return &RegexpFile{f: f, re: r.re}, nil +} + +func (r *RegexpFs) Mkdir(n string, p os.FileMode) error { + return r.source.Mkdir(n, p) +} + +func (r *RegexpFs) MkdirAll(n string, p os.FileMode) error { + return r.source.MkdirAll(n, p) +} + +func (r *RegexpFs) Create(name string) (File, error) { + if err := r.matchesName(name); err != nil { + return nil, err + } + return r.source.Create(name) +} + +func (f *RegexpFile) Close() error { + return f.f.Close() +} + +func (f *RegexpFile) Read(s []byte) (int, error) { + return f.f.Read(s) +} + +func (f *RegexpFile) ReadAt(s []byte, o int64) (int, error) { + return f.f.ReadAt(s, o) +} + +func (f *RegexpFile) Seek(o int64, w int) (int64, error) { + return f.f.Seek(o, w) +} + +func (f *RegexpFile) Write(s []byte) (int, error) { + return f.f.Write(s) +} + +func (f *RegexpFile) WriteAt(s []byte, o int64) (int, error) { + return f.f.WriteAt(s, o) +} + +func (f *RegexpFile) Name() string { + return f.f.Name() +} + +func (f *RegexpFile) Readdir(c int) (fi []os.FileInfo, err error) { + var rfi []os.FileInfo + rfi, err = f.f.Readdir(c) + if err != nil { + return nil, err + } + for _, i := range rfi { + if i.IsDir() || f.re.MatchString(i.Name()) { + fi = append(fi, i) + } + } + return fi, nil +} + +func (f *RegexpFile) Readdirnames(c int) (n []string, err error) { + fi, err := f.Readdir(c) + if err != nil { + return nil, err + } + for _, s := range fi { + n = append(n, s.Name()) + } + return n, nil +} + +func (f 
*RegexpFile) Stat() (os.FileInfo, error) { + return f.f.Stat() +} + +func (f *RegexpFile) Sync() error { + return f.f.Sync() +} + +func (f *RegexpFile) Truncate(s int64) error { + return f.f.Truncate(s) +} + +func (f *RegexpFile) WriteString(s string) (int, error) { + return f.f.WriteString(s) +} diff --git a/vendor/github.com/spf13/afero/symlink.go b/vendor/github.com/spf13/afero/symlink.go new file mode 100644 index 0000000..aa6ae12 --- /dev/null +++ b/vendor/github.com/spf13/afero/symlink.go @@ -0,0 +1,55 @@ +// Copyright © 2018 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "errors" +) + +// Symlinker is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It indicates support for 3 symlink related interfaces that implement the +// behaviors of the os methods: +// - Lstat +// - Symlink, and +// - Readlink +type Symlinker interface { + Lstater + Linker + LinkReader +} + +// Linker is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It will call Symlink if the filesystem itself is, or it delegates to, the os filesystem, +// or the filesystem otherwise supports Symlink's. +type Linker interface { + SymlinkIfPossible(oldname, newname string) error +} + +// ErrNoSymlink is the error that will be wrapped in an os.LinkError if a file system +// does not support Symlink's either directly or through its delegated filesystem. 
+// As expressed by support for the Linker interface. +var ErrNoSymlink = errors.New("symlink not supported") + +// LinkReader is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +type LinkReader interface { + ReadlinkIfPossible(name string) (string, error) +} + +// ErrNoReadlink is the error that will be wrapped in an os.Path if a file system +// does not support the readlink operation either directly or through its delegated filesystem. +// As expressed by support for the LinkReader interface. +var ErrNoReadlink = errors.New("readlink not supported") diff --git a/vendor/github.com/spf13/afero/unionFile.go b/vendor/github.com/spf13/afero/unionFile.go new file mode 100644 index 0000000..62dd6c9 --- /dev/null +++ b/vendor/github.com/spf13/afero/unionFile.go @@ -0,0 +1,330 @@ +package afero + +import ( + "io" + "os" + "path/filepath" + "syscall" +) + +// The UnionFile implements the afero.File interface and will be returned +// when reading a directory present at least in the overlay or opening a file +// for writing. +// +// The calls to +// Readdir() and Readdirnames() merge the file os.FileInfo / names from the +// base and the overlay - for files present in both layers, only those +// from the overlay will be used. +// +// When opening files for writing (Create() / OpenFile() with the right flags) +// the operations will be done in both layers, starting with the overlay. A +// successful read in the overlay will move the cursor position in the base layer +// by the number of bytes read. +type UnionFile struct { + Base File + Layer File + Merger DirsMerger + off int + files []os.FileInfo +} + +func (f *UnionFile) Close() error { + // first close base, so we have a newer timestamp in the overlay. 
If we'd close + // the overlay first, we'd get a cacheStale the next time we access this file + // -> cache would be useless ;-) + if f.Base != nil { + f.Base.Close() + } + if f.Layer != nil { + return f.Layer.Close() + } + return BADFD +} + +func (f *UnionFile) Read(s []byte) (int, error) { + if f.Layer != nil { + n, err := f.Layer.Read(s) + if (err == nil || err == io.EOF) && f.Base != nil { + // advance the file position also in the base file, the next + // call may be a write at this position (or a seek with SEEK_CUR) + if _, seekErr := f.Base.Seek(int64(n), io.SeekCurrent); seekErr != nil { + // only overwrite err in case the seek fails: we need to + // report an eventual io.EOF to the caller + err = seekErr + } + } + return n, err + } + if f.Base != nil { + return f.Base.Read(s) + } + return 0, BADFD +} + +func (f *UnionFile) ReadAt(s []byte, o int64) (int, error) { + if f.Layer != nil { + n, err := f.Layer.ReadAt(s, o) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o+int64(n), io.SeekStart) + } + return n, err + } + if f.Base != nil { + return f.Base.ReadAt(s, o) + } + return 0, BADFD +} + +func (f *UnionFile) Seek(o int64, w int) (pos int64, err error) { + if f.Layer != nil { + pos, err = f.Layer.Seek(o, w) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o, w) + } + return pos, err + } + if f.Base != nil { + return f.Base.Seek(o, w) + } + return 0, BADFD +} + +func (f *UnionFile) Write(s []byte) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.Write(s) + if err == nil && f.Base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? 
+ _, err = f.Base.Write(s) + } + return n, err + } + if f.Base != nil { + return f.Base.Write(s) + } + return 0, BADFD +} + +func (f *UnionFile) WriteAt(s []byte, o int64) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.WriteAt(s, o) + if err == nil && f.Base != nil { + _, err = f.Base.WriteAt(s, o) + } + return n, err + } + if f.Base != nil { + return f.Base.WriteAt(s, o) + } + return 0, BADFD +} + +func (f *UnionFile) Name() string { + if f.Layer != nil { + return f.Layer.Name() + } + return f.Base.Name() +} + +// DirsMerger is how UnionFile weaves two directories together. +// It takes the FileInfo slices from the layer and the base and returns a +// single view. +type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) + +var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { + files := make(map[string]os.FileInfo) + + for _, fi := range lofi { + files[fi.Name()] = fi + } + + for _, fi := range bofi { + if _, exists := files[fi.Name()]; !exists { + files[fi.Name()] = fi + } + } + + rfi := make([]os.FileInfo, len(files)) + + i := 0 + for _, fi := range files { + rfi[i] = fi + i++ + } + + return rfi, nil +} + +// Readdir will weave the two directories together and +// return a single view of the overlayed directories. +// At the end of the directory view, the error is io.EOF if c > 0. +func (f *UnionFile) Readdir(c int) (ofi []os.FileInfo, err error) { + var merge DirsMerger = f.Merger + if merge == nil { + merge = defaultUnionMergeDirsFn + } + + if f.off == 0 { + var lfi []os.FileInfo + if f.Layer != nil { + lfi, err = f.Layer.Readdir(-1) + if err != nil { + return nil, err + } + } + + var bfi []os.FileInfo + if f.Base != nil { + bfi, err = f.Base.Readdir(-1) + if err != nil { + return nil, err + } + + } + merged, err := merge(lfi, bfi) + if err != nil { + return nil, err + } + f.files = append(f.files, merged...) 
+ } + files := f.files[f.off:] + + if c <= 0 { + return files, nil + } + + if len(files) == 0 { + return nil, io.EOF + } + + if c > len(files) { + c = len(files) + } + + defer func() { f.off += c }() + return files[:c], nil +} + +func (f *UnionFile) Readdirnames(c int) ([]string, error) { + rfi, err := f.Readdir(c) + if err != nil { + return nil, err + } + var names []string + for _, fi := range rfi { + names = append(names, fi.Name()) + } + return names, nil +} + +func (f *UnionFile) Stat() (os.FileInfo, error) { + if f.Layer != nil { + return f.Layer.Stat() + } + if f.Base != nil { + return f.Base.Stat() + } + return nil, BADFD +} + +func (f *UnionFile) Sync() (err error) { + if f.Layer != nil { + err = f.Layer.Sync() + if err == nil && f.Base != nil { + err = f.Base.Sync() + } + return err + } + if f.Base != nil { + return f.Base.Sync() + } + return BADFD +} + +func (f *UnionFile) Truncate(s int64) (err error) { + if f.Layer != nil { + err = f.Layer.Truncate(s) + if err == nil && f.Base != nil { + err = f.Base.Truncate(s) + } + return err + } + if f.Base != nil { + return f.Base.Truncate(s) + } + return BADFD +} + +func (f *UnionFile) WriteString(s string) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.WriteString(s) + if err == nil && f.Base != nil { + _, err = f.Base.WriteString(s) + } + return n, err + } + if f.Base != nil { + return f.Base.WriteString(s) + } + return 0, BADFD +} + +func copyFile(base Fs, layer Fs, name string, bfh File) error { + // First make sure the directory exists + exists, err := Exists(layer, filepath.Dir(name)) + if err != nil { + return err + } + if !exists { + err = layer.MkdirAll(filepath.Dir(name), 0o777) // FIXME? 
+ if err != nil { + return err + } + } + + // Create the file on the overlay + lfh, err := layer.Create(name) + if err != nil { + return err + } + n, err := io.Copy(lfh, bfh) + if err != nil { + // If anything fails, clean up the file + layer.Remove(name) + lfh.Close() + return err + } + + bfi, err := bfh.Stat() + if err != nil || bfi.Size() != n { + layer.Remove(name) + lfh.Close() + return syscall.EIO + } + + err = lfh.Close() + if err != nil { + layer.Remove(name) + lfh.Close() + return err + } + return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime()) +} + +func copyToLayer(base Fs, layer Fs, name string) error { + bfh, err := base.Open(name) + if err != nil { + return err + } + defer bfh.Close() + + return copyFile(base, layer, name, bfh) +} + +func copyFileToLayer(base Fs, layer Fs, name string, flag int, perm os.FileMode) error { + bfh, err := base.OpenFile(name, flag, perm) + if err != nil { + return err + } + defer bfh.Close() + + return copyFile(base, layer, name, bfh) +} diff --git a/vendor/github.com/spf13/afero/util.go b/vendor/github.com/spf13/afero/util.go new file mode 100644 index 0000000..9e4cba2 --- /dev/null +++ b/vendor/github.com/spf13/afero/util.go @@ -0,0 +1,329 @@ +// Copyright ©2015 Steve Francia +// Portions Copyright ©2015 The Hugo Authors +// Portions Copyright 2016-present Bjørn Erik Pedersen +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package afero + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "unicode" + + "golang.org/x/text/runes" + "golang.org/x/text/transform" + "golang.org/x/text/unicode/norm" +) + +// Filepath separator defined by os.Separator. +const FilePathSeparator = string(filepath.Separator) + +// Takes a reader and a path and writes the content +func (a Afero) WriteReader(path string, r io.Reader) (err error) { + return WriteReader(a.Fs, path, r) +} + +func WriteReader(fs Fs, path string, r io.Reader) (err error) { + dir, _ := filepath.Split(path) + ospath := filepath.FromSlash(dir) + + if ospath != "" { + err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r + if err != nil { + if err != os.ErrExist { + return err + } + } + } + + file, err := fs.Create(path) + if err != nil { + return + } + defer file.Close() + + _, err = io.Copy(file, r) + return +} + +// Same as WriteReader but checks to see if file/directory already exists. +func (a Afero) SafeWriteReader(path string, r io.Reader) (err error) { + return SafeWriteReader(a.Fs, path, r) +} + +func SafeWriteReader(fs Fs, path string, r io.Reader) (err error) { + dir, _ := filepath.Split(path) + ospath := filepath.FromSlash(dir) + + if ospath != "" { + err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r + if err != nil { + return + } + } + + exists, err := Exists(fs, path) + if err != nil { + return + } + if exists { + return fmt.Errorf("%v already exists", path) + } + + file, err := fs.Create(path) + if err != nil { + return + } + defer file.Close() + + _, err = io.Copy(file, r) + return +} + +func (a Afero) GetTempDir(subPath string) string { + return GetTempDir(a.Fs, subPath) +} + +// GetTempDir returns the default temp directory with trailing slash +// if subPath is not empty then it will be created recursively with mode 777 rwx rwx rwx +func GetTempDir(fs Fs, subPath string) string { + addSlash := func(p string) string { + if FilePathSeparator != p[len(p)-1:] { + p = p + FilePathSeparator + } + return p + } 
+ dir := addSlash(os.TempDir()) + + if subPath != "" { + // preserve windows backslash :-( + if FilePathSeparator == "\\" { + subPath = strings.Replace(subPath, "\\", "____", -1) + } + dir = dir + UnicodeSanitize((subPath)) + if FilePathSeparator == "\\" { + dir = strings.Replace(dir, "____", "\\", -1) + } + + if exists, _ := Exists(fs, dir); exists { + return addSlash(dir) + } + + err := fs.MkdirAll(dir, 0o777) + if err != nil { + panic(err) + } + dir = addSlash(dir) + } + return dir +} + +// Rewrite string to remove non-standard path characters +func UnicodeSanitize(s string) string { + source := []rune(s) + target := make([]rune, 0, len(source)) + + for _, r := range source { + if unicode.IsLetter(r) || + unicode.IsDigit(r) || + unicode.IsMark(r) || + r == '.' || + r == '/' || + r == '\\' || + r == '_' || + r == '-' || + r == '%' || + r == ' ' || + r == '#' { + target = append(target, r) + } + } + + return string(target) +} + +// Transform characters with accents into plain forms. +func NeuterAccents(s string) string { + t := transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC) + result, _, _ := transform.String(t, string(s)) + + return result +} + +func (a Afero) FileContainsBytes(filename string, subslice []byte) (bool, error) { + return FileContainsBytes(a.Fs, filename, subslice) +} + +// Check if a file contains a specified byte slice. +func FileContainsBytes(fs Fs, filename string, subslice []byte) (bool, error) { + f, err := fs.Open(filename) + if err != nil { + return false, err + } + defer f.Close() + + return readerContainsAny(f, subslice), nil +} + +func (a Afero) FileContainsAnyBytes(filename string, subslices [][]byte) (bool, error) { + return FileContainsAnyBytes(a.Fs, filename, subslices) +} + +// Check if a file contains any of the specified byte slices. 
+func FileContainsAnyBytes(fs Fs, filename string, subslices [][]byte) (bool, error) { + f, err := fs.Open(filename) + if err != nil { + return false, err + } + defer f.Close() + + return readerContainsAny(f, subslices...), nil +} + +// readerContains reports whether any of the subslices is within r. +func readerContainsAny(r io.Reader, subslices ...[]byte) bool { + if r == nil || len(subslices) == 0 { + return false + } + + largestSlice := 0 + + for _, sl := range subslices { + if len(sl) > largestSlice { + largestSlice = len(sl) + } + } + + if largestSlice == 0 { + return false + } + + bufflen := largestSlice * 4 + halflen := bufflen / 2 + buff := make([]byte, bufflen) + var err error + var n, i int + + for { + i++ + if i == 1 { + n, err = io.ReadAtLeast(r, buff[:halflen], halflen) + } else { + if i != 2 { + // shift left to catch overlapping matches + copy(buff[:], buff[halflen:]) + } + n, err = io.ReadAtLeast(r, buff[halflen:], halflen) + } + + if n > 0 { + for _, sl := range subslices { + if bytes.Contains(buff, sl) { + return true + } + } + } + + if err != nil { + break + } + } + return false +} + +func (a Afero) DirExists(path string) (bool, error) { + return DirExists(a.Fs, path) +} + +// DirExists checks if a path exists and is a directory. +func DirExists(fs Fs, path string) (bool, error) { + fi, err := fs.Stat(path) + if err == nil && fi.IsDir() { + return true, nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +func (a Afero) IsDir(path string) (bool, error) { + return IsDir(a.Fs, path) +} + +// IsDir checks if a given path is a directory. +func IsDir(fs Fs, path string) (bool, error) { + fi, err := fs.Stat(path) + if err != nil { + return false, err + } + return fi.IsDir(), nil +} + +func (a Afero) IsEmpty(path string) (bool, error) { + return IsEmpty(a.Fs, path) +} + +// IsEmpty checks if a given file or directory is empty. 
+func IsEmpty(fs Fs, path string) (bool, error) { + if b, _ := Exists(fs, path); !b { + return false, fmt.Errorf("%q path does not exist", path) + } + fi, err := fs.Stat(path) + if err != nil { + return false, err + } + if fi.IsDir() { + f, err := fs.Open(path) + if err != nil { + return false, err + } + defer f.Close() + list, err := f.Readdir(-1) + if err != nil { + return false, err + } + return len(list) == 0, nil + } + return fi.Size() == 0, nil +} + +func (a Afero) Exists(path string) (bool, error) { + return Exists(a.Fs, path) +} + +// Check if a file or directory exists. +func Exists(fs Fs, path string) (bool, error) { + _, err := fs.Stat(path) + if err == nil { + return true, nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +func FullBaseFsPath(basePathFs *BasePathFs, relativePath string) string { + combinedPath := filepath.Join(basePathFs.path, relativePath) + if parent, ok := basePathFs.source.(*BasePathFs); ok { + return FullBaseFsPath(parent, combinedPath) + } + + return combinedPath +} diff --git a/vendor/github.com/spf13/cast/.gitignore b/vendor/github.com/spf13/cast/.gitignore new file mode 100644 index 0000000..53053a8 --- /dev/null +++ b/vendor/github.com/spf13/cast/.gitignore @@ -0,0 +1,25 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test + +*.bench diff --git a/vendor/github.com/spf13/cast/LICENSE b/vendor/github.com/spf13/cast/LICENSE new file mode 100644 index 0000000..4527efb --- /dev/null +++ b/vendor/github.com/spf13/cast/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Steve Francia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software 
without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/spf13/cast/Makefile b/vendor/github.com/spf13/cast/Makefile new file mode 100644 index 0000000..f01a5db --- /dev/null +++ b/vendor/github.com/spf13/cast/Makefile @@ -0,0 +1,40 @@ +GOVERSION := $(shell go version | cut -d ' ' -f 3 | cut -d '.' -f 2) + +.PHONY: check fmt lint test test-race vet test-cover-html help +.DEFAULT_GOAL := help + +check: test-race fmt vet lint ## Run tests and linters + +test: ## Run tests + go test ./... + +test-race: ## Run tests with race detector + go test -race ./... + +fmt: ## Run gofmt linter +ifeq "$(GOVERSION)" "12" + @for d in `go list` ; do \ + if [ "`gofmt -l -s $$GOPATH/src/$$d | tee /dev/stderr`" ]; then \ + echo "^ improperly formatted go files" && echo && exit 1; \ + fi \ + done +endif + +lint: ## Run golint linter + @for d in `go list` ; do \ + if [ "`golint $$d | tee /dev/stderr`" ]; then \ + echo "^ golint errors!" && echo && exit 1; \ + fi \ + done + +vet: ## Run go vet linter + @if [ "`go vet | tee /dev/stderr`" ]; then \ + echo "^ go vet errors!" 
&& echo && exit 1; \ + fi + +test-cover-html: ## Generate test coverage report + go test -coverprofile=coverage.out -covermode=count + go tool cover -func=coverage.out + +help: + @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/vendor/github.com/spf13/cast/README.md b/vendor/github.com/spf13/cast/README.md new file mode 100644 index 0000000..0e9e145 --- /dev/null +++ b/vendor/github.com/spf13/cast/README.md @@ -0,0 +1,75 @@ +# cast + +[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/spf13/cast/ci.yaml?branch=master&style=flat-square)](https://github.com/spf13/cast/actions/workflows/ci.yaml) +[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/cast)](https://pkg.go.dev/mod/github.com/spf13/cast) +![Go Version](https://img.shields.io/badge/go%20version-%3E=1.16-61CFDD.svg?style=flat-square) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cast?style=flat-square)](https://goreportcard.com/report/github.com/spf13/cast) + +Easy and safe casting from one type to another in Go + +Don’t Panic! ... Cast + +## What is Cast? + +Cast is a library to convert between different go types in a consistent and easy way. + +Cast provides simple functions to easily convert a number to a string, an +interface into a bool, etc. Cast does this intelligently when an obvious +conversion is possible. It doesn’t make any attempts to guess what you meant, +for example you can only convert a string to an int when it is a string +representation of an int such as “8â€. Cast was developed for use in +[Hugo](https://gohugo.io), a website engine which uses YAML, TOML or JSON +for meta data. + +## Why use Cast? + +When working with dynamic data in Go you often need to cast or convert the data +from one type into another. 
Cast goes beyond just using type assertion (though +it uses that when possible) to provide a very straightforward and convenient +library. + +If you are working with interfaces to handle things like dynamic content +you’ll need an easy way to convert an interface into a given type. This +is the library for you. + +If you are taking in data from YAML, TOML or JSON or other formats which lack +full types, then Cast is the library for you. + +## Usage + +Cast provides a handful of To_____ methods. These methods will always return +the desired type. **If input is provided that will not convert to that type, the +0 or nil value for that type will be returned**. + +Cast also provides identical methods To_____E. These return the same result as +the To_____ methods, plus an additional error which tells you if it successfully +converted. Using these methods you can tell the difference between when the +input matched the zero value or when the conversion failed and the zero value +was returned. + +The following examples are merely a sample of what is available. Please review +the code for a complete set. + +### Example ‘ToString’: + + cast.ToString("mayonegg") // "mayonegg" + cast.ToString(8) // "8" + cast.ToString(8.31) // "8.31" + cast.ToString([]byte("one time")) // "one time" + cast.ToString(nil) // "" + + var foo interface{} = "one more time" + cast.ToString(foo) // "one more time" + + +### Example ‘ToInt’: + + cast.ToInt(8) // 8 + cast.ToInt(8.31) // 8 + cast.ToInt("8") // 8 + cast.ToInt(true) // 1 + cast.ToInt(false) // 0 + + var eight interface{} = 8 + cast.ToInt(eight) // 8 + cast.ToInt(nil) // 0 diff --git a/vendor/github.com/spf13/cast/cast.go b/vendor/github.com/spf13/cast/cast.go new file mode 100644 index 0000000..0cfe941 --- /dev/null +++ b/vendor/github.com/spf13/cast/cast.go @@ -0,0 +1,176 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. 
+ +// Package cast provides easy and safe casting in Go. +package cast + +import "time" + +// ToBool casts an interface to a bool type. +func ToBool(i interface{}) bool { + v, _ := ToBoolE(i) + return v +} + +// ToTime casts an interface to a time.Time type. +func ToTime(i interface{}) time.Time { + v, _ := ToTimeE(i) + return v +} + +func ToTimeInDefaultLocation(i interface{}, location *time.Location) time.Time { + v, _ := ToTimeInDefaultLocationE(i, location) + return v +} + +// ToDuration casts an interface to a time.Duration type. +func ToDuration(i interface{}) time.Duration { + v, _ := ToDurationE(i) + return v +} + +// ToFloat64 casts an interface to a float64 type. +func ToFloat64(i interface{}) float64 { + v, _ := ToFloat64E(i) + return v +} + +// ToFloat32 casts an interface to a float32 type. +func ToFloat32(i interface{}) float32 { + v, _ := ToFloat32E(i) + return v +} + +// ToInt64 casts an interface to an int64 type. +func ToInt64(i interface{}) int64 { + v, _ := ToInt64E(i) + return v +} + +// ToInt32 casts an interface to an int32 type. +func ToInt32(i interface{}) int32 { + v, _ := ToInt32E(i) + return v +} + +// ToInt16 casts an interface to an int16 type. +func ToInt16(i interface{}) int16 { + v, _ := ToInt16E(i) + return v +} + +// ToInt8 casts an interface to an int8 type. +func ToInt8(i interface{}) int8 { + v, _ := ToInt8E(i) + return v +} + +// ToInt casts an interface to an int type. +func ToInt(i interface{}) int { + v, _ := ToIntE(i) + return v +} + +// ToUint casts an interface to a uint type. +func ToUint(i interface{}) uint { + v, _ := ToUintE(i) + return v +} + +// ToUint64 casts an interface to a uint64 type. +func ToUint64(i interface{}) uint64 { + v, _ := ToUint64E(i) + return v +} + +// ToUint32 casts an interface to a uint32 type. +func ToUint32(i interface{}) uint32 { + v, _ := ToUint32E(i) + return v +} + +// ToUint16 casts an interface to a uint16 type. 
+func ToUint16(i interface{}) uint16 { + v, _ := ToUint16E(i) + return v +} + +// ToUint8 casts an interface to a uint8 type. +func ToUint8(i interface{}) uint8 { + v, _ := ToUint8E(i) + return v +} + +// ToString casts an interface to a string type. +func ToString(i interface{}) string { + v, _ := ToStringE(i) + return v +} + +// ToStringMapString casts an interface to a map[string]string type. +func ToStringMapString(i interface{}) map[string]string { + v, _ := ToStringMapStringE(i) + return v +} + +// ToStringMapStringSlice casts an interface to a map[string][]string type. +func ToStringMapStringSlice(i interface{}) map[string][]string { + v, _ := ToStringMapStringSliceE(i) + return v +} + +// ToStringMapBool casts an interface to a map[string]bool type. +func ToStringMapBool(i interface{}) map[string]bool { + v, _ := ToStringMapBoolE(i) + return v +} + +// ToStringMapInt casts an interface to a map[string]int type. +func ToStringMapInt(i interface{}) map[string]int { + v, _ := ToStringMapIntE(i) + return v +} + +// ToStringMapInt64 casts an interface to a map[string]int64 type. +func ToStringMapInt64(i interface{}) map[string]int64 { + v, _ := ToStringMapInt64E(i) + return v +} + +// ToStringMap casts an interface to a map[string]interface{} type. +func ToStringMap(i interface{}) map[string]interface{} { + v, _ := ToStringMapE(i) + return v +} + +// ToSlice casts an interface to a []interface{} type. +func ToSlice(i interface{}) []interface{} { + v, _ := ToSliceE(i) + return v +} + +// ToBoolSlice casts an interface to a []bool type. +func ToBoolSlice(i interface{}) []bool { + v, _ := ToBoolSliceE(i) + return v +} + +// ToStringSlice casts an interface to a []string type. +func ToStringSlice(i interface{}) []string { + v, _ := ToStringSliceE(i) + return v +} + +// ToIntSlice casts an interface to a []int type. +func ToIntSlice(i interface{}) []int { + v, _ := ToIntSliceE(i) + return v +} + +// ToDurationSlice casts an interface to a []time.Duration type. 
+func ToDurationSlice(i interface{}) []time.Duration { + v, _ := ToDurationSliceE(i) + return v +} diff --git a/vendor/github.com/spf13/cast/caste.go b/vendor/github.com/spf13/cast/caste.go new file mode 100644 index 0000000..cd9c048 --- /dev/null +++ b/vendor/github.com/spf13/cast/caste.go @@ -0,0 +1,1510 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package cast + +import ( + "encoding/json" + "errors" + "fmt" + "html/template" + "reflect" + "strconv" + "strings" + "time" +) + +var errNegativeNotAllowed = errors.New("unable to cast negative value") + +type float64EProvider interface { + Float64() (float64, error) +} + +type float64Provider interface { + Float64() float64 +} + +// ToTimeE casts an interface to a time.Time type. +func ToTimeE(i interface{}) (tim time.Time, err error) { + return ToTimeInDefaultLocationE(i, time.UTC) +} + +// ToTimeInDefaultLocationE casts an empty interface to time.Time, +// interpreting inputs without a timezone to be in the given location, +// or the local timezone if nil. 
+func ToTimeInDefaultLocationE(i interface{}, location *time.Location) (tim time.Time, err error) { + i = indirect(i) + + switch v := i.(type) { + case time.Time: + return v, nil + case string: + return StringToDateInDefaultLocation(v, location) + case json.Number: + s, err1 := ToInt64E(v) + if err1 != nil { + return time.Time{}, fmt.Errorf("unable to cast %#v of type %T to Time", i, i) + } + return time.Unix(s, 0), nil + case int: + return time.Unix(int64(v), 0), nil + case int64: + return time.Unix(v, 0), nil + case int32: + return time.Unix(int64(v), 0), nil + case uint: + return time.Unix(int64(v), 0), nil + case uint64: + return time.Unix(int64(v), 0), nil + case uint32: + return time.Unix(int64(v), 0), nil + default: + return time.Time{}, fmt.Errorf("unable to cast %#v of type %T to Time", i, i) + } +} + +// ToDurationE casts an interface to a time.Duration type. +func ToDurationE(i interface{}) (d time.Duration, err error) { + i = indirect(i) + + switch s := i.(type) { + case time.Duration: + return s, nil + case int, int64, int32, int16, int8, uint, uint64, uint32, uint16, uint8: + d = time.Duration(ToInt64(s)) + return + case float32, float64: + d = time.Duration(ToFloat64(s)) + return + case string: + if strings.ContainsAny(s, "nsuµmh") { + d, err = time.ParseDuration(s) + } else { + d, err = time.ParseDuration(s + "ns") + } + return + case float64EProvider: + var v float64 + v, err = s.Float64() + d = time.Duration(v) + return + case float64Provider: + d = time.Duration(s.Float64()) + return + default: + err = fmt.Errorf("unable to cast %#v of type %T to Duration", i, i) + return + } +} + +// ToBoolE casts an interface to a bool type. 
+func ToBoolE(i interface{}) (bool, error) { + i = indirect(i) + + switch b := i.(type) { + case bool: + return b, nil + case nil: + return false, nil + case int: + return b != 0, nil + case int64: + return b != 0, nil + case int32: + return b != 0, nil + case int16: + return b != 0, nil + case int8: + return b != 0, nil + case uint: + return b != 0, nil + case uint64: + return b != 0, nil + case uint32: + return b != 0, nil + case uint16: + return b != 0, nil + case uint8: + return b != 0, nil + case float64: + return b != 0, nil + case float32: + return b != 0, nil + case time.Duration: + return b != 0, nil + case string: + return strconv.ParseBool(i.(string)) + case json.Number: + v, err := ToInt64E(b) + if err == nil { + return v != 0, nil + } + return false, fmt.Errorf("unable to cast %#v of type %T to bool", i, i) + default: + return false, fmt.Errorf("unable to cast %#v of type %T to bool", i, i) + } +} + +// ToFloat64E casts an interface to a float64 type. +func ToFloat64E(i interface{}) (float64, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return float64(intv), nil + } + + switch s := i.(type) { + case float64: + return s, nil + case float32: + return float64(s), nil + case int64: + return float64(s), nil + case int32: + return float64(s), nil + case int16: + return float64(s), nil + case int8: + return float64(s), nil + case uint: + return float64(s), nil + case uint64: + return float64(s), nil + case uint32: + return float64(s), nil + case uint16: + return float64(s), nil + case uint8: + return float64(s), nil + case string: + v, err := strconv.ParseFloat(s, 64) + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) + case float64EProvider: + v, err := s.Float64() + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) + case float64Provider: + return s.Float64(), nil + case bool: + if s { + return 1, nil + } + return 0, nil 
+ case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) + } +} + +// ToFloat32E casts an interface to a float32 type. +func ToFloat32E(i interface{}) (float32, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return float32(intv), nil + } + + switch s := i.(type) { + case float64: + return float32(s), nil + case float32: + return s, nil + case int64: + return float32(s), nil + case int32: + return float32(s), nil + case int16: + return float32(s), nil + case int8: + return float32(s), nil + case uint: + return float32(s), nil + case uint64: + return float32(s), nil + case uint32: + return float32(s), nil + case uint16: + return float32(s), nil + case uint8: + return float32(s), nil + case string: + v, err := strconv.ParseFloat(s, 32) + if err == nil { + return float32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) + case float64EProvider: + v, err := s.Float64() + if err == nil { + return float32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) + case float64Provider: + return float32(s.Float64()), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) + } +} + +// ToInt64E casts an interface to an int64 type. 
+func ToInt64E(i interface{}) (int64, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return int64(intv), nil + } + + switch s := i.(type) { + case int64: + return s, nil + case int32: + return int64(s), nil + case int16: + return int64(s), nil + case int8: + return int64(s), nil + case uint: + return int64(s), nil + case uint64: + return int64(s), nil + case uint32: + return int64(s), nil + case uint16: + return int64(s), nil + case uint8: + return int64(s), nil + case float64: + return int64(s), nil + case float32: + return int64(s), nil + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) + case json.Number: + return ToInt64E(string(s)) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) + } +} + +// ToInt32E casts an interface to an int32 type. 
+func ToInt32E(i interface{}) (int32, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return int32(intv), nil + } + + switch s := i.(type) { + case int64: + return int32(s), nil + case int32: + return s, nil + case int16: + return int32(s), nil + case int8: + return int32(s), nil + case uint: + return int32(s), nil + case uint64: + return int32(s), nil + case uint32: + return int32(s), nil + case uint16: + return int32(s), nil + case uint8: + return int32(s), nil + case float64: + return int32(s), nil + case float32: + return int32(s), nil + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + return int32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) + case json.Number: + return ToInt32E(string(s)) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) + } +} + +// ToInt16E casts an interface to an int16 type. 
+func ToInt16E(i interface{}) (int16, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return int16(intv), nil + } + + switch s := i.(type) { + case int64: + return int16(s), nil + case int32: + return int16(s), nil + case int16: + return s, nil + case int8: + return int16(s), nil + case uint: + return int16(s), nil + case uint64: + return int16(s), nil + case uint32: + return int16(s), nil + case uint16: + return int16(s), nil + case uint8: + return int16(s), nil + case float64: + return int16(s), nil + case float32: + return int16(s), nil + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + return int16(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) + case json.Number: + return ToInt16E(string(s)) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) + } +} + +// ToInt8E casts an interface to an int8 type. 
+func ToInt8E(i interface{}) (int8, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return int8(intv), nil + } + + switch s := i.(type) { + case int64: + return int8(s), nil + case int32: + return int8(s), nil + case int16: + return int8(s), nil + case int8: + return s, nil + case uint: + return int8(s), nil + case uint64: + return int8(s), nil + case uint32: + return int8(s), nil + case uint16: + return int8(s), nil + case uint8: + return int8(s), nil + case float64: + return int8(s), nil + case float32: + return int8(s), nil + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + return int8(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) + case json.Number: + return ToInt8E(string(s)) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) + } +} + +// ToIntE casts an interface to an int type. +func ToIntE(i interface{}) (int, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + return intv, nil + } + + switch s := i.(type) { + case int64: + return int(s), nil + case int32: + return int(s), nil + case int16: + return int(s), nil + case int8: + return int(s), nil + case uint: + return int(s), nil + case uint64: + return int(s), nil + case uint32: + return int(s), nil + case uint16: + return int(s), nil + case uint8: + return int(s), nil + case float64: + return int(s), nil + case float32: + return int(s), nil + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + return int(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) + case json.Number: + return ToIntE(string(s)) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int", i, i) + } +} + +// ToUintE casts an interface to a uint type. 
+func ToUintE(i interface{}) (uint, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + if intv < 0 { + return 0, errNegativeNotAllowed + } + return uint(intv), nil + } + + switch s := i.(type) { + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + if v < 0 { + return 0, errNegativeNotAllowed + } + return uint(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to uint", i, i) + case json.Number: + return ToUintE(string(s)) + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case uint: + return s, nil + case uint64: + return uint(s), nil + case uint32: + return uint(s), nil + case uint16: + return uint(s), nil + case uint8: + return uint(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint", i, i) + } +} + +// ToUint64E casts an interface to a uint64 type. 
+func ToUint64E(i interface{}) (uint64, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + if intv < 0 { + return 0, errNegativeNotAllowed + } + return uint64(intv), nil + } + + switch s := i.(type) { + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + if v < 0 { + return 0, errNegativeNotAllowed + } + return uint64(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to uint64", i, i) + case json.Number: + return ToUint64E(string(s)) + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case uint: + return uint64(s), nil + case uint64: + return s, nil + case uint32: + return uint64(s), nil + case uint16: + return uint64(s), nil + case uint8: + return uint64(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint64", i, i) + } +} + +// ToUint32E casts an interface to a uint32 type. 
+func ToUint32E(i interface{}) (uint32, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + if intv < 0 { + return 0, errNegativeNotAllowed + } + return uint32(intv), nil + } + + switch s := i.(type) { + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + if v < 0 { + return 0, errNegativeNotAllowed + } + return uint32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to uint32", i, i) + case json.Number: + return ToUint32E(string(s)) + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case uint: + return uint32(s), nil + case uint64: + return uint32(s), nil + case uint32: + return s, nil + case uint16: + return uint32(s), nil + case uint8: + return uint32(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint32", i, i) + } +} + +// ToUint16E casts an interface to a uint16 type. 
+func ToUint16E(i interface{}) (uint16, error) { + i = indirect(i) + + intv, ok := toInt(i) + if ok { + if intv < 0 { + return 0, errNegativeNotAllowed + } + return uint16(intv), nil + } + + switch s := i.(type) { + case string: + v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0) + if err == nil { + if v < 0 { + return 0, errNegativeNotAllowed + } + return uint16(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to uint16", i, i) + case json.Number: + return ToUint16E(string(s)) + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case uint: + return uint16(s), nil + case uint64: + return uint16(s), nil + case uint32: + return uint16(s), nil + case uint16: + return s, nil + case uint8: + return uint16(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint16", i, i) + } +} + +// ToUint8E casts an interface to a uint type. 
func ToUint8E(i interface{}) (uint8, error) {
	i = indirect(i)

	// Fast path: toInt handles plain int and int-kinded stdlib types.
	intv, ok := toInt(i)
	if ok {
		if intv < 0 {
			return 0, errNegativeNotAllowed
		}
		// Values above 255 are truncated modulo 256 by the Go conversion,
		// matching the other ToUintXE helpers.
		return uint8(intv), nil
	}

	switch s := i.(type) {
	case string:
		v, err := strconv.ParseInt(trimZeroDecimal(s), 0, 0)
		if err == nil {
			if v < 0 {
				return 0, errNegativeNotAllowed
			}
			return uint8(v), nil
		}
		return 0, fmt.Errorf("unable to cast %#v of type %T to uint8", i, i)
	case json.Number:
		// Delegate to the string handling above.
		return ToUint8E(string(s))
	case int64:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case int32:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case int16:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case int8:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case uint:
		return uint8(s), nil
	case uint64:
		return uint8(s), nil
	case uint32:
		return uint8(s), nil
	case uint16:
		return uint8(s), nil
	case uint8:
		return s, nil
	case float64:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case float32:
		if s < 0 {
			return 0, errNegativeNotAllowed
		}
		return uint8(s), nil
	case bool:
		if s {
			return 1, nil
		}
		return 0, nil
	case nil:
		return 0, nil
	default:
		return 0, fmt.Errorf("unable to cast %#v of type %T to uint8", i, i)
	}
}

// From html/template/content.go
// Copyright 2011 The Go Authors. All rights reserved.
// indirect returns the value, after dereferencing as many times
// as necessary to reach the base type (or nil).
func indirect(a interface{}) interface{} {
	if a == nil {
		return nil
	}
	if t := reflect.TypeOf(a); t.Kind() != reflect.Ptr {
		// Avoid creating a reflect.Value if it's not a pointer.
		return a
	}
	v := reflect.ValueOf(a)
	for v.Kind() == reflect.Ptr && !v.IsNil() {
		v = v.Elem()
	}
	return v.Interface()
}

// From html/template/content.go
// Copyright 2011 The Go Authors. All rights reserved.
// indirectToStringerOrError returns the value, after dereferencing as many times
// as necessary to reach the base type (or nil) or an implementation of fmt.Stringer
// or error.
func indirectToStringerOrError(a interface{}) interface{} {
	if a == nil {
		return nil
	}

	errorType := reflect.TypeOf((*error)(nil)).Elem()
	fmtStringerType := reflect.TypeOf((*fmt.Stringer)(nil)).Elem()

	// Unlike indirect, stop early if any level of the pointer chain
	// already implements fmt.Stringer or error, so ToStringE can use it.
	v := reflect.ValueOf(a)
	for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) && v.Kind() == reflect.Ptr && !v.IsNil() {
		v = v.Elem()
	}
	return v.Interface()
}

// ToStringE casts an interface to a string type.
func ToStringE(i interface{}) (string, error) {
	i = indirectToStringerOrError(i)

	switch s := i.(type) {
	case string:
		return s, nil
	case bool:
		return strconv.FormatBool(s), nil
	case float64:
		// 'f' with precision -1: shortest representation that round-trips.
		return strconv.FormatFloat(s, 'f', -1, 64), nil
	case float32:
		return strconv.FormatFloat(float64(s), 'f', -1, 32), nil
	case int:
		return strconv.Itoa(s), nil
	case int64:
		return strconv.FormatInt(s, 10), nil
	case int32:
		return strconv.Itoa(int(s)), nil
	case int16:
		return strconv.FormatInt(int64(s), 10), nil
	case int8:
		return strconv.FormatInt(int64(s), 10), nil
	case uint:
		return strconv.FormatUint(uint64(s), 10), nil
	case uint64:
		return strconv.FormatUint(uint64(s), 10), nil
	case uint32:
		return strconv.FormatUint(uint64(s), 10), nil
	case uint16:
		return strconv.FormatUint(uint64(s), 10), nil
	case uint8:
		return strconv.FormatUint(uint64(s), 10), nil
	case json.Number:
		return s.String(), nil
	case []byte:
		return string(s), nil
	case template.HTML:
		return string(s), nil
	case template.URL:
		return string(s), nil
	case template.JS:
		return string(s), nil
	case template.CSS:
		return string(s), nil
	case template.HTMLAttr:
		return string(s), nil
	case nil:
		return "", nil
	// Stringer/error come last so the concrete cases above win when a
	// type satisfies both.
	case fmt.Stringer:
		return s.String(), nil
	case error:
		return s.Error(), nil
	default:
		return "", fmt.Errorf("unable to cast %#v of type %T to string", i, i)
	}
}

// ToStringMapStringE casts an interface to a map[string]string type.
func ToStringMapStringE(i interface{}) (map[string]string, error) {
	m := map[string]string{}

	switch v := i.(type) {
	case map[string]string:
		return v, nil
	case map[string]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToString(val)
		}
		return m, nil
	case map[interface{}]string:
		for k, val := range v {
			m[ToString(k)] = ToString(val)
		}
		return m, nil
	case map[interface{}]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToString(val)
		}
		return m, nil
	case string:
		// A string is treated as a JSON object literal.
		err := jsonStringToObject(v, &m)
		return m, err
	default:
		return m, fmt.Errorf("unable to cast %#v of type %T to map[string]string", i, i)
	}
}

// ToStringMapStringSliceE casts an interface to a map[string][]string type.
func ToStringMapStringSliceE(i interface{}) (map[string][]string, error) {
	m := map[string][]string{}

	switch v := i.(type) {
	case map[string][]string:
		return v, nil
	case map[string][]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToStringSlice(val)
		}
		return m, nil
	case map[string]string:
		// Each scalar value becomes a one-element slice.
		for k, val := range v {
			m[ToString(k)] = []string{val}
		}
	case map[string]interface{}:
		for k, val := range v {
			switch vt := val.(type) {
			case []interface{}:
				m[ToString(k)] = ToStringSlice(vt)
			case []string:
				m[ToString(k)] = vt
			default:
				m[ToString(k)] = []string{ToString(val)}
			}
		}
		return m, nil
	case map[interface{}][]string:
		for k, val := range v {
			m[ToString(k)] = ToStringSlice(val)
		}
		return m, nil
	case map[interface{}]string:
		// NOTE(review): unlike the map[string]string branch above, a plain
		// string value here goes through ToStringSlice, which splits it on
		// whitespace rather than wrapping it — confirm before relying on it.
		for k, val := range v {
			m[ToString(k)] = ToStringSlice(val)
		}
		return m, nil
	case map[interface{}][]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToStringSlice(val)
		}
		return m, nil
	case map[interface{}]interface{}:
		for k, val := range v {
			key, err := ToStringE(k)
			if err != nil {
				return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i)
			}
			value, err := ToStringSliceE(val)
			if err != nil {
				return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i)
			}
			m[key] = value
		}
	case string:
		// A string is treated as a JSON object literal.
		err := jsonStringToObject(v, &m)
		return m, err
	default:
		return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i)
	}
	return m, nil
}

// ToStringMapBoolE casts an interface to a map[string]bool type.
func ToStringMapBoolE(i interface{}) (map[string]bool, error) {
	m := map[string]bool{}

	switch v := i.(type) {
	case map[interface{}]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToBool(val)
		}
		return m, nil
	case map[string]interface{}:
		for k, val := range v {
			m[ToString(k)] = ToBool(val)
		}
		return m, nil
	case map[string]bool:
		return v, nil
	case string:
		// A string is treated as a JSON object literal.
		err := jsonStringToObject(v, &m)
		return m, err
	default:
		return m, fmt.Errorf("unable to cast %#v of type %T to map[string]bool", i, i)
	}
}

// ToStringMapE casts an interface to a map[string]interface{} type.
func ToStringMapE(i interface{}) (map[string]interface{}, error) {
	m := map[string]interface{}{}

	switch v := i.(type) {
	case map[interface{}]interface{}:
		for k, val := range v {
			m[ToString(k)] = val
		}
		return m, nil
	case map[string]interface{}:
		return v, nil
	case string:
		// A string is treated as a JSON object literal.
		err := jsonStringToObject(v, &m)
		return m, err
	default:
		return m, fmt.Errorf("unable to cast %#v of type %T to map[string]interface{}", i, i)
	}
}

// ToStringMapIntE casts an interface to a map[string]int type.
+func ToStringMapIntE(i interface{}) (map[string]int, error) { + m := map[string]int{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt(val) + } + return m, nil + case map[string]int: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToIntE(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + +// ToStringMapInt64E casts an interface to a map[string]int64{} type. 
+func ToStringMapInt64E(i interface{}) (map[string]int64, error) { + m := map[string]int64{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt64(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt64(val) + } + return m, nil + case map[string]int64: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToInt64E(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + +// ToSliceE casts an interface to a []interface{} type. +func ToSliceE(i interface{}) ([]interface{}, error) { + var s []interface{} + + switch v := i.(type) { + case []interface{}: + return append(s, v...), nil + case []map[string]interface{}: + for _, u := range v { + s = append(s, u) + } + return s, nil + default: + return s, fmt.Errorf("unable to cast %#v of type %T to []interface{}", i, i) + } +} + +// ToBoolSliceE casts an interface to a []bool type. 
+func ToBoolSliceE(i interface{}) ([]bool, error) { + if i == nil { + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } + + switch v := i.(type) { + case []bool: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]bool, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToBoolE(s.Index(j).Interface()) + if err != nil { + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } + a[j] = val + } + return a, nil + default: + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } +} + +// ToStringSliceE casts an interface to a []string type. +func ToStringSliceE(i interface{}) ([]string, error) { + var a []string + + switch v := i.(type) { + case []interface{}: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []string: + return v, nil + case []int8: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []int: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []int32: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []int64: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []float32: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []float64: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case string: + return strings.Fields(v), nil + case []error: + for _, err := range i.([]error) { + a = append(a, err.Error()) + } + return a, nil + case interface{}: + str, err := ToStringE(v) + if err != nil { + return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) + } + return []string{str}, nil + default: + return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) + } +} + +// ToIntSliceE casts an interface to a []int type. 
+func ToIntSliceE(i interface{}) ([]int, error) { + if i == nil { + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } + + switch v := i.(type) { + case []int: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]int, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToIntE(s.Index(j).Interface()) + if err != nil { + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } + a[j] = val + } + return a, nil + default: + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } +} + +// ToDurationSliceE casts an interface to a []time.Duration type. +func ToDurationSliceE(i interface{}) ([]time.Duration, error) { + if i == nil { + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } + + switch v := i.(type) { + case []time.Duration: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]time.Duration, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToDurationE(s.Index(j).Interface()) + if err != nil { + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } + a[j] = val + } + return a, nil + default: + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } +} + +// StringToDate attempts to parse a string into a time.Time type using a +// predefined list of formats. If no suitable format is found, an error is +// returned. +func StringToDate(s string) (time.Time, error) { + return parseDateWith(s, time.UTC, timeFormats) +} + +// StringToDateInDefaultLocation casts an empty interface to a time.Time, +// interpreting inputs without a timezone to be in the given location, +// or the local timezone if nil. 
func StringToDateInDefaultLocation(s string, location *time.Location) (time.Time, error) {
	return parseDateWith(s, location, timeFormats)
}

// timeFormatType classifies a layout by the kind of timezone information
// it carries; parseDateWith uses it to decide whether a parsed time must
// be re-anchored in the caller's location.
type timeFormatType int

const (
	timeFormatNoTimezone timeFormatType = iota
	timeFormatNamedTimezone
	timeFormatNumericTimezone
	timeFormatNumericAndNamedTimezone
	timeFormatTimeOnly
)

// timeFormat pairs a time.Parse layout with its timezone classification.
type timeFormat struct {
	format string
	typ    timeFormatType
}

func (f timeFormat) hasTimezone() bool {
	// We don't include the formats with only named timezones, see
	// https://github.com/golang/go/issues/19694#issuecomment-289103522
	return f.typ >= timeFormatNumericTimezone && f.typ <= timeFormatNumericAndNamedTimezone
}

// timeFormats is the ordered list of layouts tried by StringToDate and
// StringToDateInDefaultLocation; the first successful parse wins.
var timeFormats = []timeFormat{
	// Keep common formats at the top.
	{"2006-01-02", timeFormatNoTimezone},
	{time.RFC3339, timeFormatNumericTimezone},
	{"2006-01-02T15:04:05", timeFormatNoTimezone}, // iso8601 without timezone
	{time.RFC1123Z, timeFormatNumericTimezone},
	{time.RFC1123, timeFormatNamedTimezone},
	{time.RFC822Z, timeFormatNumericTimezone},
	{time.RFC822, timeFormatNamedTimezone},
	{time.RFC850, timeFormatNamedTimezone},
	{"2006-01-02 15:04:05.999999999 -0700 MST", timeFormatNumericAndNamedTimezone}, // Time.String()
	{"2006-01-02T15:04:05-0700", timeFormatNumericTimezone},                        // RFC3339 without timezone hh:mm colon
	{"2006-01-02 15:04:05Z0700", timeFormatNumericTimezone},                        // RFC3339 without T or timezone hh:mm colon
	{"2006-01-02 15:04:05", timeFormatNoTimezone},
	{time.ANSIC, timeFormatNoTimezone},
	{time.UnixDate, timeFormatNamedTimezone},
	{time.RubyDate, timeFormatNumericTimezone},
	{"2006-01-02 15:04:05Z07:00", timeFormatNumericTimezone},
	{"02 Jan 2006", timeFormatNoTimezone},
	{"2006-01-02 15:04:05 -07:00", timeFormatNumericTimezone},
	{"2006-01-02 15:04:05 -0700", timeFormatNumericTimezone},
	{time.Kitchen, timeFormatTimeOnly},
	{time.Stamp, timeFormatTimeOnly},
	{time.StampMilli, timeFormatTimeOnly},
	{time.StampMicro, timeFormatTimeOnly},
	{time.StampNano, timeFormatTimeOnly},
}

// parseDateWith tries each layout in order and returns the first
// successful parse; zone-less results are re-anchored in location
// (or time.Local when location is nil).
func parseDateWith(s string, location *time.Location, formats []timeFormat) (d time.Time, e error) {
	for _, format := range formats {
		if d, e = time.Parse(format.format, s); e == nil {

			// Some time formats have a zone name, but no offset, so it gets
			// put in that zone name (not the default one passed in to us), but
			// without that zone's offset. So set the location manually.
			if format.typ <= timeFormatNamedTimezone {
				if location == nil {
					location = time.Local
				}
				year, month, day := d.Date()
				hour, min, sec := d.Clock()
				d = time.Date(year, month, day, hour, min, sec, d.Nanosecond(), location)
			}

			return
		}
	}
	return d, fmt.Errorf("unable to parse date: %s", s)
}

// jsonStringToObject attempts to unmarshall a string as JSON into
// the object passed as pointer.
func jsonStringToObject(s string, v interface{}) error {
	data := []byte(s)
	return json.Unmarshal(data, v)
}

// toInt returns the int value of v if v or v's underlying type
// is an int.
// Note that this will return false for int64 etc. types.
func toInt(v interface{}) (int, bool) {
	switch v := v.(type) {
	case int:
		return v, true
	case time.Weekday:
		return int(v), true
	case time.Month:
		return int(v), true
	default:
		return 0, false
	}
}

// trimZeroDecimal strips a trailing all-zero fraction ("1.00" -> "1")
// so the result can be parsed as an integer; a string with any non-zero
// fraction digit is returned unchanged.
func trimZeroDecimal(s string) string {
	var foundZero bool
	for i := len(s); i > 0; i-- {
		switch s[i-1] {
		case '.':
			if foundZero {
				return s[:i-1]
			}
		case '0':
			foundZero = true
		default:
			return s
		}
	}
	return s
}
diff --git a/vendor/github.com/spf13/cast/timeformattype_string.go b/vendor/github.com/spf13/cast/timeformattype_string.go
new file mode 100644
index 0000000..1524fc8
--- /dev/null
+++ b/vendor/github.com/spf13/cast/timeformattype_string.go
@@ -0,0 +1,27 @@
+// Code generated by "stringer -type timeFormatType"; DO NOT EDIT.
+ +package cast + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[timeFormatNoTimezone-0] + _ = x[timeFormatNamedTimezone-1] + _ = x[timeFormatNumericTimezone-2] + _ = x[timeFormatNumericAndNamedTimezone-3] + _ = x[timeFormatTimeOnly-4] +} + +const _timeFormatType_name = "timeFormatNoTimezonetimeFormatNamedTimezonetimeFormatNumericTimezonetimeFormatNumericAndNamedTimezonetimeFormatTimeOnly" + +var _timeFormatType_index = [...]uint8{0, 20, 43, 68, 101, 119} + +func (i timeFormatType) String() string { + if i < 0 || i >= timeFormatType(len(_timeFormatType_index)-1) { + return "timeFormatType(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _timeFormatType_name[_timeFormatType_index[i]:_timeFormatType_index[i+1]] +} diff --git a/vendor/github.com/spf13/fsync/.gitignore b/vendor/github.com/spf13/fsync/.gitignore new file mode 100644 index 0000000..8945929 --- /dev/null +++ b/vendor/github.com/spf13/fsync/.gitignore @@ -0,0 +1,25 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test + +*~ \ No newline at end of file diff --git a/vendor/github.com/spf13/fsync/LICENSE b/vendor/github.com/spf13/fsync/LICENSE new file mode 100644 index 0000000..c9d3d40 --- /dev/null +++ b/vendor/github.com/spf13/fsync/LICENSE @@ -0,0 +1,19 @@ +Copyright (C) 2012 Mostafa Hajizadeh + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of 
the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/vendor/github.com/spf13/fsync/README.md b/vendor/github.com/spf13/fsync/README.md new file mode 100644 index 0000000..35bb967 --- /dev/null +++ b/vendor/github.com/spf13/fsync/README.md @@ -0,0 +1,2 @@ +Package `fsync` keeps files and directories in sync. Read the documentation on +[GoDoc](http://godoc.org/github.com/mostafah/fsync). diff --git a/vendor/github.com/spf13/fsync/fsync.go b/vendor/github.com/spf13/fsync/fsync.go new file mode 100644 index 0000000..a358508 --- /dev/null +++ b/vendor/github.com/spf13/fsync/fsync.go @@ -0,0 +1,370 @@ +// Copyright (C) 2012 Mostafa Hajizadeh +// Copyright (C) 2014-2022 Steve Francia + +// package fsync keeps two files or directories in sync. +// +// err := fsync.Sync("~/dst", ".") +// +// After the above code, if err is nil, every file and directory in the current +// directory is copied to ~/dst and has the same permissions. Consequent calls +// will only copy changed or new files. +// +// SyncTo is a helper function which helps you sync a groups of files or +// directories into a single destination. 
For instance, calling
//
//	SyncTo("public", "build/app.js", "build/app.css", "images", "fonts")
//
// is equivalent to calling
//
//	Sync("public/app.js", "build/app.js")
//	Sync("public/app.css", "build/app.css")
//	Sync("public/images", "images")
//	Sync("public/fonts", "fonts")
//
// Actually, this is how SyncTo is implemented: consequent calls to Sync.
//
// By default, sync code ignores extra files in the destination that don’t have
// identicals in the source. Setting Delete field of a Syncer to true changes
// this behavior and deletes these extra files.
package fsync

import (
	"bytes"
	"errors"
	"io"
	iofs "io/fs"
	"os"
	"path/filepath"
	"runtime"

	"github.com/spf13/afero"
)

// ErrFileOverDir is returned when a sync would replace a non-empty
// destination directory with a plain file.
var ErrFileOverDir = errors.New(
	"fsync: trying to overwrite a non-empty directory with a file")

// FileInfo contains the shared methods between os.FileInfo and fs.DirEntry.
type FileInfo interface {
	Name() string
	IsDir() bool
}

// Sync copies files and directories inside src into dst.
func Sync(dst, src string) error {
	return NewSyncer().Sync(dst, src)
}

// SyncTo syncs srcs files and directories into to directory.
func SyncTo(to string, srcs ...string) error {
	return NewSyncer().SyncTo(to, srcs...)
}

// Syncer provides functions for syncing files.
type Syncer struct {
	// Set this to true to delete files in the destination that don't exist
	// in the source.
	Delete bool
	// To allow certain files to remain in the destination, implement this function.
	// Return true to skip file, false to delete.
	// Note that src may be either os.FileInfo or fs.DirEntry depending on the file system.
	DeleteFilter func(f FileInfo) bool
	// By default, modification times are synced. This can be turned off by
	// setting this to true.
	NoTimes bool
	// NoChmod disables permission mode syncing.
	NoChmod bool
	// Implement this function to skip Chmod syncing for only certain files
	// or directories. Return true to skip Chmod.
	ChmodFilter func(dst, src os.FileInfo) bool

	// TODO add options for not checking content for equality

	SrcFs  afero.Fs
	DestFs afero.Fs
}

// NewSyncer creates a new instance of Syncer with default options.
func NewSyncer() *Syncer {
	s := Syncer{SrcFs: new(afero.OsFs), DestFs: new(afero.OsFs)}
	// Default filter keeps nothing, so every extra destination entry is
	// eligible for removal when Delete is enabled.
	s.DeleteFilter = func(f FileInfo) bool {
		return false
	}
	return &s
}

// Sync copies files and directories inside src into dst.
func (s *Syncer) Sync(dst, src string) error {
	// make sure src exists
	if _, err := s.SrcFs.Stat(src); err != nil {
		return err
	}
	// return error instead of replacing a non-empty directory with a file
	if b, err := s.checkDir(dst, src); err != nil {
		return err
	} else if b {
		return ErrFileOverDir
	}

	return s.syncRecover(dst, src)
}

// SyncTo syncs srcs files or directories into to directory.
func (s *Syncer) SyncTo(to string, srcs ...string) error {
	for _, src := range srcs {
		dst := filepath.Join(to, filepath.Base(src))
		if err := s.Sync(dst, src); err != nil {
			return err
		}
	}
	return nil
}

// syncRecover handles errors and calls sync.
//
// The internal sync path signals failure by panicking with the error
// (see the check/panic calls in sync); this wrapper recovers such a
// panic and returns it as an ordinary error. runtime.Error panics and
// non-error panic values are re-raised untouched.
func (s *Syncer) syncRecover(dst, src string) (err error) {
	defer func() {
		if r := recover(); r != nil {
			switch r := r.(type) {
			case runtime.Error:
				panic(r)
			case error:
				err = r
			default:
				panic(r)
			}
		}
	}()

	s.sync(dst, src)
	return nil
}

// sync updates dst to match with src, handling both files and directories.
+func (s *Syncer) sync(dst, src string) { + // sync permissions and modification times after handling content + defer s.syncstats(dst, src) + + // read files info + dstat, err := s.DestFs.Stat(dst) + if err != nil && !os.IsNotExist(err) { + panic(err) + } + sstat, err := s.SrcFs.Stat(src) + if err != nil && os.IsNotExist(err) { + return // src was deleted before we could copy it + } + check(err) + + if !sstat.IsDir() { + // src is a file + // delete dst if its a directory + if dstat != nil && dstat.IsDir() { + check(s.DestFs.RemoveAll(dst)) + } + if !s.equal(dst, src, dstat, sstat) { + // perform copy + df, err := s.DestFs.Create(dst) + check(err) + defer df.Close() + sf, err := s.SrcFs.Open(src) + if os.IsNotExist(err) { + return + } + check(err) + defer sf.Close() + _, err = io.Copy(df, sf) + if os.IsNotExist(err) { + return + } + check(err) + } + return + } + + // src is a directory + // make dst if necessary + if dstat == nil { + // dst does not exist; create directory + check(s.DestFs.MkdirAll(dst, 0o755)) // permissions will be synced later + } else if !dstat.IsDir() { + // dst is a file; remove and create directory + check(s.DestFs.Remove(dst)) + check(s.DestFs.MkdirAll(dst, 0o755)) // permissions will be synced later + } + + // make a map of filenames for quick lookup; used in deletion + // deletion below + m := make(map[string]bool) + err = withDirEntry(s.SrcFs, src, func(fi FileInfo) bool { + dst2 := filepath.Join(dst, fi.Name()) + src2 := filepath.Join(src, fi.Name()) + s.sync(dst2, src2) + m[fi.Name()] = true + + return false + }) + + if os.IsNotExist(err) { + return + } + check(err) + + // delete files from dst that does not exist in src + if s.Delete { + err = withDirEntry(s.DestFs, dst, func(fi FileInfo) bool { + if !m[fi.Name()] && !s.DeleteFilter(fi) { + check(s.DestFs.RemoveAll(filepath.Join(dst, fi.Name()))) + } + return false + }) + check(err) + + } +} + +// syncstats makes sure dst has the same pemissions and modification time as src +func (s 
*Syncer) syncstats(dst, src string) { + // get file infos; return if not exist and panic if error + dstat, err1 := s.DestFs.Stat(dst) + sstat, err2 := s.SrcFs.Stat(src) + if os.IsNotExist(err1) || os.IsNotExist(err2) { + return + } + check(err1) + check(err2) + + // update dst's permission bits + noChmod := s.NoChmod + if !noChmod && s.ChmodFilter != nil { + noChmod = s.ChmodFilter(dstat, sstat) + } + if !noChmod { + if dstat.Mode().Perm() != sstat.Mode().Perm() { + check(s.DestFs.Chmod(dst, sstat.Mode().Perm())) + } + } + + // update dst's modification time + if !s.NoTimes { + if !dstat.ModTime().Equal(sstat.ModTime()) { + err := s.DestFs.Chtimes(dst, sstat.ModTime(), sstat.ModTime()) + check(err) + } + } +} + +// equal returns true if both dst and src files are equal +func (s *Syncer) equal(dst, src string, dstat, sstat os.FileInfo) bool { + if sstat == nil || dstat == nil { + return false + } + + // check sizes + if dstat.Size() != sstat.Size() { + return false + } + + // both have the same size, check the contents + f1, err := s.DestFs.Open(dst) + check(err) + defer f1.Close() + f2, err := s.SrcFs.Open(src) + check(err) + defer f2.Close() + buf1 := make([]byte, 1000) + buf2 := make([]byte, 1000) + for { + // read from both + n1, err := f1.Read(buf1) + if err != nil && err != io.EOF { + panic(err) + } + n2, err := f2.Read(buf2) + if err != nil && err != io.EOF { + panic(err) + } + + // compare read bytes + if !bytes.Equal(buf1[:n1], buf2[:n2]) { + return false + } + + // end of both files + if n1 == 0 && n2 == 0 { + break + } + } + + return true +} + +// checkDir returns true if dst is a non-empty directory and src is a file +func (s *Syncer) checkDir(dst, src string) (b bool, err error) { + // read file info + dstat, err := s.DestFs.Stat(dst) + if os.IsNotExist(err) { + return false, nil + } else if err != nil { + return false, err + } + sstat, err := s.SrcFs.Stat(src) + if err != nil { + return false, err + } + + // return false is dst is not a directory or 
src is a directory + if !dstat.IsDir() || sstat.IsDir() { + return false, nil + } + + // dst is a directory and src is a file + // check if dst is non-empty + // read dst directory + var isNonEmpty bool + err = withDirEntry(s.DestFs, dst, func(FileInfo) bool { + isNonEmpty = true + return true + }) + + return isNonEmpty, err +} + +func withDirEntry(fs afero.Fs, path string, fn func(FileInfo) bool) error { + f, err := fs.Open(path) + if err != nil { + return err + } + defer f.Close() + + if rdf, ok := f.(iofs.ReadDirFile); ok { + fis, err := rdf.ReadDir(-1) + if err != nil { + return err + } + for _, fi := range fis { + if fn(fi) { + return nil + } + } + return nil + } + + fis, err := f.Readdir(-1) + if err != nil { + return err + } + + for _, fi := range fis { + if fn(fi) { + return nil + } + } + + return nil +} + +func check(err error) { + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/steveyen/gtreap/.gitignore b/vendor/github.com/steveyen/gtreap/.gitignore new file mode 100644 index 0000000..94b2ac3 --- /dev/null +++ b/vendor/github.com/steveyen/gtreap/.gitignore @@ -0,0 +1,5 @@ +#* +*~ +*.test +tmp + diff --git a/vendor/github.com/steveyen/gtreap/LICENSE b/vendor/github.com/steveyen/gtreap/LICENSE new file mode 100644 index 0000000..2665630 --- /dev/null +++ b/vendor/github.com/steveyen/gtreap/LICENSE @@ -0,0 +1,20 @@ +Copyright (C) 2012 Steve Yen + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/steveyen/gtreap/README.md b/vendor/github.com/steveyen/gtreap/README.md new file mode 100644 index 0000000..4cd8de7 --- /dev/null +++ b/vendor/github.com/steveyen/gtreap/README.md @@ -0,0 +1,90 @@ +gtreap +------ + +gtreap is an immutable treap implementation in the Go Language + +[![GoDoc](https://godoc.org/github.com/steveyen/gtreap?status.svg)](https://godoc.org/github.com/steveyen/gtreap) [![Build Status](https://drone.io/github.com/steveyen/gtreap/status.png)](https://drone.io/github.com/steveyen/gtreap/latest) [![Coverage Status](https://coveralls.io/repos/steveyen/gtreap/badge.png)](https://coveralls.io/r/steveyen/gtreap) + +Overview +======== + +gtreap implements an immutable treap data structure in golang. + +By treap, this data structure is both a heap and a binary search tree. + +By immutable, any updates/deletes to a treap will return a new treap +which can share internal nodes with the previous treap. All nodes in +this implementation are read-only after their creation. This allows +concurrent readers to operate safely with concurrent writers as +modifications only create new data structures and never modify +existing data structures. This is a simple approach to achieving MVCC +or multi-version concurrency control. + +By heap, items in the treap follow the heap-priority property, where a +parent node will have higher priority than its left and right children +nodes. 
+ +By binary search tree, items are store lexigraphically, ordered by a +user-supplied Compare function. + +To get a probabilistic O(lg N) tree height, you should use a random +priority number during the Upsert() operation. + +LICENSE +======= + +MIT + +Example +======= + + import ( + "math/rand" + "github.com/steveyen/gtreap" + ) + + func stringCompare(a, b interface{}) int { + return bytes.Compare([]byte(a.(string)), []byte(b.(string))) + } + + t := gtreap.NewTreap(stringCompare) + t = t.Upsert("hi", rand.Int()) + t = t.Upsert("hola", rand.Int()) + t = t.Upsert("bye", rand.Int()) + t = t.Upsert("adios", rand.Int()) + + hi = t.Get("hi") + bye = t.Get("bye") + + // Some example Delete()'s... + t = t.Delete("bye") + nilValueHere = t.Get("bye") + t2 = t.Delete("hi") + nilValueHere2 = t2.Get("hi") + + // Since we still hold onto treap t, we can still access "hi". + hiStillExistsInTreapT = t.Get("hi") + + t.VisitAscend("cya", func(i Item) bool { + // This visitor callback will be invoked with every item + // from "cya" onwards. So: "hi", "hola". + // If we want to stop visiting, return false; + // otherwise a true return result means keep visiting items. + return true + }) + +Tips +==== + +The Upsert() method takes both an Item (an interface{}) and a heap +priority. Usually, that priority should be a random int +(math/rand.Int()) or perhaps even a hash of the item. However, if you +want to shuffle more commonly accessed items nearer to the top of the +treap for faster access, at the potential cost of not approaching a +probabilistic O(lg N) tree height, then you might tweak the priority. 
+ +See also +======== + +For a simple, ordered, key-value storage or persistence library built +on immutable treaps, see: https://github.com/steveyen/gkvlite diff --git a/vendor/github.com/steveyen/gtreap/treap.go b/vendor/github.com/steveyen/gtreap/treap.go new file mode 100644 index 0000000..f758ffe --- /dev/null +++ b/vendor/github.com/steveyen/gtreap/treap.go @@ -0,0 +1,188 @@ +package gtreap + +type Treap struct { + compare Compare + root *node +} + +// Compare returns an integer comparing the two items +// lexicographically. The result will be 0 if a==b, -1 if a < b, and +// +1 if a > b. +type Compare func(a, b interface{}) int + +// Item can be anything. +type Item interface{} + +type node struct { + item Item + priority int + left *node + right *node +} + +func NewTreap(c Compare) *Treap { + return &Treap{compare: c, root: nil} +} + +func (t *Treap) Min() Item { + n := t.root + if n == nil { + return nil + } + for n.left != nil { + n = n.left + } + return n.item +} + +func (t *Treap) Max() Item { + n := t.root + if n == nil { + return nil + } + for n.right != nil { + n = n.right + } + return n.item +} + +func (t *Treap) Get(target Item) Item { + n := t.root + for n != nil { + c := t.compare(target, n.item) + if c < 0 { + n = n.left + } else if c > 0 { + n = n.right + } else { + return n.item + } + } + return nil +} + +// Note: only the priority of the first insert of an item is used. +// Priorities from future updates on already existing items are +// ignored. To change the priority for an item, you need to do a +// Delete then an Upsert. 
+func (t *Treap) Upsert(item Item, itemPriority int) *Treap { + r := t.union(t.root, &node{item: item, priority: itemPriority}) + return &Treap{compare: t.compare, root: r} +} + +func (t *Treap) union(this *node, that *node) *node { + if this == nil { + return that + } + if that == nil { + return this + } + if this.priority > that.priority { + left, middle, right := t.split(that, this.item) + if middle == nil { + return &node{ + item: this.item, + priority: this.priority, + left: t.union(this.left, left), + right: t.union(this.right, right), + } + } + return &node{ + item: middle.item, + priority: this.priority, + left: t.union(this.left, left), + right: t.union(this.right, right), + } + } + // We don't use middle because the "that" has precendence. + left, _, right := t.split(this, that.item) + return &node{ + item: that.item, + priority: that.priority, + left: t.union(left, that.left), + right: t.union(right, that.right), + } +} + +// Splits a treap into two treaps based on a split item "s". +// The result tuple-3 means (left, X, right), where X is either... +// nil - meaning the item s was not in the original treap. +// non-nil - returning the node that had item s. +// The tuple-3's left result treap has items < s, +// and the tuple-3's right result treap has items > s. 
+func (t *Treap) split(n *node, s Item) (*node, *node, *node) { + if n == nil { + return nil, nil, nil + } + c := t.compare(s, n.item) + if c == 0 { + return n.left, n, n.right + } + if c < 0 { + left, middle, right := t.split(n.left, s) + return left, middle, &node{ + item: n.item, + priority: n.priority, + left: right, + right: n.right, + } + } + left, middle, right := t.split(n.right, s) + return &node{ + item: n.item, + priority: n.priority, + left: n.left, + right: left, + }, middle, right +} + +func (t *Treap) Delete(target Item) *Treap { + left, _, right := t.split(t.root, target) + return &Treap{compare: t.compare, root: t.join(left, right)} +} + +// All the items from this are < items from that. +func (t *Treap) join(this *node, that *node) *node { + if this == nil { + return that + } + if that == nil { + return this + } + if this.priority > that.priority { + return &node{ + item: this.item, + priority: this.priority, + left: this.left, + right: t.join(this.right, that), + } + } + return &node{ + item: that.item, + priority: that.priority, + left: t.join(this, that.left), + right: that.right, + } +} + +type ItemVisitor func(i Item) bool + +// Visit items greater-than-or-equal to the pivot. 
+func (t *Treap) VisitAscend(pivot Item, visitor ItemVisitor) { + t.visitAscend(t.root, pivot, visitor) +} + +func (t *Treap) visitAscend(n *node, pivot Item, visitor ItemVisitor) bool { + if n == nil { + return true + } + if t.compare(pivot, n.item) <= 0 { + if !t.visitAscend(n.left, pivot, visitor) { + return false + } + if !visitor(n.item) { + return false + } + } + return t.visitAscend(n.right, pivot, visitor) +} diff --git a/vendor/github.com/tdewolff/minify/v2/.gitattributes b/vendor/github.com/tdewolff/minify/v2/.gitattributes new file mode 100644 index 0000000..16a3a8b --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/.gitattributes @@ -0,0 +1,2 @@ +benchmarks/sample_* linguist-generated +tests/*/corpus/* linguist-generated diff --git a/vendor/github.com/tdewolff/minify/v2/.gitignore b/vendor/github.com/tdewolff/minify/v2/.gitignore new file mode 100644 index 0000000..e2cf6cf --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/.gitignore @@ -0,0 +1,32 @@ +release.sh +dist +benchmarks/* +!benchmarks/*.go +!benchmarks/sample_* +tests/*/fuzz-fuzz.zip +tests/*/crashers +tests/*/suppressions +tests/*/corpus/* +!tests/*/corpus/*.* +parse/tests/*/fuzz-fuzz.zip +parse/tests/*/crashers +parse/tests/*/suppressions +parse/tests/*/corpus/* +!parse/tests/*/corpus/*.* +bindings/js/build +bindings/js/prebuilds +bindings/js/minify.h +bindings/js/minify.a +bindings/js/node_modules +bindings/js/example/package-lock.json +bindings/js/example/node_modules +bindings/js/example/test.min.html +bindings/py/go.mod +bindings/py/go.sum +bindings/py/**/*.h +bindings/py/**/*.so +bindings/py/**/*.egg-info +bindings/py/example/example.min.html +bindings/py/dist +bindings/py/build +bindings/py/**/*.pyc diff --git a/vendor/github.com/tdewolff/minify/v2/.golangci.yml b/vendor/github.com/tdewolff/minify/v2/.golangci.yml new file mode 100644 index 0000000..7009f92 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/.golangci.yml @@ -0,0 +1,16 @@ +linters: + enable: + - 
depguard + - dogsled + - gofmt + - goimports + - golint + - gosec + - govet + - megacheck + - misspell + - nakedret + - prealloc + - unconvert + - unparam + - wastedassign diff --git a/vendor/github.com/tdewolff/minify/v2/Dockerfile b/vendor/github.com/tdewolff/minify/v2/Dockerfile new file mode 100644 index 0000000..0f7fde4 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/Dockerfile @@ -0,0 +1,17 @@ +# Use this image to build the executable +FROM golang:1.18-alpine AS build + +WORKDIR /go/src/github.com/tdewolff/minify +COPY . /go/src/github.com/tdewolff/minify/ + +RUN apk add --no-cache git ca-certificates make bash +RUN /usr/bin/env bash -c make install + + +# Final image containing the executable from the previous step +FROM alpine:3 + +COPY --from=build /go/bin/minify /usr/bin/minify +COPY "containerfiles/container-entrypoint.sh" "/init.sh" + +ENTRYPOINT ["/init.sh"] diff --git a/vendor/github.com/tdewolff/minify/v2/LICENSE b/vendor/github.com/tdewolff/minify/v2/LICENSE new file mode 100644 index 0000000..41677de --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2015 Taco de Wolff + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/tdewolff/minify/v2/Makefile b/vendor/github.com/tdewolff/minify/v2/Makefile new file mode 100644 index 0000000..9eede28 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/Makefile @@ -0,0 +1,58 @@ +SHELL=/usr/bin/env bash +NAME=minify +CMD=./cmd/minify +TARGETS=linux_amd64 linux_arm64 darwin_amd64 darwin_arm64 freebsd_amd64 netbsd_amd64 openbsd_amd64 windows_amd64 +VERSION=`git describe --tags` +FLAGS=-ldflags "-s -w -X 'main.Version=${VERSION}'" -trimpath +ENVS=GO111MODULES=on CGO_ENABLED=0 + +all: install + +install: + echo "Installing ${VERSION}" + ${ENVS} go install ${FLAGS} ./cmd/minify + . cmd/minify/bash_completion + +release: + TAG=$(shell git describe --tags --exact-match 2> /dev/null); + if [ "${.SHELLSTATUS}" -eq 0 ]; then \ + echo "Releasing ${VERSION}"; \ + else \ + echo "ERROR: commit is not tagged with a version"; \ + echo ""; \ + exit 1; \ + fi + rm -rf dist + mkdir -p dist + for t in ${TARGETS}; do \ + echo Building $$t...; \ + mkdir dist/$$t; \ + os=$$(echo $$t | cut -f1 -d_); \ + arch=$$(echo $$t | cut -f2 -d_); \ + ${ENVS} GOOS=$$os GOARCH=$$arch go build ${FLAGS} -o dist/$$t/${NAME} ${CMD}; \ + \ + cp LICENSE dist/$$t/.; \ + cp cmd/minify/README.md dist/$$t/.; \ + if [ "$$os" == "windows" ]; then \ + mv dist/$$t/${NAME} dist/$$t/${NAME}.exe; \ + zip -jq dist/${NAME}_$$t.zip dist/$$t/*; \ + cd dist; \ + sha256sum ${NAME}_$$t.zip >> checksums.txt; \ + cd ..; \ + else \ + cp cmd/minify/bash_completion dist/$$t/.; \ + cd dist/$$t; \ + tar -cf - * | gzip -9 > ../${NAME}_$$t.tar.gz; \ + cd ..; \ + sha256sum ${NAME}_$$t.tar.gz >> checksums.txt; \ + cd ..; \ + fi; \ + rm -rf dist/$$t; \ + done + +clean: + echo "Cleaning dist/" 
+ rm -rf dist + +.PHONY: install release clean +.SILENT: install release clean diff --git a/vendor/github.com/tdewolff/minify/v2/README.md b/vendor/github.com/tdewolff/minify/v2/README.md new file mode 100644 index 0000000..a3c9f48 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/README.md @@ -0,0 +1,735 @@ +# Minify [![API reference](https://img.shields.io/badge/godoc-reference-5272B4)](https://pkg.go.dev/github.com/tdewolff/minify/v2?tab=doc) [![Go Report Card](https://goreportcard.com/badge/github.com/tdewolff/minify)](https://goreportcard.com/report/github.com/tdewolff/minify) [![codecov](https://codecov.io/gh/tdewolff/minify/branch/master/graph/badge.svg?token=Cr7r2EKPj2)](https://codecov.io/gh/tdewolff/minify) + +**[Online demo](https://go.tacodewolff.nl/minify)** if you need to minify files *now*. + +**[Binaries](https://github.com/tdewolff/minify/releases) of CLI for various platforms.** See [CLI](https://github.com/tdewolff/minify/tree/master/cmd/minify) for more installation instructions. + +**[Python bindings](https://pypi.org/project/tdewolff-minify/)** install with `pip install tdewolff-minify` + +**[JavaScript bindings](https://www.npmjs.com/package/@tdewolff/minify)** install with `npm i @tdewolff/minify` + +**[.NET bindings](https://github.com/JKamsker/NMinify)** install with `Install-Package NMinify` or `dotnet add package NMinify`, thanks to Jonas Kamsker for the port + +--- + +*Did you know that the shortest valid piece of HTML5 is `x`? See for yourself at the [W3C Validator](http://validator.w3.org/)!* + +Minify is a minifier package written in [Go][1]. It provides HTML5, CSS3, JS, JSON, SVG and XML minifiers and an interface to implement any other minifier. Minification is the process of removing bytes from a file (such as whitespace) without changing its output and therefore shrinking its size and speeding up transmission over the internet and possibly parsing. 
The implemented minifiers are designed for high performance (see https://github.com/privatenumber/minification-benchmarks where this library is (one of) the fastest JS minifiers). + +The core functionality associates mimetypes with minification functions, allowing embedded resources (like CSS or JS within HTML files) to be minified as well. Users can add new implementations that are triggered based on a mimetype (or pattern), or redirect to an external command (like ClosureCompiler, UglifyCSS, ...). + +### Sponsors +I'm actively looking for support in the form of donations or sponsorships to keep developing this library and highly appreciate any gesture. Please see the Sponsors button in GitHub for ways to contribute, or contact me directly. + +[![SiteGround](https://www.siteground.com/img/downloads/siteground-logo-black-transparent-vector.svg)](https://www.siteground.com/) + +#### Table of Contents + +- [Minify](#minify) + - [Prologue](#prologue) + - [Installation](#installation) + - [API stability](#api-stability) + - [Testing](#testing) + - [Performance](#performance) + - [HTML](#html) + - [Whitespace removal](#whitespace-removal) + - [CSS](#css) + - [JS](#js) + - [Comparison with other tools](#comparison-with-other-tools) + - [Compression ratio (lower is better)](#compression-ratio-lower-is-better) + - [Time (lower is better)](#time-lower-is-better) + - [JSON](#json) + - [SVG](#svg) + - [XML](#xml) + - [Usage](#usage) + - [New](#new) + - [From reader](#from-reader) + - [From bytes](#from-bytes) + - [From string](#from-string) + - [To reader](#to-reader) + - [To writer](#to-writer) + - [Middleware](#middleware) + - [Custom minifier](#custom-minifier) + - [Mediatypes](#mediatypes) + - [Examples](#examples) + - [Common minifiers](#common-minifiers) + - [External minifiers](#external-minifiers) + - [Closure Compiler](#closure-compiler) + - [UglifyJS](#uglifyjs) + - [esbuild](#esbuild) + - [Custom minifier](#custom-minifier-example) + - 
[ResponseWriter](#responsewriter) + - [Templates](#templates) + - [FAQ](#faq) + - [License](#license) + +### Roadmap + +- [ ] Use ASM/SSE to further speed-up core parts of the parsers/minifiers +- [x] Improve JS minifiers by shortening variables and proper semicolon omission +- [ ] Speed-up SVG minifier, it is very slow +- [x] Proper parser error reporting and line number + column information +- [ ] Generation of source maps (uncertain, might slow down parsers too much if it cannot run separately nicely) +- [ ] Create a cmd to pack webfiles (much like webpack), ie. merging CSS and JS files, inlining small external files, minification and gzipping. This would work on HTML files. + +## Prologue +Minifiers or bindings to minifiers exist in almost all programming languages. Some implementations are merely using several regular expressions to trim whitespace and comments (even though regex for parsing HTML/XML is ill-advised, for a good read see [Regular Expressions: Now You Have Two Problems](http://blog.codinghorror.com/regular-expressions-now-you-have-two-problems/)). Some implementations are much more profound, such as the [YUI Compressor](http://yui.github.io/yuicompressor/) and [Google Closure Compiler](https://github.com/google/closure-compiler) for JS. As most existing implementations either use JavaScript, use regexes, and don't focus on performance, they are pretty slow. + +This minifier proves to be that fast and extensive minifier that can handle HTML and any other filetype it may contain (CSS, JS, ...). It is usually orders of magnitude faster than existing minifiers. 
+ +## Installation +Make sure you have [Git](https://git-scm.com/) and [Go](https://golang.org/dl/) (1.18 or higher) installed, run +``` +mkdir Project +cd Project +go mod init +go get -u github.com/tdewolff/minify/v2 +``` + +Then add the following imports to be able to use the various minifiers +``` go +import ( + "github.com/tdewolff/minify/v2" + "github.com/tdewolff/minify/v2/css" + "github.com/tdewolff/minify/v2/html" + "github.com/tdewolff/minify/v2/js" + "github.com/tdewolff/minify/v2/json" + "github.com/tdewolff/minify/v2/svg" + "github.com/tdewolff/minify/v2/xml" +) +``` + +You can optionally run `go mod tidy` to clean up the `go.mod` and `go.sum` files. + +See [CLI tool](https://github.com/tdewolff/minify/tree/master/cmd/minify) for installation instructions of the binary. + +### Docker + +If you want to use Docker, please see https://hub.docker.com/r/tdewolff/minify. + +```bash +$ docker run -it tdewolff/minify --help +``` + +## API stability +There is no guarantee for absolute stability, but I take issues and bugs seriously and don't take API changes lightly. The library will be maintained in a compatible way unless vital bugs prevent me from doing so. There has been one API change after v1 which added options support and I took the opportunity to push through some more API clean up as well. There are no plans whatsoever for future API changes. + +## Testing +For all subpackages and the imported `parse` package, test coverage of 100% is pursued. Besides full coverage, the minifiers are [fuzz tested](https://github.com/tdewolff/fuzz) using [github.com/dvyukov/go-fuzz](http://www.github.com/dvyukov/go-fuzz), see [the wiki](https://github.com/tdewolff/minify/wiki) for the most important bugs found by fuzz testing. These tests ensure that everything works as intended and that the code does not crash (whatever the input). If you still encounter a bug, please file a [bug report](https://github.com/tdewolff/minify/issues)! 
+ +## Performance +The benchmarks directory contains a number of standardized samples used to compare performance between changes. To give an indication of the speed of this library, I've ran the tests on my Thinkpad T460 (i5-6300U quad-core 2.4GHz running Arch Linux) using Go 1.15. + +``` +name time/op +CSS/sample_bootstrap.css-4 2.70ms ± 0% +CSS/sample_gumby.css-4 3.57ms ± 0% +CSS/sample_fontawesome.css-4 767µs ± 0% +CSS/sample_normalize.css-4 85.5µs ± 0% +HTML/sample_amazon.html-4 15.2ms ± 0% +HTML/sample_bbc.html-4 3.90ms ± 0% +HTML/sample_blogpost.html-4 420µs ± 0% +HTML/sample_es6.html-4 15.6ms ± 0% +HTML/sample_stackoverflow.html-4 3.73ms ± 0% +HTML/sample_wikipedia.html-4 6.60ms ± 0% +JS/sample_ace.js-4 28.7ms ± 0% +JS/sample_dot.js-4 357µs ± 0% +JS/sample_jquery.js-4 10.0ms ± 0% +JS/sample_jqueryui.js-4 20.4ms ± 0% +JS/sample_moment.js-4 3.47ms ± 0% +JSON/sample_large.json-4 3.25ms ± 0% +JSON/sample_testsuite.json-4 1.74ms ± 0% +JSON/sample_twitter.json-4 24.2µs ± 0% +SVG/sample_arctic.svg-4 34.7ms ± 0% +SVG/sample_gopher.svg-4 307µs ± 0% +SVG/sample_usa.svg-4 57.4ms ± 0% +SVG/sample_car.svg-4 18.0ms ± 0% +SVG/sample_tiger.svg-4 5.61ms ± 0% +XML/sample_books.xml-4 54.7µs ± 0% +XML/sample_catalog.xml-4 33.0µs ± 0% +XML/sample_omg.xml-4 7.17ms ± 0% + +name speed +CSS/sample_bootstrap.css-4 50.7MB/s ± 0% +CSS/sample_gumby.css-4 52.1MB/s ± 0% +CSS/sample_fontawesome.css-4 61.2MB/s ± 0% +CSS/sample_normalize.css-4 70.8MB/s ± 0% +HTML/sample_amazon.html-4 31.1MB/s ± 0% +HTML/sample_bbc.html-4 29.5MB/s ± 0% +HTML/sample_blogpost.html-4 49.8MB/s ± 0% +HTML/sample_es6.html-4 65.6MB/s ± 0% +HTML/sample_stackoverflow.html-4 55.0MB/s ± 0% +HTML/sample_wikipedia.html-4 67.5MB/s ± 0% +JS/sample_ace.js-4 22.4MB/s ± 0% +JS/sample_dot.js-4 14.5MB/s ± 0% +JS/sample_jquery.js-4 24.8MB/s ± 0% +JS/sample_jqueryui.js-4 23.0MB/s ± 0% +JS/sample_moment.js-4 28.6MB/s ± 0% +JSON/sample_large.json-4 234MB/s ± 0% +JSON/sample_testsuite.json-4 394MB/s ± 0% +JSON/sample_twitter.json-4 
63.0MB/s ± 0% +SVG/sample_arctic.svg-4 42.4MB/s ± 0% +SVG/sample_gopher.svg-4 19.0MB/s ± 0% +SVG/sample_usa.svg-4 17.8MB/s ± 0% +SVG/sample_car.svg-4 29.3MB/s ± 0% +SVG/sample_tiger.svg-4 12.2MB/s ± 0% +XML/sample_books.xml-4 81.0MB/s ± 0% +XML/sample_catalog.xml-4 58.6MB/s ± 0% +XML/sample_omg.xml-4 159MB/s ± 0% +``` + +## HTML + +HTML (with JS and CSS) minification typically shaves off about 10%. + +The HTML5 minifier uses these minifications: + +- strip unnecessary whitespace and otherwise collapse it to one space (or newline if it originally contained a newline) +- strip superfluous quotes, or uses single/double quotes whichever requires fewer escapes +- strip default attribute values and attribute boolean values +- strip some empty attributes +- strip unrequired tags (`html`, `head`, `body`, ...) +- strip unrequired end tags (`tr`, `td`, `li`, ... and often `p`) +- strip default protocols (`http:`, `https:` and `javascript:`) +- strip all comments (including conditional comments, old IE versions are not supported anymore by Microsoft) +- shorten `doctype` and `meta` charset +- lowercase tags, attributes and some values to enhance gzip compression + +Options: + +- `KeepSpecialComments` preserve all special comments, including Server Side Includes such as `` and IE conditional comments such as `` and ``, see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax +- `KeepDefaultAttrVals` preserve default attribute values such as `` // Faulty JS + req := httptest.NewRequest(http.MethodGet, "/", nil) + rec := httptest.NewRecorder() + m.Middleware(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "text/html") + _, _ = w.Write([]byte(input)) + + if err = w.(io.Closer).Close(); err != nil { + panic(err) + } + })).ServeHTTP(rec, req) +} +``` + +#### ResponseWriter +``` go +func Serve(w http.ResponseWriter, r *http.Request) { + mw := m.ResponseWriter(w, r) + defer mw.Close() + w = mw + + http.ServeFile(w, r, 
path.Join("www", r.URL.Path)) +} +``` + +#### Custom response writer +ResponseWriter example which returns a ResponseWriter that minifies the content and then writes to the original ResponseWriter. Any write after applying this filter will be minified. +``` go +type MinifyResponseWriter struct { + http.ResponseWriter + io.WriteCloser +} + +func (m MinifyResponseWriter) Write(b []byte) (int, error) { + return m.WriteCloser.Write(b) +} + +// MinifyResponseWriter must be closed explicitly by calling site. +func MinifyFilter(mediatype string, res http.ResponseWriter) MinifyResponseWriter { + m := minify.New() + // add minfiers + + mw := m.Writer(mediatype, res) + return MinifyResponseWriter{res, mw} +} +``` + +``` go +// Usage +func(w http.ResponseWriter, req *http.Request) { + w = MinifyFilter("text/html", w) + if _, err := io.WriteString(w, "

This HTTP response will be minified.

"); err != nil { + panic(err) + } + if err := w.Close(); err != nil { + panic(err) + } + // Output:

This HTTP response will be minified. +} +``` + +### Templates + +Here's an example of a replacement for `template.ParseFiles` from `template/html`, which automatically minifies each template before parsing it. + +Be aware that minifying templates will work in most cases but not all. Because the HTML minifier only works for valid HTML5, your template must be valid HTML5 of itself. Template tags are parsed as regular text by the minifier. + +``` go +func compileTemplates(filenames ...string) (*template.Template, error) { + m := minify.New() + m.AddFunc("text/html", html.Minify) + + var tmpl *template.Template + for _, filename := range filenames { + name := filepath.Base(filename) + if tmpl == nil { + tmpl = template.New(name) + } else { + tmpl = tmpl.New(name) + } + + b, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + mb, err := m.Bytes("text/html", b) + if err != nil { + return nil, err + } + tmpl.Parse(string(mb)) + } + return tmpl, nil +} +``` + +Example usage: + +``` go +templates := template.Must(compileTemplates("view.html", "home.html")) +``` + +## FAQ +### Newlines remain in minified output +While you might expect the minified output to be on a single line for it to be fully minified, this is not true. In many cases, using a literal newline doesn't affect the file size, and in some cases it may even reduce the file size. + +A typical example is HTML. Whitespace is significant in HTML, meaning that spaces and newlines between or around tags may affect how they are displayed. There is no distinction between a space or a newline and they may be interchanged without affecting the displayed HTML. Remember that a space (0x20) and a newline (0x0A) are both one byte long, so that there is no difference in file size when interchanging them. This minifier removes unnecessary whitespace by replacing stretches of spaces and newlines by a single whitespace character. 
Specifically, if the stretch of white space characters contains a newline, it will replace it by a newline and otherwise by a space. This doesn't affect the file size, but may help somewhat for debugging or file transmission objectives. + +Another example is JavaScript. Single or double quoted string literals may not contain newline characters but instead need to escape them as `\n`. These are two bytes instead of a single newline byte. Using template literals it is allowed to have literal newline characters and we can use that fact to shave-off one byte! The result is that the minified output contains newlines instead of escaped newline characters, which makes the final file size smaller. Of course, changing from single or double quotes to template literals depends on other factors as well, and this minifier makes a calculation whether the template literal results in a shorter file size or not before converting a string literal. + +## License +Released under the [MIT license](LICENSE.md). + +[1]: http://golang.org/ "Go Language" diff --git a/vendor/github.com/tdewolff/minify/v2/common.go b/vendor/github.com/tdewolff/minify/v2/common.go new file mode 100644 index 0000000..3773a9b --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/common.go @@ -0,0 +1,524 @@ +package minify + +import ( + "bytes" + "encoding/base64" + + "github.com/tdewolff/parse/v2" + "github.com/tdewolff/parse/v2/strconv" +) + +var ( + textMimeBytes = []byte("text/plain") + charsetASCIIBytes = []byte("charset=us-ascii") + dataBytes = []byte("data:") + base64Bytes = []byte(";base64") +) + +// Epsilon is the closest number to zero that is not considered to be zero. +var Epsilon = 0.00001 + +// Mediatype minifies a given mediatype by removing all whitespace and lowercasing all parts except strings (which may be case sensitive). 
+func Mediatype(b []byte) []byte { + j := 0 + inString := false + start, lastString := 0, 0 + for i, c := range b { + if !inString && parse.IsWhitespace(c) { + if start != 0 { + j += copy(b[j:], b[start:i]) + } else { + j += i + } + start = i + 1 + } else if c == '"' { + inString = !inString + if inString { + if i-lastString < 1024 { // ToLower may otherwise slow down minification greatly + parse.ToLower(b[lastString:i]) + } + } else { + lastString = j + (i + 1 - start) + } + } + } + if start != 0 { + j += copy(b[j:], b[start:]) + parse.ToLower(b[lastString:j]) + return b[:j] + } + parse.ToLower(b[lastString:]) + return b +} + +// DataURI minifies a data URI and calls a minifier by the specified mediatype. Specifications: https://www.ietf.org/rfc/rfc2397.txt. +func DataURI(m *M, dataURI []byte) []byte { + origData := parse.Copy(dataURI) + mediatype, data, err := parse.DataURI(dataURI) + if err != nil { + return dataURI + } + + data, _ = m.Bytes(string(mediatype), data) + base64Len := len(";base64") + base64.StdEncoding.EncodedLen(len(data)) + asciiLen := len(data) + for _, c := range data { + if parse.DataURIEncodingTable[c] { + asciiLen += 2 + } + if asciiLen > base64Len { + break + } + } + if len(origData) < base64Len && len(origData) < asciiLen { + return origData + } + if base64Len < asciiLen { + encoded := make([]byte, base64Len-len(";base64")) + base64.StdEncoding.Encode(encoded, data) + data = encoded + mediatype = append(mediatype, base64Bytes...) 
+ } else { + data = parse.EncodeURL(data, parse.DataURIEncodingTable) + } + if len("text/plain") <= len(mediatype) && parse.EqualFold(mediatype[:len("text/plain")], textMimeBytes) { + mediatype = mediatype[len("text/plain"):] + } + for i := 0; i+len(";charset=us-ascii") <= len(mediatype); i++ { + // must start with semicolon and be followed by end of mediatype or semicolon + if mediatype[i] == ';' && parse.EqualFold(mediatype[i+1:i+len(";charset=us-ascii")], charsetASCIIBytes) && (i+len(";charset=us-ascii") >= len(mediatype) || mediatype[i+len(";charset=us-ascii")] == ';') { + mediatype = append(mediatype[:i], mediatype[i+len(";charset=us-ascii"):]...) + break + } + } + return append(append(append(dataBytes, mediatype...), ','), data...) +} + +// MaxInt is the maximum value of int. +const MaxInt = int(^uint(0) >> 1) + +// MinInt is the minimum value of int. +const MinInt = -MaxInt - 1 + +// Decimal minifies a given byte slice containing a decimal and removes superfluous characters. It differs from Number in that it does not parse exponents. +// It does not parse or output exponents. prec is the number of significant digits. When prec is zero it will keep all digits. Only digits after the dot can be removed to reach the number of significant digits. Very large number may thus have more significant digits. +func Decimal(num []byte, prec int) []byte { + if len(num) <= 1 { + return num + } + + // omit first + and register mantissa start and end, whether it's negative and the exponent + neg := false + start := 0 + dot := -1 + end := len(num) + if 0 < end && (num[0] == '+' || num[0] == '-') { + if num[0] == '-' { + neg = true + } + start++ + } + for i, c := range num[start:] { + if c == '.' 
{ + dot = start + i + break + } + } + if dot == -1 { + dot = end + } + + // trim leading zeros but leave at least one digit + for start < end-1 && num[start] == '0' { + start++ + } + // trim trailing zeros + i := end - 1 + for ; dot < i; i-- { + if num[i] != '0' { + end = i + 1 + break + } + } + if i == dot { + end = dot + if start == end { + num[start] = '0' + return num[start : start+1] + } + } else if start == end-1 && num[start] == '0' { + return num[start:end] + } + + // apply precision + if 0 < prec && dot <= start+prec { + precEnd := start + prec + 1 // include dot + if dot == start { // for numbers like .012 + digit := start + 1 + for digit < end && num[digit] == '0' { + digit++ + } + precEnd = digit + prec + } + if precEnd < end { + end = precEnd + + // process either an increase from a lesser significant decimal (>= 5) + // or remove trailing zeros after the dot, or both + i := end - 1 + inc := '5' <= num[end] + for ; start < i; i-- { + if i == dot { + // no-op + } else if inc && num[i] != '9' { + num[i]++ + inc = false + break + } else if inc && i < dot { // end inc for integer + num[i] = '0' + } else if !inc && (i < dot || num[i] != '0') { + break + } + } + if i < dot { + end = dot + } else { + end = i + 1 + } + + if inc { + if dot == start && end == start+1 { + num[start] = '1' + } else if num[start] == '9' { + num[start] = '1' + num[start+1] = '0' + end++ + } else { + num[start]++ + } + } + } + } + + if neg { + start-- + num[start] = '-' + } + return num[start:end] +} + +// Number minifies a given byte slice containing a number and removes superfluous characters. +func Number(num []byte, prec int) []byte { + if len(num) <= 1 { + return num + } + + // omit first + and register mantissa start and end, whether it's negative and the exponent + neg := false + start := 0 + dot := -1 + end := len(num) + origExp := 0 + if num[0] == '+' || num[0] == '-' { + if num[0] == '-' { + neg = true + } + start++ + } + for i, c := range num[start:] { + if c == '.' 
{ + dot = start + i + } else if c == 'e' || c == 'E' { + end = start + i + i += start + 1 + if i < len(num) && num[i] == '+' { + i++ + } + if tmpOrigExp, n := strconv.ParseInt(num[i:]); 0 < n && int64(MinInt) <= tmpOrigExp && tmpOrigExp <= int64(MaxInt) { + // range checks for when int is 32 bit + origExp = int(tmpOrigExp) + } else { + return num + } + break + } + } + if dot == -1 { + dot = end + } + + // trim leading zeros but leave at least one digit + for start < end-1 && num[start] == '0' { + start++ + } + // trim trailing zeros + i := end - 1 + for ; dot < i; i-- { + if num[i] != '0' { + end = i + 1 + break + } + } + if i == dot { + end = dot + if start == end { + num[start] = '0' + return num[start : start+1] + } + } else if start == end-1 && num[start] == '0' { + return num[start:end] + } + + // apply precision + if 0 < prec { //&& (dot <= start+prec || start+prec+1 < dot || 0 < origExp) { // don't minify 9 to 10, but do 999 to 1e3 and 99e1 to 1e3 + precEnd := start + prec + if dot == start { // for numbers like .012 + digit := start + 1 + for digit < end && num[digit] == '0' { + digit++ + } + precEnd = digit + prec + } else if dot < precEnd { // for numbers where precision will include the dot + precEnd++ + } + if precEnd < end && (dot < end || 1 < dot-precEnd+origExp) { // do not minify 9=>10 or 99=>100 or 9e1=>1e2 (but 90), but 999=>1e3 and 99e1=>1e3 + end = precEnd + inc := '5' <= num[end] + if dot == end { + inc = end+1 < len(num) && '5' <= num[end+1] + } + if precEnd < dot { + origExp += dot - precEnd + dot = precEnd + } + // process either an increase from a lesser significant decimal (>= 5) + // and remove trailing zeros + i := end - 1 + for ; start < i; i-- { + if i == dot { + // no-op + } else if inc && num[i] != '9' { + num[i]++ + inc = false + break + } else if !inc && num[i] != '0' { + break + } + } + end = i + 1 + if end < dot { + origExp += dot - end + dot = end + } + if inc { // single digit left + if dot == start { + num[start] = '1' + dot = 
start + 1 + } else if num[start] == '9' { + num[start] = '1' + origExp++ + } else { + num[start]++ + } + } + } + } + + // n is the number of significant digits + // normExp would be the exponent if it were normalised (0.1 <= f < 1) + n := 0 + normExp := 0 + if dot == start { + for i = dot + 1; i < end; i++ { + if num[i] != '0' { + n = end - i + normExp = dot - i + 1 + break + } + } + } else if dot == end { + normExp = end - start + for i = end - 1; start <= i; i-- { + if num[i] != '0' { + n = i + 1 - start + end = i + 1 + break + } + } + } else { + n = end - start - 1 + normExp = dot - start + } + + if origExp < 0 && (normExp < MinInt-origExp || normExp-n < MinInt-origExp) || 0 < origExp && (MaxInt-origExp < normExp || MaxInt-origExp < normExp-n) { + return num // exponent overflow + } + normExp += origExp + + // intExp would be the exponent if it were an integer + intExp := normExp - n + lenIntExp := strconv.LenInt(int64(intExp)) + lenNormExp := strconv.LenInt(int64(normExp)) + + // there are three cases to consider when printing the number + // case 1: without decimals and with a positive exponent (large numbers: 5e4) + // case 2: with decimals and with a negative exponent (small numbers with many digits: .123456e-4) + // case 3: with decimals and without an exponent (around zero: 5.6) + // case 4: without decimals and with a negative exponent (small numbers: 123456e-9) + if n <= normExp { + // case 1: print number with positive exponent + if dot < end { + // remove dot, either from the front or copy the smallest part + if dot == start { + start = end - n + } else if dot-start < end-dot-1 { + copy(num[start+1:], num[start:dot]) + start++ + } else { + copy(num[dot:], num[dot+1:end]) + end-- + } + } + if n+3 <= normExp { + num[end] = 'e' + end++ + for i := end + lenIntExp - 1; end <= i; i-- { + num[i] = byte(intExp%10) + '0' + intExp /= 10 + } + end += lenIntExp + } else if n+2 == normExp { + num[end] = '0' + num[end+1] = '0' + end += 2 + } else if n+1 == normExp { 
+ num[end] = '0' + end++ + } + } else if normExp < -3 && lenNormExp < lenIntExp && dot < end { + // case 2: print normalized number (0.1 <= f < 1) + zeroes := -normExp + origExp + if 0 < zeroes { + copy(num[start+1:], num[start+1+zeroes:end]) + end -= zeroes + } else if zeroes < 0 { + copy(num[start+1:], num[start:dot]) + num[start] = '.' + } + num[end] = 'e' + num[end+1] = '-' + end += 2 + for i := end + lenNormExp - 1; end <= i; i-- { + num[i] = -byte(normExp%10) + '0' + normExp /= 10 + } + end += lenNormExp + } else if -lenIntExp-1 <= normExp { + // case 3: print number without exponent + zeroes := -normExp + if 0 < zeroes { + // dot placed at the front and negative exponent, adding zeroes + newDot := end - n - zeroes - 1 + if newDot != dot { + d := start - newDot + if 0 < d { + if dot < end { + // copy original digits after the dot towards the end + copy(num[dot+1+d:], num[dot+1:end]) + if start < dot { + // copy original digits before the dot towards the end + copy(num[start+d+1:], num[start:dot]) + } + } else if start < dot { + // copy original digits before the dot towards the end + copy(num[start+d:], num[start:dot]) + } + newDot = start + end += d + } else { + start += -d + } + num[newDot] = '.' + for i := 0; i < zeroes; i++ { + num[newDot+1+i] = '0' + } + } + } else { + // dot placed in the middle of the number + if dot == start { + // when there are zeroes after the dot + dot = end - n - 1 + start = dot + } else if end <= dot { + // when input has no dot in it + dot = end + end++ + } + newDot := start + normExp + // move digits between dot and newDot towards the end + if dot < newDot { + copy(num[dot:], num[dot+1:newDot+1]) + } else if newDot < dot { + copy(num[newDot+1:], num[newDot:dot]) + } + num[newDot] = '.' 
+ } + } else { + // case 4: print number with negative exponent + // find new end, considering moving numbers to the front, removing the dot and increasing the length of the exponent + newEnd := end + if dot == start { + newEnd = start + n + } else { + newEnd-- + } + newEnd += 2 + lenIntExp + + exp := intExp + lenExp := lenIntExp + if newEnd < len(num) { + // it saves space to convert the decimal to an integer and decrease the exponent + if dot < end { + if dot == start { + copy(num[start:], num[end-n:end]) + end = start + n + } else { + copy(num[dot:], num[dot+1:end]) + end-- + } + } + } else { + // it does not save space and will panic, so we revert to the original representation + exp = origExp + lenExp = 1 + if origExp <= -10 || 10 <= origExp { + lenExp = strconv.LenInt(int64(origExp)) + } + } + num[end] = 'e' + num[end+1] = '-' + end += 2 + for i := end + lenExp - 1; end <= i; i-- { + num[i] = -byte(exp%10) + '0' + exp /= 10 + } + end += lenExp + } + + if neg { + start-- + num[start] = '-' + } + return num[start:end] +} + +func UpdateErrorPosition(err error, input *parse.Input, offset int) error { + if perr, ok := err.(*parse.Error); ok { + r := bytes.NewBuffer(input.Bytes()) + line, column, _ := parse.Position(r, offset) + perr.Line += line - 1 + perr.Column += column - 1 + return perr + } + return err +} diff --git a/vendor/github.com/tdewolff/minify/v2/css/css.go b/vendor/github.com/tdewolff/minify/v2/css/css.go new file mode 100644 index 0000000..39b3828 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/css/css.go @@ -0,0 +1,1559 @@ +// Package css minifies CSS3 following the specifications at http://www.w3.org/TR/css-syntax-3/. 
+package css + +import ( + "bytes" + "fmt" + "io" + "math" + "sort" + "strconv" + "strings" + + "github.com/tdewolff/minify/v2" + "github.com/tdewolff/parse/v2" + "github.com/tdewolff/parse/v2/css" + strconvParse "github.com/tdewolff/parse/v2/strconv" +) + +var ( + spaceBytes = []byte(" ") + colonBytes = []byte(":") + semicolonBytes = []byte(";") + commaBytes = []byte(",") + leftBracketBytes = []byte("{") + rightBracketBytes = []byte("}") + rightParenBytes = []byte(")") + urlBytes = []byte("url(") + varBytes = []byte("var(") + zeroBytes = []byte("0") + oneBytes = []byte("1") + transparentBytes = []byte("transparent") + blackBytes = []byte("#0000") + initialBytes = []byte("initial") + noneBytes = []byte("none") + autoBytes = []byte("auto") + leftBytes = []byte("left") + topBytes = []byte("top") + n400Bytes = []byte("400") + n700Bytes = []byte("700") + n50pBytes = []byte("50%") + n100pBytes = []byte("100%") + repeatXBytes = []byte("repeat-x") + repeatYBytes = []byte("repeat-y") + importantBytes = []byte("!important") + dataSchemeBytes = []byte("data:") +) + +type cssMinifier struct { + m *minify.M + w io.Writer + p *css.Parser + o *Minifier + + tokenBuffer []Token + tokensLevel int +} + +//////////////////////////////////////////////////////////////// + +// Minifier is a CSS minifier. +type Minifier struct { + KeepCSS2 bool + Precision int // number of significant digits + newPrecision int // precision for new numbers +} + +// Minify minifies CSS data, it reads from r and writes to w. +func Minify(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error { + return (&Minifier{}).Minify(m, w, r, params) +} + +// Token is a parsed token with extra information for functions. 
+type Token struct { + css.TokenType + Data []byte + Args []Token // only filled for functions + Fun, Ident Hash // only filled for functions and identifiers respectively +} + +func (t Token) String() string { + if len(t.Args) == 0 { + return t.TokenType.String() + "(" + string(t.Data) + ")" + } + + sb := strings.Builder{} + sb.Write(t.Data) + for _, arg := range t.Args { + sb.WriteString(arg.String()) + } + sb.WriteByte(')') + return sb.String() +} + +// Equal returns true if both tokens are equal. +func (t Token) Equal(t2 Token) bool { + if t.TokenType == t2.TokenType && bytes.Equal(t.Data, t2.Data) && len(t.Args) == len(t2.Args) { + for i := 0; i < len(t.Args); i++ { + if !t.Args[i].Equal(t2.Args[i]) { + return false + } + } + return true + } + return false +} + +// IsZero return true if a dimension, percentage, or number token is zero. +func (t Token) IsZero() bool { + // as each number is already minified, starting with a zero means it is zero + return (t.TokenType == css.DimensionToken || t.TokenType == css.PercentageToken || t.TokenType == css.NumberToken) && t.Data[0] == '0' +} + +// IsLength returns true if the token is a length. +func (t Token) IsLength() bool { + if t.TokenType == css.DimensionToken { + return true + } else if t.TokenType == css.NumberToken && t.Data[0] == '0' { + return true + } else if t.TokenType == css.FunctionToken { + fun := ToHash(t.Data[:len(t.Data)-1]) + if fun == Calc || fun == Min || fun == Max || fun == Clamp || fun == Attr || fun == Var || fun == Env { + return true + } + } + return false +} + +// IsLengthPercentage returns true if the token is a length or percentage token. +func (t Token) IsLengthPercentage() bool { + return t.TokenType == css.PercentageToken || t.IsLength() +} + +//////////////////////////////////////////////////////////////// + +// Minify minifies CSS data, it reads from r and writes to w. 
+func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error { + o.newPrecision = o.Precision + if o.newPrecision <= 0 || 15 < o.newPrecision { + o.newPrecision = 15 // minimum number of digits a double can represent exactly + } + + z := parse.NewInput(r) + defer z.Restore() + + isInline := params != nil && params["inline"] == "1" + c := &cssMinifier{ + m: m, + w: w, + p: css.NewParser(z, isInline), + o: o, + } + c.minifyGrammar() + + if _, err := w.Write(nil); err != nil { + return err + } + if c.p.Err() == io.EOF { + return nil + } + return c.p.Err() +} + +func (c *cssMinifier) minifyGrammar() { + semicolonQueued := false + for { + gt, _, data := c.p.Next() + switch gt { + case css.ErrorGrammar: + if c.p.HasParseError() { + if semicolonQueued { + c.w.Write(semicolonBytes) + } + + // write out the offending declaration (but save the semicolon) + vals := c.p.Values() + if len(vals) > 0 && vals[len(vals)-1].TokenType == css.SemicolonToken { + vals = vals[:len(vals)-1] + semicolonQueued = true + } + for _, val := range vals { + c.w.Write(val.Data) + } + continue + } + return + case css.EndAtRuleGrammar, css.EndRulesetGrammar: + c.w.Write(rightBracketBytes) + semicolonQueued = false + continue + } + + if semicolonQueued { + c.w.Write(semicolonBytes) + semicolonQueued = false + } + + switch gt { + case css.AtRuleGrammar: + c.w.Write(data) + values := c.p.Values() + if ToHash(data[1:]) == Import && len(values) == 2 && values[1].TokenType == css.URLToken && 4 < len(values[1].Data) && values[1].Data[len(values[1].Data)-1] == ')' { + url := values[1].Data + if url[4] != '"' && url[4] != '\'' { + a := 4 + for parse.IsWhitespace(url[a]) || parse.IsNewline(url[a]) { + a++ + } + b := len(url) - 2 + for a < b && (parse.IsWhitespace(url[b]) || parse.IsNewline(url[b])) { + b-- + } + if a == b { + url = url[:2] + } else { + url = url[a-1 : b+2] + } + url[0] = '"' + url[len(url)-1] = '"' + } else { + url = url[4 : len(url)-1] + } + 
values[1].Data = url + } + for _, val := range values { + c.w.Write(val.Data) + } + semicolonQueued = true + case css.BeginAtRuleGrammar: + c.w.Write(data) + for _, val := range c.p.Values() { + c.w.Write(val.Data) + } + c.w.Write(leftBracketBytes) + case css.QualifiedRuleGrammar: + c.minifySelectors(data, c.p.Values()) + c.w.Write(commaBytes) + case css.BeginRulesetGrammar: + c.minifySelectors(data, c.p.Values()) + c.w.Write(leftBracketBytes) + case css.DeclarationGrammar: + c.minifyDeclaration(data, c.p.Values()) + semicolonQueued = true + case css.CustomPropertyGrammar: + c.w.Write(data) + c.w.Write(colonBytes) + value := parse.TrimWhitespace(c.p.Values()[0].Data) + if len(c.p.Values()[0].Data) != 0 && len(value) == 0 { + value = spaceBytes + } + c.w.Write(value) + semicolonQueued = true + case css.CommentGrammar: + if len(data) > 5 && data[1] == '*' && data[2] == '!' { + c.w.Write(data[:3]) + comment := parse.TrimWhitespace(parse.ReplaceMultipleWhitespace(data[3 : len(data)-2])) + c.w.Write(comment) + c.w.Write(data[len(data)-2:]) + } + default: + c.w.Write(data) + } + } +} + +func (c *cssMinifier) minifySelectors(property []byte, values []css.Token) { + inAttr := false + isClass := false + for _, val := range c.p.Values() { + if !inAttr { + if val.TokenType == css.IdentToken { + if !isClass { + parse.ToLower(val.Data) + } + isClass = false + } else if val.TokenType == css.DelimToken && val.Data[0] == '.' 
{ + isClass = true + } else if val.TokenType == css.LeftBracketToken { + inAttr = true + } + } else { + if val.TokenType == css.StringToken && len(val.Data) > 2 { + s := val.Data[1 : len(val.Data)-1] + if css.IsIdent(s) { + c.w.Write(s) + continue + } + } else if val.TokenType == css.RightBracketToken { + inAttr = false + } else if val.TokenType == css.IdentToken && len(val.Data) == 1 && (val.Data[0] == 'i' || val.Data[0] == 'I') { + c.w.Write(spaceBytes) + } + } + c.w.Write(val.Data) + } +} + +func (c *cssMinifier) parseFunction(values []css.Token) ([]Token, int) { + i := 1 + level := 0 + args := []Token{} + for ; i < len(values); i++ { + tt := values[i].TokenType + data := values[i].Data + if tt == css.LeftParenthesisToken { + level++ + } else if tt == css.RightParenthesisToken { + if level == 0 { + i++ + break + } + level-- + } + if tt == css.FunctionToken { + subArgs, di := c.parseFunction(values[i:]) + h := ToHash(parse.ToLower(parse.Copy(data[:len(data)-1]))) // TODO: use ToHashFold + args = append(args, Token{tt, data, subArgs, h, 0}) + i += di - 1 + } else { + var h Hash + if tt == css.IdentToken { + h = ToHash(parse.ToLower(parse.Copy(data))) // TODO: use ToHashFold + } + args = append(args, Token{tt, data, nil, 0, h}) + } + } + return args, i +} + +func (c *cssMinifier) parseDeclaration(values []css.Token) []Token { + // Check if this is a simple list of values separated by whitespace or commas, otherwise we'll not be processing + prevSep := true + tokens := c.tokenBuffer[:0] + for i := 0; i < len(values); i++ { + tt := values[i].TokenType + data := values[i].Data + if tt == css.LeftParenthesisToken || tt == css.LeftBraceToken || tt == css.LeftBracketToken || + tt == css.RightParenthesisToken || tt == css.RightBraceToken || tt == css.RightBracketToken { + return nil + } + + if !prevSep && tt != css.WhitespaceToken && tt != css.CommaToken && (tt != css.DelimToken || values[i].Data[0] != '/') { + return nil + } + + if tt == css.WhitespaceToken || tt == 
css.CommaToken || tt == css.DelimToken && values[i].Data[0] == '/' { + if tt != css.WhitespaceToken { + tokens = append(tokens, Token{tt, data, nil, 0, 0}) + } + prevSep = true + } else if tt == css.FunctionToken { + args, di := c.parseFunction(values[i:]) + h := ToHash(parse.ToLower(parse.Copy(data[:len(data)-1]))) // TODO: use ToHashFold + tokens = append(tokens, Token{tt, data, args, h, 0}) + prevSep = true + i += di - 1 + } else { + var h Hash + if tt == css.IdentToken { + h = ToHash(parse.ToLower(parse.Copy(data))) // TODO: use ToHashFold + } + tokens = append(tokens, Token{tt, data, nil, 0, h}) + prevSep = tt == css.URLToken + } + } + c.tokenBuffer = tokens // update buffer size for memory reuse + return tokens +} + +func (c *cssMinifier) minifyDeclaration(property []byte, components []css.Token) { + c.w.Write(property) + c.w.Write(colonBytes) + + if len(components) == 0 { + return + } + + // Strip !important from the component list, this will be added later separately + important := false + if len(components) > 2 && components[len(components)-2].TokenType == css.DelimToken && components[len(components)-2].Data[0] == '!' && ToHash(components[len(components)-1].Data) == Important { + components = components[:len(components)-2] + important = true + } + + prop := ToHash(property) + values := c.parseDeclaration(components) + + // Do not process complex values (eg. containing blocks or is not alternated between whitespace/commas and flat values + if values == nil { + if prop == Filter && len(components) == 11 { + if bytes.Equal(components[0].Data, []byte("progid")) && + components[1].TokenType == css.ColonToken && + bytes.Equal(components[2].Data, []byte("DXImageTransform")) && + components[3].Data[0] == '.' && + bytes.Equal(components[4].Data, []byte("Microsoft")) && + components[5].Data[0] == '.' 
&& + bytes.Equal(components[6].Data, []byte("Alpha(")) && + bytes.Equal(parse.ToLower(components[7].Data), []byte("opacity")) && + components[8].Data[0] == '=' && + components[10].Data[0] == ')' { + components = components[6:] + components[0].Data = []byte("alpha(") + } + } + + for _, component := range components { + c.w.Write(component.Data) + } + if important { + c.w.Write(importantBytes) + } + return + } + + values = c.minifyTokens(prop, 0, values) + if 0 < len(values) { + values = c.minifyProperty(prop, values) + } + c.writeDeclaration(values, important) +} + +func (c *cssMinifier) writeFunction(args []Token) { + for _, arg := range args { + c.w.Write(arg.Data) + if arg.TokenType == css.FunctionToken { + c.writeFunction(arg.Args) + c.w.Write(rightParenBytes) + } + } +} + +func (c *cssMinifier) writeDeclaration(values []Token, important bool) { + prevSep := true + for _, value := range values { + if !prevSep && value.TokenType != css.CommaToken && (value.TokenType != css.DelimToken || value.Data[0] != '/') { + c.w.Write(spaceBytes) + } + + c.w.Write(value.Data) + if value.TokenType == css.FunctionToken { + c.writeFunction(value.Args) + c.w.Write(rightParenBytes) + } + + if value.TokenType == css.CommaToken || value.TokenType == css.DelimToken && value.Data[0] == '/' || value.TokenType == css.FunctionToken || value.TokenType == css.URLToken { + prevSep = true + } else { + prevSep = false + } + } + + if important { + c.w.Write(importantBytes) + } +} + +func (c *cssMinifier) minifyTokens(prop Hash, fun Hash, values []Token) []Token { + if 100 < c.tokensLevel+1 { + return values + } + c.tokensLevel++ + + for i, value := range values { + tt := value.TokenType + switch tt { + case css.NumberToken: + if prop == Z_Index || prop == Counter_Increment || prop == Counter_Reset || prop == Orphans || prop == Widows { + break // integers + } + if c.o.KeepCSS2 { + values[i].Data = minify.Decimal(values[i].Data, c.o.Precision) // don't use exponents + } else { + values[i].Data 
= minify.Number(values[i].Data, c.o.Precision) + } + case css.PercentageToken: + n := len(values[i].Data) - 1 + if c.o.KeepCSS2 { + values[i].Data = minify.Decimal(values[i].Data[:n], c.o.Precision) // don't use exponents + } else { + values[i].Data = minify.Number(values[i].Data[:n], c.o.Precision) + } + values[i].Data = append(values[i].Data, '%') + case css.DimensionToken: + var dim []byte + values[i], dim = c.minifyDimension(values[i]) + if 1 < len(values[i].Data) && values[i].Data[0] == '0' && optionalZeroDimension[string(dim)] && prop != Flex && fun == 0 { + // cut dimension for zero value, TODO: don't hardcode check for Flex and remove the dimension in minifyDimension + values[i].Data = values[i].Data[:1] + } + case css.StringToken: + values[i].Data = removeMarkupNewlines(values[i].Data) + case css.URLToken: + if 10 < len(values[i].Data) { + uri := parse.TrimWhitespace(values[i].Data[4 : len(values[i].Data)-1]) + delim := byte('"') + if 1 < len(uri) && (uri[0] == '\'' || uri[0] == '"') { + delim = uri[0] + uri = removeMarkupNewlines(uri) + uri = uri[1 : len(uri)-1] + } + if 4 < len(uri) && parse.EqualFold(uri[:5], dataSchemeBytes) { + uri = minify.DataURI(c.m, uri) + } + if css.IsURLUnquoted(uri) { + values[i].Data = append(append(urlBytes, uri...), ')') + } else { + values[i].Data = append(append(append(urlBytes, delim), uri...), delim, ')') + } + } + case css.FunctionToken: + values[i].Args = c.minifyTokens(prop, values[i].Fun, values[i].Args) + + fun := values[i].Fun + args := values[i].Args + if fun == Rgb || fun == Rgba || fun == Hsl || fun == Hsla { + valid := true + vals := []float64{} + for i, arg := range args { + numeric := arg.TokenType == css.NumberToken || arg.TokenType == css.PercentageToken + separator := arg.TokenType == css.CommaToken || i != 5 && arg.TokenType == css.WhitespaceToken || i == 5 && arg.TokenType == css.DelimToken && arg.Data[0] == '/' + if i%2 == 0 && !numeric || i%2 == 1 && !separator { + valid = false + break + } else if 
numeric { + var d float64 + if arg.TokenType == css.PercentageToken { + var err error + d, err = strconv.ParseFloat(string(arg.Data[:len(arg.Data)-1]), 32) // can overflow + if err != nil { + valid = false + break + } + d /= 100.0 + if d < minify.Epsilon { + d = 0.0 + } else if 1.0-minify.Epsilon < d { + d = 1.0 + } + } else { + var err error + d, err = strconv.ParseFloat(string(arg.Data), 32) // can overflow + if err != nil { + valid = false + break + } + } + vals = append(vals, d) + } + } + if !valid { + break + } + + a := 1.0 + if len(vals) == 4 { + if vals[0] < minify.Epsilon && vals[1] < minify.Epsilon && vals[2] < minify.Epsilon && vals[3] < minify.Epsilon { + values[i] = Token{css.IdentToken, transparentBytes, nil, 0, Transparent} + break + } else if 1.0-minify.Epsilon < vals[3] { + vals = vals[:3] + values[i].Args = values[i].Args[:len(values[i].Args)-2] + if fun == Rgba || fun == Hsla { + values[i].Data = values[i].Data[:len(values[i].Data)-1] + values[i].Data[len(values[i].Data)-1] = '(' + } + } else { + a = vals[3] + } + } + + if a == 1.0 && (len(vals) == 3 || len(vals) == 4) { // only minify color if fully opaque + if fun == Rgb || fun == Rgba { + for j := 0; j < 3; j++ { + if args[j*2].TokenType == css.NumberToken { + vals[j] /= 255.0 + if vals[j] < minify.Epsilon { + vals[j] = 0.0 + } else if 1.0-minify.Epsilon < vals[j] { + vals[j] = 1.0 + } + } + } + values[i] = rgbToToken(vals[0], vals[1], vals[2]) + break + } else if fun == Hsl || fun == Hsla && args[0].TokenType == css.NumberToken && args[2].TokenType == css.PercentageToken && args[4].TokenType == css.PercentageToken { + vals[0] /= 360.0 + _, vals[0] = math.Modf(vals[0]) + if vals[0] < 0.0 { + vals[0] = 1.0 + vals[0] + } + r, g, b := css.HSL2RGB(vals[0], vals[1], vals[2]) + values[i] = rgbToToken(r, g, b) + break + } + } else if len(vals) == 4 { + args[6] = minifyNumberPercentage(args[6]) + } + + if 3 <= len(vals) && (fun == Rgb || fun == Rgba) { + // 0%, 20%, 40%, 60%, 80% and 100% can be 
represented exactly as, 51, 102, 153, 204, and 255 respectively + removePercentage := true + for j := 0; j < 3; j++ { + if args[j*2].TokenType != css.PercentageToken || 2.0*minify.Epsilon <= math.Mod(vals[j]+minify.Epsilon, 0.2) { + removePercentage = false + break + } + } + if removePercentage { + for j := 0; j < 3; j++ { + args[j*2].TokenType = css.NumberToken + if vals[j] < minify.Epsilon { + args[j*2].Data = zeroBytes + } else if math.Abs(vals[j]-0.2) < minify.Epsilon { + args[j*2].Data = []byte("51") + } else if math.Abs(vals[j]-0.4) < minify.Epsilon { + args[j*2].Data = []byte("102") + } else if math.Abs(vals[j]-0.6) < minify.Epsilon { + args[j*2].Data = []byte("153") + } else if math.Abs(vals[j]-0.8) < minify.Epsilon { + args[j*2].Data = []byte("204") + } else if math.Abs(vals[j]-1.0) < minify.Epsilon { + args[j*2].Data = []byte("255") + } + } + } + } + } + } + } + c.tokensLevel-- + return values +} + +func (c *cssMinifier) minifyProperty(prop Hash, values []Token) []Token { + // limit maximum to prevent slow recursions (e.g. 
for background's append) + if 100 < len(values) { + return values + } + + switch prop { + case Font: + if len(values) > 1 { // must contain atleast font-size and font-family + // the font-families are separated by commas and are at the end of font + // get index for last token before font family names + i := len(values) - 1 + for j, value := range values[2:] { + if value.TokenType == css.CommaToken { + i = 2 + j - 1 // identifier before first comma is a font-family + break + } + } + i-- + + // advance i while still at font-families when they contain spaces but no quotes + for ; i > 0; i-- { // i cannot be 0, font-family must be prepended by font-size + if values[i-1].TokenType == css.DelimToken && values[i-1].Data[0] == '/' { + break + } else if values[i].TokenType != css.IdentToken && values[i].TokenType != css.StringToken { + break + } else if h := values[i].Ident; h == Xx_Small || h == X_Small || h == Small || h == Medium || h == Large || h == X_Large || h == Xx_Large || h == Smaller || h == Larger || h == Inherit || h == Initial || h == Unset { + // inherit, initial and unset are followed by an IdentToken/StringToken, so must be for font-size + break + } + } + + // font-family minified in place + values = append(values[:i+1], c.minifyProperty(Font_Family, values[i+1:])...) + + // fix for IE9, IE10, IE11: font name starting with `-` is not recognized + if values[i+1].Data[0] == '-' { + v := make([]byte, len(values[i+1].Data)+2) + v[0] = '\'' + copy(v[1:], values[i+1].Data) + v[len(v)-1] = '\'' + values[i+1].Data = v + } + + if i > 0 { + // line-height + if i > 1 && values[i-1].TokenType == css.DelimToken && values[i-1].Data[0] == '/' { + if values[i].Ident == Normal { + values = append(values[:i-1], values[i+1:]...) + } + i -= 2 + } + + // font-size + i-- + + for ; i > -1; i-- { + if values[i].Ident == Normal { + values = append(values[:i], values[i+1:]...) 
+ } else if values[i].Ident == Bold { + values[i].TokenType = css.NumberToken + values[i].Data = n700Bytes + } else if values[i].TokenType == css.NumberToken && bytes.Equal(values[i].Data, n400Bytes) { + values = append(values[:i], values[i+1:]...) + } + } + } + } + case Font_Family: + for i, value := range values { + if value.TokenType == css.StringToken && 2 < len(value.Data) { + unquote := true + parse.ToLower(value.Data) + s := value.Data[1 : len(value.Data)-1] + if 0 < len(s) { + for _, split := range bytes.Split(s, spaceBytes) { + // if len is zero, it contains two consecutive spaces + if len(split) == 0 || !css.IsIdent(split) { + unquote = false + break + } + } + } + if unquote { + values[i].Data = s + } + } + } + case Font_Weight: + if values[0].Ident == Normal { + values[0].TokenType = css.NumberToken + values[0].Data = n400Bytes + } else if values[0].Ident == Bold { + values[0].TokenType = css.NumberToken + values[0].Data = n700Bytes + } + case Url: + for i := 0; i < len(values); i++ { + if values[i].TokenType == css.FunctionToken && len(values[i].Args) == 1 { + fun := values[i].Fun + data := values[i].Args[0].Data + if fun == Local && (data[0] == '\'' || data[0] == '"') { + if css.IsURLUnquoted(data[1 : len(data)-1]) { + data = data[1 : len(data)-1] + } + values[i].Args[0].Data = data + } + } + } + case Margin, Padding, Border_Width: + switch len(values) { + case 2: + if values[0].Equal(values[1]) { + values = values[:1] + } + case 3: + if values[0].Equal(values[1]) && values[0].Equal(values[2]) { + values = values[:1] + } else if values[0].Equal(values[2]) { + values = values[:2] + } + case 4: + if values[0].Equal(values[1]) && values[0].Equal(values[2]) && values[0].Equal(values[3]) { + values = values[:1] + } else if values[0].Equal(values[2]) && values[1].Equal(values[3]) { + values = values[:2] + } else if values[1].Equal(values[3]) { + values = values[:3] + } + } + case Border, Border_Bottom, Border_Left, Border_Right, Border_Top: + for i := 0; i < 
len(values); i++ { + if values[i].Ident == None || values[i].Ident == Currentcolor || values[i].Ident == Medium { + values = append(values[:i], values[i+1:]...) + i-- + } else { + values[i] = minifyColor(values[i]) + } + } + if len(values) == 0 { + values = []Token{{css.IdentToken, noneBytes, nil, 0, None}} + } + case Outline: + for i := 0; i < len(values); i++ { + if values[i].Ident == Invert || values[i].Ident == None || values[i].Ident == Medium { + values = append(values[:i], values[i+1:]...) + i-- + } else { + values[i] = minifyColor(values[i]) + } + } + if len(values) == 0 { + values = []Token{{css.IdentToken, noneBytes, nil, 0, None}} + } + case Background: + start := 0 + for end := 0; end <= len(values); end++ { // loop over comma-separated lists + if end != len(values) && values[end].TokenType != css.CommaToken { + continue + } else if start == end { + start++ + continue + } + + // minify background-size and lowercase all identifiers + for i := start; i < end; i++ { + if values[i].TokenType == css.DelimToken && values[i].Data[0] == '/' { + // background-size consists of either [ | auto | cover | contain] or [ | auto]{2} + // we can only minify the latter + if i+1 < end && (values[i+1].TokenType == css.NumberToken || values[i+1].IsLengthPercentage() || values[i+1].Ident == Auto) { + if i+2 < end && (values[i+2].TokenType == css.NumberToken || values[i+2].IsLengthPercentage() || values[i+2].Ident == Auto) { + sizeValues := c.minifyProperty(Background_Size, values[i+1:i+3]) + if len(sizeValues) == 1 && sizeValues[0].Ident == Auto { + // remove background-size if it is '/ auto' after minifying the property + values = append(values[:i], values[i+3:]...) + end -= 3 + i-- + } else { + values = append(values[:i+1], append(sizeValues, values[i+3:]...)...) + end -= 2 - len(sizeValues) + i += len(sizeValues) - 1 + } + } else if values[i+1].Ident == Auto { + // remove background-size if it is '/ auto' + values = append(values[:i], values[i+2:]...) 
+ end -= 2 + i-- + } + } + } + } + + // minify all other values + iPaddingBox := -1 // position of background-origin that is padding-box + for i := start; i < end; i++ { + h := values[i].Ident + values[i] = minifyColor(values[i]) + if values[i].TokenType == css.IdentToken { + if i+1 < end && values[i+1].TokenType == css.IdentToken && (h == Space || h == Round || h == Repeat || h == No_Repeat) { + if h2 := values[i+1].Ident; h2 == Space || h2 == Round || h2 == Repeat || h2 == No_Repeat { + repeatValues := c.minifyProperty(Background_Repeat, values[i:i+2]) + if len(repeatValues) == 1 && repeatValues[0].Ident == Repeat { + values = append(values[:i], values[i+2:]...) + end -= 2 + i-- + } else { + values = append(values[:i], append(repeatValues, values[i+2:]...)...) + end -= 2 - len(repeatValues) + i += len(repeatValues) - 1 + } + continue + } + } else if h == None || h == Scroll || h == Transparent { + values = append(values[:i], values[i+1:]...) + end-- + i-- + continue + } else if h == Border_Box || h == Padding_Box { + if iPaddingBox == -1 && h == Padding_Box { // background-origin + iPaddingBox = i + } else if iPaddingBox != -1 && h == Border_Box { // background-clip + values = append(values[:i], values[i+1:]...) + values = append(values[:iPaddingBox], values[iPaddingBox+1:]...) + end -= 2 + i -= 2 + } + continue + } + } else if values[i].TokenType == css.HashToken && bytes.Equal(values[i].Data, blackBytes) { + values = append(values[:i], values[i+1:]...) 
+ end-- + i-- + continue + } else if values[i].TokenType == css.FunctionToken && bytes.Equal(values[i].Data, varBytes) { + continue + } + + // further minify background-position and background-size combination + if values[i].TokenType == css.NumberToken || values[i].IsLengthPercentage() || h == Left || h == Right || h == Top || h == Bottom || h == Center { + j := i + 1 + for ; j < len(values); j++ { + if h := values[j].Ident; h == Left || h == Right || h == Top || h == Bottom || h == Center { + continue + } else if values[j].TokenType == css.NumberToken || values[j].IsLengthPercentage() { + continue + } + break + } + + positionValues := c.minifyProperty(Background_Position, values[i:j]) + hasSize := j < len(values) && values[j].TokenType == css.DelimToken && values[j].Data[0] == '/' + if !hasSize && len(positionValues) == 2 && positionValues[0].IsZero() && positionValues[1].IsZero() { + if end-start == 2 { + values[i] = Token{css.NumberToken, zeroBytes, nil, 0, 0} + values[i+1] = Token{css.NumberToken, zeroBytes, nil, 0, 0} + i++ + } else { + values = append(values[:i], values[j:]...) + end -= j - i + i-- + } + } else { + if len(positionValues) == j-i { + for k, positionValue := range positionValues { + values[i+k] = positionValue + } + } else { + values = append(values[:i], append(positionValues, values[j:]...)...) + end -= j - i - len(positionValues) + } + i += len(positionValues) - 1 + } + } + } + + if end-start == 0 { + values = append(values[:start], append([]Token{{css.NumberToken, zeroBytes, nil, 0, 0}, {css.NumberToken, zeroBytes, nil, 0, 0}}, values[end:]...)...) + end += 2 + } + start = end + 1 + } + case Background_Size: + start := 0 + for end := 0; end <= len(values); end++ { // loop over comma-separated lists + if end != len(values) && values[end].TokenType != css.CommaToken { + continue + } else if start == end { + start++ + continue + } + + if end-start == 2 && values[start+1].Ident == Auto { + values = append(values[:start+1], values[start+2:]...) 
+ end-- + } + start = end + 1 + } + case Background_Repeat: + start := 0 + for end := 0; end <= len(values); end++ { // loop over comma-separated lists + if end != len(values) && values[end].TokenType != css.CommaToken { + continue + } else if start == end { + start++ + continue + } + + if end-start == 2 && values[start].TokenType == css.IdentToken && values[start+1].TokenType == css.IdentToken { + if values[start].Ident == values[start+1].Ident { + values = append(values[:start+1], values[start+2:]...) + end-- + } else if values[start].Ident == Repeat && values[start+1].Ident == No_Repeat { + values[start].Data = repeatXBytes + values[start].Ident = Repeat_X + values = append(values[:start+1], values[start+2:]...) + end-- + } else if values[start].Ident == No_Repeat && values[start+1].Ident == Repeat { + values[start].Data = repeatYBytes + values[start].Ident = Repeat_Y + values = append(values[:start+1], values[start+2:]...) + end-- + } + } + start = end + 1 + } + case Background_Position: + start := 0 + for end := 0; end <= len(values); end++ { // loop over comma-separated lists + if end != len(values) && values[end].TokenType != css.CommaToken { + continue + } else if start == end { + start++ + continue + } + + if end-start == 3 || end-start == 4 { + // remove zero offsets + for _, i := range []int{end - start - 1, start + 1} { + if 2 < end-start && values[i].IsZero() { + values = append(values[:i], values[i+1:]...) 
+ end-- + } + } + + j := start + 1 // position of second set of horizontal/vertical values + if 2 < end-start && values[start+2].TokenType == css.IdentToken { + j = start + 2 + } + + b := make([]byte, 0, 4) + offsets := make([]Token, 2) + for _, i := range []int{j, start} { + if i+1 < end && i+1 != j { + if values[i+1].TokenType == css.PercentageToken { + // change right or bottom with percentage offset to left or top respectively + if values[i].Ident == Right || values[i].Ident == Bottom { + n, _ := strconvParse.ParseInt(values[i+1].Data[:len(values[i+1].Data)-1]) + b = strconv.AppendInt(b[:0], 100-n, 10) + b = append(b, '%') + values[i+1].Data = b + if values[i].Ident == Right { + values[i].Data = leftBytes + values[i].Ident = Left + } else { + values[i].Data = topBytes + values[i].Ident = Top + } + } + } + if values[i].Ident == Left { + offsets[0] = values[i+1] + } else if values[i].Ident == Top { + offsets[1] = values[i+1] + } + } else if values[i].Ident == Left { + offsets[0] = Token{css.NumberToken, zeroBytes, nil, 0, 0} + } else if values[i].Ident == Top { + offsets[1] = Token{css.NumberToken, zeroBytes, nil, 0, 0} + } else if values[i].Ident == Right { + offsets[0] = Token{css.PercentageToken, n100pBytes, nil, 0, 0} + values[i].Ident = Left + } else if values[i].Ident == Bottom { + offsets[1] = Token{css.PercentageToken, n100pBytes, nil, 0, 0} + values[i].Ident = Top + } + } + + if values[start].Ident == Center || values[j].Ident == Center { + if values[start].Ident == Left || values[j].Ident == Left { + offsets = offsets[:1] + } else if values[start].Ident == Top || values[j].Ident == Top { + offsets[0] = Token{css.NumberToken, n50pBytes, nil, 0, 0} + } + } + + if offsets[0].Data != nil && (len(offsets) == 1 || offsets[1].Data != nil) { + values = append(append(values[:start], offsets...), values[end:]...) 
+ end -= end - start - len(offsets) + } + } + // removing zero offsets in the previous loop might make it eligible for the next loop + if end-start == 1 || end-start == 2 { + if end-start == 1 && (values[start].Ident == Top || values[start].Ident == Bottom) { + // we can't make this smaller, and converting to a number will break it + // (https://github.com/tdewolff/minify/issues/221#issuecomment-415419918) + break + } + + if end-start == 2 && (values[start].Ident == Top || values[start].Ident == Bottom || values[start+1].Ident == Left || values[start+1].Ident == Right) { + // if it's a vertical position keyword, swap it with the next element + // since otherwise converted number positions won't be valid anymore + // (https://github.com/tdewolff/minify/issues/221#issue-353067229) + values[start], values[start+1] = values[start+1], values[start] + } + + // transform keywords to lengths|percentages + for i := start; i < end; i++ { + if values[i].TokenType == css.IdentToken { + if values[i].Ident == Left || values[i].Ident == Top { + values[i].TokenType = css.NumberToken + values[i].Data = zeroBytes + values[i].Ident = 0 + } else if values[i].Ident == Right || values[i].Ident == Bottom { + values[i].TokenType = css.PercentageToken + values[i].Data = n100pBytes + values[i].Ident = 0 + } else if values[i].Ident == Center { + if i == start { + values[i].TokenType = css.PercentageToken + values[i].Data = n50pBytes + values[i].Ident = 0 + } else { + values = append(values[:start+1], values[start+2:]...) + end-- + } + } + } else if i == start+1 && values[i].TokenType == css.PercentageToken && bytes.Equal(values[i].Data, n50pBytes) { + values = append(values[:start+1], values[start+2:]...) 
+ end-- + } else if values[i].TokenType == css.PercentageToken && values[i].Data[0] == '0' { + values[i].TokenType = css.NumberToken + values[i].Data = zeroBytes + values[i].Ident = 0 + } + } + } + start = end + 1 + } + case Box_Shadow: + start := 0 + for end := 0; end <= len(values); end++ { // loop over comma-separated lists + if end != len(values) && values[end].TokenType != css.CommaToken { + continue + } else if start == end { + start++ + continue + } + + if end-start == 1 && values[start].Ident == Initial { + values[start].Ident = None + values[start].Data = noneBytes + } else { + numbers := []int{} + for i := start; i < end; i++ { + if values[i].IsLength() { + numbers = append(numbers, i) + } + } + if len(numbers) == 4 && values[numbers[3]].IsZero() { + values = append(values[:numbers[3]], values[numbers[3]+1:]...) + numbers = numbers[:3] + end-- + } + if len(numbers) == 3 && values[numbers[2]].IsZero() { + values = append(values[:numbers[2]], values[numbers[2]+1:]...) + end-- + } + } + start = end + 1 + } + case Ms_Filter: + alpha := []byte("progid:DXImageTransform.Microsoft.Alpha(Opacity=") + if values[0].TokenType == css.StringToken && 2 < len(values[0].Data) && bytes.HasPrefix(values[0].Data[1:len(values[0].Data)-1], alpha) { + values[0].Data = append(append([]byte{values[0].Data[0]}, []byte("alpha(opacity=")...), values[0].Data[1+len(alpha):]...) 
+ } + case Color: + values[0] = minifyColor(values[0]) + case Background_Color: + values[0] = minifyColor(values[0]) + if !c.o.KeepCSS2 { + if values[0].Ident == Transparent { + values[0].Data = initialBytes + values[0].Ident = Initial + } + } + case Border_Color: + sameValues := true + for i := range values { + if values[i].Ident == Currentcolor { + values[i].Data = initialBytes + values[i].Ident = Initial + } else { + values[i] = minifyColor(values[i]) + } + if 0 < i && sameValues && !values[0].Equal(values[i]) { + sameValues = false + } + } + if sameValues { + values = values[:1] + } + case Border_Left_Color, Border_Right_Color, Border_Top_Color, Border_Bottom_Color, Text_Decoration_Color, Text_Emphasis_Color: + if values[0].Ident == Currentcolor { + values[0].Data = initialBytes + values[0].Ident = Initial + } else { + values[0] = minifyColor(values[0]) + } + case Caret_Color, Outline_Color, Fill, Stroke: + values[0] = minifyColor(values[0]) + case Column_Rule: + for i := 0; i < len(values); i++ { + if values[i].Ident == Currentcolor || values[i].Ident == None || values[i].Ident == Medium { + values = append(values[:i], values[i+1:]...) + i-- + } else { + values[i] = minifyColor(values[i]) + } + } + if len(values) == 0 { + values = []Token{{css.IdentToken, noneBytes, nil, 0, None}} + } + case Text_Shadow: + // TODO: minify better (can be comma separated list) + for i := 0; i < len(values); i++ { + values[i] = minifyColor(values[i]) + } + case Text_Decoration: + for i := 0; i < len(values); i++ { + if values[i].Ident == Currentcolor || values[i].Ident == None || values[i].Ident == Solid { + values = append(values[:i], values[i+1:]...) + i-- + } else { + values[i] = minifyColor(values[i]) + } + } + if len(values) == 0 { + values = []Token{{css.IdentToken, noneBytes, nil, 0, None}} + } + case Text_Emphasis: + for i := 0; i < len(values); i++ { + if values[i].Ident == Currentcolor || values[i].Ident == None { + values = append(values[:i], values[i+1:]...) 
+ i-- + } else { + values[i] = minifyColor(values[i]) + } + } + if len(values) == 0 { + values = []Token{{css.IdentToken, noneBytes, nil, 0, None}} + } + case Flex: + if len(values) == 2 && values[0].TokenType == css.NumberToken { + if values[1].TokenType != css.NumberToken && values[1].IsZero() { + values = values[:1] // remove if it is zero + } + } else if len(values) == 3 && values[0].TokenType == css.NumberToken && values[1].TokenType == css.NumberToken { + if len(values[0].Data) == 1 && len(values[1].Data) == 1 { + if values[2].Ident == Auto { + if values[0].Data[0] == '0' && values[1].Data[0] == '1' { + values = values[:1] + values[0].TokenType = css.IdentToken + values[0].Data = initialBytes + values[0].Ident = Initial + } else if values[0].Data[0] == '1' && values[1].Data[0] == '1' { + values = values[:1] + values[0].TokenType = css.IdentToken + values[0].Data = autoBytes + values[0].Ident = Auto + } else if values[0].Data[0] == '0' && values[1].Data[0] == '0' { + values = values[:1] + values[0].TokenType = css.IdentToken + values[0].Data = noneBytes + values[0].Ident = None + } + } else if values[1].Data[0] == '1' && values[2].IsZero() { + values = values[:1] // remove and if they are 1 and 0 respectively + } else if values[2].IsZero() { + values = values[:2] // remove auto to write 2-value syntax of + } else { + values[2] = minifyLengthPercentage(values[2]) + } + } + } + case Flex_Basis: + if values[0].Ident == Initial { + values[0].Data = autoBytes + values[0].Ident = Auto + } else { + values[0] = minifyLengthPercentage(values[0]) + } + case Order, Flex_Grow: + if values[0].Ident == Initial { + values[0].TokenType = css.NumberToken + values[0].Data = zeroBytes + values[0].Ident = 0 + } + case Flex_Shrink: + if values[0].Ident == Initial { + values[0].TokenType = css.NumberToken + values[0].Data = oneBytes + values[0].Ident = 0 + } + case Unicode_Range: + ranges := [][2]int{} + for _, value := range values { + if value.TokenType == css.CommaToken { + 
continue + } else if value.TokenType != css.UnicodeRangeToken { + return values + } + + i := 2 + iWildcard := 0 + start := 0 + for i < len(value.Data) && value.Data[i] != '-' { + start *= 16 + if '0' <= value.Data[i] && value.Data[i] <= '9' { + start += int(value.Data[i] - '0') + } else if 'a' <= value.Data[i]|32 && value.Data[i]|32 <= 'f' { + start += int(value.Data[i]|32-'a') + 10 + } else if iWildcard == 0 && value.Data[i] == '?' { + iWildcard = i + } + i++ + } + end := start + if iWildcard != 0 { + end = start + int(math.Pow(16.0, float64(len(value.Data)-iWildcard))) - 1 + } else if i < len(value.Data) && value.Data[i] == '-' { + i++ + end = 0 + for i < len(value.Data) { + end *= 16 + if '0' <= value.Data[i] && value.Data[i] <= '9' { + end += int(value.Data[i] - '0') + } else if 'a' <= value.Data[i]|32 && value.Data[i]|32 <= 'f' { + end += int(value.Data[i]|32-'a') + 10 + } + i++ + } + if end <= start { + end = start + } + } + ranges = append(ranges, [2]int{start, end}) + } + + // sort and remove overlapping ranges + sort.Slice(ranges, func(i, j int) bool { return ranges[i][0] < ranges[j][0] }) + for i := 0; i < len(ranges)-1; i++ { + if ranges[i+1][1] <= ranges[i][1] { + // next range is fully contained in the current range + ranges = append(ranges[:i+1], ranges[i+2:]...) + } else if ranges[i+1][0] <= ranges[i][1]+1 { + // next range is partially covering the current range + ranges[i][1] = ranges[i+1][1] + ranges = append(ranges[:i+1], ranges[i+2:]...) 
+ } + } + + values = values[:0] + for i, ran := range ranges { + if i != 0 { + values = append(values, Token{css.CommaToken, commaBytes, nil, 0, None}) + } + if ran[0] == ran[1] { + urange := []byte(fmt.Sprintf("U+%X", ran[0])) + values = append(values, Token{css.UnicodeRangeToken, urange, nil, 0, None}) + } else if ran[0] == 0 && ran[1] == 0x10FFFF { + values = append(values, Token{css.IdentToken, initialBytes, nil, 0, None}) + } else { + k := 0 + for k < 6 && (ran[0]>>(k*4))&0xF == 0 && (ran[1]>>(k*4))&0xF == 0xF { + k++ + } + wildcards := k + for k < 6 { + if (ran[0]>>(k*4))&0xF != (ran[1]>>(k*4))&0xF { + wildcards = 0 + break + } + k++ + } + var urange []byte + if wildcards != 0 { + if ran[0]>>(wildcards*4) == 0 { + urange = []byte(fmt.Sprintf("U+%s", strings.Repeat("?", wildcards))) + } else { + urange = []byte(fmt.Sprintf("U+%X%s", ran[0]>>(wildcards*4), strings.Repeat("?", wildcards))) + } + } else { + urange = []byte(fmt.Sprintf("U+%X-%X", ran[0], ran[1])) + } + values = append(values, Token{css.UnicodeRangeToken, urange, nil, 0, None}) + } + } + } + return values +} + +func minifyColor(value Token) Token { + data := value.Data + if value.TokenType == css.IdentToken { + if hexValue, ok := ShortenColorName[value.Ident]; ok { + value.TokenType = css.HashToken + value.Data = hexValue + } + } else if value.TokenType == css.HashToken { + parse.ToLower(data[1:]) + if len(data) == 9 && data[7] == data[8] { + if data[7] == 'f' { + data = data[:7] + } else if data[7] == '0' { + data = blackBytes + } + } + if ident, ok := ShortenColorHex[string(data)]; ok { + value.TokenType = css.IdentToken + data = ident + } else if len(data) == 7 && data[1] == data[2] && data[3] == data[4] && data[5] == data[6] { + value.TokenType = css.HashToken + data[2] = data[3] + data[3] = data[5] + data = data[:4] + } else if len(data) == 9 && data[1] == data[2] && data[3] == data[4] && data[5] == data[6] && data[7] == data[8] { + // from working draft Color Module Level 4 + value.TokenType 
= css.HashToken + data[2] = data[3] + data[3] = data[5] + data[4] = data[7] + data = data[:5] + } + value.Data = data + } + return value +} + +func minifyNumberPercentage(value Token) Token { + // assumes input already minified + if value.TokenType == css.PercentageToken && len(value.Data) == 3 && value.Data[len(value.Data)-2] == '0' { + value.Data[1] = value.Data[0] + value.Data[0] = '.' + value.Data = value.Data[:2] + value.TokenType = css.NumberToken + } else if value.TokenType == css.NumberToken && 2 < len(value.Data) && value.Data[0] == '.' && value.Data[1] == '0' { + if value.Data[2] == '0' { + value.Data[0] = '.' + copy(value.Data[1:], value.Data[3:]) + value.Data[len(value.Data)-2] = '%' + value.Data = value.Data[:len(value.Data)-1] + value.TokenType = css.PercentageToken + } else if len(value.Data) == 3 { + value.Data[0] = value.Data[2] + value.Data[1] = '%' + value.Data = value.Data[:2] + value.TokenType = css.PercentageToken + } + } + return value +} + +func minifyLengthPercentage(value Token) Token { + if value.TokenType != css.NumberToken && value.IsZero() { + value.TokenType = css.NumberToken + value.Data = value.Data[:1] // remove dimension for zero value + } + return value +} + +func (c *cssMinifier) minifyDimension(value Token) (Token, []byte) { + // TODO: add check for zero value + var dim []byte + if value.TokenType == css.DimensionToken { + n := len(value.Data) + for 0 < n { + lower := 'a' <= value.Data[n-1] && value.Data[n-1] <= 'z' + upper := 'A' <= value.Data[n-1] && value.Data[n-1] <= 'Z' + if !lower && !upper { + break + } else if upper { + value.Data[n-1] = value.Data[n-1] + ('a' - 'A') + } + n-- + } + + num := value.Data[:n] + if c.o.KeepCSS2 { + num = minify.Decimal(num, c.o.Precision) // don't use exponents + } else { + num = minify.Number(num, c.o.Precision) + } + dim = value.Data[n:] + value.Data = append(num, dim...) 
+ } + return value, dim + + // TODO: optimize + //if value.TokenType == css.DimensionToken { + // // TODO: reverse; parse dim not number + // n := parse.Number(value.Data) + // num := value.Data[:n] + // dim = value.Data[n:] + // parse.ToLower(dim) + + // if c.o.KeepCSS2 { + // num = minify.Decimal(num, c.o.Precision) // don't use exponents + // } else { + // num = minify.Number(num, c.o.Precision) + // } + + // // change dimension to compress number + // h := ToHash(dim) + // if h == Px || h == Pt || h == Pc || h == In || h == Mm || h == Cm || h == Q || h == Deg || h == Grad || h == Rad || h == Turn || h == S || h == Ms || h == Hz || h == Khz || h == Dpi || h == Dpcm || h == Dppx { + // d, _ := strconv.ParseFloat(string(num), 64) // can never fail + // var dimensions []Hash + // var multipliers []float64 + // switch h { + // case Px: + // //dimensions = []Hash{In, Cm, Pc, Mm, Pt, Q} + // //multipliers = []float64{0.010416666666666667, 0.026458333333333333, 0.0625, 0.26458333333333333, 0.75, 1.0583333333333333} + // dimensions = []Hash{In, Pc, Pt} + // multipliers = []float64{0.010416666666666667, 0.0625, 0.75} + // case Pt: + // //dimensions = []Hash{In, Cm, Pc, Mm, Px, Q} + // //multipliers = []float64{0.013888888888888889, 0.035277777777777778, 0.083333333333333333, 0.35277777777777778, 1.3333333333333333, 1.4111111111111111} + // dimensions = []Hash{In, Pc, Px} + // multipliers = []float64{0.013888888888888889, 0.083333333333333333, 1.3333333333333333} + // case Pc: + // //dimensions = []Hash{In, Cm, Mm, Pt, Px, Q} + // //multipliers = []float64{0.16666666666666667, 0.42333333333333333, 4.2333333333333333, 12.0, 16.0, 16.933333333333333} + // dimensions = []Hash{In, Pt, Px} + // multipliers = []float64{0.16666666666666667, 12.0, 16.0} + // case In: + // //dimensions = []Hash{Cm, Pc, Mm, Pt, Px, Q} + // //multipliers = []float64{2.54, 6.0, 25.4, 72.0, 96.0, 101.6} + // dimensions = []Hash{Pc, Pt, Px} + // multipliers = []float64{6.0, 72.0, 96.0} + // case Cm: + 
// //dimensions = []Hash{In, Pc, Mm, Pt, Px, Q} + // //multipliers = []float64{0.39370078740157480, 2.3622047244094488, 10.0, 28.346456692913386, 37.795275590551181, 40.0} + // dimensions = []Hash{Mm, Q} + // multipliers = []float64{10.0, 40.0} + // case Mm: + // //dimensions = []Hash{In, Cm, Pc, Pt, Px, Q} + // //multipliers = []float64{0.039370078740157480, 0.1, 0.23622047244094488, 2.8346456692913386, 3.7795275590551181, 4.0} + // dimensions = []Hash{Cm, Q} + // multipliers = []float64{0.1, 4.0} + // case Q: + // //dimensions = []Hash{In, Cm, Pc, Pt, Px} // Q to mm is never smaller + // //multipliers = []float64{0.0098425196850393701, 0.025, 0.059055118110236220, 0.70866141732283465, 0.94488188976377953} + // dimensions = []Hash{Cm} // Q to mm is never smaller + // multipliers = []float64{0.025} + // case Deg: + // //dimensions = []Hash{Turn, Rad, Grad} + // //multipliers = []float64{0.0027777777777777778, 0.017453292519943296, 1.1111111111111111} + // dimensions = []Hash{Turn, Grad} + // multipliers = []float64{0.0027777777777777778, 1.1111111111111111} + // case Grad: + // //dimensions = []Hash{Turn, Rad, Deg} + // //multipliers = []float64{0.0025, 0.015707963267948966, 0.9} + // dimensions = []Hash{Turn, Deg} + // multipliers = []float64{0.0025, 0.9} + // case Turn: + // //dimensions = []Hash{Rad, Deg, Grad} + // //multipliers = []float64{6.2831853071795865, 360.0, 400.0} + // dimensions = []Hash{Deg, Grad} + // multipliers = []float64{360.0, 400.0} + // case Rad: + // //dimensions = []Hash{Turn, Deg, Grad} + // //multipliers = []float64{0.15915494309189534, 57.295779513082321, 63.661977236758134} + // case S: + // dimensions = []Hash{Ms} + // multipliers = []float64{1000.0} + // case Ms: + // dimensions = []Hash{S} + // multipliers = []float64{0.001} + // case Hz: + // dimensions = []Hash{Khz} + // multipliers = []float64{0.001} + // case Khz: + // dimensions = []Hash{Hz} + // multipliers = []float64{1000.0} + // case Dpi: + // dimensions = []Hash{Dppx, 
Dpcm} + // multipliers = []float64{0.010416666666666667, 0.39370078740157480} + // case Dpcm: + // //dimensions = []Hash{Dppx, Dpi} + // //multipliers = []float64{0.026458333333333333, 2.54} + // dimensions = []Hash{Dpi} + // multipliers = []float64{2.54} + // case Dppx: + // //dimensions = []Hash{Dpcm, Dpi} + // //multipliers = []float64{37.795275590551181, 96.0} + // dimensions = []Hash{Dpi} + // multipliers = []float64{96.0} + // } + // for i := range dimensions { + // if dimensions[i] != h { //&& (d < 1.0) == (multipliers[i] > 1.0) { + // b, _ := strconvParse.AppendFloat([]byte{}, d*multipliers[i], -1) + // if c.o.KeepCSS2 { + // b = minify.Decimal(b, c.o.newPrecision) // don't use exponents + // } else { + // b = minify.Number(b, c.o.newPrecision) + // } + // newDim := []byte(dimensions[i].String()) + // if len(b)+len(newDim) < len(num)+len(dim) { + // num = b + // dim = newDim + // } + // } + // } + // } + // value.Data = append(num, dim...) + //} + //return value, dim +} diff --git a/vendor/github.com/tdewolff/minify/v2/css/hash.go b/vendor/github.com/tdewolff/minify/v2/css/hash.go new file mode 100644 index 0000000..98692c8 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/css/hash.go @@ -0,0 +1,1392 @@ +package css + +// uses github.com/tdewolff/hasher +//go:generate hasher -type=Hash -file=hash.go + +// Hash defines perfect hashes for a predefined list of strings +type Hash uint32 + +// Identifiers for the hashes associated with the text in the comments. 
+const ( + Ms_Filter Hash = 0xa // -ms-filter + Accelerator Hash = 0x3760b // accelerator + Aliceblue Hash = 0x7a209 // aliceblue + Align_Content Hash = 0xd980d // align-content + Align_Items Hash = 0x7ef0b // align-items + Align_Self Hash = 0x8cb0a // align-self + All Hash = 0x69103 // all + Alpha Hash = 0x37205 // alpha + Animation Hash = 0xca09 // animation + Animation_Delay Hash = 0x2050f // animation-delay + Animation_Direction Hash = 0x8e913 // animation-direction + Animation_Duration Hash = 0x35d12 // animation-duration + Animation_Fill_Mode Hash = 0x66c13 // animation-fill-mode + Animation_Iteration_Count Hash = 0xd4919 // animation-iteration-count + Animation_Name Hash = 0xca0e // animation-name + Animation_Play_State Hash = 0xfc14 // animation-play-state + Animation_Timing_Function Hash = 0x14119 // animation-timing-function + Antiquewhite Hash = 0x6490c // antiquewhite + Aquamarine Hash = 0x9ec0a // aquamarine + Attr Hash = 0x59804 // attr + Auto Hash = 0x44504 // auto + Azimuth Hash = 0x15a07 // azimuth + Background Hash = 0x2b0a // background + Background_Attachment Hash = 0x2b15 // background-attachment + Background_Clip Hash = 0xb6e0f // background-clip + Background_Color Hash = 0x21710 // background-color + Background_Image Hash = 0x5ad10 // background-image + Background_Origin Hash = 0x17111 // background-origin + Background_Position Hash = 0x18e13 // background-position + Background_Position_X Hash = 0x18e15 // background-position-x + Background_Position_Y Hash = 0x1a315 // background-position-y + Background_Repeat Hash = 0x1b811 // background-repeat + Background_Size Hash = 0x1cb0f // background-size + Behavior Hash = 0x1da08 // behavior + Black Hash = 0x1e205 // black + Blanchedalmond Hash = 0x1e70e // blanchedalmond + Blueviolet Hash = 0x7a70a // blueviolet + Bold Hash = 0x1fc04 // bold + Border Hash = 0x22706 // border + Border_Bottom Hash = 0x2270d // border-bottom + Border_Bottom_Color Hash = 0x22713 // border-bottom-color + 
Border_Bottom_Style Hash = 0x23a13 // border-bottom-style + Border_Bottom_Width Hash = 0x25d13 // border-bottom-width + Border_Box Hash = 0x27e0a // border-box + Border_Collapse Hash = 0x2b60f // border-collapse + Border_Color Hash = 0x2d30c // border-color + Border_Left Hash = 0x2df0b // border-left + Border_Left_Color Hash = 0x2df11 // border-left-color + Border_Left_Style Hash = 0x2f011 // border-left-style + Border_Left_Width Hash = 0x30111 // border-left-width + Border_Right Hash = 0x3120c // border-right + Border_Right_Color Hash = 0x31212 // border-right-color + Border_Right_Style Hash = 0x32412 // border-right-style + Border_Right_Width Hash = 0x33612 // border-right-width + Border_Spacing Hash = 0x3480e // border-spacing + Border_Style Hash = 0x3ab0c // border-style + Border_Top Hash = 0x3b70a // border-top + Border_Top_Color Hash = 0x3b710 // border-top-color + Border_Top_Style Hash = 0x3c710 // border-top-style + Border_Top_Width Hash = 0x3d710 // border-top-width + Border_Width Hash = 0x3e70c // border-width + Bottom Hash = 0x22e06 // bottom + Box_Shadow Hash = 0x2850a // box-shadow + Burlywood Hash = 0x3f309 // burlywood + Cadetblue Hash = 0x9c609 // cadetblue + Calc Hash = 0x9c304 // calc + Caption_Side Hash = 0x40f0c // caption-side + Caret_Color Hash = 0x4240b // caret-color + Center Hash = 0xdb06 // center + Charset Hash = 0x62f07 // charset + Chartreuse Hash = 0x42f0a // chartreuse + Chocolate Hash = 0x43909 // chocolate + Clamp Hash = 0x44e05 // clamp + Clear Hash = 0x45d05 // clear + Clip Hash = 0xb7904 // clip + Cm Hash = 0x53802 // cm + Color Hash = 0x2505 // color + Column_Count Hash = 0x4620c // column-count + Column_Gap Hash = 0x6a30a // column-gap + Column_Rule Hash = 0x4880b // column-rule + Column_Rule_Color Hash = 0x48811 // column-rule-color + Column_Rule_Style Hash = 0x49911 // column-rule-style + Column_Rule_Width Hash = 0x4aa11 // column-rule-width + Column_Width Hash = 0x4bb0c // column-width + Columns Hash = 0x74607 // columns + 
Content Hash = 0x5607 // content + Cornflowerblue Hash = 0x4c70e // cornflowerblue + Cornsilk Hash = 0x4d508 // cornsilk + Counter_Increment Hash = 0xd5d11 // counter-increment + Counter_Reset Hash = 0x4690d // counter-reset + Cue Hash = 0x4dd03 // cue + Cue_After Hash = 0x4dd09 // cue-after + Cue_Before Hash = 0x4e60a // cue-before + Currentcolor Hash = 0x5010c // currentcolor + Cursive Hash = 0x50d07 // cursive + Cursor Hash = 0x51406 // cursor + Darkblue Hash = 0x1f408 // darkblue + Darkcyan Hash = 0x1ff08 // darkcyan + Darkgoldenrod Hash = 0x3fb0d // darkgoldenrod + Darkgray Hash = 0x40708 // darkgray + Darkgreen Hash = 0x75c09 // darkgreen + Darkkhaki Hash = 0xa1409 // darkkhaki + Darkmagenta Hash = 0xce90b // darkmagenta + Darkolivegreen Hash = 0x6d90e // darkolivegreen + Darkorange Hash = 0x7500a // darkorange + Darkorchid Hash = 0xa0b0a // darkorchid + Darksalmon Hash = 0xa990a // darksalmon + Darkseagreen Hash = 0xb110c // darkseagreen + Darkslateblue Hash = 0xc1c0d // darkslateblue + Darkslategray Hash = 0xbfa0d // darkslategray + Darkturquoise Hash = 0xcaa0d // darkturquoise + Darkviolet Hash = 0x51a0a // darkviolet + Deeppink Hash = 0x67d08 // deeppink + Deepskyblue Hash = 0x4190b // deepskyblue + Default Hash = 0xa2207 // default + Deg Hash = 0x70103 // deg + Direction Hash = 0x8d909 // direction + Display Hash = 0xcce07 // display + Document Hash = 0x52408 // document + Dodgerblue Hash = 0x52c0a // dodgerblue + Dpcm Hash = 0x53604 // dpcm + Dpi Hash = 0x54f03 // dpi + Dppx Hash = 0x55b04 // dppx + Elevation Hash = 0x6d09 // elevation + Empty_Cells Hash = 0x3910b // empty-cells + Env Hash = 0x4f503 // env + Fantasy Hash = 0x3a407 // fantasy + Fill Hash = 0x67604 // fill + Filter Hash = 0x406 // filter + Firebrick Hash = 0x83509 // firebrick + Flex Hash = 0x55f04 // flex + Flex_Basis Hash = 0x89d0a // flex-basis + Flex_Direction Hash = 0x8d40e // flex-direction + Flex_Flow Hash = 0xc8709 // flex-flow + Flex_Grow Hash = 0x55f09 // flex-grow + Flex_Shrink 
Hash = 0x5680b // flex-shrink + Flex_Wrap Hash = 0x57309 // flex-wrap + Float Hash = 0x59505 // float + Floralwhite Hash = 0x5bd0b // floralwhite + Font Hash = 0x25404 // font + Font_Face Hash = 0x25409 // font-face + Font_Family Hash = 0x5ee0b // font-family + Font_Size Hash = 0x5f909 // font-size + Font_Size_Adjust Hash = 0x5f910 // font-size-adjust + Font_Stretch Hash = 0x6250c // font-stretch + Font_Style Hash = 0x6360a // font-style + Font_Variant Hash = 0x6400c // font-variant + Font_Weight Hash = 0x65b0b // font-weight + Forestgreen Hash = 0x4ec0b // forestgreen + Fuchsia Hash = 0x66607 // fuchsia + Function Hash = 0x15208 // function + Gainsboro Hash = 0xec09 // gainsboro + Ghostwhite Hash = 0x2990a // ghostwhite + Goldenrod Hash = 0x3ff09 // goldenrod + Grad Hash = 0x1004 // grad + Greenyellow Hash = 0x7600b // greenyellow + Grid Hash = 0x35504 // grid + Grid_Area Hash = 0x35509 // grid-area + Grid_Auto_Columns Hash = 0x7bb11 // grid-auto-columns + Grid_Auto_Flow Hash = 0x81c0e // grid-auto-flow + Grid_Auto_Rows Hash = 0x8640e // grid-auto-rows + Grid_Column Hash = 0x69e0b // grid-column + Grid_Column_End Hash = 0xcdb0f // grid-column-end + Grid_Column_Gap Hash = 0x69e0f // grid-column-gap + Grid_Column_Start Hash = 0x6bd11 // grid-column-start + Grid_Row Hash = 0x6ce08 // grid-row + Grid_Row_End Hash = 0x6ce0c // grid-row-end + Grid_Row_Gap Hash = 0x6e70c // grid-row-gap + Grid_Row_Start Hash = 0x7030e // grid-row-start + Grid_Template Hash = 0x7110d // grid-template + Grid_Template_Areas Hash = 0x71113 // grid-template-areas + Grid_Template_Columns Hash = 0x73815 // grid-template-columns + Grid_Template_Rows Hash = 0x77012 // grid-template-rows + Height Hash = 0x9306 // height + Honeydew Hash = 0x16008 // honeydew + Hsl Hash = 0x26f03 // hsl + Hsla Hash = 0x26f04 // hsla + Hz Hash = 0x68502 // hz + Ime_Mode Hash = 0xa1c08 // ime-mode + Import Hash = 0x78d06 // import + Important Hash = 0x78d09 // important + In Hash = 0x4402 // in + Include_Source Hash = 
0x1800e // include-source + Indianred Hash = 0xb0909 // indianred + Inherit Hash = 0x79607 // inherit + Initial Hash = 0x79d07 // initial + Invert Hash = 0x7e406 // invert + Justify_Content Hash = 0x4e0f // justify-content + Justify_Items Hash = 0x6050d // justify-items + Justify_Self Hash = 0x82a0c // justify-self + Keyframes Hash = 0x5cb09 // keyframes + Khz Hash = 0x68403 // khz + Large Hash = 0xa905 // large + Larger Hash = 0xa906 // larger + Lavender Hash = 0x27108 // lavender + Lavenderblush Hash = 0x2710d // lavenderblush + Lawngreen Hash = 0x2ca09 // lawngreen + Layer_Background_Color Hash = 0x21116 // layer-background-color + Layer_Background_Image Hash = 0x5a716 // layer-background-image + Layout_Flow Hash = 0xcf80b // layout-flow + Layout_Grid Hash = 0x8050b // layout-grid + Layout_Grid_Char Hash = 0x80510 // layout-grid-char + Layout_Grid_Char_Spacing Hash = 0x80518 // layout-grid-char-spacing + Layout_Grid_Line Hash = 0x83e10 // layout-grid-line + Layout_Grid_Mode Hash = 0x85410 // layout-grid-mode + Layout_Grid_Type Hash = 0x88710 // layout-grid-type + Left Hash = 0x2e604 // left + Lemonchiffon Hash = 0x24b0c // lemonchiffon + Letter_Spacing Hash = 0x7ae0e // letter-spacing + Lightblue Hash = 0x8ba09 // lightblue + Lightcoral Hash = 0x8c30a // lightcoral + Lightcyan Hash = 0x8e209 // lightcyan + Lightgoldenrodyellow Hash = 0x8fc14 // lightgoldenrodyellow + Lightgray Hash = 0x91009 // lightgray + Lightgreen Hash = 0x9190a // lightgreen + Lightpink Hash = 0x92309 // lightpink + Lightsalmon Hash = 0x92c0b // lightsalmon + Lightseagreen Hash = 0x9370d // lightseagreen + Lightskyblue Hash = 0x9440c // lightskyblue + Lightslateblue Hash = 0x9500e // lightslateblue + Lightsteelblue Hash = 0x95e0e // lightsteelblue + Lightyellow Hash = 0x96c0b // lightyellow + Limegreen Hash = 0x97709 // limegreen + Line_Break Hash = 0x84a0a // line-break + Line_Height Hash = 0x8e0b // line-height + Linear_Gradient Hash = 0x9800f // linear-gradient + List_Style Hash = 0x98f0a 
// list-style + List_Style_Image Hash = 0x98f10 // list-style-image + List_Style_Position Hash = 0x99f13 // list-style-position + List_Style_Type Hash = 0x9b20f // list-style-type + Local Hash = 0x9c105 // local + Magenta Hash = 0xced07 // magenta + Margin Hash = 0x53906 // margin + Margin_Bottom Hash = 0xdb10d // margin-bottom + Margin_Left Hash = 0xdbd0b // margin-left + Margin_Right Hash = 0xb890c // margin-right + Margin_Top Hash = 0x5390a // margin-top + Marker_Offset Hash = 0xad00d // marker-offset + Marks Hash = 0xaee05 // marks + Mask Hash = 0x9cf04 // mask + Max Hash = 0x9d303 // max + Max_Height Hash = 0x9d30a // max-height + Max_Width Hash = 0x9dd09 // max-width + Media Hash = 0xd4505 // media + Medium Hash = 0x9e606 // medium + Mediumaquamarine Hash = 0x9e610 // mediumaquamarine + Mediumblue Hash = 0x9f60a // mediumblue + Mediumorchid Hash = 0xa000c // mediumorchid + Mediumpurple Hash = 0xa420c // mediumpurple + Mediumseagreen Hash = 0xa4e0e // mediumseagreen + Mediumslateblue Hash = 0xa5c0f // mediumslateblue + Mediumspringgreen Hash = 0xa6b11 // mediumspringgreen + Mediumturquoise Hash = 0xa7c0f // mediumturquoise + Mediumvioletred Hash = 0xa8b0f // mediumvioletred + Midnightblue Hash = 0xaa90c // midnightblue + Min Hash = 0x14d03 // min + Min_Height Hash = 0xab50a // min-height + Min_Width Hash = 0xabf09 // min-width + Mintcream Hash = 0xac809 // mintcream + Mistyrose Hash = 0xae409 // mistyrose + Mm Hash = 0xaed02 // mm + Moccasin Hash = 0xb0308 // moccasin + Monospace Hash = 0xaa009 // monospace + Ms Hash = 0x102 // ms + Namespace Hash = 0xd409 // namespace + Navajowhite Hash = 0x750b // navajowhite + No_Repeat Hash = 0xbf09 // no-repeat + None Hash = 0x38e04 // none + Normal Hash = 0x36e06 // normal + Offset Hash = 0xad706 // offset + Offset_Anchor Hash = 0xad70d // offset-anchor + Offset_Distance Hash = 0xb1d0f // offset-distance + Offset_Path Hash = 0xb2c0b // offset-path + Offset_Position Hash = 0xb370f // offset-position + Offset_Rotate Hash = 
0xb460d // offset-rotate + Olivedrab Hash = 0xb6609 // olivedrab + Orangered Hash = 0x75409 // orangered + Order Hash = 0x22805 // order + Orphans Hash = 0x37f07 // orphans + Outline Hash = 0xba707 // outline + Outline_Color Hash = 0xba70d // outline-color + Outline_Style Hash = 0xbb40d // outline-style + Outline_Width Hash = 0xbc10d // outline-width + Overflow Hash = 0x9d08 // overflow + Overflow_X Hash = 0x9d0a // overflow-x + Overflow_Y Hash = 0xbce0a // overflow-y + Padding Hash = 0x45207 // padding + Padding_Bottom Hash = 0xb7c0e // padding-bottom + Padding_Box Hash = 0x4520b // padding-box + Padding_Left Hash = 0xd0a0c // padding-left + Padding_Right Hash = 0x5420d // padding-right + Padding_Top Hash = 0x57b0b // padding-top + Page Hash = 0x58504 // page + Page_Break_After Hash = 0x58510 // page-break-after + Page_Break_Before Hash = 0x6ac11 // page-break-before + Page_Break_Inside Hash = 0x6f211 // page-break-inside + Palegoldenrod Hash = 0xc100d // palegoldenrod + Palegreen Hash = 0xbd809 // palegreen + Paleturquoise Hash = 0xbe10d // paleturquoise + Palevioletred Hash = 0xbee0d // palevioletred + Papayawhip Hash = 0xc070a // papayawhip + Pause Hash = 0xc2905 // pause + Pause_After Hash = 0xc290b // pause-after + Pause_Before Hash = 0xc340c // pause-before + Pc Hash = 0x53702 // pc + Peachpuff Hash = 0x89509 // peachpuff + Pitch Hash = 0x55005 // pitch + Pitch_Range Hash = 0x5500b // pitch-range + Place_Content Hash = 0xc400d // place-content + Place_Items Hash = 0xc4d0b // place-items + Place_Self Hash = 0xc7e0a // place-self + Play_During Hash = 0xcd10b // play-during + Position Hash = 0x13908 // position + Powderblue Hash = 0xc9b0a // powderblue + Progid Hash = 0xca506 // progid + Pt Hash = 0x39302 // pt + Px Hash = 0x55d02 // px + Q Hash = 0x64d01 // q + Quotes Hash = 0xcb706 // quotes + Rad Hash = 0x903 // rad + Radial_Gradient Hash = 0x90f // radial-gradient + Repeat Hash = 0xc206 // repeat + Repeat_X Hash = 0x1c308 // repeat-x + Repeat_Y Hash = 
0xc208 // repeat-y + Rgb Hash = 0x2903 // rgb + Rgba Hash = 0x2904 // rgba + Richness Hash = 0xae08 // richness + Right Hash = 0x31905 // right + Rosybrown Hash = 0xf309 // rosybrown + Round Hash = 0x3005 // round + Row_Gap Hash = 0x6ec07 // row-gap + Royalblue Hash = 0x69509 // royalblue + Ruby_Align Hash = 0xd930a // ruby-align + Ruby_Overhang Hash = 0xe00d // ruby-overhang + Ruby_Position Hash = 0x1340d // ruby-position + S Hash = 0x201 // s + Saddlebrown Hash = 0xb50b // saddlebrown + Sandybrown Hash = 0x3850a // sandybrown + Sans_Serif Hash = 0x39b0a // sans-serif + Scroll Hash = 0x12006 // scroll + Scrollbar_3d_Light_Color Hash = 0xd7c18 // scrollbar-3d-light-color + Scrollbar_Arrow_Color Hash = 0x12015 // scrollbar-arrow-color + Scrollbar_Base_Color Hash = 0x8a614 // scrollbar-base-color + Scrollbar_Dark_Shadow_Color Hash = 0x5d31b // scrollbar-dark-shadow-color + Scrollbar_Face_Color Hash = 0x61114 // scrollbar-face-color + Scrollbar_Highlight_Color Hash = 0x7cb19 // scrollbar-highlight-color + Scrollbar_Shadow_Color Hash = 0x87116 // scrollbar-shadow-color + Scrollbar_Track_Color Hash = 0x72315 // scrollbar-track-color + Seagreen Hash = 0x93c08 // seagreen + Seashell Hash = 0x2c308 // seashell + Serif Hash = 0x3a005 // serif + Size Hash = 0x1d604 // size + Slateblue Hash = 0x95509 // slateblue + Slategray Hash = 0xbfe09 // slategray + Small Hash = 0x68f05 // small + Smaller Hash = 0x68f07 // smaller + Solid Hash = 0x74c05 // solid + Space Hash = 0x6905 // space + Speak Hash = 0x78105 // speak + Speak_Header Hash = 0x7810c // speak-header + Speak_Numeral Hash = 0x7f90d // speak-numeral + Speak_Punctuation Hash = 0xaf211 // speak-punctuation + Speech_Rate Hash = 0xc570b // speech-rate + Springgreen Hash = 0xa710b // springgreen + Steelblue Hash = 0x96309 // steelblue + Stress Hash = 0x11b06 // stress + Stroke Hash = 0xc7806 // stroke + Supports Hash = 0xcbc08 // supports + Table_Layout Hash = 0xcf20c // table-layout + Text_Align Hash = 0x10e0a // text-align 
+ Text_Align_Last Hash = 0x10e0f // text-align-last + Text_Autospace Hash = 0x4400e // text-autospace + Text_Decoration Hash = 0x7e0f // text-decoration + Text_Decoration_Color Hash = 0x2a115 // text-decoration-color + Text_Decoration_Line Hash = 0x7e14 // text-decoration-line + Text_Decoration_Style Hash = 0xb5115 // text-decoration-style + Text_Decoration_Thickness Hash = 0xc6019 // text-decoration-thickness + Text_Emphasis Hash = 0x170d // text-emphasis + Text_Emphasis_Color Hash = 0x1713 // text-emphasis-color + Text_Indent Hash = 0x3f0b // text-indent + Text_Justify Hash = 0x490c // text-justify + Text_Kashida_Space Hash = 0x5c12 // text-kashida-space + Text_Overflow Hash = 0x980d // text-overflow + Text_Shadow Hash = 0xd6d0b // text-shadow + Text_Transform Hash = 0xda40e // text-transform + Text_Underline_Position Hash = 0xdc717 // text-underline-position + Top Hash = 0x3be03 // top + Transition Hash = 0x4750a // transition + Transition_Delay Hash = 0x59a10 // transition-delay + Transition_Duration Hash = 0xb9413 // transition-duration + Transition_Property Hash = 0x47513 // transition-property + Transition_Timing_Function Hash = 0xa281a // transition-timing-function + Transparent Hash = 0xd150b // transparent + Turn Hash = 0xd1f04 // turn + Turquoise Hash = 0xa8209 // turquoise + Unicode_Bidi Hash = 0xcc40c // unicode-bidi + Unicode_Range Hash = 0xd230d // unicode-range + Unset Hash = 0xd3005 // unset + Url Hash = 0x3f403 // url + Var Hash = 0x64503 // var + Vertical_Align Hash = 0x7e60e // vertical-align + Visibility Hash = 0x4f70a // visibility + Voice_Family Hash = 0xd350c // voice-family + Volume Hash = 0xd4106 // volume + White Hash = 0x7b05 // white + White_Space Hash = 0x6500b // white-space + Whitesmoke Hash = 0x5c30a // whitesmoke + Widows Hash = 0xd7706 // widows + Width Hash = 0x26b05 // width + Word_Break Hash = 0x1670a // word-break + Word_Spacing Hash = 0x28e0c // word-spacing + Word_Wrap Hash = 0xd0209 // word-wrap + Writing_Mode Hash = 
0xc8f0c // writing-mode + X_Large Hash = 0xa707 // x-large + X_Small Hash = 0x68d07 // x-small + Xx_Large Hash = 0xa608 // xx-large + Xx_Small Hash = 0x68c08 // xx-small + Yellow Hash = 0x76506 // yellow + Yellowgreen Hash = 0x7650b // yellowgreen + Z_Index Hash = 0x68607 // z-index +) + +//var HashMap = map[string]Hash{ +// "-ms-filter": Ms_Filter, +// "accelerator": Accelerator, +// "aliceblue": Aliceblue, +// "align-content": Align_Content, +// "align-items": Align_Items, +// "align-self": Align_Self, +// "all": All, +// "alpha": Alpha, +// "animation": Animation, +// "animation-delay": Animation_Delay, +// "animation-direction": Animation_Direction, +// "animation-duration": Animation_Duration, +// "animation-fill-mode": Animation_Fill_Mode, +// "animation-iteration-count": Animation_Iteration_Count, +// "animation-name": Animation_Name, +// "animation-play-state": Animation_Play_State, +// "animation-timing-function": Animation_Timing_Function, +// "antiquewhite": Antiquewhite, +// "aquamarine": Aquamarine, +// "attr": Attr, +// "auto": Auto, +// "azimuth": Azimuth, +// "background": Background, +// "background-attachment": Background_Attachment, +// "background-clip": Background_Clip, +// "background-color": Background_Color, +// "background-image": Background_Image, +// "background-origin": Background_Origin, +// "background-position": Background_Position, +// "background-position-x": Background_Position_X, +// "background-position-y": Background_Position_Y, +// "background-repeat": Background_Repeat, +// "background-size": Background_Size, +// "behavior": Behavior, +// "black": Black, +// "blanchedalmond": Blanchedalmond, +// "blueviolet": Blueviolet, +// "bold": Bold, +// "border": Border, +// "border-bottom": Border_Bottom, +// "border-bottom-color": Border_Bottom_Color, +// "border-bottom-style": Border_Bottom_Style, +// "border-bottom-width": Border_Bottom_Width, +// "border-box": Border_Box, +// "border-collapse": Border_Collapse, +// "border-color": 
Border_Color, +// "border-left": Border_Left, +// "border-left-color": Border_Left_Color, +// "border-left-style": Border_Left_Style, +// "border-left-width": Border_Left_Width, +// "border-right": Border_Right, +// "border-right-color": Border_Right_Color, +// "border-right-style": Border_Right_Style, +// "border-right-width": Border_Right_Width, +// "border-spacing": Border_Spacing, +// "border-style": Border_Style, +// "border-top": Border_Top, +// "border-top-color": Border_Top_Color, +// "border-top-style": Border_Top_Style, +// "border-top-width": Border_Top_Width, +// "border-width": Border_Width, +// "bottom": Bottom, +// "box-shadow": Box_Shadow, +// "burlywood": Burlywood, +// "cadetblue": Cadetblue, +// "calc": Calc, +// "caption-side": Caption_Side, +// "caret-color": Caret_Color, +// "center": Center, +// "charset": Charset, +// "chartreuse": Chartreuse, +// "chocolate": Chocolate, +// "clamp": Clamp, +// "clear": Clear, +// "clip": Clip, +// "cm": Cm, +// "color": Color, +// "column-count": Column_Count, +// "column-gap": Column_Gap, +// "column-rule": Column_Rule, +// "column-rule-color": Column_Rule_Color, +// "column-rule-style": Column_Rule_Style, +// "column-rule-width": Column_Rule_Width, +// "column-width": Column_Width, +// "columns": Columns, +// "content": Content, +// "cornflowerblue": Cornflowerblue, +// "cornsilk": Cornsilk, +// "counter-increment": Counter_Increment, +// "counter-reset": Counter_Reset, +// "cue": Cue, +// "cue-after": Cue_After, +// "cue-before": Cue_Before, +// "currentcolor": Currentcolor, +// "cursive": Cursive, +// "cursor": Cursor, +// "darkblue": Darkblue, +// "darkcyan": Darkcyan, +// "darkgoldenrod": Darkgoldenrod, +// "darkgray": Darkgray, +// "darkgreen": Darkgreen, +// "darkkhaki": Darkkhaki, +// "darkmagenta": Darkmagenta, +// "darkolivegreen": Darkolivegreen, +// "darkorange": Darkorange, +// "darkorchid": Darkorchid, +// "darksalmon": Darksalmon, +// "darkseagreen": Darkseagreen, +// "darkslateblue": 
Darkslateblue, +// "darkslategray": Darkslategray, +// "darkturquoise": Darkturquoise, +// "darkviolet": Darkviolet, +// "deeppink": Deeppink, +// "deepskyblue": Deepskyblue, +// "default": Default, +// "deg": Deg, +// "direction": Direction, +// "display": Display, +// "document": Document, +// "dodgerblue": Dodgerblue, +// "dpcm": Dpcm, +// "dpi": Dpi, +// "dppx": Dppx, +// "elevation": Elevation, +// "empty-cells": Empty_Cells, +// "env": Env, +// "fantasy": Fantasy, +// "fill": Fill, +// "filter": Filter, +// "firebrick": Firebrick, +// "flex": Flex, +// "flex-basis": Flex_Basis, +// "flex-direction": Flex_Direction, +// "flex-flow": Flex_Flow, +// "flex-grow": Flex_Grow, +// "flex-shrink": Flex_Shrink, +// "flex-wrap": Flex_Wrap, +// "float": Float, +// "floralwhite": Floralwhite, +// "font": Font, +// "font-face": Font_Face, +// "font-family": Font_Family, +// "font-size": Font_Size, +// "font-size-adjust": Font_Size_Adjust, +// "font-stretch": Font_Stretch, +// "font-style": Font_Style, +// "font-variant": Font_Variant, +// "font-weight": Font_Weight, +// "forestgreen": Forestgreen, +// "fuchsia": Fuchsia, +// "function": Function, +// "gainsboro": Gainsboro, +// "ghostwhite": Ghostwhite, +// "goldenrod": Goldenrod, +// "grad": Grad, +// "greenyellow": Greenyellow, +// "grid": Grid, +// "grid-area": Grid_Area, +// "grid-auto-columns": Grid_Auto_Columns, +// "grid-auto-flow": Grid_Auto_Flow, +// "grid-auto-rows": Grid_Auto_Rows, +// "grid-column": Grid_Column, +// "grid-column-end": Grid_Column_End, +// "grid-column-gap": Grid_Column_Gap, +// "grid-column-start": Grid_Column_Start, +// "grid-row": Grid_Row, +// "grid-row-end": Grid_Row_End, +// "grid-row-gap": Grid_Row_Gap, +// "grid-row-start": Grid_Row_Start, +// "grid-template": Grid_Template, +// "grid-template-areas": Grid_Template_Areas, +// "grid-template-columns": Grid_Template_Columns, +// "grid-template-rows": Grid_Template_Rows, +// "height": Height, +// "honeydew": Honeydew, +// "hsl": Hsl, +// 
"hsla": Hsla, +// "hz": Hz, +// "ime-mode": Ime_Mode, +// "import": Import, +// "important": Important, +// "in": In, +// "include-source": Include_Source, +// "indianred": Indianred, +// "inherit": Inherit, +// "initial": Initial, +// "invert": Invert, +// "justify-content": Justify_Content, +// "justify-items": Justify_Items, +// "justify-self": Justify_Self, +// "keyframes": Keyframes, +// "khz": Khz, +// "large": Large, +// "larger": Larger, +// "lavender": Lavender, +// "lavenderblush": Lavenderblush, +// "lawngreen": Lawngreen, +// "layer-background-color": Layer_Background_Color, +// "layer-background-image": Layer_Background_Image, +// "layout-flow": Layout_Flow, +// "layout-grid": Layout_Grid, +// "layout-grid-char": Layout_Grid_Char, +// "layout-grid-char-spacing": Layout_Grid_Char_Spacing, +// "layout-grid-line": Layout_Grid_Line, +// "layout-grid-mode": Layout_Grid_Mode, +// "layout-grid-type": Layout_Grid_Type, +// "left": Left, +// "lemonchiffon": Lemonchiffon, +// "letter-spacing": Letter_Spacing, +// "lightblue": Lightblue, +// "lightcoral": Lightcoral, +// "lightcyan": Lightcyan, +// "lightgoldenrodyellow": Lightgoldenrodyellow, +// "lightgray": Lightgray, +// "lightgreen": Lightgreen, +// "lightpink": Lightpink, +// "lightsalmon": Lightsalmon, +// "lightseagreen": Lightseagreen, +// "lightskyblue": Lightskyblue, +// "lightslateblue": Lightslateblue, +// "lightsteelblue": Lightsteelblue, +// "lightyellow": Lightyellow, +// "limegreen": Limegreen, +// "line-break": Line_Break, +// "line-height": Line_Height, +// "linear-gradient": Linear_Gradient, +// "list-style": List_Style, +// "list-style-image": List_Style_Image, +// "list-style-position": List_Style_Position, +// "list-style-type": List_Style_Type, +// "local": Local, +// "magenta": Magenta, +// "margin": Margin, +// "margin-bottom": Margin_Bottom, +// "margin-left": Margin_Left, +// "margin-right": Margin_Right, +// "margin-top": Margin_Top, +// "marker-offset": Marker_Offset, +// "marks": 
Marks, +// "mask": Mask, +// "max": Max, +// "max-height": Max_Height, +// "max-width": Max_Width, +// "media": Media, +// "medium": Medium, +// "mediumaquamarine": Mediumaquamarine, +// "mediumblue": Mediumblue, +// "mediumorchid": Mediumorchid, +// "mediumpurple": Mediumpurple, +// "mediumseagreen": Mediumseagreen, +// "mediumslateblue": Mediumslateblue, +// "mediumspringgreen": Mediumspringgreen, +// "mediumturquoise": Mediumturquoise, +// "mediumvioletred": Mediumvioletred, +// "midnightblue": Midnightblue, +// "min": Min, +// "min-height": Min_Height, +// "min-width": Min_Width, +// "mintcream": Mintcream, +// "mistyrose": Mistyrose, +// "mm": Mm, +// "moccasin": Moccasin, +// "monospace": Monospace, +// "ms": Ms, +// "namespace": Namespace, +// "navajowhite": Navajowhite, +// "no-repeat": No_Repeat, +// "none": None, +// "normal": Normal, +// "offset": Offset, +// "offset-anchor": Offset_Anchor, +// "offset-distance": Offset_Distance, +// "offset-path": Offset_Path, +// "offset-position": Offset_Position, +// "offset-rotate": Offset_Rotate, +// "olivedrab": Olivedrab, +// "orangered": Orangered, +// "order": Order, +// "orphans": Orphans, +// "outline": Outline, +// "outline-color": Outline_Color, +// "outline-style": Outline_Style, +// "outline-width": Outline_Width, +// "overflow": Overflow, +// "overflow-x": Overflow_X, +// "overflow-y": Overflow_Y, +// "padding": Padding, +// "padding-bottom": Padding_Bottom, +// "padding-box": Padding_Box, +// "padding-left": Padding_Left, +// "padding-right": Padding_Right, +// "padding-top": Padding_Top, +// "page": Page, +// "page-break-after": Page_Break_After, +// "page-break-before": Page_Break_Before, +// "page-break-inside": Page_Break_Inside, +// "palegoldenrod": Palegoldenrod, +// "palegreen": Palegreen, +// "paleturquoise": Paleturquoise, +// "palevioletred": Palevioletred, +// "papayawhip": Papayawhip, +// "pause": Pause, +// "pause-after": Pause_After, +// "pause-before": Pause_Before, +// "pc": Pc, +// 
"peachpuff": Peachpuff, +// "pitch": Pitch, +// "pitch-range": Pitch_Range, +// "place-content": Place_Content, +// "place-items": Place_Items, +// "place-self": Place_Self, +// "play-during": Play_During, +// "position": Position, +// "powderblue": Powderblue, +// "progid": Progid, +// "pt": Pt, +// "px": Px, +// "q": Q, +// "quotes": Quotes, +// "rad": Rad, +// "radial-gradient": Radial_Gradient, +// "repeat": Repeat, +// "repeat-x": Repeat_X, +// "repeat-y": Repeat_Y, +// "rgb": Rgb, +// "rgba": Rgba, +// "richness": Richness, +// "right": Right, +// "rosybrown": Rosybrown, +// "round": Round, +// "row-gap": Row_Gap, +// "royalblue": Royalblue, +// "ruby-align": Ruby_Align, +// "ruby-overhang": Ruby_Overhang, +// "ruby-position": Ruby_Position, +// "s": S, +// "saddlebrown": Saddlebrown, +// "sandybrown": Sandybrown, +// "sans-serif": Sans_Serif, +// "scroll": Scroll, +// "scrollbar-3d-light-color": Scrollbar_3d_Light_Color, +// "scrollbar-arrow-color": Scrollbar_Arrow_Color, +// "scrollbar-base-color": Scrollbar_Base_Color, +// "scrollbar-dark-shadow-color": Scrollbar_Dark_Shadow_Color, +// "scrollbar-face-color": Scrollbar_Face_Color, +// "scrollbar-highlight-color": Scrollbar_Highlight_Color, +// "scrollbar-shadow-color": Scrollbar_Shadow_Color, +// "scrollbar-track-color": Scrollbar_Track_Color, +// "seagreen": Seagreen, +// "seashell": Seashell, +// "serif": Serif, +// "size": Size, +// "slateblue": Slateblue, +// "slategray": Slategray, +// "small": Small, +// "smaller": Smaller, +// "solid": Solid, +// "space": Space, +// "speak": Speak, +// "speak-header": Speak_Header, +// "speak-numeral": Speak_Numeral, +// "speak-punctuation": Speak_Punctuation, +// "speech-rate": Speech_Rate, +// "springgreen": Springgreen, +// "steelblue": Steelblue, +// "stress": Stress, +// "stroke": Stroke, +// "supports": Supports, +// "table-layout": Table_Layout, +// "text-align": Text_Align, +// "text-align-last": Text_Align_Last, +// "text-autospace": Text_Autospace, +// 
"text-decoration": Text_Decoration, +// "text-decoration-color": Text_Decoration_Color, +// "text-decoration-line": Text_Decoration_Line, +// "text-decoration-style": Text_Decoration_Style, +// "text-decoration-thickness": Text_Decoration_Thickness, +// "text-emphasis": Text_Emphasis, +// "text-emphasis-color": Text_Emphasis_Color, +// "text-indent": Text_Indent, +// "text-justify": Text_Justify, +// "text-kashida-space": Text_Kashida_Space, +// "text-overflow": Text_Overflow, +// "text-shadow": Text_Shadow, +// "text-transform": Text_Transform, +// "text-underline-position": Text_Underline_Position, +// "top": Top, +// "transition": Transition, +// "transition-delay": Transition_Delay, +// "transition-duration": Transition_Duration, +// "transition-property": Transition_Property, +// "transition-timing-function": Transition_Timing_Function, +// "transparent": Transparent, +// "turn": Turn, +// "turquoise": Turquoise, +// "unicode-bidi": Unicode_Bidi, +// "unicode-range": UnicodeRange, +// "unset": Unset, +// "url": Url, +// "var": Var, +// "vertical-align": Vertical_Align, +// "visibility": Visibility, +// "voice-family": Voice_Family, +// "volume": Volume, +// "white": White, +// "white-space": White_Space, +// "whitesmoke": Whitesmoke, +// "widows": Widows, +// "width": Width, +// "word-break": Word_Break, +// "word-spacing": Word_Spacing, +// "word-wrap": Word_Wrap, +// "writing-mode": Writing_Mode, +// "x-large": X_Large, +// "x-small": X_Small, +// "xx-large": Xx_Large, +// "xx-small": Xx_Small, +// "yellow": Yellow, +// "yellowgreen": Yellowgreen, +// "z-index": Z_Index, +//} + +// String returns the text associated with the hash. +func (i Hash) String() string { + return string(i.Bytes()) +} + +// Bytes returns the text associated with the hash. 
+func (i Hash) Bytes() []byte { + start := uint32(i >> 8) + n := uint32(i & 0xff) + if start+n > uint32(len(_Hash_text)) { + return []byte{} + } + return _Hash_text[start : start+n] +} + +// ToHash returns a hash Hash for a given []byte. Hash is a uint32 that is associated with the text in []byte. It returns zero if no match found. +func ToHash(s []byte) Hash { + if len(s) == 0 || len(s) > _Hash_maxLen { + return 0 + } + //if 3 < len(s) { + // return HashMap[string(s)] + //} + h := uint32(_Hash_hash0) + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + if i := _Hash_table[h&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { + t := _Hash_text[i>>8 : i>>8+i&0xff] + for i := 0; i < len(s); i++ { + if t[i] != s[i] { + goto NEXT + } + } + return i + } +NEXT: + if i := _Hash_table[(h>>16)&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { + t := _Hash_text[i>>8 : i>>8+i&0xff] + for i := 0; i < len(s); i++ { + if t[i] != s[i] { + return 0 + } + } + return i + } + return 0 +} + +const _Hash_hash0 = 0x9acb0442 +const _Hash_maxLen = 27 + +var _Hash_text = []byte("" + + "-ms-filteradial-gradientext-emphasis-colorgbackground-attach" + + "mentext-indentext-justify-contentext-kashida-spacelevationav" + + "ajowhitext-decoration-line-heightext-overflow-xx-largerichne" + + "ssaddlebrowno-repeat-yanimation-namespacenteruby-overhangain" + + "sborosybrownanimation-play-statext-align-lastresscrollbar-ar" + + "row-coloruby-positionanimation-timing-functionazimuthoneydew" + + "ord-breakbackground-originclude-sourcebackground-position-xb" + + "ackground-position-ybackground-repeat-xbackground-sizebehavi" + + "orblackblanchedalmondarkblueboldarkcyanimation-delayer-backg" + + "round-colorborder-bottom-colorborder-bottom-stylemonchiffont" + + "-faceborder-bottom-widthslavenderblushborder-box-shadoword-s" + + "pacinghostwhitext-decoration-colorborder-collapseashellawngr" + + "eenborder-colorborder-left-colorborder-left-styleborder-left" + + 
"-widthborder-right-colorborder-right-styleborder-right-width" + + "border-spacingrid-areanimation-durationormalphacceleratorpha" + + "nsandybrownonempty-cellsans-serifantasyborder-styleborder-to" + + "p-colorborder-top-styleborder-top-widthborder-widthburlywood" + + "arkgoldenrodarkgraycaption-sideepskybluecaret-colorchartreus" + + "echocolatext-autospaceclampadding-boxclearcolumn-counter-res" + + "etransition-propertycolumn-rule-colorcolumn-rule-stylecolumn" + + "-rule-widthcolumn-widthcornflowerbluecornsilkcue-aftercue-be" + + "forestgreenvisibilitycurrentcolorcursivecursordarkvioletdocu" + + "mentdodgerbluedpcmargin-topadding-rightdpitch-rangedppxflex-" + + "growflex-shrinkflex-wrapadding-topage-break-afterfloattransi" + + "tion-delayer-background-imagefloralwhitesmokeyframescrollbar" + + "-dark-shadow-colorfont-familyfont-size-adjustify-itemscrollb" + + "ar-face-colorfont-stretcharsetfont-stylefont-variantiquewhit" + + "e-spacefont-weightfuchsianimation-fill-modeeppinkhz-indexx-s" + + "malleroyalbluegrid-column-gapage-break-beforegrid-column-sta" + + "rtgrid-row-endarkolivegreengrid-row-gapage-break-insidegrid-" + + "row-startgrid-template-areascrollbar-track-colorgrid-templat" + + "e-columnsolidarkorangeredarkgreenyellowgreengrid-template-ro" + + "wspeak-headerimportantinheritinitialicebluevioletter-spacing" + + "rid-auto-columnscrollbar-highlight-colorinvertical-align-ite" + + "mspeak-numeralayout-grid-char-spacingrid-auto-flowjustify-se" + + "lfirebricklayout-grid-line-breaklayout-grid-modegrid-auto-ro" + + "wscrollbar-shadow-colorlayout-grid-typeachpufflex-basiscroll" + + "bar-base-colorlightbluelightcoralign-selflex-directionlightc" + + "yanimation-directionlightgoldenrodyellowlightgraylightgreenl" + + "ightpinklightsalmonlightseagreenlightskybluelightslateblueli" + + "ghtsteelbluelightyellowlimegreenlinear-gradientlist-style-im" + + "agelist-style-positionlist-style-typelocalcadetbluemaskmax-h" + + 
"eightmax-widthmediumaquamarinemediumbluemediumorchidarkorchi" + + "darkkhakime-modefaultransition-timing-functionmediumpurpleme" + + "diumseagreenmediumslatebluemediumspringgreenmediumturquoisem" + + "ediumvioletredarksalmonospacemidnightbluemin-heightmin-width" + + "mintcreamarker-offset-anchormistyrosemmarkspeak-punctuationm" + + "occasindianredarkseagreenoffset-distanceoffset-pathoffset-po" + + "sitionoffset-rotatext-decoration-styleolivedrabackground-cli" + + "padding-bottomargin-rightransition-durationoutline-coloroutl" + + "ine-styleoutline-widthoverflow-ypalegreenpaleturquoisepalevi" + + "oletredarkslategraypapayawhipalegoldenrodarkslatebluepause-a" + + "fterpause-beforeplace-contentplace-itemspeech-ratext-decorat" + + "ion-thicknesstrokeplace-selflex-flowriting-modepowderbluepro" + + "gidarkturquoisequotesupportsunicode-bidisplay-duringrid-colu" + + "mn-endarkmagentable-layout-floword-wrapadding-leftransparent" + + "urnunicode-rangeunsetvoice-familyvolumedianimation-iteration" + + "-counter-incrementext-shadowidowscrollbar-3d-light-coloruby-" + + "align-contentext-transformargin-bottomargin-leftext-underlin" + + "e-position") + +var _Hash_table = [1 << 10]Hash{ + 0x3: 0xc290b, // pause-after + 0x6: 0xd5d11, // counter-increment + 0x8: 0xcce07, // display + 0x9: 0x51a0a, // darkviolet + 0xb: 0xbf09, // no-repeat + 0xd: 0x4402, // in + 0x14: 0x6f211, // page-break-inside + 0x15: 0x6250c, // font-stretch + 0x19: 0x5f910, // font-size-adjust + 0x1a: 0x47513, // transition-property + 0x1c: 0x78105, // speak + 0x1f: 0x82a0c, // justify-self + 0x20: 0x61114, // scrollbar-face-color + 0x24: 0x2b60f, // border-collapse + 0x25: 0x68607, // z-index + 0x27: 0xd980d, // align-content + 0x2a: 0x99f13, // list-style-position + 0x2b: 0xcdb0f, // grid-column-end + 0x2c: 0x14119, // animation-timing-function + 0x30: 0xb0909, // indianred + 0x34: 0x97709, // limegreen + 0x35: 0xbc10d, // outline-width + 0x3f: 0x15a07, // azimuth + 0x40: 0x1e70e, // blanchedalmond + 0x41: 
0x84a0a, // line-break + 0x42: 0x7a209, // aliceblue + 0x43: 0xf309, // rosybrown + 0x46: 0xa7c0f, // mediumturquoise + 0x49: 0xd7706, // widows + 0x4b: 0xb370f, // offset-position + 0x4d: 0xd150b, // transparent + 0x4e: 0x79d07, // initial + 0x52: 0x1cb0f, // background-size + 0x55: 0x2505, // color + 0x56: 0x59a10, // transition-delay + 0x5a: 0x750b, // navajowhite + 0x5b: 0x7110d, // grid-template + 0x5c: 0x3b710, // border-top-color + 0x62: 0xbce0a, // overflow-y + 0x64: 0x9370d, // lightseagreen + 0x6c: 0x10e0f, // text-align-last + 0x6f: 0x8050b, // layout-grid + 0x70: 0xca09, // animation + 0x71: 0x1da08, // behavior + 0x72: 0x5390a, // margin-top + 0x74: 0x3ab0c, // border-style + 0x78: 0x5d31b, // scrollbar-dark-shadow-color + 0x79: 0x69103, // all + 0x7a: 0x3f0b, // text-indent + 0x7b: 0xbe10d, // paleturquoise + 0x7e: 0x58510, // page-break-after + 0x80: 0x5420d, // padding-right + 0x84: 0x7e60e, // vertical-align + 0x85: 0x50d07, // cursive + 0x8a: 0x7030e, // grid-row-start + 0x8c: 0xae08, // richness + 0x8e: 0x3b70a, // border-top + 0x94: 0x35509, // grid-area + 0x95: 0x85410, // layout-grid-mode + 0x96: 0xaee05, // marks + 0x97: 0x64d01, // q + 0x98: 0x78d09, // important + 0x9c: 0x406, // filter + 0x9d: 0xa8b0f, // mediumvioletred + 0xa5: 0xc570b, // speech-rate + 0xa8: 0x53702, // pc + 0xab: 0x90f, // radial-gradient + 0xae: 0x11b06, // stress + 0xb4: 0x6050d, // justify-items + 0xb7: 0x9500e, // lightslateblue + 0xba: 0x35504, // grid + 0xbb: 0xb0308, // moccasin + 0xbe: 0xd0209, // word-wrap + 0xc0: 0x6d90e, // darkolivegreen + 0xc5: 0xc6019, // text-decoration-thickness + 0xc7: 0xdb06, // center + 0xc8: 0x2a115, // text-decoration-color + 0xcb: 0xabf09, // min-width + 0xce: 0x5ee0b, // font-family + 0xd1: 0xa1c08, // ime-mode + 0xd3: 0x3d710, // border-top-width + 0xd4: 0x53906, // margin + 0xd9: 0x4880b, // column-rule + 0xda: 0x98f0a, // list-style + 0xdf: 0x6ce0c, // grid-row-end + 0xe4: 0x2050f, // animation-delay + 0xe8: 0x4aa11, // 
column-rule-width + 0xec: 0x57309, // flex-wrap + 0xed: 0xced07, // magenta + 0xee: 0x88710, // layout-grid-type + 0xef: 0x4520b, // padding-box + 0xf0: 0x7e14, // text-decoration-line + 0xf2: 0x4dd09, // cue-after + 0xf4: 0x8640e, // grid-auto-rows + 0xf5: 0x7650b, // yellowgreen + 0xf8: 0x89509, // peachpuff + 0xf9: 0x74607, // columns + 0xfa: 0x22805, // order + 0xfb: 0x3120c, // border-right + 0x100: 0x1800e, // include-source + 0x104: 0xc2905, // pause + 0x105: 0x1fc04, // bold + 0x106: 0xcc40c, // unicode-bidi + 0x108: 0x67604, // fill + 0x109: 0x75c09, // darkgreen + 0x10b: 0x45d05, // clear + 0x10c: 0x67d08, // deeppink + 0x110: 0x8e913, // animation-direction + 0x112: 0x1b811, // background-repeat + 0x117: 0xca506, // progid + 0x11d: 0x8a614, // scrollbar-base-color + 0x11e: 0xa, // -ms-filter + 0x11f: 0x2ca09, // lawngreen + 0x120: 0x51406, // cursor + 0x121: 0x44e05, // clamp + 0x123: 0x48811, // column-rule-color + 0x128: 0x40f0c, // caption-side + 0x12a: 0xc9b0a, // powderblue + 0x12b: 0xdc717, // text-underline-position + 0x12d: 0x72315, // scrollbar-track-color + 0x131: 0x81c0e, // grid-auto-flow + 0x132: 0x7810c, // speak-header + 0x133: 0x25409, // font-face + 0x136: 0xa710b, // springgreen + 0x13a: 0xc7e0a, // place-self + 0x13d: 0xc206, // repeat + 0x13e: 0x9800f, // linear-gradient + 0x142: 0x5010c, // currentcolor + 0x145: 0xad706, // offset + 0x14a: 0x69e0f, // grid-column-gap + 0x14c: 0x6905, // space + 0x14e: 0x39b0a, // sans-serif + 0x14f: 0x6360a, // font-style + 0x153: 0x66607, // fuchsia + 0x154: 0xb7904, // clip + 0x155: 0xae409, // mistyrose + 0x158: 0x9d08, // overflow + 0x15d: 0xc7806, // stroke + 0x162: 0x80510, // layout-grid-char + 0x163: 0xa420c, // mediumpurple + 0x165: 0x4f503, // env + 0x168: 0x4690d, // counter-reset + 0x16b: 0x5cb09, // keyframes + 0x16f: 0x7b05, // white + 0x172: 0x1004, // grad + 0x174: 0xdb10d, // margin-bottom + 0x175: 0x31212, // border-right-color + 0x177: 0x25404, // font + 0x178: 0xc100d, // 
palegoldenrod + 0x179: 0x73815, // grid-template-columns + 0x17a: 0x7e0f, // text-decoration + 0x17e: 0x89d0a, // flex-basis + 0x186: 0x7ef0b, // align-items + 0x189: 0x4bb0c, // column-width + 0x18a: 0x3c710, // border-top-style + 0x18b: 0x1d604, // size + 0x18c: 0xd4505, // media + 0x191: 0xb7c0e, // padding-bottom + 0x194: 0x2df11, // border-left-color + 0x195: 0x7a70a, // blueviolet + 0x198: 0x92c0b, // lightsalmon + 0x19d: 0x27108, // lavender + 0x19e: 0x5a716, // layer-background-image + 0x1a0: 0x6500b, // white-space + 0x1a3: 0xe00d, // ruby-overhang + 0x1a4: 0x24b0c, // lemonchiffon + 0x1a5: 0x3be03, // top + 0x1a9: 0x2c308, // seashell + 0x1aa: 0x7ae0e, // letter-spacing + 0x1ac: 0x2b0a, // background + 0x1af: 0x64503, // var + 0x1b0: 0xaed02, // mm + 0x1b6: 0x12015, // scrollbar-arrow-color + 0x1b8: 0xda40e, // text-transform + 0x1b9: 0x65b0b, // font-weight + 0x1ba: 0x53802, // cm + 0x1bb: 0x12006, // scroll + 0x1c0: 0x21710, // background-color + 0x1c1: 0x2710d, // lavenderblush + 0x1c6: 0xb5115, // text-decoration-style + 0x1c9: 0x79607, // inherit + 0x1cf: 0x2e604, // left + 0x1d0: 0x6490c, // antiquewhite + 0x1d4: 0xb6609, // olivedrab + 0x1da: 0x2990a, // ghostwhite + 0x1dd: 0x91009, // lightgray + 0x1e2: 0x26f04, // hsla + 0x1e3: 0x26f03, // hsl + 0x1e4: 0xbd809, // palegreen + 0x1e5: 0x4190b, // deepskyblue + 0x1e8: 0xac809, // mintcream + 0x1ea: 0x7e406, // invert + 0x1eb: 0x6400c, // font-variant + 0x1ec: 0x8fc14, // lightgoldenrodyellow + 0x1ee: 0x62f07, // charset + 0x1ef: 0xc8f0c, // writing-mode + 0x1f0: 0x5c30a, // whitesmoke + 0x1f5: 0x9d0a, // overflow-x + 0x1f6: 0xaa90c, // midnightblue + 0x1f7: 0xcb706, // quotes + 0x1f8: 0x22706, // border + 0x1fa: 0x42f0a, // chartreuse + 0x1fc: 0xba707, // outline + 0x1fd: 0xa281a, // transition-timing-function + 0x1fe: 0xcbc08, // supports + 0x204: 0x1670a, // word-break + 0x205: 0xaa009, // monospace + 0x206: 0x2850a, // box-shadow + 0x209: 0x5680b, // flex-shrink + 0x20f: 0xd0a0c, // padding-left 
+ 0x214: 0xc4d0b, // place-items + 0x216: 0xc070a, // papayawhip + 0x217: 0x17111, // background-origin + 0x218: 0x52408, // document + 0x219: 0x52c0a, // dodgerblue + 0x21c: 0x9440c, // lightskyblue + 0x21e: 0x6bd11, // grid-column-start + 0x221: 0x30111, // border-left-width + 0x224: 0x68c08, // xx-small + 0x226: 0x1f408, // darkblue + 0x229: 0x25d13, // border-bottom-width + 0x22a: 0x98f10, // list-style-image + 0x22d: 0x44504, // auto + 0x230: 0x1e205, // black + 0x231: 0xaf211, // speak-punctuation + 0x232: 0x13908, // position + 0x234: 0xc340c, // pause-before + 0x236: 0x95e0e, // lightsteelblue + 0x23a: 0xcd10b, // play-during + 0x23f: 0x83509, // firebrick + 0x249: 0x6ce08, // grid-row + 0x24a: 0x55d02, // px + 0x24c: 0x1a315, // background-position-y + 0x251: 0xd1f04, // turn + 0x256: 0xba70d, // outline-color + 0x257: 0x9c304, // calc + 0x258: 0xd4919, // animation-iteration-count + 0x259: 0xad70d, // offset-anchor + 0x25b: 0xa4e0e, // mediumseagreen + 0x25e: 0x4620c, // column-count + 0x263: 0x10e0a, // text-align + 0x266: 0x66c13, // animation-fill-mode + 0x267: 0x32412, // border-right-style + 0x268: 0xa707, // x-large + 0x269: 0x8d40e, // flex-direction + 0x26a: 0x4f70a, // visibility + 0x26f: 0xb2c0b, // offset-path + 0x270: 0x27e0a, // border-box + 0x276: 0x70103, // deg + 0x278: 0x1713, // text-emphasis-color + 0x27f: 0xc1c0d, // darkslateblue + 0x283: 0x55f09, // flex-grow + 0x285: 0x8e209, // lightcyan + 0x28a: 0x102, // ms + 0x28d: 0xa906, // larger + 0x28e: 0xa990a, // darksalmon + 0x292: 0x2f011, // border-left-style + 0x293: 0xa8209, // turquoise + 0x294: 0x3a407, // fantasy + 0x296: 0xec09, // gainsboro + 0x297: 0x201, // s + 0x298: 0x23a13, // border-bottom-style + 0x299: 0xce90b, // darkmagenta + 0x29b: 0xb50b, // saddlebrown + 0x2a0: 0x59505, // float + 0x2a3: 0x6ec07, // row-gap + 0x2a5: 0xd4106, // volume + 0x2a6: 0xab50a, // min-height + 0x2a7: 0x77012, // grid-template-rows + 0x2a9: 0x3760b, // accelerator + 0x2b0: 0x68f05, // small + 
0x2b1: 0x59804, // attr + 0x2b2: 0x28e0c, // word-spacing + 0x2b3: 0x35d12, // animation-duration + 0x2b5: 0x4dd03, // cue + 0x2b6: 0x95509, // slateblue + 0x2b8: 0x38e04, // none + 0x2b9: 0x6a30a, // column-gap + 0x2ba: 0x4e0f, // justify-content + 0x2bb: 0x5607, // content + 0x2bd: 0x54f03, // dpi + 0x2be: 0x87116, // scrollbar-shadow-color + 0x2bf: 0x78d06, // import + 0x2c0: 0xc8709, // flex-flow + 0x2c1: 0x69509, // royalblue + 0x2c3: 0x9c609, // cadetblue + 0x2c4: 0x490c, // text-justify + 0x2cb: 0x8c30a, // lightcoral + 0x2cf: 0xb890c, // margin-right + 0x2d2: 0x76506, // yellow + 0x2d3: 0x26b05, // width + 0x2d6: 0x14d03, // min + 0x2da: 0x1340d, // ruby-position + 0x2dc: 0x40708, // darkgray + 0x2e2: 0x69e0b, // grid-column + 0x2e4: 0xa1409, // darkkhaki + 0x2e5: 0xc400d, // place-content + 0x2e7: 0xbee0d, // palevioletred + 0x2ea: 0x5bd0b, // floralwhite + 0x2eb: 0xc208, // repeat-y + 0x2ee: 0x980d, // text-overflow + 0x2f1: 0xca0e, // animation-name + 0x2fb: 0x7cb19, // scrollbar-highlight-color + 0x2fe: 0x5500b, // pitch-range + 0x302: 0x3005, // round + 0x305: 0x4c70e, // cornflowerblue + 0x307: 0x7f90d, // speak-numeral + 0x308: 0x9e606, // medium + 0x30a: 0x170d, // text-emphasis + 0x30d: 0x9dd09, // max-width + 0x311: 0x36e06, // normal + 0x312: 0x68403, // khz + 0x315: 0x2903, // rgb + 0x316: 0x8ba09, // lightblue + 0x317: 0x8d909, // direction + 0x31a: 0xd350c, // voice-family + 0x31c: 0x3480e, // border-spacing + 0x321: 0x6d09, // elevation + 0x323: 0x1c308, // repeat-x + 0x324: 0x83e10, // layout-grid-line + 0x326: 0xa000c, // mediumorchid + 0x32b: 0xa6b11, // mediumspringgreen + 0x32d: 0xa905, // large + 0x32e: 0xd930a, // ruby-align + 0x330: 0xbfa0d, // darkslategray + 0x332: 0x5c12, // text-kashida-space + 0x334: 0xbb40d, // outline-style + 0x336: 0x3a005, // serif + 0x337: 0x4240b, // caret-color + 0x33a: 0x37205, // alpha + 0x33c: 0x71113, // grid-template-areas + 0x33d: 0x49911, // column-rule-style + 0x33f: 0xcf80b, // layout-flow + 
0x340: 0x31905, // right + 0x341: 0x3e70c, // border-width + 0x343: 0xb6e0f, // background-clip + 0x344: 0xd230d, // unicode-range + 0x345: 0x74c05, // solid + 0x346: 0x2df0b, // border-left + 0x348: 0x9ec0a, // aquamarine + 0x349: 0x3850a, // sandybrown + 0x34a: 0x16008, // honeydew + 0x34b: 0x75409, // orangered + 0x34c: 0xb110c, // darkseagreen + 0x34d: 0x37f07, // orphans + 0x34e: 0x6e70c, // grid-row-gap + 0x351: 0x22e06, // bottom + 0x359: 0x9c105, // local + 0x35c: 0x8cb0a, // align-self + 0x35e: 0x33612, // border-right-width + 0x360: 0x2b15, // background-attachment + 0x364: 0x9190a, // lightgreen + 0x366: 0x39302, // pt + 0x368: 0x4400e, // text-autospace + 0x36b: 0x3f403, // url + 0x36c: 0x68502, // hz + 0x371: 0x9306, // height + 0x372: 0x5ad10, // background-image + 0x377: 0x903, // rad + 0x37c: 0x21116, // layer-background-color + 0x37d: 0x1ff08, // darkcyan + 0x382: 0x18e13, // background-position + 0x384: 0x9d303, // max + 0x38c: 0xa608, // xx-large + 0x38d: 0x3f309, // burlywood + 0x38f: 0xd7c18, // scrollbar-3d-light-color + 0x390: 0x3ff09, // goldenrod + 0x392: 0x92309, // lightpink + 0x393: 0x8e0b, // line-height + 0x396: 0x22713, // border-bottom-color + 0x398: 0x80518, // layout-grid-char-spacing + 0x39c: 0x2904, // rgba + 0x3a1: 0x9f60a, // mediumblue + 0x3a3: 0x9d30a, // max-height + 0x3a4: 0x7bb11, // grid-auto-columns + 0x3a5: 0xa0b0a, // darkorchid + 0x3a9: 0x7600b, // greenyellow + 0x3ae: 0x96c0b, // lightyellow + 0x3b1: 0x4750a, // transition + 0x3b3: 0x4e60a, // cue-before + 0x3b6: 0x15208, // function + 0x3b9: 0x96309, // steelblue + 0x3be: 0xa5c0f, // mediumslateblue + 0x3bf: 0xcaa0d, // darkturquoise + 0x3c0: 0x43909, // chocolate + 0x3c3: 0x5f909, // font-size + 0x3c5: 0x55f04, // flex + 0x3c7: 0xd3005, // unset + 0x3c8: 0xd6d0b, // text-shadow + 0x3ca: 0x4ec0b, // forestgreen + 0x3cc: 0xbfe09, // slategray + 0x3cd: 0x6ac11, // page-break-before + 0x3ce: 0x55b04, // dppx + 0x3d0: 0x2270d, // border-bottom + 0x3d3: 0xb1d0f, // 
offset-distance + 0x3d4: 0x3fb0d, // darkgoldenrod + 0x3d6: 0x53604, // dpcm + 0x3d8: 0x7500a, // darkorange + 0x3dc: 0xb9413, // transition-duration + 0x3de: 0x2d30c, // border-color + 0x3df: 0x18e15, // background-position-x + 0x3e0: 0x55005, // pitch + 0x3e2: 0xdbd0b, // margin-left + 0x3e3: 0x58504, // page + 0x3e5: 0x57b0b, // padding-top + 0x3e7: 0xb460d, // offset-rotate + 0x3e8: 0x93c08, // seagreen + 0x3e9: 0x4d508, // cornsilk + 0x3ea: 0x68f07, // smaller + 0x3ec: 0xcf20c, // table-layout + 0x3ed: 0xfc14, // animation-play-state + 0x3ef: 0xa2207, // default + 0x3f0: 0x68d07, // x-small + 0x3f3: 0x9e610, // mediumaquamarine + 0x3f4: 0xad00d, // marker-offset + 0x3f9: 0xd409, // namespace + 0x3fa: 0x9cf04, // mask + 0x3fb: 0x45207, // padding + 0x3fd: 0x9b20f, // list-style-type + 0x3ff: 0x3910b, // empty-cells +} diff --git a/vendor/github.com/tdewolff/minify/v2/css/table.go b/vendor/github.com/tdewolff/minify/v2/css/table.go new file mode 100644 index 0000000..b7ecb84 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/css/table.go @@ -0,0 +1,198 @@ +package css + +var optionalZeroDimension = map[string]bool{ + "px": true, + "mm": true, + "q": true, + "cm": true, + "in": true, + "pt": true, + "pc": true, + "ch": true, + "em": true, + "ex": true, + "rem": true, + "vh": true, + "vw": true, + "vmin": true, + "vmax": true, + "deg": true, + "grad": true, + "rad": true, + "turn": true, +} + +// Uses http://www.w3.org/TR/2010/PR-css3-color-20101028/ for colors + +// ShortenColorHex maps a color hexcode to its shorter name +var ShortenColorHex = map[string][]byte{ + "#000080": []byte("navy"), + "#008000": []byte("green"), + "#008080": []byte("teal"), + "#4b0082": []byte("indigo"), + "#800000": []byte("maroon"), + "#800080": []byte("purple"), + "#808000": []byte("olive"), + "#808080": []byte("gray"), + "#a0522d": []byte("sienna"), + "#a52a2a": []byte("brown"), + "#c0c0c0": []byte("silver"), + "#cd853f": []byte("peru"), + "#d2b48c": []byte("tan"), + "#da70d6": 
[]byte("orchid"), + "#dda0dd": []byte("plum"), + "#ee82ee": []byte("violet"), + "#f0e68c": []byte("khaki"), + "#f0ffff": []byte("azure"), + "#f5deb3": []byte("wheat"), + "#f5f5dc": []byte("beige"), + "#fa8072": []byte("salmon"), + "#faf0e6": []byte("linen"), + "#ff6347": []byte("tomato"), + "#ff7f50": []byte("coral"), + "#ffa500": []byte("orange"), + "#ffc0cb": []byte("pink"), + "#ffd700": []byte("gold"), + "#ffe4c4": []byte("bisque"), + "#fffafa": []byte("snow"), + "#fffff0": []byte("ivory"), + "#ff0000": []byte("red"), + "#f00": []byte("red"), +} + +// ShortenColorName maps a color name to its shorter hexcode +var ShortenColorName = map[Hash][]byte{ + Black: []byte("#000"), + Darkblue: []byte("#00008b"), + Mediumblue: []byte("#0000cd"), + Darkgreen: []byte("#006400"), + Darkcyan: []byte("#008b8b"), + Deepskyblue: []byte("#00bfff"), + Darkturquoise: []byte("#00ced1"), + Mediumspringgreen: []byte("#00fa9a"), + Springgreen: []byte("#00ff7f"), + Midnightblue: []byte("#191970"), + Dodgerblue: []byte("#1e90ff"), + Lightseagreen: []byte("#20b2aa"), + Forestgreen: []byte("#228b22"), + Seagreen: []byte("#2e8b57"), + Darkslategray: []byte("#2f4f4f"), + Limegreen: []byte("#32cd32"), + Mediumseagreen: []byte("#3cb371"), + Turquoise: []byte("#40e0d0"), + Royalblue: []byte("#4169e1"), + Steelblue: []byte("#4682b4"), + Darkslateblue: []byte("#483d8b"), + Mediumturquoise: []byte("#48d1cc"), + Darkolivegreen: []byte("#556b2f"), + Cadetblue: []byte("#5f9ea0"), + Cornflowerblue: []byte("#6495ed"), + Mediumaquamarine: []byte("#66cdaa"), + Slateblue: []byte("#6a5acd"), + Olivedrab: []byte("#6b8e23"), + Slategray: []byte("#708090"), + Lightslateblue: []byte("#789"), + Mediumslateblue: []byte("#7b68ee"), + Lawngreen: []byte("#7cfc00"), + Chartreuse: []byte("#7fff00"), + Aquamarine: []byte("#7fffd4"), + Lightskyblue: []byte("#87cefa"), + Blueviolet: []byte("#8a2be2"), + Darkmagenta: []byte("#8b008b"), + Saddlebrown: []byte("#8b4513"), + Darkseagreen: []byte("#8fbc8f"), + Lightgreen: 
[]byte("#90ee90"), + Mediumpurple: []byte("#9370db"), + Darkviolet: []byte("#9400d3"), + Palegreen: []byte("#98fb98"), + Darkorchid: []byte("#9932cc"), + Yellowgreen: []byte("#9acd32"), + Darkgray: []byte("#a9a9a9"), + Lightblue: []byte("#add8e6"), + Greenyellow: []byte("#adff2f"), + Paleturquoise: []byte("#afeeee"), + Lightsteelblue: []byte("#b0c4de"), + Powderblue: []byte("#b0e0e6"), + Firebrick: []byte("#b22222"), + Darkgoldenrod: []byte("#b8860b"), + Mediumorchid: []byte("#ba55d3"), + Rosybrown: []byte("#bc8f8f"), + Darkkhaki: []byte("#bdb76b"), + Mediumvioletred: []byte("#c71585"), + Indianred: []byte("#cd5c5c"), + Chocolate: []byte("#d2691e"), + Lightgray: []byte("#d3d3d3"), + Goldenrod: []byte("#daa520"), + Palevioletred: []byte("#db7093"), + Gainsboro: []byte("#dcdcdc"), + Burlywood: []byte("#deb887"), + Lightcyan: []byte("#e0ffff"), + Lavender: []byte("#e6e6fa"), + Darksalmon: []byte("#e9967a"), + Palegoldenrod: []byte("#eee8aa"), + Lightcoral: []byte("#f08080"), + Aliceblue: []byte("#f0f8ff"), + Honeydew: []byte("#f0fff0"), + Sandybrown: []byte("#f4a460"), + Whitesmoke: []byte("#f5f5f5"), + Mintcream: []byte("#f5fffa"), + Ghostwhite: []byte("#f8f8ff"), + Antiquewhite: []byte("#faebd7"), + Lightgoldenrodyellow: []byte("#fafad2"), + Fuchsia: []byte("#f0f"), + Magenta: []byte("#f0f"), + Deeppink: []byte("#ff1493"), + Orangered: []byte("#ff4500"), + Darkorange: []byte("#ff8c00"), + Lightsalmon: []byte("#ffa07a"), + Lightpink: []byte("#ffb6c1"), + Peachpuff: []byte("#ffdab9"), + Navajowhite: []byte("#ffdead"), + Moccasin: []byte("#ffe4b5"), + Mistyrose: []byte("#ffe4e1"), + Blanchedalmond: []byte("#ffebcd"), + Papayawhip: []byte("#ffefd5"), + Lavenderblush: []byte("#fff0f5"), + Seashell: []byte("#fff5ee"), + Cornsilk: []byte("#fff8dc"), + Lemonchiffon: []byte("#fffacd"), + Floralwhite: []byte("#fffaf0"), + Yellow: []byte("#ff0"), + Lightyellow: []byte("#ffffe0"), + White: []byte("#fff"), +} + +// PropertyOverrides is a map of which properties are overridden by 
the given property. +var PropertyOverrides = map[Hash][]Hash{ + Background: {Background, Background_Image, Background_Position, Background_Size, Background_Repeat, Background_Origin, Background_Clip, Background_Attachment, Background_Color}, + Font: {Font, Font_Style, Font_Variant, Font_Weight, Font_Stretch, Font_Size, Font_Family, Line_Height}, + Border: {Border, Border_Width, Border_Top_Width, Border_Right_Width, Border_Bottom_Width, Border_Left_Width, Border_Style, Border_Top_Style, Border_Right_Style, Border_Bottom_Style, Border_Left_Style, Border_Color, Border_Top_Color, Border_Right_Color, Border_Bottom_Color, Border_Left_Color}, + Border_Width: {Border_Width, Border_Top_Width, Border_Right_Width, Border_Bottom_Width, Border_Left_Width}, + Border_Style: {Border_Style, Border_Top_Style, Border_Right_Style, Border_Bottom_Style, Border_Left_Style}, + Border_Color: {Border_Color, Border_Top_Color, Border_Right_Color, Border_Bottom_Color, Border_Left_Color}, + Border_Top: {Border_Top, Border_Top_Width, Border_Top_Style, Border_Top_Color}, + Border_Right: {Border_Right, Border_Right_Width, Border_Right_Style, Border_Right_Color}, + Border_Bottom: {Border_Bottom, Border_Bottom_Width, Border_Bottom_Style, Border_Bottom_Color}, + Border_Left: {Border_Left, Border_Left_Width, Border_Left_Style, Border_Left_Color}, + Margin: {Margin, Margin_Top, Margin_Right, Margin_Bottom, Margin_Left}, + Padding: {Padding, Padding_Top, Padding_Right, Padding_Bottom, Padding_Left}, + Column_Rule: {Column_Rule, Column_Rule_Width, Column_Rule_Style, Column_Rule_Color}, + Animation: {Animation, Animation_Name, Animation_Duration, Animation_Timing_Function, Animation_Delay, Animation_Iteration_Count, Animation_Direction, Animation_Fill_Mode, Animation_Play_State}, + Columns: {Columns, Column_Width, Column_Count}, + Flex: {Flex, Flex_Basis, Flex_Grow, Flex_Shrink}, + Flex_Flow: {Flex_Flow, Flex_Direction, Flex_Wrap}, + Grid: {Grid, Grid_Template_Rows, Grid_Template_Columns, 
Grid_Template_Areas, Grid_Auto_Rows, Grid_Auto_Columns, Grid_Auto_Flow, Grid_Column_Gap, Grid_Row_Gap, Column_Gap, Row_Gap}, + Grid_Area: {Grid_Area, Grid_Row_Start, Grid_Column_Start, Grid_Row_End, Grid_Column_End}, + Grid_Row: {Grid_Row, Grid_Row_Start, Grid_Row_End}, + Grid_Column: {Grid_Column, Grid_Column_Start, Grid_Column_End}, + Grid_Template: {Grid_Template, Grid_Template_Rows, Grid_Template_Columns, Grid_Template_Areas}, + List_Style: {List_Style, List_Style_Image, List_Style_Position, List_Style_Type}, + Offset: {Offset, Offset_Position, Offset_Path, Offset_Distance, Offset_Anchor, Offset_Rotate}, + Outline: {Outline, Outline_Width, Outline_Style, Outline_Color}, + Overflow: {Overflow, Overflow_X, Overflow_Y}, + Place_Content: {Place_Content, Align_Content, Justify_Content}, + Place_Items: {Place_Items, Align_Items, Justify_Items}, + Place_Self: {Place_Self, Align_Self, Justify_Self}, + Text_Decoration: {Text_Decoration, Text_Decoration_Color, Text_Decoration_Color, Text_Decoration_Line, Text_Decoration_Thickness}, + Transition: {Transition, Transition_Property, Transition_Duration, Transition_Timing_Function, Transition_Delay}, +} diff --git a/vendor/github.com/tdewolff/minify/v2/css/util.go b/vendor/github.com/tdewolff/minify/v2/css/util.go new file mode 100644 index 0000000..7325aca --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/css/util.go @@ -0,0 +1,55 @@ +package css + +import ( + "encoding/hex" + + "github.com/tdewolff/parse/v2" + "github.com/tdewolff/parse/v2/css" +) + +func removeMarkupNewlines(data []byte) []byte { + // remove any \\\r\n \\\r \\\n + for i := 1; i < len(data)-2; i++ { + if data[i] == '\\' && (data[i+1] == '\n' || data[i+1] == '\r') { + // encountered first replacee, now start to move bytes to the front + j := i + 2 + if data[i+1] == '\r' && len(data) > i+2 && data[i+2] == '\n' { + j++ + } + for ; j < len(data); j++ { + if data[j] == '\\' && len(data) > j+1 && (data[j+1] == '\n' || data[j+1] == '\r') { + if data[j+1] == 
'\r' && len(data) > j+2 && data[j+2] == '\n' { + j++ + } + j++ + } else { + data[i] = data[j] + i++ + } + } + data = data[:i] + break + } + } + return data +} + +func rgbToToken(r, g, b float64) Token { + // r, g, b are in interval [0.0, 1.0] + rgb := []byte{byte((r * 255.0) + 0.5), byte((g * 255.0) + 0.5), byte((b * 255.0) + 0.5)} + + val := make([]byte, 7) + val[0] = '#' + hex.Encode(val[1:], rgb) + parse.ToLower(val) + if s, ok := ShortenColorHex[string(val[:7])]; ok { + return Token{css.IdentToken, s, nil, 0, 0} + } else if val[1] == val[2] && val[3] == val[4] && val[5] == val[6] { + val[2] = val[3] + val[3] = val[5] + val = val[:4] + } else { + val = val[:7] + } + return Token{css.HashToken, val, nil, 0, 0} +} diff --git a/vendor/github.com/tdewolff/minify/v2/html/buffer.go b/vendor/github.com/tdewolff/minify/v2/html/buffer.go new file mode 100644 index 0000000..f2a6f8c --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/html/buffer.go @@ -0,0 +1,139 @@ +package html + +import ( + "github.com/tdewolff/parse/v2" + "github.com/tdewolff/parse/v2/html" +) + +// Token is a single token unit with an attribute value (if given) and hash of the data. +type Token struct { + html.TokenType + Hash Hash + Data []byte + Text []byte + AttrVal []byte + Traits traits + Offset int + HasTemplate bool +} + +// TokenBuffer is a buffer that allows for token look-ahead. +type TokenBuffer struct { + r *parse.Input + l *html.Lexer + + buf []Token + pos int + + attrBuffer []*Token +} + +// NewTokenBuffer returns a new TokenBuffer. 
+func NewTokenBuffer(r *parse.Input, l *html.Lexer) *TokenBuffer { + return &TokenBuffer{ + r: r, + l: l, + buf: make([]Token, 0, 8), + } +} + +func (z *TokenBuffer) read(t *Token) { + t.Offset = z.r.Offset() + t.TokenType, t.Data = z.l.Next() + t.Text = z.l.Text() + t.HasTemplate = z.l.HasTemplate() + if t.TokenType == html.AttributeToken { + t.Offset += 1 + len(t.Text) + 1 + t.AttrVal = z.l.AttrVal() + if 1 < len(t.AttrVal) && (t.AttrVal[0] == '"' || t.AttrVal[0] == '\'') { + t.Offset++ + t.AttrVal = t.AttrVal[1 : len(t.AttrVal)-1] // quotes will be readded in attribute loop if necessary + } + t.Hash = ToHash(t.Text) + t.Traits = attrMap[t.Hash] + } else if t.TokenType == html.StartTagToken || t.TokenType == html.EndTagToken { + t.AttrVal = nil + t.Hash = ToHash(t.Text) + t.Traits = tagMap[t.Hash] // zero if not exist + } else { + t.AttrVal = nil + t.Hash = 0 + t.Traits = 0 + } +} + +// Peek returns the ith element and possibly does an allocation. +// Peeking past an error will panic. +func (z *TokenBuffer) Peek(pos int) *Token { + pos += z.pos + if pos >= len(z.buf) { + if len(z.buf) > 0 && z.buf[len(z.buf)-1].TokenType == html.ErrorToken { + return &z.buf[len(z.buf)-1] + } + + c := cap(z.buf) + d := len(z.buf) - z.pos + p := pos - z.pos + 1 // required peek length + var buf []Token + if 2*p > c { + buf = make([]Token, 0, 2*c+p) + } else { + buf = z.buf + } + copy(buf[:d], z.buf[z.pos:]) + + buf = buf[:p] + pos -= z.pos + for i := d; i < p; i++ { + z.read(&buf[i]) + if buf[i].TokenType == html.ErrorToken { + buf = buf[:i+1] + pos = i + break + } + } + z.pos, z.buf = 0, buf + } + return &z.buf[pos] +} + +// Shift returns the first element and advances position. +func (z *TokenBuffer) Shift() *Token { + if z.pos >= len(z.buf) { + t := &z.buf[:1][0] + z.read(t) + return t + } + t := &z.buf[z.pos] + z.pos++ + return t +} + +// Attributes extracts the gives attribute hashes from a tag. +// It returns in the same order pointers to the requested token data or nil. 
+func (z *TokenBuffer) Attributes(hashes ...Hash) []*Token { + n := 0 + for { + if t := z.Peek(n); t.TokenType != html.AttributeToken { + break + } + n++ + } + if len(hashes) > cap(z.attrBuffer) { + z.attrBuffer = make([]*Token, len(hashes)) + } else { + z.attrBuffer = z.attrBuffer[:len(hashes)] + for i := range z.attrBuffer { + z.attrBuffer[i] = nil + } + } + for i := z.pos; i < z.pos+n; i++ { + attr := &z.buf[i] + for j, hash := range hashes { + if hash == attr.Hash { + z.attrBuffer[j] = attr + } + } + } + return z.attrBuffer +} diff --git a/vendor/github.com/tdewolff/minify/v2/html/hash.go b/vendor/github.com/tdewolff/minify/v2/html/hash.go new file mode 100644 index 0000000..5eefceb --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/html/hash.go @@ -0,0 +1,610 @@ +package html + +// generated by hasher -type=Hash -file=hash.go; DO NOT EDIT, except for adding more constants to the list and rerun go generate + +// uses github.com/tdewolff/hasher +//go:generate hasher -type=Hash -file=hash.go + +// Hash defines perfect hashes for a predefined list of strings +type Hash uint32 + +// Unique hash definitions to be used instead of strings +const ( + A Hash = 0x1 // a + Abbr Hash = 0x40004 // abbr + About Hash = 0x5 // about + Accept Hash = 0xc06 // accept + Accept_Charset Hash = 0xc0e // accept-charset + Accesskey Hash = 0x2c09 // accesskey + Acronym Hash = 0x3507 // acronym + Action Hash = 0x26006 // action + Address Hash = 0x6d07 // address + Allow Hash = 0x31f05 // allow + Allowfullscreen Hash = 0x31f0f // allowfullscreen + Amp_Boilerplate Hash = 0x5e0f // amp-boilerplate + Applet Hash = 0xee06 // applet + Area Hash = 0x2c304 // area + Article Hash = 0x22507 // article + As Hash = 0x2102 // as + Aside Hash = 0x9205 // aside + Async Hash = 0x8a05 // async + Audio Hash = 0x9d05 // audio + Autocapitalize Hash = 0xc30e // autocapitalize + Autocomplete Hash = 0xd10c // autocomplete + Autofocus Hash = 0xe309 // autofocus + Autoplay Hash = 0xfc08 // autoplay + B 
Hash = 0x101 // b + Base Hash = 0x2004 // base + Basefont Hash = 0x2008 // basefont + Bb Hash = 0x40102 // bb + Bdi Hash = 0x8303 // bdi + Bdo Hash = 0x3dc03 // bdo + Big Hash = 0x12f03 // big + Blocking Hash = 0x13208 // blocking + Blockquote Hash = 0x13a0a // blockquote + Body Hash = 0x804 // body + Br Hash = 0x14b02 // br + Button Hash = 0x14406 // button + Canvas Hash = 0x8e06 // canvas + Caption Hash = 0x23707 // caption + Capture Hash = 0x10d07 // capture + Center Hash = 0x24f06 // center + Charset Hash = 0x1307 // charset + Checked Hash = 0x37707 // checked + Cite Hash = 0x14d04 // cite + Class Hash = 0x15a05 // class + Code Hash = 0x17604 // code + Col Hash = 0x17f03 // col + Colgroup Hash = 0x17f08 // colgroup + Color Hash = 0x19e05 // color + Cols Hash = 0x1a304 // cols + Colspan Hash = 0x1a307 // colspan + Content Hash = 0x1b107 // content + Contenteditable Hash = 0x1b10f // contenteditable + Controls Hash = 0x1cc08 // controls + Coords Hash = 0x1e306 // coords + Crossorigin Hash = 0x2160b // crossorigin + Data Hash = 0xad04 // data + Datalist Hash = 0xad08 // datalist + Datatype Hash = 0x11908 // datatype + Datetime Hash = 0x28508 // datetime + Dd Hash = 0x6e02 // dd + Decoding Hash = 0x9508 // decoding + Default Hash = 0x17807 // default + Defer Hash = 0x4405 // defer + Del Hash = 0x1f203 // del + Details Hash = 0x20b07 // details + Dfn Hash = 0x16a03 // dfn + Dialog Hash = 0x28d06 // dialog + Dir Hash = 0x8403 // dir + Disabled Hash = 0x19208 // disabled + Div Hash = 0x19903 // div + Dl Hash = 0x1c302 // dl + Draggable Hash = 0x1da09 // draggable + Dt Hash = 0x40902 // dt + Em Hash = 0xdc02 // em + Embed Hash = 0x16605 // embed + Enctype Hash = 0x26a07 // enctype + Enterkeyhint Hash = 0x2500c // enterkeyhint + Fetchpriority Hash = 0x1220d // fetchpriority + Fieldset Hash = 0x22c08 // fieldset + Figcaption Hash = 0x2340a // figcaption + Figure Hash = 0x24506 // figure + Font Hash = 0x2404 // font + Footer Hash = 0x1a06 // footer + For Hash = 0x25c03 // 
for + Form Hash = 0x25c04 // form + Formaction Hash = 0x25c0a // formaction + Formenctype Hash = 0x2660b // formenctype + Formmethod Hash = 0x2710a // formmethod + Formnovalidate Hash = 0x27b0e // formnovalidate + Formtarget Hash = 0x2930a // formtarget + Frame Hash = 0x16e05 // frame + Frameset Hash = 0x16e08 // frameset + H1 Hash = 0x2d502 // h1 + H2 Hash = 0x38602 // h2 + H3 Hash = 0x39502 // h3 + H4 Hash = 0x40b02 // h4 + H5 Hash = 0x29d02 // h5 + H6 Hash = 0x29f02 // h6 + Head Hash = 0x36c04 // head + Header Hash = 0x36c06 // header + Headers Hash = 0x36c07 // headers + Height Hash = 0x2a106 // height + Hgroup Hash = 0x2b506 // hgroup + Hidden Hash = 0x2cc06 // hidden + High Hash = 0x2d204 // high + Hr Hash = 0x2d702 // hr + Href Hash = 0x2d704 // href + Hreflang Hash = 0x2d708 // hreflang + Html Hash = 0x2a504 // html + Http_Equiv Hash = 0x2df0a // http-equiv + I Hash = 0x2801 // i + Id Hash = 0x9402 // id + Iframe Hash = 0x2f206 // iframe + Image Hash = 0x30005 // image + Imagesizes Hash = 0x3000a // imagesizes + Imagesrcset Hash = 0x30d0b // imagesrcset + Img Hash = 0x31803 // img + Inert Hash = 0x10805 // inert + Inlist Hash = 0x21f06 // inlist + Input Hash = 0x3d05 // input + Inputmode Hash = 0x3d09 // inputmode + Ins Hash = 0x31b03 // ins + Is Hash = 0xb202 // is + Ismap Hash = 0x32e05 // ismap + Itemid Hash = 0x2fa06 // itemid + Itemprop Hash = 0x14e08 // itemprop + Itemref Hash = 0x34507 // itemref + Itemscope Hash = 0x35709 // itemscope + Itemtype Hash = 0x36108 // itemtype + Kbd Hash = 0x8203 // kbd + Kind Hash = 0xaa04 // kind + Label Hash = 0x1c405 // label + Lang Hash = 0x2db04 // lang + Legend Hash = 0x1be06 // legend + Li Hash = 0xb102 // li + Link Hash = 0x1c804 // link + List Hash = 0xb104 // list + Loading Hash = 0x3ad07 // loading + Loop Hash = 0x2a804 // loop + Low Hash = 0x32103 // low + Main Hash = 0x3b04 // main + Map Hash = 0xed03 // map + Mark Hash = 0x7f04 // mark + Marquee Hash = 0x3e407 // marquee + Math Hash = 0x36904 // math + Max 
Hash = 0x37e03 // max + Maxlength Hash = 0x37e09 // maxlength + Media Hash = 0x28b05 // media + Menu Hash = 0x2f604 // menu + Menuitem Hash = 0x2f608 // menuitem + Meta Hash = 0x5004 // meta + Meter Hash = 0x38805 // meter + Method Hash = 0x27506 // method + Min Hash = 0x38d03 // min + Minlength Hash = 0x38d09 // minlength + Multiple Hash = 0x39708 // multiple + Muted Hash = 0x39f05 // muted + Name Hash = 0x4e04 // name + Nav Hash = 0xbc03 // nav + Nobr Hash = 0x14904 // nobr + Noembed Hash = 0x16407 // noembed + Noframes Hash = 0x16c08 // noframes + Nomodule Hash = 0x1a908 // nomodule + Noscript Hash = 0x23d08 // noscript + Novalidate Hash = 0x27f0a // novalidate + Object Hash = 0xa106 // object + Ol Hash = 0x18002 // ol + Open Hash = 0x35d04 // open + Optgroup Hash = 0x2aa08 // optgroup + Optimum Hash = 0x3de07 // optimum + Option Hash = 0x2ec06 // option + Output Hash = 0x206 // output + P Hash = 0x501 // p + Param Hash = 0x7b05 // param + Pattern Hash = 0xb607 // pattern + Picture Hash = 0x18607 // picture + Ping Hash = 0x2b104 // ping + Plaintext Hash = 0x2ba09 // plaintext + Playsinline Hash = 0x1000b // playsinline + Popover Hash = 0x33207 // popover + Popovertarget Hash = 0x3320d // popovertarget + Popovertargetaction Hash = 0x33213 // popovertargetaction + Portal Hash = 0x3f406 // portal + Poster Hash = 0x41006 // poster + Pre Hash = 0x3a403 // pre + Prefix Hash = 0x3a406 // prefix + Preload Hash = 0x3aa07 // preload + Profile Hash = 0x3b407 // profile + Progress Hash = 0x3bb08 // progress + Property Hash = 0x15208 // property + Q Hash = 0x11401 // q + Rb Hash = 0x1f02 // rb + Readonly Hash = 0x2c408 // readonly + Referrerpolicy Hash = 0x3490e // referrerpolicy + Rel Hash = 0x3ab03 // rel + Required Hash = 0x11208 // required + Resource Hash = 0x24908 // resource + Rev Hash = 0x18b03 // rev + Reversed Hash = 0x18b08 // reversed + Rows Hash = 0x4804 // rows + Rowspan Hash = 0x4807 // rowspan + Rp Hash = 0x6702 // rp + Rt Hash = 0x10b02 // rt + Rtc Hash = 
0x10b03 // rtc + Ruby Hash = 0x8604 // ruby + S Hash = 0x1701 // s + Samp Hash = 0x5d04 // samp + Sandbox Hash = 0x7307 // sandbox + Scope Hash = 0x35b05 // scope + Script Hash = 0x23f06 // script + Section Hash = 0x15e07 // section + Select Hash = 0x1d306 // select + Selected Hash = 0x1d308 // selected + Shadowrootdelegatesfocus Hash = 0x1e818 // shadowrootdelegatesfocus + Shadowrootmode Hash = 0x1ff0e // shadowrootmode + Shape Hash = 0x21105 // shape + Size Hash = 0x30504 // size + Sizes Hash = 0x30505 // sizes + Slot Hash = 0x30904 // slot + Small Hash = 0x31d05 // small + Source Hash = 0x24b06 // source + Span Hash = 0x4b04 // span + Spellcheck Hash = 0x3720a // spellcheck + Src Hash = 0x31203 // src + Srclang Hash = 0x3c207 // srclang + Srcset Hash = 0x31206 // srcset + Start Hash = 0x22305 // start + Step Hash = 0xb304 // step + Strike Hash = 0x3c906 // strike + Strong Hash = 0x3cf06 // strong + Style Hash = 0x3d505 // style + Sub Hash = 0x3da03 // sub + Summary Hash = 0x3eb07 // summary + Sup Hash = 0x3f203 // sup + Svg Hash = 0x3fa03 // svg + Tabindex Hash = 0x5208 // tabindex + Table Hash = 0x1bb05 // table + Target Hash = 0x29706 // target + Tbody Hash = 0x705 // tbody + Td Hash = 0x1f102 // td + Template Hash = 0xdb08 // template + Text Hash = 0x2bf04 // text + Textarea Hash = 0x2bf08 // textarea + Tfoot Hash = 0x1905 // tfoot + Th Hash = 0x27702 // th + Thead Hash = 0x36b05 // thead + Time Hash = 0x28904 // time + Title Hash = 0x2705 // title + Tr Hash = 0xa602 // tr + Track Hash = 0xa605 // track + Translate Hash = 0xf309 // translate + Tt Hash = 0xb802 // tt + Type Hash = 0x11d04 // type + Typeof Hash = 0x11d06 // typeof + U Hash = 0x301 // u + Ul Hash = 0x17c02 // ul + Usemap Hash = 0xea06 // usemap + Value Hash = 0xbe05 // value + Var Hash = 0x19b03 // var + Video Hash = 0x2e805 // video + Vocab Hash = 0x3fd05 // vocab + Wbr Hash = 0x40403 // wbr + Width Hash = 0x40705 // width + Wrap Hash = 0x40d04 // wrap + Xmlns Hash = 0x5905 // xmlns + Xmp Hash 
= 0x7903 // xmp +) + +// String returns the hash' name. +func (i Hash) String() string { + start := uint32(i >> 8) + n := uint32(i & 0xff) + if start+n > uint32(len(_Hash_text)) { + return "" + } + return _Hash_text[start : start+n] +} + +// ToHash returns the hash whose name is s. It returns zero if there is no +// such hash. It is case sensitive. +func ToHash(s []byte) Hash { + if len(s) == 0 || len(s) > _Hash_maxLen { + return 0 + } + h := uint32(_Hash_hash0) + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + if i := _Hash_table[h&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { + t := _Hash_text[i>>8 : i>>8+i&0xff] + for i := 0; i < len(s); i++ { + if t[i] != s[i] { + goto NEXT + } + } + return i + } +NEXT: + if i := _Hash_table[(h>>16)&uint32(len(_Hash_table)-1)]; int(i&0xff) == len(s) { + t := _Hash_text[i>>8 : i>>8+i&0xff] + for i := 0; i < len(s); i++ { + if t[i] != s[i] { + return 0 + } + } + return i + } + return 0 +} + +const _Hash_hash0 = 0x51243bbc +const _Hash_maxLen = 24 +const _Hash_text = "aboutputbodyaccept-charsetfooterbasefontitleaccesskeyacronym" + + "ainputmodeferowspanametabindexmlnsamp-boilerplateaddressandb" + + "oxmparamarkbdirubyasyncanvasidecodingaudiobjectrackindatalis" + + "tepatternavalueautocapitalizeautocompletemplateautofocusemap" + + "pletranslateautoplaysinlinertcapturequiredatatypeofetchprior" + + "itybigblockingblockquotebuttonobrcitempropertyclassectionoem" + + "bedfnoframesetcodefaultcolgroupictureversedisabledivarcolorc" + + "olspanomodulecontenteditablegendlabelinkcontrolselectedragga" + + "blecoordshadowrootdelegatesfocushadowrootmodetailshapecrosso" + + "riginlistarticlefieldsetfigcaptionoscriptfiguresourcenterkey" + + "hintformactionformenctypeformmethodformnovalidatetimedialogf" + + "ormtargeth5h6heightmlooptgroupinghgrouplaintextareadonlyhidd" + + "enhigh1hreflanghttp-equivideoptioniframenuitemidimagesizeslo" + + "timagesrcsetimginsmallowfullscreenismapopovertargetactionite" + + 
"mreferrerpolicyitemscopenitemtypematheaderspellcheckedmaxlen" + + "gth2meterminlength3multiplemutedprefixpreloadingprofileprogr" + + "essrclangstrikestrongstylesubdoptimumarqueesummarysuportalsv" + + "gvocabbrwbrwidth4wraposter" + +var _Hash_table = [1 << 9]Hash{ + 0x0: 0x4405, // defer + 0x5: 0x18002, // ol + 0x6: 0x3720a, // spellcheck + 0x7: 0x40b02, // h4 + 0x8: 0x40705, // width + 0x9: 0x9402, // id + 0xb: 0x14904, // nobr + 0xc: 0x31d05, // small + 0xf: 0x2b506, // hgroup + 0x10: 0x27702, // th + 0x15: 0x24f06, // center + 0x18: 0xd10c, // autocomplete + 0x1b: 0x2c304, // area + 0x1e: 0x17f03, // col + 0x1f: 0x2a106, // height + 0x21: 0x4b04, // span + 0x22: 0x37e03, // max + 0x23: 0x3cf06, // strong + 0x24: 0x501, // p + 0x29: 0x24b06, // source + 0x2c: 0x8e06, // canvas + 0x2d: 0x2c09, // accesskey + 0x2e: 0x18607, // picture + 0x30: 0x3a403, // pre + 0x31: 0x5d04, // samp + 0x34: 0x40902, // dt + 0x36: 0x30505, // sizes + 0x37: 0x1a908, // nomodule + 0x39: 0x2a504, // html + 0x3a: 0x31203, // src + 0x3c: 0x28d06, // dialog + 0x3e: 0x3ab03, // rel + 0x40: 0x1a06, // footer + 0x43: 0x30d0b, // imagesrcset + 0x46: 0x3c906, // strike + 0x47: 0x2e805, // video + 0x4a: 0x2d702, // hr + 0x4b: 0x36108, // itemtype + 0x4c: 0x1c804, // link + 0x4e: 0x6702, // rp + 0x4f: 0x2801, // i + 0x50: 0xee06, // applet + 0x51: 0x17f08, // colgroup + 0x53: 0x1905, // tfoot + 0x54: 0xc06, // accept + 0x57: 0x14d04, // cite + 0x58: 0x1307, // charset + 0x59: 0x17604, // code + 0x5a: 0x4e04, // name + 0x5b: 0x2bf04, // text + 0x5d: 0x31f05, // allow + 0x5e: 0x36c04, // head + 0x61: 0x16605, // embed + 0x62: 0x3fa03, // svg + 0x63: 0x3fd05, // vocab + 0x64: 0x5e0f, // amp-boilerplate + 0x65: 0x38805, // meter + 0x67: 0x3320d, // popovertarget + 0x69: 0x3b04, // main + 0x6a: 0x41006, // poster + 0x6c: 0x1c302, // dl + 0x6e: 0x26006, // action + 0x71: 0x17807, // default + 0x72: 0x3d05, // input + 0x74: 0xb202, // is + 0x75: 0x27506, // method + 0x79: 0x7903, // xmp + 0x7a: 0x101, 
// b + 0x7b: 0x21f06, // inlist + 0x7c: 0x25c0a, // formaction + 0x7e: 0x39708, // multiple + 0x80: 0x1f203, // del + 0x81: 0x26a07, // enctype + 0x83: 0x27b0e, // formnovalidate + 0x84: 0x2404, // font + 0x85: 0x11d06, // typeof + 0x86: 0x2d704, // href + 0x87: 0x13a0a, // blockquote + 0x88: 0x4807, // rowspan + 0x89: 0x3aa07, // preload + 0x8a: 0x12f03, // big + 0x8c: 0x38d09, // minlength + 0x90: 0x1bb05, // table + 0x91: 0x39f05, // muted + 0x92: 0x3e407, // marquee + 0x94: 0x3507, // acronym + 0x96: 0x40d04, // wrap + 0x98: 0x14b02, // br + 0x9a: 0x10b02, // rt + 0x9e: 0xa602, // tr + 0x9f: 0x35709, // itemscope + 0xa4: 0xad04, // data + 0xa5: 0x29706, // target + 0xac: 0x11908, // datatype + 0xae: 0xb304, // step + 0xb3: 0x1cc08, // controls + 0xb5: 0xbe05, // value + 0xb6: 0x2ba09, // plaintext + 0xb7: 0x1da09, // draggable + 0xc0: 0x8a05, // async + 0xc2: 0x2a804, // loop + 0xc3: 0x28904, // time + 0xc6: 0x2004, // base + 0xc7: 0x23f06, // script + 0xce: 0x32103, // low + 0xcf: 0x3dc03, // bdo + 0xd1: 0x18b03, // rev + 0xd2: 0x1e306, // coords + 0xd3: 0x8403, // dir + 0xd4: 0x2f608, // menuitem + 0xd6: 0x22507, // article + 0xd8: 0x11d04, // type + 0xda: 0x18b08, // reversed + 0xdb: 0x23707, // caption + 0xdc: 0x35d04, // open + 0xdd: 0x1701, // s + 0xe0: 0x2705, // title + 0xe1: 0x9508, // decoding + 0xe3: 0xc0e, // accept-charset + 0xe4: 0x15a05, // class + 0xe5: 0x3f203, // sup + 0xe6: 0xdb08, // template + 0xe7: 0x16c08, // noframes + 0xe8: 0x3ad07, // loading + 0xeb: 0xa106, // object + 0xee: 0x3da03, // sub + 0xef: 0x2fa06, // itemid + 0xf0: 0x30904, // slot + 0xf1: 0x8604, // ruby + 0xf4: 0x1f102, // td + 0xf5: 0x11208, // required + 0xf9: 0x16e05, // frame + 0xfc: 0x2102, // as + 0xfd: 0x37e09, // maxlength + 0xff: 0x31f0f, // allowfullscreen + 0x101: 0x2160b, // crossorigin + 0x102: 0xed03, // map + 0x104: 0x6e02, // dd + 0x105: 0x705, // tbody + 0x107: 0x2d502, // h1 + 0x109: 0x5004, // meta + 0x10a: 0x1, // a + 0x10c: 0x16a03, // dfn + 0x10e: 
0x34507, // itemref + 0x110: 0x38d03, // min + 0x111: 0x28508, // datetime + 0x114: 0xdc02, // em + 0x115: 0x7f04, // mark + 0x119: 0x2d708, // hreflang + 0x11a: 0x3de07, // optimum + 0x11c: 0x1220d, // fetchpriority + 0x11d: 0x39502, // h3 + 0x11e: 0x5905, // xmlns + 0x11f: 0x19903, // div + 0x121: 0x40403, // wbr + 0x128: 0x2bf08, // textarea + 0x129: 0x3d505, // style + 0x12a: 0x3f406, // portal + 0x12b: 0x1b107, // content + 0x12d: 0x19b03, // var + 0x12f: 0x40004, // abbr + 0x133: 0x31803, // img + 0x138: 0x35b05, // scope + 0x13b: 0x30504, // size + 0x13e: 0x29f02, // h6 + 0x141: 0xfc08, // autoplay + 0x142: 0x2c408, // readonly + 0x143: 0x3d09, // inputmode + 0x144: 0x19208, // disabled + 0x145: 0x4804, // rows + 0x149: 0x3490e, // referrerpolicy + 0x14a: 0x1c405, // label + 0x14b: 0x36c06, // header + 0x14c: 0xad08, // datalist + 0x14d: 0xe309, // autofocus + 0x14e: 0xb607, // pattern + 0x150: 0x2cc06, // hidden + 0x151: 0x5, // about + 0x152: 0x14406, // button + 0x154: 0x2f206, // iframe + 0x155: 0x1d308, // selected + 0x156: 0x3c207, // srclang + 0x15b: 0xb102, // li + 0x15c: 0x22305, // start + 0x15d: 0x7307, // sandbox + 0x15e: 0x31b03, // ins + 0x162: 0x1a307, // colspan + 0x163: 0x1ff0e, // shadowrootmode + 0x164: 0xb104, // list + 0x166: 0x5208, // tabindex + 0x169: 0x3b407, // profile + 0x16b: 0x301, // u + 0x16c: 0x23d08, // noscript + 0x16e: 0x2660b, // formenctype + 0x16f: 0x16e08, // frameset + 0x170: 0x28b05, // media + 0x174: 0x2008, // basefont + 0x176: 0x2b104, // ping + 0x177: 0x3bb08, // progress + 0x178: 0x206, // output + 0x17a: 0x36904, // math + 0x17b: 0x2930a, // formtarget + 0x17d: 0x7b05, // param + 0x180: 0x13208, // blocking + 0x185: 0x37707, // checked + 0x188: 0x32e05, // ismap + 0x18a: 0x38602, // h2 + 0x18c: 0x2df0a, // http-equiv + 0x18e: 0x10d07, // capture + 0x190: 0x2db04, // lang + 0x195: 0x27f0a, // novalidate + 0x197: 0x1a304, // cols + 0x198: 0x804, // body + 0x199: 0xbc03, // nav + 0x19a: 0x1b10f, // contenteditable 
+ 0x19b: 0x15e07, // section + 0x19e: 0x14e08, // itemprop + 0x19f: 0x15208, // property + 0x1a1: 0xc30e, // autocapitalize + 0x1a4: 0x3eb07, // summary + 0x1a6: 0x1000b, // playsinline + 0x1a9: 0x8303, // bdi + 0x1ab: 0x29d02, // h5 + 0x1ac: 0x6d07, // address + 0x1b0: 0x2d204, // high + 0x1b1: 0x33207, // popover + 0x1b3: 0xa605, // track + 0x1b6: 0x8203, // kbd + 0x1b7: 0x11401, // q + 0x1b8: 0x2340a, // figcaption + 0x1b9: 0x30005, // image + 0x1ba: 0x25c04, // form + 0x1c1: 0x3000a, // imagesizes + 0x1c4: 0x1e818, // shadowrootdelegatesfocus + 0x1c5: 0x2ec06, // option + 0x1c6: 0x9d05, // audio + 0x1c8: 0x40102, // bb + 0x1c9: 0x16407, // noembed + 0x1cc: 0x10805, // inert + 0x1cf: 0x1d306, // select + 0x1d1: 0x22c08, // fieldset + 0x1d2: 0x31206, // srcset + 0x1d3: 0x2f604, // menu + 0x1d5: 0x36c07, // headers + 0x1dd: 0x1be06, // legend + 0x1de: 0xaa04, // kind + 0x1e0: 0x24908, // resource + 0x1e2: 0xf309, // translate + 0x1e4: 0x2aa08, // optgroup + 0x1e6: 0x33213, // popovertargetaction + 0x1e7: 0x2710a, // formmethod + 0x1e9: 0xb802, // tt + 0x1ea: 0x36b05, // thead + 0x1eb: 0x17c02, // ul + 0x1ee: 0x3a406, // prefix + 0x1ef: 0x19e05, // color + 0x1f1: 0x21105, // shape + 0x1f3: 0x25c03, // for + 0x1f4: 0x2500c, // enterkeyhint + 0x1f7: 0xea06, // usemap + 0x1f8: 0x1f02, // rb + 0x1fa: 0x20b07, // details + 0x1fb: 0x10b03, // rtc + 0x1fc: 0x9205, // aside + 0x1fe: 0x24506, // figure +} diff --git a/vendor/github.com/tdewolff/minify/v2/html/html.go b/vendor/github.com/tdewolff/minify/v2/html/html.go new file mode 100644 index 0000000..ab50ff6 --- /dev/null +++ b/vendor/github.com/tdewolff/minify/v2/html/html.go @@ -0,0 +1,531 @@ +// Package html minifies HTML5 following the specifications at http://www.w3.org/TR/html5/syntax.html. 
+package html + +import ( + "bytes" + "fmt" + "io" + + "github.com/tdewolff/minify/v2" + "github.com/tdewolff/parse/v2" + "github.com/tdewolff/parse/v2/buffer" + "github.com/tdewolff/parse/v2/html" +) + +var ( + gtBytes = []byte(">") + isBytes = []byte("=") + spaceBytes = []byte(" ") + doctypeBytes = []byte("") + jsMimeBytes = []byte("application/javascript") + cssMimeBytes = []byte("text/css") + htmlMimeBytes = []byte("text/html") + svgMimeBytes = []byte("image/svg+xml") + formMimeBytes = []byte("application/x-www-form-urlencoded") + mathMimeBytes = []byte("application/mathml+xml") + dataSchemeBytes = []byte("data:") + jsSchemeBytes = []byte("javascript:") + httpBytes = []byte("http") + radioBytes = []byte("radio") + onBytes = []byte("on") + textBytes = []byte("text") + noneBytes = []byte("none") + submitBytes = []byte("submit") + allBytes = []byte("all") + rectBytes = []byte("rect") + dataBytes = []byte("data") + getBytes = []byte("get") + autoBytes = []byte("auto") + oneBytes = []byte("one") + inlineParams = map[string]string{"inline": "1"} +) + +//////////////////////////////////////////////////////////////// + +var GoTemplateDelims = [2]string{"{{", "}}"} +var HandlebarsTemplateDelims = [2]string{"{{", "}}"} +var MustacheTemplateDelims = [2]string{"{{", "}}"} +var EJSTemplateDelims = [2]string{"<%", "%>"} +var ASPTemplateDelims = [2]string{"<%", "%>"} +var PHPTemplateDelims = [2]string{""} + +// Minifier is an HTML minifier. +type Minifier struct { + KeepComments bool + KeepConditionalComments bool + KeepSpecialComments bool + KeepDefaultAttrVals bool + KeepDocumentTags bool + KeepEndTags bool + KeepQuotes bool + KeepWhitespace bool + TemplateDelims [2]string +} + +// Minify minifies HTML data, it reads from r and writes to w. +func Minify(m *minify.M, w io.Writer, r io.Reader, params map[string]string) error { + return (&Minifier{}).Minify(m, w, r, params) +} + +// Minify minifies HTML data, it reads from r and writes to w. 
+func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]string) error { + var rawTagHash Hash + var rawTagMediatype []byte + + if o.KeepConditionalComments { + fmt.Println("DEPRECATED: KeepConditionalComments is replaced by KeepSpecialComments") + o.KeepSpecialComments = true + } + + omitSpace := true // if true the next leading space is omitted + inPre := false + + attrMinifyBuffer := buffer.NewWriter(make([]byte, 0, 64)) + attrByteBuffer := make([]byte, 0, 64) + + z := parse.NewInput(r) + defer z.Restore() + + l := html.NewTemplateLexer(z, o.TemplateDelims) + tb := NewTokenBuffer(z, l) + for { + t := *tb.Shift() + switch t.TokenType { + case html.ErrorToken: + if _, err := w.Write(nil); err != nil { + return err + } + if l.Err() == io.EOF { + return nil + } + return l.Err() + case html.DoctypeToken: + w.Write(doctypeBytes) + case html.CommentToken: + if o.KeepComments { + w.Write(t.Data) + } else if o.KeepSpecialComments { + if 6 < len(t.Text) && (bytes.HasPrefix(t.Text, []byte("[if ")) || bytes.HasSuffix(t.Text, []byte("[endif]")) || bytes.HasSuffix(t.Text, []byte("[endif]--"))) { + // [if ...] 
is always 7 or more characters, [endif] is only encountered for downlevel-revealed + // see https://msdn.microsoft.com/en-us/library/ms537512(v=vs.85).aspx#syntax + if bytes.HasPrefix(t.Data, []byte("")) { // downlevel-hidden + begin := bytes.IndexByte(t.Data, '>') + 1 + end := len(t.Data) - len("") + if begin < end { + w.Write(t.Data[:begin]) + if err := o.Minify(m, w, buffer.NewReader(t.Data[begin:end]), nil); err != nil { + return minify.UpdateErrorPosition(err, z, t.Offset) + } + w.Write(t.Data[end:]) + } else { + w.Write(t.Data) // malformed + } + } else { + w.Write(t.Data) // downlevel-revealed or short downlevel-hidden + } + } else if 1 < len(t.Text) && t.Text[0] == '#' { + // SSI tags + w.Write(t.Data) + } + } + case html.SvgToken: + if err := m.MinifyMimetype(svgMimeBytes, w, buffer.NewReader(t.Data), nil); err != nil { + if err != minify.ErrNotExist { + return minify.UpdateErrorPosition(err, z, t.Offset) + } + w.Write(t.Data) + } + omitSpace = false + case html.MathToken: + if err := m.MinifyMimetype(mathMimeBytes, w, buffer.NewReader(t.Data), nil); err != nil { + if err != minify.ErrNotExist { + return minify.UpdateErrorPosition(err, z, t.Offset) + } + w.Write(t.Data) + } + omitSpace = false + case html.TextToken: + if t.HasTemplate { + w.Write(t.Data) + } else if rawTagHash != 0 { + if rawTagHash == Style || rawTagHash == Script || rawTagHash == Iframe { + var mimetype []byte + var params map[string]string + if rawTagHash == Iframe { + mimetype = htmlMimeBytes + } else if 0 < len(rawTagMediatype) { + mimetype, params = parse.Mediatype(rawTagMediatype) + } else if rawTagHash == Script { + mimetype = jsMimeBytes + } else if rawTagHash == Style { + mimetype = cssMimeBytes + } + if err := m.MinifyMimetype(mimetype, w, buffer.NewReader(t.Data), params); err != nil { + if err != minify.ErrNotExist { + return minify.UpdateErrorPosition(err, z, t.Offset) + } + w.Write(t.Data) + } + } else { + w.Write(t.Data) + } + } else if inPre { + w.Write(t.Data) + } else { 
+ t.Data = parse.ReplaceMultipleWhitespaceAndEntities(t.Data, EntitiesMap, TextRevEntitiesMap) + + // whitespace removal; trim left + if omitSpace && parse.IsWhitespace(t.Data[0]) { + t.Data = t.Data[1:] + } + + // whitespace removal; trim right + omitSpace = false + if len(t.Data) == 0 { + omitSpace = true + } else if parse.IsWhitespace(t.Data[len(t.Data)-1]) { + omitSpace = true + i := 0 + for { + next := tb.Peek(i) + // trim if EOF, text token with leading whitespace or block token + if next.TokenType == html.ErrorToken { + t.Data = t.Data[:len(t.Data)-1] + omitSpace = false + break + } else if next.TokenType == html.TextToken && !parse.IsAllWhitespace(next.Data) { + // stop looking when text encountered + break + } else if next.TokenType == html.StartTagToken || next.TokenType == html.EndTagToken { + if o.KeepWhitespace { + break + } + // remove when followed by a block tag + if next.Traits&blockTag != 0 { + t.Data = t.Data[:len(t.Data)-1] + omitSpace = false + break + } else if next.TokenType == html.StartTagToken { + break + } + } + i++ + } + } + + w.Write(t.Data) + } + case html.StartTagToken, html.EndTagToken: + rawTagHash = 0 + hasAttributes := false + if t.TokenType == html.StartTagToken { + if next := tb.Peek(0); next.TokenType == html.AttributeToken { + hasAttributes = true + } + if t.Traits&rawTag != 0 { + // ignore empty script and style tags + if !hasAttributes && (t.Hash == Script || t.Hash == Style) { + if next := tb.Peek(1); next.TokenType == html.EndTagToken { + tb.Shift() + tb.Shift() + break + } + } + rawTagHash = t.Hash + rawTagMediatype = nil + + // do not minify content of